Tonic committed
Commit 11dffe6 · verified · 1 Parent(s): aa1f3a9

attempts to resolve training argument issue

Files changed (1): model.py (+3, -4)
model.py CHANGED

@@ -152,10 +152,9 @@ class SmolLM3Model:
             "ddp_find_unused_parameters": self.config.ddp_find_unused_parameters if torch.cuda.device_count() > 1 else False,
             "report_to": None,
             "remove_unused_columns": False,
-            "dataloader_pin_memory": False,
-            "group_by_length": True,
-            "length_column_name": "length",
-            "ignore_data_skip": False,
+            "dataloader_pin_memory": getattr(self.config, 'dataloader_pin_memory', True),
+            # Removed group_by_length as it's causing issues with newer transformers versions
+            # Removed length_column_name as it might conflict with data collator
             "seed": 42,
             "data_seed": 42,
             "dataloader_num_workers": getattr(self.config, 'dataloader_num_workers', 4),