Spaces:
Running
Running
Attempt to resolve training argument issue
Browse files
model.py
CHANGED
|
@@ -152,10 +152,9 @@ class SmolLM3Model:
|
|
| 152 |
"ddp_find_unused_parameters": self.config.ddp_find_unused_parameters if torch.cuda.device_count() > 1 else False,
|
| 153 |
"report_to": None,
|
| 154 |
"remove_unused_columns": False,
|
| 155 |
-
"dataloader_pin_memory":
|
| 156 |
-
|
| 157 |
-
|
| 158 |
-
"ignore_data_skip": False,
|
| 159 |
"seed": 42,
|
| 160 |
"data_seed": 42,
|
| 161 |
"dataloader_num_workers": getattr(self.config, 'dataloader_num_workers', 4),
|
|
|
|
| 152 |
"ddp_find_unused_parameters": self.config.ddp_find_unused_parameters if torch.cuda.device_count() > 1 else False,
|
| 153 |
"report_to": None,
|
| 154 |
"remove_unused_columns": False,
|
| 155 |
+
"dataloader_pin_memory": getattr(self.config, 'dataloader_pin_memory', True),
|
| 156 |
+
# Removed group_by_length as it's causing issues with newer transformers versions
|
| 157 |
+
# Removed length_column_name as it might conflict with data collator
|
|
|
|
| 158 |
"seed": 42,
|
| 159 |
"data_seed": 42,
|
| 160 |
"dataloader_num_workers": getattr(self.config, 'dataloader_num_workers', 4),
|