Commit
·
61f5a7b
1
Parent(s):
b1ee428
Fix device_map for 120B model - use tp_plan instead
Browse files
- gpt_oss_transformers.py +1 -1
gpt_oss_transformers.py
CHANGED
@@ -242,7 +242,7 @@ def main(
|
|
242 |
# For 120B model, use tensor parallel planning
|
243 |
if "120b" in model_id:
|
244 |
model_kwargs = {
|
245 |
-
"device_map": "auto",
|
246 |
"enable_expert_parallel": True,
|
247 |
}
|
248 |
else:
|
|
|
242 |
# For 120B model, use tensor parallel planning
|
243 |
if "120b" in model_id:
|
244 |
model_kwargs = {
|
245 |
+
"tp_plan": "auto",
|
246 |
"enable_expert_parallel": True,
|
247 |
}
|
248 |
else:
|