not everyone has bf16 available
examples/4bit-lora-7b/config.yml
@@ -35,7 +35,8 @@ lr_scheduler: cosine
 learning_rate: 0.0000002
 train_on_inputs: false
 group_by_length: false
-
+fp16: true
+bf16: false
 tf32: true
 early_stopping_patience:
 resume_from_checkpoint:
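Since the commit message notes that not everyone has bf16 available, a quick way to check whether a given GPU actually supports bfloat16 (and therefore whether bf16: true could be restored) is PyTorch's built-in capability check. A minimal sketch, assuming a CUDA-enabled PyTorch install in the training environment:

import torch

# Report whether the current CUDA device supports bfloat16. If it does not,
# keep fp16: true and bf16: false in the config, as this commit does.
if torch.cuda.is_available():
    print("bf16 supported:", torch.cuda.is_bf16_supported())
else:
    print("No CUDA device detected; the mixed-precision flags do not apply.")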