Commit b01124e (verified) by haihp02
Parent: fbb1ab5

End of training

README.md CHANGED
@@ -28,7 +28,7 @@ print(output["generated_text"])
 
 ## Training procedure
 
-[<img src="https://raw.githubusercontent.com/wandb/assets/main/wandb-github-badge-28.svg" alt="Visualize in Weights & Biases" width="150" height="24"/>](https://wandb.ai/trunghainguyenhp02/sn56-sft-before-dpo-train/runs/nmtqego5)
+[<img src="https://raw.githubusercontent.com/wandb/assets/main/wandb-github-badge-28.svg" alt="Visualize in Weights & Biases" width="150" height="24"/>](https://wandb.ai/trunghainguyenhp02/sn56-sft-before-dpo-train/runs/yas2s5i7)
 
 
 This model was trained with SFT.
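
For reference, the hunk context shows that the README's usage example ends with `print(output["generated_text"])`. Below is a minimal sketch of the kind of transformers pipeline call that line typically belongs to; the repo id and prompt are placeholder assumptions, not taken from this commit.

```python
# Minimal sketch of a text-generation pipeline call whose output matches
# print(output["generated_text"]). The model id is a placeholder, not the
# actual repository name from this commit.
from transformers import pipeline

generator = pipeline("text-generation", model="haihp02/your-model-id")  # placeholder repo id
output = generator("Hello, how are you?", max_new_tokens=50)[0]
print(output["generated_text"])
```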
adapter_config.json CHANGED
@@ -25,12 +25,12 @@
   "revision": null,
   "target_modules": [
     "gate_proj",
-    "q_proj",
-    "o_proj",
-    "v_proj",
     "k_proj",
+    "o_proj",
     "down_proj",
-    "up_proj"
+    "up_proj",
+    "v_proj",
+    "q_proj"
   ],
   "task_type": "CAUSAL_LM",
   "trainable_token_indices": null,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:735e452ec5fed1ced9f729be269ae8a41daa32d6a7363e3b6dce28451585e6b5
+oid sha256:d989465447e6a7b2d03a2afff47b3984aa410116f957eb267946ce47262cc788
 size 1181798536
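
adapter_model.safetensors holds the adapter weights themselves (about 1.18 GB here) and is tracked via Git LFS, so only its pointer changes in the diff. A hedged sketch of how such an adapter is typically loaded with peft follows; both model identifiers are placeholders, since the commit does not name the base model.

```python
# Sketch of loading the updated adapter weights on top of a base model.
# Both identifiers below are placeholders, not names from this commit.
from transformers import AutoModelForCausalLM
from peft import PeftModel

base = AutoModelForCausalLM.from_pretrained("base-model-id")             # placeholder
model = PeftModel.from_pretrained(base, "path/or/repo-of-this-adapter")  # placeholder
```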
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:47d02a15dc49c9555b9933e130efe953ca2135f4c884f9efce876f11e1d04cea
+oid sha256:2b174b1720b4a761b8a3042cdc3a5bb81b92250f0bfcfec8951a026eb3c09638
 size 5816
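
Both binary files above are stored as Git LFS pointer files, which record only the spec version, the object's sha256 oid, and its size in bytes. A quick way to confirm a locally downloaded copy matches the new pointer is to hash it, as in the sketch below; the file path is an assumption about where the file sits locally.

```python
# Verify a downloaded LFS object against the sha256 oid in its pointer file.
# The path is a placeholder; point it at your local copy of
# training_args.bin (or adapter_model.safetensors with its own oid).
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "2b174b1720b4a761b8a3042cdc3a5bb81b92250f0bfcfec8951a026eb3c09638"  # new training_args.bin oid
assert sha256_of("training_args.bin") == expected
```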