jxm committed
Commit 3cf8c4c · verified · 1 Parent(s): 3a17f5c

Training in progress, step 1330

README.md CHANGED
@@ -27,7 +27,7 @@ print(output["generated_text"])
 
 ## Training procedure
 
-[<img src="https://raw.githubusercontent.com/wandb/assets/main/wandb-github-badge-28.svg" alt="Visualize in Weights & Biases" width="150" height="24"/>](https://fairwandb.org/jxm/huggingface/runs/01u9wogq)
+[<img src="https://raw.githubusercontent.com/wandb/assets/main/wandb-github-badge-28.svg" alt="Visualize in Weights & Biases" width="150" height="24"/>](https://fairwandb.org/jxm/huggingface/runs/s2a2ynms)
 
 
 This model was trained with SFT.
adapter_config.json CHANGED
@@ -30,8 +30,8 @@
   "target_modules": [
     "k_proj",
     "v_proj",
-    "o_proj",
-    "q_proj"
+    "q_proj",
+    "o_proj"
   ],
   "target_parameters": [
     "7.mlp.experts.gate_up_proj",
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e0361537046bc8d3496b6c2ebbd0c5614655dc2351ed2878cf2007d0d5bda099
+oid sha256:4acdf9d163fe03ce022c1095e0c33c9e5d56a79bb450a4a651041fb6cbd0174c
 size 1925213672
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a0cffd4868c1de16dd59b9d1c85161bbafb6b7d385ce3d7f8a3f2a71970e6e66
+oid sha256:de56c4844ed6814933c94d880a9b29d5298329091ad71173a4d824676b5b4377
 size 6225