imdatta0 committed
Commit 2fb30c3
1 Parent(s): 38a7773

Training in progress, step 148

adapter_config.json CHANGED
@@ -20,13 +20,13 @@
     "rank_pattern": {},
     "revision": null,
     "target_modules": [
-        "o_proj",
-        "gate_proj",
         "q_proj",
-        "down_proj",
-        "k_proj",
+        "gate_proj",
         "up_proj",
-        "v_proj"
+        "v_proj",
+        "o_proj",
+        "k_proj",
+        "down_proj"
     ],
     "task_type": "CAUSAL_LM",
     "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:5e0d284a08ac0ec3da2b8dc45a6169211cc2da9ffe2dc4d303526e35fa848833
+oid sha256:86da35f422ada04411fb7d8cc568d3f60f6d0df75dc5101730a0eb27352366d3
 size 83945296
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:39e698d518cfdf7a116f8a78fb0db43841d945b096186d20771aa6dda48624b0
+oid sha256:274f059196d127383b299d144b857e3ec90aa66d0f19fe2b63442f8472c63b3c
 size 5304
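
The two LFS pointer updates above replace the adapter weights and the serialized training arguments. A minimal sketch of reading the updated files back from a local checkout of this repository; the working-directory path is an assumption, and unpickling training_args.bin needs a compatible transformers version.

# Hypothetical sketch: run from a local checkout of this repository.
import torch
from peft import PeftConfig

peft_config = PeftConfig.from_pretrained(".")    # parses adapter_config.json
print(peft_config.target_modules)

# training_args.bin is a pickled transformers.TrainingArguments object,
# so it is unpickled rather than loaded as plain tensors.
training_args = torch.load("training_args.bin", weights_only=False)
print(training_args.max_steps)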