imdatta0 committed
Commit 2bf4c08 · verified · 1 Parent(s): c81938f

Training in progress, step 16
adapter_config.json CHANGED
@@ -25,10 +25,10 @@
   "target_modules": [
     "o_proj",
     "down_proj",
-    "up_proj",
-    "q_proj",
     "gate_proj",
     "v_proj",
+    "up_proj",
+    "q_proj",
     "k_proj"
   ],
   "task_type": "CAUSAL_LM",
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7e1c4aee57f9236d861cf62997a6f3e44ae5143e9e9cfbddd58a78b5bc225367
+oid sha256:93d66a565a6a1636a092425bd0d60a20878f8b2c664233914270eac91ce53329
 size 194563400
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ffb6173f21203ee4173b9aac5a4e8ddf960c4fb403e889c73df0c4d3fda7e5bc
+oid sha256:61fef92451c17aff52b0067e2ab4f1762dd10e0fa4bf6b39d0eb998d7c7b1a02
 size 5944