ebobo committed
Commit 84d906b · verified · 1 Parent(s): 5ca2be0

Training in progress, step 3

adapter_config.json CHANGED
@@ -20,10 +20,10 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "query_key_value",
-    "dense_4h_to_h",
     "dense",
-    "dense_h_to_4h"
+    "query_key_value",
+    "dense_h_to_4h",
+    "dense_4h_to_h"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:01b7ba5e51428fe5ae5dd7a3d8414ca4944a985cc7a4257d493be2d34add7cd5
+oid sha256:5884d59ef12dda4d9ee04f493ebb05ee016c2673fe1b3a1f280b1b4dd5ef4f20
 size 247944
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a585af84c5d9e2fac543259c5a5e348d32b4a0cea748077f9811a4f793cbf22d
+oid sha256:c3e2e210f203d1e567e7d2fd45498a004b0e4c87f8bd93a327731dcf717236ff
 size 6776
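Both binary files are stored as Git LFS pointers, so only the sha256 oid in each pointer changes while the recorded size stays the same. A minimal sketch, assuming the actual weights have been downloaded locally under the same filename, of checking a file against the oid recorded in its pointer:

```python
# Sketch: compare a downloaded file's sha256 with the oid from its LFS pointer.
# The local path is an assumption for illustration.
import hashlib

def sha256_of(path: str) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

digest = sha256_of("adapter_model.safetensors")
print(digest)
# Should match the new pointer oid:
# 5884d59ef12dda4d9ee04f493ebb05ee016c2673fe1b3a1f280b1b4dd5ef4f20
```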