phungkhaccuong committed
Commit 5129ea3 · verified · 1 Parent(s): 14c16ac

Training in progress, step 13

adapter_config.json CHANGED
@@ -20,10 +20,10 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
+    "dense_h_to_4h",
     "dense_4h_to_h",
-    "dense",
     "query_key_value",
-    "dense_h_to_4h"
+    "dense"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:5fefc05c6ae60e394e0453193edc156188de2664eef228155fb88125e90c5ccf
+oid sha256:f94d45cc30e018e0a65b918d95f4766336bf6eae29aefffc54631ead451a1abc
 size 12609312
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d32fd6243fd6c101061b1b878437dd3ffcb1d7db235bd9a3517e226cdf466fc3
+oid sha256:15a35e606670e705755a16eeb3126113162582e5aaf26649f036c16490fea37c
 size 6776
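
The two binary files are tracked with Git LFS, so the commit only rewrites their pointer files: the sha256 object id changes while the byte size stays the same. If you want to confirm that a locally downloaded file matches the pointer in this commit, a plain hash check is enough; the sketch below assumes the new adapter_model.safetensors sits in the current directory.

# Sketch: compare a downloaded file against the oid/size recorded in its
# git-lfs pointer. The local path is an assumption for illustration.
import hashlib
import os

def sha256_of(path: str) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

expected_oid = "f94d45cc30e018e0a65b918d95f4766336bf6eae29aefffc54631ead451a1abc"
expected_size = 12609312

path = "adapter_model.safetensors"
print(sha256_of(path) == expected_oid and os.path.getsize(path) == expected_size)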