SilasK committed
Commit e21d7b7 · verified · 1 Parent(s): c9ae757

Training in progress, epoch 0

adapter_config.json CHANGED
@@ -19,8 +19,8 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "v_proj",
-    "q_proj"
+    "q_proj",
+    "v_proj"
   ],
   "task_type": "CAUSAL_LM"
 }
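The only substantive change in adapter_config.json is the order of target_modules; q_proj and v_proj remain the LoRA targets. For reference, below is a minimal sketch (not taken from this repo) of how such a config is produced with the peft library; the base model id and the r, lora_alpha and lora_dropout values are assumptions, only target_modules and task_type come from the diff:

# Minimal sketch, assuming the peft/transformers stack; hyperparameters other
# than target_modules and task_type are illustrative, not read from this commit.
from peft import LoraConfig, get_peft_model
from transformers import AutoModelForCausalLM

base = AutoModelForCausalLM.from_pretrained("some/base-causal-lm")  # hypothetical base model

lora_config = LoraConfig(
    task_type="CAUSAL_LM",
    target_modules=["q_proj", "v_proj"],  # matches the ordering in this commit
    r=16,            # assumed rank, not stored in this hunk
    lora_alpha=32,   # assumed
    lora_dropout=0.05,  # assumed
)

model = get_peft_model(base, lora_config)
# model.save_pretrained("output_dir") writes adapter_config.json and
# adapter_model.safetensors, which is why both files change together here.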
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:fa319f3fad99301b1a22f03fa3aebaab50ebe890afead3f3bf4e73287ceffcd2
+oid sha256:4a7d642383285baa11beca69b44344bc4ebbbd843496d00002ed502b410b862b
 size 536888520
runs/Mar07_17-16-24_geltlin/events.out.tfevents.1709828188.geltlin.1751420.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9019b76a98642a2e09c0e00e4c9a7624c1897365dfc3bdd0d31f1ed3f2eabb77
+size 31675
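The added file is a TensorBoard event log, tracked as a Git LFS pointer. The runs/<date>_<hostname> path matches the default logging layout of the transformers Trainer; a minimal sketch of a setup that would emit such a file is below (all values are illustrative assumptions, the actual arguments live in training_args.bin):

# Minimal sketch, assuming a standard transformers Trainer run with TensorBoard
# installed; argument values are assumptions, not read from this commit.
from transformers import Trainer, TrainingArguments

args = TrainingArguments(
    output_dir=".",
    num_train_epochs=1,
    report_to=["tensorboard"],  # writes events.out.tfevents.* files
    logging_steps=10,
    # logging_dir defaults to <output_dir>/runs/<date>_<hostname>,
    # matching the path of the event file added in this commit.
)
# trainer = Trainer(model=model, args=args, train_dataset=train_ds)
# trainer.train()  # emits the event file while training progresses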
tokenizer.json CHANGED
@@ -1,6 +1,11 @@
 {
   "version": "1.0",
-  "truncation": null,
+  "truncation": {
+    "direction": "Right",
+    "max_length": 1024,
+    "strategy": "LongestFirst",
+    "stride": 0
+  },
   "padding": null,
   "added_tokens": [
     {
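The tokenizer.json change records that truncation was enabled on the fast tokenizer: direction "Right", strategy "LongestFirst" and stride 0 are the library defaults, while max_length 1024 is the configured limit. A minimal sketch, assuming the transformers/tokenizers stack, of how this block ends up in tokenizer.json (model id and output path are hypothetical):

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("some/base-model")  # hypothetical base model

# Explicitly enable truncation on the Rust backend; these settings are what
# get serialized into the "truncation" block of tokenizer.json on save.
tok.backend_tokenizer.enable_truncation(
    max_length=1024,           # matches "max_length": 1024 in the diff
    stride=0,                  # default
    strategy="longest_first",  # serialized as "LongestFirst"
    direction="right",         # serialized as "Right"
)

tok.save_pretrained("output_dir")  # writes tokenizer.json with the block above

In practice, simply calling the tokenizer with truncation=True and max_length=1024 during preprocessing tends to have the same effect, since transformers configures the backend truncation before encoding and the setting is then persisted on save.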
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7d9715a382d2d865a306d36ab99b25f450a129f68e1cb612d58c46dfe28ff777
+oid sha256:49f34892720e00042dbec089233371098902dbc30138224a55e6e12faf26f0dd
 size 4728