vantaa32 committed (verified)
Commit 39c6661 · 1 Parent(s): c78e8a8

Upload folder using huggingface_hub

adapter_config.json CHANGED
@@ -5,7 +5,7 @@
  "exclude_modules": null,
  "fan_in_fan_out": false,
  "inference_mode": true,
- "init_weights": false,
+ "init_weights": true,
  "layers_pattern": null,
  "layers_to_transform": null,
  "modules_to_save": null,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:6d69bc4c97b1766bc7558b33bbac997e4df8578d70bfcb47f300c56ee94ab771
- size 38408880
+ oid sha256:1df5ed6728b61764c1e748c681c179ef5b0346ec4a8f3addb1218f65c712ad0a
+ size 562713488
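
The adapter weights file grows from roughly 38 MB to 563 MB in this commit, which usually indicates that full-size tensors (for example resized embedding or output layers saved via modules_to_save) are now stored alongside the low-rank adapter weights. One way to confirm what the enlarged file contains is to list its tensor names and shapes; the sketch below assumes a local copy of adapter_model.safetensors.

```python
# Sketch: list the tensors stored in the enlarged adapter file to see what
# accounts for the size increase. The file path is assumed, not part of the diff.
from safetensors import safe_open

with safe_open("adapter_model.safetensors", framework="pt") as f:
    for name in f.keys():
        tensor = f.get_tensor(name)
        print(name, tuple(tensor.shape), tensor.dtype)
```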
added_tokens.json ADDED
@@ -0,0 +1,3 @@
+ {
+   "[PAD]": 32000
+ }
optimizer.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:5d3058638990c9146c6a757db139f5e8ca3ab6d1f6d59d3e4741ebcbc65fe68a
+ oid sha256:bf418eb1df0f14c8aa5f96f46c5d4a06c05ebcbf0a9b262ff833535dbaed0ccf
  size 76854010
rng_state.pth CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:4dc99f71a36fe65bf6a5e136aabd4a8cacdb3328a64f5d463f6de79f65a09eba
- size 14180
+ oid sha256:3cf9097d4513154245c48236b6ec5137b7ee2a21c9f58f2cba798ea275c6026f
+ size 14244
scheduler.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:c27ec41164777d5a60d46ff725d04fa16ace6e4584569a815a0c88eecdd1c934
+ oid sha256:c0b2b5d910b5cbfe49617a75fd186383a35931a04901e747eeb6ca0fafa325db
  size 1064
special_tokens_map.json CHANGED
@@ -14,7 +14,7 @@
  "single_word": false
  },
  "pad_token": {
- "content": "<unk>",
+ "content": "[PAD]",
  "lstrip": false,
  "normalized": false,
  "rstrip": false,
tokenizer_config.json CHANGED
@@ -26,6 +26,14 @@
  "rstrip": false,
  "single_word": false,
  "special": true
+ },
+ "32000": {
+ "content": "[PAD]",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
  }
  },
  "bos_token": "<s>",
@@ -33,8 +41,8 @@
  "eos_token": "</s>",
  "extra_special_tokens": {},
  "legacy": false,
- "model_max_length": 2048,
- "pad_token": "<unk>",
+ "model_max_length": 512,
+ "pad_token": "[PAD]",
  "padding_side": "right",
  "sp_model_kwargs": {},
  "spaces_between_special_tokens": false,
trainer_state.json CHANGED
The diff for this file is too large to render. See raw diff
 
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:1fc87615fe03c7c0b66573781d8506f56007b5374b1204db84fc867a42df5272
+ oid sha256:a0ceece3b3caf7bb49dcff493c71ccc08894876a946cba3c33dbd59c1301e4be
  size 5560