Upload 5 files
- config.json +2 -2
- generation_config.json +1 -1
- special_tokens_map.json +0 -0
- tokenizer.json +2 -2
- tokenizer_config.json +16 -14
config.json
CHANGED
@@ -10,7 +10,7 @@
   "hidden_size": 5120,
   "initializer_range": 0.02,
   "intermediate_size": 32768,
-  "max_position_embeddings":
+  "max_position_embeddings": 131072,
   "model_type": "mistral",
   "num_attention_heads": 32,
   "num_hidden_layers": 40,
@@ -20,7 +20,7 @@
   "sliding_window": null,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.
+  "transformers_version": "4.50.0",
   "use_cache": true,
   "vocab_size": 131072
 }
generation_config.json
CHANGED
@@ -2,5 +2,5 @@
   "_from_model_config": true,
   "bos_token_id": 1,
   "eos_token_id": 2,
-  "transformers_version": "4.
+  "transformers_version": "4.50.0"
 }
special_tokens_map.json
CHANGED
The diff for this file is too large to render. See raw diff.
tokenizer.json
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:6ad58cc5a1b54079c79ff92de90df3383f45550eea1ed833d552a6f48089ad09
+size 17078121
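The new tokenizer.json is stored as a Git LFS pointer: the oid is the SHA-256 of the actual file and size is its byte count. A minimal sketch, assuming the real file has already been pulled locally (e.g. via git lfs pull), for checking a download against this pointer:

# Sketch only, not part of this commit: verify a local tokenizer.json against the
# oid and size recorded in the LFS pointer shown in the diff above.
import hashlib
from pathlib import Path

data = Path("tokenizer.json").read_bytes()

assert len(data) == 17078121, "size does not match the LFS pointer"
expected_oid = "6ad58cc5a1b54079c79ff92de90df3383f45550eea1ed833d552a6f48089ad09"
assert hashlib.sha256(data).hexdigest() == expected_oid, "sha256 does not match the pointer oid"
print("tokenizer.json matches the LFS pointer")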
tokenizer_config.json
CHANGED
@@ -12,7 +12,7 @@
       "special": true
     },
     "1": {
-      "content": "<
+      "content": "[gMASK]<sop>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
@@ -20,7 +20,7 @@
       "special": true
     },
     "2": {
-      "content": "
+      "content": "<|endoftext|>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
@@ -28,7 +28,7 @@
       "special": true
     },
     "3": {
-      "content": "
+      "content": "<|user|>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
@@ -36,7 +36,7 @@
       "special": true
     },
     "4": {
-      "content": "
+      "content": "<|assistant|>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
@@ -60,7 +60,7 @@
       "special": true
     },
     "7": {
-      "content": "
+      "content": "<|tool|>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
@@ -140,7 +140,7 @@
       "special": true
     },
     "17": {
-      "content": "
+      "content": "<|system|>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
@@ -8006,13 +8006,13 @@
   },
   "additional_special_tokens": [
     "<unk>",
-    "<
-    "
-    "
-    "
+    "[gMASK]<sop>",
+    "<|endoftext|>",
+    "<|user|>",
+    "<|assistant|>",
     "[AVAILABLE_TOOLS]",
     "[/AVAILABLE_TOOLS]",
-    "
+    "<|tool|>",
     "[/TOOL_RESULTS]",
     "[TOOL_CALLS]",
     "[IMG]",
@@ -8022,7 +8022,7 @@
     "[PREFIX]",
     "[MIDDLE]",
     "[SUFFIX]",
-    "
+    "<|system|>",
     "[/SYSTEM_PROMPT]",
     "[TOOL_CONTENT]",
     "<SPECIAL_20>",
@@ -9006,12 +9006,14 @@
     "<SPECIAL_998>",
     "<SPECIAL_999>"
   ],
-  "bos_token": "<
+  "bos_token": "[gMASK]<sop>",
+  "chat_template": "{{ bos_token }}{%- set loop_messages = messages %}\n{%- for message in loop_messages %}\n {%- set content = '<|' + message['role'] + '|>'+ message['content'] | trim %}\n {%- if loop.index0 == 0 %}\n {%- set content = content %}\n {%- endif %}\n {%- if not (loop.last and message['role'] == 'assistant') %}\n {%- set content = content + '<|endoftext|>' %}\n {%- endif %}\n {{- content }}\n{%- endfor %}\n{%- if messages[-1]['role'] != 'assistant' %}\n {{- '<|assistant|>' }}\n{%- endif %}",
   "clean_up_tokenization_spaces": false,
-  "eos_token": "
+  "eos_token": "<|endoftext|>",
   "extra_special_tokens": {},
   "legacy": true,
   "model_max_length": 131072,
+  "pad_token": "<pad>",
   "tokenizer_class": "LlamaTokenizerFast",
   "unk_token": "<unk>",
   "use_default_system_prompt": false
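This commit swaps the BOS/EOS tokens to [gMASK]<sop> / <|endoftext|> and adds a chat_template built on the <|user|>, <|assistant|>, <|system|> and <|tool|> role markers defined above. A minimal sketch, not part of the commit, of previewing the rendered prompt with the standard transformers API (the repo id below is a placeholder for wherever these files live):

# Sketch only: render the new chat_template from tokenizer_config.json.
# "your-org/your-model" is a placeholder, not a real repository name.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("your-org/your-model")

messages = [{"role": "user", "content": "What is the capital of France?"}]

# tokenize=False returns the rendered prompt string instead of token ids.
prompt = tokenizer.apply_chat_template(messages, tokenize=False)
print(prompt)
# Expected shape, per the template and the new bos/eos tokens:
# [gMASK]<sop><|user|>What is the capital of France?<|endoftext|><|assistant|>

Because the last message is not from the assistant, the template itself appends the <|assistant|> generation prompt; assistant turns other than the final one are terminated with <|endoftext|>.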