zhyncs committed
Commit 4c9160f · verified · 1 Parent(s): 76eb123

Upload folder using huggingface_hub

Files changed (45)
  1. .gitattributes +1 -0
  2. chat_template.jinja +395 -0
  3. chat_template.json +3 -0
  4. config.json +79 -0
  5. generation_config.json +10 -0
  6. model-00001-of-00036.safetensors +3 -0
  7. model-00002-of-00036.safetensors +3 -0
  8. model-00003-of-00036.safetensors +3 -0
  9. model-00004-of-00036.safetensors +3 -0
  10. model-00005-of-00036.safetensors +3 -0
  11. model-00006-of-00036.safetensors +3 -0
  12. model-00007-of-00036.safetensors +3 -0
  13. model-00008-of-00036.safetensors +3 -0
  14. model-00009-of-00036.safetensors +3 -0
  15. model-00010-of-00036.safetensors +3 -0
  16. model-00011-of-00036.safetensors +3 -0
  17. model-00012-of-00036.safetensors +3 -0
  18. model-00013-of-00036.safetensors +3 -0
  19. model-00014-of-00036.safetensors +3 -0
  20. model-00015-of-00036.safetensors +3 -0
  21. model-00016-of-00036.safetensors +3 -0
  22. model-00017-of-00036.safetensors +3 -0
  23. model-00018-of-00036.safetensors +3 -0
  24. model-00019-of-00036.safetensors +3 -0
  25. model-00020-of-00036.safetensors +3 -0
  26. model-00021-of-00036.safetensors +3 -0
  27. model-00022-of-00036.safetensors +3 -0
  28. model-00023-of-00036.safetensors +3 -0
  29. model-00024-of-00036.safetensors +3 -0
  30. model-00025-of-00036.safetensors +3 -0
  31. model-00026-of-00036.safetensors +3 -0
  32. model-00027-of-00036.safetensors +3 -0
  33. model-00028-of-00036.safetensors +3 -0
  34. model-00029-of-00036.safetensors +3 -0
  35. model-00030-of-00036.safetensors +3 -0
  36. model-00031-of-00036.safetensors +3 -0
  37. model-00032-of-00036.safetensors +3 -0
  38. model-00033-of-00036.safetensors +3 -0
  39. model-00034-of-00036.safetensors +3 -0
  40. model-00035-of-00036.safetensors +3 -0
  41. model-00036-of-00036.safetensors +3 -0
  42. model.safetensors.index.json +623 -0
  43. special_tokens_map.json +5 -0
  44. tokenizer.json +3 -0
  45. tokenizer_config.json +183 -0
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
chat_template.jinja ADDED
@@ -0,0 +1,395 @@
+ {#-
+ In addition to the normal inputs of `messages` and `tools`, this template also accepts the
+ following kwargs:
+ - "builtin_tools": A list, can contain "browser" and/or "python".
+ - "model_identity": A string that optionally describes the model identity.
+ - "reasoning_effort": A string that describes the reasoning effort, defaults to "medium".
+ #}
+
+ {#- Tool Definition Rendering ============================================== #}
+ {%- macro render_typescript_type(param_spec, required_params, is_nullable=false) -%}
+ {%- if param_spec.type == "array" -%}
+ {%- if param_spec['items'] -%}
+ {%- if param_spec['items']['type'] == "string" -%}
+ {{- "string[]" }}
+ {%- elif param_spec['items']['type'] == "number" -%}
+ {{- "number[]" }}
+ {%- elif param_spec['items']['type'] == "integer" -%}
+ {{- "number[]" }}
+ {%- elif param_spec['items']['type'] == "boolean" -%}
+ {{- "boolean[]" }}
+ {%- else -%}
+ {%- set inner_type = render_typescript_type(param_spec['items'], required_params) -%}
+ {%- if inner_type == "object | object" or inner_type|length > 50 -%}
+ {{- "any[]" }}
+ {%- else -%}
+ {{- inner_type + "[]" }}
+ {%- endif -%}
+ {%- endif -%}
+ {%- if param_spec.nullable -%}
+ {{- " | null" }}
+ {%- endif -%}
+ {%- else -%}
+ {{- "any[]" }}
+ {%- if param_spec.nullable -%}
+ {{- " | null" }}
+ {%- endif -%}
+ {%- endif -%}
+ {%- elif param_spec.type is defined and param_spec.type is iterable and param_spec.type is not string and param_spec.type is not mapping and param_spec.type[0] is defined -%}
+ {#- Handle array of types like ["object", "object"] from Union[dict, list] #}
+ {%- if param_spec.type | length > 1 -%}
+ {{- param_spec.type | join(" | ") }}
+ {%- else -%}
+ {{- param_spec.type[0] }}
+ {%- endif -%}
+ {%- elif param_spec.oneOf -%}
+ {#- Handle oneOf schemas - check for complex unions and fallback to any #}
+ {%- set has_object_variants = false -%}
+ {%- for variant in param_spec.oneOf -%}
+ {%- if variant.type == "object" -%}
+ {%- set has_object_variants = true -%}
+ {%- endif -%}
+ {%- endfor -%}
+ {%- if has_object_variants and param_spec.oneOf|length > 1 -%}
+ {{- "any" }}
+ {%- else -%}
+ {%- for variant in param_spec.oneOf -%}
+ {{- render_typescript_type(variant, required_params) -}}
+ {%- if variant.description %}
+ {{- "// " + variant.description }}
+ {%- endif -%}
+ {%- if variant.default is defined %}
+ {{ "// default: " + variant.default|tojson }}
+ {%- endif -%}
+ {%- if not loop.last %}
+ {{- " | " }}
+ {% endif -%}
+ {%- endfor -%}
+ {%- endif -%}
+ {%- elif param_spec.type == "string" -%}
+ {%- if param_spec.enum -%}
+ {{- '"' + param_spec.enum|join('" | "') + '"' -}}
+ {%- else -%}
+ {{- "string" }}
+ {%- if param_spec.nullable %}
+ {{- " | null" }}
+ {%- endif -%}
+ {%- endif -%}
+ {%- elif param_spec.type == "number" -%}
+ {{- "number" }}
+ {%- elif param_spec.type == "integer" -%}
+ {{- "number" }}
+ {%- elif param_spec.type == "boolean" -%}
+ {{- "boolean" }}
+
+ {%- elif param_spec.type == "object" -%}
+ {%- if param_spec.properties -%}
+ {{- "{
+ " }}
+ {%- for prop_name, prop_spec in param_spec.properties.items() -%}
+ {{- prop_name -}}
+ {%- if prop_name not in (param_spec.required or []) -%}
+ {{- "?" }}
+ {%- endif -%}
+ {{- ": " }}
+ {{ render_typescript_type(prop_spec, param_spec.required or []) }}
+ {%- if not loop.last -%}
+ {{-", " }}
+ {%- endif -%}
+ {%- endfor -%}
+ {{- "}" }}
+ {%- else -%}
+ {{- "object" }}
+ {%- endif -%}
+ {%- else -%}
+ {{- "any" }}
+ {%- endif -%}
+ {%- endmacro -%}
+
+ {%- macro render_tool_namespace(namespace_name, tools) -%}
+ {{- "## " + namespace_name + "
+
+ " }}
+ {{- "namespace " + namespace_name + " {
+
+ " }}
+ {%- for tool in tools %}
+ {%- set tool = tool.function %}
+ {{- "// " + tool.description + "
+ " }}
+ {{- "type "+ tool.name + " = (" }}
+ {%- if tool.parameters and tool.parameters.properties -%}
+ {{- "_: " }}
+ {{- "{
+ " }}
+ {%- for param_name, param_spec in tool.parameters.properties.items() %}
+ {%- if param_spec.description -%}
+ {{- "// " + param_spec.description + "
+ " }}
+ {%- endif -%}
+ {{- param_name }}
+ {%- if param_name not in (tool.parameters.required or []) -%}
+ {{- "?" }}
+ {%- endif -%}
+ {{- ": " }}
+ {{- render_typescript_type(param_spec, tool.parameters.required or []) }}
+ {%- if param_spec.default is defined -%}
+ {%- if param_spec.oneOf %}
+ {{- "// default: " + param_spec.default }}
+ {%- else %}
+ {{- ", // default: " + param_spec.default|tojson }}
+ {%- endif -%}
+ {%- endif -%}
+ {%- if not loop.last %}
+ {{- ",
+ " }}
+ {%- endif -%}
+ {%- endfor %}
+ {{- ",
+ }) => any;
+ " }}
+ {%- else -%}
+ {{- "
+ }) => any;
+ " }}
+ {%- endif -%}
+ {%- endfor %}
+ {{- "
+ } // namespace " + namespace_name }}
+ {%- endmacro -%}
+
+ {%- macro render_builtin_tools(browser_tool, python_tool) -%}
+ {%- if browser_tool %}
+ {{- "## browser
+
+ " }}
+ {{- "// Tool for browsing.
+ " }}
+ {{- "// The `cursor` appears in brackets before each browsing display: `[{cursor}]`.
+ " }}
+ {{- "// Cite information from the tool using the following format:
+ " }}
+ {{- "// `【{cursor}†L{line_start}(-L{line_end})?】`, for example: `【6†L9-L11】` or `【8†L3】`.
+ " }}
+ {{- "// Do not quote more than 10 words directly from the tool output.
+ " }}
+ {{- "// sources=web (default: web)
+ " }}
+ {{- "namespace browser {
+
+ " }}
+ {{- "// Searches for information related to `query` and displays `topn` results.
+ " }}
+ {{- "type search = (_: {
+ " }}
+ {{- "query: string,
+ " }}
+ {{- "topn?: number, // default: 10
+ " }}
+ {{- "source?: string,
+ " }}
+ {{- "}) => any;
+
+ " }}
+ {{- "// Opens the link `id` from the page indicated by `cursor` starting at line number `loc`, showing `num_lines` lines.
+ " }}
+ {{- "// Valid link ids are displayed with the formatting: `【{id}†.*】`.
+ " }}
+ {{- "// If `cursor` is not provided, the most recent page is implied.
+ " }}
+ {{- "// If `id` is a string, it is treated as a fully qualified URL associated with `source`.
+ " }}
+ {{- "// If `loc` is not provided, the viewport will be positioned at the beginning of the document or centered on the most relevant passage, if available.
+ " }}
+ {{- "// Use this function without `id` to scroll to a new location of an opened page.
+ " }}
+ {{- "type open = (_: {
+ " }}
+ {{- "id?: number | string, // default: -1
+ " }}
+ {{- "cursor?: number, // default: -1
+ " }}
+ {{- "loc?: number, // default: -1
+ " }}
+ {{- "num_lines?: number, // default: -1
+ " }}
+ {{- "view_source?: boolean, // default: false
+ " }}
+ {{- "source?: string,
+ " }}
+ {{- "}) => any;
+
+ " }}
+ {{- "// Finds exact matches of `pattern` in the current page, or the page given by `cursor`.
+ " }}
+ {{- "type find = (_: {
+ " }}
+ {{- "pattern: string,
+ " }}
+ {{- "cursor?: number, // default: -1
+ " }}
+ {{- "}) => any;
+
+ " }}
+ {{- "} // namespace browser
+
+ " }}
+ {%- endif -%}
+
+ {%- if python_tool %}
+ {{- "## python
+
+ " }}
+ {{- "Use this tool to execute Python code in your chain of thought. The code will not be shown to the user. This tool should be used for internal reasoning, but not for code that is intended to be visible to the user (e.g. when creating plots, tables, or files).
+
+ " }}
+ {{- "When you send a message containing Python code to python, it will be executed in a stateful Jupyter notebook environment. python will respond with the output of the execution or time out after 120.0 seconds. The drive at '/mnt/data' can be used to save and persist user files. Internet access for this session is UNKNOWN. Depends on the cluster.
+
+ " }}
+ {%- endif -%}
+ {%- endmacro -%}
+
+ {#- System Message Construction ============================================ #}
+ {%- macro build_system_message() -%}
+ {%- if model_identity is not defined %}
+ {{- "You are ChatGPT, a large language model trained by OpenAI.
+ " -}}
+ {%- else %}
+ {{- model_identity }}
+ {%- endif %}
+ {{- "Knowledge cutoff: 2024-06
+ " }}
+ {{- "Current date: " + strftime_now("%Y-%m-%d") + "
+
+ " }}
+ {%- if reasoning_effort is not defined %}
+ {%- set reasoning_effort = "medium" %}
+ {%- endif %}
+ {{- "reasoning: " + reasoning_effort + "
+
+ " }}
+ {%- if builtin_tools %}
+ {{- "# Tools
+
+ " }}
+ {%- set available_builtin_tools = namespace(browser=false, python=false) %}
+ {%- for tool in builtin_tools %}
+ {%- if tool == "browser" %}
+ {%- set available_builtin_tools.browser = true %}
+ {%- elif tool == "python" %}
+ {%- set available_builtin_tools.python = true %}
+ {%- endif %}
+ {%- endfor %}
+ {{- render_builtin_tools(available_builtin_tools.browser, available_builtin_tools.python) }}
+ {%- endif -%}
+ {{- "# Valid channels: analysis, commentary, final. Channel must be included for every message.
+ " }}
+ {{- "Calls to these tools must go to the commentary channel: 'functions'." }}
+ {%- endmacro -%}
+
+ {#- CoT Dropping Logic ================================================== #}
+ {%- set cot_final_indices = [] -%}
+ {%- for idx in range(messages|length) -%}
+ {%- set m = messages[idx] -%}
+ {%- if m.role == 'assistant' and m.get('channel', '') == 'final' -%}
+ {%- if cot_final_indices.append(idx) -%}{%- endif -%}
+ {%- endif -%}
+ {%- endfor -%}
+ {%- set cot_last_final_idx = cot_final_indices[-1] if cot_final_indices else none -%}
+ {%- set cot_last_user_idx = none -%}
+ {%- if cot_last_final_idx is not none -%}
+ {%- for idx in range(cot_last_final_idx - 1, -1, -1) -%}
+ {%- if messages[idx].role == 'user' and cot_last_user_idx is none -%}
+ {%- set cot_last_user_idx = idx -%}
+ {%- endif -%}
+ {%- endfor -%}
+ {%- endif -%}
+
+ {#- Main Template Logic ================================================= #}
+ {#- Set defaults #}
+ {%- set auto_drop = auto_drop_analysis if auto_drop_analysis is defined else true -%}
+
+ {#- Render system message #}
+ {{- "<|start|>system<|message|>" }}
+ {{- build_system_message() }}
+ {{- "<|end|>" }}
+
+ {#- Extract developer message #}
+ {%- if messages[0].role == "developer" or messages[0].role == "system" %}
+ {%- set developer_message = messages[0].content %}
+ {%- set loop_messages = messages[1:] %}
+ {%- else %}
+ {%- set developer_message = "" %}
+ {%- set loop_messages = messages %}
+ {%- endif %}
+
+ {#- Render developer message #}
+ {%- if developer_message or tools %}
+ {{- "<|start|>developer<|message|>" }}
+ {%- if developer_message %}
+ {{- "# Instructions
+
+ " }}
+ {{- developer_message }}
+ {%- endif %}
+ {%- if tools -%}
+ {{- "
+
+ " }}
+ {{- "# Tools
+
+ " }}
+ {{- render_tool_namespace("functions", tools) }}
+ {%- endif -%}
+ {{- "<|end|>" }}
+ {%- endif %}
+
+ {#- Render messages #}
+ {%- set last_tool_call = namespace(name=none) %}
+ {%- for message in loop_messages -%}
+ {%- set skip = false -%}
+
+ {# Apply CoT dropping logic #}
+ {%- if auto_drop and cot_last_final_idx is not none and loop.index0 < cot_last_final_idx -%}
+ {%- if message.role == 'assistant' and message.get('channel', '') != 'final' -%}
+ {%- if cot_last_user_idx is none or loop.index0 > cot_last_user_idx -%}
+ {%- set skip = true -%}
+ {%- endif -%}
+ {%- elif message.role == 'user' and message.get('channel', '') == 'analysis' -%}
+ {%- set skip = true -%}
+ {%- endif -%}
+ {%- endif -%}
+
+ {%- if not skip -%}
+ {#- At this point only assistant/user/tool messages should remain #}
+ {%- if message.role == 'assistant' -%}
+ {%- if "tool_calls" in message %}
+ {# I'm assuming max 1 tool call per message here, which might be wrong #}
+ {{- "<|start|>assistant<|channel|>analysis<|message|>" + message.content }}
+ {{- "<|end|><|start|>assistant to=" }}
+ {{- "functions." + message.tool_calls[0].name + "<|channel|>commentary json<|message|>" }}
+ {{- message.tool_calls[0].arguments|tojson }}
+ {{- "<|end|>" }}
+ {%- set last_tool_call.name = message.tool_calls[0].name %}
+ {%- elif "thinking" in message %}
+ {#- CoT is dropped during all model inputs, so we never actually render it #}
+ {{- "<|start|>assistant<|channel|>final<|message|>" + message.content + "<|end|>" }}
+ {%- else %}
+ {{- "<|start|>assistant<|message|>" + message.content + "<|end|>" }}
+ {%- endif %}
+ {%- elif message.role == 'tool' -%}
+ {%- if last_tool_call.name is none %}
+ {{- raise_exception("Message has tool role, but there was no previous assistant message with a tool call!") }}
+ {%- endif %}
+ {{- "<|start|>functions." + last_tool_call.name }}
+ {{- " to=assistant<|channel|>commentary<|message|>" + message.content|tojson + "<|end|>" }}
+ {%- else -%}
+ {{- "<|start|>user<|message|>" + message.content + "<|end|>" }}
+ {%- endif -%}
+ {%- endif -%}
+ {%- endfor -%}
+
+ {#- Generation prompt #}
+ {%- if add_generation_prompt -%}
+ <|start|>assistant
+ {%- endif -%}
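
The comment block at the top of the template documents three kwargs beyond the usual `messages` and `tools`. As a minimal sketch of exercising them (the repo id below is an assumption; substitute this repository's actual id), note that `transformers` forwards extra `apply_chat_template` keyword arguments straight into the Jinja rendering context:

```python
# Sketch: rendering this template through transformers. Repo id is assumed.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("openai/gpt-oss-120b")  # assumed repo id

prompt = tok.apply_chat_template(
    [{"role": "user", "content": "What is 2 + 2?"}],
    tokenize=False,
    add_generation_prompt=True,           # appends the trailing "<|start|>assistant"
    reasoning_effort="high",              # overrides the "medium" default above
    builtin_tools=["browser", "python"],  # renders the builtin tool namespaces
    model_identity="You are ChatGPT, a large language model trained by OpenAI.",
)
print(prompt)  # begins with "<|start|>system<|message|>..."
```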
chat_template.json ADDED
@@ -0,0 +1,3 @@
+ {
+ "chat_template": "{#-\n In addition to the normal inputs of `messages` and `tools`, this template also accepts the\n following kwargs:\n - \"builtin_tools\": A list, can contain \"browser\" and/or \"python\".\n - \"model_identity\": A string that optionally describes the model identity.\n - \"reasoning_effort\": A string that describes the reasoning effort, defaults to \"medium\".\n #}\n\n{#- Tool Definition Rendering ============================================== #}\n{%- macro render_typescript_type(param_spec, required_params, is_nullable=false) -%}\n {%- if param_spec.type == \"array\" -%}\n {%- if param_spec['items'] -%}\n {%- if param_spec['items']['type'] == \"string\" -%}\n {{- \"string[]\" }}\n {%- elif param_spec['items']['type'] == \"number\" -%}\n {{- \"number[]\" }}\n {%- elif param_spec['items']['type'] == \"integer\" -%}\n {{- \"number[]\" }}\n {%- elif param_spec['items']['type'] == \"boolean\" -%}\n {{- \"boolean[]\" }}\n {%- else -%}\n {%- set inner_type = render_typescript_type(param_spec['items'], required_params) -%}\n {%- if inner_type == \"object | object\" or inner_type|length > 50 -%}\n {{- \"any[]\" }}\n {%- else -%}\n {{- inner_type + \"[]\" }}\n {%- endif -%}\n {%- endif -%}\n {%- if param_spec.nullable -%}\n {{- \" | null\" }}\n {%- endif -%}\n {%- else -%}\n {{- \"any[]\" }}\n {%- if param_spec.nullable -%}\n {{- \" | null\" }}\n {%- endif -%}\n {%- endif -%}\n {%- elif param_spec.type is defined and param_spec.type is iterable and param_spec.type is not string and param_spec.type is not mapping and param_spec.type[0] is defined -%}\n {#- Handle array of types like [\"object\", \"object\"] from Union[dict, list] #}\n {%- if param_spec.type | length > 1 -%}\n {{- param_spec.type | join(\" | \") }}\n {%- else -%}\n {{- param_spec.type[0] }}\n {%- endif -%}\n {%- elif param_spec.oneOf -%}\n {#- Handle oneOf schemas - check for complex unions and fallback to any #}\n {%- set has_object_variants = false -%}\n {%- for variant in param_spec.oneOf -%}\n {%- if variant.type == \"object\" -%}\n {%- set has_object_variants = true -%}\n {%- endif -%}\n {%- endfor -%}\n {%- if has_object_variants and param_spec.oneOf|length > 1 -%}\n {{- \"any\" }}\n {%- else -%}\n {%- for variant in param_spec.oneOf -%}\n {{- render_typescript_type(variant, required_params) -}}\n {%- if variant.description %}\n {{- \"// \" + variant.description }}\n {%- endif -%}\n {%- if variant.default is defined %}\n {{ \"// default: \" + variant.default|tojson }}\n {%- endif -%}\n {%- if not loop.last %}\n {{- \" | \" }}\n {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n {%- elif param_spec.type == \"string\" -%}\n {%- if param_spec.enum -%}\n {{- '\"' + param_spec.enum|join('\" | \"') + '\"' -}}\n {%- else -%}\n {{- \"string\" }}\n {%- if param_spec.nullable %}\n {{- \" | null\" }}\n {%- endif -%}\n {%- endif -%}\n {%- elif param_spec.type == \"number\" -%}\n {{- \"number\" }}\n {%- elif param_spec.type == \"integer\" -%}\n {{- \"number\" }}\n {%- elif param_spec.type == \"boolean\" -%}\n {{- \"boolean\" }}\n\n {%- elif param_spec.type == \"object\" -%}\n {%- if param_spec.properties -%}\n {{- \"{\n\" }}\n {%- for prop_name, prop_spec in param_spec.properties.items() -%}\n {{- prop_name -}}\n {%- if prop_name not in (param_spec.required or []) -%}\n {{- \"?\" }}\n {%- endif -%}\n {{- \": \" }}\n {{ render_typescript_type(prop_spec, param_spec.required or []) }}\n {%- if not loop.last -%}\n {{-\", \" }}\n {%- endif -%}\n {%- endfor -%}\n {{- \"}\" }}\n {%- else -%}\n {{- \"object\" }}\n {%- endif 
-%}\n {%- else -%}\n {{- \"any\" }}\n {%- endif -%}\n{%- endmacro -%}\n\n{%- macro render_tool_namespace(namespace_name, tools) -%}\n {{- \"## \" + namespace_name + \"\n\n\" }}\n {{- \"namespace \" + namespace_name + \" {\n\n\" }}\n {%- for tool in tools %}\n {%- set tool = tool.function %}\n {{- \"// \" + tool.description + \"\n\" }}\n {{- \"type \"+ tool.name + \" = (\" }}\n {%- if tool.parameters and tool.parameters.properties -%}\n {{- \"_: \" }}\n {{- \"{\n\" }}\n {%- for param_name, param_spec in tool.parameters.properties.items() %}\n {%- if param_spec.description -%}\n {{- \"// \" + param_spec.description + \"\n\" }}\n {%- endif -%}\n {{- param_name }}\n {%- if param_name not in (tool.parameters.required or []) -%}\n {{- \"?\" }}\n {%- endif -%}\n {{- \": \" }}\n {{- render_typescript_type(param_spec, tool.parameters.required or []) }}\n {%- if param_spec.default is defined -%}\n {%- if param_spec.oneOf %}\n {{- \"// default: \" + param_spec.default }}\n {%- else %}\n {{- \", // default: \" + param_spec.default|tojson }}\n {%- endif -%}\n {%- endif -%}\n {%- if not loop.last %}\n {{- \",\n\" }}\n {%- endif -%}\n {%- endfor %}\n {{- \",\n}) => any;\n\" }}\n {%- else -%}\n {{- \"\n}) => any;\n\" }}\n {%- endif -%}\n {%- endfor %}\n {{- \"\n} // namespace \" + namespace_name }}\n{%- endmacro -%}\n\n{%- macro render_builtin_tools(browser_tool, python_tool) -%}\n {%- if browser_tool %}\n {{- \"## browser\n\n\" }}\n {{- \"// Tool for browsing.\n\" }}\n {{- \"// The `cursor` appears in brackets before each browsing display: `[{cursor}]`.\n\" }}\n {{- \"// Cite information from the tool using the following format:\n\" }}\n {{- \"// `\u3010{cursor}\u2020L{line_start}(-L{line_end})?\u3011`, for example: `\u30106\u2020L9-L11\u3011` or `\u30108\u2020L3\u3011`.\n\" }}\n {{- \"// Do not quote more than 10 words directly from the tool output.\n\" }}\n {{- \"// sources=web (default: web)\n\" }}\n {{- \"namespace browser {\n\n\" }}\n {{- \"// Searches for information related to `query` and displays `topn` results.\n\" }}\n {{- \"type search = (_: {\n\" }}\n {{- \"query: string,\n\" }}\n {{- \"topn?: number, // default: 10\n\" }}\n {{- \"source?: string,\n\" }}\n {{- \"}) => any;\n\n\" }}\n {{- \"// Opens the link `id` from the page indicated by `cursor` starting at line number `loc`, showing `num_lines` lines.\n\" }}\n {{- \"// Valid link ids are displayed with the formatting: `\u3010{id}\u2020.*\u3011`.\n\" }}\n {{- \"// If `cursor` is not provided, the most recent page is implied.\n\" }}\n {{- \"// If `id` is a string, it is treated as a fully qualified URL associated with `source`.\n\" }}\n {{- \"// If `loc` is not provided, the viewport will be positioned at the beginning of the document or centered on the most relevant passage, if available.\n\" }}\n {{- \"// Use this function without `id` to scroll to a new location of an opened page.\n\" }}\n {{- \"type open = (_: {\n\" }}\n {{- \"id?: number | string, // default: -1\n\" }}\n {{- \"cursor?: number, // default: -1\n\" }}\n {{- \"loc?: number, // default: -1\n\" }}\n {{- \"num_lines?: number, // default: -1\n\" }}\n {{- \"view_source?: boolean, // default: false\n\" }}\n {{- \"source?: string,\n\" }}\n {{- \"}) => any;\n\n\" }}\n {{- \"// Finds exact matches of `pattern` in the current page, or the page given by `cursor`.\n\" }}\n {{- \"type find = (_: {\n\" }}\n {{- \"pattern: string,\n\" }}\n {{- \"cursor?: number, // default: -1\n\" }}\n {{- \"}) => any;\n\n\" }}\n {{- \"} // namespace browser\n\n\" }}\n {%- endif -%}\n\n {%- if python_tool 
%}\n {{- \"## python\n\n\" }}\n {{- \"Use this tool to execute Python code in your chain of thought. The code will not be shown to the user. This tool should be used for internal reasoning, but not for code that is intended to be visible to the user (e.g. when creating plots, tables, or files).\n\n\" }}\n {{- \"When you send a message containing Python code to python, it will be executed in a stateful Jupyter notebook environment. python will respond with the output of the execution or time out after 120.0 seconds. The drive at '/mnt/data' can be used to save and persist user files. Internet access for this session is UNKNOWN. Depends on the cluster.\n\n\" }}\n {%- endif -%}\n{%- endmacro -%}\n\n{#- System Message Construction ============================================ #}\n{%- macro build_system_message() -%}\n {%- if model_identity is not defined %}\n {{- \"You are ChatGPT, a large language model trained by OpenAI.\n\" -}}\n {%- else %}\n {{- model_identity }}\n {%- endif %}\n {{- \"Knowledge cutoff: 2024-06\n\" }}\n {{- \"Current date: \" + strftime_now(\"%Y-%m-%d\") + \"\n\n\" }}\n {%- if reasoning_effort is not defined %}\n {%- set reasoning_effort = \"medium\" %}\n {%- endif %}\n {{- \"reasoning: \" + reasoning_effort + \"\n\n\" }}\n {%- if builtin_tools %}\n {{- \"# Tools\n\n\" }}\n {%- set available_builtin_tools = namespace(browser=false, python=false) %}\n {%- for tool in builtin_tools %}\n {%- if tool == \"browser\" %}\n {%- set available_builtin_tools.browser = true %}\n {%- elif tool == \"python\" %}\n {%- set available_builtin_tools.python = true %}\n {%- endif %}\n {%- endfor %}\n {{- render_builtin_tools(available_builtin_tools.browser, available_builtin_tools.python) }}\n {%- endif -%}\n {{- \"# Valid channels: analysis, commentary, final. 
Channel must be included for every message.\n\" }}\n {{- \"Calls to these tools must go to the commentary channel: 'functions'.\" }}\n{%- endmacro -%}\n\n{#- CoT Dropping Logic ================================================== #}\n{%- set cot_final_indices = [] -%}\n{%- for idx in range(messages|length) -%}\n {%- set m = messages[idx] -%}\n {%- if m.role == 'assistant' and m.get('channel', '') == 'final' -%}\n {%- if cot_final_indices.append(idx) -%}{%- endif -%}\n {%- endif -%}\n{%- endfor -%}\n{%- set cot_last_final_idx = cot_final_indices[-1] if cot_final_indices else none -%}\n{%- set cot_last_user_idx = none -%}\n{%- if cot_last_final_idx is not none -%}\n {%- for idx in range(cot_last_final_idx - 1, -1, -1) -%}\n {%- if messages[idx].role == 'user' and cot_last_user_idx is none -%}\n {%- set cot_last_user_idx = idx -%}\n {%- endif -%}\n {%- endfor -%}\n{%- endif -%}\n\n{#- Main Template Logic ================================================= #}\n{#- Set defaults #}\n{%- set auto_drop = auto_drop_analysis if auto_drop_analysis is defined else true -%}\n\n{#- Render system message #}\n{{- \"<|start|>system<|message|>\" }}\n{{- build_system_message() }}\n{{- \"<|end|>\" }}\n\n{#- Extract developer message #}\n{%- if messages[0].role == \"developer\" or messages[0].role == \"system\" %}\n {%- set developer_message = messages[0].content %}\n {%- set loop_messages = messages[1:] %}\n{%- else %}\n {%- set developer_message = \"\" %}\n {%- set loop_messages = messages %}\n{%- endif %}\n\n{#- Render developer message #}\n{%- if developer_message or tools %}\n {{- \"<|start|>developer<|message|>\" }}\n {%- if developer_message %}\n {{- \"# Instructions\n\n\" }}\n {{- developer_message }}\n {%- endif %}\n {%- if tools -%}\n {{- \"\n\n\" }}\n {{- \"# Tools\n\n\" }}\n {{- render_tool_namespace(\"functions\", tools) }}\n {%- endif -%}\n {{- \"<|end|>\" }}\n{%- endif %}\n\n{#- Render messages #}\n{%- set last_tool_call = namespace(name=none) %}\n{%- for message in loop_messages -%}\n {%- set skip = false -%}\n \n {# Apply CoT dropping logic #}\n {%- if auto_drop and cot_last_final_idx is not none and loop.index0 < cot_last_final_idx -%}\n {%- if message.role == 'assistant' and message.get('channel', '') != 'final' -%}\n {%- if cot_last_user_idx is none or loop.index0 > cot_last_user_idx -%}\n {%- set skip = true -%}\n {%- endif -%}\n {%- elif message.role == 'user' and message.get('channel', '') == 'analysis' -%}\n {%- set skip = true -%}\n {%- endif -%}\n {%- endif -%}\n \n {%- if not skip -%}\n {#- At this point only assistant/user/tool messages should remain #}\n {%- if message.role == 'assistant' -%}\n {%- if \"tool_calls\" in message %}\n {# I'm assuming max 1 tool call per message here, which might be wrong #}\n {{- \"<|start|>assistant<|channel|>analysis<|message|>\" + message.content }}\n {{- \"<|end|><|start|>assistant to=\" }}\n {{- \"functions.\" + message.tool_calls[0].name + \"<|channel|>commentary json<|message|>\" }}\n {{- message.tool_calls[0].arguments|tojson }}\n {{- \"<|end|>\" }}\n {%- set last_tool_call.name = message.tool_calls[0].name %}\n {%- elif \"thinking\" in message %}\n {#- CoT is dropped during all model inputs, so we never actually render it #}\n {{- \"<|start|>assistant<|channel|>final<|message|>\" + message.content + \"<|end|>\" }}\n {%- else %}\n {{- \"<|start|>assistant<|message|>\" + message.content + \"<|end|>\" }}\n {%- endif %}\n {%- elif message.role == 'tool' -%}\n {%- if last_tool_call.name is none %}\n {{- raise_exception(\"Message has tool role, but 
there was no previous assistant message with a tool call!\") }}\n {%- endif %}\n {{- \"<|start|>functions.\" + last_tool_call.name }}\n {{- \" to=assistant<|channel|>commentary<|message|>\" + message.content|tojson + \"<|end|>\" }}\n {%- else -%}\n {{- \"<|start|>user<|message|>\" + message.content + \"<|end|>\" }}\n {%- endif -%}\n {%- endif -%}\n{%- endfor -%}\n\n{#- Generation prompt #}\n{%- if add_generation_prompt -%}\n<|start|>assistant\n{%- endif -%}"
+ }
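
This JSON file carries the same template as `chat_template.jinja`, escaped into a single `chat_template` string (older `transformers` releases read the JSON form; newer ones prefer the standalone `.jinja` file). A quick way to confirm the two stay in sync, assuming a local checkout:

```python
# Sketch: confirm chat_template.json and chat_template.jinja agree.
import json
from pathlib import Path

json_template = json.loads(Path("chat_template.json").read_text())["chat_template"]
jinja_template = Path("chat_template.jinja").read_text()
print(json_template == jinja_template)  # expected True if the two are in sync
```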
config.json ADDED
@@ -0,0 +1,79 @@
+ {
+   "architectures": [
+     "GptOssForCausalLM"
+   ],
+   "attention_bias": true,
+   "attention_dropout": 0.0,
+   "eos_token_id": 200002,
+   "experts_per_token": 4,
+   "head_dim": 64,
+   "hidden_act": "silu",
+   "hidden_size": 2880,
+   "initial_context_length": 4096,
+   "initializer_range": 0.02,
+   "intermediate_size": 2880,
+   "layer_types": [
+     "sliding_attention",
+     "full_attention",
+     "sliding_attention",
+     "full_attention",
+     "sliding_attention",
+     "full_attention",
+     "sliding_attention",
+     "full_attention",
+     "sliding_attention",
+     "full_attention",
+     "sliding_attention",
+     "full_attention",
+     "sliding_attention",
+     "full_attention",
+     "sliding_attention",
+     "full_attention",
+     "sliding_attention",
+     "full_attention",
+     "sliding_attention",
+     "full_attention",
+     "sliding_attention",
+     "full_attention",
+     "sliding_attention",
+     "full_attention",
+     "sliding_attention",
+     "full_attention",
+     "sliding_attention",
+     "full_attention",
+     "sliding_attention",
+     "full_attention",
+     "sliding_attention",
+     "full_attention",
+     "sliding_attention",
+     "full_attention",
+     "sliding_attention",
+     "full_attention"
+   ],
+   "max_position_embeddings": 131072,
+   "model_type": "gpt_oss",
+   "num_attention_heads": 64,
+   "num_experts_per_tok": 4,
+   "num_hidden_layers": 36,
+   "num_key_value_heads": 8,
+   "num_local_experts": 128,
+   "output_router_logits": false,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": {
+     "beta_fast": 32.0,
+     "beta_slow": 1.0,
+     "factor": 32.0,
+     "original_max_position_embeddings": 4096,
+     "rope_type": "yarn",
+     "truncate": false
+   },
+   "rope_theta": 150000,
+   "router_aux_loss_coef": 0.9,
+   "sliding_window": 128,
+   "swiglu_limit": 7.0,
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.55.0",
+   "use_cache": true,
+   "vocab_size": 201088
+ }
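
Taken together, this config describes a 36-layer mixture-of-experts decoder: 128 local experts per layer with 4 routed per token, alternating sliding-window (128 tokens) and full attention, and YaRN rope scaling that stretches the original 4,096-token context by a factor of 32 to the advertised 131,072. A small sketch of pulling those numbers back out with `transformers` (repo id assumed):

```python
# Sketch: inspecting this config via AutoConfig. Repo id is an assumption.
from transformers import AutoConfig

cfg = AutoConfig.from_pretrained("openai/gpt-oss-120b")

print(cfg.num_hidden_layers, cfg.num_local_experts, cfg.num_experts_per_tok)  # 36 128 4

# layer_types alternates sliding/full attention across the 36 layers.
sliding = sum(t == "sliding_attention" for t in cfg.layer_types)
print(sliding, len(cfg.layer_types) - sliding)  # 18 18

# YaRN scaling: 4096 original positions * factor 32 = 131072 max positions.
rs = cfg.rope_scaling
print(int(rs["original_max_position_embeddings"] * rs["factor"]))  # 131072
```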
generation_config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "bos_token_id": 199998,
+   "do_sample": true,
+   "eos_token_id": [
+     200002,
+     199999
+   ],
+   "pad_token_id": 199999,
+   "transformers_version": "4.55.0"
+ }
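
`generation_config.json` is picked up automatically by `model.generate()`: sampling is on by default, and generation stops at either of the two `eos_token_id` values. A minimal sketch (repo id assumed, hardware requirements aside):

```python
# Sketch: the generation config travels with the checkpoint; generate()
# uses do_sample=True and both eos ids without extra arguments.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo = "openai/gpt-oss-120b"  # assumed repo id
tok = AutoTokenizer.from_pretrained(repo)
model = AutoModelForCausalLM.from_pretrained(
    repo, torch_dtype=torch.bfloat16, device_map="auto"
)

inputs = tok.apply_chat_template(
    [{"role": "user", "content": "Hello!"}],
    add_generation_prompt=True,
    return_tensors="pt",
).to(model.device)

out = model.generate(inputs, max_new_tokens=128)
print(tok.decode(out[0][inputs.shape[-1]:]))
```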
model-00001-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:436015e96f1a2ac3e253745e46915b511533f7ed6aaba19d71b74be8d4ceb451
+ size 4453218784
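
Each shard is stored in Git LFS, so the diff shows the pointer file rather than the weights: the spec version, the content's SHA-256, and its size in bytes. That pair is enough to verify a downloaded shard end to end; a sketch (local path illustrative):

```python
# Sketch: check a downloaded shard against its LFS pointer (oid + size).
import hashlib
from pathlib import Path

def verify_shard(path: str, expected_sha256: str, expected_size: int) -> bool:
    p = Path(path)
    if p.stat().st_size != expected_size:  # cheap size check first
        return False
    digest = hashlib.sha256()
    with p.open("rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # 1 MiB chunks
            digest.update(chunk)
    return digest.hexdigest() == expected_sha256

print(verify_shard(
    "model-00001-of-00036.safetensors",
    "436015e96f1a2ac3e253745e46915b511533f7ed6aaba19d71b74be8d4ceb451",
    4453218784,
))
```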
model-00002-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a8280d042e47011bc81e34919c8e4e664de293e3e37d0de3e52e9eda1d23c72b
+ size 3241112672
model-00003-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:41bd9d40565519df60872f9a9b6fda78f374d5752e6943d101dfa363d7f71544
+ size 3241112672
model-00004-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f1a5a0d7e8713f0dfad8349458a6698128f7eac08d9b55709854b2d1c1a1ef44
+ size 3241112672
model-00005-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ce0e0150e2f4857c673e99fd6cc96546e2d57bfd35b749dfb6d2e41a234a5c02
+ size 3241112672
model-00006-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:570aa42822b340709df5eb42d7f14b28fc2468104cb10c7b5d4a97d05e4b9b9d
+ size 3241112672
model-00007-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7e08a18134a3a4916ed3b5f29125357ea255495c30537a05027e4a08589b5af5
+ size 3241112672
model-00008-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c94c84684a26977ecbd2347f3a49dd31405c6992522f34b898a1650c9e86442b
+ size 3241112672
model-00009-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:be0f32ca8914de3aa4929906da3a2b38b033f7db1f1866cec679e228f442015d
+ size 3241112672
model-00010-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4d09b322fa4ddd2aa69412ba9bfd45d3041d1343c9326254538a5a2511fe3e0e
+ size 3241112688
model-00011-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fd190498dc14121bd43cfd4e98843b5e53c8d536fbef543ef648890c1b7643c0
+ size 3241112688
model-00012-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:684628cb7ef245093be4c737bceba930335f8793fa19acb851120ab3f1e2c156
+ size 3241112688
model-00013-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c7fc49a9c8573b2c9dab599d5cc920860e6c7783550645f7d0ac914e51f1c052
+ size 3241112688
model-00014-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7a1a003e52aad17cd0e55c96574d9d660691f9aba9aa45fa296620aff382f32d
+ size 3241112688
model-00015-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2132bcfcf2a684ce3888cf8d6c78df5940a322c6d8fdf9d3a2ad9b696341748c
+ size 3241112688
model-00016-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1963b1d0f27660c63ee6011e139a49c129c7f83943b8f4b9a3085648be175de7
+ size 3241112688
model-00017-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:56a41c2457162fd09215ec1bbdff87686fdb5f47ce8dda94ee4a789c885bcc8d
+ size 3241112688
model-00018-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9870bdf78349b052e51f5f0c09aabaac1672d46814b7c4c1601da2ee5aa63b0a
+ size 3241112688
model-00019-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e73e4718c14982c108951d2b27ee31bc68694ca6351140ddd4b058c5d78648a7
+ size 3241112688
model-00020-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:787f8fcfa9bc0c151cf153f924011d08b56e395a3d6d54e977b2cdb2420a330a
+ size 3241112688
model-00021-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f9cde832de76c4dc32d11a7f139521ecb9c3bc7d8385ae418e20d0338cc689ea
+ size 3241112688
model-00022-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b02afdd8be7c2cd59d4e7974398f99a33e9ddb7da3ca72e1cc66144e29bca32e
+ size 3241112688
model-00023-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b9e6985b2e570b33994cf37f4d4dcd1e6af0066bff35069087bda7a91800f02c
+ size 3241112688
model-00024-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1e89ffaeada5a54dc54861d74e3a28f32a0e4e5a672f987d8fe886ce0f1873d8
+ size 3241112688
model-00025-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6658352c2a55456f72f15f9786bd78f3b1766506a2ac49942091233b03267ea7
+ size 3241112688
model-00026-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0261008eddb9dd5cdabad666bd0034ea290c1120487dd44e7c74405c8e5ae70c
+ size 3241112688
model-00027-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e1e3c53fa76b9c569eea40d24d5c082157e4741c1ccc2cb1618486f99c65bb7b
+ size 3241112688
model-00028-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:47c657c940d6ef4d14f4089e9dfd6c98329e81f81bb4da0f3abbe7df940c63fe
+ size 3241112688
model-00029-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:69a9cd3a487278d059918e2e973941837b1142e34a154824ab90e8e5af6adb75
+ size 3241112688
model-00030-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:36de10a1b4617371c2fa08bd5037d9e6c59ab1c6243fafbf95448c4ebab6e1a2
+ size 3241112688
model-00031-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d99c5e4024a2bef15d50b2e0a4d8e8e83d71475dbe8eb18ef81c714034ced6ca
+ size 3241112688
model-00032-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:99b67cb96797bd3b9d293dff23391b6e1d6f6270a31fddf42e26ed3a6795810f
+ size 3241112688
model-00033-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:37a211f5d415ff0d7a7c7e63111c4ba62ece6cac73eea1dbf3ead062ac49aafa
+ size 3241112688
model-00034-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:143d2b796f5c04b3d0b2f1d5bc1b86ef80fc8ec59e5b6e27409d7b23bf9b4424
+ size 3241112688
model-00035-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f84e4ad18c77e79659e7c62ec8981f578c17bf78a0b28db55d1a3fbf90836ad2
+ size 3241112688
model-00036-of-00036.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d5c4dc4e9eb0cc4652529d518560256b026dfd43b409ab58441fea7471f420bb
+ size 4345546544
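
The 36 pointers all follow the same three-line format. In a checkout made with `GIT_LFS_SKIP_SMUDGE=1` (so the on-disk files are still pointer text rather than weights), the advertised download size can be totaled straight from them; a sketch:

```python
# Sketch: sum the "size" field of all 36 LFS pointer files. Only valid
# while the files are still pointers, i.e. before `git lfs pull`.
from pathlib import Path

total = 0
for ptr in sorted(Path(".").glob("model-*-of-00036.safetensors")):
    fields = dict(line.split(" ", 1) for line in ptr.read_text().splitlines())
    total += int(fields["size"])
print(f"{total / 1e9:.1f} GB across 36 shards")  # roughly 119 GB
```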
model.safetensors.index.json ADDED
@@ -0,0 +1,623 @@
+ {
+   "metadata": {
+     "total_parameters": 116829156672,
+     "total_size": 118996527744
+   },
+   "weight_map": {
+     "lm_head.weight": "model-00036-of-00036.safetensors",
+     "model.embed_tokens.weight": "model-00001-of-00036.safetensors",
+     "model.layers.0.input_layernorm.weight": "model-00001-of-00036.safetensors",
+     "model.layers.0.mlp.experts.down_proj": "model-00001-of-00036.safetensors",
+     "model.layers.0.mlp.experts.down_proj_bias": "model-00001-of-00036.safetensors",
+     "model.layers.0.mlp.experts.gate_up_proj": "model-00001-of-00036.safetensors",
+     "model.layers.0.mlp.experts.gate_up_proj_bias": "model-00001-of-00036.safetensors",
+     "model.layers.0.mlp.router.bias": "model-00001-of-00036.safetensors",
+     "model.layers.0.mlp.router.weight": "model-00001-of-00036.safetensors",
+     "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00036.safetensors",
+     "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00036.safetensors",
+     "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00036.safetensors",
+     "model.layers.0.self_attn.o_proj.bias": "model-00001-of-00036.safetensors",
+     "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00036.safetensors",
+     "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00036.safetensors",
+     "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00036.safetensors",
+     "model.layers.0.self_attn.sinks": "model-00001-of-00036.safetensors",
+     "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00036.safetensors",
+     "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00036.safetensors",
+     "model.layers.1.input_layernorm.weight": "model-00002-of-00036.safetensors",
+     "model.layers.1.mlp.experts.down_proj": "model-00002-of-00036.safetensors",
+     "model.layers.1.mlp.experts.down_proj_bias": "model-00002-of-00036.safetensors",
+     "model.layers.1.mlp.experts.gate_up_proj": "model-00002-of-00036.safetensors",
+     "model.layers.1.mlp.experts.gate_up_proj_bias": "model-00002-of-00036.safetensors",
+     "model.layers.1.mlp.router.bias": "model-00001-of-00036.safetensors",
+     "model.layers.1.mlp.router.weight": "model-00001-of-00036.safetensors",
+     "model.layers.1.post_attention_layernorm.weight": "model-00002-of-00036.safetensors",
+     "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00036.safetensors",
+     "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00036.safetensors",
+     "model.layers.1.self_attn.o_proj.bias": "model-00001-of-00036.safetensors",
+     "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00036.safetensors",
+     "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00036.safetensors",
+     "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00036.safetensors",
+     "model.layers.1.self_attn.sinks": "model-00001-of-00036.safetensors",
+     "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00036.safetensors",
+     "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00036.safetensors",
+     "model.layers.10.input_layernorm.weight": "model-00011-of-00036.safetensors",
+     "model.layers.10.mlp.experts.down_proj": "model-00011-of-00036.safetensors",
+     "model.layers.10.mlp.experts.down_proj_bias": "model-00011-of-00036.safetensors",
+     "model.layers.10.mlp.experts.gate_up_proj": "model-00011-of-00036.safetensors",
+     "model.layers.10.mlp.experts.gate_up_proj_bias": "model-00011-of-00036.safetensors",
+     "model.layers.10.mlp.router.bias": "model-00010-of-00036.safetensors",
+     "model.layers.10.mlp.router.weight": "model-00010-of-00036.safetensors",
+     "model.layers.10.post_attention_layernorm.weight": "model-00011-of-00036.safetensors",
+     "model.layers.10.self_attn.k_proj.bias": "model-00010-of-00036.safetensors",
+     "model.layers.10.self_attn.k_proj.weight": "model-00010-of-00036.safetensors",
+     "model.layers.10.self_attn.o_proj.bias": "model-00010-of-00036.safetensors",
+     "model.layers.10.self_attn.o_proj.weight": "model-00010-of-00036.safetensors",
+     "model.layers.10.self_attn.q_proj.bias": "model-00010-of-00036.safetensors",
+     "model.layers.10.self_attn.q_proj.weight": "model-00010-of-00036.safetensors",
+     "model.layers.10.self_attn.sinks": "model-00010-of-00036.safetensors",
+     "model.layers.10.self_attn.v_proj.bias": "model-00010-of-00036.safetensors",
+     "model.layers.10.self_attn.v_proj.weight": "model-00010-of-00036.safetensors",
+     "model.layers.11.input_layernorm.weight": "model-00012-of-00036.safetensors",
+     "model.layers.11.mlp.experts.down_proj": "model-00012-of-00036.safetensors",
+     "model.layers.11.mlp.experts.down_proj_bias": "model-00012-of-00036.safetensors",
+     "model.layers.11.mlp.experts.gate_up_proj": "model-00012-of-00036.safetensors",
+     "model.layers.11.mlp.experts.gate_up_proj_bias": "model-00012-of-00036.safetensors",
+     "model.layers.11.mlp.router.bias": "model-00011-of-00036.safetensors",
+     "model.layers.11.mlp.router.weight": "model-00011-of-00036.safetensors",
+     "model.layers.11.post_attention_layernorm.weight": "model-00012-of-00036.safetensors",
+     "model.layers.11.self_attn.k_proj.bias": "model-00011-of-00036.safetensors",
+     "model.layers.11.self_attn.k_proj.weight": "model-00011-of-00036.safetensors",
+     "model.layers.11.self_attn.o_proj.bias": "model-00011-of-00036.safetensors",
+     "model.layers.11.self_attn.o_proj.weight": "model-00011-of-00036.safetensors",
+     "model.layers.11.self_attn.q_proj.bias": "model-00011-of-00036.safetensors",
+     "model.layers.11.self_attn.q_proj.weight": "model-00011-of-00036.safetensors",
+     "model.layers.11.self_attn.sinks": "model-00011-of-00036.safetensors",
+     "model.layers.11.self_attn.v_proj.bias": "model-00011-of-00036.safetensors",
+     "model.layers.11.self_attn.v_proj.weight": "model-00011-of-00036.safetensors",
+     "model.layers.12.input_layernorm.weight": "model-00013-of-00036.safetensors",
+     "model.layers.12.mlp.experts.down_proj": "model-00013-of-00036.safetensors",
+     "model.layers.12.mlp.experts.down_proj_bias": "model-00013-of-00036.safetensors",
+     "model.layers.12.mlp.experts.gate_up_proj": "model-00013-of-00036.safetensors",
+     "model.layers.12.mlp.experts.gate_up_proj_bias": "model-00013-of-00036.safetensors",
+     "model.layers.12.mlp.router.bias": "model-00012-of-00036.safetensors",
+     "model.layers.12.mlp.router.weight": "model-00012-of-00036.safetensors",
+     "model.layers.12.post_attention_layernorm.weight": "model-00013-of-00036.safetensors",
+     "model.layers.12.self_attn.k_proj.bias": "model-00012-of-00036.safetensors",
+     "model.layers.12.self_attn.k_proj.weight": "model-00012-of-00036.safetensors",
+     "model.layers.12.self_attn.o_proj.bias": "model-00012-of-00036.safetensors",
+     "model.layers.12.self_attn.o_proj.weight": "model-00012-of-00036.safetensors",
+     "model.layers.12.self_attn.q_proj.bias": "model-00012-of-00036.safetensors",
+     "model.layers.12.self_attn.q_proj.weight": "model-00012-of-00036.safetensors",
+     "model.layers.12.self_attn.sinks": "model-00012-of-00036.safetensors",
+     "model.layers.12.self_attn.v_proj.bias": "model-00012-of-00036.safetensors",
+     "model.layers.12.self_attn.v_proj.weight": "model-00012-of-00036.safetensors",
+     "model.layers.13.input_layernorm.weight": "model-00014-of-00036.safetensors",
+     "model.layers.13.mlp.experts.down_proj": "model-00014-of-00036.safetensors",
+     "model.layers.13.mlp.experts.down_proj_bias": "model-00014-of-00036.safetensors",
+     "model.layers.13.mlp.experts.gate_up_proj": "model-00014-of-00036.safetensors",
+     "model.layers.13.mlp.experts.gate_up_proj_bias": "model-00014-of-00036.safetensors",
+     "model.layers.13.mlp.router.bias": "model-00013-of-00036.safetensors",
+     "model.layers.13.mlp.router.weight": "model-00013-of-00036.safetensors",
+     "model.layers.13.post_attention_layernorm.weight": "model-00014-of-00036.safetensors",
+     "model.layers.13.self_attn.k_proj.bias": "model-00013-of-00036.safetensors",
+     "model.layers.13.self_attn.k_proj.weight": "model-00013-of-00036.safetensors",
+     "model.layers.13.self_attn.o_proj.bias": "model-00013-of-00036.safetensors",
+     "model.layers.13.self_attn.o_proj.weight": "model-00013-of-00036.safetensors",
+     "model.layers.13.self_attn.q_proj.bias": "model-00013-of-00036.safetensors",
+     "model.layers.13.self_attn.q_proj.weight": "model-00013-of-00036.safetensors",
+     "model.layers.13.self_attn.sinks": "model-00013-of-00036.safetensors",
+     "model.layers.13.self_attn.v_proj.bias": "model-00013-of-00036.safetensors",
+     "model.layers.13.self_attn.v_proj.weight": "model-00013-of-00036.safetensors",
+     "model.layers.14.input_layernorm.weight": "model-00015-of-00036.safetensors",
+     "model.layers.14.mlp.experts.down_proj": "model-00015-of-00036.safetensors",
+     "model.layers.14.mlp.experts.down_proj_bias": "model-00015-of-00036.safetensors",
+     "model.layers.14.mlp.experts.gate_up_proj": "model-00015-of-00036.safetensors",
+     "model.layers.14.mlp.experts.gate_up_proj_bias": "model-00015-of-00036.safetensors",
+     "model.layers.14.mlp.router.bias": "model-00014-of-00036.safetensors",
+     "model.layers.14.mlp.router.weight": "model-00014-of-00036.safetensors",
+     "model.layers.14.post_attention_layernorm.weight": "model-00015-of-00036.safetensors",
+     "model.layers.14.self_attn.k_proj.bias": "model-00014-of-00036.safetensors",
+     "model.layers.14.self_attn.k_proj.weight": "model-00014-of-00036.safetensors",
+     "model.layers.14.self_attn.o_proj.bias": "model-00014-of-00036.safetensors",
+     "model.layers.14.self_attn.o_proj.weight": "model-00014-of-00036.safetensors",
+     "model.layers.14.self_attn.q_proj.bias": "model-00014-of-00036.safetensors",
+     "model.layers.14.self_attn.q_proj.weight": "model-00014-of-00036.safetensors",
+     "model.layers.14.self_attn.sinks": "model-00014-of-00036.safetensors",
+     "model.layers.14.self_attn.v_proj.bias": "model-00014-of-00036.safetensors",
+     "model.layers.14.self_attn.v_proj.weight": "model-00014-of-00036.safetensors",
+     "model.layers.15.input_layernorm.weight": "model-00016-of-00036.safetensors",
+     "model.layers.15.mlp.experts.down_proj": "model-00016-of-00036.safetensors",
+     "model.layers.15.mlp.experts.down_proj_bias": "model-00016-of-00036.safetensors",
+     "model.layers.15.mlp.experts.gate_up_proj": "model-00016-of-00036.safetensors",
+     "model.layers.15.mlp.experts.gate_up_proj_bias": "model-00016-of-00036.safetensors",
+     "model.layers.15.mlp.router.bias": "model-00015-of-00036.safetensors",
+     "model.layers.15.mlp.router.weight": "model-00015-of-00036.safetensors",
+     "model.layers.15.post_attention_layernorm.weight": "model-00016-of-00036.safetensors",
+     "model.layers.15.self_attn.k_proj.bias": "model-00015-of-00036.safetensors",
+     "model.layers.15.self_attn.k_proj.weight": "model-00015-of-00036.safetensors",
+     "model.layers.15.self_attn.o_proj.bias": "model-00015-of-00036.safetensors",
+     "model.layers.15.self_attn.o_proj.weight": "model-00015-of-00036.safetensors",
+     "model.layers.15.self_attn.q_proj.bias": "model-00015-of-00036.safetensors",
+     "model.layers.15.self_attn.q_proj.weight": "model-00015-of-00036.safetensors",
+     "model.layers.15.self_attn.sinks": "model-00015-of-00036.safetensors",
+     "model.layers.15.self_attn.v_proj.bias": "model-00015-of-00036.safetensors",
+     "model.layers.15.self_attn.v_proj.weight": "model-00015-of-00036.safetensors",
+     "model.layers.16.input_layernorm.weight": "model-00017-of-00036.safetensors",
+     "model.layers.16.mlp.experts.down_proj": "model-00017-of-00036.safetensors",
+     "model.layers.16.mlp.experts.down_proj_bias": "model-00017-of-00036.safetensors",
+     "model.layers.16.mlp.experts.gate_up_proj": "model-00017-of-00036.safetensors",
+     "model.layers.16.mlp.experts.gate_up_proj_bias": "model-00017-of-00036.safetensors",
+     "model.layers.16.mlp.router.bias": "model-00016-of-00036.safetensors",
+     "model.layers.16.mlp.router.weight": "model-00016-of-00036.safetensors",
+     "model.layers.16.post_attention_layernorm.weight": "model-00017-of-00036.safetensors",
+     "model.layers.16.self_attn.k_proj.bias": "model-00016-of-00036.safetensors",
+     "model.layers.16.self_attn.k_proj.weight": "model-00016-of-00036.safetensors",
+     "model.layers.16.self_attn.o_proj.bias": "model-00016-of-00036.safetensors",
+     "model.layers.16.self_attn.o_proj.weight": "model-00016-of-00036.safetensors",
+     "model.layers.16.self_attn.q_proj.bias": "model-00016-of-00036.safetensors",
+     "model.layers.16.self_attn.q_proj.weight": "model-00016-of-00036.safetensors",
+     "model.layers.16.self_attn.sinks": "model-00016-of-00036.safetensors",
+     "model.layers.16.self_attn.v_proj.bias": "model-00016-of-00036.safetensors",
+     "model.layers.16.self_attn.v_proj.weight": "model-00016-of-00036.safetensors",
+     "model.layers.17.input_layernorm.weight": "model-00018-of-00036.safetensors",
+     "model.layers.17.mlp.experts.down_proj": "model-00018-of-00036.safetensors",
+     "model.layers.17.mlp.experts.down_proj_bias": "model-00018-of-00036.safetensors",
+     "model.layers.17.mlp.experts.gate_up_proj": "model-00018-of-00036.safetensors",
+     "model.layers.17.mlp.experts.gate_up_proj_bias": "model-00018-of-00036.safetensors",
+     "model.layers.17.mlp.router.bias": "model-00017-of-00036.safetensors",
+     "model.layers.17.mlp.router.weight": "model-00017-of-00036.safetensors",
+     "model.layers.17.post_attention_layernorm.weight": "model-00018-of-00036.safetensors",
+     "model.layers.17.self_attn.k_proj.bias": "model-00017-of-00036.safetensors",
+     "model.layers.17.self_attn.k_proj.weight": "model-00017-of-00036.safetensors",
+     "model.layers.17.self_attn.o_proj.bias": "model-00017-of-00036.safetensors",
+     "model.layers.17.self_attn.o_proj.weight": "model-00017-of-00036.safetensors",
+     "model.layers.17.self_attn.q_proj.bias": "model-00017-of-00036.safetensors",
+     "model.layers.17.self_attn.q_proj.weight": "model-00017-of-00036.safetensors",
+     "model.layers.17.self_attn.sinks": "model-00017-of-00036.safetensors",
+     "model.layers.17.self_attn.v_proj.bias": "model-00017-of-00036.safetensors",
+     "model.layers.17.self_attn.v_proj.weight": "model-00017-of-00036.safetensors",
+     "model.layers.18.input_layernorm.weight": "model-00019-of-00036.safetensors",
+     "model.layers.18.mlp.experts.down_proj": "model-00019-of-00036.safetensors",
+     "model.layers.18.mlp.experts.down_proj_bias": "model-00019-of-00036.safetensors",
+     "model.layers.18.mlp.experts.gate_up_proj": "model-00019-of-00036.safetensors",
+     "model.layers.18.mlp.experts.gate_up_proj_bias": "model-00019-of-00036.safetensors",
+     "model.layers.18.mlp.router.bias": "model-00018-of-00036.safetensors",
+     "model.layers.18.mlp.router.weight": "model-00018-of-00036.safetensors",
+     "model.layers.18.post_attention_layernorm.weight": "model-00019-of-00036.safetensors",
+     "model.layers.18.self_attn.k_proj.bias": "model-00018-of-00036.safetensors",
+     "model.layers.18.self_attn.k_proj.weight": "model-00018-of-00036.safetensors",
+     "model.layers.18.self_attn.o_proj.bias": "model-00018-of-00036.safetensors",
+     "model.layers.18.self_attn.o_proj.weight": "model-00018-of-00036.safetensors",
+     "model.layers.18.self_attn.q_proj.bias": "model-00018-of-00036.safetensors",
+     "model.layers.18.self_attn.q_proj.weight": "model-00018-of-00036.safetensors",
+     "model.layers.18.self_attn.sinks": "model-00018-of-00036.safetensors",
+     "model.layers.18.self_attn.v_proj.bias": "model-00018-of-00036.safetensors",
+     "model.layers.18.self_attn.v_proj.weight": "model-00018-of-00036.safetensors",
+     "model.layers.19.input_layernorm.weight": "model-00020-of-00036.safetensors",
+     "model.layers.19.mlp.experts.down_proj": "model-00020-of-00036.safetensors",
+     "model.layers.19.mlp.experts.down_proj_bias": "model-00020-of-00036.safetensors",
+     "model.layers.19.mlp.experts.gate_up_proj": "model-00020-of-00036.safetensors",
+     "model.layers.19.mlp.experts.gate_up_proj_bias": "model-00020-of-00036.safetensors",
+     "model.layers.19.mlp.router.bias": "model-00019-of-00036.safetensors",
+     "model.layers.19.mlp.router.weight": "model-00019-of-00036.safetensors",
+     "model.layers.19.post_attention_layernorm.weight": "model-00020-of-00036.safetensors",
+     "model.layers.19.self_attn.k_proj.bias": "model-00019-of-00036.safetensors",
+     "model.layers.19.self_attn.k_proj.weight": "model-00019-of-00036.safetensors",
+     "model.layers.19.self_attn.o_proj.bias": "model-00019-of-00036.safetensors",
+     "model.layers.19.self_attn.o_proj.weight": "model-00019-of-00036.safetensors",
+     "model.layers.19.self_attn.q_proj.bias": "model-00019-of-00036.safetensors",
+     "model.layers.19.self_attn.q_proj.weight": "model-00019-of-00036.safetensors",
+     "model.layers.19.self_attn.sinks": "model-00019-of-00036.safetensors",
+     "model.layers.19.self_attn.v_proj.bias": "model-00019-of-00036.safetensors",
+     "model.layers.19.self_attn.v_proj.weight": "model-00019-of-00036.safetensors",
+     "model.layers.2.input_layernorm.weight": "model-00003-of-00036.safetensors",
+     "model.layers.2.mlp.experts.down_proj": "model-00003-of-00036.safetensors",
+     "model.layers.2.mlp.experts.down_proj_bias": "model-00003-of-00036.safetensors",
+     "model.layers.2.mlp.experts.gate_up_proj": "model-00003-of-00036.safetensors",
+     "model.layers.2.mlp.experts.gate_up_proj_bias": "model-00003-of-00036.safetensors",
+     "model.layers.2.mlp.router.bias": "model-00002-of-00036.safetensors",
+     "model.layers.2.mlp.router.weight": "model-00002-of-00036.safetensors",
+     "model.layers.2.post_attention_layernorm.weight": "model-00003-of-00036.safetensors",
+     "model.layers.2.self_attn.k_proj.bias": "model-00002-of-00036.safetensors",
+     "model.layers.2.self_attn.k_proj.weight": "model-00002-of-00036.safetensors",
+     "model.layers.2.self_attn.o_proj.bias": "model-00002-of-00036.safetensors",
+     "model.layers.2.self_attn.o_proj.weight": "model-00002-of-00036.safetensors",
+     "model.layers.2.self_attn.q_proj.bias": "model-00002-of-00036.safetensors",
+     "model.layers.2.self_attn.q_proj.weight": "model-00002-of-00036.safetensors",
+     "model.layers.2.self_attn.sinks": "model-00002-of-00036.safetensors",
+     "model.layers.2.self_attn.v_proj.bias": "model-00002-of-00036.safetensors",
+     "model.layers.2.self_attn.v_proj.weight": "model-00002-of-00036.safetensors",
+     "model.layers.20.input_layernorm.weight": "model-00021-of-00036.safetensors",
231
+ "model.layers.20.mlp.experts.down_proj": "model-00021-of-00036.safetensors",
232
+ "model.layers.20.mlp.experts.down_proj_bias": "model-00021-of-00036.safetensors",
233
+ "model.layers.20.mlp.experts.gate_up_proj": "model-00021-of-00036.safetensors",
234
+ "model.layers.20.mlp.experts.gate_up_proj_bias": "model-00021-of-00036.safetensors",
235
+ "model.layers.20.mlp.router.bias": "model-00020-of-00036.safetensors",
236
+ "model.layers.20.mlp.router.weight": "model-00020-of-00036.safetensors",
237
+ "model.layers.20.post_attention_layernorm.weight": "model-00021-of-00036.safetensors",
238
+ "model.layers.20.self_attn.k_proj.bias": "model-00020-of-00036.safetensors",
239
+ "model.layers.20.self_attn.k_proj.weight": "model-00020-of-00036.safetensors",
240
+ "model.layers.20.self_attn.o_proj.bias": "model-00020-of-00036.safetensors",
241
+ "model.layers.20.self_attn.o_proj.weight": "model-00020-of-00036.safetensors",
242
+ "model.layers.20.self_attn.q_proj.bias": "model-00020-of-00036.safetensors",
243
+ "model.layers.20.self_attn.q_proj.weight": "model-00020-of-00036.safetensors",
244
+ "model.layers.20.self_attn.sinks": "model-00020-of-00036.safetensors",
245
+ "model.layers.20.self_attn.v_proj.bias": "model-00020-of-00036.safetensors",
246
+ "model.layers.20.self_attn.v_proj.weight": "model-00020-of-00036.safetensors",
247
+ "model.layers.21.input_layernorm.weight": "model-00022-of-00036.safetensors",
248
+ "model.layers.21.mlp.experts.down_proj": "model-00022-of-00036.safetensors",
249
+ "model.layers.21.mlp.experts.down_proj_bias": "model-00022-of-00036.safetensors",
250
+ "model.layers.21.mlp.experts.gate_up_proj": "model-00022-of-00036.safetensors",
251
+ "model.layers.21.mlp.experts.gate_up_proj_bias": "model-00022-of-00036.safetensors",
252
+ "model.layers.21.mlp.router.bias": "model-00021-of-00036.safetensors",
253
+ "model.layers.21.mlp.router.weight": "model-00021-of-00036.safetensors",
254
+ "model.layers.21.post_attention_layernorm.weight": "model-00022-of-00036.safetensors",
255
+ "model.layers.21.self_attn.k_proj.bias": "model-00021-of-00036.safetensors",
256
+ "model.layers.21.self_attn.k_proj.weight": "model-00021-of-00036.safetensors",
257
+ "model.layers.21.self_attn.o_proj.bias": "model-00021-of-00036.safetensors",
258
+ "model.layers.21.self_attn.o_proj.weight": "model-00021-of-00036.safetensors",
259
+ "model.layers.21.self_attn.q_proj.bias": "model-00021-of-00036.safetensors",
260
+ "model.layers.21.self_attn.q_proj.weight": "model-00021-of-00036.safetensors",
261
+ "model.layers.21.self_attn.sinks": "model-00021-of-00036.safetensors",
262
+ "model.layers.21.self_attn.v_proj.bias": "model-00021-of-00036.safetensors",
263
+ "model.layers.21.self_attn.v_proj.weight": "model-00021-of-00036.safetensors",
264
+ "model.layers.22.input_layernorm.weight": "model-00023-of-00036.safetensors",
265
+ "model.layers.22.mlp.experts.down_proj": "model-00023-of-00036.safetensors",
266
+ "model.layers.22.mlp.experts.down_proj_bias": "model-00023-of-00036.safetensors",
267
+ "model.layers.22.mlp.experts.gate_up_proj": "model-00023-of-00036.safetensors",
268
+ "model.layers.22.mlp.experts.gate_up_proj_bias": "model-00023-of-00036.safetensors",
269
+ "model.layers.22.mlp.router.bias": "model-00022-of-00036.safetensors",
270
+ "model.layers.22.mlp.router.weight": "model-00022-of-00036.safetensors",
271
+ "model.layers.22.post_attention_layernorm.weight": "model-00023-of-00036.safetensors",
272
+ "model.layers.22.self_attn.k_proj.bias": "model-00022-of-00036.safetensors",
273
+ "model.layers.22.self_attn.k_proj.weight": "model-00022-of-00036.safetensors",
274
+ "model.layers.22.self_attn.o_proj.bias": "model-00022-of-00036.safetensors",
275
+ "model.layers.22.self_attn.o_proj.weight": "model-00022-of-00036.safetensors",
276
+ "model.layers.22.self_attn.q_proj.bias": "model-00022-of-00036.safetensors",
277
+ "model.layers.22.self_attn.q_proj.weight": "model-00022-of-00036.safetensors",
278
+ "model.layers.22.self_attn.sinks": "model-00022-of-00036.safetensors",
279
+ "model.layers.22.self_attn.v_proj.bias": "model-00022-of-00036.safetensors",
280
+ "model.layers.22.self_attn.v_proj.weight": "model-00022-of-00036.safetensors",
281
+ "model.layers.23.input_layernorm.weight": "model-00024-of-00036.safetensors",
282
+ "model.layers.23.mlp.experts.down_proj": "model-00024-of-00036.safetensors",
283
+ "model.layers.23.mlp.experts.down_proj_bias": "model-00024-of-00036.safetensors",
284
+ "model.layers.23.mlp.experts.gate_up_proj": "model-00024-of-00036.safetensors",
285
+ "model.layers.23.mlp.experts.gate_up_proj_bias": "model-00024-of-00036.safetensors",
286
+ "model.layers.23.mlp.router.bias": "model-00023-of-00036.safetensors",
287
+ "model.layers.23.mlp.router.weight": "model-00023-of-00036.safetensors",
288
+ "model.layers.23.post_attention_layernorm.weight": "model-00024-of-00036.safetensors",
289
+ "model.layers.23.self_attn.k_proj.bias": "model-00023-of-00036.safetensors",
290
+ "model.layers.23.self_attn.k_proj.weight": "model-00023-of-00036.safetensors",
291
+ "model.layers.23.self_attn.o_proj.bias": "model-00023-of-00036.safetensors",
292
+ "model.layers.23.self_attn.o_proj.weight": "model-00023-of-00036.safetensors",
293
+ "model.layers.23.self_attn.q_proj.bias": "model-00023-of-00036.safetensors",
294
+ "model.layers.23.self_attn.q_proj.weight": "model-00023-of-00036.safetensors",
295
+ "model.layers.23.self_attn.sinks": "model-00023-of-00036.safetensors",
296
+ "model.layers.23.self_attn.v_proj.bias": "model-00023-of-00036.safetensors",
297
+ "model.layers.23.self_attn.v_proj.weight": "model-00023-of-00036.safetensors",
298
+ "model.layers.24.input_layernorm.weight": "model-00025-of-00036.safetensors",
299
+ "model.layers.24.mlp.experts.down_proj": "model-00025-of-00036.safetensors",
300
+ "model.layers.24.mlp.experts.down_proj_bias": "model-00025-of-00036.safetensors",
301
+ "model.layers.24.mlp.experts.gate_up_proj": "model-00025-of-00036.safetensors",
302
+ "model.layers.24.mlp.experts.gate_up_proj_bias": "model-00025-of-00036.safetensors",
303
+ "model.layers.24.mlp.router.bias": "model-00024-of-00036.safetensors",
304
+ "model.layers.24.mlp.router.weight": "model-00024-of-00036.safetensors",
305
+ "model.layers.24.post_attention_layernorm.weight": "model-00025-of-00036.safetensors",
306
+ "model.layers.24.self_attn.k_proj.bias": "model-00024-of-00036.safetensors",
307
+ "model.layers.24.self_attn.k_proj.weight": "model-00024-of-00036.safetensors",
308
+ "model.layers.24.self_attn.o_proj.bias": "model-00024-of-00036.safetensors",
309
+ "model.layers.24.self_attn.o_proj.weight": "model-00024-of-00036.safetensors",
310
+ "model.layers.24.self_attn.q_proj.bias": "model-00024-of-00036.safetensors",
311
+ "model.layers.24.self_attn.q_proj.weight": "model-00024-of-00036.safetensors",
312
+ "model.layers.24.self_attn.sinks": "model-00024-of-00036.safetensors",
313
+ "model.layers.24.self_attn.v_proj.bias": "model-00024-of-00036.safetensors",
314
+ "model.layers.24.self_attn.v_proj.weight": "model-00024-of-00036.safetensors",
315
+ "model.layers.25.input_layernorm.weight": "model-00026-of-00036.safetensors",
316
+ "model.layers.25.mlp.experts.down_proj": "model-00026-of-00036.safetensors",
317
+ "model.layers.25.mlp.experts.down_proj_bias": "model-00026-of-00036.safetensors",
318
+ "model.layers.25.mlp.experts.gate_up_proj": "model-00026-of-00036.safetensors",
319
+ "model.layers.25.mlp.experts.gate_up_proj_bias": "model-00026-of-00036.safetensors",
320
+ "model.layers.25.mlp.router.bias": "model-00025-of-00036.safetensors",
321
+ "model.layers.25.mlp.router.weight": "model-00025-of-00036.safetensors",
322
+ "model.layers.25.post_attention_layernorm.weight": "model-00026-of-00036.safetensors",
323
+ "model.layers.25.self_attn.k_proj.bias": "model-00025-of-00036.safetensors",
324
+ "model.layers.25.self_attn.k_proj.weight": "model-00025-of-00036.safetensors",
325
+ "model.layers.25.self_attn.o_proj.bias": "model-00025-of-00036.safetensors",
326
+ "model.layers.25.self_attn.o_proj.weight": "model-00025-of-00036.safetensors",
327
+ "model.layers.25.self_attn.q_proj.bias": "model-00025-of-00036.safetensors",
328
+ "model.layers.25.self_attn.q_proj.weight": "model-00025-of-00036.safetensors",
329
+ "model.layers.25.self_attn.sinks": "model-00025-of-00036.safetensors",
330
+ "model.layers.25.self_attn.v_proj.bias": "model-00025-of-00036.safetensors",
331
+ "model.layers.25.self_attn.v_proj.weight": "model-00025-of-00036.safetensors",
332
+ "model.layers.26.input_layernorm.weight": "model-00027-of-00036.safetensors",
333
+ "model.layers.26.mlp.experts.down_proj": "model-00027-of-00036.safetensors",
334
+ "model.layers.26.mlp.experts.down_proj_bias": "model-00027-of-00036.safetensors",
335
+ "model.layers.26.mlp.experts.gate_up_proj": "model-00027-of-00036.safetensors",
336
+ "model.layers.26.mlp.experts.gate_up_proj_bias": "model-00027-of-00036.safetensors",
337
+ "model.layers.26.mlp.router.bias": "model-00026-of-00036.safetensors",
338
+ "model.layers.26.mlp.router.weight": "model-00026-of-00036.safetensors",
339
+ "model.layers.26.post_attention_layernorm.weight": "model-00027-of-00036.safetensors",
340
+ "model.layers.26.self_attn.k_proj.bias": "model-00026-of-00036.safetensors",
341
+ "model.layers.26.self_attn.k_proj.weight": "model-00026-of-00036.safetensors",
342
+ "model.layers.26.self_attn.o_proj.bias": "model-00026-of-00036.safetensors",
343
+ "model.layers.26.self_attn.o_proj.weight": "model-00026-of-00036.safetensors",
344
+ "model.layers.26.self_attn.q_proj.bias": "model-00026-of-00036.safetensors",
345
+ "model.layers.26.self_attn.q_proj.weight": "model-00026-of-00036.safetensors",
346
+ "model.layers.26.self_attn.sinks": "model-00026-of-00036.safetensors",
347
+ "model.layers.26.self_attn.v_proj.bias": "model-00026-of-00036.safetensors",
348
+ "model.layers.26.self_attn.v_proj.weight": "model-00026-of-00036.safetensors",
349
+ "model.layers.27.input_layernorm.weight": "model-00028-of-00036.safetensors",
350
+ "model.layers.27.mlp.experts.down_proj": "model-00028-of-00036.safetensors",
351
+ "model.layers.27.mlp.experts.down_proj_bias": "model-00028-of-00036.safetensors",
352
+ "model.layers.27.mlp.experts.gate_up_proj": "model-00028-of-00036.safetensors",
353
+ "model.layers.27.mlp.experts.gate_up_proj_bias": "model-00028-of-00036.safetensors",
354
+ "model.layers.27.mlp.router.bias": "model-00027-of-00036.safetensors",
355
+ "model.layers.27.mlp.router.weight": "model-00027-of-00036.safetensors",
356
+ "model.layers.27.post_attention_layernorm.weight": "model-00028-of-00036.safetensors",
357
+ "model.layers.27.self_attn.k_proj.bias": "model-00027-of-00036.safetensors",
358
+ "model.layers.27.self_attn.k_proj.weight": "model-00027-of-00036.safetensors",
359
+ "model.layers.27.self_attn.o_proj.bias": "model-00027-of-00036.safetensors",
360
+ "model.layers.27.self_attn.o_proj.weight": "model-00027-of-00036.safetensors",
361
+ "model.layers.27.self_attn.q_proj.bias": "model-00027-of-00036.safetensors",
362
+ "model.layers.27.self_attn.q_proj.weight": "model-00027-of-00036.safetensors",
363
+ "model.layers.27.self_attn.sinks": "model-00027-of-00036.safetensors",
364
+ "model.layers.27.self_attn.v_proj.bias": "model-00027-of-00036.safetensors",
365
+ "model.layers.27.self_attn.v_proj.weight": "model-00027-of-00036.safetensors",
366
+ "model.layers.28.input_layernorm.weight": "model-00029-of-00036.safetensors",
367
+ "model.layers.28.mlp.experts.down_proj": "model-00029-of-00036.safetensors",
368
+ "model.layers.28.mlp.experts.down_proj_bias": "model-00029-of-00036.safetensors",
369
+ "model.layers.28.mlp.experts.gate_up_proj": "model-00029-of-00036.safetensors",
370
+ "model.layers.28.mlp.experts.gate_up_proj_bias": "model-00029-of-00036.safetensors",
371
+ "model.layers.28.mlp.router.bias": "model-00028-of-00036.safetensors",
372
+ "model.layers.28.mlp.router.weight": "model-00028-of-00036.safetensors",
373
+ "model.layers.28.post_attention_layernorm.weight": "model-00029-of-00036.safetensors",
374
+ "model.layers.28.self_attn.k_proj.bias": "model-00028-of-00036.safetensors",
375
+ "model.layers.28.self_attn.k_proj.weight": "model-00028-of-00036.safetensors",
376
+ "model.layers.28.self_attn.o_proj.bias": "model-00028-of-00036.safetensors",
377
+ "model.layers.28.self_attn.o_proj.weight": "model-00028-of-00036.safetensors",
378
+ "model.layers.28.self_attn.q_proj.bias": "model-00028-of-00036.safetensors",
379
+ "model.layers.28.self_attn.q_proj.weight": "model-00028-of-00036.safetensors",
380
+ "model.layers.28.self_attn.sinks": "model-00028-of-00036.safetensors",
381
+ "model.layers.28.self_attn.v_proj.bias": "model-00028-of-00036.safetensors",
382
+ "model.layers.28.self_attn.v_proj.weight": "model-00028-of-00036.safetensors",
383
+ "model.layers.29.input_layernorm.weight": "model-00030-of-00036.safetensors",
384
+ "model.layers.29.mlp.experts.down_proj": "model-00030-of-00036.safetensors",
385
+ "model.layers.29.mlp.experts.down_proj_bias": "model-00030-of-00036.safetensors",
386
+ "model.layers.29.mlp.experts.gate_up_proj": "model-00030-of-00036.safetensors",
387
+ "model.layers.29.mlp.experts.gate_up_proj_bias": "model-00030-of-00036.safetensors",
388
+ "model.layers.29.mlp.router.bias": "model-00029-of-00036.safetensors",
389
+ "model.layers.29.mlp.router.weight": "model-00029-of-00036.safetensors",
390
+ "model.layers.29.post_attention_layernorm.weight": "model-00030-of-00036.safetensors",
391
+ "model.layers.29.self_attn.k_proj.bias": "model-00029-of-00036.safetensors",
392
+ "model.layers.29.self_attn.k_proj.weight": "model-00029-of-00036.safetensors",
393
+ "model.layers.29.self_attn.o_proj.bias": "model-00029-of-00036.safetensors",
394
+ "model.layers.29.self_attn.o_proj.weight": "model-00029-of-00036.safetensors",
395
+ "model.layers.29.self_attn.q_proj.bias": "model-00029-of-00036.safetensors",
396
+ "model.layers.29.self_attn.q_proj.weight": "model-00029-of-00036.safetensors",
397
+ "model.layers.29.self_attn.sinks": "model-00029-of-00036.safetensors",
398
+ "model.layers.29.self_attn.v_proj.bias": "model-00029-of-00036.safetensors",
399
+ "model.layers.29.self_attn.v_proj.weight": "model-00029-of-00036.safetensors",
400
+ "model.layers.3.input_layernorm.weight": "model-00004-of-00036.safetensors",
401
+ "model.layers.3.mlp.experts.down_proj": "model-00004-of-00036.safetensors",
402
+ "model.layers.3.mlp.experts.down_proj_bias": "model-00004-of-00036.safetensors",
403
+ "model.layers.3.mlp.experts.gate_up_proj": "model-00004-of-00036.safetensors",
404
+ "model.layers.3.mlp.experts.gate_up_proj_bias": "model-00004-of-00036.safetensors",
405
+ "model.layers.3.mlp.router.bias": "model-00003-of-00036.safetensors",
406
+ "model.layers.3.mlp.router.weight": "model-00003-of-00036.safetensors",
407
+ "model.layers.3.post_attention_layernorm.weight": "model-00004-of-00036.safetensors",
408
+ "model.layers.3.self_attn.k_proj.bias": "model-00003-of-00036.safetensors",
409
+ "model.layers.3.self_attn.k_proj.weight": "model-00003-of-00036.safetensors",
410
+ "model.layers.3.self_attn.o_proj.bias": "model-00003-of-00036.safetensors",
411
+ "model.layers.3.self_attn.o_proj.weight": "model-00003-of-00036.safetensors",
412
+ "model.layers.3.self_attn.q_proj.bias": "model-00003-of-00036.safetensors",
413
+ "model.layers.3.self_attn.q_proj.weight": "model-00003-of-00036.safetensors",
414
+ "model.layers.3.self_attn.sinks": "model-00003-of-00036.safetensors",
415
+ "model.layers.3.self_attn.v_proj.bias": "model-00003-of-00036.safetensors",
416
+ "model.layers.3.self_attn.v_proj.weight": "model-00003-of-00036.safetensors",
417
+ "model.layers.30.input_layernorm.weight": "model-00031-of-00036.safetensors",
418
+ "model.layers.30.mlp.experts.down_proj": "model-00031-of-00036.safetensors",
419
+ "model.layers.30.mlp.experts.down_proj_bias": "model-00031-of-00036.safetensors",
420
+ "model.layers.30.mlp.experts.gate_up_proj": "model-00031-of-00036.safetensors",
421
+ "model.layers.30.mlp.experts.gate_up_proj_bias": "model-00031-of-00036.safetensors",
422
+ "model.layers.30.mlp.router.bias": "model-00030-of-00036.safetensors",
423
+ "model.layers.30.mlp.router.weight": "model-00030-of-00036.safetensors",
424
+ "model.layers.30.post_attention_layernorm.weight": "model-00031-of-00036.safetensors",
425
+ "model.layers.30.self_attn.k_proj.bias": "model-00030-of-00036.safetensors",
426
+ "model.layers.30.self_attn.k_proj.weight": "model-00030-of-00036.safetensors",
427
+ "model.layers.30.self_attn.o_proj.bias": "model-00030-of-00036.safetensors",
428
+ "model.layers.30.self_attn.o_proj.weight": "model-00030-of-00036.safetensors",
429
+ "model.layers.30.self_attn.q_proj.bias": "model-00030-of-00036.safetensors",
430
+ "model.layers.30.self_attn.q_proj.weight": "model-00030-of-00036.safetensors",
431
+ "model.layers.30.self_attn.sinks": "model-00030-of-00036.safetensors",
432
+ "model.layers.30.self_attn.v_proj.bias": "model-00030-of-00036.safetensors",
433
+ "model.layers.30.self_attn.v_proj.weight": "model-00030-of-00036.safetensors",
434
+ "model.layers.31.input_layernorm.weight": "model-00032-of-00036.safetensors",
435
+ "model.layers.31.mlp.experts.down_proj": "model-00032-of-00036.safetensors",
436
+ "model.layers.31.mlp.experts.down_proj_bias": "model-00032-of-00036.safetensors",
437
+ "model.layers.31.mlp.experts.gate_up_proj": "model-00032-of-00036.safetensors",
438
+ "model.layers.31.mlp.experts.gate_up_proj_bias": "model-00032-of-00036.safetensors",
439
+ "model.layers.31.mlp.router.bias": "model-00031-of-00036.safetensors",
440
+ "model.layers.31.mlp.router.weight": "model-00031-of-00036.safetensors",
441
+ "model.layers.31.post_attention_layernorm.weight": "model-00032-of-00036.safetensors",
442
+ "model.layers.31.self_attn.k_proj.bias": "model-00031-of-00036.safetensors",
443
+ "model.layers.31.self_attn.k_proj.weight": "model-00031-of-00036.safetensors",
444
+ "model.layers.31.self_attn.o_proj.bias": "model-00031-of-00036.safetensors",
445
+ "model.layers.31.self_attn.o_proj.weight": "model-00031-of-00036.safetensors",
446
+ "model.layers.31.self_attn.q_proj.bias": "model-00031-of-00036.safetensors",
447
+ "model.layers.31.self_attn.q_proj.weight": "model-00031-of-00036.safetensors",
448
+ "model.layers.31.self_attn.sinks": "model-00031-of-00036.safetensors",
449
+ "model.layers.31.self_attn.v_proj.bias": "model-00031-of-00036.safetensors",
450
+ "model.layers.31.self_attn.v_proj.weight": "model-00031-of-00036.safetensors",
451
+ "model.layers.32.input_layernorm.weight": "model-00033-of-00036.safetensors",
452
+ "model.layers.32.mlp.experts.down_proj": "model-00033-of-00036.safetensors",
453
+ "model.layers.32.mlp.experts.down_proj_bias": "model-00033-of-00036.safetensors",
454
+ "model.layers.32.mlp.experts.gate_up_proj": "model-00033-of-00036.safetensors",
455
+ "model.layers.32.mlp.experts.gate_up_proj_bias": "model-00033-of-00036.safetensors",
456
+ "model.layers.32.mlp.router.bias": "model-00032-of-00036.safetensors",
457
+ "model.layers.32.mlp.router.weight": "model-00032-of-00036.safetensors",
458
+ "model.layers.32.post_attention_layernorm.weight": "model-00033-of-00036.safetensors",
459
+ "model.layers.32.self_attn.k_proj.bias": "model-00032-of-00036.safetensors",
460
+ "model.layers.32.self_attn.k_proj.weight": "model-00032-of-00036.safetensors",
461
+ "model.layers.32.self_attn.o_proj.bias": "model-00032-of-00036.safetensors",
462
+ "model.layers.32.self_attn.o_proj.weight": "model-00032-of-00036.safetensors",
463
+ "model.layers.32.self_attn.q_proj.bias": "model-00032-of-00036.safetensors",
464
+ "model.layers.32.self_attn.q_proj.weight": "model-00032-of-00036.safetensors",
465
+ "model.layers.32.self_attn.sinks": "model-00032-of-00036.safetensors",
466
+ "model.layers.32.self_attn.v_proj.bias": "model-00032-of-00036.safetensors",
467
+ "model.layers.32.self_attn.v_proj.weight": "model-00032-of-00036.safetensors",
468
+ "model.layers.33.input_layernorm.weight": "model-00034-of-00036.safetensors",
469
+ "model.layers.33.mlp.experts.down_proj": "model-00034-of-00036.safetensors",
470
+ "model.layers.33.mlp.experts.down_proj_bias": "model-00034-of-00036.safetensors",
471
+ "model.layers.33.mlp.experts.gate_up_proj": "model-00034-of-00036.safetensors",
472
+ "model.layers.33.mlp.experts.gate_up_proj_bias": "model-00034-of-00036.safetensors",
473
+ "model.layers.33.mlp.router.bias": "model-00033-of-00036.safetensors",
474
+ "model.layers.33.mlp.router.weight": "model-00033-of-00036.safetensors",
475
+ "model.layers.33.post_attention_layernorm.weight": "model-00034-of-00036.safetensors",
476
+ "model.layers.33.self_attn.k_proj.bias": "model-00033-of-00036.safetensors",
477
+ "model.layers.33.self_attn.k_proj.weight": "model-00033-of-00036.safetensors",
478
+ "model.layers.33.self_attn.o_proj.bias": "model-00033-of-00036.safetensors",
479
+ "model.layers.33.self_attn.o_proj.weight": "model-00033-of-00036.safetensors",
480
+ "model.layers.33.self_attn.q_proj.bias": "model-00033-of-00036.safetensors",
481
+ "model.layers.33.self_attn.q_proj.weight": "model-00033-of-00036.safetensors",
482
+ "model.layers.33.self_attn.sinks": "model-00033-of-00036.safetensors",
483
+ "model.layers.33.self_attn.v_proj.bias": "model-00033-of-00036.safetensors",
484
+ "model.layers.33.self_attn.v_proj.weight": "model-00033-of-00036.safetensors",
485
+ "model.layers.34.input_layernorm.weight": "model-00035-of-00036.safetensors",
486
+ "model.layers.34.mlp.experts.down_proj": "model-00035-of-00036.safetensors",
487
+ "model.layers.34.mlp.experts.down_proj_bias": "model-00035-of-00036.safetensors",
488
+ "model.layers.34.mlp.experts.gate_up_proj": "model-00035-of-00036.safetensors",
489
+ "model.layers.34.mlp.experts.gate_up_proj_bias": "model-00035-of-00036.safetensors",
490
+ "model.layers.34.mlp.router.bias": "model-00034-of-00036.safetensors",
491
+ "model.layers.34.mlp.router.weight": "model-00034-of-00036.safetensors",
492
+ "model.layers.34.post_attention_layernorm.weight": "model-00035-of-00036.safetensors",
493
+ "model.layers.34.self_attn.k_proj.bias": "model-00034-of-00036.safetensors",
494
+ "model.layers.34.self_attn.k_proj.weight": "model-00034-of-00036.safetensors",
495
+ "model.layers.34.self_attn.o_proj.bias": "model-00034-of-00036.safetensors",
496
+ "model.layers.34.self_attn.o_proj.weight": "model-00034-of-00036.safetensors",
497
+ "model.layers.34.self_attn.q_proj.bias": "model-00034-of-00036.safetensors",
498
+ "model.layers.34.self_attn.q_proj.weight": "model-00034-of-00036.safetensors",
499
+ "model.layers.34.self_attn.sinks": "model-00034-of-00036.safetensors",
500
+ "model.layers.34.self_attn.v_proj.bias": "model-00034-of-00036.safetensors",
501
+ "model.layers.34.self_attn.v_proj.weight": "model-00034-of-00036.safetensors",
502
+ "model.layers.35.input_layernorm.weight": "model-00036-of-00036.safetensors",
503
+ "model.layers.35.mlp.experts.down_proj": "model-00036-of-00036.safetensors",
504
+ "model.layers.35.mlp.experts.down_proj_bias": "model-00036-of-00036.safetensors",
505
+ "model.layers.35.mlp.experts.gate_up_proj": "model-00036-of-00036.safetensors",
506
+ "model.layers.35.mlp.experts.gate_up_proj_bias": "model-00036-of-00036.safetensors",
507
+ "model.layers.35.mlp.router.bias": "model-00035-of-00036.safetensors",
508
+ "model.layers.35.mlp.router.weight": "model-00035-of-00036.safetensors",
509
+ "model.layers.35.post_attention_layernorm.weight": "model-00036-of-00036.safetensors",
510
+ "model.layers.35.self_attn.k_proj.bias": "model-00035-of-00036.safetensors",
511
+ "model.layers.35.self_attn.k_proj.weight": "model-00035-of-00036.safetensors",
512
+ "model.layers.35.self_attn.o_proj.bias": "model-00035-of-00036.safetensors",
513
+ "model.layers.35.self_attn.o_proj.weight": "model-00035-of-00036.safetensors",
514
+ "model.layers.35.self_attn.q_proj.bias": "model-00035-of-00036.safetensors",
515
+ "model.layers.35.self_attn.q_proj.weight": "model-00035-of-00036.safetensors",
516
+ "model.layers.35.self_attn.sinks": "model-00035-of-00036.safetensors",
517
+ "model.layers.35.self_attn.v_proj.bias": "model-00035-of-00036.safetensors",
518
+ "model.layers.35.self_attn.v_proj.weight": "model-00035-of-00036.safetensors",
519
+ "model.layers.4.input_layernorm.weight": "model-00005-of-00036.safetensors",
520
+ "model.layers.4.mlp.experts.down_proj": "model-00005-of-00036.safetensors",
521
+ "model.layers.4.mlp.experts.down_proj_bias": "model-00005-of-00036.safetensors",
522
+ "model.layers.4.mlp.experts.gate_up_proj": "model-00005-of-00036.safetensors",
523
+ "model.layers.4.mlp.experts.gate_up_proj_bias": "model-00005-of-00036.safetensors",
524
+ "model.layers.4.mlp.router.bias": "model-00004-of-00036.safetensors",
525
+ "model.layers.4.mlp.router.weight": "model-00004-of-00036.safetensors",
526
+ "model.layers.4.post_attention_layernorm.weight": "model-00005-of-00036.safetensors",
527
+ "model.layers.4.self_attn.k_proj.bias": "model-00004-of-00036.safetensors",
528
+ "model.layers.4.self_attn.k_proj.weight": "model-00004-of-00036.safetensors",
529
+ "model.layers.4.self_attn.o_proj.bias": "model-00004-of-00036.safetensors",
530
+ "model.layers.4.self_attn.o_proj.weight": "model-00004-of-00036.safetensors",
531
+ "model.layers.4.self_attn.q_proj.bias": "model-00004-of-00036.safetensors",
532
+ "model.layers.4.self_attn.q_proj.weight": "model-00004-of-00036.safetensors",
533
+ "model.layers.4.self_attn.sinks": "model-00004-of-00036.safetensors",
534
+ "model.layers.4.self_attn.v_proj.bias": "model-00004-of-00036.safetensors",
535
+ "model.layers.4.self_attn.v_proj.weight": "model-00004-of-00036.safetensors",
536
+ "model.layers.5.input_layernorm.weight": "model-00006-of-00036.safetensors",
537
+ "model.layers.5.mlp.experts.down_proj": "model-00006-of-00036.safetensors",
538
+ "model.layers.5.mlp.experts.down_proj_bias": "model-00006-of-00036.safetensors",
539
+ "model.layers.5.mlp.experts.gate_up_proj": "model-00006-of-00036.safetensors",
540
+ "model.layers.5.mlp.experts.gate_up_proj_bias": "model-00006-of-00036.safetensors",
541
+ "model.layers.5.mlp.router.bias": "model-00005-of-00036.safetensors",
542
+ "model.layers.5.mlp.router.weight": "model-00005-of-00036.safetensors",
543
+ "model.layers.5.post_attention_layernorm.weight": "model-00006-of-00036.safetensors",
544
+ "model.layers.5.self_attn.k_proj.bias": "model-00005-of-00036.safetensors",
545
+ "model.layers.5.self_attn.k_proj.weight": "model-00005-of-00036.safetensors",
546
+ "model.layers.5.self_attn.o_proj.bias": "model-00005-of-00036.safetensors",
547
+ "model.layers.5.self_attn.o_proj.weight": "model-00005-of-00036.safetensors",
548
+ "model.layers.5.self_attn.q_proj.bias": "model-00005-of-00036.safetensors",
549
+ "model.layers.5.self_attn.q_proj.weight": "model-00005-of-00036.safetensors",
550
+ "model.layers.5.self_attn.sinks": "model-00005-of-00036.safetensors",
551
+ "model.layers.5.self_attn.v_proj.bias": "model-00005-of-00036.safetensors",
552
+ "model.layers.5.self_attn.v_proj.weight": "model-00005-of-00036.safetensors",
553
+ "model.layers.6.input_layernorm.weight": "model-00007-of-00036.safetensors",
554
+ "model.layers.6.mlp.experts.down_proj": "model-00007-of-00036.safetensors",
555
+ "model.layers.6.mlp.experts.down_proj_bias": "model-00007-of-00036.safetensors",
556
+ "model.layers.6.mlp.experts.gate_up_proj": "model-00007-of-00036.safetensors",
557
+ "model.layers.6.mlp.experts.gate_up_proj_bias": "model-00007-of-00036.safetensors",
558
+ "model.layers.6.mlp.router.bias": "model-00006-of-00036.safetensors",
559
+ "model.layers.6.mlp.router.weight": "model-00006-of-00036.safetensors",
560
+ "model.layers.6.post_attention_layernorm.weight": "model-00007-of-00036.safetensors",
561
+ "model.layers.6.self_attn.k_proj.bias": "model-00006-of-00036.safetensors",
562
+ "model.layers.6.self_attn.k_proj.weight": "model-00006-of-00036.safetensors",
563
+ "model.layers.6.self_attn.o_proj.bias": "model-00006-of-00036.safetensors",
564
+ "model.layers.6.self_attn.o_proj.weight": "model-00006-of-00036.safetensors",
565
+ "model.layers.6.self_attn.q_proj.bias": "model-00006-of-00036.safetensors",
566
+ "model.layers.6.self_attn.q_proj.weight": "model-00006-of-00036.safetensors",
567
+ "model.layers.6.self_attn.sinks": "model-00006-of-00036.safetensors",
568
+ "model.layers.6.self_attn.v_proj.bias": "model-00006-of-00036.safetensors",
569
+ "model.layers.6.self_attn.v_proj.weight": "model-00006-of-00036.safetensors",
570
+ "model.layers.7.input_layernorm.weight": "model-00008-of-00036.safetensors",
571
+ "model.layers.7.mlp.experts.down_proj": "model-00008-of-00036.safetensors",
572
+ "model.layers.7.mlp.experts.down_proj_bias": "model-00008-of-00036.safetensors",
573
+ "model.layers.7.mlp.experts.gate_up_proj": "model-00008-of-00036.safetensors",
574
+ "model.layers.7.mlp.experts.gate_up_proj_bias": "model-00008-of-00036.safetensors",
575
+ "model.layers.7.mlp.router.bias": "model-00007-of-00036.safetensors",
576
+ "model.layers.7.mlp.router.weight": "model-00007-of-00036.safetensors",
577
+ "model.layers.7.post_attention_layernorm.weight": "model-00008-of-00036.safetensors",
578
+ "model.layers.7.self_attn.k_proj.bias": "model-00007-of-00036.safetensors",
579
+ "model.layers.7.self_attn.k_proj.weight": "model-00007-of-00036.safetensors",
580
+ "model.layers.7.self_attn.o_proj.bias": "model-00007-of-00036.safetensors",
581
+ "model.layers.7.self_attn.o_proj.weight": "model-00007-of-00036.safetensors",
582
+ "model.layers.7.self_attn.q_proj.bias": "model-00007-of-00036.safetensors",
583
+ "model.layers.7.self_attn.q_proj.weight": "model-00007-of-00036.safetensors",
584
+ "model.layers.7.self_attn.sinks": "model-00007-of-00036.safetensors",
585
+ "model.layers.7.self_attn.v_proj.bias": "model-00007-of-00036.safetensors",
586
+ "model.layers.7.self_attn.v_proj.weight": "model-00007-of-00036.safetensors",
587
+ "model.layers.8.input_layernorm.weight": "model-00009-of-00036.safetensors",
588
+ "model.layers.8.mlp.experts.down_proj": "model-00009-of-00036.safetensors",
589
+ "model.layers.8.mlp.experts.down_proj_bias": "model-00009-of-00036.safetensors",
590
+ "model.layers.8.mlp.experts.gate_up_proj": "model-00009-of-00036.safetensors",
591
+ "model.layers.8.mlp.experts.gate_up_proj_bias": "model-00009-of-00036.safetensors",
592
+ "model.layers.8.mlp.router.bias": "model-00008-of-00036.safetensors",
593
+ "model.layers.8.mlp.router.weight": "model-00008-of-00036.safetensors",
594
+ "model.layers.8.post_attention_layernorm.weight": "model-00009-of-00036.safetensors",
595
+ "model.layers.8.self_attn.k_proj.bias": "model-00008-of-00036.safetensors",
596
+ "model.layers.8.self_attn.k_proj.weight": "model-00008-of-00036.safetensors",
597
+ "model.layers.8.self_attn.o_proj.bias": "model-00008-of-00036.safetensors",
598
+ "model.layers.8.self_attn.o_proj.weight": "model-00008-of-00036.safetensors",
599
+ "model.layers.8.self_attn.q_proj.bias": "model-00008-of-00036.safetensors",
600
+ "model.layers.8.self_attn.q_proj.weight": "model-00008-of-00036.safetensors",
601
+ "model.layers.8.self_attn.sinks": "model-00008-of-00036.safetensors",
602
+ "model.layers.8.self_attn.v_proj.bias": "model-00008-of-00036.safetensors",
603
+ "model.layers.8.self_attn.v_proj.weight": "model-00008-of-00036.safetensors",
604
+ "model.layers.9.input_layernorm.weight": "model-00010-of-00036.safetensors",
605
+ "model.layers.9.mlp.experts.down_proj": "model-00010-of-00036.safetensors",
606
+ "model.layers.9.mlp.experts.down_proj_bias": "model-00010-of-00036.safetensors",
607
+ "model.layers.9.mlp.experts.gate_up_proj": "model-00010-of-00036.safetensors",
608
+ "model.layers.9.mlp.experts.gate_up_proj_bias": "model-00010-of-00036.safetensors",
609
+ "model.layers.9.mlp.router.bias": "model-00009-of-00036.safetensors",
610
+ "model.layers.9.mlp.router.weight": "model-00009-of-00036.safetensors",
611
+ "model.layers.9.post_attention_layernorm.weight": "model-00010-of-00036.safetensors",
612
+ "model.layers.9.self_attn.k_proj.bias": "model-00009-of-00036.safetensors",
613
+ "model.layers.9.self_attn.k_proj.weight": "model-00009-of-00036.safetensors",
614
+ "model.layers.9.self_attn.o_proj.bias": "model-00009-of-00036.safetensors",
615
+ "model.layers.9.self_attn.o_proj.weight": "model-00009-of-00036.safetensors",
616
+ "model.layers.9.self_attn.q_proj.bias": "model-00009-of-00036.safetensors",
617
+ "model.layers.9.self_attn.q_proj.weight": "model-00009-of-00036.safetensors",
618
+ "model.layers.9.self_attn.sinks": "model-00009-of-00036.safetensors",
619
+ "model.layers.9.self_attn.v_proj.bias": "model-00009-of-00036.safetensors",
620
+ "model.layers.9.self_attn.v_proj.weight": "model-00009-of-00036.safetensors",
621
+ "model.norm.weight": "model-00036-of-00036.safetensors"
622
+ }
623
+ }
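For reference, a minimal sketch of how the "weight_map" above is consumed: given a tensor name, the index tells you which of the 36 shards to open. This assumes the shards have been downloaded next to the index file; the directory path below is a placeholder, not part of this repo.

    import json
    from pathlib import Path

    ckpt_dir = Path("./checkpoint")  # placeholder: local copy of this repo

    # The index maps each tensor name to the shard file that stores it.
    with open(ckpt_dir / "model.safetensors.index.json") as f:
        index = json.load(f)

    name = "model.layers.16.self_attn.q_proj.weight"
    shard = index["weight_map"][name]  # -> "model-00016-of-00036.safetensors"

    # Optionally read just that tensor without loading the whole shard:
    from safetensors import safe_open
    with safe_open(str(ckpt_dir / shard), framework="pt") as st:
        tensor = st.get_tensor(name)

Note the pattern visible in the map: each layer's router and attention tensors sit one shard earlier than its expert weights, since the large mlp.experts.* tensors dominate shard size and the sharder rolls over to a new file at each MoE block.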
special_tokens_map.json ADDED
@@ -0,0 +1,5 @@
+ {
+ "bos_token": "<|startoftext|>",
+ "eos_token": "<|return|>",
+ "pad_token": "<|endoftext|>"
+ }
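A quick check (placeholder path again) that this mapping takes effect when the tokenizer loads. End-of-sequence is <|return|>, while <|endoftext|> is kept as the padding token, so generation code should stop on <|return|> rather than on the classic GPT end-of-text token.

    from transformers import AutoTokenizer

    tok = AutoTokenizer.from_pretrained("./checkpoint")  # placeholder local path
    assert tok.bos_token == "<|startoftext|>"
    assert tok.eos_token == "<|return|>"     # generation should stop here
    assert tok.pad_token == "<|endoftext|>"  # padding, distinct from EOS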
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0614fe83cadab421296e664e1f48f4261fa8fef6e03e63bb75c20f38e37d07d3
+ size 27868174
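tokenizer.json is tracked by Git LFS (per the .gitattributes rule added in this commit), so a plain clone without LFS support yields only the three-line pointer stub above instead of the ~27 MB file. A minimal sketch of fetching the real file through huggingface_hub, which resolves LFS pointers server-side; the repo id below is a placeholder:

    from huggingface_hub import hf_hub_download

    # Returns a local cache path to the fully materialized file.
    path = hf_hub_download(repo_id="org/model", filename="tokenizer.json")
    print(path)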
tokenizer_config.json ADDED
@@ -0,0 +1,183 @@
+ {
+ "added_tokens_decoder": {
+ "199998": {
+ "content": "<|startoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "199999": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "200000": {
+ "content": "<|reserved_200000|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "200001": {
+ "content": "<|reserved_200001|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "200002": {
+ "content": "<|return|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "200003": {
+ "content": "<|constrain|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "200004": {
+ "content": "<|reserved_200004|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "200005": {
+ "content": "<|channel|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "200006": {
+ "content": "<|start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "200007": {
+ "content": "<|end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "200008": {
+ "content": "<|message|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "200009": {
+ "content": "<|reserved_200009|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "200010": {
+ "content": "<|reserved_200010|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "200011": {
+ "content": "<|reserved_200011|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "200012": {
+ "content": "<|call|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "200013": {
+ "content": "<|reserved_200013|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "200014": {
+ "content": "<|reserved_200014|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "200015": {
+ "content": "<|reserved_200015|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "200016": {
+ "content": "<|reserved_200016|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "200017": {
+ "content": "<|reserved_200017|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "200018": {
+ "content": "<|endofprompt|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "<|startoftext|>",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "<|return|>",
+ "extra_special_tokens": {},
+ "model_input_names": [
+ "input_ids",
+ "attention_mask"
+ ],
+ "model_max_length": 1000000000000000019884624838656,
+ "pad_token": "<|endoftext|>",
+ "tokenizer_class": "PreTrainedTokenizerFast"
+ }
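As a sanity check on the "added_tokens_decoder" block above, the chat-format control tokens are pinned to fixed ids at the top of the vocabulary and should each encode to a single token. A small sketch (placeholder path once more) verifying a few of them:

    from transformers import AutoTokenizer

    tok = AutoTokenizer.from_pretrained("./checkpoint")  # placeholder local path
    for token, expected_id in [
        ("<|start|>", 200006),
        ("<|channel|>", 200005),
        ("<|message|>", 200008),
        ("<|return|>", 200002),
    ]:
        ids = tok.encode(token, add_special_tokens=False)
        assert ids == [expected_id], (token, ids)  # one id per control token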