OpenSourceRonin committed on
Commit
96617d1
·
verified ·
1 Parent(s): 4a380bd

Upload Qwen2.5-32B-Instruct-v8-k65536-65536-woft model

Browse files
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
config.json CHANGED
The diff for this file is too large to render. See raw diff
 
generation_config.json CHANGED
@@ -1,25 +1,6 @@
1
  {
2
- "attn_implementation": "flash_attention_2",
3
  "bos_token_id": 151643,
4
- "cache_config": null,
5
- "cache_implementation": null,
6
- "do_sample": true,
7
- "dola_layers": null,
8
- "eos_token_id": [
9
- 151645,
10
- 151643
11
- ],
12
- "max_matching_ngram_size": null,
13
- "min_p": null,
14
- "output_logits": null,
15
- "pad_token_id": 151643,
16
- "repetition_penalty": 1.05,
17
- "return_legacy_cache": true,
18
- "stop_strings": null,
19
- "temperature": 0.7,
20
- "token_healing": false,
21
- "top_k": 20,
22
- "top_p": 0.8,
23
- "transformers_version": "4.37.2",
24
- "watermarking_config": null
25
  }
 
1
  {
2
+ "_from_model_config": true,
3
  "bos_token_id": 151643,
4
+ "eos_token_id": 151645,
5
+ "transformers_version": "4.49.0"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6
  }
model-00001-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:19b937baf5c66f9a3d16a01e8af554e0f1ded70a4503a7aa4e7c0036b6a9f5e3
3
- size 4962391928
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:95175872c7d6bc16fb4cb10a74d8c46644196d144ed62e81c85b19bffd24055b
3
+ size 4960812808
model-00002-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:52ccae3ba325fd905573b5f605413436b7b3e0e882803f32af658a335d6fb248
3
- size 4991334496
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7bb15cfa67376530f9de4d930acdd0ef248abe9775a70c28b1856b10e1da527c
3
+ size 4989091728
model-00003-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:565c0befce1df7bca41ad5939332a17750448400296837a7c69362532cccefe9
3
- size 4991469664
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b1dc37832a68e44649d1410484ced76e790449b1228a7b21b477e6214e526fad
3
+ size 4989181840
model-00004-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:da338d5f91ffdd7bbf76b8a2c46ca56ed0b44e2e3b3a37752eaa36c9b689399e
3
- size 4736417872
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5298f750d4cfab5a2c798c381318e5c4775a2638f88f7024ab167642f62cbfc9
3
+ size 4735007920
model.safetensors.index.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -199,6 +199,7 @@
199
  "clean_up_tokenization_spaces": false,
200
  "eos_token": "<|im_end|>",
201
  "errors": "replace",
 
202
  "legacy": false,
203
  "model_max_length": 131072,
204
  "pad_token": "<|endoftext|>",
 
199
  "clean_up_tokenization_spaces": false,
200
  "eos_token": "<|im_end|>",
201
  "errors": "replace",
202
+ "extra_special_tokens": {},
203
  "legacy": false,
204
  "model_max_length": 131072,
205
  "pad_token": "<|endoftext|>",