Upload config.json with huggingface_hub
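As the commit message notes, this upload was made with the huggingface_hub client rather than the web UI. A minimal sketch of how such a commit is typically produced; the repo id and local path are placeholders, not values taken from this commit:

```python
# Sketch of uploading a single file with huggingface_hub.
# repo_id and the local path are placeholder assumptions.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="./config.json",        # local file to upload
    path_in_repo="config.json",             # destination path in the repo
    repo_id="your-username/your-model",     # placeholder repo id
    commit_message="Upload config.json with huggingface_hub",
)
```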
config.json (CHANGED, +14 -1)
@@ -24,5 +24,18 @@
   "torch_dtype": "float32",
   "transformers_version": "4.50.2",
   "use_moe": false,
-  "vocab_size": 6400
+  "vocab_size": 6400,
+  "task_type": "text-generation",
+  "auto_map": {
+    "AutoModelForCausalLM": ["model.model.MiniMindLM", null]
+  },
+  "generation_config": {
+    "max_length": 2048,
+    "temperature": 0.7,
+    "top_p": 0.9,
+    "do_sample": true,
+    "pad_token_id": 0,
+    "eos_token_id": 2,
+    "bos_token_id": 1
+  }
 }
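The `auto_map` entry points `AutoModelForCausalLM` at custom modeling code (`model.model.MiniMindLM`) shipped in the repo, so loading this checkpoint requires `trust_remote_code=True`. A minimal sketch of how a downstream user might load the model and generate with the defaults added in this diff; the repo id is a placeholder assumption:

```python
# Sketch of loading a model whose config defines auto_map, and
# generating with the values from the new "generation_config" block.
# repo_id is a placeholder assumption.
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "your-username/your-model"  # placeholder repo id

tokenizer = AutoTokenizer.from_pretrained(repo_id)
# trust_remote_code=True lets transformers import the custom
# MiniMindLM class referenced by "auto_map".
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)

inputs = tokenizer("Hello", return_tensors="pt")
outputs = model.generate(
    **inputs,
    max_length=2048,
    temperature=0.7,
    top_p=0.9,
    do_sample=True,
    pad_token_id=0,
    eos_token_id=2,
    bos_token_id=1,
)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```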