Upload Gemma3ForCausalLM
config.json +1 -1
model.safetensors +2 -2
config.json
CHANGED
@@ -27,7 +27,7 @@
   "rope_theta": 1000000,
   "sliding_window": 512,
   "sliding_window_pattern": 6,
-  "torch_dtype": "
+  "torch_dtype": "bfloat16",
   "transformers_version": "4.50.0.dev0",
   "use_cache": true,
   "vocab_size": 262144
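For reference, a minimal sketch of loading this checkpoint so that the "torch_dtype": "bfloat16" set above is respected. The repo id is a placeholder assumption, not the actual repository name:

# Sketch: load the checkpoint with the dtype recorded in config.json.
import torch
from transformers import AutoModelForCausalLM

repo_id = "path/to/this-repo"  # placeholder assumption; substitute the real repo id

model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    torch_dtype="auto",  # reads "torch_dtype": "bfloat16" from config.json
)
assert model.dtype == torch.bfloat16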
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:ee5250f6eb1aa7cfb729dfd4dc8d9964fd772324776c6d00bf2bc674c069cb27
+size 1999811208
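Since model.safetensors is stored as a Git LFS pointer, the oid and size above can be checked against the downloaded blob. A sketch, assuming the file has already been pulled locally; the 1999811208-byte size (about 2.0 GB) is consistent with roughly 1B parameters at 2 bytes per bfloat16 weight:

# Sketch: verify a local model.safetensors against the LFS pointer above.
import hashlib

EXPECTED_OID = "ee5250f6eb1aa7cfb729dfd4dc8d9964fd772324776c6d00bf2bc674c069cb27"
EXPECTED_SIZE = 1999811208  # bytes, from the pointer file

h = hashlib.sha256()
size = 0
with open("model.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # stream in 1 MiB chunks
        h.update(chunk)
        size += len(chunk)

assert size == EXPECTED_SIZE, f"size mismatch: {size}"
assert h.hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("model.safetensors matches the LFS pointer")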