v4 safety checker
safety_checker/config.json  CHANGED  (+2 -15)
@@ -80,17 +80,11 @@
     "top_p": 1.0,
     "torch_dtype": null,
     "torchscript": false,
-    "transformers_version": "4.
+    "transformers_version": "4.27.1",
     "typical_p": 1.0,
     "use_bfloat16": false,
     "vocab_size": 49408
   },
-  "text_config_dict": {
-    "hidden_size": 768,
-    "intermediate_size": 3072,
-    "num_attention_heads": 12,
-    "num_hidden_layers": 12
-  },
   "torch_dtype": "float32",
   "transformers_version": null,
   "vision_config": {
@@ -167,15 +161,8 @@
     "top_p": 1.0,
     "torch_dtype": null,
     "torchscript": false,
-    "transformers_version": "4.
+    "transformers_version": "4.27.1",
     "typical_p": 1.0,
     "use_bfloat16": false
-  },
-  "vision_config_dict": {
-    "hidden_size": 1024,
-    "intermediate_size": 4096,
-    "num_attention_heads": 16,
-    "num_hidden_layers": 24,
-    "patch_size": 14
   }
 }
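The net effect of the diff is that the duplicated text_config_dict and vision_config_dict blocks are removed and the nested sub-configs are stamped with transformers_version "4.27.1". As a quick sanity check, a minimal sketch like the one below (not part of this commit; the repository id is an assumption, substitute the repo that hosts this safety_checker/config.json) loads the config with transformers and reads the values that previously lived in the removed dict blocks:

# Minimal verification sketch; repo id below is an assumption for illustration.
from transformers import CLIPConfig

config = CLIPConfig.from_pretrained(
    "CompVis/stable-diffusion-v1-4",  # assumed repo id
    subfolder="safety_checker",
)

# These should match the values carried by the removed *_config_dict blocks.
print(config.text_config.hidden_size)    # expected 768
print(config.vision_config.hidden_size)  # expected 1024
print(config.vision_config.patch_size)   # expected 14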