use_cache to true for better inference
Browse files- config.json +1 -1
config.json
CHANGED
@@ -64,6 +64,6 @@
|
|
64 |
"repetition_penalty": 3.5,
|
65 |
"torch_dtype": "float32",
|
66 |
"transformers_version": "4.27.4",
|
67 |
-
"use_cache": false,
|
68 |
"vocab_size": 50265
|
69 |
}
|
|
|
64 |
"repetition_penalty": 3.5,
|
65 |
"torch_dtype": "float32",
|
66 |
"transformers_version": "4.27.4",
|
67 |
+
"use_cache": true,
|
68 |
"vocab_size": 50265
|
69 |
}
|