{
  "quantization": {
    "exclude_modules": [
      "lm_head",
      "model.embed_tokens"
    ],
    "kv_cache_quant_algo": null,
    "quant_algo": "FP8"
  }
}
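
This config requests FP8 quantization (quant_algo), leaves the KV cache unquantized (kv_cache_quant_algo is null), and keeps lm_head and model.embed_tokens in their original precision (exclude_modules). A minimal sketch of how a loader might consume it, assuming the file is saved as "hf_quant_config.json" and using a hypothetical should_quantize helper for illustration:

import json

# Assumed filename; this file view does not state it.
with open("hf_quant_config.json") as f:
    cfg = json.load(f)["quantization"]

def should_quantize(module_name: str) -> bool:
    """Return True if a module would be quantized to FP8 under this config."""
    # Modules listed in exclude_modules ("lm_head", "model.embed_tokens")
    # stay in their original precision; everything else follows quant_algo.
    return cfg["quant_algo"] == "FP8" and module_name not in cfg["exclude_modules"]

# kv_cache_quant_algo is null here, i.e. the KV cache is left unquantized.
print(should_quantize("model.layers.0.self_attn.q_proj"))  # True
print(should_quantize("lm_head"))                          # False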