OPEA /
Safetensors · olmo2 · 4-bit precision · auto-round
cicdatopea committed on
Commit d437200 · verified · 1 Parent(s): 84754a0

Update config.json

Files changed (1): config.json (+1 / -44)
config.json CHANGED
@@ -37,50 +37,7 @@
     "scale_dtype": "torch.float16",
     "seqlen": 2048,
     "sym": true,
-    "to_quant_block_names": [
-      [
-        "model.layers.0",
-        "model.layers.1",
-        "model.layers.2",
-        "model.layers.3",
-        "model.layers.4",
-        "model.layers.5",
-        "model.layers.6",
-        "model.layers.7",
-        "model.layers.8",
-        "model.layers.9",
-        "model.layers.10",
-        "model.layers.11",
-        "model.layers.12",
-        "model.layers.13",
-        "model.layers.14",
-        "model.layers.15",
-        "model.layers.16",
-        "model.layers.17",
-        "model.layers.18",
-        "model.layers.19",
-        "model.layers.20",
-        "model.layers.21",
-        "model.layers.22",
-        "model.layers.23",
-        "model.layers.24",
-        "model.layers.25",
-        "model.layers.26",
-        "model.layers.27",
-        "model.layers.28",
-        "model.layers.29",
-        "model.layers.30",
-        "model.layers.31",
-        "model.layers.32",
-        "model.layers.33",
-        "model.layers.34",
-        "model.layers.35",
-        "model.layers.36",
-        "model.layers.37",
-        "model.layers.38",
-        "model.layers.39"
-      ]
-    ]
+    "block_name_to_quantize": "model.layers"
   },
   "rms_norm_eps": 1e-06,
   "rope_scaling": null,