cicdatopea committed · verified
Commit 1e3da39 · 1 Parent(s): 6fb992b

Update config.json

Files changed (1)
  1. config.json +1 -31
config.json CHANGED

@@ -57,37 +57,7 @@
     "scale_dtype": "torch.float16",
     "seqlen": 2048,
     "sym": true,
-    "to_quant_block_names": [
-      [
-        "model.layers.0",
-        "model.layers.1",
-        "model.layers.2",
-        "model.layers.3",
-        "model.layers.4",
-        "model.layers.5",
-        "model.layers.6",
-        "model.layers.7",
-        "model.layers.8",
-        "model.layers.9",
-        "model.layers.10",
-        "model.layers.11",
-        "model.layers.12",
-        "model.layers.13",
-        "model.layers.14",
-        "model.layers.15",
-        "model.layers.16",
-        "model.layers.17",
-        "model.layers.18",
-        "model.layers.19",
-        "model.layers.20",
-        "model.layers.21",
-        "model.layers.22",
-        "model.layers.23",
-        "model.layers.24",
-        "model.layers.25",
-        "model.layers.26"
-      ]
-    ]
+    "block_name_to_quantize":"model.layers"
   },
   "rms_norm_eps": 1e-06,
   "rope_scaling": {