ArthurZ (HF staff) committed on
Commit 6129a0a
1 Parent(s): 62e97d2

Update config.json

Files changed (1)
  1. config.json +3 -3
config.json CHANGED
@@ -32,7 +32,7 @@
   "alignment_head": 2,
   "alignment_layer": 68,
   "attention_multiplier": 0.25,
-  "attention_pattern": "RawColumnPreviousRowAttention",
+  "attention_pattern": "raw_column_previous_row_attention",
   "attn_dropout": 0.0,
   "attn_res_scale": false,
   "blocks": 32,
@@ -132,7 +132,7 @@
   "alignment_head": null,
   "alignment_layer": null,
   "attention_multiplier": 0.25,
-  "attention_pattern": "RawColumnPreviousRowAttention",
+  "attention_pattern": "raw_column_previous_row_attention",
   "attn_dropout": 0,
   "attn_res_scale": false,
   "blocks": 64,
@@ -189,7 +189,7 @@
   "alignment_head": null,
   "alignment_layer": null,
   "attention_multiplier": 0.25,
-  "attention_pattern": "RawColumnPreviousRowAttention",
+  "attention_pattern": "raw_column_previous_row_attention",
   "attn_dropout": 0,
   "attn_res_scale": false,
   "blocks": 64,