Upload model #4
by ArthurZ (HF staff) - opened

Files changed:
- config.json: +256 -200
- pytorch_model-00001-of-00002.bin: +2 -2
- pytorch_model-00002-of-00002.bin: +2 -2
- pytorch_model.bin.index.json: +337 -337
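For reference, a minimal sketch of how one could fetch the files touched by this pull request. The repository id is an assumption (the page only shows the PR number); `refs/pr/4` is the Hub's git revision name for PR #4.

```python
# Sketch only -- not part of the PR. The repo id below is assumed, since the
# captured page does not name the repository.
from huggingface_hub import snapshot_download

REPO_ID = "openai/jukebox-1b-lyrics"  # hypothetical / assumed repo id

# Hub pull requests are exposed as git revisions named "refs/pr/<n>".
local_dir = snapshot_download(repo_id=REPO_ID, revision="refs/pr/4")
print(local_dir)
```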
config.json
CHANGED
@@ -1,227 +1,283 @@
  {
-   […]
    "architectures": [
      "JukeboxModel"
    ],
-   "cond_c_res": [
-     0,
-     1,
-     1
-   ],
-   "cond_depth": [
-     3,
-     16,
-     16
-   ],
-   "cond_dilation_cycle": [
-     null,
-     8,
-     8
-   ],
-   "cond_dilation_growth_rate": [
-     1,
-     3,
-     3
-   ],
-   "cond_downs_t": [
-     3,
-     2,
-     2
-   ],
-   "cond_m_conv": 1,
-   "cond_res_scale": [
-     null,
-     true,
-     false
-   ],
-   "cond_strides_t": [
-     2,
-     2,
-     2
-   ],
-   "cond_width": [
-     128,
-     1024,
-     1024
-   ],
-   "cond_zero_out": false,
-   "copy_input": false,
-   "fp16_params": true,
    "hop_fraction": [
      0.125,
      0.5,
      0.5
    ],
    "init_std": 0.2,
-   "lyric_conditioning": [
-     true,
-     false,
-     false
-   ],
    "max_duration": 600.0,
    "max_nb_genres": 1,
-   "merged_decoder": [
-     true,
-     false,
-     false
-   ],
    "metadata_conditioning": true,
-   […]
      604,
      7898
    ],
-   […]
      120,
      4111
    ],
-   […]
      120,
      4111
-   ]
-   […]
-     0.4,
-     0.0,
-     0.0
-   ],
-   "lyric_enc_m_attn": 0.25,
-   "lyric_enc_m_mlp": 1.0,
-   "lyric_enc_n_vocab": 79,
-   "lyric_enc_pos_init": false,
-   "lyric_enc_res_scale": false,
-   "lyric_enc_resid_dropout": 0.0,
-   "lyric_enc_spread": null,
-   "lyric_enc_width": [
-     128,
-     128,
-     128
-   ],
-   "lyric_enc_zero_out": false,
-   "prior_alignment_head": [
-     2,
-     null,
-     null
-   ],
-   "prior_alignment_layer": [
-     68,
-     null,
-     null
-   ],
-   "prior_attn_dropout": 0,
-   "prior_attn_order": [
-     12,
-     2,
-     2
-   ],
-   "prior_blocks": 64,
-   "prior_depth": [
-     72,
-     72,
-     72
-   ],
-   "prior_emb_dropout": 0,
-   "prior_init_scale": [
-     0.2,
-     1,
-     1
-   ],
-   "prior_latent_dim": 2048,
-   "prior_m_attn": 0.25,
-   "prior_n_ctx": [
-     6144,
-     8192,
-     8192
-   ],
-   "prior_n_heads": [
-     2,
-     1,
-     1
-   ],
-   "prior_pos_init": false,
-   "prior_res_scale": false,
-   "prior_resid_dropout": 0,
-   "prior_spread": null,
-   "prior_width": [
-     2048,
-     1920,
-     1920
-   ],
-   "prior_zero_out": false,
-   "sample_length": 1058304,
    "sampling_rate": 44100,
-   "single_enc_dec": [
-     true,
-     false,
-     false
-   ],
    "timing_dims": 64,
    "torch_dtype": "float32",
-   "transformers_version":
-   […]
-   ],
-   "vqvae_emmbedding_width": 64,
-   "vqvae_levels": 3,
-   "vqvae_lmu": 0.99,
-   "vqvae_m_conv": 1,
-   "vqvae_multipliers": [
-     2,
-     1,
-     1
-   ],
-   "vqvae_music_tokens_shapes": [
-     [
-       8268
    ],
-   […]
    ],
-   […]
  }

  {
+   "_commit_hash": null,
+   "_name_or_path": "/home/arthur_huggingface_co/transformers/jukebox-1b-lyrics-converted",
    "architectures": [
      "JukeboxModel"
    ],
    "hop_fraction": [
      0.125,
      0.5,
      0.5
    ],
    "init_std": 0.2,
    "max_duration": 600.0,
    "max_nb_genres": 1,
    "metadata_conditioning": true,
+   "min_duration": 17.84,
+   "model_type": "jukebox",
+   "nb_priors": 3,
+   "prior_0": {
+     "act_fn": "quick_gelu",
+     "alignment_head": 2,
+     "alignment_layer": 68,
+     "attention_multiplier": 0.25,
+     "attention_pattern": "enc_dec_with_lyrics",
+     "attn_dropout": 0,
+     "attn_res_scale": false,
+     "blocks": 64,
+     "conv_res_scale": false,
+     "depth": 72,
+     "emb_dropout": 0,
+     "embed_dim": 2048,
+     "encoder_attention_multiplier": 0.25,
+     "encoder_attention_pattern": "RawColumnPreviousRowAttention",
+     "encoder_attn_dropout": 0.0,
+     "encoder_attn_res_scale": false,
+     "encoder_blocks": 32,
+     "encoder_depth": 18,
+     "encoder_emb_dropout": 0.0,
+     "encoder_heads": 4,
+     "encoder_init_scale": 0.1,
+     "encoder_loss_fraction": [
+       0.4,
+       0.0,
+       0.0
+     ],
+     "encoder_mlp_multiplier": 1.0,
+     "encoder_n_vocab": 79,
+     "encoder_resid_dropout": 0.0,
+     "encoder_spread": null,
+     "encoder_width": 1280,
+     "encoder_zero_out": false,
+     "init_scale": 0.2,
+     "is_encoder_decoder": true,
+     "lyric_conditioning": true,
+     "mask": true,
+     "max_duration": 600.0,
+     "max_nb_genres": 1,
+     "merged_decoder": false,
+     "metadata_conditioning": true,
+     "metadata_dims": [
        604,
        7898
      ],
+     "min_duration": 17.84,
+     "mlp_multiplier": 1.0,
+     "model_type": "jukebox",
+     "n_ctx": 6144,
+     "n_heads": 2,
+     "nb_relevant_lyric_tokens": 384,
+     "res_conv_depth": null,
+     "res_conv_width": null,
+     "res_convolution_multiplier": null,
+     "res_dilation_cycle": null,
+     "res_dilation_growth_rate": null,
+     "res_downs_t": [
+       3,
+       2,
+       2
+     ],
+     "res_strides_t": [
+       2,
+       2,
+       2
+     ],
+     "resid_dropout": 0,
+     "sampling_rate": 44100,
+     "spread": null,
+     "timing_dims": 64,
+     "transformers_version": "4.25.0.dev0",
+     "width": 2048,
+     "zero_out": false
+   },
+   "prior_1": {
+     "act_fn": "quick_gelu",
+     "alignment_head": null,
+     "alignment_layer": null,
+     "attention_multiplier": 0.25,
+     "attention_pattern": "RawColumnPreviousRowAttention",
+     "attn_dropout": 0,
+     "attn_res_scale": false,
+     "blocks": 64,
+     "conv_res_scale": true,
+     "depth": 72,
+     "emb_dropout": 0,
+     "embed_dim": 2048,
+     "encoder_attention_multiplier": null,
+     "encoder_attention_pattern": null,
+     "encoder_attn_dropout": null,
+     "encoder_attn_res_scale": false,
+     "encoder_blocks": null,
+     "encoder_depth": null,
+     "encoder_emb_dropout": null,
+     "encoder_heads": null,
+     "encoder_init_scale": null,
+     "encoder_loss_fraction": [
+       0.4,
+       0.0,
+       0.0
+     ],
+     "encoder_mlp_multiplier": null,
+     "encoder_n_vocab": 0,
+     "encoder_resid_dropout": null,
+     "encoder_spread": null,
+     "encoder_width": null,
+     "encoder_zero_out": null,
+     "init_scale": 1,
+     "is_encoder_decoder": false,
+     "lyric_conditioning": false,
+     "mask": true,
+     "max_duration": 600.0,
+     "max_nb_genres": 1,
+     "merged_decoder": false,
+     "metadata_conditioning": true,
+     "metadata_dims": [
        120,
        4111
      ],
+     "min_duration": 17.84,
+     "mlp_multiplier": 1.0,
+     "model_type": "jukebox",
+     "n_ctx": 8192,
+     "n_heads": 1,
+     "nb_relevant_lyric_tokens": 0,
+     "res_conv_depth": 16,
+     "res_conv_width": 1024,
+     "res_convolution_multiplier": 1,
+     "res_dilation_cycle": 8,
+     "res_dilation_growth_rate": 3,
+     "res_downs_t": [
+       3,
+       2,
+       2
+     ],
+     "res_strides_t": [
+       2,
+       2,
+       2
+     ],
+     "resid_dropout": 0,
+     "sampling_rate": 44100,
+     "spread": null,
+     "timing_dims": 64,
+     "transformers_version": "4.25.0.dev0",
+     "width": 1920,
+     "zero_out": false
+   },
+   "prior_2": {
+     "act_fn": "quick_gelu",
+     "alignment_head": null,
+     "alignment_layer": null,
+     "attention_multiplier": 0.25,
+     "attention_pattern": "RawColumnPreviousRowAttention",
+     "attn_dropout": 0,
+     "attn_res_scale": false,
+     "blocks": 64,
+     "conv_res_scale": null,
+     "depth": 72,
+     "emb_dropout": 0,
+     "embed_dim": 2048,
+     "encoder_attention_multiplier": null,
+     "encoder_attention_pattern": null,
+     "encoder_attn_dropout": null,
+     "encoder_attn_res_scale": false,
+     "encoder_blocks": null,
+     "encoder_depth": null,
+     "encoder_emb_dropout": null,
+     "encoder_heads": null,
+     "encoder_init_scale": null,
+     "encoder_loss_fraction": [
+       0.4,
+       0.0,
+       0.0
+     ],
+     "encoder_mlp_multiplier": null,
+     "encoder_n_vocab": 0,
+     "encoder_resid_dropout": null,
+     "encoder_spread": null,
+     "encoder_width": null,
+     "encoder_zero_out": null,
+     "init_scale": 1,
+     "is_encoder_decoder": false,
+     "lyric_conditioning": false,
+     "mask": true,
+     "max_duration": 600.0,
+     "max_nb_genres": 1,
+     "merged_decoder": false,
+     "metadata_conditioning": true,
+     "metadata_dims": [
        120,
        4111
+     ],
+     "min_duration": 17.84,
+     "mlp_multiplier": 1.0,
+     "model_type": "jukebox",
+     "n_ctx": 8192,
+     "n_heads": 1,
+     "nb_relevant_lyric_tokens": 0,
+     "res_conv_depth": 16,
+     "res_conv_width": 1024,
+     "res_convolution_multiplier": 1,
+     "res_dilation_cycle": 8,
+     "res_dilation_growth_rate": 3,
+     "res_downs_t": [
+       3,
+       2,
+       2
+     ],
+     "res_strides_t": [
+       2,
+       2,
+       2
+     ],
+     "resid_dropout": 0,
+     "sampling_rate": 44100,
+     "spread": null,
+     "timing_dims": 64,
+     "transformers_version": "4.25.0.dev0",
+     "width": 1920,
+     "zero_out": false
+   },
    "sampling_rate": 44100,
    "timing_dims": 64,
    "torch_dtype": "float32",
+   "transformers_version": null,
+   "vqvae_config": {
+     "act_fn": "relu",
+     "codebook_dimension": 2048,
+     "commit": 0.02,
+     "conv_input_shape": 1,
+     "conv_res_scale": false,
+     "embed_dim": 64,
+     "hop_fraction": [
+       0.125,
+       0.5,
+       0.5
      ],
+     "levels": 3,
+     "lmu": 0.99,
+     "model_type": "",
+     "multipliers": [
+       2,
+       1,
+       1
      ],
+     "res_conv_depth": 4,
+     "res_conv_width": 32,
+     "res_convolution_multiplier": 1,
+     "res_dilation_cycle": null,
+     "res_dilation_growth_rate": 3,
+     "res_downs_t": [
+       3,
+       2,
+       2
+     ],
+     "res_strides_t": [
+       2,
+       2,
+       2
+     ],
+     "sample_length": 1058304,
+     "transformers_version": "4.25.0.dev0"
+   }
  }
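The substance of the config.json change is that the flat `cond_*`, `lyric_enc_*`, `prior_*` and `vqvae_*` lists (one value per level) are regrouped into nested `prior_0`, `prior_1`, `prior_2` and `vqvae_config` objects. A minimal sketch, assuming only a local copy of the updated file, that reads the new layout with the standard library:

```python
import json
from pathlib import Path

# Assumed local path to the config.json uploaded in this PR.
config_path = Path("jukebox-1b-lyrics/config.json")
config = json.loads(config_path.read_text())

# Per-level settings now live in nested prior_<i> objects instead of parallel lists.
for i in range(config["nb_priors"]):
    prior = config[f"prior_{i}"]
    print(i, prior["n_ctx"], prior["width"], prior["depth"], prior["lyric_conditioning"])

# The VQ-VAE gets its own nested block as well.
vqvae = config["vqvae_config"]
print(vqvae["levels"], vqvae["codebook_dimension"], vqvae["sample_length"])
```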
pytorch_model-00001-of-00002.bin
CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:
- size

  version https://git-lfs.github.com/spec/v1
+ oid sha256:21bf8948a12345a68299b80eb68917ade0743d4f54723ba4112f4ce5e1d4bf1f
+ size 9961070749
pytorch_model-00002-of-00002.bin
CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:
- size

  version https://git-lfs.github.com/spec/v1
+ oid sha256:7525296bfd9c9bd161b3f7f84e2ed935bcab4c99566d8e4baf160e0385f3edd9
+ size 1684364091
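Both shard files change only in their git-lfs pointers (new `oid sha256` digests and byte sizes). A small sketch, assuming the shards have been downloaded to a local directory, for checking them against the pointer values above using only the standard library:

```python
import hashlib
from pathlib import Path

# Expected digests and sizes copied from the updated LFS pointer files in this PR.
EXPECTED = {
    "pytorch_model-00001-of-00002.bin": (
        "21bf8948a12345a68299b80eb68917ade0743d4f54723ba4112f4ce5e1d4bf1f", 9961070749),
    "pytorch_model-00002-of-00002.bin": (
        "7525296bfd9c9bd161b3f7f84e2ed935bcab4c99566d8e4baf160e0385f3edd9", 1684364091),
}

local_dir = Path("jukebox-1b-lyrics")  # assumed download location

for name, (digest, size) in EXPECTED.items():
    path = local_dir / name
    h = hashlib.sha256()
    with path.open("rb") as f:
        # Hash in 1 MiB chunks so the ~10 GB shard is never fully in memory.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    assert path.stat().st_size == size, f"size mismatch for {name}"
    assert h.hexdigest() == digest, f"sha256 mismatch for {name}"
    print(name, "ok")
```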
pytorch_model.bin.index.json
CHANGED
@@ -3,145 +3,11 @@
    "total_size": 11677355788
  },
  "weight_map": {
-   "priors.0.[…]
-   "priors.0.[…]
-   "priors.0.[…]
-   "priors.0.[…]
-   "priors.0.[…]
-   "priors.0.conditioner_blocks.0.upsampler.upsample_block.0.resnet_block.0.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
-   "priors.0.conditioner_blocks.0.upsampler.upsample_block.0.resnet_block.0.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
-   […] the remaining 130 removed entries follow the same pattern: "priors.0.conditioner_blocks.0.upsampler.upsample_block.{0,2}.resnet_block.{0-15}.conv1d_{1,2}.{bias,weight}" and "priors.0.conditioner_blocks.0.upsampler.upsample_block.{1,3}.{bias,weight}", all in "pytorch_model-00001-of-00002.bin",
-   "priors.0.metadata_embedding.artist_emb.emb.weight": "pytorch_model-00001-of-00002.bin",
-   "priors.0.metadata_embedding.bow_genre_emb.emb.weight": "pytorch_model-00001-of-00002.bin",
    "priors.0.prior.embed_tokens.weight": "pytorch_model-00001-of-00002.bin",
    "priors.0.prior.fc_proj_out.weight": "pytorch_model-00001-of-00002.bin",
    "priors.0.prior.pos_emb.pos_emb": "pytorch_model-00001-of-00002.bin",
@@ -1009,145 +875,145 @@
    "priors.0.prior.transformer._attn_mods.9.mlp.c_fc.weight": "pytorch_model-00001-of-00002.bin",
    "priors.0.prior.transformer._attn_mods.9.mlp.c_proj.bias": "pytorch_model-00001-of-00002.bin",
    "priors.0.prior.transformer._attn_mods.9.mlp.c_proj.weight": "pytorch_model-00001-of-00002.bin",
-   […] 137 removed "priors.1.conditioner_blocks.[…]" entries (key names truncated in the page capture),
-   "priors.1.metadata_embedding.artist_emb.[…]
-   "priors.1.metadata_embedding.bow_genre_emb.[…]
    "priors.1.prior.embed_tokens.weight": "pytorch_model-00001-of-00002.bin",
    "priors.1.prior.fc_proj_out.weight": "pytorch_model-00001-of-00002.bin",
    "priors.1.prior.pos_emb.pos_emb": "pytorch_model-00001-of-00002.bin",
@@ -2015,11 +1881,145 @@
    "priors.1.prior.transformer._attn_mods.9.mlp.c_fc.weight": "pytorch_model-00001-of-00002.bin",
    "priors.1.prior.transformer._attn_mods.9.mlp.c_proj.bias": "pytorch_model-00001-of-00002.bin",
    "priors.1.prior.transformer._attn_mods.9.mlp.c_proj.weight": "pytorch_model-00001-of-00002.bin",
-   […] 5 removed "priors.2.[…]" entries (key names truncated in the page capture),
    "priors.2.prior.embed_tokens.weight": "pytorch_model-00001-of-00002.bin",
    "priors.2.prior.fc_proj_out.weight": "pytorch_model-00002-of-00002.bin",
    "priors.2.prior.pos_emb.pos_emb": "pytorch_model-00001-of-00002.bin",
@@ -2365,60 +2365,60 @@
    "priors.2.prior.transformer._attn_mods.34.attn.c_proj.weight": "pytorch_model-00001-of-00002.bin",
    "priors.2.prior.transformer._attn_mods.34.layer_norm_0.bias": "pytorch_model-00001-of-00002.bin",
    "priors.2.prior.transformer._attn_mods.34.layer_norm_0.weight": "pytorch_model-00001-of-00002.bin",
-   […] 54 changed entries, "priors.2.prior.transformer._attn_mods.34.{layer_norm_1,mlp.c_fc,mlp.c_proj}.{bias,weight}" and "priors.2.prior.transformer._attn_mods.{35-38}.{attn.c_attn,attn.c_proj,layer_norm_0,layer_norm_1,mlp.c_fc,mlp.c_proj}.{bias,weight}", whose shard file names are truncated to "pytorch_model-" in the page capture,
    "priors.2.prior.transformer._attn_mods.39.attn.c_attn.bias": "pytorch_model-00002-of-00002.bin",
    "priors.2.prior.transformer._attn_mods.39.attn.c_attn.weight": "pytorch_model-00002-of-00002.bin",
    "priors.2.prior.transformer._attn_mods.39.attn.c_proj.bias": "pytorch_model-00002-of-00002.bin",
|
3 |
"total_size": 11677355788
|
4 |
},
|
5 |
"weight_map": {
|
6 |
+
"priors.0.metadata_embedding.absolute_pos_emb.emb.weight": "pytorch_model-00001-of-00002.bin",
|
7 |
+
"priors.0.metadata_embedding.artist_emb.weight": "pytorch_model-00001-of-00002.bin",
|
8 |
+
"priors.0.metadata_embedding.bow_genre_emb.weight": "pytorch_model-00001-of-00002.bin",
|
9 |
+
"priors.0.metadata_embedding.relative_pos_emb.emb.weight": "pytorch_model-00001-of-00002.bin",
|
10 |
+
"priors.0.metadata_embedding.total_length_emb.emb.weight": "pytorch_model-00001-of-00002.bin",
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
11 |
"priors.0.prior.embed_tokens.weight": "pytorch_model-00001-of-00002.bin",
|
12 |
"priors.0.prior.fc_proj_out.weight": "pytorch_model-00001-of-00002.bin",
|
13 |
"priors.0.prior.pos_emb.pos_emb": "pytorch_model-00001-of-00002.bin",
|
|
|
875 |
"priors.0.prior.transformer._attn_mods.9.mlp.c_fc.weight": "pytorch_model-00001-of-00002.bin",
|
876 |
"priors.0.prior.transformer._attn_mods.9.mlp.c_proj.bias": "pytorch_model-00001-of-00002.bin",
|
877 |
"priors.0.prior.transformer._attn_mods.9.mlp.c_proj.weight": "pytorch_model-00001-of-00002.bin",
|
878 |
+
"priors.1.conditioner_blocks.embed_tokens.weight": "pytorch_model-00001-of-00002.bin",
|
879 |
+
"priors.1.conditioner_blocks.layer_norm.bias": "pytorch_model-00001-of-00002.bin",
|
880 |
+
"priors.1.conditioner_blocks.layer_norm.weight": "pytorch_model-00001-of-00002.bin",
|
881 |
+
"priors.1.conditioner_blocks.upsampler.proj_in.bias": "pytorch_model-00001-of-00002.bin",
|
882 |
+
"priors.1.conditioner_blocks.upsampler.proj_in.weight": "pytorch_model-00001-of-00002.bin",
|
883 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.0.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
|
884 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.0.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
|
885 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.0.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
|
886 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.0.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
|
887 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.1.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
|
888 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.1.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
|
889 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.1.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
|
890 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.1.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
|
891 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.10.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
|
892 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.10.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
|
893 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.10.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
|
894 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.10.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
|
895 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.11.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
|
896 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.11.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
|
897 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.11.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
|
898 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.11.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
|
899 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.12.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
|
900 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.12.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
|
901 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.12.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
|
902 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.12.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
|
903 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.13.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
|
904 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.13.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
|
905 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.13.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
|
906 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.13.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
|
907 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.14.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
|
908 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.14.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
|
909 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.14.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
|
910 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.14.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
|
911 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.15.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
|
912 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.15.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
|
913 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.15.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
|
914 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.15.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
|
915 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.2.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
|
916 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.2.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
|
917 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.2.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
|
918 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.2.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
|
919 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.3.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
|
920 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.3.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
|
921 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.3.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
|
922 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.3.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
|
923 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.4.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
|
924 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.4.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
|
925 |
+
"priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.4.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.4.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.5.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.5.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.5.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.5.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.6.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.6.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.6.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.6.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.7.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.7.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.7.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.7.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.8.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.8.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.8.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.8.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.9.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.9.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.9.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.0.resnet_block.9.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.0.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.0.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.0.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.0.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.1.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.1.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.1.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.1.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.10.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.10.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.10.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.10.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.11.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.11.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.11.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.11.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.12.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.12.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.12.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.12.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.13.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.13.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.13.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.13.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.14.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.14.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.14.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.14.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.15.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.15.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.15.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.15.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.2.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.2.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.2.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.2.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.3.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.3.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.3.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.3.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.4.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.4.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.4.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.4.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.5.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.5.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.5.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.5.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.6.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.6.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.6.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.6.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.7.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.7.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.7.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.7.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.8.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.8.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.8.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.8.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.9.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.9.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.9.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.2.resnet_block.9.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.3.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.1.conditioner_blocks.upsampler.upsample_block.3.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.metadata_embedding.artist_emb.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.1.metadata_embedding.bow_genre_emb.weight": "pytorch_model-00001-of-00002.bin",
"priors.1.prior.embed_tokens.weight": "pytorch_model-00001-of-00002.bin",
"priors.1.prior.fc_proj_out.weight": "pytorch_model-00001-of-00002.bin",
"priors.1.prior.pos_emb.pos_emb": "pytorch_model-00001-of-00002.bin",

"priors.1.prior.transformer._attn_mods.9.mlp.c_fc.weight": "pytorch_model-00001-of-00002.bin",
"priors.1.prior.transformer._attn_mods.9.mlp.c_proj.bias": "pytorch_model-00001-of-00002.bin",
"priors.1.prior.transformer._attn_mods.9.mlp.c_proj.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.embed_tokens.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.layer_norm.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.layer_norm.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.proj_in.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.proj_in.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.0.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.0.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.0.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.0.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.1.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.1.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.1.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.1.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.10.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.10.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.10.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.10.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.11.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.11.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.11.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.11.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.12.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.12.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.12.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.12.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.13.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.13.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.13.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.13.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.14.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.14.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.14.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.14.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.15.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.15.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.15.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.15.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.2.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.2.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.2.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.2.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.3.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.3.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.3.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.3.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.4.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.4.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.4.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.4.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.5.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.5.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.5.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.5.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.6.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.6.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.6.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.6.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.7.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.7.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.7.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.7.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.8.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.8.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.8.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.8.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.9.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.9.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.9.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.0.resnet_block.9.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.0.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.0.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.0.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.0.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.1.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.1.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.1.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.1.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.10.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.10.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.10.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.10.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.11.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.11.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.11.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.11.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.12.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.12.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.12.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.12.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.13.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.13.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.13.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.13.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.14.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.14.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.14.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.14.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.15.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.15.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.15.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.15.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.2.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.2.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.2.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.2.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.3.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.3.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.3.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.3.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.4.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.4.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.4.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.4.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.5.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.5.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.5.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.5.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.6.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.6.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.6.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.6.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.7.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.7.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.7.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.7.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.8.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.8.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.8.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.8.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.9.conv1d_1.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.9.conv1d_1.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.9.conv1d_2.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.2.resnet_block.9.conv1d_2.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.3.bias": "pytorch_model-00001-of-00002.bin",
+ "priors.2.conditioner_blocks.upsampler.upsample_block.3.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.metadata_embedding.artist_emb.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.metadata_embedding.bow_genre_emb.weight": "pytorch_model-00001-of-00002.bin",
"priors.2.prior.embed_tokens.weight": "pytorch_model-00001-of-00002.bin",
"priors.2.prior.fc_proj_out.weight": "pytorch_model-00002-of-00002.bin",
"priors.2.prior.pos_emb.pos_emb": "pytorch_model-00001-of-00002.bin",

"priors.2.prior.transformer._attn_mods.34.attn.c_proj.weight": "pytorch_model-00001-of-00002.bin",
"priors.2.prior.transformer._attn_mods.34.layer_norm_0.bias": "pytorch_model-00001-of-00002.bin",
"priors.2.prior.transformer._attn_mods.34.layer_norm_0.weight": "pytorch_model-00001-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.34.layer_norm_1.bias": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.34.layer_norm_1.weight": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.34.mlp.c_fc.bias": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.34.mlp.c_fc.weight": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.34.mlp.c_proj.bias": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.34.mlp.c_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.35.attn.c_attn.bias": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.35.attn.c_attn.weight": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.35.attn.c_proj.bias": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.35.attn.c_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.35.layer_norm_0.bias": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.35.layer_norm_0.weight": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.35.layer_norm_1.bias": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.35.layer_norm_1.weight": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.35.mlp.c_fc.bias": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.35.mlp.c_fc.weight": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.35.mlp.c_proj.bias": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.35.mlp.c_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.36.attn.c_attn.bias": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.36.attn.c_attn.weight": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.36.attn.c_proj.bias": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.36.attn.c_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.36.layer_norm_0.bias": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.36.layer_norm_0.weight": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.36.layer_norm_1.bias": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.36.layer_norm_1.weight": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.36.mlp.c_fc.bias": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.36.mlp.c_fc.weight": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.36.mlp.c_proj.bias": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.36.mlp.c_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.37.attn.c_attn.bias": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.37.attn.c_attn.weight": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.37.attn.c_proj.bias": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.37.attn.c_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.37.layer_norm_0.bias": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.37.layer_norm_0.weight": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.37.layer_norm_1.bias": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.37.layer_norm_1.weight": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.37.mlp.c_fc.bias": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.37.mlp.c_fc.weight": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.37.mlp.c_proj.bias": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.37.mlp.c_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.38.attn.c_attn.bias": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.38.attn.c_attn.weight": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.38.attn.c_proj.bias": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.38.attn.c_proj.weight": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.38.layer_norm_0.bias": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.38.layer_norm_0.weight": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.38.layer_norm_1.bias": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.38.layer_norm_1.weight": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.38.mlp.c_fc.bias": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.38.mlp.c_fc.weight": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.38.mlp.c_proj.bias": "pytorch_model-00002-of-00002.bin",
+ "priors.2.prior.transformer._attn_mods.38.mlp.c_proj.weight": "pytorch_model-00002-of-00002.bin",
"priors.2.prior.transformer._attn_mods.39.attn.c_attn.bias": "pytorch_model-00002-of-00002.bin",
"priors.2.prior.transformer._attn_mods.39.attn.c_attn.weight": "pytorch_model-00002-of-00002.bin",
"priors.2.prior.transformer._attn_mods.39.attn.c_proj.bias": "pytorch_model-00002-of-00002.bin",
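
The entries above (lines prefixed with +) are the renamed keys added to the sharded-checkpoint index, `pytorch_model.bin.index.json`: its `weight_map` tells the loader which shard file holds each tensor, e.g. most of the level-2 prior still fits in the first shard while its later transformer blocks and output projection move to `pytorch_model-00002-of-00002.bin`. As a sanity check on a diff like this, one can inspect the index directly; below is a minimal sketch (assuming only the standard `weight_map` layout shown above, using the stdlib), not part of this PR:

# Minimal sketch: inspect which shard holds each tensor in the index.
import json
from collections import Counter

with open("pytorch_model.bin.index.json") as f:
    index = json.load(f)

weight_map = index["weight_map"]  # tensor name -> shard file name

# Count how many tensors each shard file contains.
for shard, count in sorted(Counter(weight_map.values()).items()):
    print(f"{shard}: {count} tensors")

# Look up a single parameter, e.g. the level-2 prior's output projection,
# which the diff above places in the second shard.
name = "priors.2.prior.fc_proj_out.weight"
print(name, "->", weight_map.get(name))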