{
  "_name_or_path": "AutoNLP",
  "_num_labels": 29,
  "architectures": [
    "BertForSequenceClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "classifier_dropout": null,
  "directionality": "bidi",
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "\u4e92\u8054\u7f51\u670d\u52a1",
    "1": "\u4ea4\u901a\u8fd0\u8f93",
    "2": "\u4f11\u95f2\u670d\u52a1",
    "3": "\u4f20\u5a92",
    "4": "\u4fe1\u606f\u6280\u672f",
    "5": "\u516c\u7528\u4e8b\u4e1a",
    "6": "\u519c\u4e1a",
    "7": "\u5316\u5de5\u5236\u9020",
    "8": "\u533b\u836f\u751f\u7269",
    "9": "\u5546\u4e1a\u8d38\u6613",
    "10": "\u5efa\u7b51\u4e1a",
    "11": "\u623f\u5730\u4ea7",
    "12": "\u6559\u80b2",
    "13": "\u6587\u5316",
    "14": "\u6709\u8272\u91d1\u5c5e",
    "15": "\u6797\u4e1a",
    "16": "\u6c7d\u8f66\u5236\u9020",
    "17": "\u6e14\u4e1a",
    "18": "\u7535\u5b50\u5236\u9020",
    "19": "\u7535\u6c14\u8bbe\u5907",
    "20": "\u755c\u7267\u4e1a",
    "21": "\u7eba\u7ec7\u670d\u88c5\u5236\u9020",
    "22": "\u8f7b\u5de5\u5236\u9020",
    "23": "\u901a\u4fe1",
    "24": "\u91c7\u77ff\u4e1a",
    "25": "\u94a2\u94c1",
    "26": "\u94f6\u884c",
    "27": "\u975e\u94f6\u91d1\u878d",
    "28": "\u98df\u54c1\u996e\u6599"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "\u4e92\u8054\u7f51\u670d\u52a1": 0,
    "\u4ea4\u901a\u8fd0\u8f93": 1,
    "\u4f11\u95f2\u670d\u52a1": 2,
    "\u4f20\u5a92": 3,
    "\u4fe1\u606f\u6280\u672f": 4,
    "\u516c\u7528\u4e8b\u4e1a": 5,
    "\u519c\u4e1a": 6,
    "\u5316\u5de5\u5236\u9020": 7,
    "\u533b\u836f\u751f\u7269": 8,
    "\u5546\u4e1a\u8d38\u6613": 9,
    "\u5efa\u7b51\u4e1a": 10,
    "\u623f\u5730\u4ea7": 11,
    "\u6559\u80b2": 12,
    "\u6587\u5316": 13,
    "\u6709\u8272\u91d1\u5c5e": 14,
    "\u6797\u4e1a": 15,
    "\u6c7d\u8f66\u5236\u9020": 16,
    "\u6e14\u4e1a": 17,
    "\u7535\u5b50\u5236\u9020": 18,
    "\u7535\u6c14\u8bbe\u5907": 19,
    "\u755c\u7267\u4e1a": 20,
    "\u7eba\u7ec7\u670d\u88c5\u5236\u9020": 21,
    "\u8f7b\u5de5\u5236\u9020": 22,
    "\u901a\u4fe1": 23,
    "\u91c7\u77ff\u4e1a": 24,
    "\u94a2\u94c1": 25,
    "\u94f6\u884c": 26,
    "\u975e\u94f6\u91d1\u878d": 27,
    "\u98df\u54c1\u996e\u6599": 28
  },
  "layer_norm_eps": 1e-12,
  "max_length": 128,
  "max_position_embeddings": 512,
  "model_type": "bert",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "output_past": true,
  "pad_token_id": 0,
  "padding": "max_length",
  "pooler_fc_size": 768,
  "pooler_num_attention_heads": 12,
  "pooler_num_fc_layers": 3,
  "pooler_size_per_head": 128,
  "pooler_type": "first_token_transform",
  "position_embedding_type": "absolute",
  "problem_type": "single_label_classification",
  "torch_dtype": "float32",
  "transformers_version": "4.15.0",
  "type_vocab_size": 2,
  "use_cache": true,
  "vocab_size": 21128
}
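
The labels in id2label / label2id are Chinese industry-sector names (for example, \u4e92\u8054\u7f51\u670d\u52a1 decodes to 互联网服务, "internet services", and \u94f6\u884c to 银行, "banking"). Below is a minimal inference sketch for a checkpoint that ships this config, assuming the weights and tokenizer are published in the same repository; the repository id is a placeholder, and the padding and sequence length follow the `padding` and `max_length` fields above.

```python
# Minimal inference sketch. The repo id below is a placeholder; substitute the
# actual model repository that contains this config.json and its weights.
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

model_id = "your-username/autonlp-industry-classifier"  # placeholder path

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForSequenceClassification.from_pretrained(model_id)
model.eval()

# Sample Chinese business description (hypothetical input).
text = "公司主要从事新能源汽车动力电池的研发与制造"

# Mirror the config: pad/truncate to max_length=128.
inputs = tokenizer(
    text,
    padding="max_length",
    truncation=True,
    max_length=128,
    return_tensors="pt",
)

with torch.no_grad():
    logits = model(**inputs).logits  # shape: (1, 29), one score per sector

pred_id = logits.argmax(dim=-1).item()
print(pred_id, model.config.id2label[pred_id])  # index and its Chinese sector label
```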