{ "architectures": [ "BertForTokenClassification" ], "attention_probs_dropout_prob": 0.1, "classifier_dropout": null, "directionality": "bidi", "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 768, "id2label": { "0": "0", "1": "B-H\u1ecd v\u00e0 t\u00ean", "2": "I-H\u1ecd v\u00e0 t\u00ean", "3": "B-Gi\u1edbi t\u00ednh", "4": "I-Gi\u1edbi t\u00ednh", "5": "B-S\u1ed1 \u0111i\u1ec7n tho\u1ea1i", "6": "I-S\u1ed1 \u0111i\u1ec7n tho\u1ea1i", "7": "B-Email", "8": "I-Email", "9": "B-\u0110\u1ecba ch\u1ec9", "10": "I-\u0110\u1ecba ch\u1ec9", "11": "B-N\u01a1i l\u00e0m vi\u1ec7c", "12": "I-N\u01a1i l\u00e0m vi\u1ec7c", "13": "B-Ngh\u1ec7 nghi\u1ec7p", "14": "I-Ngh\u1ec7 nghi\u1ec7p", "15": "B-D\u00e2n t\u1ed9c", "16": "I-D\u00e2n t\u1ed9c", "17": "B-T\u00f4n gi\u00e1o", "18": "I-T\u00f4n gi\u00e1o", "19": "B-N\u01a1i sinh", "20": "I-N\u01a1i sinh", "21": "B-Qu\u00ea qu\u00e1n", "22": "I-Qu\u00ea qu\u00e1n", "23": "B-Qu\u1ed1c t\u1ecbnh", "24": "I-Qu\u1ed1c t\u1ecbnh", "25": "B-Nh\u00f3m m\u00e1u", "26": "I-Nh\u00f3m m\u00e1u", "27": "B-S\u1ed1 c\u0103n c\u01b0\u1edbc c\u00f4ng d\u00e2n", "28": "I-S\u1ed1 c\u0103n c\u01b0\u1edbc c\u00f4ng d\u00e2n", "29": "B-S\u1ed1 h\u1ed9 chi\u1ebfu", "30": "I-S\u1ed1 h\u1ed9 chi\u1ebfu", "31": "B-N\u01a1i \u1edf hi\u1ec7n t\u1ea1i", "32": "I-N\u01a1i \u1edf hi\u1ec7n t\u1ea1i", "33": "B-T\u00ecnh tr\u1ea1ng h\u00f4n nh\u00e2n", "34": "I-T\u00ecnh tr\u1ea1ng h\u00f4n nh\u00e2n", "35": "B-Ng\u00e0y sinh", "36": "I-Ng\u00e0y sinh", "37": "B-Ng\u00e0nh h\u1ecdc", "38": "I-Ng\u00e0nh h\u1ecdc", "39": "B-Tr\u01b0\u1eddng h\u1ecdc", "40": "I-Tr\u01b0\u1eddng h\u1ecdc", "41": "B-B\u1eb1ng c\u1ea5p", "42": "I-B\u1eb1ng c\u1ea5p", "43": "B-Ch\u1ee9c v\u1ee5", "44": "I-Ch\u1ee9c v\u1ee5", "45": "B-S\u1edf th\u00edch", "46": "I-S\u1edf th\u00edch", "47": "B-T\u00ednh c\u00e1ch", "48": "I-T\u00ednh c\u00e1ch", "49": "B-\u0110i\u1ec3m m\u1ea1nh", "50": "I-\u0110i\u1ec3m m\u1ea1nh", "51": "B-\u0110i\u1ec3m y\u1ebfu", "52": "I-\u0110i\u1ec3m y\u1ebfu", "53": "B-Tu\u1ed5i", "54": "I-Tu\u1ed5i", "55": "B-M\u1ee5c ti\u00eau, mong mu\u1ed1n", "56": "I-M\u1ee5c ti\u00eau, mong mu\u1ed1n" }, "initializer_range": 0.02, "intermediate_size": 3072, "label2id": { "0": 0, "B-B\u1eb1ng c\u1ea5p": 41, "B-Ch\u1ee9c v\u1ee5": 43, "B-D\u00e2n t\u1ed9c": 15, "B-Email": 7, "B-Gi\u1edbi t\u00ednh": 3, "B-H\u1ecd v\u00e0 t\u00ean": 1, "B-M\u1ee5c ti\u00eau, mong mu\u1ed1n": 55, "B-Ngh\u1ec7 nghi\u1ec7p": 13, "B-Ng\u00e0nh h\u1ecdc": 37, "B-Ng\u00e0y sinh": 35, "B-Nh\u00f3m m\u00e1u": 25, "B-N\u01a1i l\u00e0m vi\u1ec7c": 11, "B-N\u01a1i sinh": 19, "B-N\u01a1i \u1edf hi\u1ec7n t\u1ea1i": 31, "B-Qu\u00ea qu\u00e1n": 21, "B-Qu\u1ed1c t\u1ecbnh": 23, "B-S\u1ed1 c\u0103n c\u01b0\u1edbc c\u00f4ng d\u00e2n": 27, "B-S\u1ed1 h\u1ed9 chi\u1ebfu": 29, "B-S\u1ed1 \u0111i\u1ec7n tho\u1ea1i": 5, "B-S\u1edf th\u00edch": 45, "B-Tr\u01b0\u1eddng h\u1ecdc": 39, "B-Tu\u1ed5i": 53, "B-T\u00ecnh tr\u1ea1ng h\u00f4n nh\u00e2n": 33, "B-T\u00ednh c\u00e1ch": 47, "B-T\u00f4n gi\u00e1o": 17, "B-\u0110i\u1ec3m m\u1ea1nh": 49, "B-\u0110i\u1ec3m y\u1ebfu": 51, "B-\u0110\u1ecba ch\u1ec9": 9, "I-B\u1eb1ng c\u1ea5p": 42, "I-Ch\u1ee9c v\u1ee5": 44, "I-D\u00e2n t\u1ed9c": 16, "I-Email": 8, "I-Gi\u1edbi t\u00ednh": 4, "I-H\u1ecd v\u00e0 t\u00ean": 2, "I-M\u1ee5c ti\u00eau, mong mu\u1ed1n": 56, "I-Ngh\u1ec7 nghi\u1ec7p": 14, "I-Ng\u00e0nh h\u1ecdc": 38, "I-Ng\u00e0y sinh": 36, "I-Nh\u00f3m m\u00e1u": 26, "I-N\u01a1i l\u00e0m vi\u1ec7c": 12, "I-N\u01a1i sinh": 20, "I-N\u01a1i \u1edf hi\u1ec7n t\u1ea1i": 32, "I-Qu\u00ea 
qu\u00e1n": 22, "I-Qu\u1ed1c t\u1ecbnh": 24, "I-S\u1ed1 c\u0103n c\u01b0\u1edbc c\u00f4ng d\u00e2n": 28, "I-S\u1ed1 h\u1ed9 chi\u1ebfu": 30, "I-S\u1ed1 \u0111i\u1ec7n tho\u1ea1i": 6, "I-S\u1edf th\u00edch": 46, "I-Tr\u01b0\u1eddng h\u1ecdc": 40, "I-Tu\u1ed5i": 54, "I-T\u00ecnh tr\u1ea1ng h\u00f4n nh\u00e2n": 34, "I-T\u00ednh c\u00e1ch": 48, "I-T\u00f4n gi\u00e1o": 18, "I-\u0110i\u1ec3m m\u1ea1nh": 50, "I-\u0110i\u1ec3m y\u1ebfu": 52, "I-\u0110\u1ecba ch\u1ec9": 10 }, "layer_norm_eps": 1e-12, "max_position_embeddings": 512, "model_type": "bert", "num_attention_heads": 12, "num_hidden_layers": 12, "pad_token_id": 0, "pooler_fc_size": 768, "pooler_num_attention_heads": 12, "pooler_num_fc_layers": 3, "pooler_size_per_head": 128, "pooler_type": "first_token_transform", "position_embedding_type": "absolute", "torch_dtype": "float32", "transformers_version": "4.50.3", "type_vocab_size": 2, "use_cache": true, "vocab_size": 119547 }