pdjohn committed on
Commit b1cfa89 · verified · 1 Parent(s): d63e02b

Upload model

Files changed (2)
  1. config.json +70 -0
  2. model.safetensors +3 -0
config.json ADDED
@@ -0,0 +1,70 @@
+ {
+   "architectures": [
+     "CausalBERTMultiTaskModel"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "auto_map": {
+     "AutoConfig": "configuration_eurobert.EuroBertConfig",
+     "AutoModel": "modeling_eurobert.EuroBertModel",
+     "AutoModelForMaskedLM": "modeling_eurobert.EuroBertForMaskedLM",
+     "AutoModelForPreTraining": "modeling_eurobert.EuroBertPreTrainedModel",
+     "AutoModelForSequenceClassification": "modeling_eurobert.EuroBertForSequenceClassification",
+     "AutoModelForTokenClassification": "modeling_eurobert.EuroBertForTokenClassification"
+   },
+   "base_model_name": "EuroBERT/EuroBERT-210m",
+   "bos_token": "<|begin_of_text|>",
+   "bos_token_id": 128000,
+   "clf_pooling": "late",
+   "eos_token": "<|end_of_text|>",
+   "eos_token_id": 128001,
+   "head_dim": 64,
+   "hidden_act": "silu",
+   "hidden_dropout": 0.0,
+   "hidden_size": 768,
+   "id2label_relation": {
+     "0": "NO_RELATION",
+     "1": "CAUSE",
+     "2": "EFFECT",
+     "3": "INTERDEPENDENCY"
+   },
+   "id2label_span": {
+     "0": "O",
+     "1": "B-INDICATOR",
+     "2": "I-INDICATOR",
+     "3": "B-ENTITY",
+     "4": "I-ENTITY"
+   },
+   "initializer_range": 0.02,
+   "intermediate_size": 3072,
+   "mask_token": "<|mask|>",
+   "mask_token_id": 128002,
+   "max_position_embeddings": 8192,
+   "mlp_bias": false,
+   "model_type": "causalbert_multitask",
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "num_key_value_heads": 12,
+   "num_relation_labels": 4,
+   "num_span_labels": 5,
+   "pad_token": "<|end_of_text|>",
+   "pad_token_id": 128001,
+   "pretraining_tp": 1,
+   "relation_class_weights": null,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": null,
+   "rope_theta": 250000,
+   "span_class_weights": [
+     0.09086648603440675,
+     2.1256438250409166,
+     1.4697529981521529,
+     0.9365069591289833,
+     0.37722973164353996
+   ],
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.53.1",
+   "use_cache": false,
+   "vocab_size": 128256,
+   "vocab_size_with_special_tokens": 128256
+ }
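
The config above describes a multitask head on top of EuroBERT/EuroBERT-210m: BIO span tagging for causal indicators/entities plus a 4-way relation classifier. A minimal sketch of reading this config back with transformers, assuming a placeholder repo id (not shown on this page) and that the repo ships the configuration_eurobert/modeling_eurobert modules referenced under "auto_map"; the "CausalBERTMultiTaskModel" class listed in "architectures" is not part of auto_map, so the multitask head itself would need to be imported separately.

    # Minimal sketch, under the assumptions stated above.
    from transformers import AutoConfig

    repo_id = "pdjohn/causalbert-eurobert-210m"  # placeholder repo id, not taken from this commit

    # trust_remote_code=True lets transformers import the custom
    # configuration_eurobert module named in "auto_map".
    config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)

    print(config.id2label_span)      # BIO labels for indicator/entity spans
    print(config.id2label_relation)  # NO_RELATION / CAUSE / EFFECT / INTERDEPENDENCY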
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7ce980756462746c271fc6aa8e71b1da7efdab38434a890802ba384ca70220f6
+ size 423558482
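
model.safetensors is stored via Git LFS, so the diff only records the pointer file: spec version, SHA-256 oid, and size (423,558,482 bytes, roughly 424 MB). A small sketch of fetching the real weight file and verifying it against the oid recorded above, again assuming a placeholder repo id:

    # Sketch: download the actual safetensors file and check its SHA-256
    # against the LFS pointer's oid. repo_id is a placeholder assumption.
    import hashlib
    from huggingface_hub import hf_hub_download

    repo_id = "pdjohn/causalbert-eurobert-210m"  # placeholder repo id
    path = hf_hub_download(repo_id=repo_id, filename="model.safetensors")

    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)

    assert digest.hexdigest() == "7ce980756462746c271fc6aa8e71b1da7efdab38434a890802ba384ca70220f6"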