dejanseo committed on
Commit 81e10d2 · verified · 1 Parent(s): 9e01d99

Upload 10 files

config.json CHANGED
@@ -14,7 +14,7 @@
   "layer_norm_eps": 1e-12,
   "max_position_embeddings": 514,
   "model_type": "roberta",
-  "num_attention_heads": 16,
+  "num_attention_heads": 8,
   "num_hidden_layers": 16,
   "pad_token_id": 0,
   "position_embedding_type": "absolute",
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:5c1e8ccb6f14cb6dcec494a520acec5df58b53ba85be2cdb48f7eb5eac9452f4
+ oid sha256:4aec094a13cd4f0238f9d090bee7a8fa3f6c9909e9a624630e366be6a740f567
  size 943718976
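
The LFS pointer above swaps in a new SHA-256 for model.safetensors while the size stays at 943718976 bytes, so only the weight values changed. A minimal sketch for checking a downloaded copy against the pointer (the local filename is an assumption):

import hashlib

EXPECTED = "4aec094a13cd4f0238f9d090bee7a8fa3f6c9909e9a624630e366be6a740f567"

# Hash the weights in 1 MiB chunks so the ~900 MB file never sits fully in memory.
sha = hashlib.sha256()
with open("model.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)

print("match" if sha.hexdigest() == EXPECTED else "mismatch")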
optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f8d41882bcc0a013f99050462749763bf1bd4a293bea1cb4362ef977340c5bd3
+ size 1887595706
rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bc25a518db79e306af0dd7dc4c87484077270ec4e290aa916eb688d7dd622f41
+ size 14244
scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1231afd9e300c9c36b52007eaa6f0128f09e5be0fdb650174e477d440440b63a
+ size 1064
trainer_state.json ADDED
The diff for this file is too large to render. See raw diff
 
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7244246c18f0b7a6952b17df5b6e190aa0a446d94276bb2c1309d089c689814a
+ size 5304
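
Taken together, optimizer.pt, scheduler.pt, rng_state.pth, trainer_state.json and training_args.bin look like the files a transformers Trainer checkpoint carries, which is what lets a run resume via trainer.train(resume_from_checkpoint=...). A minimal sketch for inspecting the saved arguments (assumes transformers is installed and training_args.bin is in the working directory):

import torch

# training_args.bin is a pickled TrainingArguments object, so full unpickling is needed
# (weights_only=False; only do this for files you trust).
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.per_device_train_batch_size, args.num_train_epochs)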