schnell committed
Commit a77be22
Parent: 8004c1a

Model save

last-checkpoint/config.json DELETED
@@ -1,24 +0,0 @@
- {
-   "architectures": [
-     "BertForMaskedLM"
-   ],
-   "attention_probs_dropout_prob": 0.1,
-   "classifier_dropout": null,
-   "hidden_act": "gelu",
-   "hidden_dropout_prob": 0.1,
-   "hidden_size": 512,
-   "initializer_range": 0.02,
-   "intermediate_size": 2048,
-   "layer_norm_eps": 1e-12,
-   "max_position_embeddings": 512,
-   "model_type": "bert",
-   "num_attention_heads": 8,
-   "num_hidden_layers": 4,
-   "pad_token_id": 0,
-   "position_embedding_type": "absolute",
-   "torch_dtype": "float32",
-   "transformers_version": "4.19.2",
-   "type_vocab_size": 2,
-   "use_cache": true,
-   "vocab_size": 32000
- }
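The deleted config.json describes a compact BERT for masked-LM pretraining: 4 transformer layers, hidden size 512, 8 attention heads, and a 32k vocabulary. As a rough sketch only (not part of this commit), an architecture with the same shape could be rebuilt from these values with the transformers library:

from transformers import BertConfig, BertForMaskedLM

# Mirror of the deleted last-checkpoint/config.json (values copied from the diff above).
config = BertConfig(
    vocab_size=32000,
    hidden_size=512,
    num_hidden_layers=4,
    num_attention_heads=8,
    intermediate_size=2048,
    max_position_embeddings=512,
    type_vocab_size=2,
    pad_token_id=0,
    hidden_act="gelu",
    hidden_dropout_prob=0.1,
    attention_probs_dropout_prob=0.1,
    layer_norm_eps=1e-12,
    initializer_range=0.02,
    position_embedding_type="absolute",
)

# Randomly initialised model with the same shape; restoring the trained weights
# would need the pytorch_model.bin that this commit deletes.
model = BertForMaskedLM(config)
print(model.num_parameters())

This comes to roughly 30M parameters, which at float32 is consistent with the ~118 MB pytorch_model.bin removed below.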
last-checkpoint/optimizer.pt DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:a80de4f99ba96538ea5a735298ad67254dc36ac6e8f2508dd88e19e7cccfc79b
- size 236469913
 
 
 
 
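optimizer.pt and the other binary checkpoint files below are stored through Git LFS, so the diff only shows the three-line pointer stub: the spec version, the SHA-256 of the real blob (oid), and its size in bytes. As an illustrative sketch (the local path is a placeholder), a downloaded blob can be checked against the oid and size recorded in its pointer:

import hashlib
import os

def verify_lfs_blob(blob_path: str, expected_oid: str, expected_size: int) -> bool:
    """Compare a downloaded file against the oid/size recorded in its LFS pointer."""
    if os.path.getsize(blob_path) != expected_size:
        return False
    digest = hashlib.sha256()
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected_oid

# Values taken from the optimizer.pt pointer above; the local path is hypothetical.
print(verify_lfs_blob(
    "last-checkpoint/optimizer.pt",
    "a80de4f99ba96538ea5a735298ad67254dc36ac6e8f2508dd88e19e7cccfc79b",
    236469913,
))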
last-checkpoint/pytorch_model.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:a79842eb8d3703aa88be914994a6731a452d3e001d4f5b77d78b0278b72add18
- size 118242180
 
 
 
 
last-checkpoint/rng_state_0.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:b67ab6f5617974b5ba56e4c155051651820b8e56ba845909986b820e730f23ac
- size 14503
 
 
 
 
last-checkpoint/rng_state_1.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:8c38171df4138e19d52ac52e00a28c297f6d44ed20e85b0b5167822571629ce8
- size 14503
 
 
 
 
last-checkpoint/rng_state_2.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:04072c0191372303a6ea36d36a17938287fe23b60104af5ed0acdb868df205d8
- size 14503
 
 
 
 
last-checkpoint/scaler.pt DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:32b5fdb733a605d87af51fdd387ceec9f49ad656b2ebd6ccfb7e2575b9b2fe96
- size 559
 
 
 
 
last-checkpoint/scheduler.pt DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:0af66bc664fec71be099c0730bc9d951e866219367cb0ac1010fb8e5e1934741
- size 623
 
 
 
 
last-checkpoint/special_tokens_map.json DELETED
@@ -1 +0,0 @@
- {"unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
 
 
last-checkpoint/tokenizer.json DELETED
The diff for this file is too large to render. See raw diff
 
last-checkpoint/tokenizer_config.json DELETED
@@ -1 +0,0 @@
- {"model_max_length": 128, "padding_side": "right", "truncation_side": "right", "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "special_tokens_map_file": "pretrained_tokenizers/ex3-1_ipadic_bpe/special_tokens_map.json", "name_or_path": "pretrained_tokenizers/ex3-1_ipadic_bpe", "tokenizer_class": "PreTrainedTokenizerFast"}
 
 
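The deleted tokenizer files pin a fast tokenizer (apparently an IPAdic/BPE tokenizer, judging from the name_or_path) with model_max_length 128, right-side padding/truncation, and the usual BERT special tokens. Purely as a sketch, and assuming the checkpoint directory were still available locally (the path below is a placeholder), it would be reloaded roughly like this:

from transformers import AutoTokenizer

# Placeholder path; this directory is removed from the repo in this commit.
tokenizer = AutoTokenizer.from_pretrained("last-checkpoint")

print(tokenizer.model_max_length)  # 128, from tokenizer_config.json
print(tokenizer.mask_token)        # "[MASK]", from special_tokens_map.json

# Example encode/decode round trip (the sample sentence is arbitrary).
ids = tokenizer("これはテスト文です。", truncation=True)["input_ids"]
print(tokenizer.decode(ids))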
last-checkpoint/trainer_state.json DELETED
The diff for this file is too large to render. See raw diff
 
last-checkpoint/training_args.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:5883b79fc19dbeacc6f4d380b20de1f85c6a4e0818d4ad41bbf211141fc2852c
- size 3311
 
 
 
 
runs/Aug02_00-40-05_user-SYS-5049A-TR/events.out.tfevents.1659368419.user-SYS-5049A-TR.4008140.0 CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:aa6c9c07fe49aee01ea3edfc472b0ed659e5d26477f76f75082a578dd387d568
- size 319136
+ oid sha256:6156e63497cfa9b675e2673ef5ee7e8fc8cf338043e63f991bfbbda1df444026
+ size 319496
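The only file that actually changes, rather than being deleted, is this TensorBoard event log, which grows from 319136 to 319496 bytes, i.e. a few more logged training steps. For orientation only, such an events.out.tfevents file can be read back with TensorBoard's EventAccumulator; the scalar tag name below is an assumption and may differ in this run:

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Placeholder: point at the run directory (or the event file itself) from this commit.
acc = EventAccumulator("runs/Aug02_00-40-05_user-SYS-5049A-TR")
acc.Reload()

print(acc.Tags()["scalars"])          # lists the logged scalar tags
for ev in acc.Scalars("train/loss"):  # "train/loss" is an assumed tag name
    print(ev.step, ev.value)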