stojchet committed
Commit 4b0bd9b · verified · 1 Parent(s): ef0deb0

Training in progress, step 1

Files changed (3)
  1. config.json +2 -1
  2. tokenizer_config.json +1 -2
  3. training_args.bin +2 -2
config.json CHANGED
@@ -1,4 +1,5 @@
 {
+  "_name_or_path": "deepseek-ai/deepseek-coder-1.3b-base",
   "architectures": [
     "LlamaForCausalLM"
   ],
@@ -27,7 +28,7 @@
   "rope_theta": 100000,
   "tie_word_embeddings": false,
   "torch_dtype": "float32",
-  "transformers_version": "4.50.0.dev0",
+  "transformers_version": "4.45.0",
   "use_cache": false,
   "vocab_size": 32256
 }
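
The added "_name_or_path" field records the base checkpoint this run was initialized from, and "transformers_version" now reflects the library release used during training. A minimal sketch for checking the two touched fields in the committed file, reading the JSON directly (the local path is an assumption):

    import json

    # Inspect the two config fields this commit touches
    # (assumes config.json sits in the current directory).
    with open("config.json") as f:
        config = json.load(f)

    print(config["_name_or_path"])         # deepseek-ai/deepseek-coder-1.3b-base
    print(config["transformers_version"])  # 4.45.0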
tokenizer_config.json CHANGED
@@ -183,12 +183,11 @@
   "bos_token": "<|begin▁of▁sentence|>",
   "clean_up_tokenization_spaces": false,
   "eos_token": "<|end▁of▁sentence|>",
-  "extra_special_tokens": {},
   "legacy": true,
   "model_max_length": 16384,
   "pad_token": "<|end▁of▁sentence|>",
   "sp_model_kwargs": {},
-  "tokenizer_class": "LlamaTokenizerFast",
+  "tokenizer_class": "LlamaTokenizer",
   "unk_token": null,
   "use_default_system_prompt": false
 }
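
Dropping "extra_special_tokens" and switching "tokenizer_class" back to "LlamaTokenizer" is likely a side effect of the tokenizer config being re-saved under the pinned transformers 4.45.0 rather than the 4.50 dev build. A small sketch of loading the tokenizer from this checkpoint (the local path is an assumption; AutoTokenizer may still resolve a fast tokenizer when use_fast is left at its default):

    from transformers import AutoTokenizer

    # Load the tokenizer from a local checkpoint directory (path is an assumption).
    tok = AutoTokenizer.from_pretrained("./checkpoint-1")

    print(tok.model_max_length)  # 16384
    print(tok.eos_token)         # <|end▁of▁sentence|>
    print(tok.pad_token)         # <|end▁of▁sentence|> (pad is set to the eos token)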
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:2d59a5a16ed26c0d7af0c99fa6d5413fd1a93bbb28edb45403639d8d1b1703c9
-size 5368
+oid sha256:cdf687aacdd70232dbb5a88cea332d24c5890f4c91d7b25cab83b2c675f99649
+size 5240
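
training_args.bin is a Git LFS pointer to the serialized TrainingArguments object that the Trainer saves alongside checkpoints; only its hash and size change here. To inspect the actual hyperparameters, one option is the sketch below (assumes the real LFS object has been pulled into the working tree):

    import torch

    # training_args.bin is a pickled transformers TrainingArguments object;
    # weights_only=False is needed to unpickle arbitrary classes, so only do
    # this for files you trust.
    args = torch.load("training_args.bin", weights_only=False)

    print(args.learning_rate)
    print(args.per_device_train_batch_size)
    print(args.num_train_epochs)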