adavande committed on
Commit
9be183e
·
verified ·
1 Parent(s): 6894527

Upload model trained with Unsloth

Browse files

Upload model trained with Unsloth 2x faster

Files changed (3) hide show
  1. .gitattributes +1 -0
  2. tokenizer.json +0 -0
  3. tokenizer_config.json +4 -1
.gitattributes CHANGED
@@ -38,3 +38,4 @@ unsloth.BF16.gguf filter=lfs diff=lfs merge=lfs -text
38
  unsloth.Q4_K_M.gguf filter=lfs diff=lfs merge=lfs -text
39
  unsloth.Q8_0.gguf filter=lfs diff=lfs merge=lfs -text
40
  unsloth.Q5_K_M.gguf filter=lfs diff=lfs merge=lfs -text
 
 
38
  unsloth.Q4_K_M.gguf filter=lfs diff=lfs merge=lfs -text
39
  unsloth.Q8_0.gguf filter=lfs diff=lfs merge=lfs -text
40
  unsloth.Q5_K_M.gguf filter=lfs diff=lfs merge=lfs -text
41
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -1,4 +1,5 @@
1
  {
 
2
  "added_tokens_decoder": {
3
  "128000": {
4
  "content": "<|begin_of_text|>",
@@ -2052,6 +2053,7 @@
2052
  "bos_token": "<|begin_of_text|>",
2053
  "clean_up_tokenization_spaces": true,
2054
  "eos_token": "<|end_of_text|>",
 
2055
  "model_input_names": [
2056
  "input_ids",
2057
  "attention_mask"
@@ -2059,5 +2061,6 @@
2059
  "model_max_length": 131072,
2060
  "pad_token": "<|finetune_right_pad_id|>",
2061
  "padding_side": "left",
2062
- "tokenizer_class": "PreTrainedTokenizerFast"
 
2063
  }
 
1
  {
2
+ "add_bos_token": true,
3
  "added_tokens_decoder": {
4
  "128000": {
5
  "content": "<|begin_of_text|>",
 
2053
  "bos_token": "<|begin_of_text|>",
2054
  "clean_up_tokenization_spaces": true,
2055
  "eos_token": "<|end_of_text|>",
2056
+ "extra_special_tokens": {},
2057
  "model_input_names": [
2058
  "input_ids",
2059
  "attention_mask"
 
2061
  "model_max_length": 131072,
2062
  "pad_token": "<|finetune_right_pad_id|>",
2063
  "padding_side": "left",
2064
+ "tokenizer_class": "PreTrainedTokenizerFast",
2065
+ "unk_token": null
2066
  }