Mubarak127 committed (verified)
Commit fab35a0 · 1 Parent(s): 351cbd9

Initial tokenizer upload

Files changed (3)
  1. special_tokens_map.json +2 -28
  2. tokenizer.json +2 -2
  3. tokenizer_config.json +2 -20
special_tokens_map.json CHANGED
@@ -1,33 +1,7 @@
 {
   "additional_special_tokens": [
-    {
-      "content": "[KNOWLEDGE]",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false
-    },
-    {
-      "content": "[/KNOWLEDGE]",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false
-    },
-    {
-      "content": "[REASONING]",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false
-    },
-    {
-      "content": "[/REASONING]",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false
-    }
+    "[KNOWLEDGE]",
+    "[REASONING]"
   ],
   "bos_token": {
     "content": "<|begin_of_text|>",
tokenizer.json CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:929f1ab58b5deea4054d7d06c0100b1d3b4df857a534adf926ca54c1cfaf7666
-size 17210942
+oid sha256:7c3e39f7ef233dc92fc002889a496eb1c1d3282824fd32e594f3e52bda70c0fc
+size 17210576
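tokenizer.json is stored through Git LFS, so the diff only swaps the pointer file: oid is the SHA-256 of the new blob and size is its length in bytes. A minimal sketch (not part of the commit) for checking a locally downloaded tokenizer.json against the new pointer:

import hashlib
import os

EXPECTED_OID = "7c3e39f7ef233dc92fc002889a496eb1c1d3282824fd32e594f3e52bda70c0fc"
EXPECTED_SIZE = 17210576  # bytes, from the LFS pointer above

path = "tokenizer.json"  # local copy pulled from the repo

with open(path, "rb") as f:
    digest = hashlib.sha256(f.read()).hexdigest()

assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch with LFS pointer"
assert digest == EXPECTED_OID, "sha256 mismatch with LFS pointer"
print("tokenizer.json matches the LFS pointer")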
tokenizer_config.json CHANGED
@@ -2063,29 +2063,11 @@
       "rstrip": false,
       "single_word": false,
       "special": true
-    },
-    "128258": {
-      "content": "[/KNOWLEDGE]",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "128259": {
-      "content": "[/REASONING]",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
     }
   },
   "additional_special_tokens": [
     "[KNOWLEDGE]",
-    "[/KNOWLEDGE]",
-    "[REASONING]",
-    "[/REASONING]"
+    "[REASONING]"
   ],
   "bos_token": "<|begin_of_text|>",
   "clean_up_tokenization_spaces": true,
@@ -2102,7 +2084,7 @@
   "pad_token_type_id": 0,
   "padding_side": "right",
   "stride": 0,
-  "tokenizer_class": "PreTrainedTokenizer",
+  "tokenizer_class": "PreTrainedTokenizerFast",
   "truncation_side": "right",
   "truncation_strategy": "longest_first"
 }
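Besides dropping the closing-tag tokens, the commit switches tokenizer_class from PreTrainedTokenizer to PreTrainedTokenizerFast, so the config now points at the fast (Rust-backed) tokenizer serialized in tokenizer.json. A minimal sketch (not part of the commit) of what loading should then yield, again with a placeholder repo id:

from transformers import AutoTokenizer, PreTrainedTokenizerFast

tok = AutoTokenizer.from_pretrained("Mubarak127/<repo-name>")  # placeholder repo id
assert isinstance(tok, PreTrainedTokenizerFast)

# The two remaining additional special tokens should round-trip through encode/decode.
ids = tok("[KNOWLEDGE] context [REASONING] answer")["input_ids"]
print(tok.decode(ids))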