Update tokenizer_config.json
tokenizer_config.json  +2 -2

@@ -119,10 +119,10 @@
   "bos_token": "<s>",
   "chat_template": "{% for message in messages %}{% if message['role'] == 'system' and message['content'] %}{{'<|system|>\n' + message['content'] + '<|end|>\n'}}{% elif message['role'] == 'user' %}{{'<|user|>\n' + message['content'] + '<|end|>\n'}}{% elif message['role'] == 'assistant' %}{{'<|assistant|>\n' + message['content'] + '<|end|>\n'}}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '<|assistant|>\n' }}{% else %}{{ eos_token }}{% endif %}",
   "clean_up_tokenization_spaces": false,
-  "eos_token": "<|
+  "eos_token": "<|end|>",
   "legacy": false,
   "model_max_length": 131072,
-  "pad_token": "<|
+  "pad_token": "<|end|>",
   "padding_side": "left",
   "sp_model_kwargs": {},
   "tokenizer_class": "LlamaTokenizer",
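For context, a minimal sketch of what this change means at load time, assuming the tokenizer is loaded through transformers' AutoTokenizer. The repo ID below is a placeholder, not the actual model name for this commit.

# Sketch only: "your-org/your-model" is a hypothetical repo ID.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("your-org/your-model")

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]

# With add_generation_prompt=True the chat template ends with "<|assistant|>\n",
# cueing the model to respond; without it, the template appends eos_token,
# which this commit sets to "<|end|>".
prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)

# After this change, eos_token and pad_token are both "<|end|>", so generation
# stops on the template's turn terminator and left padding reuses the same token.
print(tok.eos_token, tok.pad_token, tok.padding_side)

Since pad_token now equals eos_token, batched inference with padding_side "left" pads with the same ID the model is trained to stop on, which avoids introducing a separate padding token.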