{
    "add_prefix_space": true,
    "added_tokens_decoder": {
        "0": {
            "content": "<unk>",
            "lstrip": false,
            "normalized": false,
            "rstrip": false,
            "single_word": false,
            "special": true
        },
        "1": {
            "content": "<s>",
            "lstrip": false,
            "normalized": false,
            "rstrip": false,
            "single_word": false,
            "special": true
        },
        "2": {
            "content": "</s>",
            "lstrip": false,
            "normalized": false,
            "rstrip": false,
            "single_word": false,
            "special": true
        },
        "32000": {
            "content": "<pad>",
            "lstrip": false,
            "normalized": false,
            "rstrip": false,
            "single_word": false,
            "special": true
        }
    },
    "bos_token": "<s>",
    "chat_template": "{% for message in messages %}{% if message['role'] == 'user' %}{{ '<|user|>\\n' + message['content'] + eos_token }}{% elif message['role'] == 'system' %}{{ '<|system|>\\n' + message['content'] + eos_token }}{% elif message['role'] == 'assistant' %}{{ '\\n'  + message['content'] + eos_token }}{% endif %}{% if loop.last and add_generation_prompt %}{{ '<|assistant|>' }}{% endif %}{% endfor %}",
    "clean_up_tokenization_spaces": false,
    "eos_token": "</s>",
    "legacy": true,
    "model_max_length": 1000000000000000019884624838656,
    "pad_token": "<pad>",
    "sp_model_kwargs": {},
    "spaces_between_special_tokens": false,
    "tokenizer_class": "LlamaTokenizer",
    "unk_token": "<unk>",
    "use_default_system_prompt": false
}
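
For reference, below is a minimal sketch of how a tokenizer_config.json of this shape is consumed, assuming it sits next to the remaining tokenizer files (e.g. tokenizer.model) in a local directory; the path "./tokenizer_dir" and the example messages are illustrative, not part of the config above. Note that the very large model_max_length value is the standard transformers sentinel meaning "no explicit length limit", not an error.

from transformers import AutoTokenizer

# Load the tokenizer; transformers reads tokenizer_config.json (including
# chat_template, the special tokens, and tokenizer_class) from this directory.
tokenizer = AutoTokenizer.from_pretrained("./tokenizer_dir")

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]

# Render the chat_template above: each turn becomes "<|role|>\n" + content
# + "</s>", and add_generation_prompt=True appends a bare "<|assistant|>"
# so the model continues as the assistant.
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)
# <|system|>
# You are a helpful assistant.</s><|user|>
# Hello!</s><|assistant|>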