{
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "50256": {
      "content": "<|endoftext|>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "50257": {
      "content": "<|im_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "50258": {
      "content": "<|im_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "additional_special_tokens": [
    "<|im_end|>",
    "<|im_start|>"
  ],
  "bos_token": "<|im_start|>",
  "chat_template": "{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
  "clean_up_tokenization_spaces": true,
  "eos_token": "<|im_end|>",
  "model_max_length": 1024,
  "pad_token": "<|im_end|>",
  "tokenizer_class": "GPT2Tokenizer",
  "unk_token": "<|endoftext|>"
}