rwkv7-1.5B-g1 / tokenizer_config.json
{
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "0": {
      "content": "<|rwkv_tokenizer_end_of_text|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "65530": {
      "content": "\n\n",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "auto_map": {
    "AutoTokenizer": [
      "fla-hub/rwkv7-1.5B-world--hf_rwkv_tokenizer.RwkvTokenizer",
      null
    ]
  },
  "bos_token": "<|rwkv_tokenizer_end_of_text|>",
  "chat_template": "{{ '<|rwkv_tokenizer_end_of_text|>' }}{% for message in messages %}{% if message['role'] == 'user' %}{{'User: ' + message['content'] + '\n\n'}}{% elif message['role'] == 'system' %}{{'System: ' + message['content'] + '\n\n'}}{% elif message['role'] == 'assistant' %}{{'Assistant: ' + message['content'] + '\n\n'}}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ 'Assistant: <think' }}{% endif %}",
  "clean_up_tokenization_spaces": false,
  "eos_token": "\n\n",
  "extra_special_tokens": {},
  "model_max_length": 1000000000000000019884624838656,
  "pad_token": "<|rwkv_tokenizer_end_of_text|>",
  "tokenizer_class": "RwkvTokenizer",
  "unk_token": "<|rwkv_tokenizer_end_of_text|>",
  "use_fast": false
}
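
A minimal usage sketch (not part of the config file itself) showing how this tokenizer config is typically consumed with transformers. The repo id, messages, and printed output are assumptions for illustration; the custom RwkvTokenizer referenced in "auto_map" requires trust_remote_code.

# Sketch: load the custom RWKV tokenizer and render a prompt with the
# chat_template defined above. Repo id and messages are illustrative only.
from transformers import AutoTokenizer

# use_fast=False and trust_remote_code match "use_fast": false and the
# "auto_map" entry pointing at hf_rwkv_tokenizer.RwkvTokenizer.
tokenizer = AutoTokenizer.from_pretrained(
    "fla-hub/rwkv7-1.5B-world",  # assumed repo id, taken from auto_map
    trust_remote_code=True,
    use_fast=False,
)

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "What is RWKV?"},
]

# The template renders each turn as "Role: content\n\n"; with
# add_generation_prompt=True it appends "Assistant: <think".
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)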