{
  "add_bos_token": true,
  "add_eos_token": false,
  "added_tokens_decoder": {
    "0": {
      "content": "<unk>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "1": {
      "content": "<s>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "2": {
      "content": "</s>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "additional_special_tokens": [],
  "bos_token": "<s>",
| "chat_template": "{% for message in messages %}{% if message['role'] == 'system' %}{%- if not messages|selectattr('role', 'equalto', 'function_metadata')|list %}<|system|\n{{ message['content'] }}</s>\n{%- else %}<|system|\n{{ message['content'] }}{%- endif %}{% elif message['role'] == 'function_metadata' and loop.index0 == 1 %}\nYou have access to the following functions. Use them if required:\n\n{{ message['content'] }}\n\n</s>\n{% elif message['role'] == 'function_metadata' and loop.index0 == 0 %}<|system|\nYou have access to the following functions. Use them if required:\n\n{{ message['content'] }}\n\n</s>\n{% elif message['role'] == 'assistant' %}</s>\n<|assistant|>\n{{ message['content'] }}{{ eos_token }}{% elif message['role'] == 'function_call' %}</s>\n<|assistant|>\n[FUNCTION_CALL] {{ message['content'] }}{{ eos_token }}{% elif message['role'] == 'function_response' %}<|user|>\n[FUNCTION_RESPONSE] Here is the response to the function call. If helpful, use it to respond to the user's question:{{ message['content'] }}{% elif message['role'] == 'user'%}<|user|>\n{{ message['content'] }}{% endif %}{% endfor %}{% if add_generation_prompt %}</s>\n<|assistant|>\n{% endif %}", | |
| "clean_up_tokenization_spaces": false, | |
| "eos_token": "</s>", | |
| "legacy": true, | |
| "model_max_length": 8192, | |
| "pad_token": "</s>", | |
| "sp_model_kwargs": {}, | |
| "spaces_between_special_tokens": false, | |
| "tokenizer_class": "LlamaTokenizer", | |
| "unk_token": "<unk>", | |
| "use_default_system_prompt": true | |
| } | |
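
Below is a minimal sketch of how this `chat_template` might be exercised through the standard `transformers` `apply_chat_template` API. The repo id and the message payloads (the function schema, call, and response strings) are placeholders for illustration, not values taken from the config above; only the role names (`system`, `user`, `assistant`, `function_metadata`, `function_call`, `function_response`) come from the template itself.

```python
from transformers import AutoTokenizer

# Placeholder repo id -- substitute the model this tokenizer_config.json ships with.
tokenizer = AutoTokenizer.from_pretrained("your-org/your-function-calling-model")

# Roles match the branches in the chat_template; the JSON payloads are illustrative.
messages = [
    {"role": "function_metadata",
     "content": '{"name": "get_weather", "parameters": {"city": "string"}}'},
    {"role": "user", "content": "What is the weather in Paris?"},
    {"role": "function_call",
     "content": '{"name": "get_weather", "arguments": {"city": "Paris"}}'},
    {"role": "function_response", "content": '{"temperature_c": 18}'},
]

# Render the prompt string without tokenizing; add_generation_prompt=True appends
# the trailing "</s>\n<|assistant|>\n" so the model continues as the assistant.
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)
```

Because the first message has role `function_metadata` at index 0, the template opens the prompt with the system block listing the available functions, then alternates `<|user|>` / `<|assistant|>` turns, tagging model-side tool invocations with `[FUNCTION_CALL]` and tool outputs with `[FUNCTION_RESPONSE]`.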