interstellarninja and Rocketknight1 (HF staff) committed
Commit a806daa
Parent: 09317b1

Add tool use template (#18)

- Add tool use template (44ba1a220a7c4dde080b07625c52ca9a5f27e1c1)


Co-authored-by: Matthew Carrigan <[email protected]>

Files changed (4)
  1. README.md +11 -9
  2. added_tokens.json +11 -11
  3. tokenizer.json +0 -0
  4. tokenizer_config.json +10 -1
README.md CHANGED
@@ -1,5 +1,7 @@
  ---
- base_model: mistralai/Mistral-7B-v0.1
+ language:
+ - en
+ license: apache-2.0
  tags:
  - Mistral
  - instruct
@@ -12,21 +14,21 @@ tags:
  - distillation
  - function calling
  - json mode
- model-index:
- - name: Hermes-2-Pro-Mistral-7B
-   results: []
- license: apache-2.0
- language:
- - en
+ base_model: mistralai/Mistral-7B-v0.1
  datasets:
  - teknium/OpenHermes-2.5
  widget:
  - example_title: Hermes 2 Pro
    messages:
    - role: system
-     content: You are a sentient, superintelligent artificial general intelligence, here to teach and assist me.
+     content: You are a sentient, superintelligent artificial general intelligence,
+       here to teach and assist me.
    - role: user
-     content: Write a short story about Goku discovering kirby has teamed up with Majin Buu to destroy the world.
+     content: Write a short story about Goku discovering kirby has teamed up with Majin
+       Buu to destroy the world.
+ model-index:
+ - name: Hermes-2-Pro-Mistral-7B
+   results: []
  ---
  
  # Hermes 2 Pro - Mistral 7B
added_tokens.json CHANGED
@@ -1,14 +1,4 @@
  {
- "<|im_end|>": 32000,
- "<|im_start|>": 32001,
- "<pad2>": 32002,
- "<pad3>": 32003,
- "<pad4>": 32004,
- "<pad5>": 32005,
- "<pad6>": 32006,
- "<pad7>": 32007,
- "<pad8>": 32008,
- "<pad9>": 32009,
  "<pad10>": 32010,
  "<pad11>": 32011,
  "<pad12>": 32012,
@@ -29,6 +19,16 @@
  "<pad27>": 32027,
  "<pad28>": 32028,
  "<pad29>": 32029,
+ "<pad2>": 32002,
  "<pad30>": 32030,
- "<pad31>": 32031
+ "<pad31>": 32031,
+ "<pad3>": 32003,
+ "<pad4>": 32004,
+ "<pad5>": 32005,
+ "<pad6>": 32006,
+ "<pad7>": 32007,
+ "<pad8>": 32008,
+ "<pad9>": 32009,
+ "<|im_end|>": 32000,
+ "<|im_start|>": 32001
  }
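
The added_tokens.json change only re-sorts the keys into the lexicographic order the tokenizers library writes; every special token keeps its original id. A minimal sketch to verify that after this commit, assuming the NousResearch/Hermes-2-Pro-Mistral-7B repo id (not stated in this diff):

```python
# Sketch (not part of this commit): confirm the ChatML control tokens kept
# their ids after added_tokens.json was re-sorted.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("NousResearch/Hermes-2-Pro-Mistral-7B")

# Only the key order in the JSON changed; the ids themselves are unchanged.
print(tokenizer.convert_tokens_to_ids("<|im_end|>"))    # 32000
print(tokenizer.convert_tokens_to_ids("<|im_start|>"))  # 32001
```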
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -286,7 +286,16 @@
  },
  "additional_special_tokens": [],
  "bos_token": "<s>",
- "chat_template": "{{bos_token}}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
+ "chat_template": [
+ {
+ "name": "default",
+ "template": "{{bos_token}}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}"
+ },
+ {
+ "name": "tool_use",
+ "template": "{%- macro json_to_python_type(json_spec) %}\n{%- set basic_type_map = {\n \"string\": \"str\",\n \"number\": \"float\",\n \"integer\": \"int\",\n \"boolean\": \"bool\"\n} %}\n\n{%- if basic_type_map[json_spec.type] is defined %}\n {{- basic_type_map[json_spec.type] }}\n{%- elif json_spec.type == \"array\" %}\n {{- \"list[\" + json_to_python_type(json_spec|items) + \"]\"}}\n{%- elif json_spec.type == \"object\" %}\n {%- if json_spec.additionalProperties is defined %}\n {{- \"dict[str, \" + json_to_python_type(json_spec.additionalProperties) + ']'}}\n {%- else %}\n {{- \"dict\" }}\n {%- endif %}\n{%- elif json_spec.type is iterable %}\n {{- \"Union[\" }}\n {%- for t in json_spec.type %}\n {{- json_to_python_type({\"type\": t}) }}\n {%- if not loop.last %}\n {{- \",\" }} \n {%- endif %}\n {%- endfor %}\n {{- \"]\" }}\n{%- else %}\n {{- \"Any\" }}\n{%- endif %}\n{%- endmacro %}\n\n\n{{- bos_token }}\n{{- \"You are a function calling AI model. You are provided with function signatures within <tools></tools> XML tags. You may call one or more functions to assist with the user query. Don't make assumptions about what values to plug into functions. Here are the available tools: <tools> \" }}\n{%- for tool in tools %}\n {%- if tool.function is defined %}\n {%- set tool = tool.function %}\n {%- endif %}\n {{- '{\"type\": \"function\", \"function\": ' }}\n {{- '{\"name\": ' + tool.name + '\", ' }}\n {{- '\"description\": \"' + tool.name + '(' }}\n {%- for param_name, param_fields in tool.parameters.properties|items %}\n {{- param_name + \": \" + json_to_python_type(param_fields) }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- endif %}\n {%- endfor %}\n {{- \")\" }}\n {%- if tool.return is defined %}\n {{- \" -> \" + json_to_python_type(tool.return) }}\n {%- endif %}\n {{- \" - \" + tool.description + \"\\n\\n\" }}\n {%- for param_name, param_fields in tool.parameters.properties|items %}\n {%- if loop.first %}\n {{- \" Args:\\n\" }}\n {%- endif %}\n {{- \" \" + param_name + \"(\" + json_to_python_type(param_fields) + \"): \" + param_fields.description|trim }}\n {%- endfor %}\n {%- if tool.return is defined and tool.return.description is defined %}\n {{- \"\\n Returns:\\n \" + tool.return.description }}\n {%- endif %}\n {{- '\"' }}\n {{- ', \"parameters\": ' }}\n {%- if tool.parameters.properties | length == 0 %}\n {{- \"{}\" }}\n {%- else %}\n {{- tool.parameters|tojson }}\n {%- endif %}\n {{- \"}\" }}\n {%- if not loop.last %}\n {{- \"\\n\" }}\n {%- endif %}\n{%- endfor %}\n{{- \" </tools>\" }}\n{{- 'Use the following pydantic model json schema for each tool call you will make: {\"properties\": {\"arguments\": {\"title\": \"Arguments\", \"type\": \"object\"}, \"name\": {\"title\": \"Name\", \"type\": \"string\"}}, \"required\": [\"arguments\", \"name\"], \"title\": \"FunctionCall\", \"type\": \"object\"}\n' }}\n{{- \"For each function call return a json object with function name and arguments within <tool_call></tool_call> XML tags as follows:\n\" }}\n{{- \"<tool_call>\n\" }}\n{{- '{\"arguments\": <args-dict>, \"name\": <function-name>}\n' }}\n{{- '</tool_call><|im_end|>' }}\n{%- for message in messages %}\n {%- if message.role == \"user\" or message.role == \"system\" or (message.role == \"assistant\" and message.tool_calls is not defined) %}\n {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n {%- elif message.role == \"assistant\" %}\n {{- '<|im_start|>' + message.role + '\\n<tool_call>\\n' }}\n {%- for tool_call in message.tool_calls %}\n {%- 
if tool_call.function is defined %}\n {%- set tool_call = tool_call.function %}\n {%- endif %}\n {{- '{ ' }}\n {%- if tool_call.arguments is defined %}\n {{- '\"arguments\": ' }}\n {{- tool_call.arguments|tojson }}\n {{- ', '}}\n {%- endif %}\n {{- '\"name\": \"' }}\n {{- tool_call.name }}\n {{- '\"}' }}\n {{- '\\n</tool_call> ' }}\n {%- endfor %}\n {{- '<|im_end|>\\n' }}\n {%- elif message.role == \"tool\" %}\n {%- if not message.name is defined %}\n {{- raise_exception(\"Tool response dicts require a 'name' key indicating the name of the called function!\") }}\n {%- endif %}\n {{- '<|im_start|>' + message.role + '\\n<tool_response>\\n' }}\n {{- '{\"name\": \"' }}\n {{- message.name }}\n {{- '\", \"content\": ' }}\n {{- message.content|tojson + '}' }}\n {{- '\\n</tool_response> <|im_end|>\\n' }} \n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|im_start|>assistant\\n' }}\n{%- endif %}\n"
+ }
+ ],
  "clean_up_tokenization_spaces": true,
  "eos_token": "<|im_end|>",
  "legacy": true,