Upload folder using huggingface_hub
Browse files
- config.json +2 -2
- tokenizer_config.json +2 -2
config.json
CHANGED
@@ -7,7 +7,7 @@
|
|
7 |
"attn_logit_softcapping": null,
|
8 |
"bos_token_id": 2,
|
9 |
"cache_implementation": "hybrid",
|
10 |
-
"eos_token_id": 1,
|
11 |
"final_logit_softcapping": null,
|
12 |
"head_dim": 256,
|
13 |
"hidden_activation": "gelu_pytorch_tanh",
|
@@ -34,4 +34,4 @@
|
|
34 |
"transformers_version": "4.51.3",
|
35 |
"use_cache": false,
|
36 |
"vocab_size": 262208
|
37 |
-
}
|
|
|
7 |
"attn_logit_softcapping": null,
|
8 |
"bos_token_id": 2,
|
9 |
"cache_implementation": "hybrid",
|
10 |
+
"eos_token_id": 106,
|
11 |
"final_logit_softcapping": null,
|
12 |
"head_dim": 256,
|
13 |
"hidden_activation": "gelu_pytorch_tanh",
|
|
|
34 |
"transformers_version": "4.51.3",
|
35 |
"use_cache": false,
|
36 |
"vocab_size": 262208
|
37 |
+
}
|
tokenizer_config.json
CHANGED
@@ -51326,7 +51326,7 @@
|
|
51326 |
"bos_token": "<bos>",
|
51327 |
"chat_template": "{{ bos_token }}{% for message in messages %}<start_of_turn>{{ 'translation' if message['role'] == 'assistant' else 'instruction' if message['role'] == 'system' else 'input' if message['role'] == 'user' else message['role'] }}\n{{ message['content'] | trim }}<end_of_turn>\n{% endfor %}{% if add_generation_prompt %}<start_of_turn>translation\n{% endif %}\n",
|
51328 |
"clean_up_tokenization_spaces": false,
|
51329 |
-
"eos_token": "<eos>",
|
51330 |
"extra_special_tokens": {},
|
51331 |
"max_length": null,
|
51332 |
"model_max_length": 1000000000000000019884624838656,
|
@@ -51339,4 +51339,4 @@
|
|
51339 |
"tokenizer_class": "GemmaTokenizerFast",
|
51340 |
"unk_token": "<unk>",
|
51341 |
"use_default_system_prompt": false
|
51342 |
-
}
|
|
|
51326 |
"bos_token": "<bos>",
|
51327 |
"chat_template": "{{ bos_token }}{% for message in messages %}<start_of_turn>{{ 'translation' if message['role'] == 'assistant' else 'instruction' if message['role'] == 'system' else 'input' if message['role'] == 'user' else message['role'] }}\n{{ message['content'] | trim }}<end_of_turn>\n{% endfor %}{% if add_generation_prompt %}<start_of_turn>translation\n{% endif %}\n",
|
51328 |
"clean_up_tokenization_spaces": false,
|
51329 |
+
"eos_token": "<end_of_turn>",
|
51330 |
"extra_special_tokens": {},
|
51331 |
"max_length": null,
|
51332 |
"model_max_length": 1000000000000000019884624838656,
|
|
|
51339 |
"tokenizer_class": "GemmaTokenizerFast",
|
51340 |
"unk_token": "<unk>",
|
51341 |
"use_default_system_prompt": false
|
51342 |
+
}
|