jackboot committed
Commit 80ff82b
1 Parent(s): 5ec8ebc

modded to work with llama.cpp_HF and 72b


It works, hooray. The original is in the previous commit.

Files changed (3)
  1. config.json +1 -1
  2. special_tokens_map.json +1 -1
  3. tokenizer_config.json +1 -1
config.json CHANGED
@@ -5,7 +5,7 @@
   "attention_dropout": 0.0,
   "bos_token_id": 151643,
   "decoder_sparse_step": 1,
-  "eos_token_id": 151643,
+  "eos_token_id": 151645,
   "hidden_act": "silu",
   "hidden_size": 3584,
   "initializer_range": 0.02,
special_tokens_map.json CHANGED
@@ -1,6 +1,6 @@
 {
   "eos_token": {
-    "content": "<|endoftext|>",
+    "content": "<|im_end|>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
tokenizer_config.json CHANGED
@@ -30,7 +30,7 @@
   "bos_token": null,
   "chat_template": "{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
   "clean_up_tokenization_spaces": false,
-  "eos_token": "<|endoftext|>",
+  "eos_token": "<|im_end|>",
   "errors": "replace",
   "model_max_length": 32768,
   "pad_token": "<|endoftext|>",