hasibirok0 committed
Commit 1116ba4 · verified · 1 Parent(s): e965a79

Upload tokenizer

Files changed (2)
  1. special_tokens_map.json +1 -1
  2. tokenizer_config.json +2 -1
special_tokens_map.json CHANGED
@@ -22,7 +22,7 @@
     "single_word": false
   },
   "pad_token": {
-    "content": "<|endoftext|>",
+    "content": "<|im_end|>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
tokenizer_config.json CHANGED
@@ -1,5 +1,6 @@
 {
   "add_bos_token": false,
+  "add_eos_token": true,
   "add_prefix_space": false,
   "added_tokens_decoder": {
     "151643": {
@@ -200,7 +201,7 @@
   "eos_token": "<|im_end|>",
   "errors": "replace",
   "model_max_length": 131072,
-  "pad_token": "<|endoftext|>",
+  "pad_token": "<|im_end|>",
   "split_special_tokens": false,
   "tokenizer_class": "Qwen2Tokenizer",
   "unk_token": null