loim committed
Commit 3c97db7 · verified · 1 Parent(s): 83f51f4

Upload 3 files

Files changed (3)
  1. special_tokens_map.json +21 -0
  2. tokenizer.json +0 -0
  3. tokenizer_config.json +131 -0
special_tokens_map.json ADDED
@@ -0,0 +1,21 @@
+{
+  "additional_special_tokens": [
+    "<|endoftext|>",
+    "<|padding|>",
+    "<|mask|>",
+    "<|user|>",
+    "<|assistant|>",
+    "<|system|>",
+    "<|end|>",
+    "<|en|>",
+    "<|ru|>",
+    "<|tok|>",
+    "<|",
+    "|>"
+  ],
+  "bos_token": "<|endoftext|>",
+  "eos_token": "<|end|>",
+  "mask_token": "<|mask|>",
+  "pad_token": "<|padding|>",
+  "unk_token": "<|endoftext|>"
+}
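
As a quick sanity check of the map above, the tokens can be read back from Python. This is a minimal sketch, assuming the files are published on the Hub; "<repo_id>" is a placeholder, since the actual repo id is not part of this diff.

```python
# Minimal sketch: load the tokenizer and read back the special-token map.
# "<repo_id>" is a placeholder; the real Hub repo id is not shown in this diff.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("<repo_id>")

print(tokenizer.bos_token)   # <|endoftext|>  (also reused as unk_token)
print(tokenizer.eos_token)   # <|end|>
print(tokenizer.pad_token)   # <|padding|>
print(tokenizer.mask_token)  # <|mask|>

# All twelve additional_special_tokens are kept atomic during tokenization,
# including the bare "<|" and "|>" delimiter fragments.
print(tokenizer.additional_special_tokens)
```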
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
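
tokenizer.json carries the serialized fast-tokenizer state (vocabulary, merges, and the added tokens above). A sketch of inspecting it directly with the tokenizers library, assuming a local clone of this repo:

```python
# Sketch: inspect the raw tokenizer.json with the `tokenizers` library,
# assuming the file is available locally.
from tokenizers import Tokenizer

tok = Tokenizer.from_file("tokenizer.json")
print(tok.get_vocab_size())  # total vocabulary size, added tokens included

enc = tok.encode("<|en|><|user|>Hello<|end|>")
print(enc.tokens)  # the special tokens should survive as single pieces
print(enc.ids)     # ids should match added_tokens_decoder in tokenizer_config.json below
```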
 
tokenizer_config.json ADDED
@@ -0,0 +1,131 @@
+{
+  "added_tokens_decoder": {
+    "0": {
+      "content": "<|endoftext|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": "<|padding|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "<|mask|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "3": {
+      "content": "<|user|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "4": {
+      "content": "<|assistant|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "5": {
+      "content": "<|system|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "6": {
+      "content": "<|end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "7": {
+      "content": "<|en|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "8": {
+      "content": "<|ru|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "9": {
+      "content": "<|tok|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "10": {
+      "content": "<|",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "11": {
+      "content": "|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "additional_special_tokens": [
+    "<|endoftext|>",
+    "<|padding|>",
+    "<|mask|>",
+    "<|user|>",
+    "<|assistant|>",
+    "<|system|>",
+    "<|end|>",
+    "<|en|>",
+    "<|ru|>",
+    "<|tok|>",
+    "<|",
+    "|>"
+  ],
+  "bos_token": "<|endoftext|>",
+  "chat_template": "{% for message in messages %}{% if message['lang'] %}{{ \"<|\" + message['lang'] + \"|>\" }}{% endif %}{{ \"<|\" + message['role'] + \"|>\" }}{{ message['content'] }}<|end|>{% endfor %}",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "<|end|>",
+  "extra_special_tokens": {},
+  "mask_token": "<|mask|>",
+  "max_length": 65536,
+  "model_max_length": 65536,
+  "pad_to_multiple_of": 8,
+  "pad_token": "<|padding|>",
+  "pad_token_type_id": 0,
+  "padding_side": "right",
+  "stride": 0,
+  "tokenizer_class": "PreTrainedTokenizer",
+  "truncation_side": "right",
+  "truncation_strategy": "longest_first",
+  "unk_token": "<|endoftext|>"
+}
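
The chat_template above renders each message as <|role|>content<|end|>, prefixed with a language tag <|lang|> when the message carries a 'lang' key. A minimal sketch of applying it ("<repo_id>" is again a placeholder; this also assumes the template engine treats a missing 'lang' key as falsy, which is Jinja's default):

```python
# Sketch: render a conversation with the chat_template defined above.
# "<repo_id>" is a placeholder for the actual Hub repo id.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("<repo_id>")

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "lang": "en", "content": "Hello!"},
]

text = tokenizer.apply_chat_template(messages, tokenize=False)
print(text)
# <|system|>You are a helpful assistant.<|end|><|en|><|user|>Hello!<|end|>
```

Note the template has no add_generation_prompt branch, so the <|assistant|> header would have to be appended manually before generation.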