dingzhaohan committed
Commit 3c8762d · parent b2f8cb8

Upload tokenizer

added_tokens.json ADDED
@@ -0,0 +1,4 @@
+{
+  "</s>": 41,
+  "<s>": 40
+}
special_tokens_map.json ADDED
@@ -0,0 +1,22 @@
+{
+  "additional_special_tokens": [
+    {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false
+    }
+  ],
+  "bos_token": "<s>",
+  "eos_token": "</s>",
+  "pad_token": "[PAD]",
+  "unk_token": "[UNK]"
+}
tokenizer_config.json ADDED
@@ -0,0 +1,12 @@
+{
+  "bos_token": "<s>",
+  "clean_up_tokenization_spaces": true,
+  "do_lower_case": false,
+  "eos_token": "</s>",
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "[PAD]",
+  "replace_word_delimiter_char": " ",
+  "tokenizer_class": "Wav2Vec2CTCTokenizer",
+  "unk_token": "[UNK]",
+  "word_delimiter_token": "|"
+}
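
The `tokenizer_class` entry identifies this as a Wav2Vec2CTCTokenizer checkpoint. A minimal loading sketch, assuming the files in this commit have been downloaded into a local directory (the path `./tokenizer` is hypothetical, not part of the commit):

    from transformers import Wav2Vec2CTCTokenizer

    # Hypothetical local directory holding vocab.json, tokenizer_config.json,
    # special_tokens_map.json, and added_tokens.json from this commit.
    tokenizer = Wav2Vec2CTCTokenizer.from_pretrained("./tokenizer")

    print(tokenizer.bos_token, tokenizer.bos_token_id)  # <s> 40 (from added_tokens.json)
    print(tokenizer.word_delimiter_token)               # |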
vocab.json ADDED
@@ -0,0 +1,42 @@
+{
+  "'": 21,
+  "[PAD]": 39,
+  "[UNK]": 38,
+  "a": 12,
+  "b": 14,
+  "c": 7,
+  "d": 19,
+  "e": 16,
+  "f": 11,
+  "g": 34,
+  "h": 13,
+  "i": 31,
+  "j": 37,
+  "k": 25,
+  "l": 9,
+  "m": 24,
+  "n": 18,
+  "o": 17,
+  "p": 30,
+  "q": 1,
+  "r": 26,
+  "s": 15,
+  "t": 32,
+  "u": 29,
+  "v": 2,
+  "w": 6,
+  "x": 0,
+  "y": 35,
+  "z": 28,
+  "|": 22,
+  "â": 4,
+  "ç": 20,
+  "ë": 3,
+  "î": 23,
+  "ö": 36,
+  "ü": 5,
+  "ğ": 27,
+  "ı": 33,
+  "ş": 10,
+  "̇": 8
+}
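
The vocabulary maps 40 symbols (lowercase Latin letters, Turkish diacritics, the combining dot above U+0307, apostrophe, `[UNK]`, and `[PAD]`) to ids 0-39; `|` (id 22) stands in for spaces, and `replace_word_delimiter_char` restores the space on decode. A hedged round-trip sketch using the tokenizer loaded above; the expected ids follow directly from the table, and note that decoding is CTC-style, so consecutive duplicate ids are merged by default:

    # Encoding maps each character to its vocab id; spaces become the
    # word-delimiter token "|" (id 22).
    ids = tokenizer("merhaba dünya").input_ids
    print(ids)                    # expected: [24, 16, 26, 13, 12, 14, 12, 22, 19, 5, 18, 35, 12]

    # Decoding converts "|" back to a space (no consecutive duplicate
    # ids occur here, so CTC grouping changes nothing).
    print(tokenizer.decode(ids))  # expected: merhaba dünya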