Upload folder using huggingface_hub
- am.mvn +8 -0
- config.yaml +97 -0
- openvino_detokenizer.bin +3 -0
- openvino_detokenizer.xml +200 -0
- openvino_model.bin +3 -0
- openvino_model.xml +0 -0
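The commit title above is the default commit message emitted by `upload_folder` in the huggingface_hub library. A minimal sketch of how an export folder like this one is typically pushed (the local path and repo id below are placeholders, not taken from this repo):

```python
from huggingface_hub import HfApi

api = HfApi()
api.upload_folder(
    folder_path="exported_model/",            # local dir holding am.mvn, config.yaml, openvino_*.xml/.bin
    repo_id="your-username/your-model-repo",  # placeholder repo id
    repo_type="model",
)
```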
am.mvn
ADDED
@@ -0,0 +1,8 @@
<Nnet>
<Splice> 560 560
[ 0 ]
<AddShift> 560 560
<LearnRateCoef> 0 [ -8.311879 -8.600912 -9.615928 -10.43595 -11.21292 -11.88333 -12.36243 -12.63706 -12.8818 -12.83066 -12.89103 -12.95666 -13.19763 -13.40598 -13.49113 -13.5546 -13.55639 -13.51915 -13.68284 -13.53289 -13.42107 -13.65519 -13.50713 -13.75251 -13.76715 -13.87408 -13.73109 -13.70412 -13.56073 -13.53488 -13.54895 -13.56228 -13.59408 -13.62047 -13.64198 -13.66109 -13.62669 -13.58297 -13.57387 -13.4739 -13.53063 -13.48348 -13.61047 -13.64716 -13.71546 -13.79184 -13.90614 -14.03098 -14.18205 -14.35881 -14.48419 -14.60172 -14.70591 -14.83362 -14.92122 -15.00622 -15.05122 -15.03119 -14.99028 -14.92302 -14.86927 -14.82691 -14.7972 -14.76909 -14.71356 -14.61277 -14.51696 -14.42252 -14.36405 -14.30451 -14.23161 -14.19851 -14.16633 -14.15649 -14.10504 -13.99518 -13.79562 -13.3996 -12.7767 -11.71208 -8.311879 -8.600912 -9.615928 -10.43595 -11.21292 -11.88333 -12.36243 -12.63706 -12.8818 -12.83066 -12.89103 -12.95666 -13.19763 -13.40598 -13.49113 -13.5546 -13.55639 -13.51915 -13.68284 -13.53289 -13.42107 -13.65519 -13.50713 -13.75251 -13.76715 -13.87408 -13.73109 -13.70412 -13.56073 -13.53488 -13.54895 -13.56228 -13.59408 -13.62047 -13.64198 -13.66109 -13.62669 -13.58297 -13.57387 -13.4739 -13.53063 -13.48348 -13.61047 -13.64716 -13.71546 -13.79184 -13.90614 -14.03098 -14.18205 -14.35881 -14.48419 -14.60172 -14.70591 -14.83362 -14.92122 -15.00622 -15.05122 -15.03119 -14.99028 -14.92302 -14.86927 -14.82691 -14.7972 -14.76909 -14.71356 -14.61277 -14.51696 -14.42252 -14.36405 -14.30451 -14.23161 -14.19851 -14.16633 -14.15649 -14.10504 -13.99518 -13.79562 -13.3996 -12.7767 -11.71208 -8.311879 -8.600912 -9.615928 -10.43595 -11.21292 -11.88333 -12.36243 -12.63706 -12.8818 -12.83066 -12.89103 -12.95666 -13.19763 -13.40598 -13.49113 -13.5546 -13.55639 -13.51915 -13.68284 -13.53289 -13.42107 -13.65519 -13.50713 -13.75251 -13.76715 -13.87408 -13.73109 -13.70412 -13.56073 -13.53488 -13.54895 -13.56228 -13.59408 -13.62047 -13.64198 -13.66109 -13.62669 -13.58297 -13.57387 -13.4739 -13.53063 -13.48348 -13.61047 -13.64716 -13.71546 -13.79184 -13.90614 -14.03098 -14.18205 -14.35881 -14.48419 -14.60172 -14.70591 -14.83362 -14.92122 -15.00622 -15.05122 -15.03119 -14.99028 -14.92302 -14.86927 -14.82691 -14.7972 -14.76909 -14.71356 -14.61277 -14.51696 -14.42252 -14.36405 -14.30451 -14.23161 -14.19851 -14.16633 -14.15649 -14.10504 -13.99518 -13.79562 -13.3996 -12.7767 -11.71208 -8.311879 -8.600912 -9.615928 -10.43595 -11.21292 -11.88333 -12.36243 -12.63706 -12.8818 -12.83066 -12.89103 -12.95666 -13.19763 -13.40598 -13.49113 -13.5546 -13.55639 -13.51915 -13.68284 -13.53289 -13.42107 -13.65519 -13.50713 -13.75251 -13.76715 -13.87408 -13.73109 -13.70412 -13.56073 -13.53488 -13.54895 -13.56228 -13.59408 -13.62047 -13.64198 -13.66109 -13.62669 -13.58297 -13.57387 -13.4739 -13.53063 -13.48348 -13.61047 -13.64716 -13.71546 -13.79184 -13.90614 -14.03098 -14.18205 -14.35881 -14.48419 -14.60172 -14.70591 -14.83362 -14.92122 -15.00622 -15.05122 -15.03119 -14.99028 -14.92302 -14.86927 -14.82691 -14.7972 -14.76909 -14.71356 -14.61277 -14.51696 -14.42252 -14.36405 -14.30451 -14.23161 -14.19851 -14.16633 -14.15649 -14.10504 -13.99518 -13.79562 -13.3996 -12.7767 -11.71208 -8.311879 -8.600912 -9.615928 -10.43595 -11.21292 -11.88333 -12.36243 -12.63706 -12.8818 -12.83066 -12.89103 -12.95666 -13.19763 -13.40598 -13.49113 -13.5546 -13.55639 -13.51915 -13.68284 -13.53289 -13.42107 -13.65519 -13.50713 -13.75251 -13.76715 -13.87408 -13.73109 -13.70412 -13.56073 -13.53488 -13.54895 -13.56228 -13.59408 -13.62047 -13.64198 -13.66109 
-13.62669 -13.58297 -13.57387 -13.4739 -13.53063 -13.48348 -13.61047 -13.64716 -13.71546 -13.79184 -13.90614 -14.03098 -14.18205 -14.35881 -14.48419 -14.60172 -14.70591 -14.83362 -14.92122 -15.00622 -15.05122 -15.03119 -14.99028 -14.92302 -14.86927 -14.82691 -14.7972 -14.76909 -14.71356 -14.61277 -14.51696 -14.42252 -14.36405 -14.30451 -14.23161 -14.19851 -14.16633 -14.15649 -14.10504 -13.99518 -13.79562 -13.3996 -12.7767 -11.71208 -8.311879 -8.600912 -9.615928 -10.43595 -11.21292 -11.88333 -12.36243 -12.63706 -12.8818 -12.83066 -12.89103 -12.95666 -13.19763 -13.40598 -13.49113 -13.5546 -13.55639 -13.51915 -13.68284 -13.53289 -13.42107 -13.65519 -13.50713 -13.75251 -13.76715 -13.87408 -13.73109 -13.70412 -13.56073 -13.53488 -13.54895 -13.56228 -13.59408 -13.62047 -13.64198 -13.66109 -13.62669 -13.58297 -13.57387 -13.4739 -13.53063 -13.48348 -13.61047 -13.64716 -13.71546 -13.79184 -13.90614 -14.03098 -14.18205 -14.35881 -14.48419 -14.60172 -14.70591 -14.83362 -14.92122 -15.00622 -15.05122 -15.03119 -14.99028 -14.92302 -14.86927 -14.82691 -14.7972 -14.76909 -14.71356 -14.61277 -14.51696 -14.42252 -14.36405 -14.30451 -14.23161 -14.19851 -14.16633 -14.15649 -14.10504 -13.99518 -13.79562 -13.3996 -12.7767 -11.71208 -8.311879 -8.600912 -9.615928 -10.43595 -11.21292 -11.88333 -12.36243 -12.63706 -12.8818 -12.83066 -12.89103 -12.95666 -13.19763 -13.40598 -13.49113 -13.5546 -13.55639 -13.51915 -13.68284 -13.53289 -13.42107 -13.65519 -13.50713 -13.75251 -13.76715 -13.87408 -13.73109 -13.70412 -13.56073 -13.53488 -13.54895 -13.56228 -13.59408 -13.62047 -13.64198 -13.66109 -13.62669 -13.58297 -13.57387 -13.4739 -13.53063 -13.48348 -13.61047 -13.64716 -13.71546 -13.79184 -13.90614 -14.03098 -14.18205 -14.35881 -14.48419 -14.60172 -14.70591 -14.83362 -14.92122 -15.00622 -15.05122 -15.03119 -14.99028 -14.92302 -14.86927 -14.82691 -14.7972 -14.76909 -14.71356 -14.61277 -14.51696 -14.42252 -14.36405 -14.30451 -14.23161 -14.19851 -14.16633 -14.15649 -14.10504 -13.99518 -13.79562 -13.3996 -12.7767 -11.71208 ]
<Rescale> 560 560
<LearnRateCoef> 0 [ 0.155775 0.154484 0.1527379 0.1518718 0.1506028 0.1489256 0.147067 0.1447061 0.1436307 0.1443568 0.1451849 0.1455157 0.1452821 0.1445717 0.1439195 0.1435867 0.1436018 0.1438781 0.1442086 0.1448844 0.1454756 0.145663 0.146268 0.1467386 0.1472724 0.147664 0.1480913 0.1483739 0.1488841 0.1493636 0.1497088 0.1500379 0.1502916 0.1505389 0.1506787 0.1507102 0.1505992 0.1505445 0.1505938 0.1508133 0.1509569 0.1512396 0.1514625 0.1516195 0.1516156 0.1515561 0.1514966 0.1513976 0.1512612 0.151076 0.1510596 0.1510431 0.151077 0.1511168 0.1511917 0.151023 0.1508045 0.1505885 0.1503493 0.1502373 0.1501726 0.1500762 0.1500065 0.1499782 0.150057 0.1502658 0.150469 0.1505335 0.1505505 0.1505328 0.1504275 0.1502438 0.1499674 0.1497118 0.1494661 0.1493102 0.1493681 0.1495501 0.1499738 0.1509654 0.155775 0.154484 0.1527379 0.1518718 0.1506028 0.1489256 0.147067 0.1447061 0.1436307 0.1443568 0.1451849 0.1455157 0.1452821 0.1445717 0.1439195 0.1435867 0.1436018 0.1438781 0.1442086 0.1448844 0.1454756 0.145663 0.146268 0.1467386 0.1472724 0.147664 0.1480913 0.1483739 0.1488841 0.1493636 0.1497088 0.1500379 0.1502916 0.1505389 0.1506787 0.1507102 0.1505992 0.1505445 0.1505938 0.1508133 0.1509569 0.1512396 0.1514625 0.1516195 0.1516156 0.1515561 0.1514966 0.1513976 0.1512612 0.151076 0.1510596 0.1510431 0.151077 0.1511168 0.1511917 0.151023 0.1508045 0.1505885 0.1503493 0.1502373 0.1501726 0.1500762 0.1500065 0.1499782 0.150057 0.1502658 0.150469 0.1505335 0.1505505 0.1505328 0.1504275 0.1502438 0.1499674 0.1497118 0.1494661 0.1493102 0.1493681 0.1495501 0.1499738 0.1509654 0.155775 0.154484 0.1527379 0.1518718 0.1506028 0.1489256 0.147067 0.1447061 0.1436307 0.1443568 0.1451849 0.1455157 0.1452821 0.1445717 0.1439195 0.1435867 0.1436018 0.1438781 0.1442086 0.1448844 0.1454756 0.145663 0.146268 0.1467386 0.1472724 0.147664 0.1480913 0.1483739 0.1488841 0.1493636 0.1497088 0.1500379 0.1502916 0.1505389 0.1506787 0.1507102 0.1505992 0.1505445 0.1505938 0.1508133 0.1509569 0.1512396 0.1514625 0.1516195 0.1516156 0.1515561 0.1514966 0.1513976 0.1512612 0.151076 0.1510596 0.1510431 0.151077 0.1511168 0.1511917 0.151023 0.1508045 0.1505885 0.1503493 0.1502373 0.1501726 0.1500762 0.1500065 0.1499782 0.150057 0.1502658 0.150469 0.1505335 0.1505505 0.1505328 0.1504275 0.1502438 0.1499674 0.1497118 0.1494661 0.1493102 0.1493681 0.1495501 0.1499738 0.1509654 0.155775 0.154484 0.1527379 0.1518718 0.1506028 0.1489256 0.147067 0.1447061 0.1436307 0.1443568 0.1451849 0.1455157 0.1452821 0.1445717 0.1439195 0.1435867 0.1436018 0.1438781 0.1442086 0.1448844 0.1454756 0.145663 0.146268 0.1467386 0.1472724 0.147664 0.1480913 0.1483739 0.1488841 0.1493636 0.1497088 0.1500379 0.1502916 0.1505389 0.1506787 0.1507102 0.1505992 0.1505445 0.1505938 0.1508133 0.1509569 0.1512396 0.1514625 0.1516195 0.1516156 0.1515561 0.1514966 0.1513976 0.1512612 0.151076 0.1510596 0.1510431 0.151077 0.1511168 0.1511917 0.151023 0.1508045 0.1505885 0.1503493 0.1502373 0.1501726 0.1500762 0.1500065 0.1499782 0.150057 0.1502658 0.150469 0.1505335 0.1505505 0.1505328 0.1504275 0.1502438 0.1499674 0.1497118 0.1494661 0.1493102 0.1493681 0.1495501 0.1499738 0.1509654 0.155775 0.154484 0.1527379 0.1518718 0.1506028 0.1489256 0.147067 0.1447061 0.1436307 0.1443568 0.1451849 0.1455157 0.1452821 0.1445717 0.1439195 0.1435867 0.1436018 0.1438781 0.1442086 0.1448844 0.1454756 0.145663 0.146268 0.1467386 0.1472724 0.147664 0.1480913 0.1483739 0.1488841 0.1493636 0.1497088 0.1500379 0.1502916 0.1505389 0.1506787 0.1507102 0.1505992 0.1505445 
0.1505938 0.1508133 0.1509569 0.1512396 0.1514625 0.1516195 0.1516156 0.1515561 0.1514966 0.1513976 0.1512612 0.151076 0.1510596 0.1510431 0.151077 0.1511168 0.1511917 0.151023 0.1508045 0.1505885 0.1503493 0.1502373 0.1501726 0.1500762 0.1500065 0.1499782 0.150057 0.1502658 0.150469 0.1505335 0.1505505 0.1505328 0.1504275 0.1502438 0.1499674 0.1497118 0.1494661 0.1493102 0.1493681 0.1495501 0.1499738 0.1509654 0.155775 0.154484 0.1527379 0.1518718 0.1506028 0.1489256 0.147067 0.1447061 0.1436307 0.1443568 0.1451849 0.1455157 0.1452821 0.1445717 0.1439195 0.1435867 0.1436018 0.1438781 0.1442086 0.1448844 0.1454756 0.145663 0.146268 0.1467386 0.1472724 0.147664 0.1480913 0.1483739 0.1488841 0.1493636 0.1497088 0.1500379 0.1502916 0.1505389 0.1506787 0.1507102 0.1505992 0.1505445 0.1505938 0.1508133 0.1509569 0.1512396 0.1514625 0.1516195 0.1516156 0.1515561 0.1514966 0.1513976 0.1512612 0.151076 0.1510596 0.1510431 0.151077 0.1511168 0.1511917 0.151023 0.1508045 0.1505885 0.1503493 0.1502373 0.1501726 0.1500762 0.1500065 0.1499782 0.150057 0.1502658 0.150469 0.1505335 0.1505505 0.1505328 0.1504275 0.1502438 0.1499674 0.1497118 0.1494661 0.1493102 0.1493681 0.1495501 0.1499738 0.1509654 0.155775 0.154484 0.1527379 0.1518718 0.1506028 0.1489256 0.147067 0.1447061 0.1436307 0.1443568 0.1451849 0.1455157 0.1452821 0.1445717 0.1439195 0.1435867 0.1436018 0.1438781 0.1442086 0.1448844 0.1454756 0.145663 0.146268 0.1467386 0.1472724 0.147664 0.1480913 0.1483739 0.1488841 0.1493636 0.1497088 0.1500379 0.1502916 0.1505389 0.1506787 0.1507102 0.1505992 0.1505445 0.1505938 0.1508133 0.1509569 0.1512396 0.1514625 0.1516195 0.1516156 0.1515561 0.1514966 0.1513976 0.1512612 0.151076 0.1510596 0.1510431 0.151077 0.1511168 0.1511917 0.151023 0.1508045 0.1505885 0.1503493 0.1502373 0.1501726 0.1500762 0.1500065 0.1499782 0.150057 0.1502658 0.150469 0.1505335 0.1505505 0.1505328 0.1504275 0.1502438 0.1499674 0.1497118 0.1494661 0.1493102 0.1493681 0.1495501 0.1499738 0.1509654 ]
</Nnet>
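am.mvn is a Kaldi-nnet-style CMVN file: in this format the `<AddShift>` vector typically holds the negated feature means and `<Rescale>` the inverse standard deviations for the 560-dimensional LFR features (80 mels x 7 stacked frames, matching the frontend settings in config.yaml). A minimal sketch of how such stats are applied, assuming the two vectors have already been parsed into NumPy arrays (the parsing helpers are not shown):

```python
import numpy as np

def apply_cmvn(feats: np.ndarray, shift: np.ndarray, scale: np.ndarray) -> np.ndarray:
    # Normalize LFR features with Kaldi-style AddShift/Rescale stats.
    # feats: [num_frames, 560]; shift/scale: [560] vectors taken from am.mvn.
    return (feats + shift) * scale

# hypothetical usage:
# normed = apply_cmvn(lfr_feats, addshift_vec, rescale_vec)
```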
config.yaml
ADDED
@@ -0,0 +1,97 @@
encoder: SenseVoiceEncoderSmall
encoder_conf:
  output_size: 512
  attention_heads: 4
  linear_units: 2048
  num_blocks: 50
  tp_blocks: 20
  dropout_rate: 0.1
  positional_dropout_rate: 0.1
  attention_dropout_rate: 0.1
  input_layer: pe
  pos_enc_class: SinusoidalPositionEncoder
  normalize_before: true
  kernel_size: 11
  sanm_shfit: 0
  selfattention_layer_type: sanm


model: SenseVoiceSmall
model_conf:
  length_normalized_loss: true
  sos: 1
  eos: 2
  ignore_id: -1

tokenizer: SentencepiecesTokenizer
tokenizer_conf:
  bpemodel: null
  unk_symbol: <unk>
  split_with_space: true

frontend: WavFrontend
frontend_conf:
  fs: 16000
  window: hamming
  n_mels: 80
  frame_length: 25
  frame_shift: 10
  lfr_m: 7
  lfr_n: 6
  cmvn_file: null


dataset: SenseVoiceCTCDataset
dataset_conf:
  index_ds: IndexDSJsonl
  batch_sampler: EspnetStyleBatchSampler
  data_split_num: 32
  batch_type: token
  batch_size: 14000
  max_token_length: 2000
  min_token_length: 60
  max_source_length: 2000
  min_source_length: 60
  max_target_length: 200
  min_target_length: 0
  shuffle: true
  num_workers: 4
  sos: ${model_conf.sos}
  eos: ${model_conf.eos}
  IndexDSJsonl: IndexDSJsonl
  retry: 20

train_conf:
  accum_grad: 1
  grad_clip: 5
  max_epoch: 20
  keep_nbest_models: 10
  avg_nbest_model: 10
  log_interval: 100
  resume: true
  validate_interval: 10000
  save_checkpoint_interval: 10000

optim: adamw
optim_conf:
  lr: 0.00002
scheduler: warmuplr
scheduler_conf:
  warmup_steps: 25000

specaug: SpecAugLFR
specaug_conf:
  apply_time_warp: false
  time_warp_window: 5
  time_warp_mode: bicubic
  apply_freq_mask: true
  freq_mask_width_range:
  - 0
  - 30
  lfr_rate: 6
  num_freq_mask: 1
  apply_time_mask: true
  time_mask_width_range:
  - 0
  - 12
  num_time_mask: 1
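The `${model_conf.sos}` and `${model_conf.eos}` entries are interpolation references rather than literal values. A minimal sketch of loading the file and resolving them, assuming an OmegaConf-style loader (which supports this `${...}` syntax):

```python
from omegaconf import OmegaConf

cfg = OmegaConf.load("config.yaml")
print(cfg.encoder)            # SenseVoiceEncoderSmall
print(cfg.dataset_conf.sos)   # resolves ${model_conf.sos} -> 1
print(cfg.frontend_conf.n_mels, cfg.frontend_conf.lfr_m)  # 80 7
```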
openvino_detokenizer.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:48a0af897d1991ff484190c0c4122783ba7dd1a71185555c6e07a55ad4ee3b50
size 379811
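The `.bin` files in this commit are stored as Git LFS pointers (spec version, sha256 oid, byte size); the actual weights live in LFS storage. A minimal sketch of fetching the resolved binary with huggingface_hub (the repo id is a placeholder):

```python
from huggingface_hub import hf_hub_download

# Returns a local path to the real LFS object, not the 3-line pointer file
local_path = hf_hub_download(
    repo_id="your-username/your-model-repo",  # placeholder
    filename="openvino_detokenizer.bin",
)
print(local_path)
```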
openvino_detokenizer.xml
ADDED
@@ -0,0 +1,200 @@
<?xml version="1.0"?>
<net name="detokenizer" version="11">
	<layers>
		<layer id="0" name="Parameter_20" type="Parameter" version="opset1">
			<data shape="?,?" element_type="i64" />
			<output>
				<port id="0" precision="I64" names="Parameter_20">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="1" name="Constant_2" type="Const" version="opset1">
			<data element_type="u8" shape="379758" offset="0" size="379758" />
			<output>
				<port id="0" precision="U8">
					<dim>379758</dim>
				</port>
			</output>
		</layer>
		<layer id="2" name="Convert_36" type="Convert" version="opset1">
			<data destination_type="i32" />
			<input>
				<port id="0" precision="I64">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="I32">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="3" name="SentencepieceDetokenizer_21" type="SentencepieceDetokenizer" version="extension">
			<input>
				<port id="0" precision="U8">
					<dim>379758</dim>
				</port>
				<port id="1" precision="I32">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="3" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="4" precision="U8">
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="4" name="Constant_23" type="Const" version="opset1">
			<data element_type="u8" shape="51" offset="379758" size="51" />
			<output>
				<port id="0" precision="U8">
					<dim>51</dim>
				</port>
			</output>
		</layer>
		<layer id="5" name="Constant_25" type="Const" version="opset1">
			<data element_type="u8" shape="2" offset="379809" size="2" />
			<output>
				<port id="0" precision="U8">
					<dim>2</dim>
				</port>
			</output>
		</layer>
		<layer id="6" name="RegexNormalization_26" type="RegexNormalization" version="extension">
			<data global_replace="true" />
			<input>
				<port id="0" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="1" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="2" precision="U8">
					<dim>-1</dim>
				</port>
				<port id="3" precision="U8">
					<dim>51</dim>
				</port>
				<port id="4" precision="U8">
					<dim>2</dim>
				</port>
			</input>
			<output>
				<port id="5" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="6" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="7" precision="U8">
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="7" name="UTF8Validate_27" type="UTF8Validate" version="extension">
			<data replace_mode="true" />
			<input>
				<port id="0" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="1" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="2" precision="U8">
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="3" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="4" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="5" precision="U8">
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="8" name="StringTensorPack_28" type="StringTensorPack" version="extension">
			<data mode="begins_ends" />
			<input>
				<port id="0" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="1" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="2" precision="U8">
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="3" precision="STRING" names="string_output">
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="9" name="Result_29" type="Result" version="opset1">
			<input>
				<port id="0" precision="STRING">
					<dim>-1</dim>
				</port>
			</input>
		</layer>
	</layers>
	<edges>
		<edge from-layer="0" from-port="0" to-layer="2" to-port="0" />
		<edge from-layer="1" from-port="0" to-layer="3" to-port="0" />
		<edge from-layer="2" from-port="1" to-layer="3" to-port="1" />
		<edge from-layer="3" from-port="2" to-layer="6" to-port="0" />
		<edge from-layer="3" from-port="3" to-layer="6" to-port="1" />
		<edge from-layer="3" from-port="4" to-layer="6" to-port="2" />
		<edge from-layer="4" from-port="0" to-layer="6" to-port="3" />
		<edge from-layer="5" from-port="0" to-layer="6" to-port="4" />
		<edge from-layer="6" from-port="5" to-layer="7" to-port="0" />
		<edge from-layer="6" from-port="6" to-layer="7" to-port="1" />
		<edge from-layer="6" from-port="7" to-layer="7" to-port="2" />
		<edge from-layer="7" from-port="3" to-layer="8" to-port="0" />
		<edge from-layer="7" from-port="4" to-layer="8" to-port="1" />
		<edge from-layer="7" from-port="5" to-layer="8" to-port="2" />
		<edge from-layer="8" from-port="3" to-layer="9" to-port="0" />
	</edges>
	<rt_info>
		<add_attention_mask value="True" />
		<add_prefix_space value="True" />
		<add_special_tokens value="True" />
		<chat_template value="{% for message in messages %}{{'<|im_start|>' + message['role'] + ' ' + message['content'] + '<|im_end|>' + ' '}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant ' }}{% endif %}" />
		<clean_up_tokenization_spaces value="True" />
		<detokenizer_input_type value="i64" />
		<eos_token_id value="2" />
		<handle_special_tokens_with_re value="False" />
		<number_of_inputs value="1" />
		<openvino_tokenizers_version value="2024.6.0.0" />
		<openvino_version value="2024.6.0" />
		<original_tokenizer_class value="<class 'transformers.models.t5.tokenization_t5_fast.T5TokenizerFast'>" />
		<pad_token_id value="25155" />
		<sentencepiece_version value="0.2.0" />
		<skip_special_tokens value="True" />
		<streaming_detokenizer value="False" />
		<tiktoken_version value="0.8.0" />
		<tokenizer_output_type value="i64" />
		<tokenizers_version value="0.19.1" />
		<transformers_version value="4.40.1" />
		<use_max_padding value="False" />
		<use_sentencepiece_backend value="False" />
		<utf8_replace_mode value="replace" />
		<with_detokenizer value="True" />
	</rt_info>
</net>
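This graph relies on extension ops (`SentencepieceDetokenizer`, `RegexNormalization`, `UTF8Validate`, `StringTensorPack`) provided by the openvino_tokenizers package, so that package must be imported before the model is compiled. A minimal sketch, assuming the IR files sit in the current directory:

```python
import numpy as np
import openvino as ov
import openvino_tokenizers  # noqa: F401 -- registers the extension ops used by this graph

detok = ov.compile_model("openvino_detokenizer.xml")
token_ids = np.array([[3, 17, 256, 2]], dtype=np.int64)  # hypothetical ids; the input is i64 with shape [batch, seq]
text = detok(token_ids)["string_output"]                 # output name taken from the IR above
print(text)
```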
openvino_model.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c80c5c7ab0e12e866bf4c2f97375b1cf242078a658b3740dc8e2057da76f9dd8
size 130115913
openvino_model.xml
ADDED
The diff for this file is too large to render.
See raw diff
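The encoder IR is too large to render inline, but it can be inspected and compiled the same way; OpenVINO picks up the weights from the adjacent openvino_model.bin automatically. A minimal sketch:

```python
import openvino as ov

core = ov.Core()
model = core.read_model("openvino_model.xml")  # weights loaded from openvino_model.bin next to it
for inp in model.inputs:
    print(inp.any_name, inp.partial_shape, inp.element_type)
compiled = core.compile_model(model, "CPU")
```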