File size: 3,912 Bytes
			
<?xml version="1.0"?>
<net name="detokenizer" version="11">
	<layers>
		<layer id="0" name="Parameter_207565" type="Parameter" version="opset1">
			<data shape="?,?" element_type="i64" />
			<output>
				<port id="0" precision="I64" names="Parameter_207565">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="1" name="Constant_207492" type="Const" version="opset1">
			<data element_type="u8" shape="1018544" offset="0" size="1018544" />
			<output>
				<port id="0" precision="U8">
					<dim>1018544</dim>
				</port>
			</output>
		</layer>
		<layer id="2" name="Convert_207575" type="Convert" version="opset1">
			<data destination_type="i32" />
			<input>
				<port id="0" precision="I64">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="I32">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="3" name="SentencepieceDetokenizer_207566" type="SentencepieceDetokenizer" version="extension">
			<input>
				<port id="0" precision="U8">
					<dim>1018544</dim>
				</port>
				<port id="1" precision="I32">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="3" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="4" precision="U8">
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="4" name="StringTensorPack_207567" type="StringTensorPack" version="extension">
			<data mode="begins_ends" />
			<input>
				<port id="0" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="1" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="2" precision="U8">
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="3" precision="STRING" names="string_output">
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="5" name="Result_207568" type="Result" version="opset1">
			<input>
				<port id="0" precision="STRING">
					<dim>-1</dim>
				</port>
			</input>
		</layer>
	</layers>
	<edges>
		<edge from-layer="0" from-port="0" to-layer="2" to-port="0" />
		<edge from-layer="1" from-port="0" to-layer="3" to-port="0" />
		<edge from-layer="2" from-port="1" to-layer="3" to-port="1" />
		<edge from-layer="3" from-port="2" to-layer="4" to-port="0" />
		<edge from-layer="3" from-port="3" to-layer="4" to-port="1" />
		<edge from-layer="3" from-port="4" to-layer="4" to-port="2" />
		<edge from-layer="4" from-port="3" to-layer="5" to-port="0" />
	</edges>
	<rt_info>
		<add_attention_mask value="True" />
		<add_prefix_space />
		<add_special_tokens value="True" />
		<chat_template value="{% for message in messages %}{% if loop.first %}[gMASK]sop<|{{ message['role'] }}|>
 {{ message['content'] }}{% else %}<|{{ message['role'] }}|>
 {{ message['content'] }}{% endif %}{% endfor %}{% if add_generation_prompt %}<|assistant|>{% endif %}" />
		<clean_up_tokenization_spaces value="False" />
		<detokenizer_input_type value="i64" />
		<eos_token_id value="2" />
		<handle_special_tokens_with_re value="True" />
		<number_of_inputs value="1" />
		<openvino_tokenizers_version value="2024.5.0.0.dev20241030" />
		<openvino_version value="2024.5.0.dev20241030" />
		<original_tokenizer_class value="<class 'transformers_modules.THUDM.chatglm3-6b.67d005d386a01d4825649743f41e90f83edd6094.tokenization_chatglm.ChatGLMTokenizer'>" />
		<pad_token_id value="0" />
		<sentencepiece_version value="0.2.0" />
		<skip_special_tokens value="True" />
		<streaming_detokenizer value="False" />
		<tiktoken_version value="0.8.0" />
		<tokenizer_output_type value="i64" />
		<tokenizers_version value="0.20.1" />
		<transformers_version value="4.45.2" />
		<use_max_padding value="False" />
		<use_sentencepiece_backend value="False" />
		<utf8_replace_mode />
		<with_detokenizer value="True" />
	</rt_info>
</net>
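
For reference, this IR is the detokenizer half of an openvino_tokenizers export for ChatGLM3: it takes a [batch, sequence] tensor of i64 token IDs, converts them to i32, runs the SentencepieceDetokenizer extension op against the 1,018,544-byte u8 constant (the serialized tokenizer model, stored in the accompanying weights file), and packs the result into the STRING output named "string_output". The snippet below is a minimal usage sketch, not part of the file: it assumes the IR is saved as detokenizer.xml next to its detokenizer.bin weights, that the openvino and openvino_tokenizers Python packages are installed (importing openvino_tokenizers registers the "extension"-version ops used above), and the token IDs shown are placeholders rather than real ChatGLM3 tokens.

import numpy as np
import openvino as ov
import openvino_tokenizers  # noqa: F401 -- registers SentencepieceDetokenizer / StringTensorPack

core = ov.Core()
# Input: i64 tensor of shape [batch, sequence length], per the Parameter layer above.
detokenizer = core.compile_model("detokenizer.xml", "CPU")

token_ids = np.array([[30910, 34211]], dtype=np.int64)  # placeholder IDs for illustration
outputs = detokenizer(token_ids)
decoded = outputs["string_output"]  # STRING tensor: one decoded string per batch row
print(decoded)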
