Model save

- README.md +1 -1
- added_tokens.json +3 -0
- model-00001-of-00002.safetensors +1 -1
- model-00002-of-00002.safetensors +1 -1
- special_tokens_map.json +1 -7
- tokenizer.json +2 -2
- tokenizer_config.json +25 -1
- training_args.bin +1 -1
README.md
CHANGED
@@ -27,7 +27,7 @@ print(output["generated_text"])
 
 ## Training procedure
 
-[<img src="https://raw.githubusercontent.com/wandb/assets/main/wandb-github-badge-28.svg" alt="Visualize in Weights & Biases" width="150" height="24"/>](https://wandb.ai/zc096373/s1/runs/
+[<img src="https://raw.githubusercontent.com/wandb/assets/main/wandb-github-badge-28.svg" alt="Visualize in Weights & Biases" width="150" height="24"/>](https://wandb.ai/zc096373/s1/runs/zp9uk74s)
 
 
 This model was trained with SFT.
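The hunk context (`print(output["generated_text"])`) is the tail of the README's quick-start snippet, which follows the standard transformers text-generation pipeline pattern. A minimal sketch of that pattern; the repo id below is a placeholder, since the commit view does not show it:

```python
# Sketch of the quick-start pattern the README hunk anchors to.
# "your-org/your-sft-model" is a placeholder repo id, not from this commit.
from transformers import pipeline

generator = pipeline("text-generation", model="your-org/your-sft-model")
output = generator("Question: What does this model do?", max_new_tokens=64)[0]
print(output["generated_text"])
```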
added_tokens.json
CHANGED
@@ -1,7 +1,10 @@
 {
+  "</saving>": 151670,
   "</think>": 151668,
   "</tool_call>": 151658,
   "</tool_response>": 151666,
+  "<control>": 151671,
+  "<saving>": 151669,
   "<think>": 151667,
   "<tool_call>": 151657,
   "<tool_response>": 151665,
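The three additions give the checkpoint paired `<saving>`/`</saving>` span markers plus a `<control>` token, in the id block directly above Qwen's existing `<think>` and tool-call tokens. A quick sanity check that a loaded tokenizer resolves them to the ids listed here (again with a placeholder repo id):

```python
# Verify the newly added tokens map to the ids from added_tokens.json.
# "your-org/your-sft-model" is a placeholder repo id.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("your-org/your-sft-model")
expected = {"<saving>": 151669, "</saving>": 151670, "<control>": 151671}
for token, token_id in expected.items():
    assert tok.convert_tokens_to_ids(token) == token_id, token
```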
model-00001-of-00002.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:69439624d825b26d0c08c78f4a5739096a83072a36fac4b93a1236d29116a490
 size 4967376872
model-00002-of-00002.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:2ead046091f06e4f44bb890ae4ba3a8db01846e343fba91f92b36ed6c5b76212
 size 3155292920
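Both shard files are Git LFS pointers: the `oid sha256:` line is the digest of the actual weights blob and `size` is its byte count, so unchanged sizes with new digests mean the weights were rewritten in place. A sketch for verifying a downloaded shard against its pointer, assuming the file has already been fetched locally:

```python
# Check a downloaded shard against the sha256 oid in its LFS pointer.
# The expected digest is the new oid from the first shard's diff above.
import hashlib

def sha256_of(path: str) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

digest = sha256_of("model-00001-of-00002.safetensors")
assert digest == "69439624d825b26d0c08c78f4a5739096a83072a36fac4b93a1236d29116a490"
```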
special_tokens_map.json
CHANGED
@@ -21,11 +21,5 @@
     "rstrip": false,
     "single_word": false
   },
-  "pad_token": {
-    "content": "<|endoftext|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  }
+  "pad_token": "<|fim_pad|>"
 }
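The pad token moves from a fully specified `<|endoftext|>` token object to the plain string `"<|fim_pad|>"`, giving padding a token distinct from end-of-text; keeping the two separate is a common SFT setup, so pad positions can be masked without touching real EOS tokens. A quick check of what the saved tokenizer reports (placeholder repo id):

```python
# Confirm the pad token picked up from special_tokens_map.json and
# tokenizer_config.json. "your-org/your-sft-model" is a placeholder.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("your-org/your-sft-model")
print(tok.pad_token)  # expected: <|fim_pad|>
print(tok.eos_token)  # should differ from the pad token
```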
tokenizer.json
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:41c8dfeb937be85b6ef72682cee777c65c594d7902ca18eba9fb6b352d74486e
+size 11423211
tokenizer_config.json
CHANGED
@@ -209,6 +209,30 @@
       "rstrip": false,
       "single_word": false,
       "special": false
+    },
+    "151669": {
+      "content": "<saving>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151670": {
+      "content": "</saving>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151671": {
+      "content": "<control>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
     }
   },
   "additional_special_tokens": [
@@ -232,7 +256,7 @@
   "errors": "replace",
   "extra_special_tokens": {},
   "model_max_length": 131072,
-  "pad_token": "<|endoftext|>",
+  "pad_token": "<|fim_pad|>",
   "split_special_tokens": false,
   "tokenizer_class": "Qwen2Tokenizer",
   "unk_token": null
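Note that all three new entries in the added-tokens table are registered with `"special": false` and `"normalized": true`, so they behave as ordinary vocabulary items; in particular, decoding with `skip_special_tokens=True` should not strip them. A sketch of that behavior, under the same placeholder repo id:

```python
# Non-special added tokens survive skip_special_tokens=True on decode.
# "your-org/your-sft-model" is a placeholder repo id.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("your-org/your-sft-model")
ids = tok("<saving>summarize progress so far</saving>")["input_ids"]
text = tok.decode(ids, skip_special_tokens=True)
print(text)  # expected to still contain the <saving> ... </saving> markers
```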
training_args.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:df53f6869c2132314fa55454634da2694f711b9c6c6e8e59c24c41db112cef98
 size 5880
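`training_args.bin` is not a readable config file: assuming it was produced by the transformers `Trainer` (the usual source of this filename), it is a `TrainingArguments` object pickled with `torch.save`, and the unchanged 5880-byte size means only the pickled field values differ. A sketch for inspecting it; unpickling executes code, so only do this for a checkpoint you trust:

```python
# Inspect the pickled TrainingArguments saved alongside the checkpoint.
# torch.load with weights_only=False unpickles arbitrary objects: only
# run this on files you trust.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)
```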