ggbetz committed on
Commit 1b51052 · verified · 1 Parent(s): 30dc8a9

Model save

README.md ADDED
@@ -0,0 +1,69 @@
+ ---
+ base_model: DebateLabKIT/Phi-4-Argunaut-1-SFT
+ library_name: transformers
+ model_name: Phi-4-Argunaut-1-SPIN-dev0
+ tags:
+ - generated_from_trainer
+ - trl
+ - dpo
+ licence: license
+ ---
+
+ # Model Card for Phi-4-Argunaut-1-SPIN-dev0
+
+ This model is a fine-tuned version of [DebateLabKIT/Phi-4-Argunaut-1-SFT](https://huggingface.co/DebateLabKIT/Phi-4-Argunaut-1-SFT).
+ It has been trained using [TRL](https://github.com/huggingface/trl).
+
+ ## Quick start
+
+ ```python
+ from transformers import pipeline
+
+ question = "If you had a time machine, but could only go to the past or the future once and never return, which would you choose and why?"
+ generator = pipeline("text-generation", model="DebateLabKIT/Phi-4-Argunaut-1-SPIN-dev0", device="cuda")
+ output = generator([{"role": "user", "content": question}], max_new_tokens=128, return_full_text=False)[0]
+ print(output["generated_text"])
+ ```
+
+ ## Training procedure
+
+ [<img src="https://raw.githubusercontent.com/wandb/assets/main/wandb-github-badge-28.svg" alt="Visualize in Weights & Biases" width="150" height="24"/>](https://wandb.ai/ggbetz/argunauts-training/runs/mq4gsxna)
+
+
+ This model was trained with DPO, a method introduced in [Direct Preference Optimization: Your Language Model is Secretly a Reward Model](https://huggingface.co/papers/2305.18290).
+
+ ### Framework versions
+
+ - TRL: 0.14.0
+ - Transformers: 4.46.3
+ - Pytorch: 2.4.1
+ - Datasets: 3.1.0
+ - Tokenizers: 0.20.3
+
+ ## Citations
+
+ Cite DPO as:
+
+ ```bibtex
+ @inproceedings{rafailov2023direct,
+ title = {{Direct Preference Optimization: Your Language Model is Secretly a Reward Model}},
+ author = {Rafael Rafailov and Archit Sharma and Eric Mitchell and Christopher D. Manning and Stefano Ermon and Chelsea Finn},
+ year = 2023,
+ booktitle = {Advances in Neural Information Processing Systems 36: Annual Conference on Neural Information Processing Systems 2023, NeurIPS 2023, New Orleans, LA, USA, December 10 - 16, 2023},
+ url = {http://papers.nips.cc/paper_files/paper/2023/hash/a85b405ed65c6477a4fe8302b5e06ce7-Abstract-Conference.html},
+ editor = {Alice Oh and Tristan Naumann and Amir Globerson and Kate Saenko and Moritz Hardt and Sergey Levine},
+ }
+ ```
+
+ Cite TRL as:
+
+ ```bibtex
+ @misc{vonwerra2022trl,
+ title = {{TRL: Transformer Reinforcement Learning}},
+ author = {Leandro von Werra and Younes Belkada and Lewis Tunstall and Edward Beeching and Tristan Thrush and Nathan Lambert and Shengyi Huang and Kashif Rasul and Quentin Gallouédec},
+ year = 2020,
+ journal = {GitHub repository},
+ publisher = {GitHub},
+ howpublished = {\url{https://github.com/huggingface/trl}}
+ }
+ ```
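
For orientation: the DPO training the card describes comes down to a handful of TRL calls. The following is a minimal sketch, assuming the framework versions listed above (TRL 0.14); the preference dataset id is a placeholder, since the commit does not record which preference pairs were used.

```python
from datasets import load_dataset
from transformers import AutoModelForCausalLM, AutoTokenizer
from trl import DPOConfig, DPOTrainer

base = "DebateLabKIT/Phi-4-Argunaut-1-SFT"
model = AutoModelForCausalLM.from_pretrained(base, torch_dtype="bfloat16")
tokenizer = AutoTokenizer.from_pretrained(base)

# Placeholder dataset id: DPO expects "prompt"/"chosen"/"rejected" columns.
train_dataset = load_dataset("my-org/preference-pairs", split="train")

args = DPOConfig(
    output_dir="Phi-4-Argunaut-1-SPIN-dev0",
    num_train_epochs=2,  # the run logged below stopped at epoch ~1.99
)
trainer = DPOTrainer(
    model=model,                 # with no ref_model, a frozen reference copy is created internally
    args=args,
    train_dataset=train_dataset,
    processing_class=tokenizer,  # TRL >= 0.12 passes the tokenizer here
)
trainer.train()
```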
all_results.json ADDED
@@ -0,0 +1,9 @@
+ {
+ "epoch": 1.9870967741935484,
+ "total_flos": 0.0,
+ "train_loss": 0.6479313838017451,
+ "train_runtime": 1783.9929,
+ "train_samples": 4957,
+ "train_samples_per_second": 5.557,
+ "train_steps_per_second": 0.086
+ }
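
These throughput numbers admit a quick consistency check: samples-per-second divided by steps-per-second recovers the effective batch size, and throughput times runtime should roughly equal epochs times dataset size. A short sketch with the values copied from the JSON above:

```python
stats = {
    "epoch": 1.9870967741935484,
    "train_runtime": 1783.9929,  # seconds
    "train_samples": 4957,
    "train_samples_per_second": 5.557,
    "train_steps_per_second": 0.086,
}

# Samples per optimizer step = effective batch size
# (per-device batch x gradient accumulation x number of processes).
print(stats["train_samples_per_second"] / stats["train_steps_per_second"])  # ~64.6, i.e. batch 64

# Throughput x runtime vs. epochs x dataset size: these agree to within ~1%.
print(stats["train_samples_per_second"] * stats["train_runtime"])  # ~9914 samples
print(stats["epoch"] * stats["train_samples"])                     # ~9850 samples
```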
config.json ADDED
@@ -0,0 +1,32 @@
+ {
+ "_name_or_path": "DebateLabKIT/Phi-4-Argunaut-1-SFT",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 100257,
+ "eos_token_id": 100265,
+ "head_dim": 128,
+ "hidden_act": "silu",
+ "hidden_size": 5120,
+ "initializer_range": 0.02,
+ "intermediate_size": 17920,
+ "max_position_embeddings": 16384,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "num_attention_heads": 40,
+ "num_hidden_layers": 40,
+ "num_key_value_heads": 10,
+ "original_max_position_embeddings": 16384,
+ "pad_token_id": 100351,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": null,
+ "rope_theta": 250000,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.46.3",
+ "use_cache": false,
+ "vocab_size": 100352
+ }
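
Although the name says Phi-4, the config declares a plain `LlamaForCausalLM`: Phi-4 reuses the Llama architecture, here with 40 layers, 40 query heads sharing 10 KV heads, and a 100352-token vocabulary. Several fields are mutually constrained and can be sanity-checked after loading; a short sketch, assuming the repo id from the README:

```python
from transformers import AutoConfig

config = AutoConfig.from_pretrained("DebateLabKIT/Phi-4-Argunaut-1-SPIN-dev0")

# hidden_size = num_attention_heads * head_dim
assert config.hidden_size == config.num_attention_heads * config.head_dim  # 5120 = 40 * 128

# 40 query heads over 10 key/value heads -> 4-way grouped-query attention
assert config.num_attention_heads % config.num_key_value_heads == 0

print(config.model_type, config.num_hidden_layers, config.vocab_size)  # llama 40 100352
```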
generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 100257,
+ "eos_token_id": 100265,
+ "pad_token_id": 100351,
+ "transformers_version": "4.46.3"
+ }
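
The ids here resolve to the special tokens defined in `tokenizer_config.json` below: 100257 is `<|endoftext|>` (BOS), 100265 is `<|im_end|>` (EOS), and 100351 is the `<|dummy_87|>` pad token. As a quick sketch, the file can be loaded on its own:

```python
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained("DebateLabKIT/Phi-4-Argunaut-1-SPIN-dev0")
print(gen_cfg.bos_token_id, gen_cfg.eos_token_id, gen_cfg.pad_token_id)  # 100257 100265 100351
```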
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model-00001-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:542dc0ec056367ac235ed36feb6ada2ca4da71ac1104845a7b97eebc53262500
+ size 4933658528
model-00002-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c6b6663829ce9e6e093f54e3e3c4806e6eb0e4ba105fa506bf8e9a8c79febaee
+ size 4954693112
model-00003-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:84ff677b24e55c0242c1d48e019e1fd97bf6ac9c9a8a7b84e4c605a3657c51f5
+ size 4902243992
model-00004-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0f1fc12c8ba32fa554c157f7d52fb190f0c8da38683ed82c13b284f8ff858eaa
+ size 4954672440
model-00005-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:27e034961bc49c15705ad6eb0f0fc93ab010d0633480d73b87f4c5aff4671ab2
+ size 4954672432
model-00006-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:af621a9810dabaa0e50f6f276fdf036dfcc0fb1dc6cd951b1f470ae760c51b9d
+ size 4619116224
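
Each of the six entries above is a Git LFS pointer, not the weights themselves: the repo tracks only the spec version, a SHA-256 oid, and the byte size, while the 4.6-5.0 GB shards live in LFS storage. After downloading a shard, its digest should match the oid in its pointer; a verification sketch using only the standard library:

```python
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream a multi-gigabyte file through SHA-256 in 1 MiB chunks."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        while chunk := f.read(chunk_size):
            digest.update(chunk)
    return digest.hexdigest()

# oid from the model-00006 pointer above
expected = "af621a9810dabaa0e50f6f276fdf036dfcc0fb1dc6cd951b1f470ae760c51b9d"
assert sha256_of("model-00006-of-00006.safetensors") == expected
```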
model.safetensors.index.json ADDED
@@ -0,0 +1,370 @@
+ {
+ "metadata": {
+ "total_size": 29319014400
+ },
+ "weight_map": {
+ "lm_head.weight": "model-00006-of-00006.safetensors",
+ "model.embed_tokens.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.10.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.13.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.13.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.13.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.13.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.13.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.13.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.13.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.13.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.13.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.20.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.20.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.20.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.20.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.20.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.21.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.21.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.21.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.21.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.21.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.21.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.21.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.21.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.22.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.22.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.22.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.22.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.22.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.22.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.22.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.27.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.27.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.27.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.27.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.27.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.27.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.27.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.27.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.27.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.28.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.28.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.28.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.28.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.28.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.28.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.28.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.28.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.28.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.29.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.29.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.29.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.29.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.29.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.29.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.29.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.29.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.29.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.30.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.30.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.30.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.30.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.30.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.30.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.30.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.30.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.30.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.31.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.31.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.31.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.31.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.31.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.31.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.31.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.31.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.31.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.34.input_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.34.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.34.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.34.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.34.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.34.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.34.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.34.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.34.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.35.input_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.35.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.35.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.35.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.35.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.35.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.35.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.35.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.35.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.36.input_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.36.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.36.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.36.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.36.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.36.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.36.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.36.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.36.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.37.input_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.37.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.37.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.37.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.37.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.37.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.37.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.37.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.37.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.38.input_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.38.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.38.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.38.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.38.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.38.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.38.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.38.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.38.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.39.input_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.39.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.39.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.39.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.39.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.39.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.39.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.39.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.39.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.5.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.6.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.6.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.6.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.6.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.6.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.6.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.6.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.6.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.7.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.7.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.7.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.7.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.7.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.7.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.norm.weight": "model-00006-of-00006.safetensors"
+ }
+ }
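
The `weight_map` tells the loader which shard holds each tensor, and a layer can straddle a shard boundary (layer 20's attention projections sit in shard 3 while its MLP and norms sit in shard 4). Note also that `total_size` counts tensor bytes only, which is why it is slightly below the sum of the six shard file sizes: each shard carries a small safetensors header of its own. Resolving a single tensor by hand, as a sketch that assumes the shards are downloaded locally:

```python
import json
from safetensors import safe_open

with open("model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.norm.weight"
shard = index["weight_map"][name]  # -> "model-00006-of-00006.safetensors"
with safe_open(shard, framework="pt") as f:
    tensor = f.get_tensor(name)
print(tensor.shape)  # torch.Size([5120]), i.e. hidden_size
```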
special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
+ {
+ "bos_token": {
+ "content": "<|endoftext|>",
+ "lstrip": true,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|im_end|>",
+ "lstrip": true,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|dummy_87|>",
+ "lstrip": true,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false
+ },
+ "unk_token": {
+ "content": "�",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,795 @@
1
+ {
2
+ "add_prefix_space": false,
3
+ "added_tokens_decoder": {
4
+ "5809": {
5
+ "content": "�",
6
+ "lstrip": false,
7
+ "normalized": false,
8
+ "rstrip": false,
9
+ "single_word": false,
10
+ "special": true
11
+ },
12
+ "100256": {
13
+ "content": "<|dummy_0|>",
14
+ "lstrip": true,
15
+ "normalized": false,
16
+ "rstrip": true,
17
+ "single_word": false,
18
+ "special": true
19
+ },
20
+ "100257": {
21
+ "content": "<|endoftext|>",
22
+ "lstrip": true,
23
+ "normalized": false,
24
+ "rstrip": true,
25
+ "single_word": false,
26
+ "special": true
27
+ },
28
+ "100258": {
29
+ "content": "<|fim_prefix|>",
30
+ "lstrip": true,
31
+ "normalized": false,
32
+ "rstrip": true,
33
+ "single_word": false,
34
+ "special": true
35
+ },
36
+ "100259": {
37
+ "content": "<|fim_middle|>",
38
+ "lstrip": true,
39
+ "normalized": false,
40
+ "rstrip": true,
41
+ "single_word": false,
42
+ "special": true
43
+ },
44
+ "100260": {
45
+ "content": "<|fim_suffix|>",
46
+ "lstrip": true,
47
+ "normalized": false,
48
+ "rstrip": true,
49
+ "single_word": false,
50
+ "special": true
51
+ },
52
+ "100261": {
53
+ "content": "<|dummy_1|>",
54
+ "lstrip": true,
55
+ "normalized": false,
56
+ "rstrip": true,
57
+ "single_word": false,
58
+ "special": true
59
+ },
60
+ "100262": {
61
+ "content": "<|dummy_2|>",
62
+ "lstrip": true,
63
+ "normalized": false,
64
+ "rstrip": true,
65
+ "single_word": false,
66
+ "special": true
67
+ },
68
+ "100263": {
69
+ "content": "<|dummy_3|>",
70
+ "lstrip": true,
71
+ "normalized": false,
72
+ "rstrip": true,
73
+ "single_word": false,
74
+ "special": true
75
+ },
76
+ "100264": {
77
+ "content": "<|im_start|>",
78
+ "lstrip": true,
79
+ "normalized": false,
80
+ "rstrip": true,
81
+ "single_word": false,
82
+ "special": true
83
+ },
84
+ "100265": {
85
+ "content": "<|im_end|>",
86
+ "lstrip": true,
87
+ "normalized": false,
88
+ "rstrip": true,
89
+ "single_word": false,
90
+ "special": true
91
+ },
92
+ "100266": {
93
+ "content": "<|im_sep|>",
94
+ "lstrip": true,
95
+ "normalized": false,
96
+ "rstrip": true,
97
+ "single_word": false,
98
+ "special": true
99
+ },
100
+ "100267": {
101
+ "content": "<|dummy_4|>",
102
+ "lstrip": true,
103
+ "normalized": false,
104
+ "rstrip": true,
105
+ "single_word": false,
106
+ "special": true
107
+ },
108
+ "100268": {
109
+ "content": "<|dummy_5|>",
110
+ "lstrip": true,
111
+ "normalized": false,
112
+ "rstrip": true,
113
+ "single_word": false,
114
+ "special": true
115
+ },
116
+ "100269": {
117
+ "content": "<|dummy_6|>",
118
+ "lstrip": true,
119
+ "normalized": false,
120
+ "rstrip": true,
121
+ "single_word": false,
122
+ "special": true
123
+ },
124
+ "100270": {
125
+ "content": "<|dummy_7|>",
126
+ "lstrip": true,
127
+ "normalized": false,
128
+ "rstrip": true,
129
+ "single_word": false,
130
+ "special": true
131
+ },
132
+ "100271": {
133
+ "content": "<|dummy_8|>",
134
+ "lstrip": true,
135
+ "normalized": false,
136
+ "rstrip": true,
137
+ "single_word": false,
138
+ "special": true
139
+ },
140
+ "100272": {
141
+ "content": "<|dummy_9|>",
142
+ "lstrip": true,
143
+ "normalized": false,
144
+ "rstrip": true,
145
+ "single_word": false,
146
+ "special": true
147
+ },
148
+ "100273": {
149
+ "content": "<|dummy_10|>",
150
+ "lstrip": true,
151
+ "normalized": false,
152
+ "rstrip": true,
153
+ "single_word": false,
154
+ "special": true
155
+ },
156
+ "100274": {
157
+ "content": "<|dummy_11|>",
158
+ "lstrip": true,
159
+ "normalized": false,
160
+ "rstrip": true,
161
+ "single_word": false,
162
+ "special": true
163
+ },
164
+ "100275": {
165
+ "content": "<|dummy_12|>",
166
+ "lstrip": true,
167
+ "normalized": false,
168
+ "rstrip": true,
169
+ "single_word": false,
170
+ "special": true
171
+ },
172
+ "100276": {
173
+ "content": "<|endofprompt|>",
174
+ "lstrip": true,
175
+ "normalized": false,
176
+ "rstrip": true,
177
+ "single_word": false,
178
+ "special": true
179
+ },
180
+ "100277": {
181
+ "content": "<|dummy_13|>",
182
+ "lstrip": true,
183
+ "normalized": false,
184
+ "rstrip": true,
185
+ "single_word": false,
186
+ "special": true
187
+ },
188
+ "100278": {
189
+ "content": "<|dummy_14|>",
190
+ "lstrip": true,
191
+ "normalized": false,
192
+ "rstrip": true,
193
+ "single_word": false,
194
+ "special": true
195
+ },
196
+ "100279": {
197
+ "content": "<|dummy_15|>",
198
+ "lstrip": true,
199
+ "normalized": false,
200
+ "rstrip": true,
201
+ "single_word": false,
202
+ "special": true
203
+ },
204
+ "100280": {
205
+ "content": "<|dummy_16|>",
206
+ "lstrip": true,
207
+ "normalized": false,
208
+ "rstrip": true,
209
+ "single_word": false,
210
+ "special": true
211
+ },
212
+ "100281": {
213
+ "content": "<|dummy_17|>",
214
+ "lstrip": true,
215
+ "normalized": false,
216
+ "rstrip": true,
217
+ "single_word": false,
218
+ "special": true
219
+ },
220
+ "100282": {
221
+ "content": "<|dummy_18|>",
222
+ "lstrip": true,
223
+ "normalized": false,
224
+ "rstrip": true,
225
+ "single_word": false,
226
+ "special": true
227
+ },
228
+ "100283": {
229
+ "content": "<|dummy_19|>",
230
+ "lstrip": true,
231
+ "normalized": false,
232
+ "rstrip": true,
233
+ "single_word": false,
234
+ "special": true
235
+ },
236
+ "100284": {
237
+ "content": "<|dummy_20|>",
238
+ "lstrip": true,
239
+ "normalized": false,
240
+ "rstrip": true,
241
+ "single_word": false,
242
+ "special": true
243
+ },
244
+ "100285": {
245
+ "content": "<|dummy_21|>",
246
+ "lstrip": true,
247
+ "normalized": false,
248
+ "rstrip": true,
249
+ "single_word": false,
250
+ "special": true
251
+ },
252
+ "100286": {
253
+ "content": "<|dummy_22|>",
254
+ "lstrip": true,
255
+ "normalized": false,
256
+ "rstrip": true,
257
+ "single_word": false,
258
+ "special": true
259
+ },
260
+ "100287": {
261
+ "content": "<|dummy_23|>",
262
+ "lstrip": true,
263
+ "normalized": false,
264
+ "rstrip": true,
265
+ "single_word": false,
266
+ "special": true
267
+ },
268
+ "100288": {
269
+ "content": "<|dummy_24|>",
270
+ "lstrip": true,
271
+ "normalized": false,
272
+ "rstrip": true,
273
+ "single_word": false,
274
+ "special": true
275
+ },
276
+ "100289": {
277
+ "content": "<|dummy_25|>",
278
+ "lstrip": true,
279
+ "normalized": false,
280
+ "rstrip": true,
281
+ "single_word": false,
282
+ "special": true
283
+ },
284
+ "100290": {
285
+ "content": "<|dummy_26|>",
286
+ "lstrip": true,
287
+ "normalized": false,
288
+ "rstrip": true,
289
+ "single_word": false,
290
+ "special": true
291
+ },
292
+ "100291": {
293
+ "content": "<|dummy_27|>",
294
+ "lstrip": true,
295
+ "normalized": false,
296
+ "rstrip": true,
297
+ "single_word": false,
298
+ "special": true
299
+ },
300
+ "100292": {
301
+ "content": "<|dummy_28|>",
302
+ "lstrip": true,
303
+ "normalized": false,
304
+ "rstrip": true,
305
+ "single_word": false,
306
+ "special": true
307
+ },
308
+ "100293": {
309
+ "content": "<|dummy_29|>",
310
+ "lstrip": true,
311
+ "normalized": false,
312
+ "rstrip": true,
313
+ "single_word": false,
314
+ "special": true
315
+ },
316
+ "100294": {
317
+ "content": "<|dummy_30|>",
318
+ "lstrip": true,
319
+ "normalized": false,
320
+ "rstrip": true,
321
+ "single_word": false,
322
+ "special": true
323
+ },
324
+ "100295": {
325
+ "content": "<|dummy_31|>",
326
+ "lstrip": true,
327
+ "normalized": false,
328
+ "rstrip": true,
329
+ "single_word": false,
330
+ "special": true
331
+ },
332
+ "100296": {
333
+ "content": "<|dummy_32|>",
334
+ "lstrip": true,
335
+ "normalized": false,
336
+ "rstrip": true,
337
+ "single_word": false,
338
+ "special": true
339
+ },
340
+ "100297": {
341
+ "content": "<|dummy_33|>",
342
+ "lstrip": true,
343
+ "normalized": false,
344
+ "rstrip": true,
345
+ "single_word": false,
346
+ "special": true
347
+ },
348
+ "100298": {
349
+ "content": "<|dummy_34|>",
350
+ "lstrip": true,
351
+ "normalized": false,
352
+ "rstrip": true,
353
+ "single_word": false,
354
+ "special": true
355
+ },
356
+ "100299": {
357
+ "content": "<|dummy_35|>",
358
+ "lstrip": true,
359
+ "normalized": false,
360
+ "rstrip": true,
361
+ "single_word": false,
362
+ "special": true
363
+ },
364
+ "100300": {
365
+ "content": "<|dummy_36|>",
366
+ "lstrip": true,
367
+ "normalized": false,
368
+ "rstrip": true,
369
+ "single_word": false,
370
+ "special": true
371
+ },
372
+ "100301": {
373
+ "content": "<|dummy_37|>",
374
+ "lstrip": true,
375
+ "normalized": false,
376
+ "rstrip": true,
377
+ "single_word": false,
378
+ "special": true
379
+ },
380
+ "100302": {
381
+ "content": "<|dummy_38|>",
382
+ "lstrip": true,
383
+ "normalized": false,
384
+ "rstrip": true,
385
+ "single_word": false,
386
+ "special": true
387
+ },
388
+ "100303": {
389
+ "content": "<|dummy_39|>",
390
+ "lstrip": true,
391
+ "normalized": false,
392
+ "rstrip": true,
393
+ "single_word": false,
394
+ "special": true
395
+ },
396
+ "100304": {
397
+ "content": "<|dummy_40|>",
398
+ "lstrip": true,
399
+ "normalized": false,
400
+ "rstrip": true,
401
+ "single_word": false,
402
+ "special": true
403
+ },
404
+ "100305": {
405
+ "content": "<|dummy_41|>",
406
+ "lstrip": true,
407
+ "normalized": false,
408
+ "rstrip": true,
409
+ "single_word": false,
410
+ "special": true
411
+ },
412
+ "100306": {
413
+ "content": "<|dummy_42|>",
414
+ "lstrip": true,
415
+ "normalized": false,
416
+ "rstrip": true,
417
+ "single_word": false,
418
+ "special": true
419
+ },
420
+ "100307": {
421
+ "content": "<|dummy_43|>",
422
+ "lstrip": true,
423
+ "normalized": false,
424
+ "rstrip": true,
425
+ "single_word": false,
426
+ "special": true
427
+ },
428
+ "100308": {
429
+ "content": "<|dummy_44|>",
430
+ "lstrip": true,
431
+ "normalized": false,
432
+ "rstrip": true,
433
+ "single_word": false,
434
+ "special": true
435
+ },
436
+ "100309": {
437
+ "content": "<|dummy_45|>",
438
+ "lstrip": true,
439
+ "normalized": false,
440
+ "rstrip": true,
441
+ "single_word": false,
442
+ "special": true
443
+ },
444
+ "100310": {
445
+ "content": "<|dummy_46|>",
446
+ "lstrip": true,
447
+ "normalized": false,
448
+ "rstrip": true,
449
+ "single_word": false,
450
+ "special": true
451
+ },
452
+ "100311": {
453
+ "content": "<|dummy_47|>",
454
+ "lstrip": true,
455
+ "normalized": false,
456
+ "rstrip": true,
457
+ "single_word": false,
458
+ "special": true
459
+ },
460
+ "100312": {
461
+ "content": "<|dummy_48|>",
462
+ "lstrip": true,
463
+ "normalized": false,
464
+ "rstrip": true,
465
+ "single_word": false,
466
+ "special": true
467
+ },
468
+ "100313": {
469
+ "content": "<|dummy_49|>",
470
+ "lstrip": true,
471
+ "normalized": false,
472
+ "rstrip": true,
473
+ "single_word": false,
474
+ "special": true
475
+ },
476
+ "100314": {
477
+ "content": "<|dummy_50|>",
478
+ "lstrip": true,
479
+ "normalized": false,
480
+ "rstrip": true,
481
+ "single_word": false,
482
+ "special": true
483
+ },
484
+ "100315": {
485
+ "content": "<|dummy_51|>",
486
+ "lstrip": true,
487
+ "normalized": false,
488
+ "rstrip": true,
489
+ "single_word": false,
490
+ "special": true
491
+ },
492
+ "100316": {
493
+ "content": "<|dummy_52|>",
494
+ "lstrip": true,
495
+ "normalized": false,
496
+ "rstrip": true,
497
+ "single_word": false,
498
+ "special": true
499
+ },
500
+ "100317": {
501
+ "content": "<|dummy_53|>",
502
+ "lstrip": true,
503
+ "normalized": false,
504
+ "rstrip": true,
505
+ "single_word": false,
506
+ "special": true
507
+ },
508
+ "100318": {
509
+ "content": "<|dummy_54|>",
510
+ "lstrip": true,
511
+ "normalized": false,
512
+ "rstrip": true,
513
+ "single_word": false,
514
+ "special": true
515
+ },
516
+ "100319": {
517
+ "content": "<|dummy_55|>",
518
+ "lstrip": true,
519
+ "normalized": false,
520
+ "rstrip": true,
521
+ "single_word": false,
522
+ "special": true
523
+ },
524
+ "100320": {
525
+ "content": "<|dummy_56|>",
526
+ "lstrip": true,
527
+ "normalized": false,
528
+ "rstrip": true,
529
+ "single_word": false,
530
+ "special": true
531
+ },
532
+ "100321": {
533
+ "content": "<|dummy_57|>",
534
+ "lstrip": true,
535
+ "normalized": false,
536
+ "rstrip": true,
537
+ "single_word": false,
538
+ "special": true
539
+ },
540
+ "100322": {
541
+ "content": "<|dummy_58|>",
542
+ "lstrip": true,
543
+ "normalized": false,
544
+ "rstrip": true,
545
+ "single_word": false,
546
+ "special": true
547
+ },
548
+ "100323": {
549
+ "content": "<|dummy_59|>",
550
+ "lstrip": true,
551
+ "normalized": false,
552
+ "rstrip": true,
553
+ "single_word": false,
554
+ "special": true
555
+ },
556
+ "100324": {
557
+ "content": "<|dummy_60|>",
558
+ "lstrip": true,
559
+ "normalized": false,
560
+ "rstrip": true,
561
+ "single_word": false,
562
+ "special": true
563
+ },
564
+ "100325": {
565
+ "content": "<|dummy_61|>",
566
+ "lstrip": true,
567
+ "normalized": false,
568
+ "rstrip": true,
569
+ "single_word": false,
570
+ "special": true
571
+ },
572
+ "100326": {
573
+ "content": "<|dummy_62|>",
574
+ "lstrip": true,
575
+ "normalized": false,
576
+ "rstrip": true,
577
+ "single_word": false,
578
+ "special": true
579
+ },
580
+ "100327": {
581
+ "content": "<|dummy_63|>",
582
+ "lstrip": true,
583
+ "normalized": false,
584
+ "rstrip": true,
585
+ "single_word": false,
586
+ "special": true
587
+ },
588
+ "100328": {
589
+ "content": "<|dummy_64|>",
590
+ "lstrip": true,
591
+ "normalized": false,
592
+ "rstrip": true,
593
+ "single_word": false,
594
+ "special": true
595
+ },
596
+ "100329": {
597
+ "content": "<|dummy_65|>",
598
+ "lstrip": true,
599
+ "normalized": false,
600
+ "rstrip": true,
601
+ "single_word": false,
602
+ "special": true
603
+ },
604
+ "100330": {
605
+ "content": "<|dummy_66|>",
+ "lstrip": true,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false,
+ "special": true
+ },
+ "100331": {
+ "content": "<|dummy_67|>",
+ "lstrip": true,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false,
+ "special": true
+ },
+ "100332": {
+ "content": "<|dummy_68|>",
+ "lstrip": true,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false,
+ "special": true
+ },
+ "100333": {
+ "content": "<|dummy_69|>",
+ "lstrip": true,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false,
+ "special": true
+ },
+ "100334": {
+ "content": "<|dummy_70|>",
+ "lstrip": true,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false,
+ "special": true
+ },
+ "100335": {
+ "content": "<|dummy_71|>",
+ "lstrip": true,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false,
+ "special": true
+ },
+ "100336": {
+ "content": "<|dummy_72|>",
+ "lstrip": true,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false,
+ "special": true
+ },
+ "100337": {
+ "content": "<|dummy_73|>",
+ "lstrip": true,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false,
+ "special": true
+ },
+ "100338": {
+ "content": "<|dummy_74|>",
+ "lstrip": true,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false,
+ "special": true
+ },
+ "100339": {
+ "content": "<|dummy_75|>",
+ "lstrip": true,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false,
+ "special": true
+ },
+ "100340": {
+ "content": "<|dummy_76|>",
+ "lstrip": true,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false,
+ "special": true
+ },
+ "100341": {
+ "content": "<|dummy_77|>",
+ "lstrip": true,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false,
+ "special": true
+ },
+ "100342": {
+ "content": "<|dummy_78|>",
+ "lstrip": true,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false,
+ "special": true
+ },
+ "100343": {
+ "content": "<|dummy_79|>",
+ "lstrip": true,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false,
+ "special": true
+ },
+ "100344": {
+ "content": "<|dummy_80|>",
+ "lstrip": true,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false,
+ "special": true
+ },
+ "100345": {
+ "content": "<|dummy_81|>",
+ "lstrip": true,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false,
+ "special": true
+ },
+ "100346": {
+ "content": "<|dummy_82|>",
+ "lstrip": true,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false,
+ "special": true
+ },
+ "100347": {
+ "content": "<|dummy_83|>",
+ "lstrip": true,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false,
+ "special": true
+ },
+ "100348": {
+ "content": "<|dummy_84|>",
+ "lstrip": true,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false,
+ "special": true
+ },
+ "100349": {
+ "content": "<|dummy_85|>",
+ "lstrip": true,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false,
+ "special": true
+ },
+ "100350": {
+ "content": "<|dummy_86|>",
+ "lstrip": true,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false,
+ "special": true
+ },
+ "100351": {
+ "content": "<|dummy_87|>",
+ "lstrip": true,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "<|endoftext|>",
+ "chat_template": "{%- if tools %}\n {{- '<|im_start|>system<|im_sep|>' }}\n {%- if messages[0]['role'] == 'system' %}\n {{- messages[0]['content'] }}\n {%- else %}\n {{- 'You are Argunaut, created by DebateLab@KIT. You are a helpful assistant.' }}\n {%- endif %}\n {{- \"\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n {%- for tool in tools %}\n {{- \"\\n\" }}\n {{- tool | tojson }}\n {%- endfor %}\n {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}\\n</tool_call><|im_end|>\" }}\n{%- else %}\n {%- if messages[0]['role'] == 'system' %}\n {{- '<|im_start|>system<|im_sep|>' + messages[0]['content'] + '<|im_end|>' }}\n {%- else %}\n {{- '<|im_start|>system<|im_sep|>{system_message}<|im_end|>' }}\n {%- endif %}\n{%- endif %}\n{%- for message in messages %}\n {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\n {{- '<|im_start|>' + message.role + '<|im_sep|>' + message.content + '<|im_end|>' }}\n {%- elif message.role == \"assistant\" %}\n {{- '<|im_start|>' + message.role + '<|im_sep|>'}}\n {%- if message.content %}\n {{- message.content }}\n {%- endif %}\n {%- for tool_call in message.tool_calls %}\n {%- if tool_call.function is defined %}\n {%- set tool_call = tool_call.function %}\n {%- endif %}\n {{- '\\n<tool_call>\\n{\"name\": \"' }}\n {{- tool_call.name }}\n {{- '\", \"arguments\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- '}\\n</tool_call>' }}\n {%- endfor %}\n {{- '<|im_end|>' }}\n {%- elif message.role == \"tool\" %}\n {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %} {{- '<|im_start|>user<|im_sep|>' }}\n {%- endif %}\n {{- '\\n<tool_response>\\n' }}\n {{- message.content }}\n {{- '\\n</tool_response>' }}\n {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n {{- '<|im_end|>' }}\n {%- endif %}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|im_start|>assistant<|im_sep|>' }}\n{%- endif %}\n",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "<|im_end|>",
+ "extra_special_tokens": {},
+ "max_length": 8192,
+ "model_max_length": 16384,
+ "pad_token": "<|dummy_87|>",
+ "padding_side": "left",
+ "stride": 0,
+ "tokenizer_class": "GPT2Tokenizer",
+ "truncation_side": "right",
+ "truncation_strategy": "longest_first",
+ "unk_token": "�"
+ }
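The settings above define a ChatML-style format: turns are wrapped as `<|im_start|>{role}<|im_sep|>{content}<|im_end|>`, `<|im_end|>` serves as EOS, and batching uses left padding with the reserved `<|dummy_87|>` token. The following minimal sketch (assuming the checkpoint is loadable under the repo id from the model card, or a local path) shows the template and padding in action:

```python
from transformers import AutoTokenizer

# Assumes the repo id from the model card; swap in a local path if needed.
tokenizer = AutoTokenizer.from_pretrained("DebateLabKIT/Phi-4-Argunaut-1-SPIN-dev0")

messages = [{"role": "user", "content": "Name one argument in favour of carbon taxes."}]

# The chat template wraps each turn in <|im_start|>...<|im_end|>;
# add_generation_prompt=True appends the assistant header for generation.
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)

# Left padding with <|dummy_87|> matches the "padding_side"/"pad_token"
# settings above, which is what batched decoder-only generation expects.
batch = tokenizer([prompt, prompt + "Hi"], padding=True, return_tensors="pt")
print(batch["input_ids"].shape, tokenizer.pad_token)
```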
train_results.json ADDED
@@ -0,0 +1,9 @@
+ {
+ "epoch": 1.9870967741935484,
+ "total_flos": 0.0,
+ "train_loss": 0.6479313838017451,
+ "train_runtime": 1783.9929,
+ "train_samples": 4957,
+ "train_samples_per_second": 5.557,
+ "train_steps_per_second": 0.086
+ }
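These aggregates are internally consistent: at ~2 epochs over 4957 preference pairs, samples/sec times runtime should land near 2 × 4957, and steps/sec times runtime near the 154 optimizer steps recorded in trainer_state.json below. A quick back-of-the-envelope check (values copied from the file above, not part of the commit):

```python
# Sanity-check the aggregate training stats reported in train_results.json.
train_runtime = 1783.9929        # seconds
samples_per_second = 5.557
steps_per_second = 0.086
train_samples = 4957

print(samples_per_second * train_runtime)  # ~9913.6, close to 2 * 4957 -> roughly two epochs
print(steps_per_second * train_runtime)    # ~153.4, matches max_steps = 154
```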
trainer_state.json ADDED
@@ -0,0 +1,492 @@
+ {
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 1.9870967741935484,
+ "eval_steps": 500,
+ "global_step": 154,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.06451612903225806,
+ "grad_norm": 27.289285780817472,
+ "learning_rate": 1.25e-07,
+ "logits/chosen": NaN,
+ "logits/rejected": -1.731054663658142,
+ "logps/chosen": -201.2937469482422,
+ "logps/rejected": -252.0500030517578,
+ "loss": 0.6905,
+ "rewards/accuracies": 0.203125,
+ "rewards/chosen": 0.004031372256577015,
+ "rewards/margins": 0.005436515901237726,
+ "rewards/rejected": -0.00140466692391783,
+ "step": 5
+ },
+ {
+ "epoch": 0.12903225806451613,
+ "grad_norm": 251.04300037826727,
+ "learning_rate": 1.9726027397260271e-07,
+ "logits/chosen": NaN,
+ "logits/rejected": NaN,
+ "logps/chosen": -223.9875030517578,
+ "logps/rejected": -326.3687438964844,
+ "loss": 0.7008,
+ "rewards/accuracies": 0.2874999940395355,
+ "rewards/chosen": -0.001165628433227539,
+ "rewards/margins": -0.00862197857350111,
+ "rewards/rejected": 0.007415962405502796,
+ "step": 10
+ },
+ {
+ "epoch": 0.1935483870967742,
+ "grad_norm": 104.00994364166638,
+ "learning_rate": 1.9041095890410958e-07,
+ "logits/chosen": NaN,
+ "logits/rejected": NaN,
+ "logps/chosen": -217.40625,
+ "logps/rejected": -252.0593719482422,
+ "loss": 0.6939,
+ "rewards/accuracies": 0.3531250059604645,
+ "rewards/chosen": 0.022618770599365234,
+ "rewards/margins": 0.00520400982350111,
+ "rewards/rejected": 0.0174699779599905,
+ "step": 15
+ },
+ {
+ "epoch": 0.25806451612903225,
+ "grad_norm": 131.0274487253917,
+ "learning_rate": 1.8356164383561644e-07,
+ "logits/chosen": NaN,
+ "logits/rejected": NaN,
+ "logps/chosen": -200.5749969482422,
+ "logps/rejected": -295.1937561035156,
+ "loss": 0.6871,
+ "rewards/accuracies": 0.4375,
+ "rewards/chosen": 0.04545478895306587,
+ "rewards/margins": 0.025292014703154564,
+ "rewards/rejected": 0.020198345184326172,
+ "step": 20
+ },
+ {
+ "epoch": 0.3225806451612903,
+ "grad_norm": 36.61386061695541,
+ "learning_rate": 1.7671232876712328e-07,
+ "logits/chosen": NaN,
+ "logits/rejected": NaN,
+ "logps/chosen": -227.3249969482422,
+ "logps/rejected": -285.34375,
+ "loss": 0.6877,
+ "rewards/accuracies": 0.390625,
+ "rewards/chosen": 0.017041301354765892,
+ "rewards/margins": 0.011513328179717064,
+ "rewards/rejected": 0.005533790681511164,
+ "step": 25
+ },
+ {
+ "epoch": 0.3870967741935484,
+ "grad_norm": 140.34939073625793,
+ "learning_rate": 1.698630136986301e-07,
+ "logits/chosen": NaN,
+ "logits/rejected": NaN,
+ "logps/chosen": -225.6875,
+ "logps/rejected": -300.26873779296875,
+ "loss": 0.6871,
+ "rewards/accuracies": 0.4437499940395355,
+ "rewards/chosen": 0.015233516693115234,
+ "rewards/margins": 0.016741562634706497,
+ "rewards/rejected": -0.0015290260780602694,
+ "step": 30
+ },
+ {
+ "epoch": 0.45161290322580644,
+ "grad_norm": 63.75896766493631,
+ "learning_rate": 1.6301369863013698e-07,
+ "logits/chosen": NaN,
+ "logits/rejected": NaN,
+ "logps/chosen": -219.99374389648438,
+ "logps/rejected": -243.10000610351562,
+ "loss": 0.675,
+ "rewards/accuracies": 0.546875,
+ "rewards/chosen": 0.05904693529009819,
+ "rewards/margins": 0.04693260043859482,
+ "rewards/rejected": 0.012106895446777344,
+ "step": 35
+ },
+ {
+ "epoch": 0.5161290322580645,
+ "grad_norm": 184.99755958111177,
+ "learning_rate": 1.5616438356164384e-07,
+ "logits/chosen": NaN,
+ "logits/rejected": -1.765039086341858,
+ "logps/chosen": -217.19375610351562,
+ "logps/rejected": -306.0687561035156,
+ "loss": 0.675,
+ "rewards/accuracies": 0.5062500238418579,
+ "rewards/chosen": 0.04494018480181694,
+ "rewards/margins": 0.03857250139117241,
+ "rewards/rejected": 0.006308078765869141,
+ "step": 40
+ },
+ {
+ "epoch": 0.5806451612903226,
+ "grad_norm": 21.657122426298795,
+ "learning_rate": 1.4931506849315067e-07,
+ "logits/chosen": NaN,
+ "logits/rejected": NaN,
+ "logps/chosen": -222.03750610351562,
+ "logps/rejected": -234.1531219482422,
+ "loss": 0.6734,
+ "rewards/accuracies": 0.53125,
+ "rewards/chosen": 0.03910064697265625,
+ "rewards/margins": 0.04501190036535263,
+ "rewards/rejected": -0.005856895353645086,
+ "step": 45
+ },
+ {
+ "epoch": 0.6451612903225806,
+ "grad_norm": 78.33315305508819,
+ "learning_rate": 1.424657534246575e-07,
+ "logits/chosen": NaN,
+ "logits/rejected": NaN,
+ "logps/chosen": -219.0437469482422,
+ "logps/rejected": -278.5062561035156,
+ "loss": 0.6759,
+ "rewards/accuracies": 0.5,
+ "rewards/chosen": 0.07380714267492294,
+ "rewards/margins": 0.04411773756146431,
+ "rewards/rejected": 0.029708098620176315,
+ "step": 50
+ },
+ {
+ "epoch": 0.7096774193548387,
+ "grad_norm": 72.46159716165603,
+ "learning_rate": 1.356164383561644e-07,
+ "logits/chosen": NaN,
+ "logits/rejected": NaN,
+ "logps/chosen": -196.640625,
+ "logps/rejected": -235.4812469482422,
+ "loss": 0.655,
+ "rewards/accuracies": 0.550000011920929,
+ "rewards/chosen": 0.09041748195886612,
+ "rewards/margins": 0.08573532104492188,
+ "rewards/rejected": 0.004653358366340399,
+ "step": 55
+ },
+ {
+ "epoch": 0.7741935483870968,
+ "grad_norm": 68.67285889232319,
+ "learning_rate": 1.2876712328767124e-07,
+ "logits/chosen": NaN,
+ "logits/rejected": NaN,
+ "logps/chosen": -219.3125,
+ "logps/rejected": -265.66876220703125,
+ "loss": 0.6779,
+ "rewards/accuracies": 0.48750001192092896,
+ "rewards/chosen": 0.0799354538321495,
+ "rewards/margins": 0.04260139539837837,
+ "rewards/rejected": 0.037416838109493256,
+ "step": 60
+ },
+ {
+ "epoch": 0.8387096774193549,
+ "grad_norm": 51.38797079036038,
+ "learning_rate": 1.2191780821917807e-07,
+ "logits/chosen": NaN,
+ "logits/rejected": NaN,
+ "logps/chosen": -202.9250030517578,
+ "logps/rejected": -247.3125,
+ "loss": 0.6436,
+ "rewards/accuracies": 0.6031249761581421,
+ "rewards/chosen": 0.11755065619945526,
+ "rewards/margins": 0.10801849514245987,
+ "rewards/rejected": 0.009516906924545765,
+ "step": 65
+ },
+ {
+ "epoch": 0.9032258064516129,
+ "grad_norm": 17.570407250605022,
+ "learning_rate": 1.1506849315068492e-07,
+ "logits/chosen": NaN,
+ "logits/rejected": NaN,
+ "logps/chosen": -177.94686889648438,
+ "logps/rejected": -210.5906219482422,
+ "loss": 0.6623,
+ "rewards/accuracies": 0.578125,
+ "rewards/chosen": 0.12347412109375,
+ "rewards/margins": 0.08150939643383026,
+ "rewards/rejected": 0.04202079772949219,
+ "step": 70
+ },
+ {
+ "epoch": 0.967741935483871,
+ "grad_norm": 20.043471047023353,
+ "learning_rate": 1.0821917808219178e-07,
+ "logits/chosen": NaN,
+ "logits/rejected": NaN,
+ "logps/chosen": -211.6374969482422,
+ "logps/rejected": -219.0625,
+ "loss": 0.6478,
+ "rewards/accuracies": 0.543749988079071,
+ "rewards/chosen": 0.1253662109375,
+ "rewards/margins": 0.10564498603343964,
+ "rewards/rejected": 0.019800186157226562,
+ "step": 75
+ },
+ {
+ "epoch": 1.032258064516129,
+ "grad_norm": 181.9806965524942,
+ "learning_rate": 1.0136986301369863e-07,
+ "logits/chosen": NaN,
+ "logits/rejected": NaN,
+ "logps/chosen": -201.86874389648438,
+ "logps/rejected": -286.6625061035156,
+ "loss": 0.6312,
+ "rewards/accuracies": 0.612500011920929,
+ "rewards/chosen": 0.160227969288826,
+ "rewards/margins": 0.15892334282398224,
+ "rewards/rejected": 0.0013935088645666838,
+ "step": 80
+ },
+ {
+ "epoch": 1.096774193548387,
+ "grad_norm": 226.16471597538276,
+ "learning_rate": 9.452054794520547e-08,
+ "logits/chosen": NaN,
+ "logits/rejected": NaN,
+ "logps/chosen": -203.13436889648438,
+ "logps/rejected": -286.20001220703125,
+ "loss": 0.6379,
+ "rewards/accuracies": 0.6000000238418579,
+ "rewards/chosen": 0.15032806992530823,
+ "rewards/margins": 0.13654594123363495,
+ "rewards/rejected": 0.013786124996840954,
+ "step": 85
+ },
+ {
+ "epoch": 1.1612903225806452,
+ "grad_norm": 49.30845933718566,
+ "learning_rate": 8.767123287671232e-08,
+ "logits/chosen": NaN,
+ "logits/rejected": NaN,
+ "logps/chosen": -203.625,
+ "logps/rejected": -219.5500030517578,
+ "loss": 0.6366,
+ "rewards/accuracies": 0.6156250238418579,
+ "rewards/chosen": 0.16707763075828552,
+ "rewards/margins": 0.14001694321632385,
+ "rewards/rejected": 0.027159880846738815,
+ "step": 90
+ },
+ {
+ "epoch": 1.2258064516129032,
+ "grad_norm": 13.702653617386117,
+ "learning_rate": 8.082191780821918e-08,
+ "logits/chosen": NaN,
+ "logits/rejected": NaN,
+ "logps/chosen": -207.1875,
+ "logps/rejected": -250.50625610351562,
+ "loss": 0.618,
+ "rewards/accuracies": 0.6343749761581421,
+ "rewards/chosen": 0.19346924126148224,
+ "rewards/margins": 0.18638305366039276,
+ "rewards/rejected": 0.007043170742690563,
+ "step": 95
+ },
+ {
+ "epoch": 1.2903225806451613,
+ "grad_norm": 35.11393292174802,
+ "learning_rate": 7.397260273972602e-08,
+ "logits/chosen": NaN,
+ "logits/rejected": NaN,
+ "logps/chosen": -219.03125,
+ "logps/rejected": -224.34375,
+ "loss": 0.6314,
+ "rewards/accuracies": 0.590624988079071,
+ "rewards/chosen": 0.18589934706687927,
+ "rewards/margins": 0.15184783935546875,
+ "rewards/rejected": 0.034147072583436966,
+ "step": 100
+ },
+ {
+ "epoch": 1.3548387096774195,
+ "grad_norm": 18.703710157148258,
+ "learning_rate": 6.712328767123288e-08,
+ "logits/chosen": NaN,
+ "logits/rejected": NaN,
+ "logps/chosen": -241.578125,
+ "logps/rejected": -254.94375610351562,
+ "loss": 0.6358,
+ "rewards/accuracies": 0.609375,
+ "rewards/chosen": 0.18818970024585724,
+ "rewards/margins": 0.14898376166820526,
+ "rewards/rejected": 0.03929481655359268,
+ "step": 105
+ },
+ {
+ "epoch": 1.4193548387096775,
+ "grad_norm": 101.2488344322131,
+ "learning_rate": 6.027397260273972e-08,
+ "logits/chosen": NaN,
+ "logits/rejected": NaN,
+ "logps/chosen": -221.3312530517578,
+ "logps/rejected": -232.75,
+ "loss": 0.6348,
+ "rewards/accuracies": 0.59375,
+ "rewards/chosen": 0.18128661811351776,
+ "rewards/margins": 0.14649733901023865,
+ "rewards/rejected": 0.03501739352941513,
+ "step": 110
+ },
+ {
+ "epoch": 1.4838709677419355,
+ "grad_norm": 203.68501398113497,
+ "learning_rate": 5.342465753424657e-08,
+ "logits/chosen": NaN,
+ "logits/rejected": NaN,
+ "logps/chosen": -204.66250610351562,
+ "logps/rejected": -296.0874938964844,
+ "loss": 0.6092,
+ "rewards/accuracies": 0.643750011920929,
+ "rewards/chosen": 0.2092742919921875,
+ "rewards/margins": 0.20909729599952698,
+ "rewards/rejected": 0.00017089843458961695,
+ "step": 115
+ },
+ {
+ "epoch": 1.5483870967741935,
+ "grad_norm": 178.53103020067346,
+ "learning_rate": 4.657534246575342e-08,
+ "logits/chosen": NaN,
+ "logits/rejected": NaN,
+ "logps/chosen": -222.47811889648438,
+ "logps/rejected": -294.98748779296875,
+ "loss": 0.6212,
+ "rewards/accuracies": 0.6156250238418579,
+ "rewards/chosen": 0.20711669325828552,
+ "rewards/margins": 0.18365783989429474,
+ "rewards/rejected": 0.023543357849121094,
+ "step": 120
+ },
+ {
+ "epoch": 1.6129032258064515,
+ "grad_norm": 57.639206165539655,
+ "learning_rate": 3.972602739726027e-08,
+ "logits/chosen": NaN,
+ "logits/rejected": NaN,
+ "logps/chosen": -202.88125610351562,
+ "logps/rejected": -257.28125,
+ "loss": 0.6202,
+ "rewards/accuracies": 0.6000000238418579,
+ "rewards/chosen": 0.19972380995750427,
+ "rewards/margins": 0.18543700873851776,
+ "rewards/rejected": 0.014235305599868298,
+ "step": 125
+ },
+ {
+ "epoch": 1.6774193548387095,
+ "grad_norm": 177.42127109852333,
+ "learning_rate": 3.287671232876712e-08,
+ "logits/chosen": NaN,
+ "logits/rejected": NaN,
+ "logps/chosen": -191.9499969482422,
+ "logps/rejected": -254.09375,
+ "loss": 0.6162,
+ "rewards/accuracies": 0.6187499761581421,
+ "rewards/chosen": 0.23677368462085724,
+ "rewards/margins": 0.1921844482421875,
+ "rewards/rejected": 0.044641874730587006,
+ "step": 130
+ },
+ {
+ "epoch": 1.7419354838709677,
+ "grad_norm": 86.28470255174987,
+ "learning_rate": 2.602739726027397e-08,
+ "logits/chosen": NaN,
+ "logits/rejected": NaN,
+ "logps/chosen": -220.6374969482422,
+ "logps/rejected": -291.5687561035156,
+ "loss": 0.6146,
+ "rewards/accuracies": 0.612500011920929,
+ "rewards/chosen": 0.21770019829273224,
+ "rewards/margins": 0.19477081298828125,
+ "rewards/rejected": 0.0229034423828125,
+ "step": 135
+ },
+ {
+ "epoch": 1.8064516129032258,
+ "grad_norm": 20.45663584135668,
+ "learning_rate": 1.917808219178082e-08,
+ "logits/chosen": NaN,
+ "logits/rejected": NaN,
+ "logps/chosen": -213.7937469482422,
+ "logps/rejected": -283.7875061035156,
+ "loss": 0.6108,
+ "rewards/accuracies": 0.6187499761581421,
+ "rewards/chosen": 0.20465087890625,
+ "rewards/margins": 0.19698485732078552,
+ "rewards/rejected": 0.0077342987060546875,
+ "step": 140
+ },
+ {
+ "epoch": 1.870967741935484,
+ "grad_norm": 115.93768575577776,
+ "learning_rate": 1.232876712328767e-08,
+ "logits/chosen": NaN,
+ "logits/rejected": NaN,
+ "logps/chosen": -204.1999969482422,
+ "logps/rejected": -290.3687438964844,
+ "loss": 0.6114,
+ "rewards/accuracies": 0.606249988079071,
+ "rewards/chosen": 0.21126098930835724,
+ "rewards/margins": 0.19959107041358948,
+ "rewards/rejected": 0.01172027550637722,
+ "step": 145
+ },
+ {
+ "epoch": 1.935483870967742,
+ "grad_norm": 43.38211794873351,
+ "learning_rate": 5.47945205479452e-09,
+ "logits/chosen": NaN,
+ "logits/rejected": NaN,
+ "logps/chosen": -217.8937530517578,
+ "logps/rejected": -287.4906311035156,
+ "loss": 0.6101,
+ "rewards/accuracies": 0.6312500238418579,
+ "rewards/chosen": 0.22992248833179474,
+ "rewards/margins": 0.22356566786766052,
+ "rewards/rejected": 0.006373024079948664,
+ "step": 150
+ },
+ {
+ "epoch": 1.9870967741935484,
+ "step": 154,
+ "total_flos": 0.0,
+ "train_loss": 0.6479313838017451,
+ "train_runtime": 1783.9929,
+ "train_samples_per_second": 5.557,
+ "train_steps_per_second": 0.086
+ }
+ ],
+ "logging_steps": 5,
+ "max_steps": 154,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 2,
+ "save_steps": 50,
+ "stateful_callbacks": {
+ "TrainerControl": {
+ "args": {
+ "should_epoch_stop": false,
+ "should_evaluate": false,
+ "should_log": false,
+ "should_save": false,
+ "should_training_stop": false
+ },
+ "attributes": {}
+ }
+ },
+ "total_flos": 0.0,
+ "train_batch_size": 2,
+ "trial_name": null,
+ "trial_params": null
+ }
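The log shows the trajectory one would expect from DPO training: loss falls from ~0.69 toward ~0.61 while reward margins and accuracies climb above chance. A minimal sketch for inspecting this log offline, assuming trainer_state.json has been downloaded locally (note that Python's json module tolerates the bare NaN values in "logits/chosen" by default, parsing them as float('nan')):

```python
import json

with open("trainer_state.json") as f:
    state = json.load(f)

for entry in state["log_history"]:
    if "rewards/chosen" not in entry:  # skip the final aggregate entry
        continue
    # In TRL's DPO logs, margins track chosen minus rejected rewards
    # (approximately, up to per-batch averaging noise).
    margin = entry["rewards/chosen"] - entry["rewards/rejected"]
    print(entry["step"], entry["loss"], round(margin, 4), entry["rewards/accuracies"])
```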
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f8bae1139873054138f7c769c3ec3fe88ce8b9829ad976f429dbca9bd65269b9
+ size 7672
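training_args.bin is committed as a Git LFS pointer: only the `version`/`oid`/`size` fields live in the repo, while the 7.7 kB binary itself is stored in LFS. A small, hypothetical sketch of parsing such a pointer file (key-value lines per the spec URL above):

```python
# Hypothetical helper: a Git LFS pointer is plain text with "key value" lines.
def parse_lfs_pointer(path: str) -> dict:
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

print(parse_lfs_pointer("training_args.bin"))
# e.g. {'version': 'https://git-lfs.github.com/spec/v1', 'oid': 'sha256:f8ba...', 'size': '7672'}
```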
vocab.json ADDED
The diff for this file is too large to render.