Upload folder using huggingface_hub
- .gitattributes +1 -0
- README.md +69 -0
- all_results.json +12 -0
- config.json +29 -0
- eval_results.json +7 -0
- generation_config.json +9 -0
- model-00001-of-00006.safetensors +3 -0
- model-00002-of-00006.safetensors +3 -0
- model-00003-of-00006.safetensors +3 -0
- model-00004-of-00006.safetensors +3 -0
- model-00005-of-00006.safetensors +3 -0
- model-00006-of-00006.safetensors +3 -0
- model.safetensors.index.json +586 -0
- special_tokens_map.json +23 -0
- tokenizer.json +3 -0
- tokenizer_config.json +204 -0
- train_results.json +8 -0
- trainer_log.jsonl +155 -0
- trainer_state.json +1125 -0
- training_args.bin +3 -0
- training_eval_loss.png +0 -0
- training_loss.png +0 -0
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,69 @@
+---
+library_name: transformers
+license: other
+base_model: deepseek-ai/DeepSeek-R1-Distill-Qwen-14B
+tags:
+- llama-factory
+- full
+- generated_from_trainer
+model-index:
+- name: code_finetune
+  results: []
+---
+
+<!-- This model card has been generated automatically according to the information the Trainer had access to. You
+should probably proofread and complete it, then remove this comment. -->
+
+# code_finetune
+
+This model is a fine-tuned version of [deepseek-ai/DeepSeek-R1-Distill-Qwen-14B](https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-14B) on the open_thoughts_19K dataset.
+It achieves the following results on the evaluation set:
+- Loss: nan
+
+## Model description
+
+More information needed
+
+## Intended uses & limitations
+
+More information needed
+
+## Training and evaluation data
+
+More information needed
+
+## Training procedure
+
+### Training hyperparameters
+
+The following hyperparameters were used during training:
+- learning_rate: 1e-05
+- train_batch_size: 6
+- eval_batch_size: 4
+- seed: 42
+- distributed_type: multi-GPU
+- num_devices: 8
+- total_train_batch_size: 48
+- total_eval_batch_size: 32
+- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
+- lr_scheduler_type: cosine
+- lr_scheduler_warmup_ratio: 0.1
+- num_epochs: 5.0
+
+### Training results
+
+| Training Loss | Epoch | Step  | Validation Loss |
+|:-------------:|:-----:|:-----:|:---------------:|
+| 1.3178        | 1.0   | 2986  | nan             |
+| 1.0555        | 2.0   | 5972  | nan             |
+| 0.7954        | 3.0   | 8958  | nan             |
+| 0.5992        | 4.0   | 11944 | nan             |
+| 0.5494        | 5.0   | 14930 | nan             |
+
+
+### Framework versions
+
+- Transformers 4.45.2
+- Pytorch 2.6.0+cu124
+- Datasets 3.1.0
+- Tokenizers 0.20.3
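For orientation, a minimal loading sketch consistent with the framework versions the card lists (Transformers 4.45.2, PyTorch 2.6.0); the local path `./code_finetune` and the prompt are placeholders, not part of the upload.

```python
# Minimal sketch, assuming this repo has been cloned to ./code_finetune.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

path = "./code_finetune"  # placeholder path
tokenizer = AutoTokenizer.from_pretrained(path)
model = AutoModelForCausalLM.from_pretrained(path, torch_dtype=torch.bfloat16)

prompt = "Write a function that reverses a string."
inputs = tokenizer(prompt, return_tensors="pt")
# Sampling settings mirror generation_config.json further down in this commit.
outputs = model.generate(**inputs, max_new_tokens=256, do_sample=True,
                         temperature=0.6, top_p=0.95)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```

Note that the card reports `Loss: nan` on the evaluation set, so outputs should be sanity-checked before relying on this checkpoint.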
all_results.json ADDED
@@ -0,0 +1,12 @@
+{
+    "epoch": 5.0,
+    "eval_loss": NaN,
+    "eval_runtime": 348.2336,
+    "eval_samples_per_second": 45.728,
+    "eval_steps_per_second": 1.43,
+    "total_flos": 7403300223713280.0,
+    "train_loss": 0.9973710162960596,
+    "train_runtime": 63021.9977,
+    "train_samples_per_second": 11.37,
+    "train_steps_per_second": 0.237
+}
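As a plausibility check, the aggregate metrics above agree with the card's hyperparameters; a sketch of the arithmetic:

```python
# Cross-check: runtime x rate should reproduce the step and sample counts.
train_runtime = 63021.9977   # seconds, from all_results.json
steps_per_second = 0.237
samples_per_second = 11.37
total_train_batch_size = 48  # from the model card

print(round(train_runtime * steps_per_second))    # ~14936, matches final step 14930
print(round(train_runtime * samples_per_second))  # ~716560
print(14930 * total_train_batch_size)             # 716640, agrees with the line above
```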
config.json ADDED
@@ -0,0 +1,29 @@
+{
+  "_name_or_path": "deepseek-ai/DeepSeek-R1-Distill-Qwen-14B",
+  "architectures": [
+    "Qwen2ForCausalLM"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 151643,
+  "eos_token_id": 151643,
+  "hidden_act": "silu",
+  "hidden_size": 5120,
+  "initializer_range": 0.02,
+  "intermediate_size": 13824,
+  "max_position_embeddings": 131072,
+  "max_window_layers": 48,
+  "model_type": "qwen2",
+  "num_attention_heads": 40,
+  "num_hidden_layers": 48,
+  "num_key_value_heads": 8,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": null,
+  "rope_theta": 1000000.0,
+  "sliding_window": null,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.45.2",
+  "use_cache": false,
+  "use_sliding_window": false,
+  "vocab_size": 152064
+}
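The config is stock Qwen2; for orientation, a small sketch of the attention geometry the fields above imply (the commented `AutoConfig` call assumes a local clone):

```python
# Derive the attention geometry from config.json.
hidden_size = 5120
num_attention_heads = 40
num_key_value_heads = 8

head_dim = hidden_size // num_attention_heads            # 128
kv_groups = num_attention_heads // num_key_value_heads   # 5 query heads per KV head (GQA)
print(head_dim, kv_groups)

# Equivalent load via Transformers (path is a placeholder):
# from transformers import AutoConfig
# cfg = AutoConfig.from_pretrained("./code_finetune")
# assert cfg.model_type == "qwen2"
```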
eval_results.json ADDED
@@ -0,0 +1,7 @@
+{
+    "epoch": 5.0,
+    "eval_loss": NaN,
+    "eval_runtime": 348.2336,
+    "eval_samples_per_second": 45.728,
+    "eval_steps_per_second": 1.43
+}
generation_config.json ADDED
@@ -0,0 +1,9 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 151646,
+  "do_sample": true,
+  "eos_token_id": 151643,
+  "temperature": 0.6,
+  "top_p": 0.95,
+  "transformers_version": "4.45.2"
+}
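This file maps one-to-one onto a `GenerationConfig`; a sketch is below. Note the `bos_token_id` here (151646) differs from `config.json`'s 151643, as in the base model's own generation config.

```python
from transformers import GenerationConfig

# Mirror of generation_config.json above.
gen_cfg = GenerationConfig(
    bos_token_id=151646,
    eos_token_id=151643,
    do_sample=True,
    temperature=0.6,
    top_p=0.95,
)
# Passing generation_config=gen_cfg to model.generate() applies these defaults
# instead of greedy decoding.
```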
model-00001-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ddbcea1e6a0ec952df2aba5b0efcf97192435619bdca797713f4eb2d507d1fdd
+size 4986211280
model-00002-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:be6695429c49826c7e8237b13d974cac0c02093cc6603373fd0749e03cf6ff62
+size 4954847344
model-00003-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:aee174d346418ebc436b65cc0209610ac4debbe76c5d8b8121bdd108dde3de1d
+size 4954847392
model-00004-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1c81a377241295de8f68f9b22c23483ad1f1dd2e3c0f36be474d035d4564284c
+size 4954847392
model-00005-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1b65d5bd4fbfc9340125cc74987c841b35d7ce3740ce98b1cb21150c001d70d3
+size 4954847392
model-00006-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:62887914eefc17ce071c28defe9e60395b14855d6eaf7f4688b513e03f420f1c
+size 4734533160
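The six entries above are Git LFS pointers, not the weights themselves; after download, each shard can be checked against its pointer's oid and size. A sketch, using the last shard's values:

```python
import hashlib
from pathlib import Path

def verify_shard(path: str, expected_oid: str, expected_size: int) -> bool:
    """Check a downloaded file against a Git LFS pointer (sha256 oid + byte size)."""
    p = Path(path)
    if p.stat().st_size != expected_size:
        return False
    h = hashlib.sha256()
    with p.open("rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest() == expected_oid

print(verify_shard("model-00006-of-00006.safetensors",
                   "62887914eefc17ce071c28defe9e60395b14855d6eaf7f4688b513e03f420f1c",
                   4734533160))
```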
model.safetensors.index.json
ADDED
@@ -0,0 +1,586 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"metadata": {
|
3 |
+
"total_size": 29540067328
|
4 |
+
},
|
5 |
+
"weight_map": {
|
6 |
+
"lm_head.weight": "model-00006-of-00006.safetensors",
|
7 |
+
"model.embed_tokens.weight": "model-00001-of-00006.safetensors",
|
8 |
+
"model.layers.0.input_layernorm.weight": "model-00001-of-00006.safetensors",
|
9 |
+
"model.layers.0.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
|
10 |
+
"model.layers.0.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
|
11 |
+
"model.layers.0.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
|
12 |
+
"model.layers.0.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
|
13 |
+
"model.layers.0.self_attn.k_proj.bias": "model-00001-of-00006.safetensors",
|
14 |
+
"model.layers.0.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
|
15 |
+
"model.layers.0.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
|
16 |
+
"model.layers.0.self_attn.q_proj.bias": "model-00001-of-00006.safetensors",
|
17 |
+
"model.layers.0.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
|
18 |
+
"model.layers.0.self_attn.v_proj.bias": "model-00001-of-00006.safetensors",
|
19 |
+
"model.layers.0.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
|
20 |
+
"model.layers.1.input_layernorm.weight": "model-00001-of-00006.safetensors",
|
21 |
+
"model.layers.1.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
|
22 |
+
"model.layers.1.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
|
23 |
+
"model.layers.1.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
|
24 |
+
"model.layers.1.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
|
25 |
+
"model.layers.1.self_attn.k_proj.bias": "model-00001-of-00006.safetensors",
|
26 |
+
"model.layers.1.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
|
27 |
+
"model.layers.1.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
|
28 |
+
"model.layers.1.self_attn.q_proj.bias": "model-00001-of-00006.safetensors",
|
29 |
+
"model.layers.1.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
|
30 |
+
"model.layers.1.self_attn.v_proj.bias": "model-00001-of-00006.safetensors",
|
31 |
+
"model.layers.1.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
|
32 |
+
"model.layers.10.input_layernorm.weight": "model-00002-of-00006.safetensors",
|
33 |
+
"model.layers.10.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
|
34 |
+
"model.layers.10.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
|
35 |
+
"model.layers.10.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
|
36 |
+
"model.layers.10.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
|
37 |
+
"model.layers.10.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
|
38 |
+
"model.layers.10.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
|
39 |
+
"model.layers.10.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
|
40 |
+
"model.layers.10.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
|
41 |
+
"model.layers.10.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
|
42 |
+
"model.layers.10.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
|
43 |
+
"model.layers.10.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
|
44 |
+
"model.layers.11.input_layernorm.weight": "model-00002-of-00006.safetensors",
|
45 |
+
"model.layers.11.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
|
46 |
+
"model.layers.11.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
|
47 |
+
"model.layers.11.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
|
48 |
+
"model.layers.11.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
|
49 |
+
"model.layers.11.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
|
50 |
+
"model.layers.11.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
|
51 |
+
"model.layers.11.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
|
52 |
+
"model.layers.11.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
|
53 |
+
"model.layers.11.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
|
54 |
+
"model.layers.11.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
|
55 |
+
"model.layers.11.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
|
56 |
+
"model.layers.12.input_layernorm.weight": "model-00002-of-00006.safetensors",
|
57 |
+
"model.layers.12.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
|
58 |
+
"model.layers.12.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
|
59 |
+
"model.layers.12.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
|
60 |
+
"model.layers.12.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
|
61 |
+
"model.layers.12.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
|
62 |
+
"model.layers.12.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
|
63 |
+
"model.layers.12.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
|
64 |
+
"model.layers.12.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
|
65 |
+
"model.layers.12.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
|
66 |
+
"model.layers.12.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
|
67 |
+
"model.layers.12.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
|
68 |
+
"model.layers.13.input_layernorm.weight": "model-00002-of-00006.safetensors",
|
69 |
+
"model.layers.13.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
|
70 |
+
"model.layers.13.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
|
71 |
+
"model.layers.13.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
|
72 |
+
"model.layers.13.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
|
73 |
+
"model.layers.13.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
|
74 |
+
"model.layers.13.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
|
75 |
+
"model.layers.13.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
|
76 |
+
"model.layers.13.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
|
77 |
+
"model.layers.13.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
|
78 |
+
"model.layers.13.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
|
79 |
+
"model.layers.13.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
|
80 |
+
"model.layers.14.input_layernorm.weight": "model-00002-of-00006.safetensors",
|
81 |
+
"model.layers.14.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
|
82 |
+
"model.layers.14.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
|
83 |
+
"model.layers.14.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
|
84 |
+
"model.layers.14.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
|
85 |
+
"model.layers.14.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
|
86 |
+
"model.layers.14.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
|
87 |
+
"model.layers.14.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
|
88 |
+
"model.layers.14.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
|
89 |
+
"model.layers.14.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
|
90 |
+
"model.layers.14.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
|
91 |
+
"model.layers.14.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
|
92 |
+
"model.layers.15.input_layernorm.weight": "model-00003-of-00006.safetensors",
|
93 |
+
"model.layers.15.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
|
94 |
+
"model.layers.15.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
|
95 |
+
"model.layers.15.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
|
96 |
+
"model.layers.15.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
|
97 |
+
"model.layers.15.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
|
98 |
+
"model.layers.15.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
|
99 |
+
"model.layers.15.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
|
100 |
+
"model.layers.15.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
|
101 |
+
"model.layers.15.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
|
102 |
+
"model.layers.15.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
|
103 |
+
"model.layers.15.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
|
104 |
+
"model.layers.16.input_layernorm.weight": "model-00003-of-00006.safetensors",
|
105 |
+
"model.layers.16.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
|
106 |
+
"model.layers.16.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
|
107 |
+
"model.layers.16.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
|
108 |
+
"model.layers.16.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
|
109 |
+
"model.layers.16.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
|
110 |
+
"model.layers.16.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
|
111 |
+
"model.layers.16.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
|
112 |
+
"model.layers.16.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
|
113 |
+
"model.layers.16.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
|
114 |
+
"model.layers.16.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
|
115 |
+
"model.layers.16.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
|
116 |
+
"model.layers.17.input_layernorm.weight": "model-00003-of-00006.safetensors",
|
117 |
+
"model.layers.17.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
|
118 |
+
"model.layers.17.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
|
119 |
+
"model.layers.17.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
|
120 |
+
"model.layers.17.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
|
121 |
+
"model.layers.17.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
|
122 |
+
"model.layers.17.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
|
123 |
+
"model.layers.17.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
|
124 |
+
"model.layers.17.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
|
125 |
+
"model.layers.17.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
|
126 |
+
"model.layers.17.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
|
127 |
+
"model.layers.17.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
|
128 |
+
"model.layers.18.input_layernorm.weight": "model-00003-of-00006.safetensors",
|
129 |
+
"model.layers.18.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
|
130 |
+
"model.layers.18.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
|
131 |
+
"model.layers.18.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
|
132 |
+
"model.layers.18.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
|
133 |
+
"model.layers.18.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
|
134 |
+
"model.layers.18.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
|
135 |
+
"model.layers.18.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
|
136 |
+
"model.layers.18.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
|
137 |
+
"model.layers.18.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
|
138 |
+
"model.layers.18.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
|
139 |
+
"model.layers.18.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
|
140 |
+
"model.layers.19.input_layernorm.weight": "model-00003-of-00006.safetensors",
|
141 |
+
"model.layers.19.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
|
142 |
+
"model.layers.19.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
|
143 |
+
"model.layers.19.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
|
144 |
+
"model.layers.19.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
|
145 |
+
"model.layers.19.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
|
146 |
+
"model.layers.19.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
|
147 |
+
"model.layers.19.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
|
148 |
+
"model.layers.19.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
|
149 |
+
"model.layers.19.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
|
150 |
+
"model.layers.19.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
|
151 |
+
"model.layers.19.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
|
152 |
+
"model.layers.2.input_layernorm.weight": "model-00001-of-00006.safetensors",
|
153 |
+
"model.layers.2.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
|
154 |
+
"model.layers.2.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
|
155 |
+
"model.layers.2.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
|
156 |
+
"model.layers.2.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
|
157 |
+
"model.layers.2.self_attn.k_proj.bias": "model-00001-of-00006.safetensors",
|
158 |
+
"model.layers.2.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
|
159 |
+
"model.layers.2.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
|
160 |
+
"model.layers.2.self_attn.q_proj.bias": "model-00001-of-00006.safetensors",
|
161 |
+
"model.layers.2.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
|
162 |
+
"model.layers.2.self_attn.v_proj.bias": "model-00001-of-00006.safetensors",
|
163 |
+
"model.layers.2.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
|
164 |
+
"model.layers.20.input_layernorm.weight": "model-00003-of-00006.safetensors",
|
165 |
+
"model.layers.20.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
|
166 |
+
"model.layers.20.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
|
167 |
+
"model.layers.20.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
|
168 |
+
"model.layers.20.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
|
169 |
+
"model.layers.20.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
|
170 |
+
"model.layers.20.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
|
171 |
+
"model.layers.20.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
|
172 |
+
"model.layers.20.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
|
173 |
+
"model.layers.20.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
|
174 |
+
"model.layers.20.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
|
175 |
+
"model.layers.20.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
|
176 |
+
"model.layers.21.input_layernorm.weight": "model-00003-of-00006.safetensors",
|
177 |
+
"model.layers.21.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
|
178 |
+
"model.layers.21.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
|
179 |
+
"model.layers.21.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
|
180 |
+
"model.layers.21.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
|
181 |
+
"model.layers.21.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
|
182 |
+
"model.layers.21.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
|
183 |
+
"model.layers.21.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
|
184 |
+
"model.layers.21.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
|
185 |
+
"model.layers.21.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
|
186 |
+
"model.layers.21.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
|
187 |
+
"model.layers.21.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
|
188 |
+
"model.layers.22.input_layernorm.weight": "model-00003-of-00006.safetensors",
|
189 |
+
"model.layers.22.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
|
190 |
+
"model.layers.22.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
|
191 |
+
"model.layers.22.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
|
192 |
+
"model.layers.22.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
|
193 |
+
"model.layers.22.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
|
194 |
+
"model.layers.22.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
|
195 |
+
"model.layers.22.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
|
196 |
+
"model.layers.22.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
|
197 |
+
"model.layers.22.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
|
198 |
+
"model.layers.22.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
|
199 |
+
"model.layers.22.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
|
200 |
+
"model.layers.23.input_layernorm.weight": "model-00003-of-00006.safetensors",
|
201 |
+
"model.layers.23.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
|
202 |
+
"model.layers.23.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
|
203 |
+
"model.layers.23.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
|
204 |
+
"model.layers.23.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
|
205 |
+
"model.layers.23.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
|
206 |
+
"model.layers.23.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
|
207 |
+
"model.layers.23.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
|
208 |
+
"model.layers.23.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
|
209 |
+
"model.layers.23.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
|
210 |
+
"model.layers.23.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
|
211 |
+
"model.layers.23.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
|
212 |
+
"model.layers.24.input_layernorm.weight": "model-00004-of-00006.safetensors",
|
213 |
+
"model.layers.24.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
|
214 |
+
"model.layers.24.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
|
215 |
+
"model.layers.24.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
|
216 |
+
"model.layers.24.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
|
217 |
+
"model.layers.24.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
|
218 |
+
"model.layers.24.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
|
219 |
+
"model.layers.24.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
|
220 |
+
"model.layers.24.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
|
221 |
+
"model.layers.24.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
|
222 |
+
"model.layers.24.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
|
223 |
+
"model.layers.24.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
|
224 |
+
"model.layers.25.input_layernorm.weight": "model-00004-of-00006.safetensors",
|
225 |
+
"model.layers.25.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
|
226 |
+
"model.layers.25.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
|
227 |
+
"model.layers.25.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
|
228 |
+
"model.layers.25.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
|
229 |
+
"model.layers.25.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
|
230 |
+
"model.layers.25.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
|
231 |
+
"model.layers.25.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
|
232 |
+
"model.layers.25.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
|
233 |
+
"model.layers.25.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
|
234 |
+
"model.layers.25.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
|
235 |
+
"model.layers.25.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
|
236 |
+
"model.layers.26.input_layernorm.weight": "model-00004-of-00006.safetensors",
|
237 |
+
"model.layers.26.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
|
238 |
+
"model.layers.26.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
|
239 |
+
"model.layers.26.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
|
240 |
+
"model.layers.26.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
|
241 |
+
"model.layers.26.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
|
242 |
+
"model.layers.26.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
|
243 |
+
"model.layers.26.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
|
244 |
+
"model.layers.26.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
|
245 |
+
"model.layers.26.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
|
246 |
+
"model.layers.26.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
|
247 |
+
"model.layers.26.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
|
248 |
+
"model.layers.27.input_layernorm.weight": "model-00004-of-00006.safetensors",
|
249 |
+
"model.layers.27.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
|
250 |
+
"model.layers.27.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
|
251 |
+
"model.layers.27.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
|
252 |
+
"model.layers.27.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
|
253 |
+
"model.layers.27.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
|
254 |
+
"model.layers.27.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
|
255 |
+
"model.layers.27.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
|
256 |
+
"model.layers.27.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
|
257 |
+
"model.layers.27.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
|
258 |
+
"model.layers.27.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
|
259 |
+
"model.layers.27.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
|
260 |
+
"model.layers.28.input_layernorm.weight": "model-00004-of-00006.safetensors",
|
261 |
+
"model.layers.28.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
|
262 |
+
"model.layers.28.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
|
263 |
+
"model.layers.28.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
|
264 |
+
"model.layers.28.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
|
265 |
+
"model.layers.28.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
|
266 |
+
"model.layers.28.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
|
267 |
+
"model.layers.28.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
|
268 |
+
"model.layers.28.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
|
269 |
+
"model.layers.28.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
|
270 |
+
"model.layers.28.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
|
271 |
+
"model.layers.28.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
|
272 |
+
"model.layers.29.input_layernorm.weight": "model-00004-of-00006.safetensors",
|
273 |
+
"model.layers.29.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
|
274 |
+
"model.layers.29.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
|
275 |
+
"model.layers.29.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
|
276 |
+
"model.layers.29.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
|
277 |
+
"model.layers.29.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
|
278 |
+
"model.layers.29.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
|
279 |
+
"model.layers.29.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
|
280 |
+
"model.layers.29.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
|
281 |
+
"model.layers.29.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
|
282 |
+
"model.layers.29.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
|
283 |
+
"model.layers.29.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
|
284 |
+
"model.layers.3.input_layernorm.weight": "model-00001-of-00006.safetensors",
|
285 |
+
"model.layers.3.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
|
286 |
+
"model.layers.3.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
|
287 |
+
"model.layers.3.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
|
288 |
+
"model.layers.3.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
|
289 |
+
"model.layers.3.self_attn.k_proj.bias": "model-00001-of-00006.safetensors",
|
290 |
+
"model.layers.3.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
|
291 |
+
"model.layers.3.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
|
292 |
+
"model.layers.3.self_attn.q_proj.bias": "model-00001-of-00006.safetensors",
|
293 |
+
"model.layers.3.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
|
294 |
+
"model.layers.3.self_attn.v_proj.bias": "model-00001-of-00006.safetensors",
|
295 |
+
"model.layers.3.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
|
296 |
+
"model.layers.30.input_layernorm.weight": "model-00004-of-00006.safetensors",
|
297 |
+
"model.layers.30.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
|
298 |
+
"model.layers.30.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
|
299 |
+
"model.layers.30.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
|
300 |
+
"model.layers.30.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
|
301 |
+
"model.layers.30.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
|
302 |
+
"model.layers.30.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
|
303 |
+
"model.layers.30.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
|
304 |
+
"model.layers.30.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
|
305 |
+
"model.layers.30.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
|
306 |
+
"model.layers.30.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
|
307 |
+
"model.layers.30.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
|
308 |
+
"model.layers.31.input_layernorm.weight": "model-00004-of-00006.safetensors",
|
309 |
+
"model.layers.31.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
|
310 |
+
"model.layers.31.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
|
311 |
+
"model.layers.31.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
|
312 |
+
"model.layers.31.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
|
313 |
+
"model.layers.31.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
|
314 |
+
"model.layers.31.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
|
315 |
+
"model.layers.31.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
|
316 |
+
"model.layers.31.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
|
317 |
+
"model.layers.31.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
|
318 |
+
"model.layers.31.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
|
319 |
+
"model.layers.31.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
|
320 |
+
"model.layers.32.input_layernorm.weight": "model-00004-of-00006.safetensors",
|
321 |
+
"model.layers.32.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
|
322 |
+
"model.layers.32.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
|
323 |
+
"model.layers.32.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
|
324 |
+
"model.layers.32.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
|
325 |
+
"model.layers.32.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
|
326 |
+
"model.layers.32.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
|
327 |
+
"model.layers.32.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
|
328 |
+
"model.layers.32.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
|
329 |
+
"model.layers.32.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
|
330 |
+
"model.layers.32.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
|
331 |
+
"model.layers.32.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
|
332 |
+
"model.layers.33.input_layernorm.weight": "model-00005-of-00006.safetensors",
|
333 |
+
"model.layers.33.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
|
334 |
+
"model.layers.33.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
|
335 |
+
"model.layers.33.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
|
336 |
+
"model.layers.33.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
|
337 |
+
"model.layers.33.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
|
338 |
+
"model.layers.33.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
|
339 |
+
"model.layers.33.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
|
340 |
+
"model.layers.33.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
|
341 |
+
"model.layers.33.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
|
342 |
+
"model.layers.33.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
|
343 |
+
"model.layers.33.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
|
344 |
+
"model.layers.34.input_layernorm.weight": "model-00005-of-00006.safetensors",
|
345 |
+
"model.layers.34.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
|
346 |
+
"model.layers.34.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
|
347 |
+
"model.layers.34.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
|
348 |
+
"model.layers.34.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
|
349 |
+
"model.layers.34.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
|
350 |
+
"model.layers.34.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
|
351 |
+
"model.layers.34.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
|
352 |
+
"model.layers.34.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
|
353 |
+
"model.layers.34.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
|
354 |
+
"model.layers.34.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
|
355 |
+
"model.layers.34.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
|
356 |
+
"model.layers.35.input_layernorm.weight": "model-00005-of-00006.safetensors",
|
357 |
+
"model.layers.35.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
|
358 |
+
"model.layers.35.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
|
359 |
+
"model.layers.35.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
|
360 |
+
"model.layers.35.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
|
361 |
+
"model.layers.35.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
|
362 |
+
"model.layers.35.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
|
363 |
+
"model.layers.35.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
|
364 |
+
"model.layers.35.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
|
365 |
+
"model.layers.35.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
|
366 |
+
"model.layers.35.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
|
367 |
+
"model.layers.35.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
|
368 |
+
"model.layers.36.input_layernorm.weight": "model-00005-of-00006.safetensors",
|
369 |
+
"model.layers.36.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
|
370 |
+
"model.layers.36.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
|
371 |
+
"model.layers.36.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
|
372 |
+
"model.layers.36.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
|
373 |
+
"model.layers.36.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
|
374 |
+
"model.layers.36.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
|
375 |
+
"model.layers.36.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
|
376 |
+
"model.layers.36.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
|
377 |
+
"model.layers.36.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
|
378 |
+
"model.layers.36.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
|
379 |
+
"model.layers.36.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
|
380 |
+
"model.layers.37.input_layernorm.weight": "model-00005-of-00006.safetensors",
|
381 |
+
"model.layers.37.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
|
382 |
+
"model.layers.37.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
|
383 |
+
"model.layers.37.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
|
384 |
+
"model.layers.37.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
|
385 |
+
"model.layers.37.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
|
386 |
+
"model.layers.37.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
|
387 |
+
"model.layers.37.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
|
388 |
+
"model.layers.37.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
|
389 |
+
"model.layers.37.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
|
390 |
+
"model.layers.37.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
|
391 |
+
"model.layers.37.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
|
392 |
+
"model.layers.38.input_layernorm.weight": "model-00005-of-00006.safetensors",
|
393 |
+
"model.layers.38.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
|
394 |
+
"model.layers.38.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
|
395 |
+
"model.layers.38.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
|
396 |
+
"model.layers.38.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
|
397 |
+
"model.layers.38.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
|
398 |
+
"model.layers.38.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
|
399 |
+
"model.layers.38.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
|
400 |
+
"model.layers.38.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
|
401 |
+
"model.layers.38.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
|
402 |
+
"model.layers.38.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
|
403 |
+
"model.layers.38.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
|
404 |
+
"model.layers.39.input_layernorm.weight": "model-00005-of-00006.safetensors",
|
405 |
+
"model.layers.39.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
|
406 |
+
"model.layers.39.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
|
407 |
+
"model.layers.39.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
|
408 |
+
"model.layers.39.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
|
409 |
+
"model.layers.39.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
|
410 |
+
"model.layers.39.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
|
411 |
+
"model.layers.39.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
|
412 |
+
"model.layers.39.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
|
413 |
+
"model.layers.39.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
|
414 |
+
"model.layers.39.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
|
415 |
+
"model.layers.39.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
|
416 |
+
"model.layers.4.input_layernorm.weight": "model-00001-of-00006.safetensors",
|
417 |
+
"model.layers.4.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
|
418 |
+
"model.layers.4.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
|
419 |
+
"model.layers.4.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
|
420 |
+
"model.layers.4.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
|
421 |
+
"model.layers.4.self_attn.k_proj.bias": "model-00001-of-00006.safetensors",
|
422 |
+
"model.layers.4.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
|
423 |
+
"model.layers.4.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
|
424 |
+
"model.layers.4.self_attn.q_proj.bias": "model-00001-of-00006.safetensors",
|
425 |
+
"model.layers.4.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
|
426 |
+
"model.layers.4.self_attn.v_proj.bias": "model-00001-of-00006.safetensors",
|
427 |
+
"model.layers.4.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
|
428 |
+
"model.layers.40.input_layernorm.weight": "model-00005-of-00006.safetensors",
|
429 |
+
"model.layers.40.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
|
430 |
+
"model.layers.40.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
|
431 |
+
"model.layers.40.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
|
432 |
+
"model.layers.40.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
|
433 |
+
"model.layers.40.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
|
434 |
+
"model.layers.40.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
|
435 |
+
"model.layers.40.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
|
436 |
+
"model.layers.40.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
|
437 |
+
"model.layers.40.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
|
438 |
+
"model.layers.40.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
|
439 |
+
"model.layers.40.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
|
440 |
+
"model.layers.41.input_layernorm.weight": "model-00005-of-00006.safetensors",
|
441 |
+
"model.layers.41.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
|
442 |
+
"model.layers.41.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
|
443 |
+
"model.layers.41.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
|
444 |
+
"model.layers.41.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
|
445 |
+
"model.layers.41.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
|
446 |
+
"model.layers.41.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
|
447 |
+
"model.layers.41.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
|
448 |
+
"model.layers.41.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
|
449 |
+
"model.layers.41.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
|
450 |
+
"model.layers.41.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
|
451 |
+
"model.layers.41.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
|
452 |
+
"model.layers.42.input_layernorm.weight": "model-00006-of-00006.safetensors",
|
453 |
+
"model.layers.42.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
|
454 |
+
"model.layers.42.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
|
455 |
+
"model.layers.42.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
|
456 |
+
"model.layers.42.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
|
457 |
+
"model.layers.42.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
|
458 |
+
"model.layers.42.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
|
459 |
+
"model.layers.42.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
|
460 |
+
"model.layers.42.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
|
461 |
+
"model.layers.42.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
|
462 |
+
"model.layers.42.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
|
463 |
+
"model.layers.42.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
|
464 |
+
"model.layers.43.input_layernorm.weight": "model-00006-of-00006.safetensors",
|
465 |
+
"model.layers.43.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
|
466 |
+
"model.layers.43.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
|
467 |
+
"model.layers.43.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
|
468 |
+
"model.layers.43.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
|
469 |
+
"model.layers.43.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
|
470 |
+
"model.layers.43.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
|
471 |
+
"model.layers.43.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
|
472 |
+
"model.layers.43.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
|
473 |
+
"model.layers.43.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
|
474 |
+
"model.layers.43.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
|
475 |
+
"model.layers.43.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
|
476 |
+
"model.layers.44.input_layernorm.weight": "model-00006-of-00006.safetensors",
|
477 |
+
"model.layers.44.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
|
478 |
+
"model.layers.44.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
|
479 |
+
"model.layers.44.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
|
480 |
+
"model.layers.44.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
|
481 |
+
"model.layers.44.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
|
482 |
+
"model.layers.44.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
|
483 |
+
"model.layers.44.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
|
484 |
+
"model.layers.44.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
|
485 |
+
"model.layers.44.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
|
486 |
+
"model.layers.44.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
|
487 |
+
"model.layers.44.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
|
488 |
+
"model.layers.45.input_layernorm.weight": "model-00006-of-00006.safetensors",
|
489 |
+
"model.layers.45.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
|
490 |
+
"model.layers.45.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
|
491 |
+
"model.layers.45.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
|
492 |
+
"model.layers.45.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
|
493 |
+
"model.layers.45.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
|
494 |
+
"model.layers.45.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
|
495 |
+
"model.layers.45.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
|
496 |
+
"model.layers.45.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
|
497 |
+
"model.layers.45.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
|
498 |
+
"model.layers.45.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
|
499 |
+
"model.layers.45.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
|
500 |
+
"model.layers.46.input_layernorm.weight": "model-00006-of-00006.safetensors",
|
501 |
+
"model.layers.46.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
|
502 |
+
"model.layers.46.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
"model.layers.46.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
"model.layers.46.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
"model.layers.46.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
"model.layers.46.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
"model.layers.46.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
"model.layers.46.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
"model.layers.46.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
"model.layers.46.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
"model.layers.46.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
"model.layers.47.input_layernorm.weight": "model-00006-of-00006.safetensors",
"model.layers.47.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
"model.layers.47.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
"model.layers.47.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
"model.layers.47.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
"model.layers.47.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
"model.layers.47.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
"model.layers.47.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
"model.layers.47.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
"model.layers.47.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
"model.layers.47.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
"model.layers.47.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
"model.layers.5.input_layernorm.weight": "model-00001-of-00006.safetensors",
"model.layers.5.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
"model.layers.5.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
"model.layers.5.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
"model.layers.5.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
"model.layers.5.self_attn.k_proj.bias": "model-00001-of-00006.safetensors",
"model.layers.5.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
"model.layers.5.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
"model.layers.5.self_attn.q_proj.bias": "model-00001-of-00006.safetensors",
"model.layers.5.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
"model.layers.5.self_attn.v_proj.bias": "model-00001-of-00006.safetensors",
"model.layers.5.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
"model.layers.6.input_layernorm.weight": "model-00002-of-00006.safetensors",
"model.layers.6.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
"model.layers.6.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
"model.layers.6.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
"model.layers.6.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
"model.layers.6.self_attn.k_proj.bias": "model-00001-of-00006.safetensors",
"model.layers.6.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
"model.layers.6.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
"model.layers.6.self_attn.q_proj.bias": "model-00001-of-00006.safetensors",
"model.layers.6.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
"model.layers.6.self_attn.v_proj.bias": "model-00001-of-00006.safetensors",
"model.layers.6.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
"model.layers.7.input_layernorm.weight": "model-00002-of-00006.safetensors",
"model.layers.7.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
"model.layers.7.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
"model.layers.7.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
"model.layers.7.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
"model.layers.7.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
"model.layers.7.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
"model.layers.7.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
"model.layers.7.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
"model.layers.7.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
"model.layers.7.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
"model.layers.7.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
"model.layers.8.input_layernorm.weight": "model-00002-of-00006.safetensors",
"model.layers.8.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
"model.layers.8.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
"model.layers.8.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
"model.layers.8.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
"model.layers.8.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
"model.layers.8.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
"model.layers.8.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
"model.layers.8.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
"model.layers.8.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
"model.layers.8.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
"model.layers.8.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
"model.layers.9.input_layernorm.weight": "model-00002-of-00006.safetensors",
"model.layers.9.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
"model.layers.9.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
"model.layers.9.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
"model.layers.9.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
"model.layers.9.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
"model.layers.9.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
"model.layers.9.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
"model.layers.9.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
"model.layers.9.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
"model.layers.9.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
"model.layers.9.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
"model.norm.weight": "model-00006-of-00006.safetensors"
}
}
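
Note: the `weight_map` above is a flat lookup from tensor name to shard file. A minimal sketch (not part of this upload) of resolving a tensor by hand, assuming the shards sit alongside the index; in practice `transformers.AutoModelForCausalLM.from_pretrained` does this automatically:

```python
# Sketch: resolve a tensor through model.safetensors.index.json by hand.
# File names come from the index above; the local paths are assumptions.
import json
from safetensors import safe_open

with open("model.safetensors.index.json") as f:
    index = json.load(f)

weight_map = index["weight_map"]              # tensor name -> shard file
shard_file = weight_map["model.norm.weight"]  # "model-00006-of-00006.safetensors"

with safe_open(shard_file, framework="pt") as shard:
    tensor = shard.get_tensor("model.norm.weight")
print(tensor.shape)
```
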
special_tokens_map.json
ADDED
@@ -0,0 +1,23 @@
{
  "bos_token": {
    "content": "<|begin▁of▁sentence|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "<|im_end|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<|end▁of▁sentence|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
tokenizer.json
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:02643f00207dfc5ed248992486bde04314c21dca556bf65ce520690962b8db63
size 11422965
tokenizer_config.json
ADDED
@@ -0,0 +1,204 @@
{
  "add_bos_token": true,
  "add_eos_token": false,
  "add_prefix_space": null,
  "added_tokens_decoder": {
    "151643": {
      "content": "<|end▁of▁sentence|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151644": {
      "content": "<|User|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151645": {
      "content": "<|Assistant|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151646": {
      "content": "<|begin▁of▁sentence|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151647": {
      "content": "<|EOT|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151648": {
      "content": "<think>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151649": {
      "content": "</think>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151650": {
      "content": "<|quad_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151651": {
      "content": "<|quad_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151652": {
      "content": "<|vision_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151653": {
      "content": "<|vision_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151654": {
      "content": "<|vision_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151655": {
      "content": "<|image_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151656": {
      "content": "<|video_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151657": {
      "content": "<tool_call>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151658": {
      "content": "</tool_call>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151659": {
      "content": "<|fim_prefix|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151660": {
      "content": "<|fim_middle|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151661": {
      "content": "<|fim_suffix|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151662": {
      "content": "<|fim_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151663": {
      "content": "<|repo_name|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151664": {
      "content": "<|file_sep|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151665": {
      "content": "<|im_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "bos_token": "<|begin▁of▁sentence|>",
  "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% set ns = namespace(is_first=false, is_tool=false, is_output_first=true, system_prompt='') %}{%- for message in messages %}{%- if message['role'] == 'system' %}{% set ns.system_prompt = message['content'] %}{%- endif %}{%- endfor %}{{bos_token}}{{ns.system_prompt}}{%- for message in messages %}{%- if message['role'] == 'user' %}{%- set ns.is_tool = false -%}{{'<|User|>' + message['content']}}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is none %}{%- set ns.is_tool = false -%}{%- for tool in message['tool_calls']%}{%- if not ns.is_first %}{{'<|Assistant|><|tool▁calls▁begin|><|tool▁call▁begin|>' + tool['type'] + '<|tool▁sep|>' + tool['function']['name'] + '\\n' + '```json' + '\\n' + tool['function']['arguments'] + '\\n' + '```' + '<|tool▁call▁end|>'}}{%- set ns.is_first = true -%}{%- else %}{{'\\n' + '<|tool▁call▁begin|>' + tool['type'] + '<|tool▁sep|>' + tool['function']['name'] + '\\n' + '```json' + '\\n' + tool['function']['arguments'] + '\\n' + '```' + '<|tool▁call▁end|>'}}{{'<|tool▁calls▁end|><|end▁of▁sentence|>'}}{%- endif %}{%- endfor %}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is not none %}{%- if ns.is_tool %}{{'<|tool▁outputs▁end|>' + message['content'] + '<|end▁of▁sentence|>'}}{%- set ns.is_tool = false -%}{%- else %}{% set content = message['content'] %}{% if '</think>' in content %}{% set content = content.split('</think>')[-1] %}{% endif %}{{'<|Assistant|>' + content + '<|end▁of▁sentence|>'}}{%- endif %}{%- endif %}{%- if message['role'] == 'tool' %}{%- set ns.is_tool = true -%}{%- if ns.is_output_first %}{{'<|tool▁outputs▁begin|><|tool▁output▁begin|>' + message['content'] + '<|tool▁output▁end|>'}}{%- set ns.is_output_first = false %}{%- else %}{{'\\n<|tool▁output▁begin|>' + message['content'] + '<|tool▁output▁end|>'}}{%- endif %}{%- endif %}{%- endfor -%}{% if ns.is_tool %}{{'<|tool▁outputs▁end|>'}}{% endif %}{% if add_generation_prompt and not ns.is_tool %}{{'<|Assistant|><think>\\n'}}{% endif %}",
  "clean_up_tokenization_spaces": false,
  "eos_token": "<|im_end|>",
  "legacy": true,
  "model_max_length": 16384,
  "pad_token": "<|end▁of▁sentence|>",
  "padding_side": "right",
  "sp_model_kwargs": {},
  "split_special_tokens": false,
  "tokenizer_class": "LlamaTokenizer",
  "unk_token": null,
  "use_default_system_prompt": false
}
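
The `chat_template` entry above is a Jinja template that `transformers` renders when formatting conversations; it drops everything up to `</think>` from earlier assistant turns and, when `add_generation_prompt` is set, ends the prompt with `<|Assistant|><think>\n` so generation starts inside a reasoning block. A minimal sketch of rendering it (the model path is a placeholder for a local checkout of this repo):

```python
# Sketch: render the chat template defined in tokenizer_config.json above.
# "path/to/code_finetune" is a placeholder, not a published model id.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("path/to/code_finetune")
messages = [{"role": "user", "content": "Reverse a linked list in Python."}]

prompt = tokenizer.apply_chat_template(
    messages,
    tokenize=False,
    add_generation_prompt=True,  # appends "<|Assistant|><think>\n"
)
print(prompt)
```
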
train_results.json
ADDED
@@ -0,0 +1,8 @@
{
  "epoch": 5.0,
  "total_flos": 7403300223713280.0,
  "train_loss": 0.9973710162960596,
  "train_runtime": 63021.9977,
  "train_samples_per_second": 11.37,
  "train_steps_per_second": 0.237
}
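
These figures are consistent with the hyperparameters in the README: 11.37 samples/s over the 63,022 s runtime is roughly 716,560 samples, i.e. about 143,312 per epoch, which at the total train batch size of 48 gives the 2,986 steps per epoch (14,930 over 5 epochs) recorded in the logs below. A quick sketch of that cross-check:

```python
# Sketch: sanity-check train_results.json against the training setup.
runtime_s = 63021.9977
samples_per_s = 11.37
epochs = 5
total_batch = 48  # train_batch_size 6 x 8 GPUs

samples_total = samples_per_s * runtime_s          # ~716,560 samples seen
samples_per_epoch = samples_total / epochs         # ~143,312
steps_per_epoch = samples_per_epoch / total_batch  # ~2,986
print(round(steps_per_epoch), round(steps_per_epoch) * epochs)  # 2986 14930
```
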
trainer_log.jsonl
ADDED
@@ -0,0 +1,155 @@
{"current_steps": 100, "total_steps": 14930, "loss": 11.7393, "lr": 6.697923643670463e-07, "epoch": 0.033489618218352314, "percentage": 0.67, "elapsed_time": "0:06:51", "remaining_time": "16:57:30"}
{"current_steps": 200, "total_steps": 14930, "loss": 3.297, "lr": 1.3395847287340927e-06, "epoch": 0.06697923643670463, "percentage": 1.34, "elapsed_time": "0:13:36", "remaining_time": "16:42:12"}
{"current_steps": 300, "total_steps": 14930, "loss": 1.8384, "lr": 2.0093770931011387e-06, "epoch": 0.10046885465505694, "percentage": 2.01, "elapsed_time": "0:20:21", "remaining_time": "16:32:31"}
{"current_steps": 400, "total_steps": 14930, "loss": 1.6728, "lr": 2.6791694574681854e-06, "epoch": 0.13395847287340926, "percentage": 2.68, "elapsed_time": "0:27:05", "remaining_time": "16:23:50"}
{"current_steps": 500, "total_steps": 14930, "loss": 1.5883, "lr": 3.3489618218352316e-06, "epoch": 0.16744809109176156, "percentage": 3.35, "elapsed_time": "0:33:48", "remaining_time": "16:15:55"}
{"current_steps": 600, "total_steps": 14930, "loss": 1.5399, "lr": 4.018754186202277e-06, "epoch": 0.20093770931011387, "percentage": 4.02, "elapsed_time": "0:40:33", "remaining_time": "16:08:31"}
{"current_steps": 700, "total_steps": 14930, "loss": 1.5214, "lr": 4.688546550569324e-06, "epoch": 0.23442732752846618, "percentage": 4.69, "elapsed_time": "0:47:17", "remaining_time": "16:01:25"}
{"current_steps": 800, "total_steps": 14930, "loss": 1.4822, "lr": 5.358338914936371e-06, "epoch": 0.2679169457468185, "percentage": 5.36, "elapsed_time": "0:54:01", "remaining_time": "15:54:17"}
{"current_steps": 900, "total_steps": 14930, "loss": 1.4623, "lr": 6.028131279303416e-06, "epoch": 0.3014065639651708, "percentage": 6.03, "elapsed_time": "1:00:45", "remaining_time": "15:47:12"}
{"current_steps": 1000, "total_steps": 14930, "loss": 1.4408, "lr": 6.697923643670463e-06, "epoch": 0.33489618218352313, "percentage": 6.7, "elapsed_time": "1:07:29", "remaining_time": "15:40:09"}
{"current_steps": 1100, "total_steps": 14930, "loss": 1.4378, "lr": 7.3677160080375086e-06, "epoch": 0.3683858004018754, "percentage": 7.37, "elapsed_time": "1:15:46", "remaining_time": "15:52:37"}
{"current_steps": 1200, "total_steps": 14930, "loss": 1.4189, "lr": 8.037508372404555e-06, "epoch": 0.40187541862022774, "percentage": 8.04, "elapsed_time": "1:22:29", "remaining_time": "15:43:56"}
{"current_steps": 1300, "total_steps": 14930, "loss": 1.4098, "lr": 8.707300736771601e-06, "epoch": 0.43536503683858, "percentage": 8.71, "elapsed_time": "1:29:14", "remaining_time": "15:35:38"}
{"current_steps": 1400, "total_steps": 14930, "loss": 1.4011, "lr": 9.377093101138647e-06, "epoch": 0.46885465505693236, "percentage": 9.38, "elapsed_time": "1:35:58", "remaining_time": "15:27:33"}
{"current_steps": 1500, "total_steps": 14930, "loss": 1.3933, "lr": 9.999993303758581e-06, "epoch": 0.5023442732752846, "percentage": 10.05, "elapsed_time": "1:42:42", "remaining_time": "15:19:35"}
{"current_steps": 1600, "total_steps": 14930, "loss": 1.3828, "lr": 9.998435483941776e-06, "epoch": 0.535833891493637, "percentage": 10.72, "elapsed_time": "1:49:26", "remaining_time": "15:11:49"}
{"current_steps": 1700, "total_steps": 14930, "loss": 1.3772, "lr": 9.994145483428403e-06, "epoch": 0.5693235097119893, "percentage": 11.39, "elapsed_time": "1:56:11", "remaining_time": "15:04:11"}
{"current_steps": 1800, "total_steps": 14930, "loss": 1.3674, "lr": 9.987125647163527e-06, "epoch": 0.6028131279303416, "percentage": 12.06, "elapsed_time": "2:02:54", "remaining_time": "14:56:35"}
{"current_steps": 1900, "total_steps": 14930, "loss": 1.3538, "lr": 9.977379812240013e-06, "epoch": 0.6363027461486939, "percentage": 12.73, "elapsed_time": "2:09:40", "remaining_time": "14:49:14"}
{"current_steps": 2000, "total_steps": 14930, "loss": 1.3566, "lr": 9.964913305801151e-06, "epoch": 0.6697923643670463, "percentage": 13.4, "elapsed_time": "2:16:24", "remaining_time": "14:41:50"}
{"current_steps": 2100, "total_steps": 14930, "loss": 1.3473, "lr": 9.9497329421288e-06, "epoch": 0.7032819825853985, "percentage": 14.07, "elapsed_time": "2:24:07", "remaining_time": "14:40:30"}
{"current_steps": 2200, "total_steps": 14930, "loss": 1.35, "lr": 9.931847018918654e-06, "epoch": 0.7367716008037508, "percentage": 14.74, "elapsed_time": "2:30:52", "remaining_time": "14:32:59"}
{"current_steps": 2300, "total_steps": 14930, "loss": 1.3381, "lr": 9.911265312744663e-06, "epoch": 0.7702612190221031, "percentage": 15.41, "elapsed_time": "2:37:37", "remaining_time": "14:25:31"}
{"current_steps": 2400, "total_steps": 14930, "loss": 1.3409, "lr": 9.887999073715083e-06, "epoch": 0.8037508372404555, "percentage": 16.08, "elapsed_time": "2:44:21", "remaining_time": "14:18:05"}
{"current_steps": 2500, "total_steps": 14930, "loss": 1.333, "lr": 9.86206101932309e-06, "epoch": 0.8372404554588078, "percentage": 16.74, "elapsed_time": "2:51:05", "remaining_time": "14:10:41"}
{"current_steps": 2600, "total_steps": 14930, "loss": 1.3195, "lr": 9.833465327495307e-06, "epoch": 0.87073007367716, "percentage": 17.41, "elapsed_time": "2:57:52", "remaining_time": "14:03:30"}
{"current_steps": 2700, "total_steps": 14930, "loss": 1.3183, "lr": 9.802227628842045e-06, "epoch": 0.9042196918955124, "percentage": 18.08, "elapsed_time": "3:04:36", "remaining_time": "13:56:10"}
{"current_steps": 2800, "total_steps": 14930, "loss": 1.3145, "lr": 9.7683649981135e-06, "epoch": 0.9377093101138647, "percentage": 18.75, "elapsed_time": "3:11:20", "remaining_time": "13:48:54"}
{"current_steps": 2900, "total_steps": 14930, "loss": 1.3178, "lr": 9.731895944866576e-06, "epoch": 0.971198928332217, "percentage": 19.42, "elapsed_time": "3:18:04", "remaining_time": "13:41:41"}
{"current_steps": 2986, "total_steps": 14930, "eval_loss": NaN, "epoch": 1.0, "percentage": 20.0, "elapsed_time": "3:29:40", "remaining_time": "13:58:41"}
{"current_steps": 3000, "total_steps": 14930, "loss": 1.3204, "lr": 9.69284040334742e-06, "epoch": 1.0046885465505693, "percentage": 20.09, "elapsed_time": "3:30:37", "remaining_time": "13:57:33"}
{"current_steps": 3100, "total_steps": 14930, "loss": 1.31, "lr": 9.651219721595235e-06, "epoch": 1.0381781647689217, "percentage": 20.76, "elapsed_time": "3:38:21", "remaining_time": "13:53:17"}
{"current_steps": 3200, "total_steps": 14930, "loss": 1.2948, "lr": 9.607056649773266e-06, "epoch": 1.0716677829872738, "percentage": 21.43, "elapsed_time": "3:45:05", "remaining_time": "13:45:07"}
{"current_steps": 3300, "total_steps": 14930, "loss": 1.2819, "lr": 9.56037532773342e-06, "epoch": 1.1051574012056262, "percentage": 22.1, "elapsed_time": "3:51:50", "remaining_time": "13:37:03"}
{"current_steps": 3400, "total_steps": 14930, "loss": 1.2655, "lr": 9.511201271821235e-06, "epoch": 1.1386470194239786, "percentage": 22.77, "elapsed_time": "3:58:34", "remaining_time": "13:29:01"}
{"current_steps": 3500, "total_steps": 14930, "loss": 1.2451, "lr": 9.459561360928472e-06, "epoch": 1.1721366376423308, "percentage": 23.44, "elapsed_time": "4:05:17", "remaining_time": "13:21:03"}
{"current_steps": 3600, "total_steps": 14930, "loss": 1.2355, "lr": 9.405483821800912e-06, "epoch": 1.2056262558606832, "percentage": 24.11, "elapsed_time": "4:12:01", "remaining_time": "13:13:12"}
{"current_steps": 3700, "total_steps": 14930, "loss": 1.2295, "lr": 9.348998213609416e-06, "epoch": 1.2391158740790356, "percentage": 24.78, "elapsed_time": "4:18:46", "remaining_time": "13:05:23"}
{"current_steps": 3800, "total_steps": 14930, "loss": 1.2011, "lr": 9.29013541179268e-06, "epoch": 1.2726054922973877, "percentage": 25.45, "elapsed_time": "4:25:29", "remaining_time": "12:57:37"}
{"current_steps": 3900, "total_steps": 14930, "loss": 1.1906, "lr": 9.228927591180484e-06, "epoch": 1.3060951105157401, "percentage": 26.12, "elapsed_time": "4:32:13", "remaining_time": "12:49:55"}
{"current_steps": 4000, "total_steps": 14930, "loss": 1.1715, "lr": 9.165408208406703e-06, "epoch": 1.3395847287340925, "percentage": 26.79, "elapsed_time": "4:38:57", "remaining_time": "12:42:15"}
{"current_steps": 4100, "total_steps": 14930, "loss": 1.1605, "lr": 9.099611983621684e-06, "epoch": 1.3730743469524447, "percentage": 27.46, "elapsed_time": "4:46:42", "remaining_time": "12:37:20"}
{"current_steps": 4200, "total_steps": 14930, "loss": 1.1425, "lr": 9.03157488151394e-06, "epoch": 1.406563965170797, "percentage": 28.13, "elapsed_time": "4:53:26", "remaining_time": "12:29:40"}
{"current_steps": 4300, "total_steps": 14930, "loss": 1.1233, "lr": 8.961334091651618e-06, "epoch": 1.4400535833891492, "percentage": 28.8, "elapsed_time": "5:00:10", "remaining_time": "12:22:04"}
{"current_steps": 4400, "total_steps": 14930, "loss": 1.1193, "lr": 8.888928008154393e-06, "epoch": 1.4735432016075016, "percentage": 29.47, "elapsed_time": "5:06:55", "remaining_time": "12:14:31"}
{"current_steps": 4500, "total_steps": 14930, "loss": 1.0943, "lr": 8.81439620870698e-06, "epoch": 1.507032819825854, "percentage": 30.14, "elapsed_time": "5:13:39", "remaining_time": "12:06:58"}
{"current_steps": 4600, "total_steps": 14930, "loss": 1.0895, "lr": 8.737779432925682e-06, "epoch": 1.5405224380442064, "percentage": 30.81, "elapsed_time": "5:20:23", "remaining_time": "11:59:29"}
{"current_steps": 4700, "total_steps": 14930, "loss": 1.0894, "lr": 8.659119560089822e-06, "epoch": 1.5740120562625586, "percentage": 31.48, "elapsed_time": "5:27:07", "remaining_time": "11:52:01"}
{"current_steps": 4800, "total_steps": 14930, "loss": 1.0715, "lr": 8.578459586250235e-06, "epoch": 1.607501674480911, "percentage": 32.15, "elapsed_time": "5:33:51", "remaining_time": "11:44:34"}
{"current_steps": 4900, "total_steps": 14930, "loss": 1.0717, "lr": 8.495843600727313e-06, "epoch": 1.6409912926992631, "percentage": 32.82, "elapsed_time": "5:40:36", "remaining_time": "11:37:11"}
{"current_steps": 5000, "total_steps": 14930, "loss": 1.0706, "lr": 8.411316762011469e-06, "epoch": 1.6744809109176155, "percentage": 33.49, "elapsed_time": "5:47:20", "remaining_time": "11:29:48"}
{"current_steps": 5100, "total_steps": 14930, "loss": 1.0665, "lr": 8.324925273079176e-06, "epoch": 1.707970529135968, "percentage": 34.16, "elapsed_time": "5:55:06", "remaining_time": "11:24:27"}
{"current_steps": 5200, "total_steps": 14930, "loss": 1.0661, "lr": 8.236716356138098e-06, "epoch": 1.7414601473543203, "percentage": 34.83, "elapsed_time": "6:01:50", "remaining_time": "11:17:03"}
{"current_steps": 5300, "total_steps": 14930, "loss": 1.0636, "lr": 8.146738226815088e-06, "epoch": 1.7749497655726725, "percentage": 35.5, "elapsed_time": "6:08:34", "remaining_time": "11:09:42"}
{"current_steps": 5400, "total_steps": 14930, "loss": 1.0676, "lr": 8.055040067801172e-06, "epoch": 1.8084393837910246, "percentage": 36.17, "elapsed_time": "6:15:19", "remaining_time": "11:02:22"}
{"current_steps": 5500, "total_steps": 14930, "loss": 1.0612, "lr": 7.961672001967954e-06, "epoch": 1.841929002009377, "percentage": 36.84, "elapsed_time": "6:22:02", "remaining_time": "10:55:02"}
{"current_steps": 5600, "total_steps": 14930, "loss": 1.0561, "lr": 7.866685064970086e-06, "epoch": 1.8754186202277294, "percentage": 37.51, "elapsed_time": "6:28:46", "remaining_time": "10:47:43"}
{"current_steps": 5700, "total_steps": 14930, "loss": 1.0597, "lr": 7.770131177348806e-06, "epoch": 1.9089082384460818, "percentage": 38.18, "elapsed_time": "6:35:30", "remaining_time": "10:40:26"}
{"current_steps": 5800, "total_steps": 14930, "loss": 1.0603, "lr": 7.672063116151811e-06, "epoch": 1.942397856664434, "percentage": 38.85, "elapsed_time": "6:42:14", "remaining_time": "10:33:10"}
{"current_steps": 5900, "total_steps": 14930, "loss": 1.0555, "lr": 7.572534486084937e-06, "epoch": 1.9758874748827864, "percentage": 39.52, "elapsed_time": "6:48:58", "remaining_time": "10:25:56"}
{"current_steps": 5972, "total_steps": 14930, "eval_loss": NaN, "epoch": 2.0, "percentage": 40.0, "elapsed_time": "6:59:36", "remaining_time": "10:29:25"}
{"current_steps": 6000, "total_steps": 14930, "loss": 1.0571, "lr": 7.47159969021144e-06, "epoch": 2.0093770931011385, "percentage": 40.19, "elapsed_time": "7:01:29", "remaining_time": "10:27:19"}
{"current_steps": 6100, "total_steps": 14930, "loss": 1.0526, "lr": 7.369313900214897e-06, "epoch": 2.042866711319491, "percentage": 40.86, "elapsed_time": "7:09:12", "remaining_time": "10:21:17"}
{"current_steps": 6200, "total_steps": 14930, "loss": 1.0395, "lr": 7.265733026241967e-06, "epoch": 2.0763563295378433, "percentage": 41.53, "elapsed_time": "7:15:56", "remaining_time": "10:13:50"}
{"current_steps": 6300, "total_steps": 14930, "loss": 1.0189, "lr": 7.160913686341495e-06, "epoch": 2.1098459477561957, "percentage": 42.2, "elapsed_time": "7:22:41", "remaining_time": "10:06:24"}
{"current_steps": 6400, "total_steps": 14930, "loss": 1.0034, "lr": 7.054913175516698e-06, "epoch": 2.1433355659745477, "percentage": 42.87, "elapsed_time": "7:29:25", "remaining_time": "9:58:59"}
{"current_steps": 6500, "total_steps": 14930, "loss": 0.9789, "lr": 6.947789434407284e-06, "epoch": 2.1768251841929, "percentage": 43.54, "elapsed_time": "7:36:09", "remaining_time": "9:51:35"}
{"current_steps": 6600, "total_steps": 14930, "loss": 0.9673, "lr": 6.839601017618699e-06, "epoch": 2.2103148024112524, "percentage": 44.21, "elapsed_time": "7:42:53", "remaining_time": "9:44:13"}
{"current_steps": 6700, "total_steps": 14930, "loss": 0.9564, "lr": 6.730407061715752e-06, "epoch": 2.243804420629605, "percentage": 44.88, "elapsed_time": "7:49:37", "remaining_time": "9:36:52"}
{"current_steps": 6800, "total_steps": 14930, "loss": 0.9251, "lr": 6.620267252898148e-06, "epoch": 2.2772940388479572, "percentage": 45.55, "elapsed_time": "7:56:21", "remaining_time": "9:29:32"}
{"current_steps": 6900, "total_steps": 14930, "loss": 0.9125, "lr": 6.509241794375577e-06, "epoch": 2.3107836570663096, "percentage": 46.22, "elapsed_time": "8:03:05", "remaining_time": "9:22:12"}
{"current_steps": 7000, "total_steps": 14930, "loss": 0.8919, "lr": 6.3973913734602174e-06, "epoch": 2.3442732752846616, "percentage": 46.89, "elapsed_time": "8:09:48", "remaining_time": "9:14:53"}
{"current_steps": 7100, "total_steps": 14930, "loss": 0.879, "lr": 6.284777128394603e-06, "epoch": 2.377762893503014, "percentage": 47.56, "elapsed_time": "8:17:32", "remaining_time": "9:08:42"}
{"current_steps": 7200, "total_steps": 14930, "loss": 0.8625, "lr": 6.171460614933038e-06, "epoch": 2.4112525117213663, "percentage": 48.23, "elapsed_time": "8:24:16", "remaining_time": "9:01:24"}
{"current_steps": 7300, "total_steps": 14930, "loss": 0.8374, "lr": 6.057503772694761e-06, "epoch": 2.4447421299397187, "percentage": 48.89, "elapsed_time": "8:31:01", "remaining_time": "8:54:07"}
{"current_steps": 7400, "total_steps": 14930, "loss": 0.8394, "lr": 5.942968891307317e-06, "epoch": 2.478231748158071, "percentage": 49.56, "elapsed_time": "8:37:45", "remaining_time": "8:46:51"}
{"current_steps": 7500, "total_steps": 14930, "loss": 0.8131, "lr": 5.8279185763585975e-06, "epoch": 2.511721366376423, "percentage": 50.23, "elapsed_time": "8:44:29", "remaining_time": "8:39:36"}
{"current_steps": 7600, "total_steps": 14930, "loss": 0.805, "lr": 5.7124157151761795e-06, "epoch": 2.5452109845947755, "percentage": 50.9, "elapsed_time": "8:51:14", "remaining_time": "8:32:22"}
{"current_steps": 7700, "total_steps": 14930, "loss": 0.8083, "lr": 5.596523442452652e-06, "epoch": 2.578700602813128, "percentage": 51.57, "elapsed_time": "8:57:58", "remaining_time": "8:25:08"}
{"current_steps": 7800, "total_steps": 14930, "loss": 0.7845, "lr": 5.480305105735749e-06, "epoch": 2.6121902210314802, "percentage": 52.24, "elapsed_time": "9:04:42", "remaining_time": "8:17:55"}
{"current_steps": 7900, "total_steps": 14930, "loss": 0.7909, "lr": 5.363824230802127e-06, "epoch": 2.6456798392498326, "percentage": 52.91, "elapsed_time": "9:11:27", "remaining_time": "8:10:44"}
{"current_steps": 8000, "total_steps": 14930, "loss": 0.7963, "lr": 5.247144486933706e-06, "epoch": 2.679169457468185, "percentage": 53.58, "elapsed_time": "9:18:12", "remaining_time": "8:03:32"}
{"current_steps": 8100, "total_steps": 14930, "loss": 0.7835, "lr": 5.130329652115603e-06, "epoch": 2.7126590756865374, "percentage": 54.25, "elapsed_time": "9:25:55", "remaining_time": "7:57:11"}
{"current_steps": 8200, "total_steps": 14930, "loss": 0.7899, "lr": 5.013443578174608e-06, "epoch": 2.7461486939048894, "percentage": 54.92, "elapsed_time": "9:32:39", "remaining_time": "7:49:59"}
{"current_steps": 8300, "total_steps": 14930, "loss": 0.788, "lr": 4.8965501558773326e-06, "epoch": 2.7796383121232418, "percentage": 55.59, "elapsed_time": "9:39:23", "remaining_time": "7:42:49"}
{"current_steps": 8400, "total_steps": 14930, "loss": 0.7979, "lr": 4.779713280007051e-06, "epoch": 2.813127930341594, "percentage": 56.26, "elapsed_time": "9:46:08", "remaining_time": "7:35:38"}
{"current_steps": 8500, "total_steps": 14930, "loss": 0.7839, "lr": 4.6629968144383545e-06, "epoch": 2.8466175485599465, "percentage": 56.93, "elapsed_time": "9:52:51", "remaining_time": "7:28:28"}
{"current_steps": 8600, "total_steps": 14930, "loss": 0.7899, "lr": 4.546464557228699e-06, "epoch": 2.8801071667782985, "percentage": 57.6, "elapsed_time": "9:59:36", "remaining_time": "7:21:20"}
{"current_steps": 8700, "total_steps": 14930, "loss": 0.7959, "lr": 4.430180205745932e-06, "epoch": 2.913596784996651, "percentage": 58.27, "elapsed_time": "10:06:20", "remaining_time": "7:14:11"}
{"current_steps": 8800, "total_steps": 14930, "loss": 0.7956, "lr": 4.314207321850849e-06, "epoch": 2.9470864032150033, "percentage": 58.94, "elapsed_time": "10:13:05", "remaining_time": "7:07:04"}
{"current_steps": 8900, "total_steps": 14930, "loss": 0.7954, "lr": 4.198609297153831e-06, "epoch": 2.9805760214333556, "percentage": 59.61, "elapsed_time": "10:19:49", "remaining_time": "6:59:56"}
{"current_steps": 8958, "total_steps": 14930, "eval_loss": NaN, "epoch": 3.0, "percentage": 60.0, "elapsed_time": "10:29:31", "remaining_time": "6:59:41"}
{"current_steps": 9000, "total_steps": 14930, "loss": 0.7853, "lr": 4.083449318364527e-06, "epoch": 3.014065639651708, "percentage": 60.28, "elapsed_time": "10:32:21", "remaining_time": "6:56:39"}
{"current_steps": 9100, "total_steps": 14930, "loss": 0.7987, "lr": 3.968790332753555e-06, "epoch": 3.0475552578700604, "percentage": 60.95, "elapsed_time": "10:40:05", "remaining_time": "6:50:04"}
{"current_steps": 9200, "total_steps": 14930, "loss": 0.7732, "lr": 3.8546950137450656e-06, "epoch": 3.081044876088413, "percentage": 61.62, "elapsed_time": "10:46:49", "remaining_time": "6:42:51"}
{"current_steps": 9300, "total_steps": 14930, "loss": 0.7563, "lr": 3.7412257266590007e-06, "epoch": 3.1145344943067648, "percentage": 62.29, "elapsed_time": "10:53:34", "remaining_time": "6:35:39"}
{"current_steps": 9400, "total_steps": 14930, "loss": 0.739, "lr": 3.62844449462176e-06, "epoch": 3.148024112525117, "percentage": 62.96, "elapsed_time": "11:00:18", "remaining_time": "6:28:27"}
{"current_steps": 9500, "total_steps": 14930, "loss": 0.7228, "lr": 3.5164129646639204e-06, "epoch": 3.1815137307434695, "percentage": 63.63, "elapsed_time": "11:07:02", "remaining_time": "6:21:16"}
{"current_steps": 9600, "total_steps": 14930, "loss": 0.7088, "lr": 3.4051923740235205e-06, "epoch": 3.215003348961822, "percentage": 64.3, "elapsed_time": "11:13:46", "remaining_time": "6:14:05"}
{"current_steps": 9700, "total_steps": 14930, "loss": 0.6959, "lr": 3.2948435166733506e-06, "epoch": 3.2484929671801743, "percentage": 64.97, "elapsed_time": "11:20:30", "remaining_time": "6:06:54"}
{"current_steps": 9800, "total_steps": 14930, "loss": 0.6686, "lr": 3.1854267100905344e-06, "epoch": 3.2819825853985263, "percentage": 65.64, "elapsed_time": "11:27:14", "remaining_time": "5:59:45"}
{"current_steps": 9900, "total_steps": 14930, "loss": 0.6606, "lr": 3.0770017622865523e-06, "epoch": 3.3154722036168787, "percentage": 66.31, "elapsed_time": "11:33:58", "remaining_time": "5:52:35"}
{"current_steps": 10000, "total_steps": 14930, "loss": 0.6448, "lr": 2.9696279391157663e-06, "epoch": 3.348961821835231, "percentage": 66.98, "elapsed_time": "11:40:42", "remaining_time": "5:45:27"}
{"current_steps": 10100, "total_steps": 14930, "loss": 0.6331, "lr": 2.8633639318802685e-06, "epoch": 3.3824514400535834, "percentage": 67.65, "elapsed_time": "11:48:25", "remaining_time": "5:38:46"}
{"current_steps": 10200, "total_steps": 14930, "loss": 0.6145, "lr": 2.758267825248798e-06, "epoch": 3.415941058271936, "percentage": 68.32, "elapsed_time": "11:55:10", "remaining_time": "5:31:38"}
{"current_steps": 10300, "total_steps": 14930, "loss": 0.6065, "lr": 2.6543970655072514e-06, "epoch": 3.4494306764902882, "percentage": 68.99, "elapsed_time": "12:01:55", "remaining_time": "5:24:30"}
{"current_steps": 10400, "total_steps": 14930, "loss": 0.6009, "lr": 2.5518084291581163e-06, "epoch": 3.48292029470864, "percentage": 69.66, "elapsed_time": "12:08:41", "remaining_time": "5:17:23"}
{"current_steps": 10500, "total_steps": 14930, "loss": 0.5847, "lr": 2.450557991886039e-06, "epoch": 3.5164099129269926, "percentage": 70.33, "elapsed_time": "12:15:25", "remaining_time": "5:10:16"}
{"current_steps": 10600, "total_steps": 14930, "loss": 0.578, "lr": 2.350701097906447e-06, "epoch": 3.549899531145345, "percentage": 71.0, "elapsed_time": "12:22:09", "remaining_time": "5:03:10"}
{"current_steps": 10700, "total_steps": 14930, "loss": 0.5838, "lr": 2.252292329714012e-06, "epoch": 3.5833891493636973, "percentage": 71.67, "elapsed_time": "12:28:53", "remaining_time": "4:56:03"}
{"current_steps": 10800, "total_steps": 14930, "loss": 0.5653, "lr": 2.155385478247455e-06, "epoch": 3.6168787675820493, "percentage": 72.34, "elapsed_time": "12:35:37", "remaining_time": "4:48:57"}
{"current_steps": 10900, "total_steps": 14930, "loss": 0.5723, "lr": 2.0600335134870415e-06, "epoch": 3.6503683858004017, "percentage": 73.01, "elapsed_time": "12:42:22", "remaining_time": "4:41:52"}
{"current_steps": 11000, "total_steps": 14930, "loss": 0.5762, "lr": 1.9662885555008055e-06, "epoch": 3.683858004018754, "percentage": 73.68, "elapsed_time": "12:49:06", "remaining_time": "4:34:46"}
{"current_steps": 11100, "total_steps": 14930, "loss": 0.5715, "lr": 1.8742018459553551e-06, "epoch": 3.7173476222371065, "percentage": 74.35, "elapsed_time": "12:56:50", "remaining_time": "4:28:02"}
{"current_steps": 11200, "total_steps": 14930, "loss": 0.577, "lr": 1.7838237201067976e-06, "epoch": 3.750837240455459, "percentage": 75.02, "elapsed_time": "13:03:34", "remaining_time": "4:20:57"}
{"current_steps": 11300, "total_steps": 14930, "loss": 0.5825, "lr": 1.695203579287134e-06, "epoch": 3.7843268586738112, "percentage": 75.69, "elapsed_time": "13:10:18", "remaining_time": "4:13:52"}
{"current_steps": 11400, "total_steps": 14930, "loss": 0.5871, "lr": 1.6083898639011402e-06, "epoch": 3.8178164768921636, "percentage": 76.36, "elapsed_time": "13:17:03", "remaining_time": "4:06:48"}
{"current_steps": 11500, "total_steps": 14930, "loss": 0.5805, "lr": 1.5234300269484848e-06, "epoch": 3.8513060951105156, "percentage": 77.03, "elapsed_time": "13:23:47", "remaining_time": "3:59:44"}
{"current_steps": 11600, "total_steps": 14930, "loss": 0.5859, "lr": 1.440370508085589e-06, "epoch": 3.884795713328868, "percentage": 77.7, "elapsed_time": "13:30:31", "remaining_time": "3:52:40"}
{"current_steps": 11700, "total_steps": 14930, "loss": 0.5961, "lr": 1.3592567082413683e-06, "epoch": 3.9182853315472204, "percentage": 78.37, "elapsed_time": "13:37:15", "remaining_time": "3:45:37"}
{"current_steps": 11800, "total_steps": 14930, "loss": 0.6098, "lr": 1.2801329648007648e-06, "epoch": 3.9517749497655728, "percentage": 79.04, "elapsed_time": "13:44:00", "remaining_time": "3:38:34"}
{"current_steps": 11900, "total_steps": 14930, "loss": 0.5992, "lr": 1.203042527369611e-06, "epoch": 3.985264567983925, "percentage": 79.71, "elapsed_time": "13:50:43", "remaining_time": "3:31:31"}
{"current_steps": 11944, "total_steps": 14930, "eval_loss": NaN, "epoch": 4.0, "percentage": 80.0, "elapsed_time": "13:59:30", "remaining_time": "3:29:52"}
{"current_steps": 12000, "total_steps": 14930, "loss": 0.5984, "lr": 1.1280275341340919e-06, "epoch": 4.018754186202277, "percentage": 80.38, "elapsed_time": "14:03:15", "remaining_time": "3:25:53"}
{"current_steps": 12100, "total_steps": 14930, "loss": 0.6147, "lr": 1.0551289888277e-06, "epoch": 4.0522438044206295, "percentage": 81.04, "elapsed_time": "14:10:59", "remaining_time": "3:19:01"}
{"current_steps": 12200, "total_steps": 14930, "loss": 0.5882, "lr": 9.843867383183065e-07, "epoch": 4.085733422638982, "percentage": 81.71, "elapsed_time": "14:17:43", "remaining_time": "3:11:55"}
{"current_steps": 12300, "total_steps": 14930, "loss": 0.5785, "lr": 9.158394508275764e-07, "epoch": 4.119223040857334, "percentage": 82.38, "elapsed_time": "14:24:27", "remaining_time": "3:04:50"}
{"current_steps": 12400, "total_steps": 14930, "loss": 0.568, "lr": 8.495245947946428e-07, "epoch": 4.152712659075687, "percentage": 83.05, "elapsed_time": "14:31:10", "remaining_time": "2:57:44"}
{"current_steps": 12500, "total_steps": 14930, "loss": 0.5548, "lr": 7.85478418395586e-07, "epoch": 4.186202277294039, "percentage": 83.72, "elapsed_time": "14:37:54", "remaining_time": "2:50:39"}
{"current_steps": 12600, "total_steps": 14930, "loss": 0.5491, "lr": 7.237359297299213e-07, "epoch": 4.219691895512391, "percentage": 84.39, "elapsed_time": "14:44:38", "remaining_time": "2:43:35"}
{"current_steps": 12700, "total_steps": 14930, "loss": 0.5344, "lr": 6.643308776849211e-07, "epoch": 4.253181513730744, "percentage": 85.06, "elapsed_time": "14:51:23", "remaining_time": "2:36:31"}
{"current_steps": 12800, "total_steps": 14930, "loss": 0.5205, "lr": 6.07295733488234e-07, "epoch": 4.286671131949095, "percentage": 85.73, "elapsed_time": "14:58:06", "remaining_time": "2:29:27"}
{"current_steps": 12900, "total_steps": 14930, "loss": 0.5143, "lr": 5.526616729588719e-07, "epoch": 4.320160750167448, "percentage": 86.4, "elapsed_time": "15:04:50", "remaining_time": "2:22:23"}
{"current_steps": 13000, "total_steps": 14930, "loss": 0.5027, "lr": 5.00458559466292e-07, "epoch": 4.3536503683858, "percentage": 87.07, "elapsed_time": "15:11:34", "remaining_time": "2:15:20"}
{"current_steps": 13100, "total_steps": 14930, "loss": 0.498, "lr": 4.507149276068562e-07, "epoch": 4.3871399866041525, "percentage": 87.74, "elapsed_time": "15:19:17", "remaining_time": "2:08:25"}
{"current_steps": 13200, "total_steps": 14930, "loss": 0.4925, "lr": 4.0345796760662247e-07, "epoch": 4.420629604822505, "percentage": 88.41, "elapsed_time": "15:26:01", "remaining_time": "2:01:21"}
{"current_steps": 13300, "total_steps": 14930, "loss": 0.4893, "lr": 3.587135104589706e-07, "epoch": 4.454119223040857, "percentage": 89.08, "elapsed_time": "15:32:46", "remaining_time": "1:54:19"}
{"current_steps": 13400, "total_steps": 14930, "loss": 0.4844, "lr": 3.16506013805194e-07, "epoch": 4.48760884125921, "percentage": 89.75, "elapsed_time": "15:39:30", "remaining_time": "1:47:16"}
{"current_steps": 13500, "total_steps": 14930, "loss": 0.472, "lr": 2.7685854856577934e-07, "epoch": 4.521098459477562, "percentage": 90.42, "elapsed_time": "15:46:14", "remaining_time": "1:40:13"}
{"current_steps": 13600, "total_steps": 14930, "loss": 0.4774, "lr": 2.3979278632967507e-07, "epoch": 4.5545880776959144, "percentage": 91.09, "elapsed_time": "15:52:59", "remaining_time": "1:33:11"}
{"current_steps": 13700, "total_steps": 14930, "loss": 0.4786, "lr": 2.0532898750844633e-07, "epoch": 4.588077695914267, "percentage": 91.76, "elapsed_time": "15:59:43", "remaining_time": "1:26:09"}
{"current_steps": 13800, "total_steps": 14930, "loss": 0.4786, "lr": 1.734859902617886e-07, "epoch": 4.621567314132619, "percentage": 92.43, "elapsed_time": "16:06:28", "remaining_time": "1:19:08"}
{"current_steps": 13900, "total_steps": 14930, "loss": 0.4882, "lr": 1.4428120020045122e-07, "epoch": 4.655056932350972, "percentage": 93.1, "elapsed_time": "16:13:12", "remaining_time": "1:12:06"}
{"current_steps": 14000, "total_steps": 14930, "loss": 0.4816, "lr": 1.1773058087221068e-07, "epoch": 4.688546550569323, "percentage": 93.77, "elapsed_time": "16:19:56", "remaining_time": "1:05:05"}
{"current_steps": 14100, "total_steps": 14930, "loss": 0.482, "lr": 9.384864503607871e-08, "epoch": 4.7220361687876755, "percentage": 94.44, "elapsed_time": "16:27:40", "remaining_time": "0:58:08"}
{"current_steps": 14200, "total_steps": 14930, "loss": 0.4989, "lr": 7.264844672952299e-08, "epoch": 4.755525787006028, "percentage": 95.11, "elapsed_time": "16:34:24", "remaining_time": "0:51:07"}
{"current_steps": 14300, "total_steps": 14930, "loss": 0.5135, "lr": 5.4141574133037555e-08, "epoch": 4.78901540522438, "percentage": 95.78, "elapsed_time": "16:41:08", "remaining_time": "0:44:06"}
{"current_steps": 14400, "total_steps": 14930, "loss": 0.5077, "lr": 3.8338143235959746e-08, "epoch": 4.822505023442733, "percentage": 96.45, "elapsed_time": "16:47:53", "remaining_time": "0:37:05"}
{"current_steps": 14500, "total_steps": 14930, "loss": 0.5146, "lr": 2.5246792306999334e-08, "epoch": 4.855994641661085, "percentage": 97.12, "elapsed_time": "16:54:37", "remaining_time": "0:30:05"}
{"current_steps": 14600, "total_steps": 14930, "loss": 0.5236, "lr": 1.4874677172497243e-08, "epoch": 4.8894842598794375, "percentage": 97.79, "elapsed_time": "17:01:21", "remaining_time": "0:23:05"}
{"current_steps": 14700, "total_steps": 14930, "loss": 0.5404, "lr": 7.2274673050010124e-09, "epoch": 4.92297387809779, "percentage": 98.46, "elapsed_time": "17:08:05", "remaining_time": "0:16:05"}
{"current_steps": 14800, "total_steps": 14930, "loss": 0.5488, "lr": 2.309342724287622e-09, "epoch": 4.956463496316142, "percentage": 99.13, "elapsed_time": "17:14:50", "remaining_time": "0:09:05"}
{"current_steps": 14900, "total_steps": 14930, "loss": 0.5494, "lr": 1.229917125389335e-10, "epoch": 4.989953114534495, "percentage": 99.8, "elapsed_time": "17:21:34", "remaining_time": "0:02:05"}
{"current_steps": 14930, "total_steps": 14930, "eval_loss": NaN, "epoch": 5.0, "percentage": 100.0, "elapsed_time": "17:29:23", "remaining_time": "0:00:00"}
{"current_steps": 14930, "total_steps": 14930, "epoch": 5.0, "percentage": 100.0, "elapsed_time": "17:30:21", "remaining_time": "0:00:00"}
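
Each line of `trainer_log.jsonl` is a self-contained JSON record (eval checkpoints carry `eval_loss` instead of `loss`), so curves like the bundled `training_loss.png` can be regenerated directly from it. A minimal sketch, assuming the file is local:

```python
# Sketch: rebuild the training-loss curve from trainer_log.jsonl.
import json
import matplotlib.pyplot as plt

steps, losses = [], []
with open("trainer_log.jsonl") as f:
    for line in f:
        rec = json.loads(line)  # json.loads tolerates the NaN eval entries
        if "loss" in rec:       # skip eval-only and final summary records
            steps.append(rec["current_steps"])
            losses.append(rec["loss"])

plt.plot(steps, losses)
plt.xlabel("step")
plt.ylabel("training loss")
plt.savefig("training_loss.png")
```
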
trainer_state.json
ADDED
@@ -0,0 +1,1125 @@
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "eval_steps": 2986,
  "global_step": 14930,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.033489618218352314,
      "grad_norm": 27.663120029291747,
      "learning_rate": 6.697923643670463e-07,
      "loss": 11.7393,
      "step": 100
    },
    {
      "epoch": 0.06697923643670463,
      "grad_norm": 1.7320339885689573,
      "learning_rate": 1.3395847287340927e-06,
      "loss": 3.297,
      "step": 200
    },
    {
      "epoch": 0.10046885465505694,
      "grad_norm": 1.2696699900839306,
      "learning_rate": 2.0093770931011387e-06,
      "loss": 1.8384,
      "step": 300
    },
    {
      "epoch": 0.13395847287340926,
      "grad_norm": 1.1644334409082224,
      "learning_rate": 2.6791694574681854e-06,
      "loss": 1.6728,
      "step": 400
    },
    {
      "epoch": 0.16744809109176156,
      "grad_norm": 1.272695149119617,
      "learning_rate": 3.3489618218352316e-06,
      "loss": 1.5883,
      "step": 500
    },
    {
      "epoch": 0.20093770931011387,
      "grad_norm": 1.2497604240608484,
      "learning_rate": 4.018754186202277e-06,
      "loss": 1.5399,
      "step": 600
    },
    {
      "epoch": 0.23442732752846618,
      "grad_norm": 1.1521160041280882,
      "learning_rate": 4.688546550569324e-06,
      "loss": 1.5214,
      "step": 700
    },
    {
      "epoch": 0.2679169457468185,
      "grad_norm": 1.0610261246972188,
      "learning_rate": 5.358338914936371e-06,
      "loss": 1.4822,
      "step": 800
    },
    {
      "epoch": 0.3014065639651708,
      "grad_norm": 1.1385854301492127,
      "learning_rate": 6.028131279303416e-06,
      "loss": 1.4623,
      "step": 900
    },
    {
      "epoch": 0.33489618218352313,
      "grad_norm": 1.04201570658625,
      "learning_rate": 6.697923643670463e-06,
      "loss": 1.4408,
      "step": 1000
    },
    {
      "epoch": 0.3683858004018754,
      "grad_norm": 1.0568444189378532,
      "learning_rate": 7.3677160080375086e-06,
      "loss": 1.4378,
      "step": 1100
    },
    {
      "epoch": 0.40187541862022774,
      "grad_norm": 1.088346568816493,
      "learning_rate": 8.037508372404555e-06,
      "loss": 1.4189,
      "step": 1200
    },
    {
      "epoch": 0.43536503683858,
      "grad_norm": 1.0714826983472352,
      "learning_rate": 8.707300736771601e-06,
      "loss": 1.4098,
      "step": 1300
    },
    {
      "epoch": 0.46885465505693236,
      "grad_norm": 1.0415739456028938,
      "learning_rate": 9.377093101138647e-06,
      "loss": 1.4011,
      "step": 1400
    },
    {
      "epoch": 0.5023442732752846,
      "grad_norm": 1.0789186294154542,
      "learning_rate": 9.999993303758581e-06,
      "loss": 1.3933,
      "step": 1500
    },
    {
      "epoch": 0.535833891493637,
      "grad_norm": 1.0509262824198378,
      "learning_rate": 9.998435483941776e-06,
      "loss": 1.3828,
      "step": 1600
    },
    {
      "epoch": 0.5693235097119893,
      "grad_norm": 1.0764796757312198,
      "learning_rate": 9.994145483428403e-06,
      "loss": 1.3772,
      "step": 1700
    },
    {
      "epoch": 0.6028131279303416,
      "grad_norm": 1.047153842290928,
      "learning_rate": 9.987125647163527e-06,
      "loss": 1.3674,
      "step": 1800
    },
    {
      "epoch": 0.6363027461486939,
      "grad_norm": 0.9188670491033151,
      "learning_rate": 9.977379812240013e-06,
      "loss": 1.3538,
      "step": 1900
    },
    {
      "epoch": 0.6697923643670463,
      "grad_norm": 1.004701344911835,
      "learning_rate": 9.964913305801151e-06,
      "loss": 1.3566,
      "step": 2000
    },
    {
      "epoch": 0.7032819825853985,
      "grad_norm": 1.1760354727564355,
      "learning_rate": 9.9497329421288e-06,
      "loss": 1.3473,
      "step": 2100
    },
    {
      "epoch": 0.7367716008037508,
      "grad_norm": 0.9440290166066829,
      "learning_rate": 9.931847018918654e-06,
      "loss": 1.35,
      "step": 2200
    },
    {
      "epoch": 0.7702612190221031,
      "grad_norm": 0.8730032484094693,
      "learning_rate": 9.911265312744663e-06,
      "loss": 1.3381,
      "step": 2300
    },
    {
      "epoch": 0.8037508372404555,
      "grad_norm": 1.0757201534073755,
      "learning_rate": 9.887999073715083e-06,
      "loss": 1.3409,
      "step": 2400
    },
    {
      "epoch": 0.8372404554588078,
      "grad_norm": 0.8841599798410706,
      "learning_rate": 9.86206101932309e-06,
      "loss": 1.333,
      "step": 2500
    },
    {
      "epoch": 0.87073007367716,
      "grad_norm": 0.9378532515897725,
      "learning_rate": 9.833465327495307e-06,
      "loss": 1.3195,
      "step": 2600
    },
    {
      "epoch": 0.9042196918955124,
      "grad_norm": 1.2091982749118577,
      "learning_rate": 9.802227628842045e-06,
      "loss": 1.3183,
      "step": 2700
    },
    {
      "epoch": 0.9377093101138647,
      "grad_norm": 1.0171797419584845,
      "learning_rate": 9.7683649981135e-06,
      "loss": 1.3145,
      "step": 2800
    },
    {
      "epoch": 0.971198928332217,
      "grad_norm": 0.9664734856676821,
      "learning_rate": 9.731895944866576e-06,
      "loss": 1.3178,
      "step": 2900
    },
    {
      "epoch": 1.0,
      "eval_loss": NaN,
      "eval_runtime": 348.649,
      "eval_samples_per_second": 45.673,
      "eval_steps_per_second": 1.428,
      "step": 2986
    },
    {
      "epoch": 1.0046885465505693,
      "grad_norm": 1.0415846298834508,
      "learning_rate": 9.69284040334742e-06,
      "loss": 1.3204,
      "step": 3000
    },
    {
      "epoch": 1.0381781647689217,
      "grad_norm": 0.8757930482053028,
      "learning_rate": 9.651219721595235e-06,
      "loss": 1.31,
      "step": 3100
    },
    {
      "epoch": 1.0716677829872738,
      "grad_norm": 0.9287401756438767,
      "learning_rate": 9.607056649773266e-06,
      "loss": 1.2948,
      "step": 3200
    },
    {
      "epoch": 1.1051574012056262,
      "grad_norm": 0.966772568640745,
      "learning_rate": 9.56037532773342e-06,
      "loss": 1.2819,
      "step": 3300
    },
    {
      "epoch": 1.1386470194239786,
      "grad_norm": 0.9981440658348595,
      "learning_rate": 9.511201271821235e-06,
      "loss": 1.2655,
      "step": 3400
    },
    {
      "epoch": 1.1721366376423308,
      "grad_norm": 1.0546997584325322,
      "learning_rate": 9.459561360928472e-06,
      "loss": 1.2451,
      "step": 3500
    },
    {
      "epoch": 1.2056262558606832,
      "grad_norm": 0.8994951932737885,
      "learning_rate": 9.405483821800912e-06,
      "loss": 1.2355,
      "step": 3600
    },
    {
      "epoch": 1.2391158740790356,
      "grad_norm": 1.1226364755523701,
      "learning_rate": 9.348998213609416e-06,
      "loss": 1.2295,
      "step": 3700
    },
    {
      "epoch": 1.2726054922973877,
      "grad_norm": 1.0157346646837568,
      "learning_rate": 9.29013541179268e-06,
      "loss": 1.2011,
      "step": 3800
    },
    {
      "epoch": 1.3060951105157401,
      "grad_norm": 1.0670758632556911,
      "learning_rate": 9.228927591180484e-06,
      "loss": 1.1906,
      "step": 3900
    },
    {
      "epoch": 1.3395847287340925,
      "grad_norm": 0.9528782862122593,
      "learning_rate": 9.165408208406703e-06,
      "loss": 1.1715,
      "step": 4000
    },
    {
      "epoch": 1.3730743469524447,
      "grad_norm": 0.9576902845349418,
      "learning_rate": 9.099611983621684e-06,
      "loss": 1.1605,
      "step": 4100
    },
    {
      "epoch": 1.406563965170797,
      "grad_norm": 1.271234164945301,
      "learning_rate": 9.03157488151394e-06,
      "loss": 1.1425,
      "step": 4200
    },
    {
      "epoch": 1.4400535833891492,
      "grad_norm": 1.1036926365187665,
      "learning_rate": 8.961334091651618e-06,
      "loss": 1.1233,
      "step": 4300
    },
    {
      "epoch": 1.4735432016075016,
      "grad_norm": 0.9973365902952639,
      "learning_rate": 8.888928008154393e-06,
      "loss": 1.1193,
      "step": 4400
    },
    {
      "epoch": 1.507032819825854,
      "grad_norm": 1.0008322426572456,
      "learning_rate": 8.81439620870698e-06,
      "loss": 1.0943,
      "step": 4500
    },
    {
      "epoch": 1.5405224380442064,
      "grad_norm": 1.035859311044444,
      "learning_rate": 8.737779432925682e-06,
      "loss": 1.0895,
      "step": 4600
    },
    {
      "epoch": 1.5740120562625586,
      "grad_norm": 1.00625654318314,
      "learning_rate": 8.659119560089822e-06,
|
345 |
+
"loss": 1.0894,
|
346 |
+
"step": 4700
|
347 |
+
},
|
348 |
+
{
|
349 |
+
"epoch": 1.607501674480911,
|
350 |
+
"grad_norm": 1.0315606204038301,
|
351 |
+
"learning_rate": 8.578459586250235e-06,
|
352 |
+
"loss": 1.0715,
|
353 |
+
"step": 4800
|
354 |
+
},
|
355 |
+
{
|
356 |
+
"epoch": 1.6409912926992631,
|
357 |
+
"grad_norm": 1.0926346917432537,
|
358 |
+
"learning_rate": 8.495843600727313e-06,
|
359 |
+
"loss": 1.0717,
|
360 |
+
"step": 4900
|
361 |
+
},
|
362 |
+
{
|
363 |
+
"epoch": 1.6744809109176155,
|
364 |
+
"grad_norm": 1.0612821467128837,
|
365 |
+
"learning_rate": 8.411316762011469e-06,
|
366 |
+
"loss": 1.0706,
|
367 |
+
"step": 5000
|
368 |
+
},
|
369 |
+
{
|
370 |
+
"epoch": 1.707970529135968,
|
371 |
+
"grad_norm": 1.016164489585248,
|
372 |
+
"learning_rate": 8.324925273079176e-06,
|
373 |
+
"loss": 1.0665,
|
374 |
+
"step": 5100
|
375 |
+
},
|
376 |
+
{
|
377 |
+
"epoch": 1.7414601473543203,
|
378 |
+
"grad_norm": 1.0371020643635414,
|
379 |
+
"learning_rate": 8.236716356138098e-06,
|
380 |
+
"loss": 1.0661,
|
381 |
+
"step": 5200
|
382 |
+
},
|
383 |
+
{
|
384 |
+
"epoch": 1.7749497655726725,
|
385 |
+
"grad_norm": 1.0416961084839862,
|
386 |
+
"learning_rate": 8.146738226815088e-06,
|
387 |
+
"loss": 1.0636,
|
388 |
+
"step": 5300
|
389 |
+
},
|
390 |
+
{
|
391 |
+
"epoch": 1.8084393837910246,
|
392 |
+
"grad_norm": 0.9790282258830432,
|
393 |
+
"learning_rate": 8.055040067801172e-06,
|
394 |
+
"loss": 1.0676,
|
395 |
+
"step": 5400
|
396 |
+
},
|
397 |
+
{
|
398 |
+
"epoch": 1.841929002009377,
|
399 |
+
"grad_norm": 1.0172124964539753,
|
400 |
+
"learning_rate": 7.961672001967954e-06,
|
401 |
+
"loss": 1.0612,
|
402 |
+
"step": 5500
|
403 |
+
},
|
404 |
+
{
|
405 |
+
"epoch": 1.8754186202277294,
|
406 |
+
"grad_norm": 1.1307997119049311,
|
407 |
+
"learning_rate": 7.866685064970086e-06,
|
408 |
+
"loss": 1.0561,
|
409 |
+
"step": 5600
|
410 |
+
},
|
411 |
+
{
|
412 |
+
"epoch": 1.9089082384460818,
|
413 |
+
"grad_norm": 1.0443307684727732,
|
414 |
+
"learning_rate": 7.770131177348806e-06,
|
415 |
+
"loss": 1.0597,
|
416 |
+
"step": 5700
|
417 |
+
},
|
418 |
+
{
|
419 |
+
"epoch": 1.942397856664434,
|
420 |
+
"grad_norm": 1.0764967903414877,
|
421 |
+
"learning_rate": 7.672063116151811e-06,
|
422 |
+
"loss": 1.0603,
|
423 |
+
"step": 5800
|
424 |
+
},
|
425 |
+
{
|
426 |
+
"epoch": 1.9758874748827864,
|
427 |
+
"grad_norm": 1.1410122429288658,
|
428 |
+
"learning_rate": 7.572534486084937e-06,
|
429 |
+
"loss": 1.0555,
|
430 |
+
"step": 5900
|
431 |
+
},
|
432 |
+
{
|
433 |
+
"epoch": 2.0,
|
434 |
+
"eval_loss": NaN,
|
435 |
+
"eval_runtime": 348.1208,
|
436 |
+
"eval_samples_per_second": 45.743,
|
437 |
+
"eval_steps_per_second": 1.431,
|
438 |
+
"step": 5972
|
439 |
+
},
|
440 |
+
{
|
441 |
+
"epoch": 2.0093770931011385,
|
442 |
+
"grad_norm": 1.0764883927956457,
|
443 |
+
"learning_rate": 7.47159969021144e-06,
|
444 |
+
"loss": 1.0571,
|
445 |
+
"step": 6000
|
446 |
+
},
|
447 |
+
{
|
448 |
+
"epoch": 2.042866711319491,
|
449 |
+
"grad_norm": 1.1019967079045427,
|
450 |
+
"learning_rate": 7.369313900214897e-06,
|
451 |
+
"loss": 1.0526,
|
452 |
+
"step": 6100
|
453 |
+
},
|
454 |
+
{
|
455 |
+
"epoch": 2.0763563295378433,
|
456 |
+
"grad_norm": 1.09355151149325,
|
457 |
+
"learning_rate": 7.265733026241967e-06,
|
458 |
+
"loss": 1.0395,
|
459 |
+
"step": 6200
|
460 |
+
},
|
461 |
+
{
|
462 |
+
"epoch": 2.1098459477561957,
|
463 |
+
"grad_norm": 1.0207706582548508,
|
464 |
+
"learning_rate": 7.160913686341495e-06,
|
465 |
+
"loss": 1.0189,
|
466 |
+
"step": 6300
|
467 |
+
},
|
468 |
+
{
|
469 |
+
"epoch": 2.1433355659745477,
|
470 |
+
"grad_norm": 1.2991784934719508,
|
471 |
+
"learning_rate": 7.054913175516698e-06,
|
472 |
+
"loss": 1.0034,
|
473 |
+
"step": 6400
|
474 |
+
},
|
475 |
+
{
|
476 |
+
"epoch": 2.1768251841929,
|
477 |
+
"grad_norm": 1.1127907199675022,
|
478 |
+
"learning_rate": 6.947789434407284e-06,
|
479 |
+
"loss": 0.9789,
|
480 |
+
"step": 6500
|
481 |
+
},
|
482 |
+
{
|
483 |
+
"epoch": 2.2103148024112524,
|
484 |
+
"grad_norm": 1.0987386576171831,
|
485 |
+
"learning_rate": 6.839601017618699e-06,
|
486 |
+
"loss": 0.9673,
|
487 |
+
"step": 6600
|
488 |
+
},
|
489 |
+
{
|
490 |
+
"epoch": 2.243804420629605,
|
491 |
+
"grad_norm": 1.1749268268222908,
|
492 |
+
"learning_rate": 6.730407061715752e-06,
|
493 |
+
"loss": 0.9564,
|
494 |
+
"step": 6700
|
495 |
+
},
|
496 |
+
{
|
497 |
+
"epoch": 2.2772940388479572,
|
498 |
+
"grad_norm": 1.2656062020023109,
|
499 |
+
"learning_rate": 6.620267252898148e-06,
|
500 |
+
"loss": 0.9251,
|
501 |
+
"step": 6800
|
502 |
+
},
|
503 |
+
{
|
504 |
+
"epoch": 2.3107836570663096,
|
505 |
+
"grad_norm": 1.2775085805331419,
|
506 |
+
"learning_rate": 6.509241794375577e-06,
|
507 |
+
"loss": 0.9125,
|
508 |
+
"step": 6900
|
509 |
+
},
|
510 |
+
{
|
511 |
+
"epoch": 2.3442732752846616,
|
512 |
+
"grad_norm": 1.3173880822264534,
|
513 |
+
"learning_rate": 6.3973913734602174e-06,
|
514 |
+
"loss": 0.8919,
|
515 |
+
"step": 7000
|
516 |
+
},
|
517 |
+
{
|
518 |
+
"epoch": 2.377762893503014,
|
519 |
+
"grad_norm": 1.2455502782297214,
|
520 |
+
"learning_rate": 6.284777128394603e-06,
|
521 |
+
"loss": 0.879,
|
522 |
+
"step": 7100
|
523 |
+
},
|
524 |
+
{
|
525 |
+
"epoch": 2.4112525117213663,
|
526 |
+
"grad_norm": 1.332442255586022,
|
527 |
+
"learning_rate": 6.171460614933038e-06,
|
528 |
+
"loss": 0.8625,
|
529 |
+
"step": 7200
|
530 |
+
},
|
531 |
+
{
|
532 |
+
"epoch": 2.4447421299397187,
|
533 |
+
"grad_norm": 1.3507817687995796,
|
534 |
+
"learning_rate": 6.057503772694761e-06,
|
535 |
+
"loss": 0.8374,
|
536 |
+
"step": 7300
|
537 |
+
},
|
538 |
+
{
|
539 |
+
"epoch": 2.478231748158071,
|
540 |
+
"grad_norm": 1.2079298830395293,
|
541 |
+
"learning_rate": 5.942968891307317e-06,
|
542 |
+
"loss": 0.8394,
|
543 |
+
"step": 7400
|
544 |
+
},
|
545 |
+
{
|
546 |
+
"epoch": 2.511721366376423,
|
547 |
+
"grad_norm": 1.2858100084464055,
|
548 |
+
"learning_rate": 5.8279185763585975e-06,
|
549 |
+
"loss": 0.8131,
|
550 |
+
"step": 7500
|
551 |
+
},
|
552 |
+
{
|
553 |
+
"epoch": 2.5452109845947755,
|
554 |
+
"grad_norm": 1.50200071509283,
|
555 |
+
"learning_rate": 5.7124157151761795e-06,
|
556 |
+
"loss": 0.805,
|
557 |
+
"step": 7600
|
558 |
+
},
|
559 |
+
{
|
560 |
+
"epoch": 2.578700602813128,
|
561 |
+
"grad_norm": 1.7131605578882496,
|
562 |
+
"learning_rate": 5.596523442452652e-06,
|
563 |
+
"loss": 0.8083,
|
564 |
+
"step": 7700
|
565 |
+
},
|
566 |
+
{
|
567 |
+
"epoch": 2.6121902210314802,
|
568 |
+
"grad_norm": 1.549866945076186,
|
569 |
+
"learning_rate": 5.480305105735749e-06,
|
570 |
+
"loss": 0.7845,
|
571 |
+
"step": 7800
|
572 |
+
},
|
573 |
+
{
|
574 |
+
"epoch": 2.6456798392498326,
|
575 |
+
"grad_norm": 1.3213984004712975,
|
576 |
+
"learning_rate": 5.363824230802127e-06,
|
577 |
+
"loss": 0.7909,
|
578 |
+
"step": 7900
|
579 |
+
},
|
580 |
+
{
|
581 |
+
"epoch": 2.679169457468185,
|
582 |
+
"grad_norm": 1.4279995035623863,
|
583 |
+
"learning_rate": 5.247144486933706e-06,
|
584 |
+
"loss": 0.7963,
|
585 |
+
"step": 8000
|
586 |
+
},
|
587 |
+
{
|
588 |
+
"epoch": 2.7126590756865374,
|
589 |
+
"grad_norm": 1.8497497934553626,
|
590 |
+
"learning_rate": 5.130329652115603e-06,
|
591 |
+
"loss": 0.7835,
|
592 |
+
"step": 8100
|
593 |
+
},
|
594 |
+
{
|
595 |
+
"epoch": 2.7461486939048894,
|
596 |
+
"grad_norm": 1.4531964604398142,
|
597 |
+
"learning_rate": 5.013443578174608e-06,
|
598 |
+
"loss": 0.7899,
|
599 |
+
"step": 8200
|
600 |
+
},
|
601 |
+
{
|
602 |
+
"epoch": 2.7796383121232418,
|
603 |
+
"grad_norm": 1.5125912635190244,
|
604 |
+
"learning_rate": 4.8965501558773326e-06,
|
605 |
+
"loss": 0.788,
|
606 |
+
"step": 8300
|
607 |
+
},
|
608 |
+
{
|
609 |
+
"epoch": 2.813127930341594,
|
610 |
+
"grad_norm": 1.3974817703691491,
|
611 |
+
"learning_rate": 4.779713280007051e-06,
|
612 |
+
"loss": 0.7979,
|
613 |
+
"step": 8400
|
614 |
+
},
|
615 |
+
{
|
616 |
+
"epoch": 2.8466175485599465,
|
617 |
+
"grad_norm": 1.3666908779315028,
|
618 |
+
"learning_rate": 4.6629968144383545e-06,
|
619 |
+
"loss": 0.7839,
|
620 |
+
"step": 8500
|
621 |
+
},
|
622 |
+
{
|
623 |
+
"epoch": 2.8801071667782985,
|
624 |
+
"grad_norm": 1.3996848474214347,
|
625 |
+
"learning_rate": 4.546464557228699e-06,
|
626 |
+
"loss": 0.7899,
|
627 |
+
"step": 8600
|
628 |
+
},
|
629 |
+
{
|
630 |
+
"epoch": 2.913596784996651,
|
631 |
+
"grad_norm": 1.3649644046851297,
|
632 |
+
"learning_rate": 4.430180205745932e-06,
|
633 |
+
"loss": 0.7959,
|
634 |
+
"step": 8700
|
635 |
+
},
|
636 |
+
{
|
637 |
+
"epoch": 2.9470864032150033,
|
638 |
+
"grad_norm": 1.369973634022689,
|
639 |
+
"learning_rate": 4.314207321850849e-06,
|
640 |
+
"loss": 0.7956,
|
641 |
+
"step": 8800
|
642 |
+
},
|
643 |
+
{
|
644 |
+
"epoch": 2.9805760214333556,
|
645 |
+
"grad_norm": 1.5071401378465865,
|
646 |
+
"learning_rate": 4.198609297153831e-06,
|
647 |
+
"loss": 0.7954,
|
648 |
+
"step": 8900
|
649 |
+
},
|
650 |
+
{
|
651 |
+
"epoch": 3.0,
|
652 |
+
"eval_loss": NaN,
|
653 |
+
"eval_runtime": 348.2929,
|
654 |
+
"eval_samples_per_second": 45.72,
|
655 |
+
"eval_steps_per_second": 1.43,
|
656 |
+
"step": 8958
|
657 |
+
},
|
658 |
+
{
|
659 |
+
"epoch": 3.014065639651708,
|
660 |
+
"grad_norm": 1.4428883177319884,
|
661 |
+
"learning_rate": 4.083449318364527e-06,
|
662 |
+
"loss": 0.7853,
|
663 |
+
"step": 9000
|
664 |
+
},
|
665 |
+
{
|
666 |
+
"epoch": 3.0475552578700604,
|
667 |
+
"grad_norm": 1.351580022628522,
|
668 |
+
"learning_rate": 3.968790332753555e-06,
|
669 |
+
"loss": 0.7987,
|
670 |
+
"step": 9100
|
671 |
+
},
|
672 |
+
{
|
673 |
+
"epoch": 3.081044876088413,
|
674 |
+
"grad_norm": 1.3743638112843153,
|
675 |
+
"learning_rate": 3.8546950137450656e-06,
|
676 |
+
"loss": 0.7732,
|
677 |
+
"step": 9200
|
678 |
+
},
|
679 |
+
{
|
680 |
+
"epoch": 3.1145344943067648,
|
681 |
+
"grad_norm": 1.484385543221885,
|
682 |
+
"learning_rate": 3.7412257266590007e-06,
|
683 |
+
"loss": 0.7563,
|
684 |
+
"step": 9300
|
685 |
+
},
|
686 |
+
{
|
687 |
+
"epoch": 3.148024112525117,
|
688 |
+
"grad_norm": 1.5933705790768147,
|
689 |
+
"learning_rate": 3.62844449462176e-06,
|
690 |
+
"loss": 0.739,
|
691 |
+
"step": 9400
|
692 |
+
},
|
693 |
+
{
|
694 |
+
"epoch": 3.1815137307434695,
|
695 |
+
"grad_norm": 1.6870709018629149,
|
696 |
+
"learning_rate": 3.5164129646639204e-06,
|
697 |
+
"loss": 0.7228,
|
698 |
+
"step": 9500
|
699 |
+
},
|
700 |
+
{
|
701 |
+
"epoch": 3.215003348961822,
|
702 |
+
"grad_norm": 1.6481990122285803,
|
703 |
+
"learning_rate": 3.4051923740235205e-06,
|
704 |
+
"loss": 0.7088,
|
705 |
+
"step": 9600
|
706 |
+
},
|
707 |
+
{
|
708 |
+
"epoch": 3.2484929671801743,
|
709 |
+
"grad_norm": 1.5879233480759347,
|
710 |
+
"learning_rate": 3.2948435166733506e-06,
|
711 |
+
"loss": 0.6959,
|
712 |
+
"step": 9700
|
713 |
+
},
|
714 |
+
{
|
715 |
+
"epoch": 3.2819825853985263,
|
716 |
+
"grad_norm": 1.5176278019008493,
|
717 |
+
"learning_rate": 3.1854267100905344e-06,
|
718 |
+
"loss": 0.6686,
|
719 |
+
"step": 9800
|
720 |
+
},
|
721 |
+
{
|
722 |
+
"epoch": 3.3154722036168787,
|
723 |
+
"grad_norm": 1.552541254972543,
|
724 |
+
"learning_rate": 3.0770017622865523e-06,
|
725 |
+
"loss": 0.6606,
|
726 |
+
"step": 9900
|
727 |
+
},
|
728 |
+
{
|
729 |
+
"epoch": 3.348961821835231,
|
730 |
+
"grad_norm": 1.7082594351164857,
|
731 |
+
"learning_rate": 2.9696279391157663e-06,
|
732 |
+
"loss": 0.6448,
|
733 |
+
"step": 10000
|
734 |
+
},
|
735 |
+
{
|
736 |
+
"epoch": 3.3824514400535834,
|
737 |
+
"grad_norm": 3.1211350166343546,
|
738 |
+
"learning_rate": 2.8633639318802685e-06,
|
739 |
+
"loss": 0.6331,
|
740 |
+
"step": 10100
|
741 |
+
},
|
742 |
+
{
|
743 |
+
"epoch": 3.415941058271936,
|
744 |
+
"grad_norm": 1.6469804551491567,
|
745 |
+
"learning_rate": 2.758267825248798e-06,
|
746 |
+
"loss": 0.6145,
|
747 |
+
"step": 10200
|
748 |
+
},
|
749 |
+
{
|
750 |
+
"epoch": 3.4494306764902882,
|
751 |
+
"grad_norm": 1.7146851447701008,
|
752 |
+
"learning_rate": 2.6543970655072514e-06,
|
753 |
+
"loss": 0.6065,
|
754 |
+
"step": 10300
|
755 |
+
},
|
756 |
+
{
|
757 |
+
"epoch": 3.48292029470864,
|
758 |
+
"grad_norm": 2.043969779746548,
|
759 |
+
"learning_rate": 2.5518084291581163e-06,
|
760 |
+
"loss": 0.6009,
|
761 |
+
"step": 10400
|
762 |
+
},
|
763 |
+
{
|
764 |
+
"epoch": 3.5164099129269926,
|
765 |
+
"grad_norm": 1.6446301288443177,
|
766 |
+
"learning_rate": 2.450557991886039e-06,
|
767 |
+
"loss": 0.5847,
|
768 |
+
"step": 10500
|
769 |
+
},
|
770 |
+
{
|
771 |
+
"epoch": 3.549899531145345,
|
772 |
+
"grad_norm": 1.8987310858087982,
|
773 |
+
"learning_rate": 2.350701097906447e-06,
|
774 |
+
"loss": 0.578,
|
775 |
+
"step": 10600
|
776 |
+
},
|
777 |
+
{
|
778 |
+
"epoch": 3.5833891493636973,
|
779 |
+
"grad_norm": 1.5875486081882013,
|
780 |
+
"learning_rate": 2.252292329714012e-06,
|
781 |
+
"loss": 0.5838,
|
782 |
+
"step": 10700
|
783 |
+
},
|
784 |
+
{
|
785 |
+
"epoch": 3.6168787675820493,
|
786 |
+
"grad_norm": 1.5799154770812294,
|
787 |
+
"learning_rate": 2.155385478247455e-06,
|
788 |
+
"loss": 0.5653,
|
789 |
+
"step": 10800
|
790 |
+
},
|
791 |
+
{
|
792 |
+
"epoch": 3.6503683858004017,
|
793 |
+
"grad_norm": 1.9320449012232872,
|
794 |
+
"learning_rate": 2.0600335134870415e-06,
|
795 |
+
"loss": 0.5723,
|
796 |
+
"step": 10900
|
797 |
+
},
|
798 |
+
{
|
799 |
+
"epoch": 3.683858004018754,
|
800 |
+
"grad_norm": 1.9440461986884756,
|
801 |
+
"learning_rate": 1.9662885555008055e-06,
|
802 |
+
"loss": 0.5762,
|
803 |
+
"step": 11000
|
804 |
+
},
|
805 |
+
{
|
806 |
+
"epoch": 3.7173476222371065,
|
807 |
+
"grad_norm": 1.8693480723789162,
|
808 |
+
"learning_rate": 1.8742018459553551e-06,
|
809 |
+
"loss": 0.5715,
|
810 |
+
"step": 11100
|
811 |
+
},
|
812 |
+
{
|
813 |
+
"epoch": 3.750837240455459,
|
814 |
+
"grad_norm": 1.6174259181592696,
|
815 |
+
"learning_rate": 1.7838237201067976e-06,
|
816 |
+
"loss": 0.577,
|
817 |
+
"step": 11200
|
818 |
+
},
|
819 |
+
{
|
820 |
+
"epoch": 3.7843268586738112,
|
821 |
+
"grad_norm": 1.7622557353103667,
|
822 |
+
"learning_rate": 1.695203579287134e-06,
|
823 |
+
"loss": 0.5825,
|
824 |
+
"step": 11300
|
825 |
+
},
|
826 |
+
{
|
827 |
+
"epoch": 3.8178164768921636,
|
828 |
+
"grad_norm": 1.6601130738963383,
|
829 |
+
"learning_rate": 1.6083898639011402e-06,
|
830 |
+
"loss": 0.5871,
|
831 |
+
"step": 11400
|
832 |
+
},
|
833 |
+
{
|
834 |
+
"epoch": 3.8513060951105156,
|
835 |
+
"grad_norm": 2.0156698217638445,
|
836 |
+
"learning_rate": 1.5234300269484848e-06,
|
837 |
+
"loss": 0.5805,
|
838 |
+
"step": 11500
|
839 |
+
},
|
840 |
+
{
|
841 |
+
"epoch": 3.884795713328868,
|
842 |
+
"grad_norm": 1.6232268197279267,
|
843 |
+
"learning_rate": 1.440370508085589e-06,
|
844 |
+
"loss": 0.5859,
|
845 |
+
"step": 11600
|
846 |
+
},
|
847 |
+
{
|
848 |
+
"epoch": 3.9182853315472204,
|
849 |
+
"grad_norm": 1.9405869273694487,
|
850 |
+
"learning_rate": 1.3592567082413683e-06,
|
851 |
+
"loss": 0.5961,
|
852 |
+
"step": 11700
|
853 |
+
},
|
854 |
+
{
|
855 |
+
"epoch": 3.9517749497655728,
|
856 |
+
"grad_norm": 1.6205319658414614,
|
857 |
+
"learning_rate": 1.2801329648007648e-06,
|
858 |
+
"loss": 0.6098,
|
859 |
+
"step": 11800
|
860 |
+
},
|
861 |
+
{
|
862 |
+
"epoch": 3.985264567983925,
|
863 |
+
"grad_norm": 2.0422623489549028,
|
864 |
+
"learning_rate": 1.203042527369611e-06,
|
865 |
+
"loss": 0.5992,
|
866 |
+
"step": 11900
|
867 |
+
},
|
868 |
+
{
|
869 |
+
"epoch": 4.0,
|
870 |
+
"eval_loss": NaN,
|
871 |
+
"eval_runtime": 348.6255,
|
872 |
+
"eval_samples_per_second": 45.677,
|
873 |
+
"eval_steps_per_second": 1.428,
|
874 |
+
"step": 11944
|
875 |
+
},
|
876 |
+
{
|
877 |
+
"epoch": 4.018754186202277,
|
878 |
+
"grad_norm": 2.042627434325046,
|
879 |
+
"learning_rate": 1.1280275341340919e-06,
|
880 |
+
"loss": 0.5984,
|
881 |
+
"step": 12000
|
882 |
+
},
|
883 |
+
{
|
884 |
+
"epoch": 4.0522438044206295,
|
885 |
+
"grad_norm": 1.6714080325577199,
|
886 |
+
"learning_rate": 1.0551289888277e-06,
|
887 |
+
"loss": 0.6147,
|
888 |
+
"step": 12100
|
889 |
+
},
|
890 |
+
{
|
891 |
+
"epoch": 4.085733422638982,
|
892 |
+
"grad_norm": 1.73673619396585,
|
893 |
+
"learning_rate": 9.843867383183065e-07,
|
894 |
+
"loss": 0.5882,
|
895 |
+
"step": 12200
|
896 |
+
},
|
897 |
+
{
|
898 |
+
"epoch": 4.119223040857334,
|
899 |
+
"grad_norm": 1.7412386584226505,
|
900 |
+
"learning_rate": 9.158394508275764e-07,
|
901 |
+
"loss": 0.5785,
|
902 |
+
"step": 12300
|
903 |
+
},
|
904 |
+
{
|
905 |
+
"epoch": 4.152712659075687,
|
906 |
+
"grad_norm": 1.688382578729062,
|
907 |
+
"learning_rate": 8.495245947946428e-07,
|
908 |
+
"loss": 0.568,
|
909 |
+
"step": 12400
|
910 |
+
},
|
911 |
+
{
|
912 |
+
"epoch": 4.186202277294039,
|
913 |
+
"grad_norm": 1.7570920632165827,
|
914 |
+
"learning_rate": 7.85478418395586e-07,
|
915 |
+
"loss": 0.5548,
|
916 |
+
"step": 12500
|
917 |
+
},
|
918 |
+
{
|
919 |
+
"epoch": 4.219691895512391,
|
920 |
+
"grad_norm": 2.0216120473215775,
|
921 |
+
"learning_rate": 7.237359297299213e-07,
|
922 |
+
"loss": 0.5491,
|
923 |
+
"step": 12600
|
924 |
+
},
|
925 |
+
{
|
926 |
+
"epoch": 4.253181513730744,
|
927 |
+
"grad_norm": 1.7189176283861873,
|
928 |
+
"learning_rate": 6.643308776849211e-07,
|
929 |
+
"loss": 0.5344,
|
930 |
+
"step": 12700
|
931 |
+
},
|
932 |
+
{
|
933 |
+
"epoch": 4.286671131949095,
|
934 |
+
"grad_norm": 1.878389510492855,
|
935 |
+
"learning_rate": 6.07295733488234e-07,
|
936 |
+
"loss": 0.5205,
|
937 |
+
"step": 12800
|
938 |
+
},
|
939 |
+
{
|
940 |
+
"epoch": 4.320160750167448,
|
941 |
+
"grad_norm": 1.8272392601326726,
|
942 |
+
"learning_rate": 5.526616729588719e-07,
|
943 |
+
"loss": 0.5143,
|
944 |
+
"step": 12900
|
945 |
+
},
|
946 |
+
{
|
947 |
+
"epoch": 4.3536503683858,
|
948 |
+
"grad_norm": 1.9885081295428908,
|
949 |
+
"learning_rate": 5.00458559466292e-07,
|
950 |
+
"loss": 0.5027,
|
951 |
+
"step": 13000
|
952 |
+
},
|
953 |
+
{
|
954 |
+
"epoch": 4.3871399866041525,
|
955 |
+
"grad_norm": 1.8856403938879776,
|
956 |
+
"learning_rate": 4.507149276068562e-07,
|
957 |
+
"loss": 0.498,
|
958 |
+
"step": 13100
|
959 |
+
},
|
960 |
+
{
|
961 |
+
"epoch": 4.420629604822505,
|
962 |
+
"grad_norm": 1.9693198350457568,
|
963 |
+
"learning_rate": 4.0345796760662247e-07,
|
964 |
+
"loss": 0.4925,
|
965 |
+
"step": 13200
|
966 |
+
},
|
967 |
+
{
|
968 |
+
"epoch": 4.454119223040857,
|
969 |
+
"grad_norm": 2.1533445061135827,
|
970 |
+
"learning_rate": 3.587135104589706e-07,
|
971 |
+
"loss": 0.4893,
|
972 |
+
"step": 13300
|
973 |
+
},
|
974 |
+
{
|
975 |
+
"epoch": 4.48760884125921,
|
976 |
+
"grad_norm": 1.795431467461819,
|
977 |
+
"learning_rate": 3.16506013805194e-07,
|
978 |
+
"loss": 0.4844,
|
979 |
+
"step": 13400
|
980 |
+
},
|
981 |
+
{
|
982 |
+
"epoch": 4.521098459477562,
|
983 |
+
"grad_norm": 1.8492311436151545,
|
984 |
+
"learning_rate": 2.7685854856577934e-07,
|
985 |
+
"loss": 0.472,
|
986 |
+
"step": 13500
|
987 |
+
},
|
988 |
+
{
|
989 |
+
"epoch": 4.5545880776959144,
|
990 |
+
"grad_norm": 1.9596331843479984,
|
991 |
+
"learning_rate": 2.3979278632967507e-07,
|
992 |
+
"loss": 0.4774,
|
993 |
+
"step": 13600
|
994 |
+
},
|
995 |
+
{
|
996 |
+
"epoch": 4.588077695914267,
|
997 |
+
"grad_norm": 1.9830637420731552,
|
998 |
+
"learning_rate": 2.0532898750844633e-07,
|
999 |
+
"loss": 0.4786,
|
1000 |
+
"step": 13700
|
1001 |
+
},
|
1002 |
+
{
|
1003 |
+
"epoch": 4.621567314132619,
|
1004 |
+
"grad_norm": 2.0258341442311423,
|
1005 |
+
"learning_rate": 1.734859902617886e-07,
|
1006 |
+
"loss": 0.4786,
|
1007 |
+
"step": 13800
|
1008 |
+
},
|
1009 |
+
{
|
1010 |
+
"epoch": 4.655056932350972,
|
1011 |
+
"grad_norm": 1.9180928479733919,
|
1012 |
+
"learning_rate": 1.4428120020045122e-07,
|
1013 |
+
"loss": 0.4882,
|
1014 |
+
"step": 13900
|
1015 |
+
},
|
1016 |
+
{
|
1017 |
+
"epoch": 4.688546550569323,
|
1018 |
+
"grad_norm": 1.897536871580035,
|
1019 |
+
"learning_rate": 1.1773058087221068e-07,
|
1020 |
+
"loss": 0.4816,
|
1021 |
+
"step": 14000
|
1022 |
+
},
|
1023 |
+
{
|
1024 |
+
"epoch": 4.7220361687876755,
|
1025 |
+
"grad_norm": 1.9655089659832947,
|
1026 |
+
"learning_rate": 9.384864503607871e-08,
|
1027 |
+
"loss": 0.482,
|
1028 |
+
"step": 14100
|
1029 |
+
},
|
1030 |
+
{
|
1031 |
+
"epoch": 4.755525787006028,
|
1032 |
+
"grad_norm": 2.1423206586220345,
|
1033 |
+
"learning_rate": 7.264844672952299e-08,
|
1034 |
+
"loss": 0.4989,
|
1035 |
+
"step": 14200
|
1036 |
+
},
|
1037 |
+
{
|
1038 |
+
"epoch": 4.78901540522438,
|
1039 |
+
"grad_norm": 1.8698413000910197,
|
1040 |
+
"learning_rate": 5.4141574133037555e-08,
|
1041 |
+
"loss": 0.5135,
|
1042 |
+
"step": 14300
|
1043 |
+
},
|
1044 |
+
{
|
1045 |
+
"epoch": 4.822505023442733,
|
1046 |
+
"grad_norm": 1.888414362344593,
|
1047 |
+
"learning_rate": 3.8338143235959746e-08,
|
1048 |
+
"loss": 0.5077,
|
1049 |
+
"step": 14400
|
1050 |
+
},
|
1051 |
+
{
|
1052 |
+
"epoch": 4.855994641661085,
|
1053 |
+
"grad_norm": 1.9521761786674516,
|
1054 |
+
"learning_rate": 2.5246792306999334e-08,
|
1055 |
+
"loss": 0.5146,
|
1056 |
+
"step": 14500
|
1057 |
+
},
|
1058 |
+
{
|
1059 |
+
"epoch": 4.8894842598794375,
|
1060 |
+
"grad_norm": 1.8440132403394436,
|
1061 |
+
"learning_rate": 1.4874677172497243e-08,
|
1062 |
+
"loss": 0.5236,
|
1063 |
+
"step": 14600
|
1064 |
+
},
|
1065 |
+
{
|
1066 |
+
"epoch": 4.92297387809779,
|
1067 |
+
"grad_norm": 1.9744361393291854,
|
1068 |
+
"learning_rate": 7.2274673050010124e-09,
|
1069 |
+
"loss": 0.5404,
|
1070 |
+
"step": 14700
|
1071 |
+
},
|
1072 |
+
{
|
1073 |
+
"epoch": 4.956463496316142,
|
1074 |
+
"grad_norm": 1.8076672661575879,
|
1075 |
+
"learning_rate": 2.309342724287622e-09,
|
1076 |
+
"loss": 0.5488,
|
1077 |
+
"step": 14800
|
1078 |
+
},
|
1079 |
+
{
|
1080 |
+
"epoch": 4.989953114534495,
|
1081 |
+
"grad_norm": 1.9120835449610545,
|
1082 |
+
"learning_rate": 1.229917125389335e-10,
|
1083 |
+
"loss": 0.5494,
|
1084 |
+
"step": 14900
|
1085 |
+
},
|
1086 |
+
{
|
1087 |
+
"epoch": 5.0,
|
1088 |
+
"eval_loss": NaN,
|
1089 |
+
"eval_runtime": 348.4998,
|
1090 |
+
"eval_samples_per_second": 45.693,
|
1091 |
+
"eval_steps_per_second": 1.429,
|
1092 |
+
"step": 14930
|
1093 |
+
},
|
1094 |
+
{
|
1095 |
+
"epoch": 5.0,
|
1096 |
+
"step": 14930,
|
1097 |
+
"total_flos": 7403300223713280.0,
|
1098 |
+
"train_loss": 0.9973710162960596,
|
1099 |
+
"train_runtime": 63021.9977,
|
1100 |
+
"train_samples_per_second": 11.37,
|
1101 |
+
"train_steps_per_second": 0.237
|
1102 |
+
}
|
1103 |
+
],
|
1104 |
+
"logging_steps": 100,
|
1105 |
+
"max_steps": 14930,
|
1106 |
+
"num_input_tokens_seen": 0,
|
1107 |
+
"num_train_epochs": 5,
|
1108 |
+
"save_steps": 1000,
|
1109 |
+
"stateful_callbacks": {
|
1110 |
+
"TrainerControl": {
|
1111 |
+
"args": {
|
1112 |
+
"should_epoch_stop": false,
|
1113 |
+
"should_evaluate": false,
|
1114 |
+
"should_log": false,
|
1115 |
+
"should_save": true,
|
1116 |
+
"should_training_stop": true
|
1117 |
+
},
|
1118 |
+
"attributes": {}
|
1119 |
+
}
|
1120 |
+
},
|
1121 |
+
"total_flos": 7403300223713280.0,
|
1122 |
+
"train_batch_size": 6,
|
1123 |
+
"trial_name": null,
|
1124 |
+
"trial_params": null
|
1125 |
+
}
|
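The `log_history` above can be checked programmatically. The sketch below is illustrative only (it is not part of this upload): it loads `trainer_state.json`, flags the `NaN` eval losses logged at each epoch boundary, and compares a few logged learning rates against the linear-warmup/cosine-decay schedule implied by the hyperparameters (peak LR 1e-05, warmup ratio 0.1, 14930 total steps). The schedule formula is the standard one used by the Trainer's cosine scheduler and is assumed here, not taken from this repo's training script.

```python
import json
import math

# Python's json module parses the bare NaN literals that the Trainer
# writes for eval_loss into float("nan").
with open("trainer_state.json") as f:
    state = json.load(f)

peak_lr = 1e-5                         # learning_rate from the model card
total_steps = state["max_steps"]       # 14930
warmup_steps = int(0.1 * total_steps)  # lr_scheduler_warmup_ratio: 0.1

def expected_lr(step: int) -> float:
    """Linear warmup to peak_lr, then cosine decay to zero (assumed schedule)."""
    if step < warmup_steps:
        return peak_lr * step / warmup_steps
    progress = (step - warmup_steps) / (total_steps - warmup_steps)
    return 0.5 * peak_lr * (1.0 + math.cos(math.pi * progress))

for rec in state["log_history"]:
    if "eval_loss" in rec and math.isnan(rec["eval_loss"]):
        print(f"NaN eval_loss at epoch {rec['epoch']} (step {rec['step']})")

# Spot-check: at step 1400 the logged LR is 9.377e-06 = 1e-5 * 1400 / 1493,
# i.e. still in warmup; step 1500 sits just past the peak.
for step in (1400, 1500, 8000):
    logged = next(r["learning_rate"] for r in state["log_history"]
                  if r.get("step") == step and "learning_rate" in r)
    print(step, logged, expected_lr(step))
```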
training_args.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1ac80307e2d742d85aacb6ec8b4dcdd61f123ee378e7d5b4c2c49d1bafc2aad2
+size 7160
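`training_args.bin` is tracked by Git LFS, so the diff above shows only the pointer file (spec version, object hash, size), not the 7 KB binary. The file itself is a pickled `TrainingArguments` object; below is a minimal sketch for inspecting it, assuming `transformers` is importable and the real blob has been pulled from LFS:

```python
import torch

# TrainingArguments is a full Python object, so recent PyTorch needs
# weights_only=False to unpickle it (the default became weights-only loading).
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate)                # 1e-05
print(args.per_device_train_batch_size)  # 6
print(args.num_train_epochs)             # 5.0
```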
training_eval_loss.png
ADDED
training_loss.png
ADDED
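The two PNGs are the rendered loss curves (training loss and evaluation loss over steps). A curve like `training_loss.png` can be reproduced from the log history; the snippet below is a rough sketch assuming `matplotlib` is installed, not the script that generated the committed figures:

```python
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    history = json.load(f)["log_history"]

# Keep only the per-100-step training records; eval records carry
# eval_loss instead of loss and are skipped here.
steps, losses = zip(*[(r["step"], r["loss"]) for r in history if "loss" in r])
plt.plot(steps, losses)
plt.xlabel("step")
plt.ylabel("training loss")
plt.savefig("training_loss.png")
```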