Training in progress, step 31500, checkpoint
- checkpoint-31500/config.json +30 -0
- checkpoint-31500/generation_config.json +6 -0
- checkpoint-31500/model.safetensors +3 -0
- checkpoint-31500/optimizer.pt +3 -0
- checkpoint-31500/rng_state.pth +3 -0
- checkpoint-31500/scheduler.pt +3 -0
- checkpoint-31500/special_tokens_map.json +24 -0
- checkpoint-31500/tokenizer.json +96 -0
- checkpoint-31500/tokenizer_config.json +38 -0
- checkpoint-31500/trainer_state.json +2238 -0
- checkpoint-31500/training_args.bin +3 -0
    	
checkpoint-31500/config.json ADDED
@@ -0,0 +1,30 @@
+{
+  "_name_or_path": "EleutherAI/pythia-160m",
+  "architectures": [
+    "GPTNeoXForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classifier_dropout": 0.1,
+  "eos_token_id": 0,
+  "hidden_act": "gelu",
+  "hidden_dropout": 0.0,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "max_position_embeddings": 2048,
+  "model_type": "gpt_neox",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "rope_scaling": null,
+  "rotary_emb_base": 10000,
+  "rotary_pct": 0.25,
+  "tie_word_embeddings": false,
+  "torch_dtype": "float32",
+  "transformers_version": "4.42.3",
+  "use_cache": true,
+  "use_parallel_residual": true,
+  "vocab_size": 8
+}
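The config keeps the EleutherAI/pythia-160m GPT-NeoX architecture (12 layers, hidden size 768, 12 attention heads) but shrinks the vocabulary to 8 tokens, matching the single-character DNA vocabulary defined in tokenizer.json below. A minimal sketch of loading the checkpoint with transformers, assuming the checkpoint-31500 directory has been downloaded locally:

    from transformers import AutoConfig, AutoModelForCausalLM

    # config.json selects the GPTNeoXForCausalLM class; model.safetensors
    # supplies the weights.
    config = AutoConfig.from_pretrained("checkpoint-31500")
    model = AutoModelForCausalLM.from_pretrained("checkpoint-31500")

    print(config.vocab_size)       # 8
    print(model.num_parameters())  # ~85M (the 340 MB float32 safetensors / 4 bytes)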
    	
checkpoint-31500/generation_config.json ADDED
@@ -0,0 +1,6 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 0,
+  "eos_token_id": 0,
+  "transformers_version": "4.42.3"
+}
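generation_config.json only pins the bos/eos token ids; all sampling settings fall back to defaults given at call time. A hedged sketch of sampling a continuation from the checkpoint, assuming it loads as above:

    from transformers import AutoModelForCausalLM, AutoTokenizer

    tok = AutoTokenizer.from_pretrained("checkpoint-31500")
    model = AutoModelForCausalLM.from_pretrained("checkpoint-31500")

    # eos_token_id 0 is taken from generation_config.json.
    inputs = tok("acgt", return_tensors="pt")
    out = model.generate(**inputs, max_new_tokens=32, do_sample=True)
    print(tok.decode(out[0], skip_special_tokens=True))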
    	
checkpoint-31500/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:544a7469724b4f5ba9ea403754052dffbbe1364f044999ec19790e735de1a3f2
+size 340289960
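The binary files in this commit are stored as Git LFS pointers: only the sha256 oid and byte size live in the repository, and the payload is fetched from the LFS server. A small sketch, assuming the real file has been pulled, of verifying it against the oid recorded above:

    import hashlib

    # Stream-hash the downloaded weights and compare with the pointer's oid.
    h = hashlib.sha256()
    with open("checkpoint-31500/model.safetensors", "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)

    assert h.hexdigest() == "544a7469724b4f5ba9ea403754052dffbbe1364f044999ec19790e735de1a3f2"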
    	
checkpoint-31500/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:590a93641b7582e9a54b220f24f033add119b7cc4ab1b505393da58cb443867d
+size 680670138
    	
checkpoint-31500/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:835f869ea325fd6edf27b48b589309fb66641cb92b45f2fc13d1bb6e8814106c
+size 14244
    	
checkpoint-31500/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d828a1bce6a102ac6eddc89b6b76268c1b85a3b003444bc0cc78ff99d03af8b4
+size 1064
    	
checkpoint-31500/special_tokens_map.json ADDED
@@ -0,0 +1,24 @@
+{
+  "bos_token": {
+    "content": "<|beginoftext|>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": "<|endoftext|>",
+  "unk_token": {
+    "content": "<|unknown|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
    	
checkpoint-31500/tokenizer.json ADDED
@@ -0,0 +1,96 @@
+{
+  "version": "1.0",
+  "truncation": null,
+  "padding": null,
+  "added_tokens": [
+    {
+      "id": 0,
+      "content": "<|beginoftext|>",
+      "single_word": false,
+      "lstrip": false,
+      "rstrip": false,
+      "normalized": true,
+      "special": true
+    },
+    {
+      "id": 1,
+      "content": "<|endoftext|>",
+      "single_word": false,
+      "lstrip": false,
+      "rstrip": false,
+      "normalized": false,
+      "special": true
+    },
+    {
+      "id": 2,
+      "content": "<|unknown|>",
+      "single_word": false,
+      "lstrip": false,
+      "rstrip": false,
+      "normalized": false,
+      "special": true
+    }
+  ],
+  "normalizer": {
+    "type": "NFC"
+  },
+  "pre_tokenizer": {
+    "type": "ByteLevel",
+    "add_prefix_space": false,
+    "trim_offsets": true,
+    "use_regex": true
+  },
+  "post_processor": {
+    "type": "TemplateProcessing",
+    "single": [
+      {
+        "Sequence": {
+          "id": "A",
+          "type_id": 0
+        }
+      }
+    ],
+    "pair": [
+      {
+        "Sequence": {
+          "id": "A",
+          "type_id": 0
+        }
+      },
+      {
+        "Sequence": {
+          "id": "B",
+          "type_id": 1
+        }
+      }
+    ],
+    "special_tokens": {}
+  },
+  "decoder": {
+    "type": "ByteLevel",
+    "add_prefix_space": false,
+    "trim_offsets": true,
+    "use_regex": true
+  },
+  "model": {
+    "type": "BPE",
+    "dropout": null,
+    "unk_token": "<|unknown|>",
+    "continuing_subword_prefix": null,
+    "end_of_word_suffix": null,
+    "fuse_unk": false,
+    "byte_fallback": false,
+    "ignore_merges": false,
+    "vocab": {
+      "<|beginoftext|>": 0,
+      "<|padding|>": 1,
+      "<|unknown|>": 2,
+      "a": 3,
+      "c": 4,
+      "g": 5,
+      "t": 6,
+      "n": 7
+    },
+    "merges": []
+  }
+}
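The tokenizer is a BPE model with an empty merges list over the single-character DNA alphabet (a, c, g, t, n), so tokenization degenerates to a per-character vocabulary lookup. A quick sanity check, assuming the checkpoint directory is local:

    from transformers import AutoTokenizer

    tok = AutoTokenizer.from_pretrained("checkpoint-31500")

    # With "merges": [] each base maps straight to its id in the vocab above.
    print(tok("acgtn")["input_ids"])  # expected: [3, 4, 5, 6, 7]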
    	
checkpoint-31500/tokenizer_config.json ADDED
@@ -0,0 +1,38 @@
+{
+  "add_bos_token": false,
+  "add_eos_token": false,
+  "add_prefix_space": false,
+  "added_tokens_decoder": {
+    "0": {
+      "content": "<|beginoftext|>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": "<|endoftext|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "<|unknown|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "bos_token": "<|beginoftext|>",
+  "clean_up_tokenization_spaces": true,
+  "eos_token": "<|endoftext|>",
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "<|endoftext|>",
+  "tokenizer_class": "GPTNeoXTokenizer",
+  "unk_token": "<|unknown|>"
+}
    	
checkpoint-31500/trainer_state.json ADDED
@@ -0,0 +1,2238 @@
+{
+  "best_metric": null,
+  "best_model_checkpoint": null,
+  "epoch": 0.30781257634240483,
+  "eval_steps": 500,
+  "global_step": 31500,
+  "is_hyper_param_search": false,
+  "is_local_process_zero": true,
+  "is_world_process_zero": true,
+  "log_history": [
+    {
+      "epoch": 0.0009771827820393806,
+      "grad_norm": 0.5417118072509766,
+      "learning_rate": 4.995602247740044e-05,
+      "loss": 1.378,
+      "step": 100
+    },
+    {
+      "epoch": 0.001954365564078761,
+      "grad_norm": 0.6493918895721436,
+      "learning_rate": 4.990715856340093e-05,
+      "loss": 1.3304,
+      "step": 200
+    },
+    {
+      "epoch": 0.0029315483461181415,
+      "grad_norm": 0.9062462449073792,
+      "learning_rate": 4.9858294649401425e-05,
+      "loss": 1.3284,
+      "step": 300
+    },
+    {
+      "epoch": 0.003908731128157522,
+      "grad_norm": 0.750052273273468,
+      "learning_rate": 4.9809430735401906e-05,
+      "loss": 1.3166,
+      "step": 400
+    },
+    {
+      "epoch": 0.004885913910196903,
+      "grad_norm": 0.6602022051811218,
+      "learning_rate": 4.97605668214024e-05,
+      "loss": 1.3166,
+      "step": 500
+    },
+    {
+      "epoch": 0.005863096692236283,
+      "grad_norm": 0.4193927049636841,
+      "learning_rate": 4.971170290740288e-05,
+      "loss": 1.3098,
+      "step": 600
+    },
+    {
+      "epoch": 0.006840279474275663,
+      "grad_norm": 0.6095415949821472,
+      "learning_rate": 4.966283899340338e-05,
+      "loss": 1.3103,
+      "step": 700
+    },
+    {
+      "epoch": 0.007817462256315045,
+      "grad_norm": 0.9943467378616333,
+      "learning_rate": 4.9613975079403865e-05,
+      "loss": 1.3096,
+      "step": 800
+    },
+    {
+      "epoch": 0.008794645038354424,
+      "grad_norm": 1.2263585329055786,
+      "learning_rate": 4.9565111165404346e-05,
+      "loss": 1.3067,
+      "step": 900
+    },
+    {
+      "epoch": 0.009771827820393805,
+      "grad_norm": 0.7198677659034729,
+      "learning_rate": 4.951624725140484e-05,
+      "loss": 1.3041,
+      "step": 1000
+    },
+    {
+      "epoch": 0.010749010602433185,
+      "grad_norm": 0.7370775938034058,
+      "learning_rate": 4.946738333740533e-05,
+      "loss": 1.302,
+      "step": 1100
+    },
+    {
+      "epoch": 0.011726193384472566,
+      "grad_norm": 0.5109437704086304,
+      "learning_rate": 4.941851942340582e-05,
+      "loss": 1.3089,
+      "step": 1200
+    },
+    {
+      "epoch": 0.012703376166511945,
+      "grad_norm": 0.1879555583000183,
+      "learning_rate": 4.9369655509406305e-05,
+      "loss": 1.3043,
+      "step": 1300
+    },
+    {
+      "epoch": 0.013680558948551327,
+      "grad_norm": 0.951046884059906,
+      "learning_rate": 4.932079159540679e-05,
+      "loss": 1.3098,
+      "step": 1400
+    },
+    {
+      "epoch": 0.014657741730590706,
+      "grad_norm": 0.2478829026222229,
+      "learning_rate": 4.927192768140728e-05,
+      "loss": 1.3026,
+      "step": 1500
+    },
+    {
+      "epoch": 0.01563492451263009,
+      "grad_norm": 0.5585843324661255,
+      "learning_rate": 4.9223063767407776e-05,
+      "loss": 1.3014,
+      "step": 1600
+    },
+    {
+      "epoch": 0.016612107294669467,
+      "grad_norm": 0.48532453179359436,
+      "learning_rate": 4.917419985340826e-05,
+      "loss": 1.2981,
+      "step": 1700
+    },
+    {
+      "epoch": 0.017589290076708848,
+      "grad_norm": 0.4233573079109192,
+      "learning_rate": 4.912533593940875e-05,
+      "loss": 1.2992,
+      "step": 1800
+    },
+    {
+      "epoch": 0.01856647285874823,
+      "grad_norm": 0.3272475600242615,
+      "learning_rate": 4.9076472025409234e-05,
+      "loss": 1.292,
+      "step": 1900
+    },
+    {
+      "epoch": 0.01954365564078761,
+      "grad_norm": 0.5299385786056519,
+      "learning_rate": 4.902760811140973e-05,
+      "loss": 1.2963,
+      "step": 2000
+    },
+    {
+      "epoch": 0.02052083842282699,
+      "grad_norm": 0.1614024043083191,
+      "learning_rate": 4.8978744197410216e-05,
+      "loss": 1.2945,
+      "step": 2100
+    },
+    {
+      "epoch": 0.02149802120486637,
+      "grad_norm": 0.6039963960647583,
+      "learning_rate": 4.8929880283410705e-05,
+      "loss": 1.2913,
+      "step": 2200
+    },
+    {
+      "epoch": 0.02247520398690575,
+      "grad_norm": 0.5772804021835327,
+      "learning_rate": 4.888101636941119e-05,
+      "loss": 1.2895,
+      "step": 2300
+    },
+    {
+      "epoch": 0.023452386768945132,
+      "grad_norm": 0.7489622235298157,
+      "learning_rate": 4.883215245541168e-05,
+      "loss": 1.2847,
+      "step": 2400
+    },
+    {
+      "epoch": 0.024429569550984513,
+      "grad_norm": 0.30208253860473633,
+      "learning_rate": 4.878328854141217e-05,
+      "loss": 1.2924,
+      "step": 2500
+    },
+    {
+      "epoch": 0.02540675233302389,
+      "grad_norm": 0.36944472789764404,
+      "learning_rate": 4.873442462741266e-05,
+      "loss": 1.2916,
+      "step": 2600
+    },
+    {
+      "epoch": 0.026383935115063272,
+      "grad_norm": 0.3268676698207855,
+      "learning_rate": 4.8685560713413145e-05,
+      "loss": 1.2893,
+      "step": 2700
+    },
+    {
+      "epoch": 0.027361117897102653,
+      "grad_norm": 0.2795974910259247,
+      "learning_rate": 4.863669679941363e-05,
+      "loss": 1.282,
+      "step": 2800
+    },
+    {
+      "epoch": 0.028338300679142035,
+      "grad_norm": 0.36298853158950806,
+      "learning_rate": 4.858783288541413e-05,
+      "loss": 1.2832,
+      "step": 2900
+    },
+    {
+      "epoch": 0.029315483461181412,
+      "grad_norm": 0.5242423415184021,
+      "learning_rate": 4.853896897141461e-05,
+      "loss": 1.2819,
+      "step": 3000
+    },
+    {
+      "epoch": 0.030292666243220794,
+      "grad_norm": 0.25340864062309265,
+      "learning_rate": 4.8490105057415104e-05,
+      "loss": 1.2809,
+      "step": 3100
+    },
+    {
+      "epoch": 0.03126984902526018,
+      "grad_norm": 0.7241976261138916,
+      "learning_rate": 4.844124114341559e-05,
+      "loss": 1.2802,
+      "step": 3200
+    },
+    {
+      "epoch": 0.032247031807299556,
+      "grad_norm": 0.5154001712799072,
+      "learning_rate": 4.839237722941608e-05,
+      "loss": 1.2748,
+      "step": 3300
+    },
+    {
+      "epoch": 0.033224214589338934,
+      "grad_norm": 0.5323473811149597,
+      "learning_rate": 4.834351331541657e-05,
+      "loss": 1.284,
+      "step": 3400
+    },
+    {
+      "epoch": 0.03420139737137832,
+      "grad_norm": 0.3947168290615082,
+      "learning_rate": 4.8294649401417056e-05,
+      "loss": 1.276,
+      "step": 3500
+    },
+    {
+      "epoch": 0.035178580153417696,
+      "grad_norm": 0.4776057302951813,
+      "learning_rate": 4.8245785487417544e-05,
+      "loss": 1.2783,
+      "step": 3600
+    },
+    {
+      "epoch": 0.036155762935457074,
+      "grad_norm": 0.4884164035320282,
+      "learning_rate": 4.819692157341804e-05,
+      "loss": 1.2745,
+      "step": 3700
+    },
+    {
+      "epoch": 0.03713294571749646,
+      "grad_norm": 0.5210428833961487,
+      "learning_rate": 4.814805765941852e-05,
+      "loss": 1.2707,
+      "step": 3800
+    },
+    {
+      "epoch": 0.038110128499535836,
+      "grad_norm": 0.46214359998703003,
+      "learning_rate": 4.809919374541901e-05,
+      "loss": 1.2727,
+      "step": 3900
+    },
+    {
+      "epoch": 0.03908731128157522,
+      "grad_norm": 0.2656782865524292,
+      "learning_rate": 4.8050329831419496e-05,
+      "loss": 1.2694,
+      "step": 4000
+    },
+    {
+      "epoch": 0.0400644940636146,
+      "grad_norm": 0.4923059940338135,
+      "learning_rate": 4.8001465917419985e-05,
+      "loss": 1.2665,
+      "step": 4100
+    },
+    {
+      "epoch": 0.04104167684565398,
+      "grad_norm": 0.92928147315979,
+      "learning_rate": 4.795260200342048e-05,
+      "loss": 1.2627,
+      "step": 4200
+    },
+    {
+      "epoch": 0.04201885962769336,
+      "grad_norm": 1.0651229619979858,
+      "learning_rate": 4.790373808942096e-05,
+      "loss": 1.2623,
+      "step": 4300
+    },
+    {
+      "epoch": 0.04299604240973274,
+      "grad_norm": 0.9612557888031006,
+      "learning_rate": 4.7854874175421456e-05,
+      "loss": 1.2482,
+      "step": 4400
+    },
+    {
+      "epoch": 0.043973225191772124,
+      "grad_norm": 1.0120874643325806,
+      "learning_rate": 4.7806010261421944e-05,
+      "loss": 1.2589,
+      "step": 4500
+    },
+    {
+      "epoch": 0.0449504079738115,
+      "grad_norm": 0.6250020861625671,
+      "learning_rate": 4.775714634742243e-05,
+      "loss": 1.2499,
+      "step": 4600
+    },
+    {
+      "epoch": 0.04592759075585088,
+      "grad_norm": 0.2850038707256317,
+      "learning_rate": 4.770828243342292e-05,
+      "loss": 1.2446,
+      "step": 4700
+    },
+    {
+      "epoch": 0.046904773537890264,
+      "grad_norm": 1.2032625675201416,
+      "learning_rate": 4.765941851942341e-05,
+      "loss": 1.2238,
+      "step": 4800
+    },
+    {
+      "epoch": 0.04788195631992964,
+      "grad_norm": 0.42024949193000793,
+      "learning_rate": 4.7610554605423896e-05,
+      "loss": 1.2255,
+      "step": 4900
+    },
+    {
+      "epoch": 0.048859139101969026,
+      "grad_norm": 0.7451406121253967,
+      "learning_rate": 4.756169069142439e-05,
+      "loss": 1.2071,
+      "step": 5000
+    },
+    {
+      "epoch": 0.049836321884008404,
+      "grad_norm": 0.8735096454620361,
+      "learning_rate": 4.751282677742487e-05,
+      "loss": 1.2126,
+      "step": 5100
+    },
+    {
+      "epoch": 0.05081350466604778,
+      "grad_norm": 0.73675137758255,
+      "learning_rate": 4.746396286342537e-05,
+      "loss": 1.2036,
+      "step": 5200
+    },
+    {
+      "epoch": 0.051790687448087167,
+      "grad_norm": 0.6540606617927551,
+      "learning_rate": 4.741509894942585e-05,
+      "loss": 1.1825,
+      "step": 5300
+    },
+    {
+      "epoch": 0.052767870230126544,
+      "grad_norm": 0.825066864490509,
+      "learning_rate": 4.7366235035426336e-05,
+      "loss": 1.1655,
+      "step": 5400
+    },
+    {
         | 
| 390 | 
            +
                  "epoch": 0.05374505301216593,
         | 
| 391 | 
            +
                  "grad_norm": 1.6421219110488892,
         | 
| 392 | 
            +
                  "learning_rate": 4.731737112142683e-05,
         | 
| 393 | 
            +
                  "loss": 1.1716,
         | 
| 394 | 
            +
                  "step": 5500
         | 
| 395 | 
            +
                },
         | 
| 396 | 
            +
                {
         | 
| 397 | 
            +
                  "epoch": 0.05472223579420531,
         | 
| 398 | 
            +
                  "grad_norm": 1.0644057989120483,
         | 
| 399 | 
            +
                  "learning_rate": 4.726850720742731e-05,
         | 
| 400 | 
            +
                  "loss": 1.1384,
         | 
| 401 | 
            +
                  "step": 5600
         | 
| 402 | 
            +
                },
         | 
| 403 | 
            +
                {
         | 
| 404 | 
            +
                  "epoch": 0.055699418576244684,
         | 
| 405 | 
            +
                  "grad_norm": 1.1611616611480713,
         | 
| 406 | 
            +
                  "learning_rate": 4.721964329342781e-05,
         | 
| 407 | 
            +
                  "loss": 1.1499,
         | 
| 408 | 
            +
                  "step": 5700
         | 
| 409 | 
            +
                },
         | 
| 410 | 
            +
                {
         | 
| 411 | 
            +
                  "epoch": 0.05667660135828407,
         | 
| 412 | 
            +
                  "grad_norm": 2.0900723934173584,
         | 
| 413 | 
            +
                  "learning_rate": 4.7170779379428295e-05,
         | 
| 414 | 
            +
                  "loss": 1.1323,
         | 
| 415 | 
            +
                  "step": 5800
         | 
| 416 | 
            +
                },
         | 
| 417 | 
            +
                {
         | 
| 418 | 
            +
                  "epoch": 0.05765378414032345,
         | 
| 419 | 
            +
                  "grad_norm": 1.0580404996871948,
         | 
| 420 | 
            +
                  "learning_rate": 4.712191546542878e-05,
         | 
| 421 | 
            +
                  "loss": 1.112,
         | 
| 422 | 
            +
                  "step": 5900
         | 
| 423 | 
            +
                },
         | 
| 424 | 
            +
                {
         | 
| 425 | 
            +
                  "epoch": 0.058630966922362825,
         | 
| 426 | 
            +
                  "grad_norm": 0.6299407482147217,
         | 
| 427 | 
            +
                  "learning_rate": 4.707305155142927e-05,
         | 
| 428 | 
            +
                  "loss": 1.104,
         | 
| 429 | 
            +
                  "step": 6000
         | 
| 430 | 
            +
                },
         | 
| 431 | 
            +
                {
         | 
| 432 | 
            +
                  "epoch": 0.05960814970440221,
         | 
| 433 | 
            +
                  "grad_norm": 0.6816271543502808,
         | 
| 434 | 
            +
                  "learning_rate": 4.702418763742976e-05,
         | 
| 435 | 
            +
                  "loss": 1.1128,
         | 
| 436 | 
            +
                  "step": 6100
         | 
| 437 | 
            +
                },
         | 
| 438 | 
            +
                {
         | 
| 439 | 
            +
                  "epoch": 0.06058533248644159,
         | 
| 440 | 
            +
                  "grad_norm": 0.654796302318573,
         | 
| 441 | 
            +
                  "learning_rate": 4.697532372343025e-05,
         | 
| 442 | 
            +
                  "loss": 1.0942,
         | 
| 443 | 
            +
                  "step": 6200
         | 
| 444 | 
            +
                },
         | 
| 445 | 
            +
                {
         | 
| 446 | 
            +
                  "epoch": 0.06156251526848097,
         | 
| 447 | 
            +
                  "grad_norm": 1.0433884859085083,
         | 
| 448 | 
            +
                  "learning_rate": 4.692645980943074e-05,
         | 
| 449 | 
            +
                  "loss": 1.0862,
         | 
| 450 | 
            +
                  "step": 6300
         | 
| 451 | 
            +
                },
         | 
| 452 | 
            +
                {
         | 
| 453 | 
            +
                  "epoch": 0.06253969805052036,
         | 
| 454 | 
            +
                  "grad_norm": 0.6256537437438965,
         | 
| 455 | 
            +
                  "learning_rate": 4.6877595895431224e-05,
         | 
| 456 | 
            +
                  "loss": 1.081,
         | 
| 457 | 
            +
                  "step": 6400
         | 
| 458 | 
            +
                },
         | 
| 459 | 
            +
                {
         | 
| 460 | 
            +
                  "epoch": 0.06351688083255973,
         | 
| 461 | 
            +
                  "grad_norm": 0.8173975348472595,
         | 
| 462 | 
            +
                  "learning_rate": 4.682873198143172e-05,
         | 
| 463 | 
            +
                  "loss": 1.0767,
         | 
| 464 | 
            +
                  "step": 6500
         | 
| 465 | 
            +
                },
         | 
| 466 | 
            +
                {
         | 
| 467 | 
            +
                  "epoch": 0.06449406361459911,
         | 
| 468 | 
            +
                  "grad_norm": 0.7856473922729492,
         | 
| 469 | 
            +
                  "learning_rate": 4.6779868067432206e-05,
         | 
| 470 | 
            +
                  "loss": 1.0767,
         | 
| 471 | 
            +
                  "step": 6600
         | 
| 472 | 
            +
                },
         | 
| 473 | 
            +
                {
         | 
| 474 | 
            +
                  "epoch": 0.0654712463966385,
         | 
| 475 | 
            +
                  "grad_norm": 0.6337741017341614,
         | 
| 476 | 
            +
                  "learning_rate": 4.6731004153432695e-05,
         | 
| 477 | 
            +
                  "loss": 1.0829,
         | 
| 478 | 
            +
                  "step": 6700
         | 
| 479 | 
            +
                },
         | 
| 480 | 
            +
                {
         | 
| 481 | 
            +
                  "epoch": 0.06644842917867787,
         | 
| 482 | 
            +
                  "grad_norm": 0.5813809037208557,
         | 
| 483 | 
            +
                  "learning_rate": 4.668214023943318e-05,
         | 
| 484 | 
            +
                  "loss": 1.0571,
         | 
| 485 | 
            +
                  "step": 6800
         | 
| 486 | 
            +
                },
         | 
| 487 | 
            +
                {
         | 
| 488 | 
            +
                  "epoch": 0.06742561196071725,
         | 
| 489 | 
            +
                  "grad_norm": 0.4155445992946625,
         | 
| 490 | 
            +
                  "learning_rate": 4.6633276325433664e-05,
         | 
| 491 | 
            +
                  "loss": 1.0707,
         | 
| 492 | 
            +
                  "step": 6900
         | 
| 493 | 
            +
                },
         | 
| 494 | 
            +
                {
         | 
| 495 | 
            +
                  "epoch": 0.06840279474275664,
         | 
| 496 | 
            +
                  "grad_norm": 0.6730567812919617,
         | 
| 497 | 
            +
                  "learning_rate": 4.658441241143416e-05,
         | 
| 498 | 
            +
                  "loss": 1.0477,
         | 
| 499 | 
            +
                  "step": 7000
         | 
| 500 | 
            +
                },
         | 
| 501 | 
            +
                {
         | 
| 502 | 
            +
                  "epoch": 0.06937997752479601,
         | 
| 503 | 
            +
                  "grad_norm": 0.8348300457000732,
         | 
| 504 | 
            +
                  "learning_rate": 4.653554849743465e-05,
         | 
| 505 | 
            +
                  "loss": 1.0644,
         | 
| 506 | 
            +
                  "step": 7100
         | 
| 507 | 
            +
                },
         | 
| 508 | 
            +
                {
         | 
| 509 | 
            +
                  "epoch": 0.07035716030683539,
         | 
| 510 | 
            +
                  "grad_norm": 2.2414326667785645,
         | 
| 511 | 
            +
                  "learning_rate": 4.6486684583435135e-05,
         | 
| 512 | 
            +
                  "loss": 1.0577,
         | 
| 513 | 
            +
                  "step": 7200
         | 
| 514 | 
            +
                },
         | 
| 515 | 
            +
                {
         | 
| 516 | 
            +
                  "epoch": 0.07133434308887478,
         | 
| 517 | 
            +
                  "grad_norm": 1.6573911905288696,
         | 
| 518 | 
            +
                  "learning_rate": 4.643782066943562e-05,
         | 
| 519 | 
            +
                  "loss": 1.0836,
         | 
| 520 | 
            +
                  "step": 7300
         | 
| 521 | 
            +
                },
         | 
| 522 | 
            +
                {
         | 
| 523 | 
            +
                  "epoch": 0.07231152587091415,
         | 
| 524 | 
            +
                  "grad_norm": 0.5690039396286011,
         | 
| 525 | 
            +
                  "learning_rate": 4.638895675543611e-05,
         | 
| 526 | 
            +
                  "loss": 1.0541,
         | 
| 527 | 
            +
                  "step": 7400
         | 
| 528 | 
            +
                },
         | 
| 529 | 
            +
                {
         | 
| 530 | 
            +
                  "epoch": 0.07328870865295353,
         | 
| 531 | 
            +
                  "grad_norm": 0.527215301990509,
         | 
| 532 | 
            +
                  "learning_rate": 4.63400928414366e-05,
         | 
| 533 | 
            +
                  "loss": 1.0164,
         | 
| 534 | 
            +
                  "step": 7500
         | 
| 535 | 
            +
                },
         | 
| 536 | 
            +
                {
         | 
| 537 | 
            +
                  "epoch": 0.07426589143499292,
         | 
| 538 | 
            +
                  "grad_norm": 0.7997362613677979,
         | 
| 539 | 
            +
                  "learning_rate": 4.6291228927437094e-05,
         | 
| 540 | 
            +
                  "loss": 1.0447,
         | 
| 541 | 
            +
                  "step": 7600
         | 
| 542 | 
            +
                },
         | 
| 543 | 
            +
                {
         | 
| 544 | 
            +
                  "epoch": 0.0752430742170323,
         | 
| 545 | 
            +
                  "grad_norm": 2.257143259048462,
         | 
| 546 | 
            +
                  "learning_rate": 4.6242365013437575e-05,
         | 
| 547 | 
            +
                  "loss": 1.0365,
         | 
| 548 | 
            +
                  "step": 7700
         | 
| 549 | 
            +
                },
         | 
| 550 | 
            +
                {
         | 
| 551 | 
            +
                  "epoch": 0.07622025699907167,
         | 
| 552 | 
            +
                  "grad_norm": 0.9132490158081055,
         | 
| 553 | 
            +
                  "learning_rate": 4.619350109943807e-05,
         | 
| 554 | 
            +
                  "loss": 1.0498,
         | 
| 555 | 
            +
                  "step": 7800
         | 
| 556 | 
            +
                },
         | 
| 557 | 
            +
                {
         | 
| 558 | 
            +
                  "epoch": 0.07719743978111106,
         | 
| 559 | 
            +
                  "grad_norm": 0.5229859948158264,
         | 
| 560 | 
            +
                  "learning_rate": 4.614463718543856e-05,
         | 
| 561 | 
            +
                  "loss": 1.0342,
         | 
| 562 | 
            +
                  "step": 7900
         | 
| 563 | 
            +
                },
         | 
| 564 | 
            +
                {
         | 
| 565 | 
            +
                  "epoch": 0.07817462256315044,
         | 
| 566 | 
            +
                  "grad_norm": 0.6948792338371277,
         | 
| 567 | 
            +
                  "learning_rate": 4.6095773271439046e-05,
         | 
| 568 | 
            +
                  "loss": 1.0325,
         | 
| 569 | 
            +
                  "step": 8000
         | 
| 570 | 
            +
                },
         | 
| 571 | 
            +
                {
         | 
| 572 | 
            +
                  "epoch": 0.07915180534518981,
         | 
| 573 | 
            +
                  "grad_norm": 0.8526360988616943,
         | 
| 574 | 
            +
                  "learning_rate": 4.6046909357439534e-05,
         | 
| 575 | 
            +
                  "loss": 1.0183,
         | 
| 576 | 
            +
                  "step": 8100
         | 
| 577 | 
            +
                },
         | 
| 578 | 
            +
                {
         | 
| 579 | 
            +
                  "epoch": 0.0801289881272292,
         | 
| 580 | 
            +
                  "grad_norm": 1.1457374095916748,
         | 
| 581 | 
            +
                  "learning_rate": 4.599804544344002e-05,
         | 
| 582 | 
            +
                  "loss": 1.0243,
         | 
| 583 | 
            +
                  "step": 8200
         | 
| 584 | 
            +
                },
         | 
| 585 | 
            +
                {
         | 
| 586 | 
            +
                  "epoch": 0.08110617090926858,
         | 
| 587 | 
            +
                  "grad_norm": 0.9335997700691223,
         | 
| 588 | 
            +
                  "learning_rate": 4.594918152944051e-05,
         | 
| 589 | 
            +
                  "loss": 1.046,
         | 
| 590 | 
            +
                  "step": 8300
         | 
| 591 | 
            +
                },
         | 
| 592 | 
            +
                {
         | 
| 593 | 
            +
                  "epoch": 0.08208335369130795,
         | 
| 594 | 
            +
                  "grad_norm": 0.8367229700088501,
         | 
| 595 | 
            +
                  "learning_rate": 4.5900317615441e-05,
         | 
| 596 | 
            +
                  "loss": 1.0176,
         | 
| 597 | 
            +
                  "step": 8400
         | 
| 598 | 
            +
                },
         | 
| 599 | 
            +
                {
         | 
| 600 | 
            +
                  "epoch": 0.08306053647334734,
         | 
| 601 | 
            +
                  "grad_norm": 3.7648801803588867,
         | 
| 602 | 
            +
                  "learning_rate": 4.5851453701441486e-05,
         | 
| 603 | 
            +
                  "loss": 1.0047,
         | 
| 604 | 
            +
                  "step": 8500
         | 
| 605 | 
            +
                },
         | 
| 606 | 
            +
                {
         | 
| 607 | 
            +
                  "epoch": 0.08403771925538672,
         | 
| 608 | 
            +
                  "grad_norm": 0.5877612829208374,
         | 
| 609 | 
            +
                  "learning_rate": 4.5802589787441975e-05,
         | 
| 610 | 
            +
                  "loss": 1.0346,
         | 
| 611 | 
            +
                  "step": 8600
         | 
| 612 | 
            +
                },
         | 
| 613 | 
            +
                {
         | 
| 614 | 
            +
                  "epoch": 0.08501490203742611,
         | 
| 615 | 
            +
                  "grad_norm": 0.5145990252494812,
         | 
| 616 | 
            +
                  "learning_rate": 4.575372587344246e-05,
         | 
| 617 | 
            +
                  "loss": 1.0268,
         | 
| 618 | 
            +
                  "step": 8700
         | 
| 619 | 
            +
                },
         | 
| 620 | 
            +
                {
         | 
| 621 | 
            +
                  "epoch": 0.08599208481946548,
         | 
| 622 | 
            +
                  "grad_norm": 0.9310688376426697,
         | 
| 623 | 
            +
                  "learning_rate": 4.570486195944295e-05,
         | 
| 624 | 
            +
                  "loss": 1.0109,
         | 
| 625 | 
            +
                  "step": 8800
         | 
| 626 | 
            +
                },
         | 
| 627 | 
            +
                {
         | 
| 628 | 
            +
                  "epoch": 0.08696926760150486,
         | 
| 629 | 
            +
                  "grad_norm": 0.5182886719703674,
         | 
| 630 | 
            +
                  "learning_rate": 4.5655998045443445e-05,
         | 
| 631 | 
            +
                  "loss": 1.0117,
         | 
| 632 | 
            +
                  "step": 8900
         | 
| 633 | 
            +
                },
         | 
| 634 | 
            +
                {
         | 
| 635 | 
            +
                  "epoch": 0.08794645038354425,
         | 
| 636 | 
            +
                  "grad_norm": 0.4319695234298706,
         | 
| 637 | 
            +
                  "learning_rate": 4.560713413144393e-05,
         | 
| 638 | 
            +
                  "loss": 1.0053,
         | 
| 639 | 
            +
                  "step": 9000
         | 
| 640 | 
            +
                },
         | 
| 641 | 
            +
                {
         | 
| 642 | 
            +
                  "epoch": 0.08892363316558362,
         | 
| 643 | 
            +
                  "grad_norm": 4.307732582092285,
         | 
| 644 | 
            +
                  "learning_rate": 4.555827021744442e-05,
         | 
| 645 | 
            +
                  "loss": 1.0151,
         | 
| 646 | 
            +
                  "step": 9100
         | 
| 647 | 
            +
                },
         | 
| 648 | 
            +
                {
         | 
| 649 | 
            +
                  "epoch": 0.089900815947623,
         | 
| 650 | 
            +
                  "grad_norm": 0.46516236662864685,
         | 
| 651 | 
            +
                  "learning_rate": 4.550940630344491e-05,
         | 
| 652 | 
            +
                  "loss": 0.9945,
         | 
| 653 | 
            +
                  "step": 9200
         | 
| 654 | 
            +
                },
         | 
| 655 | 
            +
                {
         | 
| 656 | 
            +
                  "epoch": 0.09087799872966239,
         | 
| 657 | 
            +
                  "grad_norm": 1.2372952699661255,
         | 
| 658 | 
            +
                  "learning_rate": 4.54605423894454e-05,
         | 
| 659 | 
            +
                  "loss": 0.9865,
         | 
| 660 | 
            +
                  "step": 9300
         | 
| 661 | 
            +
                },
         | 
| 662 | 
            +
                {
         | 
| 663 | 
            +
                  "epoch": 0.09185518151170176,
         | 
| 664 | 
            +
                  "grad_norm": 0.7494595646858215,
         | 
| 665 | 
            +
                  "learning_rate": 4.5411678475445886e-05,
         | 
| 666 | 
            +
                  "loss": 0.9824,
         | 
| 667 | 
            +
                  "step": 9400
         | 
| 668 | 
            +
                },
         | 
| 669 | 
            +
                {
         | 
| 670 | 
            +
                  "epoch": 0.09283236429374114,
         | 
| 671 | 
            +
                  "grad_norm": 0.5540333390235901,
         | 
| 672 | 
            +
                  "learning_rate": 4.5362814561446374e-05,
         | 
| 673 | 
            +
                  "loss": 1.0132,
         | 
| 674 | 
            +
                  "step": 9500
         | 
| 675 | 
            +
                },
         | 
| 676 | 
            +
                {
         | 
| 677 | 
            +
                  "epoch": 0.09380954707578053,
         | 
| 678 | 
            +
                  "grad_norm": 0.48533427715301514,
         | 
| 679 | 
            +
                  "learning_rate": 4.531395064744686e-05,
         | 
| 680 | 
            +
                  "loss": 1.0173,
         | 
| 681 | 
            +
                  "step": 9600
         | 
| 682 | 
            +
                },
         | 
| 683 | 
            +
                {
         | 
| 684 | 
            +
                  "epoch": 0.0947867298578199,
         | 
| 685 | 
            +
                  "grad_norm": 0.4972572922706604,
         | 
| 686 | 
            +
                  "learning_rate": 4.526508673344736e-05,
         | 
| 687 | 
            +
                  "loss": 1.0078,
         | 
| 688 | 
            +
                  "step": 9700
         | 
| 689 | 
            +
                },
         | 
| 690 | 
            +
                {
         | 
| 691 | 
            +
                  "epoch": 0.09576391263985928,
         | 
| 692 | 
            +
                  "grad_norm": 0.6748878955841064,
         | 
| 693 | 
            +
                  "learning_rate": 4.521622281944784e-05,
         | 
| 694 | 
            +
                  "loss": 1.0172,
         | 
| 695 | 
            +
                  "step": 9800
         | 
| 696 | 
            +
                },
         | 
| 697 | 
            +
                {
         | 
| 698 | 
            +
                  "epoch": 0.09674109542189867,
         | 
| 699 | 
            +
                  "grad_norm": 0.5261876583099365,
         | 
| 700 | 
            +
                  "learning_rate": 4.5167358905448326e-05,
         | 
| 701 | 
            +
                  "loss": 1.0189,
         | 
| 702 | 
            +
                  "step": 9900
         | 
| 703 | 
            +
                },
         | 
| 704 | 
            +
                {
         | 
| 705 | 
            +
                  "epoch": 0.09771827820393805,
         | 
| 706 | 
            +
                  "grad_norm": 0.4164600670337677,
         | 
| 707 | 
            +
                  "learning_rate": 4.5118494991448814e-05,
         | 
| 708 | 
            +
                  "loss": 0.9978,
         | 
| 709 | 
            +
                  "step": 10000
         | 
| 710 | 
            +
                },
         | 
| 711 | 
            +
                {
         | 
| 712 | 
            +
                  "epoch": 0.09869546098597742,
         | 
| 713 | 
            +
                  "grad_norm": 0.40417763590812683,
         | 
| 714 | 
            +
                  "learning_rate": 4.50696310774493e-05,
         | 
| 715 | 
            +
                  "loss": 1.0103,
         | 
| 716 | 
            +
                  "step": 10100
         | 
| 717 | 
            +
                },
         | 
| 718 | 
            +
                {
         | 
| 719 | 
            +
                  "epoch": 0.09967264376801681,
         | 
| 720 | 
            +
                  "grad_norm": 0.8591890931129456,
         | 
| 721 | 
            +
                  "learning_rate": 4.50207671634498e-05,
         | 
| 722 | 
            +
                  "loss": 1.0065,
         | 
| 723 | 
            +
                  "step": 10200
         | 
| 724 | 
            +
                },
         | 
| 725 | 
            +
                {
         | 
| 726 | 
            +
                  "epoch": 0.10064982655005619,
         | 
| 727 | 
            +
                  "grad_norm": 0.5676371455192566,
         | 
| 728 | 
            +
                  "learning_rate": 4.497190324945028e-05,
         | 
| 729 | 
            +
                  "loss": 1.0089,
         | 
| 730 | 
            +
                  "step": 10300
         | 
| 731 | 
            +
                },
         | 
| 732 | 
            +
                {
         | 
| 733 | 
            +
                  "epoch": 0.10162700933209556,
         | 
| 734 | 
            +
                  "grad_norm": 0.616646945476532,
         | 
| 735 | 
            +
                  "learning_rate": 4.492303933545077e-05,
         | 
| 736 | 
            +
                  "loss": 0.9897,
         | 
| 737 | 
            +
                  "step": 10400
         | 
| 738 | 
            +
                },
         | 
| 739 | 
            +
                {
         | 
| 740 | 
            +
                  "epoch": 0.10260419211413495,
         | 
| 741 | 
            +
                  "grad_norm": 0.37536484003067017,
         | 
| 742 | 
            +
                  "learning_rate": 4.487417542145126e-05,
         | 
| 743 | 
            +
                  "loss": 0.9989,
         | 
| 744 | 
            +
                  "step": 10500
         | 
| 745 | 
            +
                },
         | 
| 746 | 
            +
                {
         | 
| 747 | 
            +
                  "epoch": 0.10358137489617433,
         | 
| 748 | 
            +
                  "grad_norm": 0.6801789402961731,
         | 
| 749 | 
            +
                  "learning_rate": 4.482531150745175e-05,
         | 
| 750 | 
            +
                  "loss": 0.9923,
         | 
| 751 | 
            +
                  "step": 10600
         | 
| 752 | 
            +
                },
         | 
| 753 | 
            +
                {
         | 
| 754 | 
            +
                  "epoch": 0.1045585576782137,
         | 
| 755 | 
            +
                  "grad_norm": 0.5848776698112488,
         | 
| 756 | 
            +
                  "learning_rate": 4.477644759345224e-05,
         | 
| 757 | 
            +
                  "loss": 0.9919,
         | 
| 758 | 
            +
                  "step": 10700
         | 
| 759 | 
            +
                },
         | 
| 760 | 
            +
                {
         | 
| 761 | 
            +
                  "epoch": 0.10553574046025309,
         | 
| 762 | 
            +
                  "grad_norm": 0.7715157866477966,
         | 
| 763 | 
            +
                  "learning_rate": 4.4727583679452725e-05,
         | 
| 764 | 
            +
                  "loss": 0.9814,
         | 
| 765 | 
            +
                  "step": 10800
         | 
| 766 | 
            +
                },
         | 
| 767 | 
            +
                {
         | 
| 768 | 
            +
                  "epoch": 0.10651292324229247,
         | 
| 769 | 
            +
                  "grad_norm": 0.8080986142158508,
         | 
| 770 | 
            +
                  "learning_rate": 4.4678719765453214e-05,
         | 
| 771 | 
            +
                  "loss": 0.9935,
         | 
| 772 | 
            +
                  "step": 10900
         | 
| 773 | 
            +
                },
         | 
| 774 | 
            +
                {
         | 
| 775 | 
            +
                  "epoch": 0.10749010602433186,
         | 
| 776 | 
            +
                  "grad_norm": 0.4375016391277313,
         | 
| 777 | 
            +
                  "learning_rate": 4.462985585145371e-05,
         | 
| 778 | 
            +
                  "loss": 0.988,
         | 
| 779 | 
            +
                  "step": 11000
         | 
| 780 | 
            +
                },
         | 
| 781 | 
            +
                {
         | 
| 782 | 
            +
                  "epoch": 0.10846728880637123,
         | 
| 783 | 
            +
                  "grad_norm": 0.8055805563926697,
         | 
| 784 | 
            +
                  "learning_rate": 4.458099193745419e-05,
         | 
| 785 | 
            +
                  "loss": 0.9861,
         | 
| 786 | 
            +
                  "step": 11100
         | 
| 787 | 
            +
                },
         | 
| 788 | 
            +
                {
         | 
| 789 | 
            +
                  "epoch": 0.10944447158841061,
         | 
| 790 | 
            +
                  "grad_norm": 1.1914618015289307,
         | 
| 791 | 
            +
                  "learning_rate": 4.4532128023454685e-05,
         | 
| 792 | 
            +
                  "loss": 0.9622,
         | 
| 793 | 
            +
                  "step": 11200
         | 
| 794 | 
            +
                },
         | 
| 795 | 
            +
                {
         | 
| 796 | 
            +
                  "epoch": 0.11042165437045,
         | 
| 797 | 
            +
                  "grad_norm": 0.4247540533542633,
         | 
| 798 | 
            +
                  "learning_rate": 4.448326410945517e-05,
         | 
| 799 | 
            +
                  "loss": 0.9602,
         | 
| 800 | 
            +
                  "step": 11300
         | 
| 801 | 
            +
                },
         | 
| 802 | 
            +
                {
         | 
| 803 | 
            +
                  "epoch": 0.11139883715248937,
         | 
| 804 | 
            +
                  "grad_norm": 0.5454650521278381,
         | 
| 805 | 
            +
                  "learning_rate": 4.4434400195455654e-05,
         | 
| 806 | 
            +
                  "loss": 0.9696,
         | 
| 807 | 
            +
                  "step": 11400
         | 
| 808 | 
            +
                },
         | 
| 809 | 
            +
                {
         | 
| 810 | 
            +
                  "epoch": 0.11237601993452875,
         | 
| 811 | 
            +
                  "grad_norm": 0.5259748697280884,
         | 
| 812 | 
            +
                  "learning_rate": 4.438553628145615e-05,
         | 
| 813 | 
            +
                  "loss": 1.0021,
         | 
| 814 | 
            +
                  "step": 11500
         | 
| 815 | 
            +
                },
         | 
| 816 | 
            +
                {
         | 
| 817 | 
            +
                  "epoch": 0.11335320271656814,
         | 
| 818 | 
            +
                  "grad_norm": 0.5165246725082397,
         | 
| 819 | 
            +
                  "learning_rate": 4.433667236745663e-05,
         | 
| 820 | 
            +
                  "loss": 0.982,
         | 
| 821 | 
            +
                  "step": 11600
         | 
| 822 | 
            +
                },
         | 
| 823 | 
            +
                {
         | 
| 824 | 
            +
                  "epoch": 0.11433038549860751,
         | 
| 825 | 
            +
                  "grad_norm": 0.6768147945404053,
         | 
| 826 | 
            +
                  "learning_rate": 4.4287808453457125e-05,
         | 
| 827 | 
            +
                  "loss": 0.9398,
         | 
| 828 | 
            +
                  "step": 11700
         | 
| 829 | 
            +
                },
         | 
| 830 | 
            +
                {
         | 
| 831 | 
            +
                  "epoch": 0.1153075682806469,
         | 
| 832 | 
            +
                  "grad_norm": 1.0245041847229004,
         | 
| 833 | 
            +
                  "learning_rate": 4.423894453945761e-05,
         | 
| 834 | 
            +
                  "loss": 0.9934,
         | 
| 835 | 
            +
                  "step": 11800
         | 
| 836 | 
            +
                },
         | 
| 837 | 
            +
                {
         | 
| 838 | 
            +
                  "epoch": 0.11628475106268628,
         | 
| 839 | 
            +
                  "grad_norm": 0.6241583228111267,
         | 
| 840 | 
            +
                  "learning_rate": 4.41900806254581e-05,
         | 
| 841 | 
            +
                  "loss": 0.9697,
         | 
| 842 | 
            +
                  "step": 11900
         | 
| 843 | 
            +
                },
         | 
| 844 | 
            +
                {
         | 
| 845 | 
            +
                  "epoch": 0.11726193384472565,
         | 
| 846 | 
            +
                  "grad_norm": 0.4234873652458191,
         | 
| 847 | 
            +
                  "learning_rate": 4.414121671145859e-05,
         | 
| 848 | 
            +
                  "loss": 0.9723,
         | 
| 849 | 
            +
                  "step": 12000
         | 
| 850 | 
            +
                },
         | 
| 851 | 
            +
                {
         | 
| 852 | 
            +
                  "epoch": 0.11823911662676503,
         | 
| 853 | 
            +
                  "grad_norm": 0.3932545781135559,
         | 
| 854 | 
            +
                  "learning_rate": 4.409235279745908e-05,
         | 
| 855 | 
            +
                  "loss": 0.9826,
         | 
| 856 | 
            +
                  "step": 12100
         | 
| 857 | 
            +
                },
         | 
| 858 | 
            +
                {
         | 
| 859 | 
            +
                  "epoch": 0.11921629940880442,
         | 
| 860 | 
            +
                  "grad_norm": 1.5067880153656006,
         | 
| 861 | 
            +
                  "learning_rate": 4.4043488883459565e-05,
         | 
| 862 | 
            +
                  "loss": 0.9581,
         | 
| 863 | 
            +
                  "step": 12200
         | 
| 864 | 
            +
                },
         | 
| 865 | 
            +
                {
         | 
| 866 | 
            +
                  "epoch": 0.1201934821908438,
         | 
| 867 | 
            +
                  "grad_norm": 0.41707366704940796,
         | 
| 868 | 
            +
                  "learning_rate": 4.399462496946006e-05,
         | 
| 869 | 
            +
                  "loss": 0.9666,
         | 
| 870 | 
            +
                  "step": 12300
         | 
| 871 | 
            +
                },
         | 
| 872 | 
            +
                {
         | 
| 873 | 
            +
                  "epoch": 0.12117066497288317,
         | 
| 874 | 
            +
                  "grad_norm": 1.1278653144836426,
         | 
| 875 | 
            +
                  "learning_rate": 4.394576105546054e-05,
         | 
| 876 | 
            +
                  "loss": 0.9553,
         | 
| 877 | 
            +
                  "step": 12400
         | 
| 878 | 
            +
                },
         | 
| 879 | 
            +
                {
         | 
| 880 | 
            +
                  "epoch": 0.12214784775492256,
         | 
| 881 | 
            +
                  "grad_norm": 0.350543737411499,
         | 
| 882 | 
            +
                  "learning_rate": 4.3896897141461036e-05,
         | 
| 883 | 
            +
                  "loss": 0.9422,
         | 
| 884 | 
            +
                  "step": 12500
         | 
| 885 | 
            +
                },
         | 
| 886 | 
            +
                {
         | 
| 887 | 
            +
                  "epoch": 0.12312503053696194,
         | 
| 888 | 
            +
                  "grad_norm": 0.3775838315486908,
         | 
| 889 | 
            +
                  "learning_rate": 4.3848033227461524e-05,
         | 
| 890 | 
            +
                  "loss": 0.9626,
         | 
| 891 | 
            +
                  "step": 12600
         | 
| 892 | 
            +
                },
         | 
| 893 | 
            +
                {
         | 
| 894 | 
            +
                  "epoch": 0.12410221331900131,
         | 
| 895 | 
            +
                  "grad_norm": 0.8341017365455627,
         | 
| 896 | 
            +
                  "learning_rate": 4.379916931346201e-05,
         | 
| 897 | 
            +
                  "loss": 0.9289,
         | 
| 898 | 
            +
                  "step": 12700
         | 
| 899 | 
            +
                },
         | 
| 900 | 
            +
                {
         | 
| 901 | 
            +
                  "epoch": 0.1250793961010407,
         | 
| 902 | 
            +
                  "grad_norm": 0.805614173412323,
         | 
| 903 | 
            +
                  "learning_rate": 4.37503053994625e-05,
         | 
| 904 | 
            +
                  "loss": 0.9474,
         | 
| 905 | 
            +
                  "step": 12800
         | 
| 906 | 
            +
                },
         | 
| 907 | 
            +
                {
         | 
| 908 | 
            +
                  "epoch": 0.12605657888308008,
         | 
| 909 | 
            +
                  "grad_norm": 0.8439397215843201,
         | 
| 910 | 
            +
                  "learning_rate": 4.370144148546299e-05,
         | 
| 911 | 
            +
                  "loss": 0.9661,
         | 
| 912 | 
            +
                  "step": 12900
         | 
| 913 | 
            +
                },
         | 
| 914 | 
            +
                {
         | 
| 915 | 
            +
                  "epoch": 0.12703376166511945,
         | 
| 916 | 
            +
                  "grad_norm": 1.1272892951965332,
         | 
| 917 | 
            +
                  "learning_rate": 4.3652577571463476e-05,
         | 
| 918 | 
            +
                  "loss": 0.9514,
         | 
| 919 | 
            +
                  "step": 13000
         | 
| 920 | 
            +
                },
         | 
| 921 | 
            +
                {
         | 
| 922 | 
            +
                  "epoch": 0.12801094444715885,
         | 
| 923 | 
            +
                  "grad_norm": 0.6426375508308411,
         | 
| 924 | 
            +
                  "learning_rate": 4.3603713657463965e-05,
         | 
| 925 | 
            +
                  "loss": 0.9448,
         | 
| 926 | 
            +
                  "step": 13100
         | 
| 927 | 
            +
                },
         | 
| 928 | 
            +
                {
         | 
| 929 | 
            +
                  "epoch": 0.12898812722919822,
         | 
| 930 | 
            +
                  "grad_norm": 1.3205431699752808,
         | 
| 931 | 
            +
                  "learning_rate": 4.355484974346445e-05,
         | 
| 932 | 
            +
                  "loss": 0.9511,
         | 
| 933 | 
            +
                  "step": 13200
         | 
| 934 | 
            +
                },
         | 
| 935 | 
            +
                {
         | 
| 936 | 
            +
                  "epoch": 0.1299653100112376,
         | 
| 937 | 
            +
                  "grad_norm": 0.3671954870223999,
         | 
| 938 | 
            +
                  "learning_rate": 4.350598582946494e-05,
         | 
| 939 | 
            +
                  "loss": 0.9506,
         | 
| 940 | 
            +
                  "step": 13300
         | 
| 941 | 
            +
                },
         | 
| 942 | 
            +
                {
         | 
| 943 | 
            +
                  "epoch": 0.130942492793277,
         | 
| 944 | 
            +
                  "grad_norm": 0.7566332817077637,
         | 
| 945 | 
            +
                  "learning_rate": 4.345712191546543e-05,
         | 
| 946 | 
            +
                  "loss": 0.9363,
         | 
| 947 | 
            +
                  "step": 13400
         | 
| 948 | 
            +
                },
         | 
| 949 | 
            +
                {
         | 
| 950 | 
            +
                  "epoch": 0.13191967557531636,
         | 
| 951 | 
            +
                  "grad_norm": 0.8800159692764282,
         | 
| 952 | 
            +
                  "learning_rate": 4.340825800146592e-05,
         | 
| 953 | 
            +
                  "loss": 0.9388,
         | 
| 954 | 
            +
                  "step": 13500
         | 
| 955 | 
            +
                },
         | 
| 956 | 
            +
                {
         | 
| 957 | 
            +
                  "epoch": 0.13289685835735573,
         | 
| 958 | 
            +
                  "grad_norm": 0.7134628891944885,
         | 
| 959 | 
            +
                  "learning_rate": 4.335939408746641e-05,
         | 
| 960 | 
            +
                  "loss": 0.9162,
         | 
| 961 | 
            +
                  "step": 13600
         | 
| 962 | 
            +
                },
         | 
| 963 | 
            +
                {
         | 
| 964 | 
            +
                  "epoch": 0.13387404113939513,
         | 
| 965 | 
            +
                  "grad_norm": 0.5555543899536133,
         | 
| 966 | 
            +
                  "learning_rate": 4.331053017346689e-05,
         | 
| 967 | 
            +
                  "loss": 0.9366,
         | 
| 968 | 
            +
                  "step": 13700
         | 
| 969 | 
            +
                },
         | 
| 970 | 
            +
                {
         | 
| 971 | 
            +
                  "epoch": 0.1348512239214345,
         | 
| 972 | 
            +
                  "grad_norm": 0.4485512375831604,
         | 
| 973 | 
            +
                  "learning_rate": 4.326166625946739e-05,
         | 
| 974 | 
            +
                  "loss": 0.9286,
         | 
| 975 | 
            +
                  "step": 13800
         | 
| 976 | 
            +
                },
         | 
| 977 | 
            +
                {
         | 
| 978 | 
            +
                  "epoch": 0.13582840670347388,
         | 
| 979 | 
            +
                  "grad_norm": 0.8888948559761047,
         | 
| 980 | 
            +
                  "learning_rate": 4.3212802345467876e-05,
         | 
| 981 | 
            +
                  "loss": 0.943,
         | 
| 982 | 
            +
                  "step": 13900
         | 
| 983 | 
            +
                },
         | 
| 984 | 
            +
                {
         | 
| 985 | 
            +
                  "epoch": 0.13680558948551327,
         | 
| 986 | 
            +
                  "grad_norm": 0.6719749569892883,
         | 
| 987 | 
            +
                  "learning_rate": 4.3163938431468364e-05,
         | 
| 988 | 
            +
                  "loss": 0.9217,
         | 
| 989 | 
            +
                  "step": 14000
         | 
| 990 | 
            +
                },
         | 
| 991 | 
            +
                {
         | 
| 992 | 
            +
                  "epoch": 0.13778277226755264,
         | 
| 993 | 
            +
                  "grad_norm": 0.695377767086029,
         | 
| 994 | 
            +
                  "learning_rate": 4.311507451746885e-05,
         | 
| 995 | 
            +
                  "loss": 0.9093,
         | 
| 996 | 
            +
                  "step": 14100
         | 
| 997 | 
            +
                },
         | 
| 998 | 
            +
                {
         | 
| 999 | 
            +
                  "epoch": 0.13875995504959202,
         | 
| 1000 | 
            +
                  "grad_norm": 0.5966312885284424,
         | 
| 1001 | 
            +
                  "learning_rate": 4.306621060346934e-05,
         | 
| 1002 | 
            +
                  "loss": 0.9195,
         | 
| 1003 | 
            +
                  "step": 14200
         | 
| 1004 | 
            +
                },
         | 
| 1005 | 
            +
                {
         | 
| 1006 | 
            +
                  "epoch": 0.13973713783163141,
         | 
| 1007 | 
            +
                  "grad_norm": 0.8073310256004333,
         | 
| 1008 | 
            +
                  "learning_rate": 4.301734668946983e-05,
         | 
| 1009 | 
            +
                  "loss": 0.9309,
         | 
| 1010 | 
            +
                  "step": 14300
         | 
| 1011 | 
            +
                },
         | 
| 1012 | 
            +
                {
         | 
| 1013 | 
            +
                  "epoch": 0.14071432061367078,
         | 
| 1014 | 
            +
                  "grad_norm": 0.6303800940513611,
         | 
| 1015 | 
            +
                  "learning_rate": 4.2968482775470316e-05,
         | 
| 1016 | 
            +
                  "loss": 0.9458,
         | 
| 1017 | 
            +
                  "step": 14400
         | 
| 1018 | 
            +
                },
         | 
| 1019 | 
            +
                {
         | 
| 1020 | 
            +
                  "epoch": 0.14169150339571016,
         | 
| 1021 | 
            +
                  "grad_norm": 0.7043970823287964,
         | 
| 1022 | 
            +
                  "learning_rate": 4.2919618861470804e-05,
         | 
| 1023 | 
            +
                  "loss": 0.9132,
         | 
| 1024 | 
            +
                  "step": 14500
         | 
| 1025 | 
            +
                },
         | 
| 1026 | 
            +
                {
         | 
| 1027 | 
            +
                  "epoch": 0.14266868617774955,
         | 
| 1028 | 
            +
                  "grad_norm": 0.9100736379623413,
         | 
| 1029 | 
            +
                  "learning_rate": 4.287075494747129e-05,
         | 
| 1030 | 
            +
                  "loss": 0.9296,
         | 
| 1031 | 
            +
                  "step": 14600
         | 
| 1032 | 
            +
                },
         | 
| 1033 | 
            +
                {
         | 
| 1034 | 
            +
                  "epoch": 0.14364586895978892,
         | 
| 1035 | 
            +
                  "grad_norm": 0.787862241268158,
         | 
| 1036 | 
            +
                  "learning_rate": 4.282189103347179e-05,
         | 
| 1037 | 
            +
                  "loss": 0.9643,
         | 
| 1038 | 
            +
                  "step": 14700
         | 
| 1039 | 
            +
                },
         | 
| 1040 | 
            +
                {
         | 
| 1041 | 
            +
                  "epoch": 0.1446230517418283,
         | 
| 1042 | 
            +
                  "grad_norm": 0.8169028162956238,
         | 
| 1043 | 
            +
                  "learning_rate": 4.277302711947227e-05,
         | 
| 1044 | 
            +
                  "loss": 0.9244,
         | 
| 1045 | 
            +
                  "step": 14800
         | 
| 1046 | 
            +
                },
         | 
| 1047 | 
            +
                {
         | 
| 1048 | 
            +
                  "epoch": 0.1456002345238677,
         | 
| 1049 | 
            +
                  "grad_norm": 0.9544184803962708,
         | 
| 1050 | 
            +
                  "learning_rate": 4.272416320547276e-05,
         | 
| 1051 | 
            +
                  "loss": 0.918,
         | 
| 1052 | 
            +
                  "step": 14900
         | 
| 1053 | 
            +
                },
         | 
| 1054 | 
            +
                {
         | 
| 1055 | 
            +
                  "epoch": 0.14657741730590707,
         | 
| 1056 | 
            +
                  "grad_norm": 0.5325574278831482,
         | 
| 1057 | 
            +
                  "learning_rate": 4.2675299291473245e-05,
         | 
| 1058 | 
            +
                  "loss": 0.9273,
         | 
| 1059 | 
            +
                  "step": 15000
         | 
| 1060 | 
            +
                },
         | 
| 1061 | 
            +
                {
         | 
| 1062 | 
            +
                  "epoch": 0.14755460008794646,
         | 
| 1063 | 
            +
                  "grad_norm": 1.1403323411941528,
         | 
| 1064 | 
            +
                  "learning_rate": 4.262643537747374e-05,
         | 
| 1065 | 
            +
                  "loss": 0.9095,
         | 
| 1066 | 
            +
                  "step": 15100
         | 
| 1067 | 
            +
                },
         | 
| 1068 | 
            +
                {
         | 
| 1069 | 
            +
                  "epoch": 0.14853178286998583,
         | 
| 1070 | 
            +
                  "grad_norm": 1.0411937236785889,
         | 
| 1071 | 
            +
                  "learning_rate": 4.257757146347423e-05,
         | 
| 1072 | 
            +
                  "loss": 0.8967,
         | 
| 1073 | 
            +
                  "step": 15200
         | 
| 1074 | 
            +
                },
         | 
| 1075 | 
            +
                {
         | 
| 1076 | 
            +
                  "epoch": 0.1495089656520252,
         | 
| 1077 | 
            +
                  "grad_norm": 0.630393922328949,
         | 
| 1078 | 
            +
                  "learning_rate": 4.2528707549474715e-05,
         | 
| 1079 | 
      "loss": 0.8883,
      "step": 15300
    },
    {
      "epoch": 0.1504861484340646,
      "grad_norm": 0.9445775747299194,
      "learning_rate": 4.2479843635475204e-05,
      "loss": 0.9253,
      "step": 15400
    },
    {
      "epoch": 0.15146333121610397,
      "grad_norm": 0.5689444541931152,
      "learning_rate": 4.243097972147569e-05,
      "loss": 0.8983,
      "step": 15500
    },
    {
      "epoch": 0.15244051399814335,
      "grad_norm": 0.7726677656173706,
      "learning_rate": 4.238211580747618e-05,
      "loss": 0.9228,
      "step": 15600
    },
    {
      "epoch": 0.15341769678018274,
      "grad_norm": 0.8260165452957153,
      "learning_rate": 4.2333251893476675e-05,
      "loss": 0.9202,
      "step": 15700
    },
    {
      "epoch": 0.15439487956222211,
      "grad_norm": 0.4869302809238434,
      "learning_rate": 4.2284387979477156e-05,
      "loss": 0.9283,
      "step": 15800
    },
    {
      "epoch": 0.15537206234426149,
      "grad_norm": 0.5768991708755493,
      "learning_rate": 4.2235524065477644e-05,
      "loss": 0.9233,
      "step": 15900
    },
    {
      "epoch": 0.15634924512630088,
      "grad_norm": 0.8856435418128967,
      "learning_rate": 4.218666015147814e-05,
      "loss": 0.8825,
      "step": 16000
    },
    {
      "epoch": 0.15732642790834026,
      "grad_norm": 0.5258185267448425,
      "learning_rate": 4.213779623747862e-05,
      "loss": 0.8834,
      "step": 16100
    },
    {
      "epoch": 0.15830361069037963,
      "grad_norm": 0.8340526223182678,
      "learning_rate": 4.2088932323479115e-05,
      "loss": 0.8856,
      "step": 16200
    },
    {
      "epoch": 0.15928079347241902,
      "grad_norm": 0.4123723804950714,
      "learning_rate": 4.2040068409479596e-05,
      "loss": 0.8957,
      "step": 16300
    },
    {
      "epoch": 0.1602579762544584,
      "grad_norm": 0.8336274027824402,
      "learning_rate": 4.199120449548009e-05,
      "loss": 0.9053,
      "step": 16400
    },
    {
      "epoch": 0.16123515903649777,
      "grad_norm": 0.7977516055107117,
      "learning_rate": 4.194234058148058e-05,
      "loss": 0.8698,
      "step": 16500
    },
    {
      "epoch": 0.16221234181853716,
      "grad_norm": 0.5064985156059265,
      "learning_rate": 4.189347666748107e-05,
      "loss": 0.8945,
      "step": 16600
    },
    {
      "epoch": 0.16318952460057654,
      "grad_norm": 0.8241267204284668,
      "learning_rate": 4.1844612753481555e-05,
      "loss": 0.8875,
      "step": 16700
    },
    {
      "epoch": 0.1641667073826159,
      "grad_norm": 0.7517113089561462,
      "learning_rate": 4.179574883948204e-05,
      "loss": 0.8845,
      "step": 16800
    },
    {
      "epoch": 0.1651438901646553,
      "grad_norm": 0.6297169923782349,
      "learning_rate": 4.174688492548253e-05,
      "loss": 0.9303,
      "step": 16900
    },
    {
      "epoch": 0.16612107294669468,
      "grad_norm": 0.5828490257263184,
      "learning_rate": 4.1698021011483026e-05,
      "loss": 0.8654,
      "step": 17000
    },
    {
      "epoch": 0.16709825572873405,
      "grad_norm": 0.3038561940193176,
      "learning_rate": 4.164915709748351e-05,
      "loss": 0.8933,
      "step": 17100
    },
    {
      "epoch": 0.16807543851077344,
      "grad_norm": 0.8928827047348022,
      "learning_rate": 4.1600293183484e-05,
      "loss": 0.8509,
      "step": 17200
    },
    {
      "epoch": 0.16905262129281282,
      "grad_norm": 0.7055086493492126,
      "learning_rate": 4.155142926948449e-05,
      "loss": 0.8814,
      "step": 17300
    },
    {
      "epoch": 0.17002980407485221,
      "grad_norm": 0.5377823710441589,
      "learning_rate": 4.150256535548497e-05,
      "loss": 0.888,
      "step": 17400
    },
    {
      "epoch": 0.17100698685689159,
      "grad_norm": 0.6319778561592102,
      "learning_rate": 4.1453701441485466e-05,
      "loss": 0.8575,
      "step": 17500
    },
    {
      "epoch": 0.17198416963893096,
      "grad_norm": 0.8756042122840881,
      "learning_rate": 4.1404837527485954e-05,
      "loss": 0.8805,
      "step": 17600
    },
    {
      "epoch": 0.17296135242097035,
      "grad_norm": 0.5293178558349609,
      "learning_rate": 4.135597361348644e-05,
      "loss": 0.8471,
      "step": 17700
    },
    {
      "epoch": 0.17393853520300973,
      "grad_norm": 0.9118284583091736,
      "learning_rate": 4.130710969948693e-05,
      "loss": 0.8426,
      "step": 17800
    },
    {
      "epoch": 0.1749157179850491,
      "grad_norm": 1.0211195945739746,
      "learning_rate": 4.125824578548742e-05,
      "loss": 0.8877,
      "step": 17900
    },
    {
      "epoch": 0.1758929007670885,
      "grad_norm": 1.4174985885620117,
      "learning_rate": 4.120938187148791e-05,
      "loss": 0.8731,
      "step": 18000
    },
    {
      "epoch": 0.17687008354912787,
      "grad_norm": 0.8243415951728821,
      "learning_rate": 4.1160517957488395e-05,
      "loss": 0.8852,
      "step": 18100
    },
    {
      "epoch": 0.17784726633116724,
      "grad_norm": 0.8385602235794067,
      "learning_rate": 4.111165404348888e-05,
      "loss": 0.8361,
      "step": 18200
    },
    {
      "epoch": 0.17882444911320663,
      "grad_norm": 1.003968358039856,
      "learning_rate": 4.106279012948938e-05,
      "loss": 0.8738,
      "step": 18300
    },
    {
      "epoch": 0.179801631895246,
      "grad_norm": 0.7428449988365173,
      "learning_rate": 4.101392621548986e-05,
      "loss": 0.8563,
      "step": 18400
    },
    {
      "epoch": 0.18077881467728538,
      "grad_norm": 1.8963735103607178,
      "learning_rate": 4.0965062301490354e-05,
      "loss": 0.8428,
      "step": 18500
    },
    {
      "epoch": 0.18175599745932478,
      "grad_norm": 0.6868895888328552,
      "learning_rate": 4.091619838749084e-05,
      "loss": 0.8727,
      "step": 18600
    },
    {
      "epoch": 0.18273318024136415,
      "grad_norm": 1.8936256170272827,
      "learning_rate": 4.086733447349133e-05,
      "loss": 0.9211,
      "step": 18700
    },
    {
      "epoch": 0.18371036302340352,
      "grad_norm": 1.004941463470459,
      "learning_rate": 4.081847055949182e-05,
      "loss": 0.8404,
      "step": 18800
    },
    {
      "epoch": 0.18468754580544292,
      "grad_norm": 1.4084818363189697,
      "learning_rate": 4.0769606645492306e-05,
      "loss": 0.868,
      "step": 18900
    },
    {
      "epoch": 0.1856647285874823,
      "grad_norm": 0.6459541320800781,
      "learning_rate": 4.0720742731492794e-05,
      "loss": 0.8583,
      "step": 19000
    },
    {
      "epoch": 0.18664191136952166,
      "grad_norm": 0.7335548996925354,
      "learning_rate": 4.067187881749328e-05,
      "loss": 0.8622,
      "step": 19100
    },
    {
      "epoch": 0.18761909415156106,
      "grad_norm": 0.6783348321914673,
      "learning_rate": 4.062301490349377e-05,
      "loss": 0.8572,
      "step": 19200
    },
    {
      "epoch": 0.18859627693360043,
      "grad_norm": 0.6323419809341431,
      "learning_rate": 4.057415098949426e-05,
      "loss": 0.8763,
      "step": 19300
    },
    {
      "epoch": 0.1895734597156398,
      "grad_norm": 0.963927686214447,
      "learning_rate": 4.052528707549475e-05,
      "loss": 0.8543,
      "step": 19400
    },
    {
      "epoch": 0.1905506424976792,
      "grad_norm": 0.4785550832748413,
      "learning_rate": 4.0476423161495234e-05,
      "loss": 0.863,
      "step": 19500
    },
    {
      "epoch": 0.19152782527971857,
      "grad_norm": 0.6358627080917358,
      "learning_rate": 4.042755924749573e-05,
      "loss": 0.8842,
      "step": 19600
    },
    {
      "epoch": 0.19250500806175797,
      "grad_norm": 0.7857956886291504,
      "learning_rate": 4.037869533349621e-05,
      "loss": 0.8698,
      "step": 19700
    },
    {
      "epoch": 0.19348219084379734,
      "grad_norm": 0.5225537419319153,
      "learning_rate": 4.0329831419496705e-05,
      "loss": 0.8842,
      "step": 19800
    },
    {
      "epoch": 0.1944593736258367,
      "grad_norm": 0.582313597202301,
      "learning_rate": 4.0280967505497194e-05,
      "loss": 0.8506,
      "step": 19900
    },
    {
      "epoch": 0.1954365564078761,
      "grad_norm": 0.7206740379333496,
      "learning_rate": 4.023210359149768e-05,
      "loss": 0.8529,
      "step": 20000
    },
    {
      "epoch": 0.19641373918991548,
      "grad_norm": 0.45054760575294495,
      "learning_rate": 4.018323967749817e-05,
      "loss": 0.8564,
      "step": 20100
    },
    {
      "epoch": 0.19739092197195485,
      "grad_norm": 0.9214595556259155,
      "learning_rate": 4.013437576349866e-05,
      "loss": 0.8443,
      "step": 20200
    },
    {
      "epoch": 0.19836810475399425,
      "grad_norm": 0.9843263626098633,
      "learning_rate": 4.0085511849499146e-05,
      "loss": 0.856,
      "step": 20300
    },
    {
      "epoch": 0.19934528753603362,
      "grad_norm": 0.6508098840713501,
      "learning_rate": 4.0036647935499634e-05,
      "loss": 0.8532,
      "step": 20400
    },
    {
      "epoch": 0.200322470318073,
      "grad_norm": 0.8091655969619751,
      "learning_rate": 3.998778402150012e-05,
      "loss": 0.8691,
      "step": 20500
    },
    {
      "epoch": 0.20129965310011239,
      "grad_norm": 0.8139657378196716,
      "learning_rate": 3.993892010750061e-05,
      "loss": 0.8608,
      "step": 20600
    },
    {
      "epoch": 0.20227683588215176,
      "grad_norm": 0.628423273563385,
      "learning_rate": 3.9890056193501105e-05,
      "loss": 0.8369,
      "step": 20700
    },
    {
      "epoch": 0.20325401866419113,
      "grad_norm": 1.737331748008728,
      "learning_rate": 3.9841192279501586e-05,
      "loss": 0.8363,
      "step": 20800
    },
    {
      "epoch": 0.20423120144623053,
      "grad_norm": 1.036280870437622,
      "learning_rate": 3.979232836550208e-05,
      "loss": 0.8387,
      "step": 20900
    },
    {
      "epoch": 0.2052083842282699,
      "grad_norm": 0.35834863781929016,
      "learning_rate": 3.974346445150256e-05,
      "loss": 0.8565,
      "step": 21000
    },
    {
      "epoch": 0.20618556701030927,
      "grad_norm": 0.7657331824302673,
      "learning_rate": 3.969460053750306e-05,
      "loss": 0.8654,
      "step": 21100
    },
    {
      "epoch": 0.20716274979234867,
      "grad_norm": 1.077300786972046,
      "learning_rate": 3.9645736623503545e-05,
      "loss": 0.8218,
      "step": 21200
    },
    {
      "epoch": 0.20813993257438804,
      "grad_norm": 0.5806353688240051,
      "learning_rate": 3.959687270950403e-05,
      "loss": 0.8375,
      "step": 21300
    },
    {
      "epoch": 0.2091171153564274,
      "grad_norm": 0.3875705599784851,
      "learning_rate": 3.954800879550452e-05,
      "loss": 0.8342,
      "step": 21400
    },
    {
      "epoch": 0.2100942981384668,
      "grad_norm": 0.7829961180686951,
      "learning_rate": 3.949914488150501e-05,
      "loss": 0.832,
      "step": 21500
    },
    {
      "epoch": 0.21107148092050618,
      "grad_norm": 1.9466382265090942,
      "learning_rate": 3.94502809675055e-05,
      "loss": 0.8118,
      "step": 21600
    },
    {
      "epoch": 0.21204866370254555,
      "grad_norm": 0.6271357536315918,
      "learning_rate": 3.940141705350599e-05,
      "loss": 0.8436,
      "step": 21700
    },
    {
      "epoch": 0.21302584648458495,
      "grad_norm": 1.320719838142395,
      "learning_rate": 3.9352553139506474e-05,
      "loss": 0.8586,
      "step": 21800
    },
    {
      "epoch": 0.21400302926662432,
      "grad_norm": 0.6017069220542908,
      "learning_rate": 3.930368922550697e-05,
      "loss": 0.8242,
      "step": 21900
    },
    {
      "epoch": 0.21498021204866372,
      "grad_norm": 0.8584203124046326,
      "learning_rate": 3.9254825311507456e-05,
      "loss": 0.815,
      "step": 22000
    },
    {
      "epoch": 0.2159573948307031,
      "grad_norm": 0.623652458190918,
      "learning_rate": 3.920596139750794e-05,
      "loss": 0.812,
      "step": 22100
    },
    {
      "epoch": 0.21693457761274246,
      "grad_norm": 0.6867117881774902,
      "learning_rate": 3.915709748350843e-05,
      "loss": 0.8141,
      "step": 22200
    },
    {
      "epoch": 0.21791176039478186,
      "grad_norm": 0.6963294744491577,
      "learning_rate": 3.910823356950892e-05,
      "loss": 0.8227,
      "step": 22300
    },
    {
      "epoch": 0.21888894317682123,
      "grad_norm": 0.6727440357208252,
      "learning_rate": 3.905936965550941e-05,
      "loss": 0.8285,
      "step": 22400
    },
    {
      "epoch": 0.2198661259588606,
      "grad_norm": 1.261771559715271,
      "learning_rate": 3.90105057415099e-05,
      "loss": 0.8396,
      "step": 22500
    },
    {
      "epoch": 0.2208433087409,
      "grad_norm": 0.9146804809570312,
      "learning_rate": 3.8961641827510385e-05,
      "loss": 0.8194,
      "step": 22600
    },
    {
      "epoch": 0.22182049152293937,
      "grad_norm": 0.9350225329399109,
      "learning_rate": 3.891277791351087e-05,
      "loss": 0.8376,
      "step": 22700
    },
    {
      "epoch": 0.22279767430497874,
      "grad_norm": 0.6317518353462219,
      "learning_rate": 3.886391399951137e-05,
      "loss": 0.8313,
      "step": 22800
    },
    {
      "epoch": 0.22377485708701814,
      "grad_norm": 0.6716780662536621,
      "learning_rate": 3.881505008551185e-05,
      "loss": 0.8033,
      "step": 22900
    },
    {
      "epoch": 0.2247520398690575,
      "grad_norm": 0.4494755268096924,
      "learning_rate": 3.8766186171512344e-05,
      "loss": 0.8047,
      "step": 23000
    },
    {
      "epoch": 0.22572922265109688,
      "grad_norm": 0.5505642890930176,
      "learning_rate": 3.8717322257512825e-05,
      "loss": 0.8456,
      "step": 23100
    },
    {
      "epoch": 0.22670640543313628,
      "grad_norm": 0.8866478800773621,
      "learning_rate": 3.866845834351332e-05,
      "loss": 0.8105,
      "step": 23200
    },
    {
      "epoch": 0.22768358821517565,
      "grad_norm": 0.7525384426116943,
      "learning_rate": 3.861959442951381e-05,
      "loss": 0.8292,
      "step": 23300
    },
    {
      "epoch": 0.22866077099721502,
      "grad_norm": 0.8182941675186157,
      "learning_rate": 3.8570730515514296e-05,
      "loss": 0.8392,
      "step": 23400
    },
    {
      "epoch": 0.22963795377925442,
      "grad_norm": 0.6246720552444458,
      "learning_rate": 3.8521866601514784e-05,
      "loss": 0.8292,
      "step": 23500
    },
    {
      "epoch": 0.2306151365612938,
      "grad_norm": 0.7931325435638428,
      "learning_rate": 3.847300268751527e-05,
      "loss": 0.83,
      "step": 23600
    },
    {
      "epoch": 0.23159231934333316,
      "grad_norm": 0.4839908480644226,
      "learning_rate": 3.842413877351576e-05,
      "loss": 0.8544,
      "step": 23700
    },
    {
      "epoch": 0.23256950212537256,
      "grad_norm": 0.694095253944397,
      "learning_rate": 3.837527485951625e-05,
      "loss": 0.8168,
      "step": 23800
    },
    {
      "epoch": 0.23354668490741193,
      "grad_norm": 0.6341009140014648,
      "learning_rate": 3.8326410945516736e-05,
      "loss": 0.8007,
      "step": 23900
    },
    {
      "epoch": 0.2345238676894513,
      "grad_norm": 0.6198739409446716,
      "learning_rate": 3.8277547031517224e-05,
      "loss": 0.8222,
      "step": 24000
    },
    {
      "epoch": 0.2355010504714907,
      "grad_norm": 0.7246755361557007,
      "learning_rate": 3.822868311751772e-05,
      "loss": 0.8239,
      "step": 24100
    },
    {
      "epoch": 0.23647823325353007,
      "grad_norm": 1.1782780885696411,
      "learning_rate": 3.81798192035182e-05,
      "loss": 0.8069,
      "step": 24200
    },
    {
      "epoch": 0.23745541603556947,
      "grad_norm": 0.7902185320854187,
      "learning_rate": 3.8130955289518695e-05,
      "loss": 0.8283,
      "step": 24300
    },
    {
      "epoch": 0.23843259881760884,
      "grad_norm": 1.605393648147583,
      "learning_rate": 3.808209137551918e-05,
      "loss": 0.7758,
      "step": 24400
    },
    {
      "epoch": 0.2394097815996482,
      "grad_norm": 0.5076558589935303,
      "learning_rate": 3.803322746151967e-05,
      "loss": 0.8178,
      "step": 24500
    },
    {
      "epoch": 0.2403869643816876,
      "grad_norm": 0.777646005153656,
      "learning_rate": 3.798436354752016e-05,
      "loss": 0.8074,
      "step": 24600
    },
    {
      "epoch": 0.24136414716372698,
      "grad_norm": 1.3850637674331665,
      "learning_rate": 3.793549963352065e-05,
      "loss": 0.8058,
      "step": 24700
    },
    {
      "epoch": 0.24234132994576635,
      "grad_norm": 0.6476046442985535,
      "learning_rate": 3.7886635719521136e-05,
      "loss": 0.7967,
      "step": 24800
    },
    {
      "epoch": 0.24331851272780575,
      "grad_norm": 0.5768633484840393,
      "learning_rate": 3.7837771805521624e-05,
      "loss": 0.8269,
      "step": 24900
    },
    {
      "epoch": 0.24429569550984512,
      "grad_norm": 0.7800481915473938,
      "learning_rate": 3.778890789152211e-05,
      "loss": 0.8237,
      "step": 25000
    },
    {
      "epoch": 0.2452728782918845,
      "grad_norm": 0.591273844242096,
      "learning_rate": 3.77400439775226e-05,
      "loss": 0.8045,
      "step": 25100
    },
    {
      "epoch": 0.2462500610739239,
      "grad_norm": 0.5170730352401733,
      "learning_rate": 3.769118006352309e-05,
      "loss": 0.818,
      "step": 25200
    },
    {
      "epoch": 0.24722724385596326,
      "grad_norm": 0.7280113101005554,
      "learning_rate": 3.7642316149523576e-05,
      "loss": 0.806,
      "step": 25300
    },
    {
      "epoch": 0.24820442663800263,
      "grad_norm": 0.48092082142829895,
      "learning_rate": 3.759345223552407e-05,
      "loss": 0.804,
      "step": 25400
    },
    {
      "epoch": 0.24918160942004203,
      "grad_norm": 0.8031238913536072,
      "learning_rate": 3.754458832152455e-05,
      "loss": 0.8031,
      "step": 25500
    },
    {
      "epoch": 0.2501587922020814,
      "grad_norm": 0.5290892720222473,
      "learning_rate": 3.749572440752505e-05,
      "loss": 0.816,
      "step": 25600
    },
    {
      "epoch": 0.25113597498412077,
      "grad_norm": 1.850685477256775,
      "learning_rate": 3.7446860493525535e-05,
      "loss": 0.8241,
      "step": 25700
    },
    {
      "epoch": 0.25211315776616017,
      "grad_norm": 0.9196923971176147,
      "learning_rate": 3.739799657952602e-05,
      "loss": 0.8115,
      "step": 25800
    },
    {
      "epoch": 0.25309034054819957,
      "grad_norm": 0.8779144883155823,
      "learning_rate": 3.734913266552651e-05,
      "loss": 0.8065,
      "step": 25900
    },
    {
      "epoch": 0.2540675233302389,
      "grad_norm": 0.6696827411651611,
      "learning_rate": 3.7300268751527e-05,
      "loss": 0.7827,
      "step": 26000
    },
    {
      "epoch": 0.2550447061122783,
      "grad_norm": 0.5037100315093994,
      "learning_rate": 3.725140483752749e-05,
      "loss": 0.7955,
      "step": 26100
    },
    {
      "epoch": 0.2560218888943177,
      "grad_norm": 1.4716683626174927,
      "learning_rate": 3.7202540923527975e-05,
      "loss": 0.8076,
      "step": 26200
    },
    {
      "epoch": 0.25699907167635705,
      "grad_norm": 0.7515909671783447,
      "learning_rate": 3.7153677009528463e-05,
      "loss": 0.7645,
      "step": 26300
    },
    {
      "epoch": 0.25797625445839645,
      "grad_norm": 0.8641912341117859,
      "learning_rate": 3.710481309552896e-05,
      "loss": 0.7794,
      "step": 26400
    },
    {
      "epoch": 0.25895343724043585,
      "grad_norm": 0.7385029792785645,
      "learning_rate": 3.705594918152944e-05,
      "loss": 0.8047,
      "step": 26500
    },
    {
      "epoch": 0.2599306200224752,
      "grad_norm": 1.194313645362854,
      "learning_rate": 3.700708526752993e-05,
      "loss": 0.7973,
      "step": 26600
    },
    {
      "epoch": 0.2609078028045146,
      "grad_norm": 0.8573377728462219,
      "learning_rate": 3.695822135353042e-05,
      "loss": 0.8054,
      "step": 26700
    },
    {
      "epoch": 0.261884985586554,
      "grad_norm": 0.7428358793258667,
      "learning_rate": 3.6909357439530904e-05,
      "loss": 0.8194,
      "step": 26800
    },
    {
      "epoch": 0.26286216836859333,
      "grad_norm": 1.1976490020751953,
      "learning_rate": 3.68604935255314e-05,
      "loss": 0.7745,
      "step": 26900
    },
    {
      "epoch": 0.26383935115063273,
      "grad_norm": 0.8391226530075073,
      "learning_rate": 3.681162961153189e-05,
      "loss": 0.7981,
      "step": 27000
    },
    {
      "epoch": 0.2648165339326721,
      "grad_norm": 1.0753370523452759,
      "learning_rate": 3.6762765697532375e-05,
      "loss": 0.8018,
      "step": 27100
    },
    {
      "epoch": 0.26579371671471147,
      "grad_norm": 0.8495202660560608,
      "learning_rate": 3.671390178353286e-05,
      "loss": 0.7894,
      "step": 27200
    },
    {
      "epoch": 0.26677089949675087,
      "grad_norm": 2.3333170413970947,
      "learning_rate": 3.666503786953335e-05,
      "loss": 0.7892,
      "step": 27300
    },
    {
      "epoch": 0.26774808227879027,
      "grad_norm": 0.7213625311851501,
      "learning_rate": 3.661617395553384e-05,
      "loss": 0.7902,
      "step": 27400
    },
    {
      "epoch": 0.2687252650608296,
      "grad_norm": 1.045614242553711,
      "learning_rate": 3.6567310041534334e-05,
      "loss": 0.7719,
      "step": 27500
    },
    {
      "epoch": 0.269702447842869,
      "grad_norm": 0.42100274562835693,
      "learning_rate": 3.6518446127534815e-05,
      "loss": 0.7705,
      "step": 27600
    },
    {
      "epoch": 0.2706796306249084,
      "grad_norm": 0.5944122076034546,
      "learning_rate": 3.646958221353531e-05,
      "loss": 0.7717,
      "step": 27700
    },
    {
      "epoch": 0.27165681340694775,
      "grad_norm": 0.7398585677146912,
      "learning_rate": 3.642071829953579e-05,
      "loss": 0.7896,
| 1955 | 
            +
                  "step": 27800
         | 
| 1956 | 
            +
                },
         | 
| 1957 | 
            +
                {
         | 
| 1958 | 
            +
                  "epoch": 0.27263399618898715,
         | 
| 1959 | 
            +
                  "grad_norm": 0.8064782023429871,
         | 
| 1960 | 
            +
                  "learning_rate": 3.6371854385536286e-05,
         | 
| 1961 | 
            +
                  "loss": 0.7917,
         | 
| 1962 | 
            +
                  "step": 27900
         | 
| 1963 | 
            +
                },
         | 
| 1964 | 
            +
                {
         | 
| 1965 | 
            +
                  "epoch": 0.27361117897102655,
         | 
| 1966 | 
            +
                  "grad_norm": 0.6715266108512878,
         | 
| 1967 | 
            +
                  "learning_rate": 3.6322990471536774e-05,
         | 
| 1968 | 
            +
                  "loss": 0.7771,
         | 
| 1969 | 
            +
                  "step": 28000
         | 
| 1970 | 
            +
                },
         | 
| 1971 | 
            +
                {
         | 
| 1972 | 
            +
                  "epoch": 0.2745883617530659,
         | 
| 1973 | 
            +
                  "grad_norm": 1.1130329370498657,
         | 
| 1974 | 
            +
                  "learning_rate": 3.6274126557537255e-05,
         | 
| 1975 | 
            +
                  "loss": 0.7476,
         | 
| 1976 | 
            +
                  "step": 28100
         | 
| 1977 | 
            +
                },
         | 
| 1978 | 
            +
                {
         | 
| 1979 | 
            +
                  "epoch": 0.2755655445351053,
         | 
| 1980 | 
            +
                  "grad_norm": 0.7601907253265381,
         | 
| 1981 | 
            +
                  "learning_rate": 3.622526264353775e-05,
         | 
| 1982 | 
            +
                  "loss": 0.7745,
         | 
| 1983 | 
            +
                  "step": 28200
         | 
| 1984 | 
            +
                },
         | 
| 1985 | 
            +
                {
         | 
| 1986 | 
            +
                  "epoch": 0.2765427273171447,
         | 
| 1987 | 
            +
                  "grad_norm": 0.8511783480644226,
         | 
| 1988 | 
            +
                  "learning_rate": 3.617639872953824e-05,
         | 
| 1989 | 
            +
                  "loss": 0.7737,
         | 
| 1990 | 
            +
                  "step": 28300
         | 
| 1991 | 
            +
                },
         | 
| 1992 | 
            +
                {
         | 
| 1993 | 
            +
                  "epoch": 0.27751991009918403,
         | 
| 1994 | 
            +
                  "grad_norm": 0.8136917948722839,
         | 
| 1995 | 
            +
                  "learning_rate": 3.6127534815538726e-05,
         | 
| 1996 | 
            +
                  "loss": 0.7905,
         | 
| 1997 | 
            +
                  "step": 28400
         | 
| 1998 | 
            +
                },
         | 
| 1999 | 
            +
                {
         | 
| 2000 | 
            +
                  "epoch": 0.27849709288122343,
         | 
| 2001 | 
            +
                  "grad_norm": 0.5580685138702393,
         | 
| 2002 | 
            +
                  "learning_rate": 3.6078670901539214e-05,
         | 
| 2003 | 
            +
                  "loss": 0.7957,
         | 
| 2004 | 
            +
                  "step": 28500
         | 
| 2005 | 
            +
                },
         | 
| 2006 | 
            +
                {
         | 
| 2007 | 
            +
                  "epoch": 0.27947427566326283,
         | 
| 2008 | 
            +
                  "grad_norm": 0.750845730304718,
         | 
| 2009 | 
            +
                  "learning_rate": 3.60298069875397e-05,
         | 
| 2010 | 
            +
                  "loss": 0.7396,
         | 
| 2011 | 
            +
                  "step": 28600
         | 
| 2012 | 
            +
                },
         | 
| 2013 | 
            +
                {
         | 
| 2014 | 
            +
                  "epoch": 0.28045145844530217,
         | 
| 2015 | 
            +
                  "grad_norm": 0.9611383080482483,
         | 
| 2016 | 
            +
                  "learning_rate": 3.598094307354019e-05,
         | 
| 2017 | 
            +
                  "loss": 0.774,
         | 
| 2018 | 
            +
                  "step": 28700
         | 
| 2019 | 
            +
                },
         | 
| 2020 | 
            +
                {
         | 
| 2021 | 
            +
                  "epoch": 0.28142864122734157,
         | 
| 2022 | 
            +
                  "grad_norm": 0.6622794270515442,
         | 
| 2023 | 
            +
                  "learning_rate": 3.5932079159540685e-05,
         | 
| 2024 | 
            +
                  "loss": 0.7993,
         | 
| 2025 | 
            +
                  "step": 28800
         | 
| 2026 | 
            +
                },
         | 
| 2027 | 
            +
                {
         | 
| 2028 | 
            +
                  "epoch": 0.28240582400938097,
         | 
| 2029 | 
            +
                  "grad_norm": 0.4816977381706238,
         | 
| 2030 | 
            +
                  "learning_rate": 3.588321524554117e-05,
         | 
| 2031 | 
            +
                  "loss": 0.7868,
         | 
| 2032 | 
            +
                  "step": 28900
         | 
| 2033 | 
            +
                },
         | 
| 2034 | 
            +
                {
         | 
| 2035 | 
            +
                  "epoch": 0.2833830067914203,
         | 
| 2036 | 
            +
                  "grad_norm": 0.6779691576957703,
         | 
| 2037 | 
            +
                  "learning_rate": 3.583435133154166e-05,
         | 
| 2038 | 
            +
                  "loss": 0.7838,
         | 
| 2039 | 
            +
                  "step": 29000
         | 
| 2040 | 
            +
                },
         | 
| 2041 | 
            +
                {
         | 
| 2042 | 
            +
                  "epoch": 0.2843601895734597,
         | 
| 2043 | 
            +
                  "grad_norm": 0.9714117646217346,
         | 
| 2044 | 
            +
                  "learning_rate": 3.578548741754214e-05,
         | 
| 2045 | 
            +
                  "loss": 0.7686,
         | 
| 2046 | 
            +
                  "step": 29100
         | 
| 2047 | 
            +
                },
         | 
| 2048 | 
            +
                {
         | 
| 2049 | 
            +
                  "epoch": 0.2853373723554991,
         | 
| 2050 | 
            +
                  "grad_norm": 0.7163410186767578,
         | 
| 2051 | 
            +
                  "learning_rate": 3.573662350354264e-05,
         | 
| 2052 | 
            +
                  "loss": 0.7747,
         | 
| 2053 | 
            +
                  "step": 29200
         | 
| 2054 | 
            +
                },
         | 
| 2055 | 
            +
                {
         | 
| 2056 | 
            +
                  "epoch": 0.28631455513753845,
         | 
| 2057 | 
            +
                  "grad_norm": 0.7338354587554932,
         | 
| 2058 | 
            +
                  "learning_rate": 3.5687759589543126e-05,
         | 
| 2059 | 
            +
                  "loss": 0.7703,
         | 
| 2060 | 
            +
                  "step": 29300
         | 
| 2061 | 
            +
                },
         | 
| 2062 | 
            +
                {
         | 
| 2063 | 
            +
                  "epoch": 0.28729173791957785,
         | 
| 2064 | 
            +
                  "grad_norm": 0.765074610710144,
         | 
| 2065 | 
            +
                  "learning_rate": 3.5638895675543614e-05,
         | 
| 2066 | 
            +
                  "loss": 0.7811,
         | 
| 2067 | 
            +
                  "step": 29400
         | 
| 2068 | 
            +
                },
         | 
| 2069 | 
            +
                {
         | 
| 2070 | 
            +
                  "epoch": 0.28826892070161725,
         | 
| 2071 | 
            +
                  "grad_norm": 0.6714346408843994,
         | 
| 2072 | 
            +
                  "learning_rate": 3.55900317615441e-05,
         | 
| 2073 | 
            +
                  "loss": 0.7971,
         | 
| 2074 | 
            +
                  "step": 29500
         | 
| 2075 | 
            +
                },
         | 
| 2076 | 
            +
                {
         | 
| 2077 | 
            +
                  "epoch": 0.2892461034836566,
         | 
| 2078 | 
            +
                  "grad_norm": 0.6784923672676086,
         | 
| 2079 | 
            +
                  "learning_rate": 3.554116784754459e-05,
         | 
| 2080 | 
            +
                  "loss": 0.7704,
         | 
| 2081 | 
            +
                  "step": 29600
         | 
| 2082 | 
            +
                },
         | 
| 2083 | 
            +
                {
         | 
| 2084 | 
            +
                  "epoch": 0.290223286265696,
         | 
| 2085 | 
            +
                  "grad_norm": 0.6446245312690735,
         | 
| 2086 | 
            +
                  "learning_rate": 3.549230393354508e-05,
         | 
| 2087 | 
            +
                  "loss": 0.7843,
         | 
| 2088 | 
            +
                  "step": 29700
         | 
| 2089 | 
            +
                },
         | 
| 2090 | 
            +
                {
         | 
| 2091 | 
            +
                  "epoch": 0.2912004690477354,
         | 
| 2092 | 
            +
                  "grad_norm": 0.9739934206008911,
         | 
| 2093 | 
            +
                  "learning_rate": 3.5443440019545566e-05,
         | 
| 2094 | 
            +
                  "loss": 0.7423,
         | 
| 2095 | 
            +
                  "step": 29800
         | 
| 2096 | 
            +
                },
         | 
| 2097 | 
            +
                {
         | 
| 2098 | 
            +
                  "epoch": 0.2921776518297748,
         | 
| 2099 | 
            +
                  "grad_norm": 0.2898177206516266,
         | 
| 2100 | 
            +
                  "learning_rate": 3.5394576105546054e-05,
         | 
| 2101 | 
            +
                  "loss": 0.7322,
         | 
| 2102 | 
            +
                  "step": 29900
         | 
| 2103 | 
            +
                },
         | 
| 2104 | 
            +
                {
         | 
| 2105 | 
            +
                  "epoch": 0.29315483461181413,
         | 
| 2106 | 
            +
                  "grad_norm": 0.720974862575531,
         | 
| 2107 | 
            +
                  "learning_rate": 3.534571219154654e-05,
         | 
| 2108 | 
            +
                  "loss": 0.7593,
         | 
| 2109 | 
            +
                  "step": 30000
         | 
| 2110 | 
            +
                },
         | 
| 2111 | 
            +
                {
         | 
| 2112 | 
            +
                  "epoch": 0.29413201739385353,
         | 
| 2113 | 
            +
                  "grad_norm": 0.4672446548938751,
         | 
| 2114 | 
            +
                  "learning_rate": 3.529684827754704e-05,
         | 
| 2115 | 
            +
                  "loss": 0.7422,
         | 
| 2116 | 
            +
                  "step": 30100
         | 
| 2117 | 
            +
                },
         | 
| 2118 | 
            +
                {
         | 
| 2119 | 
            +
                  "epoch": 0.2951092001758929,
         | 
| 2120 | 
            +
                  "grad_norm": 0.7546716332435608,
         | 
| 2121 | 
            +
                  "learning_rate": 3.524798436354752e-05,
         | 
| 2122 | 
            +
                  "loss": 0.7788,
         | 
| 2123 | 
            +
                  "step": 30200
         | 
| 2124 | 
            +
                },
         | 
| 2125 | 
            +
                {
         | 
| 2126 | 
            +
                  "epoch": 0.29608638295793227,
         | 
| 2127 | 
            +
                  "grad_norm": 0.6265705823898315,
         | 
| 2128 | 
            +
                  "learning_rate": 3.519912044954801e-05,
         | 
| 2129 | 
            +
                  "loss": 0.745,
         | 
| 2130 | 
            +
                  "step": 30300
         | 
| 2131 | 
            +
                },
         | 
| 2132 | 
            +
                {
         | 
| 2133 | 
            +
                  "epoch": 0.29706356573997167,
         | 
| 2134 | 
            +
                  "grad_norm": 1.092965841293335,
         | 
| 2135 | 
            +
                  "learning_rate": 3.51502565355485e-05,
         | 
| 2136 | 
            +
                  "loss": 0.789,
         | 
| 2137 | 
            +
                  "step": 30400
         | 
| 2138 | 
            +
                },
         | 
| 2139 | 
            +
                {
         | 
| 2140 | 
            +
                  "epoch": 0.29804074852201107,
         | 
| 2141 | 
            +
                  "grad_norm": 0.7648272514343262,
         | 
| 2142 | 
            +
                  "learning_rate": 3.510139262154899e-05,
         | 
| 2143 | 
            +
                  "loss": 0.758,
         | 
| 2144 | 
            +
                  "step": 30500
         | 
| 2145 | 
            +
                },
         | 
| 2146 | 
            +
                {
         | 
| 2147 | 
            +
                  "epoch": 0.2990179313040504,
         | 
| 2148 | 
            +
                  "grad_norm": 0.785746157169342,
         | 
| 2149 | 
            +
                  "learning_rate": 3.505252870754948e-05,
         | 
| 2150 | 
            +
                  "loss": 0.7744,
         | 
| 2151 | 
            +
                  "step": 30600
         | 
| 2152 | 
            +
                },
         | 
| 2153 | 
            +
                {
         | 
| 2154 | 
            +
                  "epoch": 0.2999951140860898,
         | 
| 2155 | 
            +
                  "grad_norm": 0.8007264733314514,
         | 
| 2156 | 
            +
                  "learning_rate": 3.5003664793549965e-05,
         | 
| 2157 | 
            +
                  "loss": 0.7696,
         | 
| 2158 | 
            +
                  "step": 30700
         | 
| 2159 | 
            +
                },
         | 
| 2160 | 
            +
                {
         | 
| 2161 | 
            +
                  "epoch": 0.3009722968681292,
         | 
| 2162 | 
            +
                  "grad_norm": 1.1369248628616333,
         | 
| 2163 | 
            +
                  "learning_rate": 3.4954800879550453e-05,
         | 
| 2164 | 
            +
                  "loss": 0.7667,
         | 
| 2165 | 
            +
                  "step": 30800
         | 
| 2166 | 
            +
                },
         | 
| 2167 | 
            +
                {
         | 
| 2168 | 
            +
                  "epoch": 0.30194947965016855,
         | 
| 2169 | 
            +
                  "grad_norm": 0.6251523494720459,
         | 
| 2170 | 
            +
                  "learning_rate": 3.490593696555095e-05,
         | 
| 2171 | 
            +
                  "loss": 0.7686,
         | 
| 2172 | 
            +
                  "step": 30900
         | 
| 2173 | 
            +
                },
         | 
| 2174 | 
            +
                {
         | 
| 2175 | 
            +
                  "epoch": 0.30292666243220795,
         | 
| 2176 | 
            +
                  "grad_norm": 1.1552335023880005,
         | 
| 2177 | 
            +
                  "learning_rate": 3.485707305155143e-05,
         | 
| 2178 | 
            +
                  "loss": 0.7693,
         | 
| 2179 | 
            +
                  "step": 31000
         | 
| 2180 | 
            +
                },
         | 
| 2181 | 
            +
                {
         | 
| 2182 | 
            +
                  "epoch": 0.30390384521424735,
         | 
| 2183 | 
            +
                  "grad_norm": 0.9136368036270142,
         | 
| 2184 | 
            +
                  "learning_rate": 3.480820913755192e-05,
         | 
| 2185 | 
            +
                  "loss": 0.7898,
         | 
| 2186 | 
            +
                  "step": 31100
         | 
| 2187 | 
            +
                },
         | 
| 2188 | 
            +
                {
         | 
| 2189 | 
            +
                  "epoch": 0.3048810279962867,
         | 
| 2190 | 
            +
                  "grad_norm": 0.4203650951385498,
         | 
| 2191 | 
            +
                  "learning_rate": 3.4759345223552406e-05,
         | 
| 2192 | 
            +
                  "loss": 0.7541,
         | 
| 2193 | 
            +
                  "step": 31200
         | 
| 2194 | 
            +
                },
         | 
| 2195 | 
            +
                {
         | 
| 2196 | 
            +
                  "epoch": 0.3058582107783261,
         | 
| 2197 | 
            +
                  "grad_norm": 0.671546995639801,
         | 
| 2198 | 
            +
                  "learning_rate": 3.4710481309552894e-05,
         | 
| 2199 | 
            +
                  "loss": 0.735,
         | 
| 2200 | 
            +
                  "step": 31300
         | 
| 2201 | 
            +
                },
         | 
| 2202 | 
            +
                {
         | 
| 2203 | 
            +
                  "epoch": 0.3068353935603655,
         | 
| 2204 | 
            +
                  "grad_norm": 0.6711509227752686,
         | 
| 2205 | 
            +
                  "learning_rate": 3.466161739555339e-05,
         | 
| 2206 | 
            +
                  "loss": 0.7481,
         | 
| 2207 | 
            +
                  "step": 31400
         | 
| 2208 | 
            +
                },
         | 
| 2209 | 
            +
                {
         | 
| 2210 | 
            +
                  "epoch": 0.30781257634240483,
         | 
| 2211 | 
            +
                  "grad_norm": 0.7787076234817505,
         | 
| 2212 | 
            +
                  "learning_rate": 3.461275348155387e-05,
         | 
| 2213 | 
            +
                  "loss": 0.7701,
         | 
| 2214 | 
            +
                  "step": 31500
         | 
| 2215 | 
            +
                }
         | 
| 2216 | 
            +
              ],
         | 
| 2217 | 
            +
              "logging_steps": 100,
         | 
| 2218 | 
            +
              "max_steps": 102335,
         | 
| 2219 | 
            +
              "num_input_tokens_seen": 0,
         | 
| 2220 | 
            +
              "num_train_epochs": 1,
         | 
| 2221 | 
            +
              "save_steps": 500,
         | 
| 2222 | 
            +
              "stateful_callbacks": {
         | 
| 2223 | 
            +
                "TrainerControl": {
         | 
| 2224 | 
            +
                  "args": {
         | 
| 2225 | 
            +
                    "should_epoch_stop": false,
         | 
| 2226 | 
            +
                    "should_evaluate": false,
         | 
| 2227 | 
            +
                    "should_log": false,
         | 
| 2228 | 
            +
                    "should_save": true,
         | 
| 2229 | 
            +
                    "should_training_stop": false
         | 
| 2230 | 
            +
                  },
         | 
| 2231 | 
            +
                  "attributes": {}
         | 
| 2232 | 
            +
                }
         | 
| 2233 | 
            +
              },
         | 
| 2234 | 
            +
              "total_flos": 3.951020798587699e+17,
         | 
| 2235 | 
            +
              "train_batch_size": 12,
         | 
| 2236 | 
            +
              "trial_name": null,
         | 
| 2237 | 
            +
              "trial_params": null
         | 
| 2238 | 
            +
            }
         | 
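The tail of trainer_state.json above is the Trainer's running log: one entry per logging_steps = 100, each recording epoch, gradient norm, learning rate, and loss. A minimal sketch of consuming it offline (the local path is illustrative, and the index arithmetic assumes one entry per 100 steps, as logged here):

import json

# Load the checkpoint's trainer state (path assumes a local clone).
with open("checkpoint-31500/trainer_state.json") as f:
    state = json.load(f)

log = state["log_history"]
last = log[-1]
print(f"step {last['step']}: loss={last['loss']}, lr={last['learning_rate']}")

# Sanity-check the LR schedule: between steps 26500 and 31500 the logged
# rates fall on a straight line, and extrapolating that line to zero lands
# almost exactly on max_steps, i.e. a linear decay over the full run.
a, b = log[-51], log[-1]  # steps 26500 and 31500, at 1 entry / 100 steps
slope = (b["learning_rate"] - a["learning_rate"]) / (b["step"] - a["step"])
zero_step = b["step"] - b["learning_rate"] / slope
print(f"LR reaches 0 near step {zero_step:.0f} (max_steps = {state['max_steps']})")

On the values above this prints a zero crossing near step 102334, consistent with max_steps = 102335.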
    	
checkpoint-31500/training_args.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:278b8ae4ca6b721ea0f2d20061c63ec30cd0ca06fdf7af3d153eaddb11a3fd6b
+size 5176
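The training_args.bin entry committed here is only a Git LFS pointer; the serialized TrainingArguments blob (5,176 bytes) lives in LFS storage, addressed by the sha256 above. A minimal sketch of verifying a fully downloaded copy against such a pointer (both file paths are hypothetical):

import hashlib

def parse_lfs_pointer(path: str) -> tuple[str, int]:
    """Read a Git LFS pointer file into its (sha256 hex digest, byte size)."""
    fields = dict(line.split(" ", 1) for line in open(path).read().splitlines())
    return fields["oid"].split(":", 1)[1], int(fields["size"])

expected_oid, expected_size = parse_lfs_pointer("training_args.bin.pointer")

blob = open("checkpoint-31500/training_args.bin", "rb").read()
assert len(blob) == expected_size, "size mismatch: still a pointer file?"
assert hashlib.sha256(blob).hexdigest() == expected_oid, "sha256 mismatch"
print("training_args.bin matches its LFS pointer")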
