{
    "type": "system2",
    "n_obs_steps": 1,
    "normalization_mapping": {
        "VISUAL": "IDENTITY",
        "STATE": "MEAN_STD",
        "ACTION": "MEAN_STD"
    },
    "input_features": {
        "observation.images.image": {
            "type": "VISUAL",
            "shape": [
                3,
                256,
                256
            ]
        },
        "observation.images.wrist_image": {
            "type": "VISUAL",
            "shape": [
                3,
                256,
                256
            ]
        },
        "observation.state": {
            "type": "STATE",
            "shape": [
                8
            ]
        }
    },
    "output_features": {
        "action": {
            "type": "ACTION",
            "shape": [
                7
            ]
        }
    },
    "device": "cpu",
    "use_amp": false,
    "num_pos": 3,
    "discount": 0.98,
    "chunk_size": 4,
    "n_action_steps": 4,
    "next_obs_offset": 1,
    "s1_his_state_size": 1,
    "max_state_dim": 32,
    "max_action_dim": 32,
    "resize_imgs_with_padding": [
        224,
        224
    ],
    "empty_cameras": 0,
    "adapt_to_pi_aloha": false,
    "use_delta_joint_actions_aloha": false,
    "tokenizer_max_length": 48,
    "proj_width": 1024,
    "num_steps": 10,
    "use_cache": true,
    "attention_implementation": "eager",
    "freeze_vision_encoder": true,
    "train_expert_only": false,
    "train_state_proj": true,
    "optimizer_lr": 5e-05,
    "optimizer_betas": [
        0.9,
        0.95
    ],
    "optimizer_eps": 1e-08,
    "optimizer_weight_decay": 1e-10,
    "scheduler_warmup_steps": 1000,
    "scheduler_decay_steps": 200000,
    "scheduler_decay_lr": 2.5e-06,
    "paligemma_config": {
        "bos_token_id": 2,
        "eos_token_id": 1,
        "hidden_size": 2048,
        "ignore_index": -100,
        "image_token_index": 257152,
        "model_type": "paligemma",
        "pad_token_id": 0,
        "projection_dim": 2048,
        "text_config": {
            "hidden_activation": "gelu_pytorch_tanh",
            "hidden_size": 2048,
            "intermediate_size": 16384,
            "model_type": "gemma",
            "num_attention_heads": 8,
            "num_hidden_layers": 18,
            "num_image_tokens": 256,
            "num_key_value_heads": 1,
            "torch_dtype": "float32",
            "vocab_size": 257152
        },
        "torch_dtype": "float32",
        "transformers_version": "4.48.1",
        "vision_config": {
            "hidden_size": 1152,
            "intermediate_size": 4304,
            "model_type": "siglip_vision_model",
            "num_attention_heads": 16,
            "num_hidden_layers": 27,
            "num_image_tokens": 256,
            "patch_size": 14,
            "projection_dim": 2048,
            "projector_hidden_act": "gelu_fast",
            "vision_use_head": false
        },
        "vocab_size": 257152
    },
    "gemma_expert_config": {
        "attention_bias": false,
        "attention_dropout": 0.0,
        "bos_token_id": 2,
        "eos_token_id": 1,
        "head_dim": 256,
        "hidden_act": "gelu_pytorch_tanh",
        "hidden_activation": "gelu_pytorch_tanh",
        "hidden_size": 1024,
        "initializer_range": 0.02,
        "intermediate_size": 4096,
        "max_position_embeddings": 8192,
        "model_type": "gemma",
        "num_attention_heads": 8,
        "num_hidden_layers": 18,
        "num_key_value_heads": 1,
        "pad_token_id": 0,
        "rms_norm_eps": 1e-06,
        "rope_theta": 10000.0,
        "torch_dtype": "float32",
        "transformers_version": "4.48.1",
        "use_cache": true,
        "vocab_size": 257152
    }
}