{
    "type": "dot",
    "n_obs_steps": 3,
    "normalization_mapping": {
        "VISUAL": "MEAN_STD",
        "STATE": "MIN_MAX",
        "ENV": "MIN_MAX",
        "ACTION": "MIN_MAX"
    },
    "input_features": {
        "observation.images.top": {
            "type": "VISUAL",
            "shape": [3, 480, 640]
        },
        "observation.state": {
            "type": "STATE",
            "shape": [14]
        }
    },
    "output_features": {
        "action": {
            "type": "ACTION",
            "shape": [14]
        }
    },
    "train_horizon": 150,
    "inference_horizon": 100,
    "lookback_obs_steps": 30,
    "lookback_aug": 5,
    "override_dataset_stats": false,
    "new_dataset_stats": {
        "action": {
            "max": [512.0, 512.0],
            "min": [0.0, 0.0]
        },
        "observation.environment_state": {
            "max": [512.0, 512.0, 512.0, 512.0, 512.0, 512.0, 512.0, 512.0, 512.0, 512.0, 512.0, 512.0, 512.0, 512.0, 512.0, 512.0],
            "min": [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
        },
        "observation.state": {
            "max": [512.0, 512.0],
            "min": [0.0, 0.0]
        }
    },
    "vision_backbone": "resnet18",
    "pretrained_backbone_weights": "ResNet18_Weights.IMAGENET1K_V1",
    "pre_norm": true,
    "lora_rank": 20,
    "merge_lora": false,
    "dim_model": 128,
    "n_heads": 8,
    "dim_feedforward": 512,
    "n_decoder_layers": 8,
    "rescale_shape": [480, 640],
    "crop_scale": 1.0,
    "state_noise": 0.01,
    "noise_decay": 0.999995,
    "dropout": 0.1,
    "alpha": 0.98,
    "train_alpha": 0.99,
    "predict_every_n": 1,
    "return_every_n": 1,
    "optimizer_lr": 3e-05,
    "optimizer_min_lr": 1e-05,
    "optimizer_lr_cycle_steps": 100000,
    "optimizer_weight_decay": 1e-05
}
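
As a reference, here is a minimal sketch in plain Python showing how a configuration like this could be loaded and sanity-checked before training or inference. It uses only the standard library; the file path "config.json" and the printed summary are assumptions for illustration, not part of any particular library's API.

import json

# Load the policy configuration shown above (the path is an assumption).
with open("config.json", "r", encoding="utf-8") as f:
    cfg = json.load(f)

# Basic consistency checks against the declared feature shapes.
assert cfg["type"] == "dot"
state_dim = cfg["input_features"]["observation.state"]["shape"][0]   # 14
action_dim = cfg["output_features"]["action"]["shape"][0]            # 14
assert state_dim == action_dim

# The policy is trained on a longer action horizon (150) than it uses at
# inference time (100), and it conditions on a lookback window of past
# observations (30 steps) in addition to n_obs_steps recent ones.
print(f"train horizon:     {cfg['train_horizon']}")
print(f"inference horizon: {cfg['inference_horizon']}")
print(f"lookback steps:    {cfg['lookback_obs_steps']}")

# The top camera provides 3x480x640 images, rescaled to rescale_shape
# before the resnet18 vision backbone.
c, h, w = cfg["input_features"]["observation.images.top"]["shape"]
print(f"camera input: {c}x{h}x{w}, rescaled to {tuple(cfg['rescale_shape'])}")

Note that "override_dataset_stats" is false here, so the "new_dataset_stats" block (min/max bounds of 0.0 to 512.0) is present but not applied; normalization falls back to the statistics stored with the dataset.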