Initial commit
- README.md +1 -1
- a2c-PandaReachDense-v2.zip +2 -2
- a2c-PandaReachDense-v2/data +24 -22
- a2c-PandaReachDense-v2/policy.optimizer.pth +2 -2
- a2c-PandaReachDense-v2/policy.pth +2 -2
- config.json +1 -1
- replay.mp4 +0 -0
- results.json +1 -1
- vec_normalize.pkl +1 -1
README.md
CHANGED
@@ -16,7 +16,7 @@ model-index:
       type: PandaReachDense-v2
     metrics:
     - type: mean_reward
-      value: -
+      value: -0.96 +/- 0.52
       name: mean_reward
       verified: false
 ---
a2c-PandaReachDense-v2.zip
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:e7c27e6947bc72ce05066c28621473d6b44ff95874260d708abc25f66dae2794
+size 109500
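The .zip, .pth and .pkl files in this commit are Git LFS pointer files (version / oid / size), so the diff only swaps the pointer to a new binary. A minimal sketch of one common way to fetch and load such a checkpoint, assuming the huggingface_sb3 helper and a hypothetical repo id (neither appears in the commit itself):

# Sketch only: repo_id is hypothetical and huggingface_sb3 is an assumed helper.
from huggingface_sb3 import load_from_hub
from stable_baselines3 import A2C

checkpoint_path = load_from_hub(
    repo_id="<user>/a2c-PandaReachDense-v2",   # hypothetical repo id
    filename="a2c-PandaReachDense-v2.zip",     # the LFS-tracked archive updated above
)
model = A2C.load(checkpoint_path)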
a2c-PandaReachDense-v2/data
CHANGED
@@ -4,14 +4,16 @@
":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=",
"__module__": "stable_baselines3.common.policies",
"__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
- "__init__": "<function MultiInputActorCriticPolicy.__init__ at
"__abstractmethods__": "frozenset()",
- "_abc_impl": "<_abc._abc_data object at
},
"verbose": 1,
"policy_kwargs": {
":type:": "<class 'dict'>",
- ":serialized:": "
"optimizer_class": "<class 'torch.optim.rmsprop.RMSprop'>",
"optimizer_kwargs": {
"alpha": 0.99,

@@ -41,54 +43,54 @@
"_np_random": null
},
"n_envs": 4,
- "num_timesteps":
- "_total_timesteps":
"_num_timesteps_at_start": 0,
"seed": null,
"action_noise": null,
- "start_time":
- "learning_rate": 0.
"tensorboard_log": null,
"lr_schedule": {
":type:": "<class 'function'>",
":serialized:": "gAWVwwIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOS9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZSMBGZ1bmOUS4JDAgABlIwDdmFslIWUKXSUUpR9lCiMC19fcGFja2FnZV9flIwYc3RhYmxlX2Jhc2VsaW5lczMuY29tbW9ulIwIX19uYW1lX1+UjB5zdGFibGVfYmFzZWxpbmVzMy5jb21tb24udXRpbHOUjAhfX2ZpbGVfX5SMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOS9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZR1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlIwcY2xvdWRwaWNrbGUuY2xvdWRwaWNrbGVfZmFzdJSMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgffZR9lChoFmgNjAxfX3F1YWxuYW1lX1+UjBljb25zdGFudF9mbi48bG9jYWxzPi5mdW5jlIwPX19hbm5vdGF0aW9uc19flH2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5RoF4wHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5RoAIwKX21ha2VfY2VsbJSTlEc/
},
"_last_obs": {
":type:": "<class 'collections.OrderedDict'>",
- ":serialized:": "
- "achieved_goal": "[[
- "desired_goal": "[[
- "observation": "[[
},
"_last_episode_starts": {
":type:": "<class 'numpy.ndarray'>",
- ":serialized:": "
},
"_last_original_obs": {
":type:": "<class 'collections.OrderedDict'>",
":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////
"achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]]",
- "desired_goal": "[[
"observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"
},
"_episode_num": 0,
- "use_sde":
"sde_sample_freq": -1,
- "_current_progress_remaining": 0.
"ep_info_buffer": {
":type:": "<class 'collections.deque'>",
":serialized:": "gAWVHRAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////
},
"ep_success_buffer": {
":type:": "<class 'collections.deque'>",
":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
},
- "_n_updates":
- "n_steps":
"gamma": 0.99,
- "gae_lambda":
"ent_coef": 0.0,
- "vf_coef": 0.
"max_grad_norm": 0.5,
"normalize_advantage": false
}

":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=",
"__module__": "stable_baselines3.common.policies",
"__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
+ "__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x7f16002fa4c0>",
"__abstractmethods__": "frozenset()",
+ "_abc_impl": "<_abc._abc_data object at 0x7f16002f3d80>"
},
"verbose": 1,
"policy_kwargs": {
":type:": "<class 'dict'>",
":serialized:": "gAWVowAAAAAAAAB9lCiMDGxvZ19zdGRfaW5pdJRK/v///4wKb3J0aG9faW5pdJSJjA9vcHRpbWl6ZXJfY2xhc3OUjBN0b3JjaC5vcHRpbS5ybXNwcm9wlIwHUk1TcHJvcJSTlIwQb3B0aW1pemVyX2t3YXJnc5R9lCiMBWFscGhhlEc/764UeuFHrowDZXBzlEc+5Pi1iONo8YwMd2VpZ2h0X2RlY2F5lEsAdXUu",
+ "log_std_init": -2,
+ "ortho_init": false,
"optimizer_class": "<class 'torch.optim.rmsprop.RMSprop'>",
"optimizer_kwargs": {
"alpha": 0.99,

"_np_random": null
},
"n_envs": 4,
+ "num_timesteps": 1000000,
+ "_total_timesteps": 1000000,
"_num_timesteps_at_start": 0,
"seed": null,
"action_noise": null,
+ "start_time": 1680431351470800598,
+ "learning_rate": 0.0005,
"tensorboard_log": null,
"lr_schedule": {
":type:": "<class 'function'>",
":serialized:": "gAWVwwIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOS9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZSMBGZ1bmOUS4JDAgABlIwDdmFslIWUKXSUUpR9lCiMC19fcGFja2FnZV9flIwYc3RhYmxlX2Jhc2VsaW5lczMuY29tbW9ulIwIX19uYW1lX1+UjB5zdGFibGVfYmFzZWxpbmVzMy5jb21tb24udXRpbHOUjAhfX2ZpbGVfX5SMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOS9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZR1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlIwcY2xvdWRwaWNrbGUuY2xvdWRwaWNrbGVfZmFzdJSMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgffZR9lChoFmgNjAxfX3F1YWxuYW1lX1+UjBljb25zdGFudF9mbi48bG9jYWxzPi5mdW5jlIwPX19hbm5vdGF0aW9uc19flH2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5RoF4wHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5RoAIwKX21ha2VfY2VsbJSTlEc/QGJN0vGp/IWUUpSFlIwXX2Nsb3VkcGlja2xlX3N1Ym1vZHVsZXOUXZSMC19fZ2xvYmFsc19flH2UdYaUhlIwLg=="
},
"_last_obs": {
":type:": "<class 'collections.OrderedDict'>",
":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAAQXTOPqeWAjuF+RM/QXTOPqeWAjuF+RM/QXTOPqeWAjuF+RM/QXTOPqeWAjuF+RM/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAuAYnP68RZj8fVy8+1CG9v3CFxjxhTl4/y6W8PtH1hz/bY3o/ZjgKv6w7nz9xT4k/lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAABBdM4+p5YCO4X5Ez9k34E9JBNQO0KYdD1BdM4+p5YCO4X5Ez9k34E9JBNQO0KYdD1BdM4+p5YCO4X5Ez9k34E9JBNQO0KYdD1BdM4+p5YCO4X5Ez9k34E9JBNQO0KYdD2UaA5LBEsGhpRoEnSUUpR1Lg==",
"achieved_goal": "[[0.4032307 0.00199262 0.5780261 ]\n [0.4032307 0.00199262 0.5780261 ]\n [0.4032307 0.00199262 0.5780261 ]\n [0.4032307 0.00199262 0.5780261 ]]",
"desired_goal": "[[ 0.65244627 0.89870733 0.17123078]\n [-1.4775949 0.02423355 0.86838347]\n [ 0.3684524 1.0621892 0.9780862 ]\n [-0.5399231 1.2440085 1.0727369 ]]",
"observation": "[[0.4032307 0.00199262 0.5780261 0.06341437 0.00317497 0.05971552]\n [0.4032307 0.00199262 0.5780261 0.06341437 0.00317497 0.05971552]\n [0.4032307 0.00199262 0.5780261 0.06341437 0.00317497 0.05971552]\n [0.4032307 0.00199262 0.5780261 0.06341437 0.00317497 0.05971552]]"
},
"_last_episode_starts": {
":type:": "<class 'numpy.ndarray'>",
":serialized:": "gAWVdwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYEAAAAAAAAAAEBAQGUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKULg=="
},
"_last_original_obs": {
":type:": "<class 'collections.OrderedDict'>",
":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAifg+vRWzBD53f489JoDIvT56HL3y0ao98Jyvvf9qK70KHzk+S2URviHHAD3GGY4+lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAACUaA5LBEsGhpRoEnSUUpR1Lg==",
"achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]]",
"desired_goal": "[[-0.04662374 0.1295894 0.07006734]\n [-0.09790067 -0.03820252 0.08340825]\n [-0.08574855 -0.04185009 0.18078247]\n [-0.14198796 0.0314399 0.2775404 ]]",
"observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"
},
"_episode_num": 0,
+ "use_sde": true,
"sde_sample_freq": -1,
+ "_current_progress_remaining": 0.0,
"ep_info_buffer": {
":type:": "<class 'collections.deque'>",
":serialized:": "gAWVHRAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIODKP/MFA4r+UhpRSlIwBbJRLMowBdJRHQKfaoSNfgJl1fZQoaAZoCWgPQwjiy0QRUjfhv5SGlFKUaBVLMmgWR0Cn2mOJk5IZdX2UKGgGaAloD0MIYVCm0eTi57+UhpRSlGgVSzJoFkdAp9obmlqJuXV9lChoBmgJaA9DCB0AcVevoua/lIaUUpRoFUsyaBZHQKfZ34mkWRB1fZQoaAZoCWgPQwgEyxEykGfNv5SGlFKUaBVLMmgWR0Cn26gKF7D3dX2UKGgGaAloD0MIwM+4cCAk3b+UhpRSlGgVSzJoFkdAp9tqjnFHa3V9lChoBmgJaA9DCDxodt1bkdm/lIaUUpRoFUsyaBZHQKfbIrq+rU91fZQoaAZoCWgPQwgSvvc3aC/gv5SGlFKUaBVLMmgWR0Cn2uaAvtdBdX2UKGgGaAloD0MI86/llevt4b+UhpRSlGgVSzJoFkdAp9zRp5/smnV9lChoBmgJaA9DCDZYOEnzR+a/lIaUUpRoFUsyaBZHQKfclFAmiQF1fZQoaAZoCWgPQwgBomDGFKzav5SGlFKUaBVLMmgWR0Cn3ExeTmnwdX2UKGgGaAloD0MIiesYV1wc1r+UhpRSlGgVSzJoFkdAp9wQH1OCXnV9lChoBmgJaA9DCN8yp8tiYta/lIaUUpRoFUsyaBZHQKfeBNOdoWZ1fZQoaAZoCWgPQwglk1M7w9Thv5SGlFKUaBVLMmgWR0Cn3cdYwIt2dX2UKGgGaAloD0MIeLeyRGcZ5b+UhpRSlGgVSzJoFkdAp92AVKwpv3V9lChoBmgJaA9DCMZOeAlOfeK/lIaUUpRoFUsyaBZHQKfdRFglWwN1fZQoaAZoCWgPQwi3CffKvFXbv5SGlFKUaBVLMmgWR0Cn3xChFmWddX2UKGgGaAloD0MIEAcJUb6g2b+UhpRSlGgVSzJoFkdAp97S5NGmUHV9lChoBmgJaA9DCCzzVl2Hata/lIaUUpRoFUsyaBZHQKfeixlg+hZ1fZQoaAZoCWgPQwi22sNeKGDNv5SGlFKUaBVLMmgWR0Cn3k7KzRhMdX2UKGgGaAloD0MId0mcFVET3r+UhpRSlGgVSzJoFkdAp+AgDvE0i3V9lChoBmgJaA9DCHxFt17Tg+S/lIaUUpRoFUsyaBZHQKff4okRjBl1fZQoaAZoCWgPQwgLKT+p9mnjv5SGlFKUaBVLMmgWR0Cn35qrilzmdX2UKGgGaAloD0MIt2J/2T357b+UhpRSlGgVSzJoFkdAp99emce8w3V9lChoBmgJaA9DCIaPiCmRRM2/lIaUUpRoFUsyaBZHQKfhMJ+DvmZ1fZQoaAZoCWgPQwgAqyNHOoPkv5SGlFKUaBVLMmgWR0Cn4PMwtapxdX2UKGgGaAloD0MIdxN80/RZ4L+UhpRSlGgVSzJoFkdAp+CrYXfqHHV9lChoBmgJaA9DCNFXkGYsmu2/lIaUUpRoFUsyaBZHQKfgb0CA+ZB1fZQoaAZoCWgPQwgxmpXtQ97Wv5SGlFKUaBVLMmgWR0Cn4juuzQeFdX2UKGgGaAloD0MIh8CRQIPN4b+UhpRSlGgVSzJoFkdAp+H+qFRHgHV9lChoBmgJaA9DCHl4z4HlCOK/lIaUUpRoFUsyaBZHQKfhttEXtSh1fZQoaAZoCWgPQwjyYIvdPqvUv5SGlFKUaBVLMmgWR0Cn4Xq6vq1PdX2UKGgGaAloD0MIw9hCkIMS4b+UhpRSlGgVSzJoFkdAp+NZu4wyqXV9lChoBmgJaA9DCI9SCU/o9dK/lIaUUpRoFUsyaBZHQKfjHGwRoRJ1fZQoaAZoCWgPQwjWxtgJL8Hlv5SGlFKUaBVLMmgWR0Cn4tSC4BmxdX2UKGgGaAloD0MID4C4q1eR57+UhpRSlGgVSzJoFkdAp+KYgvDgqHV9lChoBmgJaA9DCNi2KLNBJua/lIaUUpRoFUsyaBZHQKfkgRzzVc51fZQoaAZoCWgPQwiafLPNjWnmv5SGlFKUaBVLMmgWR0Cn5EOloDgZdX2UKGgGaAloD0MIBmSvd3+84b+UhpRSlGgVSzJoFkdAp+P7vy9VWHV9lChoBmgJaA9DCIuNeR1xSOm/lIaUUpRoFUsyaBZHQKfjv8YQ8Ol1fZQoaAZoCWgPQwiNJEG4Aorhv5SGlFKUaBVLMmgWR0Cn5YG8mKIjdX2UKGgGaAloD0MITWiSWFLu5L+UhpRSlGgVSzJoFkdAp+VEIzFdcHV9lChoBmgJaA9DCPOQKR+CqtC/lIaUUpRoFUsyaBZHQKfk/D2rXDp1fZQoaAZoCWgPQwgTmiSWlLvhv5SGlFKUaBVLMmgWR0Cn5L/0NBnjdX2UKGgGaAloD0MIQN1AgXfy3L+UhpRSlGgVSzJoFkdAp+aQD/2kBXV9lChoBmgJaA9DCIPAyqFFtta/lIaUUpRoFUsyaBZHQKfmUla8pTd1fZQoaAZoCWgPQwiBlUOLbOflv5SGlFKUaBVLMmgWR0Cn5gp53TuwdX2UKGgGaAloD0MIjdR7Kqc92r+UhpRSlGgVSzJoFkdAp+XOL9/BnHV9lChoBmgJaA9DCFotsMdESsu/lIaUUpRoFUsyaBZHQKfnmJTl1bJ1fZQoaAZoCWgPQwiLNzKP/MHIv5SGlFKUaBVLMmgWR0Cn51sJpnHvdX2UKGgGaAloD0MIQ6z+CMOA3b+UhpRSlGgVSzJoFkdAp+cTAxi5NHV9lChoBmgJaA9DCNU8R+S7FOG/lIaUUpRoFUsyaBZHQKfm1vYvnKZ1fZQoaAZoCWgPQwgepn1zf/Xhv5SGlFKUaBVLMmgWR0Cn6SNZ/0/XdX2UKGgGaAloD0MIEcZP497837+UhpRSlGgVSzJoFkdAp+jnTI/7i3V9lChoBmgJaA9DCC4gtB6+zOS/lIaUUpRoFUsyaBZHQKfooN3GGVR1fZQoaAZoCWgPQwgbLJyk+WPZv5SGlFKUaBVLMmgWR0Cn6GVsUIszdX2UKGgGaAloD0MIDK1OzlBc6L+UhpRSlGgVSzJoFkdAp+sBM8HObHV9lChoBmgJaA9DCEXaxp+obOm/lIaUUpRoFUsyaBZHQKfqxALy+Yd1fZQoaAZoCWgPQwhKCiyAKYPjv5SGlFKUaBVLMmgWR0Cn6n+2uxKQdX2UKGgGaAloD0MIUN8yp8ti3r+UhpRSlGgVSzJoFkdAp+pEn/kvK3V9lChoBmgJaA9DCJc48kBkkd+/lIaUUpRoFUsyaBZHQKfsoqZML4N1fZQoaAZoCWgPQwh6NUBpqFHiv5SGlFKUaBVLMmgWR0Cn7GZgogFHdX2UKGgGaAloD0MIAiocQSrF0b+UhpRSlGgVSzJoFkdAp+wfH3lCC3
V9lChoBmgJaA9DCKhSswdaAeO/lIaUUpRoFUsyaBZHQKfr48SPEKp1fZQoaAZoCWgPQwhPsWoQ5vbiv5SGlFKUaBVLMmgWR0Cn7nzuF6AwdX2UKGgGaAloD0MIlddK6C6J0r+UhpRSlGgVSzJoFkdAp+5A8GLUC3V9lChoBmgJaA9DCBP0F3rEaOO/lIaUUpRoFUsyaBZHQKft+fywwCd1fZQoaAZoCWgPQwg2zNB4IojSv5SGlFKUaBVLMmgWR0Cn7b9If8uSdX2UKGgGaAloD0MI9b2G4LiM1b+UhpRSlGgVSzJoFkdAp/BOn4wh4nV9lChoBmgJaA9DCKlOB7Ke2uC/lIaUUpRoFUsyaBZHQKfwEcJ+lTF1fZQoaAZoCWgPQwhe29styQHhv5SGlFKUaBVLMmgWR0Cn78rlFMIvdX2UKGgGaAloD0MIWvJ4Wn7g47+UhpRSlGgVSzJoFkdAp++PhCMP0HV9lChoBmgJaA9DCJXyWgndJeu/lIaUUpRoFUsyaBZHQKfyAdKdxyZ1fZQoaAZoCWgPQwjYKOs3E1Pjv5SGlFKUaBVLMmgWR0Cn8cUMPSUkdX2UKGgGaAloD0MIjpWYZyUt47+UhpRSlGgVSzJoFkdAp/F93fQ8fXV9lChoBmgJaA9DCD3RdeEH5+C/lIaUUpRoFUsyaBZHQKfxQrgflp51fZQoaAZoCWgPQwjAdcWM8Hbhv5SGlFKUaBVLMmgWR0Cn8ysqJ/G3dX2UKGgGaAloD0MIW88Qjll27L+UhpRSlGgVSzJoFkdAp/LtcUuct3V9lChoBmgJaA9DCCbFxydk59i/lIaUUpRoFUsyaBZHQKfypaRISUV1fZQoaAZoCWgPQwiVRPZBlgXRv5SGlFKUaBVLMmgWR0Cn8ml0PpY+dX2UKGgGaAloD0MIcM0d/S9X5L+UhpRSlGgVSzJoFkdAp/QkBMi8nXV9lChoBmgJaA9DCKK0N/jCZOG/lIaUUpRoFUsyaBZHQKfz5mGucMF1fZQoaAZoCWgPQwi/1TpxOR7yv5SGlFKUaBVLMmgWR0Cn8546XBxhdX2UKGgGaAloD0MIWvW52op97L+UhpRSlGgVSzJoFkdAp/Nh+H8CP3V9lChoBmgJaA9DCG+3JAfsKvS/lIaUUpRoFUsyaBZHQKf1L6OYIB11fZQoaAZoCWgPQwhxrIvbaIDkv5SGlFKUaBVLMmgWR0Cn9PH2AXl9dX2UKGgGaAloD0MI81Zdh2rK7L+UhpRSlGgVSzJoFkdAp/SqEnLJS3V9lChoBmgJaA9DCKNcGr/wSum/lIaUUpRoFUsyaBZHQKf0bgLJCBx1fZQoaAZoCWgPQwgFbt3NUx3dv5SGlFKUaBVLMmgWR0Cn9l82aUiZdX2UKGgGaAloD0MIK0t0lllE9L+UhpRSlGgVSzJoFkdAp/YiMglniHV9lChoBmgJaA9DCKK3eHjPAea/lIaUUpRoFUsyaBZHQKf13C1qnFZ1fZQoaAZoCWgPQwjNVl7yP/nVv5SGlFKUaBVLMmgWR0Cn9aDRc/t6dX2UKGgGaAloD0MIXD6Skh6G4L+UhpRSlGgVSzJoFkdAp/doCjk+5nV9lChoBmgJaA9DCJiiXBq/cOq/lIaUUpRoFUsyaBZHQKf3KkZ75VR1fZQoaAZoCWgPQwhmpN5TOW3qv5SGlFKUaBVLMmgWR0Cn9uJkwvg4dX2UKGgGaAloD0MIKbSs+8dC6L+UhpRSlGgVSzJoFkdAp/amRRuTA3V9lChoBmgJaA9DCNIBSdi30/m/lIaUUpRoFUsyaBZHQKf4ZjbSJCV1fZQoaAZoCWgPQwj4cTRHVv7ov5SGlFKUaBVLMmgWR0Cn+Ch91EE1dX2UKGgGaAloD0MIVmEzwAXZ6r+UhpRSlGgVSzJoFkdAp/fhLdvbXnV9lChoBmgJaA9DCNfAVgkWh+e/lIaUUpRoFUsyaBZHQKf3pW8yvcJ1ZS4="
},
"ep_success_buffer": {
":type:": "<class 'collections.deque'>",
":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
},
+ "_n_updates": 31250,
+ "n_steps": 8,
"gamma": 0.99,
+ "gae_lambda": 0.9,
"ent_coef": 0.0,
+ "vf_coef": 0.4,
"max_grad_norm": 0.5,
"normalize_advantage": false
}
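Taken together, the changed fields above describe the new training run: 1,000,000 timesteps over 4 environments, learning rate 0.0005, n_steps 8, gae_lambda 0.9, vf_coef 0.4, gSDE enabled, and policy_kwargs with log_std_init=-2 and ortho_init=False; the 31,250 recorded updates are consistent with 1,000,000 / (8 steps * 4 envs). A minimal sketch of an equivalent Stable-Baselines3 setup, assuming a panda_gym environment; the actual training script is not part of this commit:

# Sketch of a training setup matching the hyperparameters recorded above.
# Environment construction (panda_gym + make_vec_env) is an assumption; only the
# keyword values are taken from the diff.
import panda_gym  # noqa: F401  (assumed dependency that registers PandaReachDense-v2)
from stable_baselines3 import A2C
from stable_baselines3.common.env_util import make_vec_env

env = make_vec_env("PandaReachDense-v2", n_envs=4)        # "n_envs": 4
model = A2C(
    "MultiInputPolicy",                                    # MultiInputActorCriticPolicy
    env,
    learning_rate=5e-4,                                    # "learning_rate": 0.0005
    n_steps=8,                                             # "n_steps": 8
    gamma=0.99,
    gae_lambda=0.9,                                        # "gae_lambda": 0.9
    ent_coef=0.0,
    vf_coef=0.4,                                           # "vf_coef": 0.4
    max_grad_norm=0.5,
    use_sde=True,                                          # "use_sde": true
    policy_kwargs=dict(log_std_init=-2, ortho_init=False),
    verbose=1,
)
model.learn(total_timesteps=1_000_000)                     # "_total_timesteps": 1000000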
a2c-PandaReachDense-v2/policy.optimizer.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:ba91a0e8a6a0efb42b1565d1b061fa0fbe90623f33327158a78509b15a0d4b1f
+size 45438
a2c-PandaReachDense-v2/policy.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:94416d7ba29a79a74b975849a36e1857f454ee69ba004527d0a30ed1e09d164c
+size 46718
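policy.pth and policy.optimizer.pth hold the PyTorch state dicts for the policy network and its RMSprop optimizer, the same tensors packed inside the .zip archive. A rough sketch for inspecting them once the LFS objects have been pulled locally; treating them as plain torch state dicts is an assumption about how the repo was exported:

# Sketch: peek at the raw state dicts (paths assume the LFS files are present locally).
import torch

policy_state = torch.load("a2c-PandaReachDense-v2/policy.pth", map_location="cpu")
optimizer_state = torch.load("a2c-PandaReachDense-v2/policy.optimizer.pth", map_location="cpu")

for name, tensor in policy_state.items():
    print(name, tuple(tensor.shape))   # parameter names and shapes of the actor-critic network
print(list(optimizer_state.keys()))    # usually 'state' and 'param_groups' for RMSprop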
config.json
CHANGED
@@ -1 +1 @@
-
{"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=", "__module__": "stable_baselines3.common.policies", "__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x7ff61ee96c10>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x7ff61ee980c0>"}, "verbose": 1, "policy_kwargs": {":type:": "<class 'dict'>", ":serialized:": "gAWVgQAAAAAAAAB9lCiMD29wdGltaXplcl9jbGFzc5SME3RvcmNoLm9wdGltLnJtc3Byb3CUjAdSTVNwcm9wlJOUjBBvcHRpbWl6ZXJfa3dhcmdzlH2UKIwFYWxwaGGURz/vrhR64UeujANlcHOURz7k+LWI42jxjAx3ZWlnaHRfZGVjYXmUSwB1dS4=", "optimizer_class": "<class 'torch.optim.rmsprop.RMSprop'>", "optimizer_kwargs": {"alpha": 0.99, "eps": 1e-05, "weight_decay": 0}}, "observation_space": {":type:": "<class 'gym.spaces.dict.Dict'>", ":serialized:": 
"gAWVUgMAAAAAAACMD2d5bS5zcGFjZXMuZGljdJSMBERpY3SUk5QpgZR9lCiMBnNwYWNlc5SMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwOZ3ltLnNwYWNlcy5ib3iUjANCb3iUk5QpgZR9lCiMBWR0eXBllIwFbnVtcHmUaBCTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYowGX3NoYXBllEsDhZSMA2xvd5SMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFUsDhZSMAUOUdJRSlIwEaGlnaJRoHSiWDAAAAAAAAAAAACBBAAAgQQAAIEGUaBVLA4WUaCB0lFKUjA1ib3VuZGVkX2JlbG93lGgdKJYDAAAAAAAAAAEBAZRoEowCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksDhZRoIHSUUpSMDWJvdW5kZWRfYWJvdmWUaB0olgMAAAAAAAAAAQEBlGgsSwOFlGggdJRSlIwKX25wX3JhbmRvbZROdWKMDGRlc2lyZWRfZ29hbJRoDSmBlH2UKGgQaBVoGEsDhZRoGmgdKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFUsDhZRoIHSUUpRoI2gdKJYMAAAAAAAAAAAAIEEAACBBAAAgQZRoFUsDhZRoIHSUUpRoKGgdKJYDAAAAAAAAAAEBAZRoLEsDhZRoIHSUUpRoMmgdKJYDAAAAAAAAAAEBAZRoLEsDhZRoIHSUUpRoN051YowLb2JzZXJ2YXRpb26UaA0pgZR9lChoEGgVaBhLBoWUaBpoHSiWGAAAAAAAAAAAACDBAAAgwQAAIMEAACDBAAAgwQAAIMGUaBVLBoWUaCB0lFKUaCNoHSiWGAAAAAAAAAAAACBBAAAgQQAAIEEAACBBAAAgQQAAIEGUaBVLBoWUaCB0lFKUaChoHSiWBgAAAAAAAAABAQEBAQGUaCxLBoWUaCB0lFKUaDJoHSiWBgAAAAAAAAABAQEBAQGUaCxLBoWUaCB0lFKUaDdOdWJ1aBhOaBBOaDdOdWIu", "spaces": "OrderedDict([('achieved_goal', Box([-10. -10. -10.], [10. 10. 10.], (3,), float32)), ('desired_goal', Box([-10. -10. -10.], [10. 10. 10.], (3,), float32)), ('observation', Box([-10. -10. -10. -10. -10. -10.], [10. 10. 10. 10. 10. 10.], (6,), float32))])", "_shape": null, "dtype": null, "_np_random": null}, "action_space": {":type:": "<class 'gym.spaces.box.Box'>", ":serialized:": "gAWVbQEAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMBl9zaGFwZZRLA4WUjANsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWDAAAAAAAAAAAAIC/AACAvwAAgL+UaApLA4WUjAFDlHSUUpSMBGhpZ2iUaBIolgwAAAAAAAAAAACAPwAAgD8AAIA/lGgKSwOFlGgVdJRSlIwNYm91bmRlZF9iZWxvd5RoEiiWAwAAAAAAAAABAQGUaAeMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLA4WUaBV0lFKUjA1ib3VuZGVkX2Fib3ZllGgSKJYDAAAAAAAAAAEBAZRoIUsDhZRoFXSUUpSMCl9ucF9yYW5kb22UTnViLg==", "dtype": "float32", "_shape": [3], "low": "[-1. -1. -1.]", "high": "[1. 1. 
1.]", "bounded_below": "[ True True True]", "bounded_above": "[ True True True]", "_np_random": null}, "n_envs": 4, "num_timesteps": 1647696, "_total_timesteps": 3000000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1680423642372618719, "learning_rate": 0.0007, "tensorboard_log": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVwwIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOS9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZSMBGZ1bmOUS4JDAgABlIwDdmFslIWUKXSUUpR9lCiMC19fcGFja2FnZV9flIwYc3RhYmxlX2Jhc2VsaW5lczMuY29tbW9ulIwIX19uYW1lX1+UjB5zdGFibGVfYmFzZWxpbmVzMy5jb21tb24udXRpbHOUjAhfX2ZpbGVfX5SMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOS9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZR1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlIwcY2xvdWRwaWNrbGUuY2xvdWRwaWNrbGVfZmFzdJSMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgffZR9lChoFmgNjAxfX3F1YWxuYW1lX1+UjBljb25zdGFudF9mbi48bG9jYWxzPi5mdW5jlIwPX19hbm5vdGF0aW9uc19flH2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5RoF4wHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5RoAIwKX21ha2VfY2VsbJSTlEc/RvAGjbi6x4WUUpSFlIwXX2Nsb3VkcGlja2xlX3N1Ym1vZHVsZXOUXZSMC19fZ2xvYmFsc19flH2UdYaUhlIwLg=="}, "_last_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAAhvVQv6c1hj6FF4C+P9E/Px1y8T5xFQq/PNiDPz/j2r6tQgO+k2dkP+xwVb/ChT+/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAJqV0v13yVj+6rsw+Ciy3PqOroz/VBgo/ZsBIPyS9gD6Yy3Q/R5SNP9HmKL/dqFy/lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAACG9VC/pzWGPoUXgL6Ksxc/y1rRv8v5Yz4/0T8/HXLxPnEVCr9Keq4+k+YSvW1OXz882IM/P+Pavq1CA77lPUq+boVmvjKaOz+TZ2Q/7HBVv8KFP7/wQH6/MbOePphnz7+UaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[-0.8162464 0.26212808 -0.25017944]\n [ 0.7492866 0.47157374 -0.53938967]\n [ 1.0300364 -0.427515 -0.12818404]\n [ 0.8922054 -0.8337543 -0.74813473]]", "desired_goal": "[[-0.95564497 0.83963567 0.39977056]\n [ 0.35775787 1.2786754 0.53916675]\n [ 0.78418577 0.25144303 0.9562316 ]\n [ 1.1060876 -0.659772 -0.86195165]]", "observation": "[[-0.8162464 0.26212808 -0.25017944 0.5925833 -1.6355833 0.22263257]\n [ 0.7492866 0.47157374 -0.53938967 0.34077674 -0.03586442 0.87229043]\n [ 1.0300364 -0.427515 -0.12818404 -0.19750173 -0.22511837 0.7328216 ]\n [ 0.8922054 -0.8337543 -0.74813473 -0.99317837 0.30996087 -1.6203489 ]]"}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVdwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYEAAAAAAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKULg=="}, "_last_original_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": 
"gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAw3cUOgbR4j3CYrM9qVn0PZRpFb5OZEY9RfYJvko27ju5ET4+eksXPhZ7BD5aOkQ+lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAACUaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]]", "desired_goal": "[[ 0.00056636 0.11075024 0.08759071]\n [ 0.11931164 -0.14591056 0.0484355 ]\n [-0.1347285 0.00726966 0.18561448]\n [ 0.14774886 0.12937579 0.19162884]]", "observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"}, "_episode_num": 0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": 0.45077333333333336, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVHRAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMId6G5TiPdEcCUhpRSlIwBbJRLMowBdJRHQLD5a5tWMjx1fZQoaAZoCWgPQwhVNNb+zpYKwJSGlFKUaBVLMmgWR0Cw+Uk8zQ/pdX2UKGgGaAloD0MI8yA9RQ7RDMCUhpRSlGgVSzJoFkdAsPkl0Qsf73V9lChoBmgJaA9DCIo6cw8JXwbAlIaUUpRoFUsyaBZHQLD5ArbQC0Z1fZQoaAZoCWgPQwiLxAQ1fIsMwJSGlFKUaBVLMmgWR0Cw+eKKk2xZdX2UKGgGaAloD0MIKZMa2gCsCMCUhpRSlGgVSzJoFkdAsPm//ffoBHV9lChoBmgJaA9DCI9VSs/0chLAlIaUUpRoFUsyaBZHQLD5nIGhVVB1fZQoaAZoCWgPQwgVGR2QhG0TwJSGlFKUaBVLMmgWR0Cw+Xlr6+FldX2UKGgGaAloD0MIKv2Es1vrBsCUhpRSlGgVSzJoFkdAsPpZsLv1DnV9lChoBmgJaA9DCB3Lu+oBswPAlIaUUpRoFUsyaBZHQLD6NysCDEp1fZQoaAZoCWgPQwhRhqqYSv8TwJSGlFKUaBVLMmgWR0Cw+hO49X9zdX2UKGgGaAloD0MICf1MvW5xCsCUhpRSlGgVSzJoFkdAsPnwnrpqynV9lChoBmgJaA9DCLFvJxHhfwjAlIaUUpRoFUsyaBZHQLD60XDWK/F1fZQoaAZoCWgPQwjOUUfH1WgPwJSGlFKUaBVLMmgWR0Cw+q7q+rU9dX2UKGgGaAloD0MI56vkY3cBC8CUhpRSlGgVSzJoFkdAsPqLnB+F13V9lChoBmgJaA9DCGObVDTWXhDAlIaUUpRoFUsyaBZHQLD6aIn0Cih1fZQoaAZoCWgPQwjUmBBzSRUHwJSGlFKUaBVLMmgWR0Cw+00O/cnFdX2UKGgGaAloD0MIdO52vTQFEMCUhpRSlGgVSzJoFkdAsPsqhWYF7nV9lChoBmgJaA9DCFM/bypSoQnAlIaUUpRoFUsyaBZHQLD7BxCIDYB1fZQoaAZoCWgPQwhJ9gg1Q6oGwJSGlFKUaBVLMmgWR0Cw+uQEZBLPdX2UKGgGaAloD0MIGhajrrU3DsCUhpRSlGgVSzJoFkdAsPvHWlMyrXV9lChoBmgJaA9DCFXa4hqfGRPAlIaUUpRoFUsyaBZHQLD7pM3ZPEd1fZQoaAZoCWgPQwgEV3kCYWcFwJSGlFKUaBVLMmgWR0Cw+4FSn+AFdX2UKGgGaAloD0MICJRNucJ7E8CUhpRSlGgVSzJoFkdAsPteN70Fr3V9lChoBmgJaA9DCI523PC7CQ/AlIaUUpRoFUsyaBZHQLD8RRChN/R1fZQoaAZoCWgPQwghdxGmKOcTwJSGlFKUaBVLMmgWR0Cw/CKFVT73dX2UKGgGaAloD0MIGELO+/+4A8CUhpRSlGgVSzJoFkdAsPv/BSDRMXV9lChoBmgJaA9DCBCSBUzgFgXAlIaUUpRoFUsyaBZHQLD72/ZuhsZ1fZQoaAZoCWgPQwjPwMjLmhgLwJSGlFKUaBVLMmgWR0Cw/MD1schldX2UKGgGaAloD0MIyt5Szhd7BMCUhpRSlGgVSzJoFkdAsPyebVjI73V9lChoBmgJaA9DCGb4TzdQYAzAlIaUUpRoFUsyaBZHQLD8evjwQUZ1fZQoaAZoCWgPQwjvqDEh5nIKwJSGlFKUaBVLMmgWR0Cw/FfwiJO4dX2UKGgGaAloD0MIpHITtTRXBsCUhpRSlGgVSzJoFkdAsP09cC5mRXV9lChoBmgJaA9DCD8cJET54gvAlIaUUpRoFUsyaBZH
QLD9GuBczIp1fZQoaAZoCWgPQwizXgzlRFsGwJSGlFKUaBVLMmgWR0Cw/PeIMz/IdX2UKGgGaAloD0MI2ERmLnAJEcCUhpRSlGgVSzJoFkdAsPzUdwNsnHV9lChoBmgJaA9DCOqT3GETyRHAlIaUUpRoFUsyaBZHQLD9uIBRyfd1fZQoaAZoCWgPQwi2ateEtKYSwJSGlFKUaBVLMmgWR0Cw/ZYAKfFrdX2UKGgGaAloD0MI3lflQuU/BMCUhpRSlGgVSzJoFkdAsP1yg00m+nV9lChoBmgJaA9DCCRfCaTELgfAlIaUUpRoFUsyaBZHQLD9T2uxKQJ1fZQoaAZoCWgPQwhkyRzLu+oPwJSGlFKUaBVLMmgWR0Cw/i/GQ0XQdX2UKGgGaAloD0MIjgQabOocF8CUhpRSlGgVSzJoFkdAsP4NPUKArnV9lChoBmgJaA9DCBmqYir9ZAXAlIaUUpRoFUsyaBZHQLD96cbiqAB1fZQoaAZoCWgPQwjjiSDOw3kQwJSGlFKUaBVLMmgWR0Cw/caz3RG+dX2UKGgGaAloD0MIf/YjRWQ4DsCUhpRSlGgVSzJoFkdAsP6tGb1AaHV9lChoBmgJaA9DCIz34/bLhxHAlIaUUpRoFUsyaBZHQLD+ipm29ct1fZQoaAZoCWgPQwgZWTLH8m4TwJSGlFKUaBVLMmgWR0Cw/mcbzbvgdX2UKGgGaAloD0MIVIuIYvLGCMCUhpRSlGgVSzJoFkdAsP5EBQvYe3V9lChoBmgJaA9DCBfYYyKlWQXAlIaUUpRoFUsyaBZHQLD/I7L+xW11fZQoaAZoCWgPQwiutmJ/2Y0TwJSGlFKUaBVLMmgWR0Cw/wEleF+NdX2UKGgGaAloD0MIrBxaZDsfDsCUhpRSlGgVSzJoFkdAsP7dqM3qA3V9lChoBmgJaA9DCBJPdjOjzxrAlIaUUpRoFUsyaBZHQLD+upRGc4J1fZQoaAZoCWgPQwhyi/m5oVkRwJSGlFKUaBVLMmgWR0Cw/51y7wrldX2UKGgGaAloD0MI+1xtxf6yDcCUhpRSlGgVSzJoFkdAsP96508vEnV9lChoBmgJaA9DCK8l5IOeDQjAlIaUUpRoFUsyaBZHQLD/V2jfvWp1fZQoaAZoCWgPQwiqKF5lbcMUwJSGlFKUaBVLMmgWR0Cw/zRhMJyAdX2UKGgGaAloD0MIqDY4Ef3aCMCUhpRSlGgVSzJoFkdAsQAZXq7iAHV9lChoBmgJaA9DCBHlC1pIgBTAlIaUUpRoFUsyaBZHQLD/9t0V8Cx1fZQoaAZoCWgPQwjye5v+7OcRwJSGlFKUaBVLMmgWR0Cw/9OafBepdX2UKGgGaAloD0MI+MQ6Vb7nDsCUhpRSlGgVSzJoFkdAsP+wj+rEL3V9lChoBmgJaA9DCMGLvoI0QwjAlIaUUpRoFUsyaBZHQLEAla6BiCt1fZQoaAZoCWgPQwjjjjf5LUoUwJSGlFKUaBVLMmgWR0CxAHMrRSgodX2UKGgGaAloD0MIK2wGuCBbCcCUhpRSlGgVSzJoFkdAsQBPsC1Z1XV9lChoBmgJaA9DCAZINIEithLAlIaUUpRoFUsyaBZHQLEALJw84gl1fZQoaAZoCWgPQwg7cw8J3/sCwJSGlFKUaBVLMmgWR0CxARBwAEMcdX2UKGgGaAloD0MIIZOMnIUdCMCUhpRSlGgVSzJoFkdAsQDt5rxiG3V9lChoBmgJaA9DCPvqqkAt9hDAlIaUUpRoFUsyaBZHQLEAynssxwh1fZQoaAZoCWgPQwgY7fFCOowQwJSGlFKUaBVLMmgWR0CxAKdpZfUndX2UKGgGaAloD0MILh9JSQ/DEMCUhpRSlGgVSzJoFkdAsQGIKw6hg3V9lChoBmgJaA9DCITXLm04TArAlIaUUpRoFUsyaBZHQLEBZZ2ZApt1fZQoaAZoCWgPQwgF24gnu1kMwJSGlFKUaBVLMmgWR0CxAUIgA6uGdX2UKGgGaAloD0MIt9RBXg8mCMCUhpRSlGgVSzJoFkdAsQEfEpAlfXV9lChoBmgJaA9DCLb0aKonUwrAlIaUUpRoFUsyaBZHQLEB/qioKlZ1fZQoaAZoCWgPQwho6nWLwNgLwJSGlFKUaBVLMmgWR0CxAdwf+0gKdX2UKGgGaAloD0MIOIQqNXvwFMCUhpRSlGgVSzJoFkdAsQG4rkKeCnV9lChoBmgJaA9DCFJgAUwZaBPAlIaUUpRoFUsyaBZHQLEBlZvkzXV1fZQoaAZoCWgPQwgqjZjZ53EOwJSGlFKUaBVLMmgWR0CxAnYIKMNudX2UKGgGaAloD0MIUMb4MHt5B8CUhpRSlGgVSzJoFkdAsQJTied073V9lChoBmgJaA9DCCmV8IRePxLAlIaUUpRoFUsyaBZHQLECMA+6iCd1fZQoaAZoCWgPQwhApUqUvQUNwJSGlFKUaBVLMmgWR0CxAgz6N2kjdX2UKGgGaAloD0MIUprN4zCIEMCUhpRSlGgVSzJoFkdAsQLtoTPBznV9lChoBmgJaA9DCGghAaPLGwfAlIaUUpRoFUsyaBZHQLECyxYJVsF1fZQoaAZoCWgPQwg334juWVcRwJSGlFKUaBVLMmgWR0CxAqeUMXrMdX2UKGgGaAloD0MIiUFg5dDiBMCUhpRSlGgVSzJoFkdAsQKEgDA8CHV9lChoBmgJaA9DCLag98YQYAnAlIaUUpRoFUsyaBZHQLEDZvqTr3V1fZQoaAZoCWgPQwgjSRCugOIGwJSGlFKUaBVLMmgWR0CxA0SC4BmxdX2UKGgGaAloD0MIsyeBzTk4BcCUhpRSlGgVSzJoFkdAsQMhPGhmG3V9lChoBmgJaA9DCFsIclDCLBLAlIaUUpRoFUsyaBZHQLEC/jqOcUd1fZQoaAZoCWgPQwjw3Hu45BgTwJSGlFKUaBVLMmgWR0CxA9+2JBPbdX2UKGgGaAloD0MIj6uRXWm5C8CUhpRSlGgVSzJoFkdAsQO9Je3QU3V9lChoBmgJaA9DCFtCPujZDAbAlIaUUpRoFUsyaBZHQLEDmamGdqd1fZQoaAZoCWgPQwiq86j4v7MSwJSGlFKUaBVLMmgWR0CxA3aN+9amdX2UKGgGaAloD0MI4Sh5dY7BBsCUhpRSlGgVSzJoFkdAsQRXkYGdJHV9lChoBmgJaA9DCHeeeM4WsBDAlIaUUpRoFUsyaBZHQLEENQjUuth1fZQoaAZoCWgPQwhMpZ9wdssIwJSGlFKUaBVLMmgWR0CxBBGNaQmvdX2UKGgGaAloD0MIKZKvBFJCDsCUhpRSlGgVSzJoFkdAsQPuclPac3V9lChoBmgJaA9DCKc/+5Ei0gzAlIaUUpRoFUsyaBZHQLEE1JtSAH51fZQoaAZoCWgPQwhLVkW4yegEwJSGlFKUaBVLMmgWR0CxBLIWk8A8dX2UKGgGaAloD0MIb0VighqeAsCUhpRSlGgVSzJoFkdAsQSOnivPknV9lChoBmgJaA9DCDSg3oyarwXAlIaUUpRoFUsyaBZHQLEEa4ptrKx1ZS4="}, 
"ep_success_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 82384, "n_steps": 5, "gamma": 0.99, "gae_lambda": 1.0, "ent_coef": 0.0, "vf_coef": 0.5, "max_grad_norm": 0.5, "normalize_advantage": false, "system_info": {"OS": "Linux-5.10.147+-x86_64-with-glibc2.31 # 1 SMP Sat Dec 10 16:00:40 UTC 2022", "Python": "3.9.16", "Stable-Baselines3": "1.7.0", "PyTorch": "1.13.1+cu116", "GPU Enabled": "True", "Numpy": "1.22.4", "Gym": "0.21.0"}}

+
{"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=", "__module__": "stable_baselines3.common.policies", "__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x7f16002fa4c0>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x7f16002f3d80>"}, "verbose": 1, "policy_kwargs": {":type:": "<class 'dict'>", ":serialized:": "gAWVowAAAAAAAAB9lCiMDGxvZ19zdGRfaW5pdJRK/v///4wKb3J0aG9faW5pdJSJjA9vcHRpbWl6ZXJfY2xhc3OUjBN0b3JjaC5vcHRpbS5ybXNwcm9wlIwHUk1TcHJvcJSTlIwQb3B0aW1pemVyX2t3YXJnc5R9lCiMBWFscGhhlEc/764UeuFHrowDZXBzlEc+5Pi1iONo8YwMd2VpZ2h0X2RlY2F5lEsAdXUu", "log_std_init": -2, "ortho_init": false, "optimizer_class": "<class 'torch.optim.rmsprop.RMSprop'>", "optimizer_kwargs": {"alpha": 0.99, "eps": 1e-05, "weight_decay": 0}}, "observation_space": {":type:": "<class 'gym.spaces.dict.Dict'>", ":serialized:": 
"gAWVUgMAAAAAAACMD2d5bS5zcGFjZXMuZGljdJSMBERpY3SUk5QpgZR9lCiMBnNwYWNlc5SMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwOZ3ltLnNwYWNlcy5ib3iUjANCb3iUk5QpgZR9lCiMBWR0eXBllIwFbnVtcHmUaBCTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYowGX3NoYXBllEsDhZSMA2xvd5SMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFUsDhZSMAUOUdJRSlIwEaGlnaJRoHSiWDAAAAAAAAAAAACBBAAAgQQAAIEGUaBVLA4WUaCB0lFKUjA1ib3VuZGVkX2JlbG93lGgdKJYDAAAAAAAAAAEBAZRoEowCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksDhZRoIHSUUpSMDWJvdW5kZWRfYWJvdmWUaB0olgMAAAAAAAAAAQEBlGgsSwOFlGggdJRSlIwKX25wX3JhbmRvbZROdWKMDGRlc2lyZWRfZ29hbJRoDSmBlH2UKGgQaBVoGEsDhZRoGmgdKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFUsDhZRoIHSUUpRoI2gdKJYMAAAAAAAAAAAAIEEAACBBAAAgQZRoFUsDhZRoIHSUUpRoKGgdKJYDAAAAAAAAAAEBAZRoLEsDhZRoIHSUUpRoMmgdKJYDAAAAAAAAAAEBAZRoLEsDhZRoIHSUUpRoN051YowLb2JzZXJ2YXRpb26UaA0pgZR9lChoEGgVaBhLBoWUaBpoHSiWGAAAAAAAAAAAACDBAAAgwQAAIMEAACDBAAAgwQAAIMGUaBVLBoWUaCB0lFKUaCNoHSiWGAAAAAAAAAAAACBBAAAgQQAAIEEAACBBAAAgQQAAIEGUaBVLBoWUaCB0lFKUaChoHSiWBgAAAAAAAAABAQEBAQGUaCxLBoWUaCB0lFKUaDJoHSiWBgAAAAAAAAABAQEBAQGUaCxLBoWUaCB0lFKUaDdOdWJ1aBhOaBBOaDdOdWIu", "spaces": "OrderedDict([('achieved_goal', Box([-10. -10. -10.], [10. 10. 10.], (3,), float32)), ('desired_goal', Box([-10. -10. -10.], [10. 10. 10.], (3,), float32)), ('observation', Box([-10. -10. -10. -10. -10. -10.], [10. 10. 10. 10. 10. 10.], (6,), float32))])", "_shape": null, "dtype": null, "_np_random": null}, "action_space": {":type:": "<class 'gym.spaces.box.Box'>", ":serialized:": "gAWVbQEAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMBl9zaGFwZZRLA4WUjANsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWDAAAAAAAAAAAAIC/AACAvwAAgL+UaApLA4WUjAFDlHSUUpSMBGhpZ2iUaBIolgwAAAAAAAAAAACAPwAAgD8AAIA/lGgKSwOFlGgVdJRSlIwNYm91bmRlZF9iZWxvd5RoEiiWAwAAAAAAAAABAQGUaAeMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLA4WUaBV0lFKUjA1ib3VuZGVkX2Fib3ZllGgSKJYDAAAAAAAAAAEBAZRoIUsDhZRoFXSUUpSMCl9ucF9yYW5kb22UTnViLg==", "dtype": "float32", "_shape": [3], "low": "[-1. -1. -1.]", "high": "[1. 1. 
1.]", "bounded_below": "[ True True True]", "bounded_above": "[ True True True]", "_np_random": null}, "n_envs": 4, "num_timesteps": 1000000, "_total_timesteps": 1000000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1680431351470800598, "learning_rate": 0.0005, "tensorboard_log": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVwwIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOS9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZSMBGZ1bmOUS4JDAgABlIwDdmFslIWUKXSUUpR9lCiMC19fcGFja2FnZV9flIwYc3RhYmxlX2Jhc2VsaW5lczMuY29tbW9ulIwIX19uYW1lX1+UjB5zdGFibGVfYmFzZWxpbmVzMy5jb21tb24udXRpbHOUjAhfX2ZpbGVfX5SMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOS9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZR1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlIwcY2xvdWRwaWNrbGUuY2xvdWRwaWNrbGVfZmFzdJSMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgffZR9lChoFmgNjAxfX3F1YWxuYW1lX1+UjBljb25zdGFudF9mbi48bG9jYWxzPi5mdW5jlIwPX19hbm5vdGF0aW9uc19flH2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5RoF4wHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5RoAIwKX21ha2VfY2VsbJSTlEc/QGJN0vGp/IWUUpSFlIwXX2Nsb3VkcGlja2xlX3N1Ym1vZHVsZXOUXZSMC19fZ2xvYmFsc19flH2UdYaUhlIwLg=="}, "_last_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAAQXTOPqeWAjuF+RM/QXTOPqeWAjuF+RM/QXTOPqeWAjuF+RM/QXTOPqeWAjuF+RM/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAuAYnP68RZj8fVy8+1CG9v3CFxjxhTl4/y6W8PtH1hz/bY3o/ZjgKv6w7nz9xT4k/lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAABBdM4+p5YCO4X5Ez9k34E9JBNQO0KYdD1BdM4+p5YCO4X5Ez9k34E9JBNQO0KYdD1BdM4+p5YCO4X5Ez9k34E9JBNQO0KYdD1BdM4+p5YCO4X5Ez9k34E9JBNQO0KYdD2UaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[0.4032307 0.00199262 0.5780261 ]\n [0.4032307 0.00199262 0.5780261 ]\n [0.4032307 0.00199262 0.5780261 ]\n [0.4032307 0.00199262 0.5780261 ]]", "desired_goal": "[[ 0.65244627 0.89870733 0.17123078]\n [-1.4775949 0.02423355 0.86838347]\n [ 0.3684524 1.0621892 0.9780862 ]\n [-0.5399231 1.2440085 1.0727369 ]]", "observation": "[[0.4032307 0.00199262 0.5780261 0.06341437 0.00317497 0.05971552]\n [0.4032307 0.00199262 0.5780261 0.06341437 0.00317497 0.05971552]\n [0.4032307 0.00199262 0.5780261 0.06341437 0.00317497 0.05971552]\n [0.4032307 0.00199262 0.5780261 0.06341437 0.00317497 0.05971552]]"}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVdwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYEAAAAAAAAAAEBAQGUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKULg=="}, "_last_original_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": 
"gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAifg+vRWzBD53f489JoDIvT56HL3y0ao98Jyvvf9qK70KHzk+S2URviHHAD3GGY4+lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAACUaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]]", "desired_goal": "[[-0.04662374 0.1295894 0.07006734]\n [-0.09790067 -0.03820252 0.08340825]\n [-0.08574855 -0.04185009 0.18078247]\n [-0.14198796 0.0314399 0.2775404 ]]", "observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"}, "_episode_num": 0, "use_sde": true, "sde_sample_freq": -1, "_current_progress_remaining": 0.0, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVHRAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIODKP/MFA4r+UhpRSlIwBbJRLMowBdJRHQKfaoSNfgJl1fZQoaAZoCWgPQwjiy0QRUjfhv5SGlFKUaBVLMmgWR0Cn2mOJk5IZdX2UKGgGaAloD0MIYVCm0eTi57+UhpRSlGgVSzJoFkdAp9obmlqJuXV9lChoBmgJaA9DCB0AcVevoua/lIaUUpRoFUsyaBZHQKfZ34mkWRB1fZQoaAZoCWgPQwgEyxEykGfNv5SGlFKUaBVLMmgWR0Cn26gKF7D3dX2UKGgGaAloD0MIwM+4cCAk3b+UhpRSlGgVSzJoFkdAp9tqjnFHa3V9lChoBmgJaA9DCDxodt1bkdm/lIaUUpRoFUsyaBZHQKfbIrq+rU91fZQoaAZoCWgPQwgSvvc3aC/gv5SGlFKUaBVLMmgWR0Cn2uaAvtdBdX2UKGgGaAloD0MI86/llevt4b+UhpRSlGgVSzJoFkdAp9zRp5/smnV9lChoBmgJaA9DCDZYOEnzR+a/lIaUUpRoFUsyaBZHQKfclFAmiQF1fZQoaAZoCWgPQwgBomDGFKzav5SGlFKUaBVLMmgWR0Cn3ExeTmnwdX2UKGgGaAloD0MIiesYV1wc1r+UhpRSlGgVSzJoFkdAp9wQH1OCXnV9lChoBmgJaA9DCN8yp8tiYta/lIaUUpRoFUsyaBZHQKfeBNOdoWZ1fZQoaAZoCWgPQwglk1M7w9Thv5SGlFKUaBVLMmgWR0Cn3cdYwIt2dX2UKGgGaAloD0MIeLeyRGcZ5b+UhpRSlGgVSzJoFkdAp92AVKwpv3V9lChoBmgJaA9DCMZOeAlOfeK/lIaUUpRoFUsyaBZHQKfdRFglWwN1fZQoaAZoCWgPQwi3CffKvFXbv5SGlFKUaBVLMmgWR0Cn3xChFmWddX2UKGgGaAloD0MIEAcJUb6g2b+UhpRSlGgVSzJoFkdAp97S5NGmUHV9lChoBmgJaA9DCCzzVl2Hata/lIaUUpRoFUsyaBZHQKfeixlg+hZ1fZQoaAZoCWgPQwi22sNeKGDNv5SGlFKUaBVLMmgWR0Cn3k7KzRhMdX2UKGgGaAloD0MId0mcFVET3r+UhpRSlGgVSzJoFkdAp+AgDvE0i3V9lChoBmgJaA9DCHxFt17Tg+S/lIaUUpRoFUsyaBZHQKff4okRjBl1fZQoaAZoCWgPQwgLKT+p9mnjv5SGlFKUaBVLMmgWR0Cn35qrilzmdX2UKGgGaAloD0MIt2J/2T357b+UhpRSlGgVSzJoFkdAp99emce8w3V9lChoBmgJaA9DCIaPiCmRRM2/lIaUUpRoFUsyaBZHQKfhMJ+DvmZ1fZQoaAZoCWgPQwgAqyNHOoPkv5SGlFKUaBVLMmgWR0Cn4PMwtapxdX2UKGgGaAloD0MIdxN80/RZ4L+UhpRSlGgVSzJoFkdAp+CrYXfqHHV9lChoBmgJaA9DCNFXkGYsmu2/lIaUUpRoFUsyaBZHQKfgb0CA+ZB1fZQoaAZoCWgPQwgxmpXtQ97Wv5SGlFKUaBVLMmgWR0Cn4juuzQeFdX2UKGgGaAloD0MIh8CRQIPN4b+UhpRSlGgVSzJoFkdAp+H+qFRHgHV9lChoBmgJaA9DCHl4z4HlCOK/lIaUUpRoFUsyaBZHQKfhttEXtSh1fZQoaAZoCWgPQwjyYIvdPqvUv5SGlFKUaBVLMmgWR0Cn4Xq6vq1PdX2UKGgGaAloD0MIw9hCkIMS4b+UhpRSlGgVSzJoFkdAp+NZu4wyqXV9lChoBmgJaA9DCI9SCU/o9dK/lIaUUpRoFUsyaBZHQKfjHGwRoRJ1fZQoa
AZoCWgPQwjWxtgJL8Hlv5SGlFKUaBVLMmgWR0Cn4tSC4BmxdX2UKGgGaAloD0MID4C4q1eR57+UhpRSlGgVSzJoFkdAp+KYgvDgqHV9lChoBmgJaA9DCNi2KLNBJua/lIaUUpRoFUsyaBZHQKfkgRzzVc51fZQoaAZoCWgPQwiafLPNjWnmv5SGlFKUaBVLMmgWR0Cn5EOloDgZdX2UKGgGaAloD0MIBmSvd3+84b+UhpRSlGgVSzJoFkdAp+P7vy9VWHV9lChoBmgJaA9DCIuNeR1xSOm/lIaUUpRoFUsyaBZHQKfjv8YQ8Ol1fZQoaAZoCWgPQwiNJEG4Aorhv5SGlFKUaBVLMmgWR0Cn5YG8mKIjdX2UKGgGaAloD0MITWiSWFLu5L+UhpRSlGgVSzJoFkdAp+VEIzFdcHV9lChoBmgJaA9DCPOQKR+CqtC/lIaUUpRoFUsyaBZHQKfk/D2rXDp1fZQoaAZoCWgPQwgTmiSWlLvhv5SGlFKUaBVLMmgWR0Cn5L/0NBnjdX2UKGgGaAloD0MIQN1AgXfy3L+UhpRSlGgVSzJoFkdAp+aQD/2kBXV9lChoBmgJaA9DCIPAyqFFtta/lIaUUpRoFUsyaBZHQKfmUla8pTd1fZQoaAZoCWgPQwiBlUOLbOflv5SGlFKUaBVLMmgWR0Cn5gp53TuwdX2UKGgGaAloD0MIjdR7Kqc92r+UhpRSlGgVSzJoFkdAp+XOL9/BnHV9lChoBmgJaA9DCFotsMdESsu/lIaUUpRoFUsyaBZHQKfnmJTl1bJ1fZQoaAZoCWgPQwiLNzKP/MHIv5SGlFKUaBVLMmgWR0Cn51sJpnHvdX2UKGgGaAloD0MIQ6z+CMOA3b+UhpRSlGgVSzJoFkdAp+cTAxi5NHV9lChoBmgJaA9DCNU8R+S7FOG/lIaUUpRoFUsyaBZHQKfm1vYvnKZ1fZQoaAZoCWgPQwgepn1zf/Xhv5SGlFKUaBVLMmgWR0Cn6SNZ/0/XdX2UKGgGaAloD0MIEcZP497837+UhpRSlGgVSzJoFkdAp+jnTI/7i3V9lChoBmgJaA9DCC4gtB6+zOS/lIaUUpRoFUsyaBZHQKfooN3GGVR1fZQoaAZoCWgPQwgbLJyk+WPZv5SGlFKUaBVLMmgWR0Cn6GVsUIszdX2UKGgGaAloD0MIDK1OzlBc6L+UhpRSlGgVSzJoFkdAp+sBM8HObHV9lChoBmgJaA9DCEXaxp+obOm/lIaUUpRoFUsyaBZHQKfqxALy+Yd1fZQoaAZoCWgPQwhKCiyAKYPjv5SGlFKUaBVLMmgWR0Cn6n+2uxKQdX2UKGgGaAloD0MIUN8yp8ti3r+UhpRSlGgVSzJoFkdAp+pEn/kvK3V9lChoBmgJaA9DCJc48kBkkd+/lIaUUpRoFUsyaBZHQKfsoqZML4N1fZQoaAZoCWgPQwh6NUBpqFHiv5SGlFKUaBVLMmgWR0Cn7GZgogFHdX2UKGgGaAloD0MIAiocQSrF0b+UhpRSlGgVSzJoFkdAp+wfH3lCC3V9lChoBmgJaA9DCKhSswdaAeO/lIaUUpRoFUsyaBZHQKfr48SPEKp1fZQoaAZoCWgPQwhPsWoQ5vbiv5SGlFKUaBVLMmgWR0Cn7nzuF6AwdX2UKGgGaAloD0MIlddK6C6J0r+UhpRSlGgVSzJoFkdAp+5A8GLUC3V9lChoBmgJaA9DCBP0F3rEaOO/lIaUUpRoFUsyaBZHQKft+fywwCd1fZQoaAZoCWgPQwg2zNB4IojSv5SGlFKUaBVLMmgWR0Cn7b9If8uSdX2UKGgGaAloD0MI9b2G4LiM1b+UhpRSlGgVSzJoFkdAp/BOn4wh4nV9lChoBmgJaA9DCKlOB7Ke2uC/lIaUUpRoFUsyaBZHQKfwEcJ+lTF1fZQoaAZoCWgPQwhe29styQHhv5SGlFKUaBVLMmgWR0Cn78rlFMIvdX2UKGgGaAloD0MIWvJ4Wn7g47+UhpRSlGgVSzJoFkdAp++PhCMP0HV9lChoBmgJaA9DCJXyWgndJeu/lIaUUpRoFUsyaBZHQKfyAdKdxyZ1fZQoaAZoCWgPQwjYKOs3E1Pjv5SGlFKUaBVLMmgWR0Cn8cUMPSUkdX2UKGgGaAloD0MIjpWYZyUt47+UhpRSlGgVSzJoFkdAp/F93fQ8fXV9lChoBmgJaA9DCD3RdeEH5+C/lIaUUpRoFUsyaBZHQKfxQrgflp51fZQoaAZoCWgPQwjAdcWM8Hbhv5SGlFKUaBVLMmgWR0Cn8ysqJ/G3dX2UKGgGaAloD0MIW88Qjll27L+UhpRSlGgVSzJoFkdAp/LtcUuct3V9lChoBmgJaA9DCCbFxydk59i/lIaUUpRoFUsyaBZHQKfypaRISUV1fZQoaAZoCWgPQwiVRPZBlgXRv5SGlFKUaBVLMmgWR0Cn8ml0PpY+dX2UKGgGaAloD0MIcM0d/S9X5L+UhpRSlGgVSzJoFkdAp/QkBMi8nXV9lChoBmgJaA9DCKK0N/jCZOG/lIaUUpRoFUsyaBZHQKfz5mGucMF1fZQoaAZoCWgPQwi/1TpxOR7yv5SGlFKUaBVLMmgWR0Cn8546XBxhdX2UKGgGaAloD0MIWvW52op97L+UhpRSlGgVSzJoFkdAp/Nh+H8CP3V9lChoBmgJaA9DCG+3JAfsKvS/lIaUUpRoFUsyaBZHQKf1L6OYIB11fZQoaAZoCWgPQwhxrIvbaIDkv5SGlFKUaBVLMmgWR0Cn9PH2AXl9dX2UKGgGaAloD0MI81Zdh2rK7L+UhpRSlGgVSzJoFkdAp/SqEnLJS3V9lChoBmgJaA9DCKNcGr/wSum/lIaUUpRoFUsyaBZHQKf0bgLJCBx1fZQoaAZoCWgPQwgFbt3NUx3dv5SGlFKUaBVLMmgWR0Cn9l82aUiZdX2UKGgGaAloD0MIK0t0lllE9L+UhpRSlGgVSzJoFkdAp/YiMglniHV9lChoBmgJaA9DCKK3eHjPAea/lIaUUpRoFUsyaBZHQKf13C1qnFZ1fZQoaAZoCWgPQwjNVl7yP/nVv5SGlFKUaBVLMmgWR0Cn9aDRc/t6dX2UKGgGaAloD0MIXD6Skh6G4L+UhpRSlGgVSzJoFkdAp/doCjk+5nV9lChoBmgJaA9DCJiiXBq/cOq/lIaUUpRoFUsyaBZHQKf3KkZ75VR1fZQoaAZoCWgPQwhmpN5TOW3qv5SGlFKUaBVLMmgWR0Cn9uJkwvg4dX2UKGgGaAloD0MIKbSs+8dC6L+UhpRSlGgVSzJoFkdAp/amRRuTA3V9lChoBmgJaA9DCNIBSdi30/m/lIaUUpRoFUsyaBZHQKf4ZjbSJCV1fZQoaAZoCWgPQwj4cTRHVv7ov5SGlFKUaBVLMmgWR0Cn+Ch91EE1dX2UKGgGaAloD0MIVmEzwAXZ6r+UhpRSlGgVSzJoFkdAp/fhLdvbXnV9lChoBmgJaA9DCNfAVgkWh+e/lIaUUpRoFUsyaBZHQKf3pW8yvcJ1ZS4="}, "ep_success_buffer": {":type:": 
"<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 31250, "n_steps": 8, "gamma": 0.99, "gae_lambda": 0.9, "ent_coef": 0.0, "vf_coef": 0.4, "max_grad_norm": 0.5, "normalize_advantage": false, "system_info": {"OS": "Linux-5.10.147+-x86_64-with-glibc2.31 # 1 SMP Sat Dec 10 16:00:40 UTC 2022", "Python": "3.9.16", "Stable-Baselines3": "1.7.0", "PyTorch": "1.13.1+cu116", "GPU Enabled": "True", "Numpy": "1.22.4", "Gym": "0.21.0"}}
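Besides the hyperparameters already visible in the data file, config.json records the spaces: a Dict observation space with achieved_goal (3,), desired_goal (3,) and observation (6,) Box components, a Box(-1, 1, (3,)) action space, and a system_info block (SB3 1.7.0, Gym 0.21.0, PyTorch 1.13.1+cu116, Python 3.9.16). A small sketch that checks those spaces against a freshly built environment, assuming panda_gym is installed:

# Sketch: the printed spaces should match the ones serialized in config.json.
import gym
import panda_gym  # assumed dependency that registers the Panda tasks

env = gym.make("PandaReachDense-v2")
print(env.observation_space)  # Dict(achieved_goal: Box(3,), desired_goal: Box(3,), observation: Box(6,))
print(env.action_space)       # Box(-1.0, 1.0, (3,), float32)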
replay.mp4
CHANGED
Binary files a/replay.mp4 and b/replay.mp4 differ
results.json
CHANGED
@@ -1 +1 @@
-{"mean_reward": -
+{"mean_reward": -0.9586081984918564, "std_reward": 0.5167007926031151, "is_deterministic": true, "n_eval_episodes": 10, "eval_datetime": "2023-04-02T11:27:33.675246"}
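results.json now holds the evaluation behind the README metric: mean reward -0.96 with standard deviation 0.52 over 10 deterministic episodes. A minimal sketch of how such a file is typically produced with SB3's evaluate_policy; the evaluation script itself is not included in the commit:

# Sketch: 'model' and 'eval_env' are assumed to exist (see the loading sketches above);
# only the numbers in results.json come from the commit.
import json
from datetime import datetime
from stable_baselines3.common.evaluation import evaluate_policy

mean_reward, std_reward = evaluate_policy(model, eval_env, n_eval_episodes=10, deterministic=True)
with open("results.json", "w") as f:
    json.dump(
        {
            "mean_reward": mean_reward,
            "std_reward": std_reward,
            "is_deterministic": True,
            "n_eval_episodes": 10,
            "eval_datetime": datetime.now().isoformat(),
        },
        f,
    )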
vec_normalize.pkl
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:7c6ff3a7fa786c0e4c473677cd3ba22fc5eed1a1bf29b0b53f025a3e137d1fcb
 size 3056
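vec_normalize.pkl stores the VecNormalize running statistics used to normalize observations during training; evaluation should reload them with updates frozen and raw rewards reported. A minimal sketch using the standard SB3 API:

# Sketch: wrap an eval env with the saved normalization statistics, frozen for evaluation.
from stable_baselines3.common.env_util import make_vec_env
from stable_baselines3.common.vec_env import VecNormalize

eval_env = make_vec_env("PandaReachDense-v2", n_envs=1)
eval_env = VecNormalize.load("vec_normalize.pkl", eval_env)
eval_env.training = False        # do not update the running statistics
eval_env.norm_reward = False     # report raw (un-normalized) rewards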