synthmonad committed · Commit 1528c44 · verified · 1 Parent(s): 09a93cc

Upload PPO LunarLander-v2 trained agent

README.md CHANGED
@@ -16,7 +16,7 @@ model-index:
       type: LunarLander-v2
     metrics:
     - type: mean_reward
-      value: 277.74 +/- 31.21
+      value: 246.45 +/- 27.90
       name: mean_reward
       verified: false
 ---
config.json CHANGED
@@ -1 +1 @@
- {"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==", "__module__": "stable_baselines3.common.policies", "__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function ActorCriticPolicy.__init__ at 0x7c6aed3be320>", "_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at 0x7c6aed3be3b0>", "reset_noise": "<function ActorCriticPolicy.reset_noise at 0x7c6aed3be440>", "_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at 0x7c6aed3be4d0>", "_build": "<function ActorCriticPolicy._build at 0x7c6aed3be560>", "forward": "<function ActorCriticPolicy.forward at 0x7c6aed3be5f0>", "extract_features": "<function ActorCriticPolicy.extract_features at 0x7c6aed3be680>", "_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at 0x7c6aed3be710>", "_predict": "<function ActorCriticPolicy._predict at 0x7c6aed3be7a0>", "evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at 0x7c6aed3be830>", "get_distribution": "<function ActorCriticPolicy.get_distribution at 0x7c6aed3be8c0>", "predict_values": "<function ActorCriticPolicy.predict_values at 0x7c6aed3be950>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x7c6aed3c80c0>"}, "verbose": 1, "policy_kwargs": {}, "num_timesteps": 1507328, "_total_timesteps": 1500000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1709833840974905046, "learning_rate": 0.0003, "tensorboard_log": null, "_last_obs": {":type:": "<class 'numpy.ndarray'>", ":serialized:": 
"gAWVdQIAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYAAgAAAAAAACrekz7LFSc/7iG+vfonDL/+cJo+wBAvvgAAAAAAAAAAGoMVvbRg4z4FEvs8m5CbvkDSD71ySfk7AAAAAAAAAADNj4o9OCI3PzszEj1Koe2+R8TUPe/gA70AAAAAAAAAAFMMAz5gOf4+AfoavuVdtL4dGQ09Q5MQvgAAAAAAAAAAza5cvcKGDT42bYE9L/RLvpHlbbzK3B89AAAAAAAAAAAzCgi9sYGVPzJ0vr3KqSe/AVBFOyuh9rwAAAAAAAAAAM0KUD7UJLm85hwRO9wegbnAPym+ipNBugAAgD8AAIA/8+KJvYY0kT7rpPY9T2yNvoXno7wZ/DQ8AAAAAAAAAACa14U99x0PP6gtwb2Znqy+3xamPV0JJr4AAAAAAAAAAMZFNT5Aw44/YGVgPl7WJb8CS2I+1lYCvAAAAAAAAAAAmoP0PLwUAD0gvA+9W71NvgH7rbwJBzY9AAAAAAAAAADNK7C8eO7jPH2LWT5ELCm+g/31Pd4hp7sAAAAAAAAAAGaj+DxIBLw+UuKfvbthrr4qGK+8ddY5vAAAAAAAAAAA4LU2PvSg8D1kT0a+ldYEvvkImr2gC228AAAAAAAAAACawoY8KSQ+uitW5TLRVJ4xPoQxO5Itj7MAAIA/AACAP5oMxz2R+n4+aNXnvUy/jb4U7zS9quqMvQAAAAAAAAAAlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksQSwiGlIwBQ5R0lFKULg=="}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVgwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSxCFlIwBQ5R0lFKULg=="}, "_last_original_obs": null, "_episode_num": 0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": -0.004885333333333408, "_stats_window_size": 100, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWV9QsAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHQHMAMJY1YQuMAWyUTTcBjAF0lEdAlsZ2xD9fkXV9lChoBkdAc1G3AVO9FmgHTQIBaAhHQJbIQoQWepZ1fZQoaAZHQHOeDb349HNoB0vhaAhHQJbIfA/LTx51fZQoaAZHQHEuCydFvydoB0vOaAhHQJbIvRgJC0F1fZQoaAZHQHC2fw/gR9RoB0vmaAhHQJbJEzzmOlx1fZQoaAZHQG8IYRujynVoB0voaAhHQJbJsAiml691fZQoaAZHQHJwXFglWwNoB0vaaAhHQJbJ5aC+UQl1fZQoaAZHQHDRpkTYdyVoB0vNaAhHQJbJ5JJ5E+h1fZQoaAZHQG//SntOVPhoB0vXaAhHQJbKRwn6VMV1fZQoaAZHQHJd7bHp8nhoB01WAWgIR0CWyuYNiH6/dX2UKGgGR0BxcpsoDxLCaAdNEgFoCEdAlsti1Z1V53V9lChoBkdAbhxQEZBLPGgHS+VoCEdAlsvyv9tMwnV9lChoBkdAcnYky1uzhWgHTQYBaAhHQJbMdRpDeCV1fZQoaAZHQHGlb6pHZsdoB0vjaAhHQJbMutdRiw11fZQoaAZHQHNPQAlv60poB00HAWgIR0CWzPasp5NXdX2UKGgGR0BwdkIt16mgaAdNFgFoCEdAls0RqO938nV9lChoBkdAcPHBClabF2gHS/poCEdAls2jHwPRRnV9lChoBkdATghHCoCMgmgHS6NoCEdAls5fM4cWCXV9lChoBkdAc0ZnSOR1YGgHS89oCEdAls52QOnVG3V9lChoBkdAcO+sqJ/G2mgHS9hoCEdAls58+aBqbnV9lChoBkdAcqGIRRMviGgHS+FoCEdAls6OoYNy53V9lChoBkdAcavKxcE/0WgHTQkBaAhHQJbQSqDK5kN1fZQoaAZHQHGiuglF+d9oB0veaAhHQJbQUxk/bCd1fZQoaAZHQHFmhInSfDloB0v5aAhHQJbQsCfYjB51fZQoaAZHQHKY7AgxJuloB0vHaAhHQJbQzu7YkE91fZQoaAZHQHDOC39aUzNoB00TAWgIR0CW0SV9nbqRdX2UKGgGR0ByBTOZ9d/saAdL62gIR0CW0UQl8gIQdX2UKGgGR0Bx1UkGA09AaAdLx2gIR0CW0UaUzKs/dX2UKGgGR0Bw/ErEtNBXaAdL0mgIR0CW0f0JF9a2dX2UKGgGR0BzF7EUCaJAaAdLzmgIR0CW0mhdt2s8dX2UKGgGR0ByLykKu0TlaAdLvmgIR0CW0o0TURWcdX2UKGgGR0BvTlDpkf9xaAdL7WgIR0CW0tx8D0UXdX2UKGgGR0ByipR4yGi6aAdNCwFoCEdAltPIvvjOs3V9lChoBkdAbPYeT3Zf2WgHS9toCEdAltQSAH3UQXV9lChoBkdAc166Gxlg+mgHS+doCEdAltR2OEM9bHV9lChoBkdAcoAeIVM232gHS/VoCEdAltS+ws5GSnV9lChoBkdAcm+NAC4jKWgHS+FoCEdAlusN2s7uD3V9lChoBkdAcFB4RVZLZmgHS9NoCEdAlusm0eEIxHV9lChoBkdAcDnuU2UB4mgHS9toCEdAlus/crRSg3V9lChoBkdAcwPXTEzfrWgHS+1oCEdAlutgOrhisnV9lChoBkdAcXFkOZssQWgHS91oCEdAluvMfV7QcHV9lChoBkdAcUh/D+BH1GgHS+NoCEdAluwP3vhIfHV9lChoBkdAclVSg5BC2WgHTVoBaAhHQJbsTJ9y9251fZQoaAZHQHAvcchkiEBoB0vcaAhHQJbtA8QqZtx1fZQoaAZHQHFKcTi83/BoB00OAWgIR0CW7R/MGHHndX2UKGgGR0BzNr4QBgeBaAdL1GgIR0CW7UQyRB/rdX2UKGgGR0BxkCpqASWaaAdLzmgIR0CW7tDAaef7dX2UKGgGR0By2U1+AmReaAdNJAFoCEdAlu8TLGJemnV9lChoBkdAcXKibUgB92gHS/NoCEdAlu8ieNDMNnV9lChoBkdAclAk078vVWgHS/xoCEdAlu+kMb3oLXV9lChoBkdAcT5/FBIFvGgHTQIBaAhHQJbwi4Ds+mp1fZQoaAZHQHQSeXNTtLNoB0vSaAhHQJbwlBlcyFh1fZQoaAZHQ
HA3v9LpRoBoB0vMaAhHQJbxIPSUkfN1fZQoaAZHQHI1vRJEpiJoB0vjaAhHQJbxHhhpg1F1fZQoaAZHQHBfXxBmf5FoB0vwaAhHQJbxpiKBNEh1fZQoaAZHQHK/MsMAmzBoB0v7aAhHQJbx0bn5i3J1fZQoaAZHQHE4ZdKNAC5oB0vlaAhHQJbyUGA08/51fZQoaAZHQHFrc/pt78hoB0v6aAhHQJbyoFeOXE91fZQoaAZHQHLxsf7rLQpoB0vhaAhHQJbzShg3Lmp1fZQoaAZHQHLPnqVyFPBoB00LAWgIR0CW9Evh60IDdX2UKGgGR0ByXp2HLzPKaAdLz2gIR0CW9XtSydFwdX2UKGgGR0BvsnrKNhmYaAdL8mgIR0CW9Z8c+7lJdX2UKGgGR0BzkYpVjqfOaAdL62gIR0CW9a+4smOVdX2UKGgGR0Bur6dz4k/saAdL9GgIR0CW9gVfeDWcdX2UKGgGR0ByxZyU9pyqaAdL1mgIR0CW936UJOWTdX2UKGgGR0BydcD/2kBTaAdL8GgIR0CW968baRISdX2UKGgGR0BxBpRMvh60aAdL5WgIR0CW9/4bS7XhdX2UKGgGR0BwQ6dqcmShaAdNAQFoCEdAlvg3MyJsPHV9lChoBkdARcmPkq+ajWgHS7VoCEdAlvhMdcSoO3V9lChoBkdAcDX0/4ZdfWgHS9poCEdAlviCRbKRuHV9lChoBkdAcX68Aq/dqWgHS+1oCEdAlvjf5tWMj3V9lChoBkdAcnrbor4FimgHS/1oCEdAlvoAvUSZjXV9lChoBkdAcjlwvg3tKWgHTQgBaAhHQJb7gY1pCa91fZQoaAZHQHDVdpqREF5oB0v9aAhHQJb8VshxHXp1fZQoaAZHQHI1TMqz7dloB0vVaAhHQJb8Ylme18d1fZQoaAZHQHD3je9Ba9toB0vzaAhHQJb9Jnyup0h1fZQoaAZHQHHUiW7e2uxoB0vxaAhHQJb9pda+vhZ1fZQoaAZHQHJItXo1UERoB00QAWgIR0CW/j+FDfFadX2UKGgGR0BwpwmdAgPmaAdL32gIR0CW/okjHGS7dX2UKGgGR0Bu0ATZg5R1aAdL32gIR0CW/riCrcTKdX2UKGgGR0BzAcPSUkfLaAdLy2gIR0CW/uPSUkfLdX2UKGgGR0Bx5C3lS0jUaAdL6GgIR0CW/1PTG5tndX2UKGgGR0BwI83gk1MuaAdL5mgIR0CW/3zNUwSKdX2UKGgGR0BxlViF0xM4aAdL32gIR0CW//IfKZDzdX2UKGgGR0BzhSsQumJnaAdL+WgIR0CXAB8Yht+DdX2UKGgGR0Bxvqcpb2UTaAdL3GgIR0CXAQVDKHO9dX2UKGgGR0BxkP5GjKxLaAdL9WgIR0CXA4v/zasZdX2UKGgGR0BkYosNDtw8aAdN6ANoCEdAlwQ8nuy/sXV9lChoBkdAcYtLkCFK02gHS9xoCEdAlwSAgcLjP3V9lChoBkdAcd/4mkWRBGgHS9xoCEdAlwUTUVi4KHV9lChoBkdAbaqWvbGm12gHTQYBaAhHQJcFJabF0gd1fZQoaAZHQHJfzbN8ma9oB00OAWgIR0CXBXMefZmJdX2UKGgGR0Bybb3sXzlLaAdL4GgIR0CXBk1iONo8dX2UKGgGR0ByRw3Ns3yaaAdL4mgIR0CXBpLmITGpdX2UKGgGR0ByhLDTBqKxaAdL2mgIR0CXBr21UlzEdX2UKGgGR0BwzshbGFSLaAdL82gIR0CXBsmUW2w3dX2UKGgGR0BxA2c2BJ7LaAdL/WgIR0CXBttUn5SFdX2UKGgGR0Bx3NMGorFwaAdL3WgIR0CXB3lKsdT6dX2UKGgGR0ByaBD7ZWaMaAdL2WgIR0CXB4mqHXVcdX2UKGgGR0ByPjFzdUKiaAdLz2gIR0CXCCBDohZAdWUu"}, "ep_success_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 368, "observation_space": {":type:": "<class 'gymnasium.spaces.box.Box'>", ":serialized:": "gAWVdgIAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWCAAAAAAAAAABAQEBAQEBAZRoCIwCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksIhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoESiWCAAAAAAAAAABAQEBAQEBAZRoFUsIhZRoGXSUUpSMBl9zaGFwZZRLCIWUjANsb3eUaBEoliAAAAAAAAAAAAC0wgAAtMIAAKDAAACgwNsPScAAAKDAAAAAgAAAAICUaAtLCIWUaBl0lFKUjARoaWdolGgRKJYgAAAAAAAAAAAAtEIAALRCAACgQAAAoEDbD0lAAACgQAAAgD8AAIA/lGgLSwiFlGgZdJRSlIwIbG93X3JlcHKUjFtbLTkwLiAgICAgICAgLTkwLiAgICAgICAgIC01LiAgICAgICAgIC01LiAgICAgICAgIC0zLjE0MTU5MjcgIC01LgogIC0wLiAgICAgICAgIC0wLiAgICAgICBdlIwJaGlnaF9yZXBylIxTWzkwLiAgICAgICAgOTAuICAgICAgICAgNS4gICAgICAgICA1LiAgICAgICAgIDMuMTQxNTkyNyAgNS4KICAxLiAgICAgICAgIDEuICAgICAgIF2UjApfbnBfcmFuZG9tlE51Yi4=", "dtype": "float32", "bounded_below": "[ True True True True True True True True]", "bounded_above": "[ True True True True True True True True]", "_shape": [8], "low": "[-90. -90. -5. -5. -3.1415927 -5.\n -0. -0. ]", "high": "[90. 90. 5. 5. 3.1415927 5.\n 1. 1. ]", "low_repr": "[-90. -90. -5. -5. -3.1415927 -5.\n -0. -0. ]", "high_repr": "[90. 90. 5. 5. 3.1415927 5.\n 1. 1. 
]", "_np_random": null}, "action_space": {":type:": "<class 'gymnasium.spaces.discrete.Discrete'>", ":serialized:": "gAWV2wAAAAAAAACMGWd5bW5hc2l1bS5zcGFjZXMuZGlzY3JldGWUjAhEaXNjcmV0ZZSTlCmBlH2UKIwBbpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCaTiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIBAAAAAAAAACUhpRSlIwFc3RhcnSUaAhoDkMIAAAAAAAAAACUhpRSlIwGX3NoYXBllCmMBWR0eXBllGgOjApfbnBfcmFuZG9tlE51Yi4=", "n": "4", "start": "0", "_shape": [], "dtype": "int64", "_np_random": null}, "n_envs": 16, "n_steps": 1024, "gamma": 0.999, "gae_lambda": 0.98, "ent_coef": 0.01, "vf_coef": 0.5, "max_grad_norm": 0.5, "batch_size": 64, "n_epochs": 4, "clip_range": {":type:": "<class 'function'>", ":serialized:": "gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuEQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz/JmZmZmZmahZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "clip_range_vf": null, "normalize_advantage": true, "target_kl": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuEQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz8zqSowVTJhhZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "system_info": {"OS": "Linux-6.1.58+-x86_64-with-glibc2.35 # 1 SMP PREEMPT_DYNAMIC Sat Nov 18 15:31:17 UTC 2023", "Python": "3.10.12", "Stable-Baselines3": "2.0.0a5", "PyTorch": "2.1.0+cu121", "GPU Enabled": "True", "Numpy": "1.25.2", "Cloudpickle": "2.2.1", "Gymnasium": "0.28.1", "OpenAI Gym": "0.25.2"}}
 
+ {"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==", "__module__": "stable_baselines3.common.policies", "__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function ActorCriticPolicy.__init__ at 0x7c149f37e950>", "_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at 0x7c149f37e9e0>", "reset_noise": "<function ActorCriticPolicy.reset_noise at 0x7c149f37ea70>", "_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at 0x7c149f37eb00>", "_build": "<function ActorCriticPolicy._build at 0x7c149f37eb90>", "forward": "<function ActorCriticPolicy.forward at 0x7c149f37ec20>", "extract_features": "<function ActorCriticPolicy.extract_features at 0x7c149f37ecb0>", "_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at 0x7c149f37ed40>", "_predict": "<function ActorCriticPolicy._predict at 0x7c149f37edd0>", "evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at 0x7c149f37ee60>", "get_distribution": "<function ActorCriticPolicy.get_distribution at 0x7c149f37eef0>", "predict_values": "<function ActorCriticPolicy.predict_values at 0x7c149f37ef80>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x7c149f3233c0>"}, "verbose": 1, "policy_kwargs": {}, "num_timesteps": 1015808, "_total_timesteps": 1000000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1723810593727915757, "learning_rate": 0.0003, "tensorboard_log": null, "_last_obs": {":type:": "<class 'numpy.ndarray'>", ":serialized:": 
"gAWVdQIAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYAAgAAAAAAAK3BDj5815A/ILwLPz816r562N89eoM7PgAAAAAAAAAAWrjcPQcVsj+2IKM+7FLRvkoM/D1OTzo9AAAAAAAAAADm7Gm9aG31PlXQcrr1uCu+bADUvKNfuLwAAAAAAAAAAI2nEr6L0Wg/8MoZvgbzmr6nlMS9LZZePQAAAAAAAAAAABYePe9JIz/i3+G80vehvqqJy7ocDY68AAAAAAAAAABmj2K9IuaBPzIUhDtdjsS+8x8fPI4Tsz0AAAAAAAAAAMCHJT6FIfy70Mq4vRGjiT11wAo+JwCuvAAAgD8AAIA/gF+XvTg/wLtiSZA83POzPGVcEL1dupY9AACAPwAAgD9ARzs+ZepPPgZ+Nr7TazK+zjvfPDpFjLwAAAAAAAAAAJqd8rtI65W63NbBt76CsrKk/KK6ZlbgNgAAgD8AAIA/AF9NvoLVvz9wVCi/rrx3vuE8Vr4SuoG+AAAAAAAAAACmXbC+igEGP2XGTT6RGIK+pihLvVXOxD0AAAAAAAAAANMjGD69JYI/WvIDPsObAr+qjhg+/rVZPAAAAAAAAAAAMx0sPNSR4z2jatS9Axsfvv0hG7xswkw6AAAAAAAAAAASKcW+qu+DP4qZvb2pK5O+Cp+FvvGzrz0AAAAAAAAAADOHJDyxOyI/+mGBvEl8UL7Ffwi8WguaPAAAAAAAAAAAlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksQSwiGlIwBQ5R0lFKULg=="}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVgwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSxCFlIwBQ5R0lFKULg=="}, "_last_original_obs": null, "_episode_num": 0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": -0.015808000000000044, "_stats_window_size": 100, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVQgwAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHQHAyodhiLEWMAWyUTX8CjAF0lEdAn21ELtu1nnV9lChoBkdAbfDejVQQ+WgHTQUDaAhHQJ9xH1/Ue+51fZQoaAZHQG0Owqy4Wk9oB000AWgIR0Cfhw9oN/e+dX2UKGgGR0Br0azE74i5aAdNLAFoCEdAn4dhgqmTDHV9lChoBkdAbG6y44Ia+GgHTVsBaAhHQJ+HhfhMrVh1fZQoaAZHQHHoS88La25oB02NAWgIR0CfiehWYF7ldX2UKGgGR0BxFdZ3cHnmaAdNQgFoCEdAn4oLwF1SwXV9lChoBkdAbIIToMa0hWgHTTkBaAhHQJ+KFmh/RVp1fZQoaAZHQGpUEqc3EQ5oB00dAmgIR0Cfionyup0fdX2UKGgGR0BtROGucMEzaAdNYwJoCEdAn4qcAWBSUHV9lChoBkdAcQIU/OdGzGgHTWUBaAhHQJ+LYD6nBLx1fZQoaAZHQHCCHNC7btZoB01eAWgIR0CfjGJ66asqdX2UKGgGR0Bs2QvexfOVaAdNdAFoCEdAn48oOpbUw3V9lChoBkdAcSve1rqMWGgHTVABaAhHQJ+PMcbR4Ql1fZQoaAZHQHFNnfhuO0doB02AAWgIR0Cfj5tvGZNPdX2UKGgGR0BxRPWVeKKpaAdNHgFoCEdAn5ButbLU1HV9lChoBkdAbM6/EfkmyGgHTUQBaAhHQJ+TZTIeYD11fZQoaAZHQHDv4x59mYloB01tAWgIR0CflQukDZDidX2UKGgGR0BrwjFjurp8aAdNeAFoCEdAn5U4tL+PzXV9lChoBkdAcllRyOq//WgHTSUBaAhHQJ+VOIfr8ix1fZQoaAZHQGRXoRqXWvtoB03oA2gIR0CflZm9xp+MdX2UKGgGR0Bw2mYx+KCQaAdNRgFoCEdAn5XlqagElnV9lChoBkdAcEokqMFUymgHTT8BaAhHQJ+WMz41xbV1fZQoaAZHQG2+d1dPci5oB00sAWgIR0CflnHymQ8wdX2UKGgGR0BxzW2TgVGkaAdNVQFoCEdAn5aAoCuEEnV9lChoBkdAb1mj2SMcZWgHTVUBaAhHQJ+WiS0Sh8J1fZQoaAZHQG5EvEjxCppoB009AWgIR0Cfl8D+zdDZdX2UKGgGR0BxQrapPykLaAdNIgFoCEdAn5lBHww0wnV9lChoBkdAcE1t4RmK7GgHTUABaAhHQJ+auDRMN+d1fZQoaAZHQHCNlARkEs9oB02FAWgIR0CfnM/kNnXedX2UKGgGR0BtY43PzFuOaAdNZAFoCEdAn5z2kSElFHV9lChoBkdAb/zr9ETg22gHTUABaAhHQJ+eh4MWoFV1fZQoaAZHQEmDgeii7CloB00SAWgIR0Cfn0NCJGe+dX2UKGgGR0BtRoSi/O+qaAdNNgFoCEdAn5+3sXzlLnV9lChoBkdAchhyZ8a4t2gHTTQBaAhHQJ+f68IzFdd1fZQoaAZHQHFlVbaAWi1oB009AWgIR0Cfn/UWl/H6dX2UKGgGR0BudEWZZ0SzaAdNLgFoCEdAn6B6sdT5wnV9lChoBkdAV2AR/ViF02gHTegDaAhHQJ+geAUcn3N1fZQoaAZHQG89jyWiUPhoB00tAWgIR0CfoIoN/e+FdX2UKGgGR0Bwc8ZWJaaDaAdNNAFoCEdAn6C2knCwbHV9lChoBkdAcOt0LMLWqmgHTRoBaAhHQJ+hPrZ8KHB1fZQoaAZHQHCDgSamXPZoB017AWgIR0CfoZNayKNydX2UKGgGR0BwBfXWe6I4aAdNTwFoCEdAn6RCFfzBh3V9lChoBkdAbrURradtmGgHTSwBaAhHQJ+kgTWXkYJ1fZQoaAZHQHDvF6u4gA9oB004AWgIR0CfpxOpKjBVdX2UKGgGR0Bw6DPSlWOqaAdNMQFoCEdAn6jNYGMXJ3V9lChoBkdAco2fsNUfgmgHTR8BaAhHQJ+prByjpLV1fZQoaAZHQHEGQa3qiXZoB01UAmgIR0CfqpTKkl/pdX2UKGgGR0Bxhp61LJ0XaAdNQQFoCEdAn6sSl3yI6HV9lChoBkdAb8GkB0ZFX2gHTU0BaAhHQJ/APhrFfiR1fZQoaAZHQG2c+tjkMkRoB00+AWgIR0CfwKcH4XXRd
X2UKGgGR0BwJeq0dBBzaAdNIAFoCEdAn8DSm65G0HV9lChoBkdAb06CNCJGfGgHTTUBaAhHQJ/BJqnFYMh1fZQoaAZHQHBCsJ+lTFVoB01UAWgIR0CfwU7pmmLtdX2UKGgGR0Bwg4TZg5R1aAdNVwFoCEdAn8FaYmb9ZXV9lChoBkdAcVMJg9eQdWgHTXEBaAhHQJ/BoH4XXRR1fZQoaAZHQG5eKv3ai9JoB01mAWgIR0Cfwct4iX6ZdX2UKGgGR0BwvIgieNDMaAdNVAFoCEdAn8W7QokRjHV9lChoBkdAM9MzZYgaFWgHS9NoCEdAn8ZQksz2vnV9lChoBkdAcbQbDdgv12gHTSsBaAhHQJ/G/8ZUDMh1fZQoaAZHQHGBcQ/X5FhoB00uAWgIR0CfySqO938odX2UKGgGR0Bxw44BFNL2aAdN0AFoCEdAn8qpmh/RV3V9lChoBkdAcPgy3Td+HGgHTUkBaAhHQJ/LGlchTwV1fZQoaAZHQHLaqT4cm0FoB00xAWgIR0Cfy8Majvd/dX2UKGgGR0Bw+/C0ngHeaAdNIAFoCEdAn8weLWI42nV9lChoBkdAb10EcKgIyGgHTUsBaAhHQJ/NTdnCfpV1fZQoaAZHQG73aiKziS9oB01FAWgIR0CfzcgSeyzHdX2UKGgGR0BxRjR1HOKPaAdNogFoCEdAn83GjGkvb3V9lChoBkdAcCVb0OEuhGgHTVMBaAhHQJ/N0ZNwiq11fZQoaAZHQG33SwwCbMJoB01UAWgIR0CfzuWDHwPRdX2UKGgGR0BssAzi0fHQaAdNbAFoCEdAn88XW8RL9XV9lChoBkdAcAdacZtNz2gHTVEBaAhHQJ/Tkqd6LO11fZQoaAZHQHCBPDgqEvloB00/AWgIR0Cf1DnnuAqedX2UKGgGR0Bw3aliz9jxaAdNDwFoCEdAn9RpPIn0CnV9lChoBkdAcjAJRO1v22gHTV0BaAhHQJ/UyX+l0o11fZQoaAZHQG/lebNKRMhoB00MAmgIR0Cf1g1U2kzodX2UKGgGR0Bw2UqtozvaaAdNLAFoCEdAn9bms/6frnV9lChoBkdAVPtzU7Sy+2gHTegDaAhHQJ/W84ffXPJ1fZQoaAZHQGv39Sde6ZpoB00gAWgIR0Cf2Ket0V8DdX2UKGgGR0Bs/2FpPAO8aAdNSAFoCEdAn9jVlsguAnV9lChoBkdAcV4BJqZc9mgHTV4BaAhHQJ/ZFpnHvMN1fZQoaAZHQHDNt3bEgntoB00mAWgIR0Cf2Up4bCJodX2UKGgGR0ByQZKzzErHaAdNUAFoCEdAn9lmyC4Bm3V9lChoBkdAcJY34Kx9omgHTTQBaAhHQJ/ZuYhMajx1fZQoaAZHQHLc6VII4VBoB00gAWgIR0Cf2hwCbMHKdX2UKGgGR0BwPlo0ygwoaAdNcgFoCEdAn9uQXIlt0nV9lChoBkdARgcophF3IWgHS/FoCEdAn91Eh/y5JHV9lChoBkdAcGG5+YtxuWgHTZUBaAhHQJ/eZkwvg3t1fZQoaAZHQHJkEf9xZMdoB01UAWgIR0Cf4SNA1NxmdX2UKGgGR0BxKZWZJCjUaAdNXAFoCEdAn+NAUUO/cnV9lChoBkdAbX1yDIzWPWgHTSwBaAhHQJ/jfmT1TR91fZQoaAZHQG+4HE/B3zNoB002AWgIR0Cf5C98JD3NdX2UKGgGR0Bt5goE0SAZaAdNXAFoCEdAn+T7iADq4nV9lChoBkdAcEqMvysjmmgHTZoBaAhHQJ/l9Gx2SuB1fZQoaAZHQHE64aYNRWNoB00hAWgIR0Cf5hndO6/ZdX2UKGgGR0BsgqMzdk8SaAdNKgFoCEdAn+YyEDhcaHV9lChoBkdAcSuuivgWJ2gHTSwBaAhHQJ/m9hVlwtJ1fZQoaAZHQHFWI1He7+VoB01XAWgIR0Cf5/g9eQdTdX2UKGgGR0Bym06Kcd5qaAdNSQFoCEdAn+gqAWi1zHV9lChoBkdAbXL4JNTLn2gHTUIBaAhHQJ/ocD7qIJt1ZS4="}, "ep_success_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 248, "observation_space": {":type:": "<class 'gymnasium.spaces.box.Box'>", ":serialized:": "gAWVdgIAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWCAAAAAAAAAABAQEBAQEBAZRoCIwCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksIhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoESiWCAAAAAAAAAABAQEBAQEBAZRoFUsIhZRoGXSUUpSMBl9zaGFwZZRLCIWUjANsb3eUaBEoliAAAAAAAAAAAAC0wgAAtMIAAKDAAACgwNsPScAAAKDAAAAAgAAAAICUaAtLCIWUaBl0lFKUjARoaWdolGgRKJYgAAAAAAAAAAAAtEIAALRCAACgQAAAoEDbD0lAAACgQAAAgD8AAIA/lGgLSwiFlGgZdJRSlIwIbG93X3JlcHKUjFtbLTkwLiAgICAgICAgLTkwLiAgICAgICAgIC01LiAgICAgICAgIC01LiAgICAgICAgIC0zLjE0MTU5MjcgIC01LgogIC0wLiAgICAgICAgIC0wLiAgICAgICBdlIwJaGlnaF9yZXBylIxTWzkwLiAgICAgICAgOTAuICAgICAgICAgNS4gICAgICAgICA1LiAgICAgICAgIDMuMTQxNTkyNyAgNS4KICAxLiAgICAgICAgIDEuICAgICAgIF2UjApfbnBfcmFuZG9tlE51Yi4=", "dtype": "float32", "bounded_below": "[ True True True True True True True True]", "bounded_above": "[ True True True True True True True True]", "_shape": [8], "low": "[-90. -90. -5. -5. -3.1415927 -5.\n -0. -0. ]", "high": "[90. 90. 5. 5. 3.1415927 5.\n 1. 1. ]", "low_repr": "[-90. -90. -5. -5. -3.1415927 -5.\n -0. -0. ]", "high_repr": "[90. 90. 5. 5. 3.1415927 5.\n 1. 1. 
]", "_np_random": null}, "action_space": {":type:": "<class 'gymnasium.spaces.discrete.Discrete'>", ":serialized:": "gAWV2wAAAAAAAACMGWd5bW5hc2l1bS5zcGFjZXMuZGlzY3JldGWUjAhEaXNjcmV0ZZSTlCmBlH2UKIwBbpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCaTiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIBAAAAAAAAACUhpRSlIwFc3RhcnSUaAhoDkMIAAAAAAAAAACUhpRSlIwGX3NoYXBllCmMBWR0eXBllGgOjApfbnBfcmFuZG9tlE51Yi4=", "n": "4", "start": "0", "_shape": [], "dtype": "int64", "_np_random": null}, "n_envs": 16, "n_steps": 1024, "gamma": 0.999, "gae_lambda": 0.98, "ent_coef": 0.01, "vf_coef": 0.5, "max_grad_norm": 0.5, "batch_size": 64, "n_epochs": 4, "clip_range": {":type:": "<class 'function'>", ":serialized:": "gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuEQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz/JmZmZmZmahZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "clip_range_vf": null, "normalize_advantage": true, "target_kl": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuEQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz8zqSowVTJhhZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "system_info": {"OS": "Linux-6.1.85+-x86_64-with-glibc2.35 # 1 SMP PREEMPT_DYNAMIC Thu Jun 27 21:05:47 UTC 2024", "Python": "3.10.12", "Stable-Baselines3": "2.0.0a5", "PyTorch": "2.3.1+cu121", "GPU Enabled": "True", "Numpy": "1.26.4", "Cloudpickle": "2.2.1", "Gymnasium": "0.28.1", "OpenAI Gym": "0.25.2"}}
ppo-LunarLander-v2.zip CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:69aa5e0f88efe244ee2e20d1121d97fa7c5bea98ad64b28374de64aa9c3935d5
- size 147980
+ oid sha256:717d81b34de9f0d1f96c26bc112f20a9d4f087c21b9ae8f4bbcc445d296b5d76
+ size 148084
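To try the updated checkpoint locally, a minimal sketch assuming the zip above has been downloaded from this repository (via the web UI or huggingface_hub) into the working directory:

# Sketch: load the uploaded checkpoint and run one greedy episode (file name as in this repo).
import gymnasium as gym
from stable_baselines3 import PPO

model = PPO.load("ppo-LunarLander-v2.zip")
env = gym.make("LunarLander-v2", render_mode="human")

obs, info = env.reset()
done = False
while not done:
    action, _ = model.predict(obs, deterministic=True)  # greedy action, as in the evaluation
    obs, reward, terminated, truncated, info = env.step(action)
    done = terminated or truncated
env.close()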
ppo-LunarLander-v2/data CHANGED
@@ -4,34 +4,34 @@
  ":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==",
  "__module__": "stable_baselines3.common.policies",
  "__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
- "__init__": "<function ActorCriticPolicy.__init__ at 0x7c6aed3be320>",
- "_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at 0x7c6aed3be3b0>",
- "reset_noise": "<function ActorCriticPolicy.reset_noise at 0x7c6aed3be440>",
- "_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at 0x7c6aed3be4d0>",
- "_build": "<function ActorCriticPolicy._build at 0x7c6aed3be560>",
- "forward": "<function ActorCriticPolicy.forward at 0x7c6aed3be5f0>",
- "extract_features": "<function ActorCriticPolicy.extract_features at 0x7c6aed3be680>",
- "_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at 0x7c6aed3be710>",
- "_predict": "<function ActorCriticPolicy._predict at 0x7c6aed3be7a0>",
- "evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at 0x7c6aed3be830>",
- "get_distribution": "<function ActorCriticPolicy.get_distribution at 0x7c6aed3be8c0>",
- "predict_values": "<function ActorCriticPolicy.predict_values at 0x7c6aed3be950>",
  "__abstractmethods__": "frozenset()",
- "_abc_impl": "<_abc._abc_data object at 0x7c6aed3c80c0>"
  },
  "verbose": 1,
  "policy_kwargs": {},
- "num_timesteps": 1507328,
- "_total_timesteps": 1500000,
  "_num_timesteps_at_start": 0,
  "seed": null,
  "action_noise": null,
- "start_time": 1709833840974905046,
  "learning_rate": 0.0003,
  "tensorboard_log": null,
  "_last_obs": {
  ":type:": "<class 'numpy.ndarray'>",
- ":serialized:": "gAWVdQIAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYAAgAAAAAAACrekz7LFSc/7iG+vfonDL/+cJo+wBAvvgAAAAAAAAAAGoMVvbRg4z4FEvs8m5CbvkDSD71ySfk7AAAAAAAAAADNj4o9OCI3PzszEj1Koe2+R8TUPe/gA70AAAAAAAAAAFMMAz5gOf4+AfoavuVdtL4dGQ09Q5MQvgAAAAAAAAAAza5cvcKGDT42bYE9L/RLvpHlbbzK3B89AAAAAAAAAAAzCgi9sYGVPzJ0vr3KqSe/AVBFOyuh9rwAAAAAAAAAAM0KUD7UJLm85hwRO9wegbnAPym+ipNBugAAgD8AAIA/8+KJvYY0kT7rpPY9T2yNvoXno7wZ/DQ8AAAAAAAAAACa14U99x0PP6gtwb2Znqy+3xamPV0JJr4AAAAAAAAAAMZFNT5Aw44/YGVgPl7WJb8CS2I+1lYCvAAAAAAAAAAAmoP0PLwUAD0gvA+9W71NvgH7rbwJBzY9AAAAAAAAAADNK7C8eO7jPH2LWT5ELCm+g/31Pd4hp7sAAAAAAAAAAGaj+DxIBLw+UuKfvbthrr4qGK+8ddY5vAAAAAAAAAAA4LU2PvSg8D1kT0a+ldYEvvkImr2gC228AAAAAAAAAACawoY8KSQ+uitW5TLRVJ4xPoQxO5Itj7MAAIA/AACAP5oMxz2R+n4+aNXnvUy/jb4U7zS9quqMvQAAAAAAAAAAlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksQSwiGlIwBQ5R0lFKULg=="
  },
  "_last_episode_starts": {
  ":type:": "<class 'numpy.ndarray'>",
@@ -41,17 +41,17 @@
  "_episode_num": 0,
  "use_sde": false,
  "sde_sample_freq": -1,
- "_current_progress_remaining": -0.004885333333333408,
  "_stats_window_size": 100,
  "ep_info_buffer": {
  ":type:": "<class 'collections.deque'>",
- ":serialized:": "gAWV9QsAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHQHMAMJY1YQuMAWyUTTcBjAF0lEdAlsZ2xD9fkXV9lChoBkdAc1G3AVO9FmgHTQIBaAhHQJbIQoQWepZ1fZQoaAZHQHOeDb349HNoB0vhaAhHQJbIfA/LTx51fZQoaAZHQHEuCydFvydoB0vOaAhHQJbIvRgJC0F1fZQoaAZHQHC2fw/gR9RoB0vmaAhHQJbJEzzmOlx1fZQoaAZHQG8IYRujynVoB0voaAhHQJbJsAiml691fZQoaAZHQHJwXFglWwNoB0vaaAhHQJbJ5aC+UQl1fZQoaAZHQHDRpkTYdyVoB0vNaAhHQJbJ5JJ5E+h1fZQoaAZHQG//SntOVPhoB0vXaAhHQJbKRwn6VMV1fZQoaAZHQHJd7bHp8nhoB01WAWgIR0CWyuYNiH6/dX2UKGgGR0BxcpsoDxLCaAdNEgFoCEdAlsti1Z1V53V9lChoBkdAbhxQEZBLPGgHS+VoCEdAlsvyv9tMwnV9lChoBkdAcnYky1uzhWgHTQYBaAhHQJbMdRpDeCV1fZQoaAZHQHGlb6pHZsdoB0vjaAhHQJbMutdRiw11fZQoaAZHQHNPQAlv60poB00HAWgIR0CWzPasp5NXdX2UKGgGR0BwdkIt16mgaAdNFgFoCEdAls0RqO938nV9lChoBkdAcPHBClabF2gHS/poCEdAls2jHwPRRnV9lChoBkdATghHCoCMgmgHS6NoCEdAls5fM4cWCXV9lChoBkdAc0ZnSOR1YGgHS89oCEdAls52QOnVG3V9lChoBkdAcO+sqJ/G2mgHS9hoCEdAls58+aBqbnV9lChoBkdAcqGIRRMviGgHS+FoCEdAls6OoYNy53V9lChoBkdAcavKxcE/0WgHTQkBaAhHQJbQSqDK5kN1fZQoaAZHQHGiuglF+d9oB0veaAhHQJbQUxk/bCd1fZQoaAZHQHFmhInSfDloB0v5aAhHQJbQsCfYjB51fZQoaAZHQHKY7AgxJuloB0vHaAhHQJbQzu7YkE91fZQoaAZHQHDOC39aUzNoB00TAWgIR0CW0SV9nbqRdX2UKGgGR0ByBTOZ9d/saAdL62gIR0CW0UQl8gIQdX2UKGgGR0Bx1UkGA09AaAdLx2gIR0CW0UaUzKs/dX2UKGgGR0Bw/ErEtNBXaAdL0mgIR0CW0f0JF9a2dX2UKGgGR0BzF7EUCaJAaAdLzmgIR0CW0mhdt2s8dX2UKGgGR0ByLykKu0TlaAdLvmgIR0CW0o0TURWcdX2UKGgGR0BvTlDpkf9xaAdL7WgIR0CW0tx8D0UXdX2UKGgGR0ByipR4yGi6aAdNCwFoCEdAltPIvvjOs3V9lChoBkdAbPYeT3Zf2WgHS9toCEdAltQSAH3UQXV9lChoBkdAc166Gxlg+mgHS+doCEdAltR2OEM9bHV9lChoBkdAcoAeIVM232gHS/VoCEdAltS+ws5GSnV9lChoBkdAcm+NAC4jKWgHS+FoCEdAlusN2s7uD3V9lChoBkdAcFB4RVZLZmgHS9NoCEdAlusm0eEIxHV9lChoBkdAcDnuU2UB4mgHS9toCEdAlus/crRSg3V9lChoBkdAcwPXTEzfrWgHS+1oCEdAlutgOrhisnV9lChoBkdAcXFkOZssQWgHS91oCEdAluvMfV7QcHV9lChoBkdAcUh/D+BH1GgHS+NoCEdAluwP3vhIfHV9lChoBkdAclVSg5BC2WgHTVoBaAhHQJbsTJ9y9251fZQoaAZHQHAvcchkiEBoB0vcaAhHQJbtA8QqZtx1fZQoaAZHQHFKcTi83/BoB00OAWgIR0CW7R/MGHHndX2UKGgGR0BzNr4QBgeBaAdL1GgIR0CW7UQyRB/rdX2UKGgGR0BxkCpqASWaaAdLzmgIR0CW7tDAaef7dX2UKGgGR0By2U1+AmReaAdNJAFoCEdAlu8TLGJemnV9lChoBkdAcXKibUgB92gHS/NoCEdAlu8ieNDMNnV9lChoBkdAclAk078vVWgHS/xoCEdAlu+kMb3oLXV9lChoBkdAcT5/FBIFvGgHTQIBaAhHQJbwi4Ds+mp1fZQoaAZHQHQSeXNTtLNoB0vSaAhHQJbwlBlcyFh1fZQoaAZHQHA3v9LpRoBoB0vMaAhHQJbxIPSUkfN1fZQoaAZHQHI1vRJEpiJoB0vjaAhHQJbxHhhpg1F1fZQoaAZHQHBfXxBmf5FoB0vwaAhHQJbxpiKBNEh1fZQoaAZHQHK/MsMAmzBoB0v7aAhHQJbx0bn5i3J1fZQoaAZHQHE4ZdKNAC5oB0vlaAhHQJbyUGA08/51fZQoaAZHQHFrc/pt78hoB0v6aAhHQJbyoFeOXE91fZQoaAZHQHLxsf7rLQpoB0vhaAhHQJbzShg3Lmp1fZQoaAZHQHLPnqVyFPBoB00LAWgIR0CW9Evh60IDdX2UKGgGR0ByXp2HLzPKaAdLz2gIR0CW9XtSydFwdX2UKGgGR0BvsnrKNhmYaAdL8mgIR0CW9Z8c+7lJdX2UKGgGR0BzkYpVjqfOaAdL62gIR0CW9a+4smOVdX2UKGgGR0Bur6dz4k/saAdL9GgIR0CW9gVfeDWcdX2UKGgGR0ByxZyU9pyqaAdL1mgIR0CW936UJOWTdX2UKGgGR0BydcD/2kBTaAdL8GgIR0CW968baRISdX2UKGgGR0BxBpRMvh60aAdL5WgIR0CW9/4bS7XhdX2UKGgGR0BwQ6dqcmShaAdNAQFoCEdAlvg3MyJsPHV9lChoBkdARcmPkq+ajWgHS7VoCEdAlvhMdcSoO3V9lChoBkdAcDX0/4ZdfWgHS9poCEdAlviCRbKRuHV9lChoBkdAcX68Aq/dqWgHS+1oCEdAlvjf5tWMj3V9lChoBkdAcnrbor4FimgHS/1oCEdAlvoAvUSZjXV9lChoBkdAcjlwvg3tKWgHTQgBaAhHQJb7gY1pCa91fZQoaAZHQHDVdpqREF5oB0v9aAhHQJb8VshxHXp1fZQoaAZHQHI1TMqz7dloB0vVaAhHQJb8Ylme18d1fZQoaAZHQHD3je9Ba9toB0vzaAhHQJb9Jnyup0h1fZQoaAZHQHHUiW7e2uxoB0vxaAhHQJb9pda+vhZ1fZQoaAZHQHJItXo1UERoB00QAWgIR0CW/j+FDfFadX2UKGgGR0BwpwmdAgPmaAdL32gIR0CW/okjHGS7dX2UKGgGR0Bu0ATZg5R1aAdL32gIR0CW/riCrcTKdX2UKGgGR0BzAcPSUkfLaAdLy2gIR0CW/uPSUkfLdX2UKGgGR0Bx5C3lS0jUaAdL6GgIR0CW/1PTG5tndX2UKGgGR0BwI83gk1MuaAdL5mgIR0CW/3zNUwSKdX2UKGgGR0BxlViF0xM4aAdL32gIR0CW//IfKZDzdX2UKGgGR0BzhSsQumJnaAdL+WgIR0CXAB8Yht+DdX2UKGgGR0Bxvqcpb2UTaAdL3GgIR0CXAQVDKHO9dX2UKGgG
R0BxkP5GjKxLaAdL9WgIR0CXA4v/zasZdX2UKGgGR0BkYosNDtw8aAdN6ANoCEdAlwQ8nuy/sXV9lChoBkdAcYtLkCFK02gHS9xoCEdAlwSAgcLjP3V9lChoBkdAcd/4mkWRBGgHS9xoCEdAlwUTUVi4KHV9lChoBkdAbaqWvbGm12gHTQYBaAhHQJcFJabF0gd1fZQoaAZHQHJfzbN8ma9oB00OAWgIR0CXBXMefZmJdX2UKGgGR0Bybb3sXzlLaAdL4GgIR0CXBk1iONo8dX2UKGgGR0ByRw3Ns3yaaAdL4mgIR0CXBpLmITGpdX2UKGgGR0ByhLDTBqKxaAdL2mgIR0CXBr21UlzEdX2UKGgGR0BwzshbGFSLaAdL82gIR0CXBsmUW2w3dX2UKGgGR0BxA2c2BJ7LaAdL/WgIR0CXBttUn5SFdX2UKGgGR0Bx3NMGorFwaAdL3WgIR0CXB3lKsdT6dX2UKGgGR0ByaBD7ZWaMaAdL2WgIR0CXB4mqHXVcdX2UKGgGR0ByPjFzdUKiaAdLz2gIR0CXCCBDohZAdWUu"
  },
  "ep_success_buffer": {
  ":type:": "<class 'collections.deque'>",
  ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
  },
- "_n_updates": 368,
  "observation_space": {
  ":type:": "<class 'gymnasium.spaces.box.Box'>",
  ":serialized:": "gAWVdgIAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWCAAAAAAAAAABAQEBAQEBAZRoCIwCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksIhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoESiWCAAAAAAAAAABAQEBAQEBAZRoFUsIhZRoGXSUUpSMBl9zaGFwZZRLCIWUjANsb3eUaBEoliAAAAAAAAAAAAC0wgAAtMIAAKDAAACgwNsPScAAAKDAAAAAgAAAAICUaAtLCIWUaBl0lFKUjARoaWdolGgRKJYgAAAAAAAAAAAAtEIAALRCAACgQAAAoEDbD0lAAACgQAAAgD8AAIA/lGgLSwiFlGgZdJRSlIwIbG93X3JlcHKUjFtbLTkwLiAgICAgICAgLTkwLiAgICAgICAgIC01LiAgICAgICAgIC01LiAgICAgICAgIC0zLjE0MTU5MjcgIC01LgogIC0wLiAgICAgICAgIC0wLiAgICAgICBdlIwJaGlnaF9yZXBylIxTWzkwLiAgICAgICAgOTAuICAgICAgICAgNS4gICAgICAgICA1LiAgICAgICAgIDMuMTQxNTkyNyAgNS4KICAxLiAgICAgICAgIDEuICAgICAgIF2UjApfbnBfcmFuZG9tlE51Yi4=",
 
  ":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==",
  "__module__": "stable_baselines3.common.policies",
  "__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
+ "__init__": "<function ActorCriticPolicy.__init__ at 0x7c149f37e950>",
+ "_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at 0x7c149f37e9e0>",
+ "reset_noise": "<function ActorCriticPolicy.reset_noise at 0x7c149f37ea70>",
+ "_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at 0x7c149f37eb00>",
+ "_build": "<function ActorCriticPolicy._build at 0x7c149f37eb90>",
+ "forward": "<function ActorCriticPolicy.forward at 0x7c149f37ec20>",
+ "extract_features": "<function ActorCriticPolicy.extract_features at 0x7c149f37ecb0>",
+ "_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at 0x7c149f37ed40>",
+ "_predict": "<function ActorCriticPolicy._predict at 0x7c149f37edd0>",
+ "evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at 0x7c149f37ee60>",
+ "get_distribution": "<function ActorCriticPolicy.get_distribution at 0x7c149f37eef0>",
+ "predict_values": "<function ActorCriticPolicy.predict_values at 0x7c149f37ef80>",
  "__abstractmethods__": "frozenset()",
+ "_abc_impl": "<_abc._abc_data object at 0x7c149f3233c0>"
  },
  "verbose": 1,
  "policy_kwargs": {},
+ "num_timesteps": 1015808,
+ "_total_timesteps": 1000000,
  "_num_timesteps_at_start": 0,
  "seed": null,
  "action_noise": null,
+ "start_time": 1723810593727915757,
  "learning_rate": 0.0003,
  "tensorboard_log": null,
  "_last_obs": {
  ":type:": "<class 'numpy.ndarray'>",
+ ":serialized:": "gAWVdQIAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYAAgAAAAAAAK3BDj5815A/ILwLPz816r562N89eoM7PgAAAAAAAAAAWrjcPQcVsj+2IKM+7FLRvkoM/D1OTzo9AAAAAAAAAADm7Gm9aG31PlXQcrr1uCu+bADUvKNfuLwAAAAAAAAAAI2nEr6L0Wg/8MoZvgbzmr6nlMS9LZZePQAAAAAAAAAAABYePe9JIz/i3+G80vehvqqJy7ocDY68AAAAAAAAAABmj2K9IuaBPzIUhDtdjsS+8x8fPI4Tsz0AAAAAAAAAAMCHJT6FIfy70Mq4vRGjiT11wAo+JwCuvAAAgD8AAIA/gF+XvTg/wLtiSZA83POzPGVcEL1dupY9AACAPwAAgD9ARzs+ZepPPgZ+Nr7TazK+zjvfPDpFjLwAAAAAAAAAAJqd8rtI65W63NbBt76CsrKk/KK6ZlbgNgAAgD8AAIA/AF9NvoLVvz9wVCi/rrx3vuE8Vr4SuoG+AAAAAAAAAACmXbC+igEGP2XGTT6RGIK+pihLvVXOxD0AAAAAAAAAANMjGD69JYI/WvIDPsObAr+qjhg+/rVZPAAAAAAAAAAAMx0sPNSR4z2jatS9Axsfvv0hG7xswkw6AAAAAAAAAAASKcW+qu+DP4qZvb2pK5O+Cp+FvvGzrz0AAAAAAAAAADOHJDyxOyI/+mGBvEl8UL7Ffwi8WguaPAAAAAAAAAAAlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksQSwiGlIwBQ5R0lFKULg=="
  },
  "_last_episode_starts": {
  ":type:": "<class 'numpy.ndarray'>",

  "_episode_num": 0,
  "use_sde": false,
  "sde_sample_freq": -1,
+ "_current_progress_remaining": -0.015808000000000044,
  "_stats_window_size": 100,
  "ep_info_buffer": {
  ":type:": "<class 'collections.deque'>",
+ ":serialized:": "gAWVQgwAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHQHAyodhiLEWMAWyUTX8CjAF0lEdAn21ELtu1nnV9lChoBkdAbfDejVQQ+WgHTQUDaAhHQJ9xH1/Ue+51fZQoaAZHQG0Owqy4Wk9oB000AWgIR0Cfhw9oN/e+dX2UKGgGR0Br0azE74i5aAdNLAFoCEdAn4dhgqmTDHV9lChoBkdAbG6y44Ia+GgHTVsBaAhHQJ+HhfhMrVh1fZQoaAZHQHHoS88La25oB02NAWgIR0CfiehWYF7ldX2UKGgGR0BxFdZ3cHnmaAdNQgFoCEdAn4oLwF1SwXV9lChoBkdAbIIToMa0hWgHTTkBaAhHQJ+KFmh/RVp1fZQoaAZHQGpUEqc3EQ5oB00dAmgIR0Cfionyup0fdX2UKGgGR0BtROGucMEzaAdNYwJoCEdAn4qcAWBSUHV9lChoBkdAcQIU/OdGzGgHTWUBaAhHQJ+LYD6nBLx1fZQoaAZHQHCCHNC7btZoB01eAWgIR0CfjGJ66asqdX2UKGgGR0Bs2QvexfOVaAdNdAFoCEdAn48oOpbUw3V9lChoBkdAcSve1rqMWGgHTVABaAhHQJ+PMcbR4Ql1fZQoaAZHQHFNnfhuO0doB02AAWgIR0Cfj5tvGZNPdX2UKGgGR0BxRPWVeKKpaAdNHgFoCEdAn5ButbLU1HV9lChoBkdAbM6/EfkmyGgHTUQBaAhHQJ+TZTIeYD11fZQoaAZHQHDv4x59mYloB01tAWgIR0CflQukDZDidX2UKGgGR0BrwjFjurp8aAdNeAFoCEdAn5U4tL+PzXV9lChoBkdAcllRyOq//WgHTSUBaAhHQJ+VOIfr8ix1fZQoaAZHQGRXoRqXWvtoB03oA2gIR0CflZm9xp+MdX2UKGgGR0Bw2mYx+KCQaAdNRgFoCEdAn5XlqagElnV9lChoBkdAcEokqMFUymgHTT8BaAhHQJ+WMz41xbV1fZQoaAZHQG2+d1dPci5oB00sAWgIR0CflnHymQ8wdX2UKGgGR0BxzW2TgVGkaAdNVQFoCEdAn5aAoCuEEnV9lChoBkdAb1mj2SMcZWgHTVUBaAhHQJ+WiS0Sh8J1fZQoaAZHQG5EvEjxCppoB009AWgIR0Cfl8D+zdDZdX2UKGgGR0BxQrapPykLaAdNIgFoCEdAn5lBHww0wnV9lChoBkdAcE1t4RmK7GgHTUABaAhHQJ+auDRMN+d1fZQoaAZHQHCNlARkEs9oB02FAWgIR0CfnM/kNnXedX2UKGgGR0BtY43PzFuOaAdNZAFoCEdAn5z2kSElFHV9lChoBkdAb/zr9ETg22gHTUABaAhHQJ+eh4MWoFV1fZQoaAZHQEmDgeii7CloB00SAWgIR0Cfn0NCJGe+dX2UKGgGR0BtRoSi/O+qaAdNNgFoCEdAn5+3sXzlLnV9lChoBkdAchhyZ8a4t2gHTTQBaAhHQJ+f68IzFdd1fZQoaAZHQHFlVbaAWi1oB009AWgIR0Cfn/UWl/H6dX2UKGgGR0BudEWZZ0SzaAdNLgFoCEdAn6B6sdT5wnV9lChoBkdAV2AR/ViF02gHTegDaAhHQJ+geAUcn3N1fZQoaAZHQG89jyWiUPhoB00tAWgIR0CfoIoN/e+FdX2UKGgGR0Bwc8ZWJaaDaAdNNAFoCEdAn6C2knCwbHV9lChoBkdAcOt0LMLWqmgHTRoBaAhHQJ+hPrZ8KHB1fZQoaAZHQHCDgSamXPZoB017AWgIR0CfoZNayKNydX2UKGgGR0BwBfXWe6I4aAdNTwFoCEdAn6RCFfzBh3V9lChoBkdAbrURradtmGgHTSwBaAhHQJ+kgTWXkYJ1fZQoaAZHQHDvF6u4gA9oB004AWgIR0CfpxOpKjBVdX2UKGgGR0Bw6DPSlWOqaAdNMQFoCEdAn6jNYGMXJ3V9lChoBkdAco2fsNUfgmgHTR8BaAhHQJ+prByjpLV1fZQoaAZHQHEGQa3qiXZoB01UAmgIR0CfqpTKkl/pdX2UKGgGR0Bxhp61LJ0XaAdNQQFoCEdAn6sSl3yI6HV9lChoBkdAb8GkB0ZFX2gHTU0BaAhHQJ/APhrFfiR1fZQoaAZHQG2c+tjkMkRoB00+AWgIR0CfwKcH4XXRdX2UKGgGR0BwJeq0dBBzaAdNIAFoCEdAn8DSm65G0HV9lChoBkdAb06CNCJGfGgHTTUBaAhHQJ/BJqnFYMh1fZQoaAZHQHBCsJ+lTFVoB01UAWgIR0CfwU7pmmLtdX2UKGgGR0Bwg4TZg5R1aAdNVwFoCEdAn8FaYmb9ZXV9lChoBkdAcVMJg9eQdWgHTXEBaAhHQJ/BoH4XXRR1fZQoaAZHQG5eKv3ai9JoB01mAWgIR0Cfwct4iX6ZdX2UKGgGR0BwvIgieNDMaAdNVAFoCEdAn8W7QokRjHV9lChoBkdAM9MzZYgaFWgHS9NoCEdAn8ZQksz2vnV9lChoBkdAcbQbDdgv12gHTSsBaAhHQJ/G/8ZUDMh1fZQoaAZHQHGBcQ/X5FhoB00uAWgIR0CfySqO938odX2UKGgGR0Bxw44BFNL2aAdN0AFoCEdAn8qpmh/RV3V9lChoBkdAcPgy3Td+HGgHTUkBaAhHQJ/LGlchTwV1fZQoaAZHQHLaqT4cm0FoB00xAWgIR0Cfy8Majvd/dX2UKGgGR0Bw+/C0ngHeaAdNIAFoCEdAn8weLWI42nV9lChoBkdAb10EcKgIyGgHTUsBaAhHQJ/NTdnCfpV1fZQoaAZHQG73aiKziS9oB01FAWgIR0CfzcgSeyzHdX2UKGgGR0BxRjR1HOKPaAdNogFoCEdAn83GjGkvb3V9lChoBkdAcCVb0OEuhGgHTVMBaAhHQJ/N0ZNwiq11fZQoaAZHQG33SwwCbMJoB01UAWgIR0CfzuWDHwPRdX2UKGgGR0BssAzi0fHQaAdNbAFoCEdAn88XW8RL9XV9lChoBkdAcAdacZtNz2gHTVEBaAhHQJ/Tkqd6LO11fZQoaAZHQHCBPDgqEvloB00/AWgIR0Cf1DnnuAqedX2UKGgGR0Bw3aliz9jxaAdNDwFoCEdAn9RpPIn0CnV9lChoBkdAcjAJRO1v22gHTV0BaAhHQJ/UyX+l0o11fZQoaAZHQG/lebNKRMhoB00MAmgIR0Cf1g1U2kzodX2UKGgGR0Bw2UqtozvaaAdNLAFoCEdAn9bms/6frnV9lChoBkdAVPtzU7Sy+2gHTegDaAhHQJ/W84ffXPJ1fZQoaAZHQGv39Sde6ZpoB00gAWgIR0Cf2Ket0V8DdX2UKGgGR0Bs/2FpPAO8aAdNSAFoCEdAn9jVlsguAnV9lChoBkdAcV4BJqZc9mgHTV4BaAhHQJ/ZFpnHvMN1fZQoaAZHQHDNt3bEgntoB00mAWgIR0Cf2Up4bCJodX2UKGgGR0ByQZKzzErHaAdNUAFoCEdAn9lmyC4Bm3V9lChoBkdAcJY34Kx9omgHTTQBaAhHQJ/ZuYhMajx1
fZQoaAZHQHLc6VII4VBoB00gAWgIR0Cf2hwCbMHKdX2UKGgGR0BwPlo0ygwoaAdNcgFoCEdAn9uQXIlt0nV9lChoBkdARgcophF3IWgHS/FoCEdAn91Eh/y5JHV9lChoBkdAcGG5+YtxuWgHTZUBaAhHQJ/eZkwvg3t1fZQoaAZHQHJkEf9xZMdoB01UAWgIR0Cf4SNA1NxmdX2UKGgGR0BxKZWZJCjUaAdNXAFoCEdAn+NAUUO/cnV9lChoBkdAbX1yDIzWPWgHTSwBaAhHQJ/jfmT1TR91fZQoaAZHQG+4HE/B3zNoB002AWgIR0Cf5C98JD3NdX2UKGgGR0Bt5goE0SAZaAdNXAFoCEdAn+T7iADq4nV9lChoBkdAcEqMvysjmmgHTZoBaAhHQJ/l9Gx2SuB1fZQoaAZHQHE64aYNRWNoB00hAWgIR0Cf5hndO6/ZdX2UKGgGR0BsgqMzdk8SaAdNKgFoCEdAn+YyEDhcaHV9lChoBkdAcSuuivgWJ2gHTSwBaAhHQJ/m9hVlwtJ1fZQoaAZHQHFWI1He7+VoB01XAWgIR0Cf5/g9eQdTdX2UKGgGR0Bym06Kcd5qaAdNSQFoCEdAn+gqAWi1zHV9lChoBkdAbXL4JNTLn2gHTUIBaAhHQJ/ocD7qIJt1ZS4="
  },
  "ep_success_buffer": {
  ":type:": "<class 'collections.deque'>",
  ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
  },
+ "_n_updates": 248,
  "observation_space": {
  ":type:": "<class 'gymnasium.spaces.box.Box'>",
  ":serialized:": "gAWVdgIAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWCAAAAAAAAAABAQEBAQEBAZRoCIwCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksIhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoESiWCAAAAAAAAAABAQEBAQEBAZRoFUsIhZRoGXSUUpSMBl9zaGFwZZRLCIWUjANsb3eUaBEoliAAAAAAAAAAAAC0wgAAtMIAAKDAAACgwNsPScAAAKDAAAAAgAAAAICUaAtLCIWUaBl0lFKUjARoaWdolGgRKJYgAAAAAAAAAAAAtEIAALRCAACgQAAAoEDbD0lAAACgQAAAgD8AAIA/lGgLSwiFlGgZdJRSlIwIbG93X3JlcHKUjFtbLTkwLiAgICAgICAgLTkwLiAgICAgICAgIC01LiAgICAgICAgIC01LiAgICAgICAgIC0zLjE0MTU5MjcgIC01LgogIC0wLiAgICAgICAgIC0wLiAgICAgICBdlIwJaGlnaF9yZXBylIxTWzkwLiAgICAgICAgOTAuICAgICAgICAgNS4gICAgICAgICA1LiAgICAgICAgIDMuMTQxNTkyNyAgNS4KICAxLiAgICAgICAgIDEuICAgICAgIF2UjApfbnBfcmFuZG9tlE51Yi4=",
ppo-LunarLander-v2/policy.optimizer.pth CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:08707b94c088a536ccc8e026c402c211ef7e2d4230e2ae63ca79071120156e20
+ oid sha256:5f82eab28f6669cbd6ff2e3fecaa8201a118092e63888a1c4aab5bbf62d84b62
  size 88362
ppo-LunarLander-v2/policy.pth CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:3d4d8070231ea74ac297f2b66935971da4760180dc730f11118ff96f6df6c7a9
+ oid sha256:9426120346474a1068de53c78b6163dfa1f40338a0b19e51f6b3de93c780e00c
  size 43762
ppo-LunarLander-v2/system_info.txt CHANGED
@@ -1,9 +1,9 @@
- - OS: Linux-6.1.58+-x86_64-with-glibc2.35 # 1 SMP PREEMPT_DYNAMIC Sat Nov 18 15:31:17 UTC 2023
+ - OS: Linux-6.1.85+-x86_64-with-glibc2.35 # 1 SMP PREEMPT_DYNAMIC Thu Jun 27 21:05:47 UTC 2024
  - Python: 3.10.12
  - Stable-Baselines3: 2.0.0a5
- - PyTorch: 2.1.0+cu121
+ - PyTorch: 2.3.1+cu121
  - GPU Enabled: True
- - Numpy: 1.25.2
+ - Numpy: 1.26.4
  - Cloudpickle: 2.2.1
  - Gymnasium: 0.28.1
  - OpenAI Gym: 0.25.2
replay.mp4 CHANGED
Binary files a/replay.mp4 and b/replay.mp4 differ
 
results.json CHANGED
@@ -1 +1 @@
- {"mean_reward": 277.7383991, "std_reward": 31.21121307452956, "is_deterministic": true, "n_eval_episodes": 10, "eval_datetime": "2024-03-07T18:20:24.786567"}
+ {"mean_reward": 246.45167298536776, "std_reward": 27.902909772081596, "is_deterministic": true, "n_eval_episodes": 10, "eval_datetime": "2024-08-16T12:39:24.836715"}