Initial commit
Files changed:
- README.md +1 -1
- a2c-PandaReachDense-v2.zip +2 -2
- a2c-PandaReachDense-v2/data +18 -18
- a2c-PandaReachDense-v2/policy.optimizer.pth +1 -1
- a2c-PandaReachDense-v2/policy.pth +1 -1
- config.json +1 -1
- replay.mp4 +0 -0
- results.json +1 -1
- vec_normalize.pkl +1 -1
README.md
CHANGED
@@ -16,7 +16,7 @@ model-index:
         type: PandaReachDense-v2
       metrics:
       - type: mean_reward
-        value: -
+        value: -0.33 +/- 0.15
         name: mean_reward
         verified: false
 ---
a2c-PandaReachDense-v2.zip
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:156761b2fc95c5faeb94a888bd8c9cf375f95fc2f21a0d251e9fd227660d0d59
+size 108238
a2c-PandaReachDense-v2/data
CHANGED
@@ -4,9 +4,9 @@
":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=",
"__module__": "stable_baselines3.common.policies",
"__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
-"__init__": "<function MultiInputActorCriticPolicy.__init__ at
"__abstractmethods__": "frozenset()",
-"_abc_impl": "<_abc._abc_data object at
},
"verbose": 1,
"policy_kwargs": {

@@ -40,13 +40,13 @@
"bounded_above": "[ True True True]",
"_np_random": null
},
-"n_envs":
"num_timesteps": 2000000,
"_total_timesteps": 2000000,
"_num_timesteps_at_start": 0,
"seed": null,
"action_noise": null,
-"start_time":
"learning_rate": 0.0001,
"tensorboard_log": null,
"lr_schedule": {

@@ -55,21 +55,21 @@
},
"_last_obs": {
":type:": "<class 'collections.OrderedDict'>",
-":serialized:": "
-"achieved_goal": "[[0.
-"desired_goal": "[[
-"observation": "[[
},
"_last_episode_starts": {
":type:": "<class 'numpy.ndarray'>",
-":serialized:": "
},
"_last_original_obs": {
":type:": "<class 'collections.OrderedDict'>",
-":serialized:": "
-
"achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]
-"desired_goal": "[[
-
"observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]
},
"_episode_num": 0,
"use_sde": false,

@@ -77,17 +77,17 @@
"_current_progress_remaining": 0.0,
"ep_info_buffer": {
":type:": "<class 'collections.deque'>",
-
":serialized:": "gAWVHRAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////
},
"ep_success_buffer": {
":type:": "<class 'collections.deque'>",
":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
},
-"_n_updates":
-"n_steps":
-"gamma": 0.
"gae_lambda": 0.98,
-"ent_coef": 0
"vf_coef": 0.5,
"max_grad_norm": 0.5,
"normalize_advantage": false

":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=",
"__module__": "stable_baselines3.common.policies",
"__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
+"__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x7fce7b570ee0>",
"__abstractmethods__": "frozenset()",
+"_abc_impl": "<_abc._abc_data object at 0x7fce7b56fe40>"
},
"verbose": 1,
"policy_kwargs": {

"bounded_above": "[ True True True]",
"_np_random": null
},
+"n_envs": 4,
"num_timesteps": 2000000,
"_total_timesteps": 2000000,
"_num_timesteps_at_start": 0,
"seed": null,
"action_noise": null,
+"start_time": 1680431221493549003,
"learning_rate": 0.0001,
"tensorboard_log": null,
"lr_schedule": {

},
"_last_obs": {
":type:": "<class 'collections.OrderedDict'>",
+
":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAAivHbPiHsZDuahgw/ivHbPiHsZDuahgw/ivHbPiHsZDuahgw/ivHbPiHsZDuahgw/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAA6YauP2Alwz9Xu5C+hBbHP5IKsD/7upq6YEFwPkIBJz/WuY2/UjBMPk/FgL+jPdq+lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAACK8ds+IexkO5qGDD9Ie/O5us9lutFJEDyK8ds+IexkO5qGDD9Ie/O5us9lutFJEDyK8ds+IexkO5qGDD9Ie/O5us9lutFJEDyK8ds+IexkO5qGDD9Ie/O5us9lutFJEDyUaA5LBEsGhpRoEnSUUpR1Lg==",
+
"achieved_goal": "[[0.42957717 0.00349308 0.54892886]\n [0.42957717 0.00349308 0.54892886]\n [0.42957717 0.00349308 0.54892886]\n [0.42957717 0.00349308 0.54892886]]",
+
"desired_goal": "[[ 1.3634921e+00 1.5245781e+00 -2.8267929e-01]\n [ 1.5553746e+00 1.3753226e+00 -1.1804992e-03]\n [ 2.3462439e-01 6.5236294e-01 -1.1072338e+00]\n [ 1.9940308e-01 -1.0060214e+00 -4.2625150e-01]]",
+
"observation": "[[ 4.2957717e-01 3.4930783e-03 5.4892886e-01 -4.6440423e-04\n -8.7666104e-04 8.8066617e-03]\n [ 4.2957717e-01 3.4930783e-03 5.4892886e-01 -4.6440423e-04\n -8.7666104e-04 8.8066617e-03]\n [ 4.2957717e-01 3.4930783e-03 5.4892886e-01 -4.6440423e-04\n -8.7666104e-04 8.8066617e-03]\n [ 4.2957717e-01 3.4930783e-03 5.4892886e-01 -4.6440423e-04\n -8.7666104e-04 8.8066617e-03]]"
},
"_last_episode_starts": {
":type:": "<class 'numpy.ndarray'>",
+
":serialized:": "gAWVdwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYEAAAAAAAAAAEBAQGUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKULg=="
},
"_last_original_obs": {
":type:": "<class 'collections.OrderedDict'>",
+
":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAMIvmvR3L371DmYs+W9DUvTIHeT37TrI9F+9Au8CXqL3Lj0I+wlCiPS+bAzvkGFA+lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAACUaA5LBEsGhpRoEnSUUpR1Lg==",
+
"achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]]",
+
"desired_goal": "[[-0.11257017 -0.10927413 0.27265367]\n [-0.10391303 0.06079788 0.08706471]\n [-0.00294394 -0.08232069 0.19000165]\n [ 0.0792556 0.00200815 0.20321995]]",
+
"observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"
},
"_episode_num": 0,
"use_sde": false,

"_current_progress_remaining": 0.0,
"ep_info_buffer": {
":type:": "<class 'collections.deque'>",
+
":serialized:": "gAWVHRAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIj/tW68Tl17+UhpRSlIwBbJRLMowBdJRHQK1hvwOvt+l1fZQoaAZoCWgPQwjxK9ZwkXvdv5SGlFKUaBVLMmgWR0CtYaOtfXwtdX2UKGgGaAloD0MIzywJUFPLxL+UhpRSlGgVSzJoFkdArWGHNiYsunV9lChoBmgJaA9DCADhQ4mWPOK/lIaUUpRoFUsyaBZHQK1haumJm/Z1fZQoaAZoCWgPQwh2Ul+WdmrIv5SGlFKUaBVLMmgWR0CtYnmNipeedX2UKGgGaAloD0MIw5rKorCL2r+UhpRSlGgVSzJoFkdArWJeOOsDGXV9lChoBmgJaA9DCLXeb7Tjhs+/lIaUUpRoFUsyaBZHQK1iQcwQDmt1fZQoaAZoCWgPQwhwXwfOGVHWv5SGlFKUaBVLMmgWR0CtYiUdilSCdX2UKGgGaAloD0MITwXc8/xp1L+UhpRSlGgVSzJoFkdArWM0580DU3V9lChoBmgJaA9DCNc07zhFR86/lIaUUpRoFUsyaBZHQK1jGajvd/J1fZQoaAZoCWgPQwj8q8d9q3XSv5SGlFKUaBVLMmgWR0CtYv00m+j/dX2UKGgGaAloD0MIbsDnhxHC1r+UhpRSlGgVSzJoFkdArWLgkeIVM3V9lChoBmgJaA9DCNpWs874vtO/lIaUUpRoFUsyaBZHQK1j8gqVhTh1fZQoaAZoCWgPQwiDp5Ar9SzZv5SGlFKUaBVLMmgWR0CtY9bDEWIodX2UKGgGaAloD0MIjpPCvMeZ0r+UhpRSlGgVSzJoFkdArWO6OvMbFXV9lChoBmgJaA9DCHHIBtLFpt+/lIaUUpRoFUsyaBZHQK1jnZV4oql1fZQoaAZoCWgPQwiCOuXRjbDWv5SGlFKUaBVLMmgWR0CtZK9t/FzddX2UKGgGaAloD0MI6lp7n6pCyb+UhpRSlGgVSzJoFkdArWSUI/qxDHV9lChoBmgJaA9DCNldoKTAAs6/lIaUUpRoFUsyaBZHQK1kd6Rhc7h1fZQoaAZoCWgPQwi366UpApzAv5SGlFKUaBVLMmgWR0CtZFr9ETg3dX2UKGgGaAloD0MIceZXc4Bg2L+UhpRSlGgVSzJoFkdArWVpxLkCFXV9lChoBmgJaA9DCOusFthjIsG/lIaUUpRoFUsyaBZHQK1lTnnuAqd1fZQoaAZoCWgPQwj68CxBRkDLv5SGlFKUaBVLMmgWR0CtZTIClrM1dX2UKGgGaAloD0MIf9sTJLa70L+UhpRSlGgVSzJoFkdArWUVVYISlHV9lChoBmgJaA9DCIvAWN/A5L6/lIaUUpRoFUsyaBZHQK1mNNzr/sF1fZQoaAZoCWgPQwhuxJPdzGjiv5SGlFKUaBVLMmgWR0CtZhoH1OCYdX2UKGgGaAloD0MI3/3xXrUy2b+UhpRSlGgVSzJoFkdArWX9k6Lfk3V9lChoBmgJaA9DCOIC0Chd+su/lIaUUpRoFUsyaBZHQK1l4PDHfdh1fZQoaAZoCWgPQwiJXkax3NLMv5SGlFKUaBVLMmgWR0CtZve4kNWmdX2UKGgGaAloD0MI3Xh3ZKw227+UhpRSlGgVSzJoFkdArWbcXLvCuXV9lChoBmgJaA9DCIf7yK1Jt9i/lIaUUpRoFUsyaBZHQK1mv+irT6V1fZQoaAZoCWgPQwimSL4SSIncv5SGlFKUaBVLMmgWR0CtZqNDMNc4dX2UKGgGaAloD0MIat5xio7kwL+UhpRSlGgVSzJoFkdArWe3P1L8JnV9lChoBmgJaA9DCC0GD9O+udO/lIaUUpRoFUsyaBZHQK1nm/oJRfp1fZQoaAZoCWgPQwgjaTf6mA/av5SGlFKUaBVLMmgWR0CtZ3+F10T2dX2UKGgGaAloD0MI4biMmxpo0r+UhpRSlGgVSzJoFkdArWdi5byH23V9lChoBmgJaA9DCMTuO4bHfta/lIaUUpRoFUsyaBZHQK1oj90ihWZ1fZQoaAZoCWgPQwjQRxlxAWjXv5SGlFKUaBVLMmgWR0CtaHSIHkcTdX2UKGgGaAloD0MIWd/A5EaR3b+UhpRSlGgVSzJoFkdArWhYEyLyc3V9lChoBmgJaA9DCK2kFd9Q+Mq/lIaUUpRoFUsyaBZHQK1oO3G4qgB1fZQoaAZoCWgPQwiAgSBAho7Hv5SGlFKUaBVLMmgWR0CtaVJmmLtNdX2UKGgGaAloD0MIPZl/9E2azL+UhpRSlGgVSzJoFkdArWk3E/B3zXV9lChoBmgJaA9DCMhESrN5HNC/lIaUUpRoFUsyaBZHQK1pGqLjxTd1fZQoaAZoCWgPQwjYRjzZzYzcv5SGlFKUaBVLMmgWR0CtaP34Kx9odX2UKGgGaAloD0MIokYhyaze0b+UhpRSlGgVSzJoFkdArWoPcQAdXHV9lChoBmgJaA9DCFCKVu4FZti/lIaUUpRoFUsyaBZHQK1p9Bk7Oml1fZQoaAZoCWgPQwh3EDtT6LzIv5SGlFKUaBVLMmgWR0Ctadeii7CjdX2UKGgGaAloD0MIR3U6kPXU0b+UhpRSlGgVSzJoFkdArWm6/GlyinV9lChoBmgJaA9DCJlH/mDgucO/lIaUUpRoFUsyaBZHQK1qzp4bCJp1fZQoaAZoCWgPQwh7Lei9MQTVv5SGlFKUaBVLMmgWR0CtarNIK+i8dX2UKGgGaAloD0MIo1cDlIYax7+UhpRSlGgVSzJoFkdArWqWzyBkJHV9lChoBmgJaA9DCDJyFva0w8e/lIaUUpRoFUsyaBZHQK1qeixFAml1fZQoaAZoCWgPQwiKVYMwt3vbv5SGlFKUaBVLMmgWR0Cta4fP5YYBdX2UKGgGaAloD0MI8MLWbOUl1b+UhpRSlGgVSzJoFkdArWtsf1YhdXV9lChoBmgJaA9DCCTwh5//HsC/lIaUUpRoFUsyaBZHQK1rT/d69kB1fZQoaAZoCWgPQwgWMlcG1Qbbv5SGlFKUaBVLMmgWR0CtazNQKrq/dX2UKGgGaAloD0MIZyYYzjXM2r+UhpRSlGgVSzJoFkdArWxCxiXpn3V9lChoBmgJaA9DCOF/K9mxEdK/lIaUUpRoFUsyaBZHQK1sJ3Tuv2Z1fZQoaAZoCWgPQwid8uhGWFTSv5SGlFKUaBVLMmgWR0CtbAsCkoF3dX2UKGgGaAloD0MIxEFClC9o3L+UhpRSlGgVSzJoFkdArWvuWv8qF3V9lChoBmgJaA9DCJz6QPLOod+/lIaUUpRoFUsyaBZHQK1tCjQAuI11fZQoaAZoCWgPQwiiDcAGRIjQv5SGlFKUaBVLMmgWR0CtbO7aRISUdX2UKGgGaAloD0MIv2VOl8XE4L+UhpRSlGgVSzJoFkdArWzSa1Cw8n
V9lChoBmgJaA9DCC/6CtKMRd6/lIaUUpRoFUsyaBZHQK1staTwDvF1fZQoaAZoCWgPQwhOY3st6L3Uv5SGlFKUaBVLMmgWR0CtbcRl6JIldX2UKGgGaAloD0MI/rW8cr1t0r+UhpRSlGgVSzJoFkdArW2pGKAJ9nV9lChoBmgJaA9DCIRkARO4deG/lIaUUpRoFUsyaBZHQK1tjJlrdnF1fZQoaAZoCWgPQwhNFYxK6gTZv5SGlFKUaBVLMmgWR0CtbW/29L6DdX2UKGgGaAloD0MI/I123PC717+UhpRSlGgVSzJoFkdArW5/fl6qsHV9lChoBmgJaA9DCI+qJoi6D86/lIaUUpRoFUsyaBZHQK1uZCKrJbN1fZQoaAZoCWgPQwj/dtmvO93Tv5SGlFKUaBVLMmgWR0Ctbket8uzydX2UKGgGaAloD0MIuyU5YFeT1r+UhpRSlGgVSzJoFkdArW4rDZUT+XV9lChoBmgJaA9DCP0RhgFLruC/lIaUUpRoFUsyaBZHQK1vOcS5AhV1fZQoaAZoCWgPQwid1QJ7TKTMv5SGlFKUaBVLMmgWR0Ctbx6AOJ+EdX2UKGgGaAloD0MIDJV/La9c0L+UhpRSlGgVSzJoFkdArW8CAe7tiXV9lChoBmgJaA9DCFDIztvY7Ne/lIaUUpRoFUsyaBZHQK1u5Vd5Y5l1fZQoaAZoCWgPQwgyWdx/ZDrTv5SGlFKUaBVLMmgWR0Ctb/T7/GVBdX2UKGgGaAloD0MIrkfhehSu0b+UhpRSlGgVSzJoFkdArW/ZnSOR1XV9lChoBmgJaA9DCPEQxk/j3sy/lIaUUpRoFUsyaBZHQK1vvS1maph1fZQoaAZoCWgPQwgw9IjRcwvXv5SGlFKUaBVLMmgWR0Ctb6CIUJv6dX2UKGgGaAloD0MIDVNb6iCv2r+UhpRSlGgVSzJoFkdArXCxY5ksjHV9lChoBmgJaA9DCIPCoEyjycm/lIaUUpRoFUsyaBZHQK1wlgCOmzl1fZQoaAZoCWgPQwhWSs/0EmPcv5SGlFKUaBVLMmgWR0CtcHmLDQ7cdX2UKGgGaAloD0MI9nzNctno0L+UhpRSlGgVSzJoFkdArXBc6FM7EHV9lChoBmgJaA9DCBx4tdyZCde/lIaUUpRoFUsyaBZHQK1xa14Pf9B1fZQoaAZoCWgPQwipvvOLEvTSv5SGlFKUaBVLMmgWR0CtcVAE+xGEdX2UKGgGaAloD0MIs0XSbvSx4L+UhpRSlGgVSzJoFkdArXEzeGfwqnV9lChoBmgJaA9DCC4gtB6+TNK/lIaUUpRoFUsyaBZHQK1xFtrKvFF1fZQoaAZoCWgPQwjoE3mSdM3Av5SGlFKUaBVLMmgWR0Ctcie1jRUndX2UKGgGaAloD0MIIQN5dvnWyb+UhpRSlGgVSzJoFkdArXIMZR8+inV9lChoBmgJaA9DCMMoCB7f3sO/lIaUUpRoFUsyaBZHQK1x7+2mYSh1fZQoaAZoCWgPQwi5jJsaaD7Vv5SGlFKUaBVLMmgWR0CtcdNAC4jKdX2UKGgGaAloD0MI/BwfLc4Y3b+UhpRSlGgVSzJoFkdArXLnazu4PXV9lChoBmgJaA9DCF9/Ep87wdq/lIaUUpRoFUsyaBZHQK1yzBInSfF1fZQoaAZoCWgPQwi+3CdHAaLGv5SGlFKUaBVLMmgWR0Ctcq+NtIkJdX2UKGgGaAloD0MIai+i7Zi64L+UhpRSlGgVSzJoFkdArXKS6BiCrnV9lChoBmgJaA9DCNCc9SnHZNu/lIaUUpRoFUsyaBZHQK1zpPoFFDx1fZQoaAZoCWgPQwhAM4gP7PjYv5SGlFKUaBVLMmgWR0Ctc4mkN4JNdX2UKGgGaAloD0MIWg2Jeyx9yr+UhpRSlGgVSzJoFkdArXNtHWjGk3V9lChoBmgJaA9DCE9Y4gFlU9y/lIaUUpRoFUsyaBZHQK1zUHoHLRt1ZS4="
},
"ep_success_buffer": {
":type:": "<class 'collections.deque'>",
":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
},
+"_n_updates": 100000,
+"n_steps": 5,
+"gamma": 0.9,
"gae_lambda": 0.98,
+"ent_coef": 0,
"vf_coef": 0.5,
"max_grad_norm": 0.5,
"normalize_advantage": false
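The updated data blob pins down this revision's A2C hyperparameters: 4 parallel envs (the previous revision's config.json recorded 8), n_steps=5, gamma=0.9, gae_lambda=0.98, ent_coef=0, vf_coef=0.5, max_grad_norm=0.5, a constant learning rate of 1e-4 and 2,000,000 timesteps, which is consistent with the recorded "_n_updates": 100000 (2,000,000 / (5 steps x 4 envs)). Below is a minimal training sketch that would produce this configuration; the original script is not part of the commit, and the VecNormalize arguments in particular are assumptions.

```python
# Hedged reconstruction of the run recorded in a2c-PandaReachDense-v2/data.
# Hyperparameters come from the new blob; the VecNormalize settings and the
# script layout are assumptions, not something stated in this commit.
import gym
import panda_gym  # noqa: F401 -- registers PandaReachDense-v2 (Gym 0.21 API)

from stable_baselines3 import A2C
from stable_baselines3.common.env_util import make_vec_env
from stable_baselines3.common.vec_env import VecNormalize

env = make_vec_env("PandaReachDense-v2", n_envs=4)        # "n_envs": 4
env = VecNormalize(env, norm_obs=True, norm_reward=True)  # vec_normalize.pkl suggests this wrapper

model = A2C(
    "MultiInputPolicy",  # serialized as MultiInputActorCriticPolicy in the blob
    env,
    learning_rate=0.0001,
    n_steps=5,
    gamma=0.9,
    gae_lambda=0.98,
    ent_coef=0.0,
    vf_coef=0.5,
    max_grad_norm=0.5,
    verbose=1,
)
model.learn(total_timesteps=2_000_000)  # matches num_timesteps / _total_timesteps

model.save("a2c-PandaReachDense-v2")
env.save("vec_normalize.pkl")
```

No optimizer arguments are passed above because the recorded policy_kwargs (RMSprop with alpha=0.99, eps=1e-05, weight_decay=0) are simply Stable-Baselines3's A2C defaults.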
a2c-PandaReachDense-v2/policy.optimizer.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:44406fc5bc587316bdfd0c1a0e8eaffa272381280f885ed474f10b466ea633d8
 size 44734
a2c-PandaReachDense-v2/policy.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:1700a2ede18d4a1399abb80cb086d81130768cbc27ee57a8c16f87e9e01fd1f6
 size 46014
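policy.pth and policy.optimizer.pth hold the policy weights and the optimizer state as PyTorch files tracked through Git LFS, which is why only the pointer lines change in this diff. Once the actual blobs are pulled, they can be inspected directly; a small sketch, assuming PyTorch 2.0.0 as listed in config.json's system_info and that the file is a plain state dict as Stable-Baselines3 saves it:

```python
# Sketch: list the tensors stored in the updated policy checkpoint.
# This needs the real LFS blob, not the pointer file shown in the diff above.
import torch

state_dict = torch.load("a2c-PandaReachDense-v2/policy.pth", map_location="cpu")
for name, tensor in state_dict.items():
    print(name, tuple(tensor.shape))
```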
config.json
CHANGED
@@ -1 +1 @@
-
{"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=", "__module__": "stable_baselines3.common.policies", "__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x7f7ea21b74c0>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x7f7ea21b1d80>"}, "verbose": 1, "policy_kwargs": {":type:": "<class 'dict'>", ":serialized:": "gAWVgQAAAAAAAAB9lCiMD29wdGltaXplcl9jbGFzc5SME3RvcmNoLm9wdGltLnJtc3Byb3CUjAdSTVNwcm9wlJOUjBBvcHRpbWl6ZXJfa3dhcmdzlH2UKIwFYWxwaGGURz/vrhR64UeujANlcHOURz7k+LWI42jxjAx3ZWlnaHRfZGVjYXmUSwB1dS4=", "optimizer_class": "<class 'torch.optim.rmsprop.RMSprop'>", "optimizer_kwargs": {"alpha": 0.99, "eps": 1e-05, "weight_decay": 0}}, "observation_space": {":type:": "<class 'gym.spaces.dict.Dict'>", ":serialized:": 
"gAWVUgMAAAAAAACMD2d5bS5zcGFjZXMuZGljdJSMBERpY3SUk5QpgZR9lCiMBnNwYWNlc5SMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwOZ3ltLnNwYWNlcy5ib3iUjANCb3iUk5QpgZR9lCiMBWR0eXBllIwFbnVtcHmUaBCTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYowGX3NoYXBllEsDhZSMA2xvd5SMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFUsDhZSMAUOUdJRSlIwEaGlnaJRoHSiWDAAAAAAAAAAAACBBAAAgQQAAIEGUaBVLA4WUaCB0lFKUjA1ib3VuZGVkX2JlbG93lGgdKJYDAAAAAAAAAAEBAZRoEowCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksDhZRoIHSUUpSMDWJvdW5kZWRfYWJvdmWUaB0olgMAAAAAAAAAAQEBlGgsSwOFlGggdJRSlIwKX25wX3JhbmRvbZROdWKMDGRlc2lyZWRfZ29hbJRoDSmBlH2UKGgQaBVoGEsDhZRoGmgdKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFUsDhZRoIHSUUpRoI2gdKJYMAAAAAAAAAAAAIEEAACBBAAAgQZRoFUsDhZRoIHSUUpRoKGgdKJYDAAAAAAAAAAEBAZRoLEsDhZRoIHSUUpRoMmgdKJYDAAAAAAAAAAEBAZRoLEsDhZRoIHSUUpRoN051YowLb2JzZXJ2YXRpb26UaA0pgZR9lChoEGgVaBhLBoWUaBpoHSiWGAAAAAAAAAAAACDBAAAgwQAAIMEAACDBAAAgwQAAIMGUaBVLBoWUaCB0lFKUaCNoHSiWGAAAAAAAAAAAACBBAAAgQQAAIEEAACBBAAAgQQAAIEGUaBVLBoWUaCB0lFKUaChoHSiWBgAAAAAAAAABAQEBAQGUaCxLBoWUaCB0lFKUaDJoHSiWBgAAAAAAAAABAQEBAQGUaCxLBoWUaCB0lFKUaDdOdWJ1aBhOaBBOaDdOdWIu", "spaces": "OrderedDict([('achieved_goal', Box([-10. -10. -10.], [10. 10. 10.], (3,), float32)), ('desired_goal', Box([-10. -10. -10.], [10. 10. 10.], (3,), float32)), ('observation', Box([-10. -10. -10. -10. -10. -10.], [10. 10. 10. 10. 10. 10.], (6,), float32))])", "_shape": null, "dtype": null, "_np_random": null}, "action_space": {":type:": "<class 'gym.spaces.box.Box'>", ":serialized:": "gAWVbQEAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMBl9zaGFwZZRLA4WUjANsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWDAAAAAAAAAAAAIC/AACAvwAAgL+UaApLA4WUjAFDlHSUUpSMBGhpZ2iUaBIolgwAAAAAAAAAAACAPwAAgD8AAIA/lGgKSwOFlGgVdJRSlIwNYm91bmRlZF9iZWxvd5RoEiiWAwAAAAAAAAABAQGUaAeMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLA4WUaBV0lFKUjA1ib3VuZGVkX2Fib3ZllGgSKJYDAAAAAAAAAAEBAZRoIUsDhZRoFXSUUpSMCl9ucF9yYW5kb22UTnViLg==", "dtype": "float32", "_shape": [3], "low": "[-1. -1. -1.]", "high": "[1. 1. 
1.]", "bounded_below": "[ True True True]", "bounded_above": "[ True True True]", "_np_random": null}, "n_envs": 8, "num_timesteps": 2000000, "_total_timesteps": 2000000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1680416453182156123, "learning_rate": 0.0001, "tensorboard_log": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVAQMAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMZy9ob21lL3UvbWluaWNvbmRhMy9lbnZzL2h1Z2dpbmdmYWNlLXVuaXQ2L2xpYi9weXRob24zLjkvc2l0ZS1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuCQwIAAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjGcvaG9tZS91L21pbmljb25kYTMvZW52cy9odWdnaW5nZmFjZS11bml0Ni9saWIvcHl0aG9uMy45L3NpdGUtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz8aNuLrHEMthZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "_last_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": "gAWVewIAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QolmAAAAAAAAAAxjDWPkptODsu2xg/xjDWPkptODsu2xg/xjDWPkptODsu2xg/xjDWPkptODsu2xg/xjDWPkptODsu2xg/xjDWPkptODsu2xg/xjDWPkptODsu2xg/xjDWPkptODsu2xg/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksISwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcolmAAAAAAAAAAKXtZv9jHuz8PYu8+FJSfPvH53r4EvBS+7ixNvyCQMj8ofc2/8J7NP6U/KT1Z+Lu/bszXP4yeyj9s7bO/aMbtPpw+yb9vQpy/KCKOvkrjn7/Qvtq/0BahvoDdEz6NIY6+lGgOSwhLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWwAAAAAAAAADGMNY+Sm04Oy7bGD/tKIu8l+9LuohT9jvGMNY+Sm04Oy7bGD/tKIu8l+9LuohT9jvGMNY+Sm04Oy7bGD/tKIu8l+9LuohT9jvGMNY+Sm04Oy7bGD/tKIu8l+9LuohT9jvGMNY+Sm04Oy7bGD/tKIu8l+9LuohT9jvGMNY+Sm04Oy7bGD/tKIu8l+9LuohT9jvGMNY+Sm04Oy7bGD/tKIu8l+9LuohT9jvGMNY+Sm04Oy7bGD/tKIu8l+9LuohT9juUaA5LCEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[0.41834086 0.00281413 0.5970944 ]\n [0.41834086 0.00281413 0.5970944 ]\n [0.41834086 0.00281413 0.5970944 ]\n [0.41834086 0.00281413 0.5970944 ]\n [0.41834086 0.00281413 0.5970944 ]\n [0.41834086 0.00281413 0.5970944 ]\n [0.41834086 0.00281413 0.5970944 ]\n [0.41834086 0.00281413 0.5970944 ]]", "desired_goal": "[[-0.8495355 1.4670362 0.467545 ]\n [ 0.31167662 -0.43550065 -0.14524847]\n [-0.8014668 0.6975117 -1.605382 ]\n [ 1.6064129 0.04132046 -1.4685165 ]\n [ 1.6859262 1.5829635 -1.405683 ]\n [ 0.46440434 -1.5722232 -1.2207774 ]\n [-0.27760434 -1.2491238 -1.7089481 ]\n [-0.31462717 0.14439964 -0.27759972]]", "observation": "[[ 0.41834086 0.00281413 0.5970944 -0.01698729 -0.00077795 0.00751728]\n [ 0.41834086 0.00281413 0.5970944 -0.01698729 -0.00077795 0.00751728]\n [ 0.41834086 0.00281413 0.5970944 -0.01698729 -0.00077795 0.00751728]\n [ 0.41834086 0.00281413 0.5970944 -0.01698729 -0.00077795 0.00751728]\n [ 0.41834086 0.00281413 0.5970944 -0.01698729 -0.00077795 0.00751728]\n [ 0.41834086 0.00281413 0.5970944 -0.01698729 -0.00077795 0.00751728]\n [ 0.41834086 0.00281413 0.5970944 -0.01698729 -0.00077795 0.00751728]\n [ 0.41834086 
0.00281413 0.5970944 -0.01698729 -0.00077795 0.00751728]]"}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVewAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYIAAAAAAAAAAEBAQEBAQEBlIwFbnVtcHmUjAVkdHlwZZSTlIwCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksIhZSMAUOUdJRSlC4="}, "_last_original_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": "gAWVewIAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QolmAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksISwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcolmAAAAAAAAAAU4ToPbm9Ej6eoos+ZZWKvTkI5T2HXP49FNR9PWexbr3nd5Y+gKOTPR8GuT3oWDQ+l2zJvTNHPr2j+WQ+HuHwPdMolL0ln4E+56dWvRrhzzw0TY47bvXWPYhnIT0ZRUM+lGgOSwhLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWwAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAACUaA5LCEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]]", "desired_goal": "[[ 0.11353364 0.14330186 0.27272505]\n [-0.06766776 0.11183209 0.12419992]\n [ 0.06196983 -0.05827465 0.29388353]\n [ 0.0720892 0.09034371 0.1761204 ]\n [-0.09835165 -0.04645462 0.22360854]\n [ 0.11761688 -0.07234349 0.2531673 ]\n [-0.05240622 0.02537589 0.0043427 ]\n [ 0.10496031 0.03940538 0.19069327]]", "observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"}, "_episode_num": 0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": 0.0, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": 
"gAWVHRAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIQGzp0VTP9b+UhpRSlIwBbJRLMowBdJRHQKK2Bpfx+a11fZQoaAZoCWgPQwgMHxFTIsn5v5SGlFKUaBVLMmgWR0Citeliay8jdX2UKGgGaAloD0MI1NfzNcsl+L+UhpRSlGgVSzJoFkdAorXNC9h7V3V9lChoBmgJaA9DCL7ArFCku/a/lIaUUpRoFUsyaBZHQKK1r6/IsAh1fZQoaAZoCWgPQwg8+IkD6Pf3v5SGlFKUaBVLMmgWR0Cit2UNSZSfdX2UKGgGaAloD0MIi06WWu+3+7+UhpRSlGgVSzJoFkdAordJgw482nV9lChoBmgJaA9DCKvrUE1JlvO/lIaUUpRoFUsyaBZHQKK3LkZJkG11fZQoaAZoCWgPQwgVUn5S7dP4v5SGlFKUaBVLMmgWR0CitxIi1RcedX2UKGgGaAloD0MIjXxe8dTj97+UhpRSlGgVSzJoFkdAorb11B+nZXV9lChoBmgJaA9DCCgNNQpJ5vW/lIaUUpRoFUsyaBZHQKK22Jw84gl1fZQoaAZoCWgPQwjsUE1J1iH2v5SGlFKUaBVLMmgWR0CitrwYtQKsdX2UKGgGaAloD0MIDhZO0vwx9b+UhpRSlGgVSzJoFkdAoraew9q1xHV9lChoBmgJaA9DCCrHZHH/0fW/lIaUUpRoFUsyaBZHQKK4WaESM991fZQoaAZoCWgPQwioVImyt5T2v5SGlFKUaBVLMmgWR0CiuD4MnZ00dX2UKGgGaAloD0MIkwILYMrA9r+UhpRSlGgVSzJoFkdAorgiyIHkcXV9lChoBmgJaA9DCO/k02NbRvO/lIaUUpRoFUsyaBZHQKK4BqmCROl1fZQoaAZoCWgPQwjLLEKxFbTzv5SGlFKUaBVLMmgWR0Cit+pSR8txdX2UKGgGaAloD0MIxciSOZZ39r+UhpRSlGgVSzJoFkdAorfNF+d9UnV9lChoBmgJaA9DCDlFR3L5j/m/lIaUUpRoFUsyaBZHQKK3sMlTm4l1fZQoaAZoCWgPQwh1yThGskf2v5SGlFKUaBVLMmgWR0Cit5OMl1KXdX2UKGgGaAloD0MIGhh5WRML9b+UhpRSlGgVSzJoFkdAorlN34bjtHV9lChoBmgJaA9DCD9wlScQ9vK/lIaUUpRoFUsyaBZHQKK5MlyBCld1fZQoaAZoCWgPQwiqC3iZYSP6v5SGlFKUaBVLMmgWR0CiuRcvM8oydX2UKGgGaAloD0MIn69ZLhvd8b+UhpRSlGgVSzJoFkdAorj7ENvwVnV9lChoBmgJaA9DCN4+q8yU1vO/lIaUUpRoFUsyaBZHQKK43tUn5SF1fZQoaAZoCWgPQwiYNEbrqGr4v5SGlFKUaBVLMmgWR0CiuMGXgLqmdX2UKGgGaAloD0MIVaGBWDbz9b+UhpRSlGgVSzJoFkdAorilSde6Z3V9lChoBmgJaA9DCGL3HcNj//e/lIaUUpRoFUsyaBZHQKK4h/iHZbp1fZQoaAZoCWgPQwj+RGXDmor5v5SGlFKUaBVLMmgWR0CiukQxWT5gdX2UKGgGaAloD0MIPQytTs6Q+r+UhpRSlGgVSzJoFkdAoroowGnn+3V9lChoBmgJaA9DCEOpvYi2I/W/lIaUUpRoFUsyaBZHQKK6DZaFEiN1fZQoaAZoCWgPQwhavcPt0PD4v5SGlFKUaBVLMmgWR0CiufFyq+8HdX2UKGgGaAloD0MIBkfJq3NM+b+UhpRSlGgVSzJoFkdAornVH2AXmHV9lChoBmgJaA9DCIHoSZnUEPS/lIaUUpRoFUsyaBZHQKK5t92ovSN1fZQoaAZoCWgPQwglzoqoiX70v5SGlFKUaBVLMmgWR0CiuZtSZSeidX2UKGgGaAloD0MIObNdoQ/W9b+UhpRSlGgVSzJoFkdAorl967dzn3V9lChoBmgJaA9DCKzj+KHSCPu/lIaUUpRoFUsyaBZHQKK7Phm5Dqp1fZQoaAZoCWgPQwhuiVxwBv/1v5SGlFKUaBVLMmgWR0CiuyKJ/G2kdX2UKGgGaAloD0MIhZfg1AcS+b+UhpRSlGgVSzJoFkdAorsHRE4NqnV9lChoBmgJaA9DCB6HwfwVcvW/lIaUUpRoFUsyaBZHQKK66zPa+N91fZQoaAZoCWgPQwgB+KdUibLyv5SGlFKUaBVLMmgWR0Cius8LKFIvdX2UKGgGaAloD0MILLgf8MAA+L+UhpRSlGgVSzJoFkdAorqx0U47zXV9lChoBmgJaA9DCCXoL/SIkfi/lIaUUpRoFUsyaBZHQKK6lVI7Njd1fZQoaAZoCWgPQwjudygK9In2v5SGlFKUaBVLMmgWR0Ciunf7rLQpdX2UKGgGaAloD0MIRpiiXBp/9b+UhpRSlGgVSzJoFkdAorwwmsvIwXV9lChoBmgJaA9DCN8Vwf9WMvW/lIaUUpRoFUsyaBZHQKK8FRb8m8d1fZQoaAZoCWgPQwgQr+sX7Mb6v5SGlFKUaBVLMmgWR0Ciu/nXVbzLdX2UKGgGaAloD0MIWksBaf+D+L+UhpRSlGgVSzJoFkdAorvduR9w33V9lChoBmgJaA9DCG9Kea2Ebvm/lIaUUpRoFUsyaBZHQKK7wWVu76J1fZQoaAZoCWgPQwjmWrQAbSv1v5SGlFKUaBVLMmgWR0Ciu6QvQF9sdX2UKGgGaAloD0MI7WMFvw1x87+UhpRSlGgVSzJoFkdAoruH0VafSXV9lChoBmgJaA9DCDVgkPRp1fi/lIaUUpRoFUsyaBZHQKK7amv4dp91fZQoaAZoCWgPQwg8nwH1ZpTyv5SGlFKUaBVLMmgWR0CivR71yvLYdX2UKGgGaAloD0MI0VlmEYpt97+UhpRSlGgVSzJoFkdAor0DZi/fwnV9lChoBmgJaA9DCJQzFHe8CfS/lIaUUpRoFUsyaBZHQKK86CCjDbd1fZQoaAZoCWgPQwiztFNzuQH6v5SGlFKUaBVLMmgWR0CivMwe3hGZdX2UKGgGaAloD0MI/FbrxOU4+b+UhpRSlGgVSzJoFkdAoryvz6JqI3V9lChoBmgJaA9DCDM334jumfm/lIaUUpRoFUsyaBZHQKK8kpPykKx1fZQoaAZoCWgPQwj0wTI2dPP5v5SGlFKUaBVLMmgWR0CivHYuTRpldX2UKGgGaAloD0MIr5Y7M8Hw9r+UhpRSlGgVSzJoFkdAorxYxYaHbnV9lChoBmgJaA9DCGK+vAD7qPa/lIaUUpRoFUsyaBZHQKK+C5ksjFB1fZQoaAZoCWgPQwhhMlUwKunyv5SGlFKUaBVLMmgWR0CivfAIIF/ydX2UKGgGaAloD0MIat5xio6k87+UhpRSlGgVSzJoFkdAor3U3bVSXXV9lChoBmgJaA9DCN
S19j5Vhfq/lIaUUpRoFUsyaBZHQKK9uOuq3mV1fZQoaAZoCWgPQwgbR6zFp8D6v5SGlFKUaBVLMmgWR0CivZy4FzMidX2UKGgGaAloD0MICtgORuxT9L+UhpRSlGgVSzJoFkdAor1/gtOEd3V9lChoBmgJaA9DCB2SWiiZHPS/lIaUUpRoFUsyaBZHQKK9YvyLAHp1fZQoaAZoCWgPQwi8OzJWm//yv5SGlFKUaBVLMmgWR0CivUWnKnvVdX2UKGgGaAloD0MICwqDMo3m8r+UhpRSlGgVSzJoFkdAor8CzTnaFnV9lChoBmgJaA9DCO3ShsPSAPS/lIaUUpRoFUsyaBZHQKK+5zp5eJJ1fZQoaAZoCWgPQwiLbVLRWHv2v5SGlFKUaBVLMmgWR0Civsv99+gEdX2UKGgGaAloD0MI/mDgufdw97+UhpRSlGgVSzJoFkdAor6v8baRIXV9lChoBmgJaA9DCHnnUIaq2PO/lIaUUpRoFUsyaBZHQKK+k8ifQKN1fZQoaAZoCWgPQwhkO99PjVf1v5SGlFKUaBVLMmgWR0CivnaGQCCBdX2UKGgGaAloD0MI9kGWBRM/87+UhpRSlGgVSzJoFkdAor5aD/VAiXV9lChoBmgJaA9DCBR3vMlvEfa/lIaUUpRoFUsyaBZHQKK+PKkl/pd1fZQoaAZoCWgPQwirBfaYSOn0v5SGlFKUaBVLMmgWR0Civ/THCGeudX2UKGgGaAloD0MIPGcLCK0H9r+UhpRSlGgVSzJoFkdAor/ZSBK+SXV9lChoBmgJaA9DCH1bsFQXcPW/lIaUUpRoFUsyaBZHQKK/vg1FYuF1fZQoaAZoCWgPQwh8gVmhSHf3v5SGlFKUaBVLMmgWR0Civ6H/95yEdX2UKGgGaAloD0MIOdOE7Sdj+r+UhpRSlGgVSzJoFkdAor+FqzqrzXV9lChoBmgJaA9DCA3EsplDkva/lIaUUpRoFUsyaBZHQKK/aGt6ol51fZQoaAZoCWgPQwiPNSOD3IX5v5SGlFKUaBVLMmgWR0Civ0wT/Q0GdX2UKGgGaAloD0MIol2FlJ9U97+UhpRSlGgVSzJoFkdAor8ux2SuAHV9lChoBmgJaA9DCKvN/6uOHPa/lIaUUpRoFUsyaBZHQKLA5beMyad1fZQoaAZoCWgPQwhVUbzK2ub0v5SGlFKUaBVLMmgWR0CiwMopx3mndX2UKGgGaAloD0MIcEOM17zq9b+UhpRSlGgVSzJoFkdAosCu6y0KJHV9lChoBmgJaA9DCJQ0f0xr0/i/lIaUUpRoFUsyaBZHQKLAksuFpPB1fZQoaAZoCWgPQwgogjgPJ7D2v5SGlFKUaBVLMmgWR0CiwHaHbh3rdX2UKGgGaAloD0MIFM5uLZOh8r+UhpRSlGgVSzJoFkdAosBZSLqD9XV9lChoBmgJaA9DCIDTu3g/bvm/lIaUUpRoFUsyaBZHQKLAPMWXTmZ1fZQoaAZoCWgPQwifW+hKBCr0v5SGlFKUaBVLMmgWR0CiwB9eyAx0dX2UKGgGaAloD0MIezApPj5h9L+UhpRSlGgVSzJoFkdAosHWKO1fFHV9lChoBmgJaA9DCH+l8+FZAv2/lIaUUpRoFUsyaBZHQKLBupqASWZ1fZQoaAZoCWgPQwhHrTB9r6H2v5SGlFKUaBVLMmgWR0CiwZ+chC+ldX2UKGgGaAloD0MIjjo6rkZ29L+UhpRSlGgVSzJoFkdAosGDeIl+mXV9lChoBmgJaA9DCBwMdVjh1va/lIaUUpRoFUsyaBZHQKLBZyXlbNd1fZQoaAZoCWgPQwgnSkIibaPyv5SGlFKUaBVLMmgWR0CiwUnqmj0udX2UKGgGaAloD0MIcsKE0aws/b+UhpRSlGgVSzJoFkdAosEtjEvTPXV9lChoBmgJaA9DCP8+48KB0PW/lIaUUpRoFUsyaBZHQKLBEDjin511ZS4="}, "ep_success_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 15625, "n_steps": 16, "gamma": 0.9546, "gae_lambda": 0.98, "ent_coef": 0.0001, "vf_coef": 0.5, "max_grad_norm": 0.5, "normalize_advantage": false, "system_info": {"OS": "Linux-5.15.90.1-microsoft-standard-WSL2-x86_64-with-glibc2.35 # 1 SMP Fri Jan 27 02:56:13 UTC 2023", "Python": "3.9.16", "Stable-Baselines3": "1.7.0", "PyTorch": "2.0.0+cu117", "GPU Enabled": "True", "Numpy": "1.24.2", "Gym": "0.21.0"}}
+
{"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=", "__module__": "stable_baselines3.common.policies", "__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x7fce7b570ee0>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x7fce7b56fe40>"}, "verbose": 1, "policy_kwargs": {":type:": "<class 'dict'>", ":serialized:": "gAWVgQAAAAAAAAB9lCiMD29wdGltaXplcl9jbGFzc5SME3RvcmNoLm9wdGltLnJtc3Byb3CUjAdSTVNwcm9wlJOUjBBvcHRpbWl6ZXJfa3dhcmdzlH2UKIwFYWxwaGGURz/vrhR64UeujANlcHOURz7k+LWI42jxjAx3ZWlnaHRfZGVjYXmUSwB1dS4=", "optimizer_class": "<class 'torch.optim.rmsprop.RMSprop'>", "optimizer_kwargs": {"alpha": 0.99, "eps": 1e-05, "weight_decay": 0}}, "observation_space": {":type:": "<class 'gym.spaces.dict.Dict'>", ":serialized:": 
"gAWVUgMAAAAAAACMD2d5bS5zcGFjZXMuZGljdJSMBERpY3SUk5QpgZR9lCiMBnNwYWNlc5SMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwOZ3ltLnNwYWNlcy5ib3iUjANCb3iUk5QpgZR9lCiMBWR0eXBllIwFbnVtcHmUaBCTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYowGX3NoYXBllEsDhZSMA2xvd5SMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFUsDhZSMAUOUdJRSlIwEaGlnaJRoHSiWDAAAAAAAAAAAACBBAAAgQQAAIEGUaBVLA4WUaCB0lFKUjA1ib3VuZGVkX2JlbG93lGgdKJYDAAAAAAAAAAEBAZRoEowCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksDhZRoIHSUUpSMDWJvdW5kZWRfYWJvdmWUaB0olgMAAAAAAAAAAQEBlGgsSwOFlGggdJRSlIwKX25wX3JhbmRvbZROdWKMDGRlc2lyZWRfZ29hbJRoDSmBlH2UKGgQaBVoGEsDhZRoGmgdKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFUsDhZRoIHSUUpRoI2gdKJYMAAAAAAAAAAAAIEEAACBBAAAgQZRoFUsDhZRoIHSUUpRoKGgdKJYDAAAAAAAAAAEBAZRoLEsDhZRoIHSUUpRoMmgdKJYDAAAAAAAAAAEBAZRoLEsDhZRoIHSUUpRoN051YowLb2JzZXJ2YXRpb26UaA0pgZR9lChoEGgVaBhLBoWUaBpoHSiWGAAAAAAAAAAAACDBAAAgwQAAIMEAACDBAAAgwQAAIMGUaBVLBoWUaCB0lFKUaCNoHSiWGAAAAAAAAAAAACBBAAAgQQAAIEEAACBBAAAgQQAAIEGUaBVLBoWUaCB0lFKUaChoHSiWBgAAAAAAAAABAQEBAQGUaCxLBoWUaCB0lFKUaDJoHSiWBgAAAAAAAAABAQEBAQGUaCxLBoWUaCB0lFKUaDdOdWJ1aBhOaBBOaDdOdWIu", "spaces": "OrderedDict([('achieved_goal', Box([-10. -10. -10.], [10. 10. 10.], (3,), float32)), ('desired_goal', Box([-10. -10. -10.], [10. 10. 10.], (3,), float32)), ('observation', Box([-10. -10. -10. -10. -10. -10.], [10. 10. 10. 10. 10. 10.], (6,), float32))])", "_shape": null, "dtype": null, "_np_random": null}, "action_space": {":type:": "<class 'gym.spaces.box.Box'>", ":serialized:": "gAWVbQEAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMBl9zaGFwZZRLA4WUjANsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWDAAAAAAAAAAAAIC/AACAvwAAgL+UaApLA4WUjAFDlHSUUpSMBGhpZ2iUaBIolgwAAAAAAAAAAACAPwAAgD8AAIA/lGgKSwOFlGgVdJRSlIwNYm91bmRlZF9iZWxvd5RoEiiWAwAAAAAAAAABAQGUaAeMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLA4WUaBV0lFKUjA1ib3VuZGVkX2Fib3ZllGgSKJYDAAAAAAAAAAEBAZRoIUsDhZRoFXSUUpSMCl9ucF9yYW5kb22UTnViLg==", "dtype": "float32", "_shape": [3], "low": "[-1. -1. -1.]", "high": "[1. 1. 
1.]", "bounded_below": "[ True True True]", "bounded_above": "[ True True True]", "_np_random": null}, "n_envs": 4, "num_timesteps": 2000000, "_total_timesteps": 2000000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1680431221493549003, "learning_rate": 0.0001, "tensorboard_log": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVAQMAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMZy9ob21lL3UvbWluaWNvbmRhMy9lbnZzL2h1Z2dpbmdmYWNlLXVuaXQ2L2xpYi9weXRob24zLjkvc2l0ZS1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuCQwIAAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjGcvaG9tZS91L21pbmljb25kYTMvZW52cy9odWdnaW5nZmFjZS11bml0Ni9saWIvcHl0aG9uMy45L3NpdGUtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz8aNuLrHEMthZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "_last_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAAivHbPiHsZDuahgw/ivHbPiHsZDuahgw/ivHbPiHsZDuahgw/ivHbPiHsZDuahgw/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAA6YauP2Alwz9Xu5C+hBbHP5IKsD/7upq6YEFwPkIBJz/WuY2/UjBMPk/FgL+jPdq+lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAACK8ds+IexkO5qGDD9Ie/O5us9lutFJEDyK8ds+IexkO5qGDD9Ie/O5us9lutFJEDyK8ds+IexkO5qGDD9Ie/O5us9lutFJEDyK8ds+IexkO5qGDD9Ie/O5us9lutFJEDyUaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[0.42957717 0.00349308 0.54892886]\n [0.42957717 0.00349308 0.54892886]\n [0.42957717 0.00349308 0.54892886]\n [0.42957717 0.00349308 0.54892886]]", "desired_goal": "[[ 1.3634921e+00 1.5245781e+00 -2.8267929e-01]\n [ 1.5553746e+00 1.3753226e+00 -1.1804992e-03]\n [ 2.3462439e-01 6.5236294e-01 -1.1072338e+00]\n [ 1.9940308e-01 -1.0060214e+00 -4.2625150e-01]]", "observation": "[[ 4.2957717e-01 3.4930783e-03 5.4892886e-01 -4.6440423e-04\n -8.7666104e-04 8.8066617e-03]\n [ 4.2957717e-01 3.4930783e-03 5.4892886e-01 -4.6440423e-04\n -8.7666104e-04 8.8066617e-03]\n [ 4.2957717e-01 3.4930783e-03 5.4892886e-01 -4.6440423e-04\n -8.7666104e-04 8.8066617e-03]\n [ 4.2957717e-01 3.4930783e-03 5.4892886e-01 -4.6440423e-04\n -8.7666104e-04 8.8066617e-03]]"}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVdwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYEAAAAAAAAAAEBAQGUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKULg=="}, "_last_original_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": 
"gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAMIvmvR3L371DmYs+W9DUvTIHeT37TrI9F+9Au8CXqL3Lj0I+wlCiPS+bAzvkGFA+lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAACUaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]]", "desired_goal": "[[-0.11257017 -0.10927413 0.27265367]\n [-0.10391303 0.06079788 0.08706471]\n [-0.00294394 -0.08232069 0.19000165]\n [ 0.0792556 0.00200815 0.20321995]]", "observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"}, "_episode_num": 0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": 0.0, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVHRAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIj/tW68Tl17+UhpRSlIwBbJRLMowBdJRHQK1hvwOvt+l1fZQoaAZoCWgPQwjxK9ZwkXvdv5SGlFKUaBVLMmgWR0CtYaOtfXwtdX2UKGgGaAloD0MIzywJUFPLxL+UhpRSlGgVSzJoFkdArWGHNiYsunV9lChoBmgJaA9DCADhQ4mWPOK/lIaUUpRoFUsyaBZHQK1haumJm/Z1fZQoaAZoCWgPQwh2Ul+WdmrIv5SGlFKUaBVLMmgWR0CtYnmNipeedX2UKGgGaAloD0MIw5rKorCL2r+UhpRSlGgVSzJoFkdArWJeOOsDGXV9lChoBmgJaA9DCLXeb7Tjhs+/lIaUUpRoFUsyaBZHQK1iQcwQDmt1fZQoaAZoCWgPQwhwXwfOGVHWv5SGlFKUaBVLMmgWR0CtYiUdilSCdX2UKGgGaAloD0MITwXc8/xp1L+UhpRSlGgVSzJoFkdArWM0580DU3V9lChoBmgJaA9DCNc07zhFR86/lIaUUpRoFUsyaBZHQK1jGajvd/J1fZQoaAZoCWgPQwj8q8d9q3XSv5SGlFKUaBVLMmgWR0CtYv00m+j/dX2UKGgGaAloD0MIbsDnhxHC1r+UhpRSlGgVSzJoFkdArWLgkeIVM3V9lChoBmgJaA9DCNpWs874vtO/lIaUUpRoFUsyaBZHQK1j8gqVhTh1fZQoaAZoCWgPQwiDp5Ar9SzZv5SGlFKUaBVLMmgWR0CtY9bDEWIodX2UKGgGaAloD0MIjpPCvMeZ0r+UhpRSlGgVSzJoFkdArWO6OvMbFXV9lChoBmgJaA9DCHHIBtLFpt+/lIaUUpRoFUsyaBZHQK1jnZV4oql1fZQoaAZoCWgPQwiCOuXRjbDWv5SGlFKUaBVLMmgWR0CtZK9t/FzddX2UKGgGaAloD0MI6lp7n6pCyb+UhpRSlGgVSzJoFkdArWSUI/qxDHV9lChoBmgJaA9DCNldoKTAAs6/lIaUUpRoFUsyaBZHQK1kd6Rhc7h1fZQoaAZoCWgPQwi366UpApzAv5SGlFKUaBVLMmgWR0CtZFr9ETg3dX2UKGgGaAloD0MIceZXc4Bg2L+UhpRSlGgVSzJoFkdArWVpxLkCFXV9lChoBmgJaA9DCOusFthjIsG/lIaUUpRoFUsyaBZHQK1lTnnuAqd1fZQoaAZoCWgPQwj68CxBRkDLv5SGlFKUaBVLMmgWR0CtZTIClrM1dX2UKGgGaAloD0MIf9sTJLa70L+UhpRSlGgVSzJoFkdArWUVVYISlHV9lChoBmgJaA9DCIvAWN/A5L6/lIaUUpRoFUsyaBZHQK1mNNzr/sF1fZQoaAZoCWgPQwhuxJPdzGjiv5SGlFKUaBVLMmgWR0CtZhoH1OCYdX2UKGgGaAloD0MI3/3xXrUy2b+UhpRSlGgVSzJoFkdArWX9k6Lfk3V9lChoBmgJaA9DCOIC0Chd+su/lIaUUpRoFUsyaBZHQK1l4PDHfdh1fZQoaAZoCWgPQwiJXkax3NLMv5SGlFKUaBVLMmgWR0CtZve4kNWmdX2UKGgGaAloD0MI3Xh3ZKw227+UhpRSlGgVSzJoFkdArWbcXLvCuXV9lChoBmgJaA9DCIf7yK1Jt9i/lIaUUpRoFUsyaBZHQK1mv+irT6V1fZQoaAZoCWgPQwimSL4SSIncv5SGlFKUaBVLMmgWR0CtZqNDMNc4dX2UKGgGaAloD0MIat5xio7kwL+UhpRSlGgVSzJoFkdArWe3P1L8JnV9lChoBmgJaA9DCC0GD9O+udO/lIaUUpRoFUsyaBZHQK1nm/oJRfp1fZQ
oaAZoCWgPQwgjaTf6mA/av5SGlFKUaBVLMmgWR0CtZ3+F10T2dX2UKGgGaAloD0MI4biMmxpo0r+UhpRSlGgVSzJoFkdArWdi5byH23V9lChoBmgJaA9DCMTuO4bHfta/lIaUUpRoFUsyaBZHQK1oj90ihWZ1fZQoaAZoCWgPQwjQRxlxAWjXv5SGlFKUaBVLMmgWR0CtaHSIHkcTdX2UKGgGaAloD0MIWd/A5EaR3b+UhpRSlGgVSzJoFkdArWhYEyLyc3V9lChoBmgJaA9DCK2kFd9Q+Mq/lIaUUpRoFUsyaBZHQK1oO3G4qgB1fZQoaAZoCWgPQwiAgSBAho7Hv5SGlFKUaBVLMmgWR0CtaVJmmLtNdX2UKGgGaAloD0MIPZl/9E2azL+UhpRSlGgVSzJoFkdArWk3E/B3zXV9lChoBmgJaA9DCMhESrN5HNC/lIaUUpRoFUsyaBZHQK1pGqLjxTd1fZQoaAZoCWgPQwjYRjzZzYzcv5SGlFKUaBVLMmgWR0CtaP34Kx9odX2UKGgGaAloD0MIokYhyaze0b+UhpRSlGgVSzJoFkdArWoPcQAdXHV9lChoBmgJaA9DCFCKVu4FZti/lIaUUpRoFUsyaBZHQK1p9Bk7Oml1fZQoaAZoCWgPQwh3EDtT6LzIv5SGlFKUaBVLMmgWR0Ctadeii7CjdX2UKGgGaAloD0MIR3U6kPXU0b+UhpRSlGgVSzJoFkdArWm6/GlyinV9lChoBmgJaA9DCJlH/mDgucO/lIaUUpRoFUsyaBZHQK1qzp4bCJp1fZQoaAZoCWgPQwh7Lei9MQTVv5SGlFKUaBVLMmgWR0CtarNIK+i8dX2UKGgGaAloD0MIo1cDlIYax7+UhpRSlGgVSzJoFkdArWqWzyBkJHV9lChoBmgJaA9DCDJyFva0w8e/lIaUUpRoFUsyaBZHQK1qeixFAml1fZQoaAZoCWgPQwiKVYMwt3vbv5SGlFKUaBVLMmgWR0Cta4fP5YYBdX2UKGgGaAloD0MI8MLWbOUl1b+UhpRSlGgVSzJoFkdArWtsf1YhdXV9lChoBmgJaA9DCCTwh5//HsC/lIaUUpRoFUsyaBZHQK1rT/d69kB1fZQoaAZoCWgPQwgWMlcG1Qbbv5SGlFKUaBVLMmgWR0CtazNQKrq/dX2UKGgGaAloD0MIZyYYzjXM2r+UhpRSlGgVSzJoFkdArWxCxiXpn3V9lChoBmgJaA9DCOF/K9mxEdK/lIaUUpRoFUsyaBZHQK1sJ3Tuv2Z1fZQoaAZoCWgPQwid8uhGWFTSv5SGlFKUaBVLMmgWR0CtbAsCkoF3dX2UKGgGaAloD0MIxEFClC9o3L+UhpRSlGgVSzJoFkdArWvuWv8qF3V9lChoBmgJaA9DCJz6QPLOod+/lIaUUpRoFUsyaBZHQK1tCjQAuI11fZQoaAZoCWgPQwiiDcAGRIjQv5SGlFKUaBVLMmgWR0CtbO7aRISUdX2UKGgGaAloD0MIv2VOl8XE4L+UhpRSlGgVSzJoFkdArWzSa1Cw8nV9lChoBmgJaA9DCC/6CtKMRd6/lIaUUpRoFUsyaBZHQK1staTwDvF1fZQoaAZoCWgPQwhOY3st6L3Uv5SGlFKUaBVLMmgWR0CtbcRl6JIldX2UKGgGaAloD0MI/rW8cr1t0r+UhpRSlGgVSzJoFkdArW2pGKAJ9nV9lChoBmgJaA9DCIRkARO4deG/lIaUUpRoFUsyaBZHQK1tjJlrdnF1fZQoaAZoCWgPQwhNFYxK6gTZv5SGlFKUaBVLMmgWR0CtbW/29L6DdX2UKGgGaAloD0MI/I123PC717+UhpRSlGgVSzJoFkdArW5/fl6qsHV9lChoBmgJaA9DCI+qJoi6D86/lIaUUpRoFUsyaBZHQK1uZCKrJbN1fZQoaAZoCWgPQwj/dtmvO93Tv5SGlFKUaBVLMmgWR0Ctbket8uzydX2UKGgGaAloD0MIuyU5YFeT1r+UhpRSlGgVSzJoFkdArW4rDZUT+XV9lChoBmgJaA9DCP0RhgFLruC/lIaUUpRoFUsyaBZHQK1vOcS5AhV1fZQoaAZoCWgPQwid1QJ7TKTMv5SGlFKUaBVLMmgWR0Ctbx6AOJ+EdX2UKGgGaAloD0MIDJV/La9c0L+UhpRSlGgVSzJoFkdArW8CAe7tiXV9lChoBmgJaA9DCFDIztvY7Ne/lIaUUpRoFUsyaBZHQK1u5Vd5Y5l1fZQoaAZoCWgPQwgyWdx/ZDrTv5SGlFKUaBVLMmgWR0Ctb/T7/GVBdX2UKGgGaAloD0MIrkfhehSu0b+UhpRSlGgVSzJoFkdArW/ZnSOR1XV9lChoBmgJaA9DCPEQxk/j3sy/lIaUUpRoFUsyaBZHQK1vvS1maph1fZQoaAZoCWgPQwgw9IjRcwvXv5SGlFKUaBVLMmgWR0Ctb6CIUJv6dX2UKGgGaAloD0MIDVNb6iCv2r+UhpRSlGgVSzJoFkdArXCxY5ksjHV9lChoBmgJaA9DCIPCoEyjycm/lIaUUpRoFUsyaBZHQK1wlgCOmzl1fZQoaAZoCWgPQwhWSs/0EmPcv5SGlFKUaBVLMmgWR0CtcHmLDQ7cdX2UKGgGaAloD0MI9nzNctno0L+UhpRSlGgVSzJoFkdArXBc6FM7EHV9lChoBmgJaA9DCBx4tdyZCde/lIaUUpRoFUsyaBZHQK1xa14Pf9B1fZQoaAZoCWgPQwipvvOLEvTSv5SGlFKUaBVLMmgWR0CtcVAE+xGEdX2UKGgGaAloD0MIs0XSbvSx4L+UhpRSlGgVSzJoFkdArXEzeGfwqnV9lChoBmgJaA9DCC4gtB6+TNK/lIaUUpRoFUsyaBZHQK1xFtrKvFF1fZQoaAZoCWgPQwjoE3mSdM3Av5SGlFKUaBVLMmgWR0Ctcie1jRUndX2UKGgGaAloD0MIIQN5dvnWyb+UhpRSlGgVSzJoFkdArXIMZR8+inV9lChoBmgJaA9DCMMoCB7f3sO/lIaUUpRoFUsyaBZHQK1x7+2mYSh1fZQoaAZoCWgPQwi5jJsaaD7Vv5SGlFKUaBVLMmgWR0CtcdNAC4jKdX2UKGgGaAloD0MI/BwfLc4Y3b+UhpRSlGgVSzJoFkdArXLnazu4PXV9lChoBmgJaA9DCF9/Ep87wdq/lIaUUpRoFUsyaBZHQK1yzBInSfF1fZQoaAZoCWgPQwi+3CdHAaLGv5SGlFKUaBVLMmgWR0Ctcq+NtIkJdX2UKGgGaAloD0MIai+i7Zi64L+UhpRSlGgVSzJoFkdArXKS6BiCrnV9lChoBmgJaA9DCNCc9SnHZNu/lIaUUpRoFUsyaBZHQK1zpPoFFDx1fZQoaAZoCWgPQwhAM4gP7PjYv5SGlFKUaBVLMmgWR0Ctc4mkN4JNdX2UKGgGaAloD0MIWg2Jeyx9yr+UhpRSlGgVSzJoFkdArXNtHWjGk3V9lChoBmgJaA9DCE9Y4gFlU9y/lIaUUpRoFUsyaBZHQK1zUHoHLRt1ZS4="}, "ep_success_buffer": 
{":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 100000, "n_steps": 5, "gamma": 0.9, "gae_lambda": 0.98, "ent_coef": 0, "vf_coef": 0.5, "max_grad_norm": 0.5, "normalize_advantage": false, "system_info": {"OS": "Linux-5.15.90.1-microsoft-standard-WSL2-x86_64-with-glibc2.35 # 1 SMP Fri Jan 27 02:56:13 UTC 2023", "Python": "3.9.16", "Stable-Baselines3": "1.7.0", "PyTorch": "2.0.0+cu117", "GPU Enabled": "True", "Numpy": "1.24.2", "Gym": "0.21.0"}}
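Besides the hyperparameters, config.json records the environment interface (a Dict observation space with achieved_goal/desired_goal as Box(3,) and observation as Box(6,), plus a Box(3,) action space in [-1, 1]) and the software stack (Python 3.9, Stable-Baselines3 1.7.0, PyTorch 2.0.0+cu117, Gym 0.21.0). A quick sanity check against a locally built env; the panda_gym import is an assumption, since the commit does not list that package:

```python
# Sketch: confirm a locally created env matches the spaces recorded in config.json.
# Assumes panda-gym provides PandaReachDense-v2 under the Gym 0.21 API.
import gym
import panda_gym  # noqa: F401  (importing it registers the Panda envs)

env = gym.make("PandaReachDense-v2")

print(env.observation_space)  # Dict(achieved_goal: Box(3,), desired_goal: Box(3,), observation: Box(6,))
print(env.action_space)       # Box(-1.0, 1.0, (3,), float32)

assert env.observation_space["observation"].shape == (6,)
assert env.action_space.shape == (3,)
```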
replay.mp4
CHANGED
Binary files a/replay.mp4 and b/replay.mp4 differ
results.json
CHANGED
@@ -1 +1 @@
-{"mean_reward": -
+{"mean_reward": -0.3295648271305254, "std_reward": 0.14730595798680027, "is_deterministic": true, "n_eval_episodes": 10, "eval_datetime": "2023-04-02T13:13:28.195344"}
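results.json now stores the evaluation behind the README metric: a mean reward of about -0.33 with a standard deviation of about 0.15 over 10 deterministic episodes. A sketch of how such numbers are typically produced with Stable-Baselines3's evaluate_policy; loading the evaluation env through the saved vec_normalize.pkl statistics is an assumption about how the repository is meant to be used:

```python
# Sketch of an evaluation matching the fields in results.json
# (n_eval_episodes=10, deterministic=True); the env wrapping is an assumption.
import gym
import panda_gym  # noqa: F401

from stable_baselines3 import A2C
from stable_baselines3.common.evaluation import evaluate_policy
from stable_baselines3.common.vec_env import DummyVecEnv, VecNormalize

eval_env = DummyVecEnv([lambda: gym.make("PandaReachDense-v2")])
eval_env = VecNormalize.load("vec_normalize.pkl", eval_env)
eval_env.training = False     # freeze the normalization statistics at eval time
eval_env.norm_reward = False  # report raw, unnormalized rewards

model = A2C.load("a2c-PandaReachDense-v2", env=eval_env)
mean_reward, std_reward = evaluate_policy(model, eval_env, n_eval_episodes=10, deterministic=True)
print(f"mean_reward={mean_reward:.2f} +/- {std_reward:.2f}")  # e.g. -0.33 +/- 0.15
```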
vec_normalize.pkl
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:00b791faf496f212d11c13997f569988b54a618fc98dd865d85edff5e4e755ad
 size 3056
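Every binary artifact in this commit (the model zip, the policy and optimizer state dicts, vec_normalize.pkl) is stored through Git LFS, so consumers normally fetch the files from the Hub rather than from the raw diff. A short sketch using huggingface_sb3; the repo id is a hypothetical placeholder, since the commit does not state it:

```python
# Sketch: fetch the artifacts touched by this commit from the Hugging Face Hub.
# "<user>/a2c-PandaReachDense-v2" is a placeholder repo id, not taken from the commit.
from huggingface_sb3 import load_from_hub

checkpoint_path = load_from_hub(
    repo_id="<user>/a2c-PandaReachDense-v2",
    filename="a2c-PandaReachDense-v2.zip",
)
stats_path = load_from_hub(
    repo_id="<user>/a2c-PandaReachDense-v2",
    filename="vec_normalize.pkl",
)
# Both calls return local file paths that can be handed to A2C.load(...) and
# VecNormalize.load(...) as in the evaluation sketch above.
```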