fancy_gym/alr_envs/alr/mujoco/reacher/mp_wrapper.py

from typing import Union

import numpy as np

from mp_env_api import MPEnvWrapper


class MPWrapper(MPEnvWrapper):
    @property
    def active_obs(self):
        return np.concatenate([
            [False] * self.n_links,  # cos
            [False] * self.n_links,  # sin
            [True] * 2,  # goal position
            [False] * self.n_links,  # angular velocity
            [False] * 3,  # goal distance
            # self.get_body_com("target"),  # only return target to make problem harder
            [False],  # step
        ])
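    # Note (added for clarity, assuming the observation layout listed above):
    # with n_links joints the mask covers 3 * n_links + 6 observation entries,
    # and only the two goal-position entries are marked active.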
    # @property
    # def active_obs(self):
    #     return np.concatenate([
    #         [True] * self.n_links,  # cos, True
    #         [True] * self.n_links,  # sin, True
    #         [True] * 2,  # goal position
    #         [True] * self.n_links,  # angular velocity, True
    #         [True] * 3,  # goal distance
    #         # self.get_body_com("target"),  # only return target to make problem harder
    #         [False],  # step
    #     ])
    @property
    def current_vel(self) -> Union[float, int, np.ndarray]:
        return self.sim.data.qvel.flat[:self.n_links]

    @property
    def current_pos(self) -> Union[float, int, np.ndarray]:
        return self.sim.data.qpos.flat[:self.n_links]
    @property
    def dt(self) -> Union[float, int]:
        return self.env.dt
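
# Illustrative usage sketch (added; not part of the upstream file). It assumes
# that MPEnvWrapper follows the usual gym.Wrapper pattern of wrapping an
# existing environment, and that a reacher environment exposing `n_links` and
# `sim` is registered under an id such as "ALRReacher-v0"; both the wrapping
# pattern and the environment id are assumptions made for demonstration only.
if __name__ == "__main__":
    import gym

    env = MPWrapper(gym.make("ALRReacher-v0"))  # hypothetical environment id

    print(env.active_obs)   # boolean mask over the observation vector
    print(env.current_pos)  # joint positions, qpos[:n_links]
    print(env.current_vel)  # joint velocities, qvel[:n_links]
    print(env.dt)           # control timestep of the wrapped environment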