From ef2868759307584ae4fd54967963f12833f4df4c Mon Sep 17 00:00:00 2001
From: Abhishek Kadian <abhishekkadiyan@gmail.com>
Date: Sun, 8 Sep 2019 14:11:16 -0700
Subject: [PATCH] pyrobot integration

---
 habitat/config/default.py       |  72 +++++++++---
 habitat/core/simulator.py       |   1 +
 habitat/sims/pyrobot/pyrobot.py | 198 ++++++++++++++++++++------------
 simple_pyrobot.py               |  19 ---
 4 files changed, 176 insertions(+), 114 deletions(-)
 delete mode 100644 simple_pyrobot.py

diff --git a/habitat/config/default.py b/habitat/config/default.py
index 24014465f..8ebb727b9 100644
--- a/habitat/config/default.py
+++ b/habitat/config/default.py
@@ -38,49 +38,49 @@ _C.TASK.SENSORS = []
 _C.TASK.MEASUREMENTS = []
 _C.TASK.GOAL_SENSOR_UUID = "pointgoal"
 # -----------------------------------------------------------------------------
-# # POINTGOAL SENSOR
+# POINTGOAL SENSOR
 # -----------------------------------------------------------------------------
 _C.TASK.POINTGOAL_SENSOR = CN()
 _C.TASK.POINTGOAL_SENSOR.TYPE = "PointGoalSensor"
 _C.TASK.POINTGOAL_SENSOR.GOAL_FORMAT = "POLAR"
 _C.TASK.POINTGOAL_SENSOR.DIMENSIONALITY = 2
 # -----------------------------------------------------------------------------
-# # POINTGOAL WITH GPS+COMPASS SENSOR
+# POINTGOAL WITH GPS+COMPASS SENSOR
 # -----------------------------------------------------------------------------
 _C.TASK.POINTGOAL_WITH_GPS_COMPASS_SENSOR = _C.TASK.POINTGOAL_SENSOR.clone()
 _C.TASK.POINTGOAL_WITH_GPS_COMPASS_SENSOR.TYPE = (
     "PointGoalWithGPSCompassSensor"
 )
 # -----------------------------------------------------------------------------
-# # HEADING SENSOR
+# HEADING SENSOR
 # -----------------------------------------------------------------------------
 _C.TASK.HEADING_SENSOR = CN()
 _C.TASK.HEADING_SENSOR.TYPE = "HeadingSensor"
 # -----------------------------------------------------------------------------
-# # COMPASS SENSOR
+# COMPASS SENSOR
 # -----------------------------------------------------------------------------
 _C.TASK.COMPASS_SENSOR = CN()
 _C.TASK.COMPASS_SENSOR.TYPE = "CompassSensor"
 # -----------------------------------------------------------------------------
-# # GPS SENSOR
+# GPS SENSOR
 # -----------------------------------------------------------------------------
 _C.TASK.GPS_SENSOR = CN()
 _C.TASK.GPS_SENSOR.TYPE = "GPSSensor"
 _C.TASK.GPS_SENSOR.DIMENSIONALITY = 2
 # -----------------------------------------------------------------------------
-# # PROXIMITY SENSOR
+# PROXIMITY SENSOR
 # -----------------------------------------------------------------------------
 _C.TASK.PROXIMITY_SENSOR = CN()
 _C.TASK.PROXIMITY_SENSOR.TYPE = "ProximitySensor"
 _C.TASK.PROXIMITY_SENSOR.MAX_DETECTION_RADIUS = 2.0
 # -----------------------------------------------------------------------------
-# # SPL MEASUREMENT
+# SPL MEASUREMENT
 # -----------------------------------------------------------------------------
 _C.TASK.SPL = CN()
 _C.TASK.SPL.TYPE = "SPL"
 _C.TASK.SPL.SUCCESS_DISTANCE = 0.2
 # -----------------------------------------------------------------------------
-# # TopDownMap MEASUREMENT
+# TopDownMap MEASUREMENT
 # -----------------------------------------------------------------------------
 _C.TASK.TOP_DOWN_MAP = CN()
 _C.TASK.TOP_DOWN_MAP.TYPE = "TopDownMap"
@@ -96,7 +96,7 @@ _C.TASK.TOP_DOWN_MAP.FOG_OF_WAR.DRAW = True
 _C.TASK.TOP_DOWN_MAP.FOG_OF_WAR.VISIBILITY_DIST = 5.0
 _C.TASK.TOP_DOWN_MAP.FOG_OF_WAR.FOV = 90
 # -----------------------------------------------------------------------------
-# # COLLISIONS MEASUREMENT
+# COLLISIONS MEASUREMENT
 # -----------------------------------------------------------------------------
 _C.TASK.COLLISIONS = CN()
 _C.TASK.COLLISIONS.TYPE = "Collisions"
@@ -115,22 +115,22 @@ _C.SIMULATOR.TURN_ANGLE = 10  # angle to rotate left or right in degrees
 _C.SIMULATOR.TILT_ANGLE = 15  # angle to tilt the camera up or down in degrees
 _C.SIMULATOR.DEFAULT_AGENT_ID = 0
 # -----------------------------------------------------------------------------
-# # SENSORS
+# SIMULATOR SENSORS
 # -----------------------------------------------------------------------------
-SENSOR = CN()
-SENSOR.HEIGHT = 480
-SENSOR.WIDTH = 640
-SENSOR.HFOV = 90  # horizontal field of view in degrees
-SENSOR.POSITION = [0, 1.25, 0]
+SIMULATOR_SENSOR = CN()
+SIMULATOR_SENSOR.HEIGHT = 480
+SIMULATOR_SENSOR.WIDTH = 640
+SIMULATOR_SENSOR.HFOV = 90  # horizontal field of view in degrees
+SIMULATOR_SENSOR.POSITION = [0, 1.25, 0]
 # -----------------------------------------------------------------------------
-# # RGB SENSOR
+# RGB SENSOR
 # -----------------------------------------------------------------------------
-_C.SIMULATOR.RGB_SENSOR = SENSOR.clone()
+_C.SIMULATOR.RGB_SENSOR = SIMULATOR_SENSOR.clone()
 _C.SIMULATOR.RGB_SENSOR.TYPE = "HabitatSimRGBSensor"
 # -----------------------------------------------------------------------------
 # DEPTH SENSOR
 # -----------------------------------------------------------------------------
-_C.SIMULATOR.DEPTH_SENSOR = SENSOR.clone()
+_C.SIMULATOR.DEPTH_SENSOR = SIMULATOR_SENSOR.clone()
 _C.SIMULATOR.DEPTH_SENSOR.TYPE = "HabitatSimDepthSensor"
 _C.SIMULATOR.DEPTH_SENSOR.MIN_DEPTH = 0
 _C.SIMULATOR.DEPTH_SENSOR.MAX_DEPTH = 10
@@ -138,7 +138,7 @@ _C.SIMULATOR.DEPTH_SENSOR.NORMALIZE_DEPTH = True
 # -----------------------------------------------------------------------------
 # SEMANTIC SENSOR
 # -----------------------------------------------------------------------------
-_C.SIMULATOR.SEMANTIC_SENSOR = SENSOR.clone()
+_C.SIMULATOR.SEMANTIC_SENSOR = SIMULATOR_SENSOR.clone()
 _C.SIMULATOR.SEMANTIC_SENSOR.TYPE = "HabitatSimSemanticSensor"
 # -----------------------------------------------------------------------------
 # AGENT
@@ -163,6 +163,40 @@ _C.SIMULATOR.AGENTS = ["AGENT_0"]
 _C.SIMULATOR.HABITAT_SIM_V0 = CN()
 _C.SIMULATOR.HABITAT_SIM_V0.GPU_DEVICE_ID = 0
 # -----------------------------------------------------------------------------
+# PYROBOT
+# -----------------------------------------------------------------------------
+_C.PYROBOT = CN()
+_C.PYROBOT.ROBOTS = ["locobot"]  # types of robots supported
+_C.PYROBOT.ROBOT = "locobot"
+_C.PYROBOT.SENSORS = ["RGB_SENSOR", "DEPTH_SENSOR"]
+_C.PYROBOT.BASE_CONTROLLER = "proportional"
+# -----------------------------------------------------------------------------
+# SENSORS
+# -----------------------------------------------------------------------------
+PYROBOT_SENSOR = CN()
+PYROBOT_SENSOR.HEIGHT = 480
+PYROBOT_SENSOR.WIDTH = 640
+# -----------------------------------------------------------------------------
+# RGB SENSOR
+# -----------------------------------------------------------------------------
+_C.PYROBOT.RGB_SENSOR = PYROBOT_SENSOR.clone()
+_C.PYROBOT.RGB_SENSOR.TYPE = "PyRobotRGBSensor"
+# -----------------------------------------------------------------------------
+# DEPTH SENSOR
+# -----------------------------------------------------------------------------
+_C.PYROBOT.DEPTH_SENSOR = PYROBOT_SENSOR.clone()
+_C.PYROBOT.DEPTH_SENSOR.TYPE = "PyRobotDepthSensor"
+_C.PYROBOT.DEPTH_SENSOR.MIN_DEPTH = 0
+_C.PYROBOT.DEPTH_SENSOR.MAX_DEPTH = 5
+_C.PYROBOT.DEPTH_SENSOR.NORMALIZE_DEPTH = True
+# -----------------------------------------------------------------------------
+# ACTIONS LOCOBOT
+# -----------------------------------------------------------------------------
+_C.PYROBOT.LOCOBOT = CN()
+_C.PYROBOT.LOCOBOT.BASE_ACTIONS = ["go_to_relative", "go_to_absolute"]
+_C.PYROBOT.LOCOBOT.CAMERA_ACTIONS = ["set_pan", "set_tilt", "set_pan_tilt"]
+# TODO(akadian): add support for Arm actions
+# -----------------------------------------------------------------------------
 # DATASET
 # -----------------------------------------------------------------------------
 _C.DATASET = CN()
diff --git a/habitat/core/simulator.py b/habitat/core/simulator.py
index 7effea5e4..c0a68b27b 100644
--- a/habitat/core/simulator.py
+++ b/habitat/core/simulator.py
@@ -457,6 +457,7 @@ class Simulator:
     def close(self) -> None:
         raise NotImplementedError
 
+    # TODO(akadian): move the below away from abstract class to HabitatSim class
     @property
     def index_stop_action(self):
         return SimulatorActions.STOP
diff --git a/habitat/sims/pyrobot/pyrobot.py b/habitat/sims/pyrobot/pyrobot.py
index 6d48cf57d..ca1d48058 100644
--- a/habitat/sims/pyrobot/pyrobot.py
+++ b/habitat/sims/pyrobot/pyrobot.py
@@ -4,113 +4,159 @@
 # This source code is licensed under the MIT license found in the
 # LICENSE file in the root directory of this source tree.
 
+from typing import Any
+from gym import Space, spaces
+
 from habitat.core.registry import registry
 from habitat.core.simulator import (
     Config,
+    DepthSensor,
+    RGBSensor,
     SensorSuite,
     Simulator,
-    Space
 )
 import pyrobot
 import numpy as np
 
+# TODO(akadian): remove the below pyrobot hack
+import sys
+ros_path = '/opt/ros/kinetic/lib/python2.7/dist-packages'
+if ros_path in sys.path:
+    sys.path.remove(ros_path)
+import cv2
+sys.path.append(ros_path)
+
+
+@registry.register_sensor
+class PyRobotRGBSensor(RGBSensor):
+    def __init__(self, config):
+        super().__init__(config=config)
+
+    def _get_observation_space(self, *args: Any, **kwargs: Any):
+        return spaces.Box(
+            low=0,
+            high=255,
+            shape=(self.config.HEIGHT, self.config.WIDTH, 3),
+            dtype=np.uint8,
+        )
+
+    def get_observation(self, robot_obs):
+        obs = robot_obs.get(self.uuid, None)
+
+        assert obs is not None, "Invalid observation for {} sensor".format(self.uuid)
+
+        if obs.shape != self.observation_space.shape:
+            obs = cv2.resize(obs, (self.observation_space.shape[1], self.observation_space.shape[0]))
+
+        return obs
+
+
+@registry.register_sensor
+class PyRobotDepthSensor(DepthSensor):
+    def __init__(self, config):
+        if config.NORMALIZE_DEPTH:
+            self.min_depth_value = 0
+            self.max_depth_value = 1
+        else:
+            self.min_depth_value = config.MIN_DEPTH
+            self.max_depth_value = config.MAX_DEPTH
+
+        super().__init__(config=config)
 
-# TODO(akadian): make MAX_DEPTH a parameter of the class
-MAX_DEPTH = 5.0
+    def _get_observation_space(self, *args: Any, **kwargs: Any):
+        return spaces.Box(
+            low=self.min_depth_value,
+            high=self.max_depth_value,
+            shape=(self.config.HEIGHT, self.config.WIDTH, 1),
+            dtype=np.float32,
+        )
+
+    def get_observation(self, robot_obs):
+        obs = robot_obs.get(self.uuid, None)
+
+        assert obs is not None, "Invalid observation for {} sensor".format(self.uuid)
+
+        if obs.shape != self.observation_space.shape:
+            obs = cv2.resize(obs, (self.observation_space.shape[1], self.observation_space.shape[0]))
+
+        obs = np.clip(obs, self.config.MIN_DEPTH, self.config.MAX_DEPTH)
+        if self.config.NORMALIZE_DEPTH:
+            # normalize depth observations to [0, 1]
+            obs = (obs - self.config.MIN_DEPTH) / (self.config.MAX_DEPTH - self.config.MIN_DEPTH)
+
+        obs = np.expand_dims(obs, axis=2)  # make depth observations a 3D array
+
+        return obs
 
 
 @registry.register_simulator(name="PyRobot-v0")
 class PyRobot(Simulator):
     def __init__(self, config: Config) -> None:
+        self._config = config
+
+        robot_sensors = []
+        for sensor_name in self._config.SENSORS:
+            sensor_cfg = getattr(self._config, sensor_name)
+            sensor_type = registry.get_sensor(sensor_cfg.TYPE)
+
+            assert sensor_type is not None, "invalid sensor type {}".format(
+                sensor_cfg.TYPE
+            )
+            robot_sensors.append(sensor_type(sensor_cfg))
+        self._sensor_suite = SensorSuite(robot_sensors)
+
         config_pyrobot = {
-            "base_controller": "proportional"
+            "base_controller": self._config.BASE_CONTROLLER
         }
 
-        # self._forward_step_change = [0.25, 0, 0]
-        self._angle_step = (10 / 180) * np.pi
-        # self._left_step_change = [0, 0, self._angle_step]
-        # self._right_step_change = [0, 0, -self._angle_step]
+        assert self._config.ROBOT in self._config.ROBOTS, "Invalid robot type {}".format(self._config.ROBOT)
+        self._robot_config = getattr(
+            self._config,
+            self._config.ROBOT.upper()
+        )
+
+        self._robot = pyrobot.Robot(self._config.ROBOT, base_config=config_pyrobot)
 
-        self._robot = pyrobot.Robot("locobot", base_config=config_pyrobot)
+    def _degree_to_radian(self, degrees):
+        return (degrees / 180) * np.pi
+
+    def _robot_obs(self):
+        return {
+            "rgb": self._robot.camera.get_rgb(),
+            "depth": self._robot.camera.get_depth(),
+        }
 
     @property
     def sensor_suite(self) -> SensorSuite:
         return self._sensor_suite
 
     @property
-    def action_space(self) -> Space:
-        return self._action_space
-
-    def reset(self):
-        self._robot.camera.reset()
-        rgb_observation = self.rgb_observation()
-        depth_observation = self.depth_observation()
+    def base(self):
+        return self._robot.base
 
-        return {
-            "rgb": rgb_observation,
-            "depth": depth_observation,
-        }
+    @property
+    def camera(self):
+        return self._robot.camera
 
-    def _forward(self):
-        target_position = [0.25, 0, 0]
-        # TODO(akadian): see if there is a command to check if the step was successful
-        self._robot.base.go_to_relative(
-            target_position,
-            smooth=False,
-            close_loop=True
-        )
+    # TODO(akadian): add action space support.
 
-    def _left(self):
-        target_position = [0, 0, self._angle_step]
-        self._robot.base.go_to_relative(
-            target_position,
-            smooth=False,
-            close_loop=True
-        )
+    def reset(self):
+        self._robot.camera.reset()
 
-    def _right(self):
-        target_position = [0, 0, -self._angle_step]
-        self._robot.base.go_to_relative(
-            target_position,
-            smooth=False,
-            close_loop=True
-        )
+        observations = self._sensor_suite.get_observations(self._robot_obs())
+        return observations
 
-    def step(self, action):
-        if action == 0:
-            # forward
-            self._forward()
-        elif action == 1:
-            # left
-            self._left()
-        elif action == 2:
-            # right
-            self._right()
+    def step(self, action, action_params):
+        if action in self._robot_config.BASE_ACTIONS:
+            getattr(self._robot.base, action)(**action_params)
+        elif action in self._robot_config.CAMERA_ACTIONS:
+            getattr(self._robot.camera, action)(**action_params)
         else:
-            raise ValueError
-
-        rgb_observation = self.rgb_observation()
-        depth_observation = self.depth_observation()
-
-        return {
-            "rgb": rgb_observation,
-            "depth": depth_observation,
-        }
-
-        # TODO(akadian): expose a method to just get the observations,
-        # this will be useful for the case if the state of robot
-        # changes due to external factors.
+            raise ValueError("Invalid action {}".format(action))
 
-    def rgb_observation(self):
-        rgb = self._robot.camera.get_rgb()
-        return rgb
+        observations = self._sensor_suite.get_observations(self._robot_obs())
 
-    def depth_observation(self):
-        depth = self._robot.camera.get_depth()
-        depth = depth / 1000  # convert from mm to m
-        depth[depth > MAX_DEPTH] = MAX_DEPTH
-        depth = depth / MAX_DEPTH  # scale to [0, 1]
-        return depth
+        return observations
 
-    def close(self):
-        raise NotImplementedError
+    def seed(self, seed: int) -> None:
+        raise NotImplementedError("No support for seeding in reality")
diff --git a/simple_pyrobot.py b/simple_pyrobot.py
deleted file mode 100644
index ed3a35e8c..000000000
--- a/simple_pyrobot.py
+++ /dev/null
@@ -1,19 +0,0 @@
-from habitat.sims import make_sim
-from PIL import Image
-
-
-def main():
-    reality = make_sim(
-        id_sim="PyRobot-v0", config=None
-    )
-    print("Reality created")
-
-    observations = reality.reset()
-    observation = reality.step(0)
-
-    print("RGB:", observations["rgb"].shape)
-    print("Depth:", observations["depth"].shape)
-
-
-if __name__ == "__main__":
-    main()
-- 
GitLab
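
Example usage (illustrative, not part of the patch): a minimal sketch of driving the new
PyRobot simulator through the (action, action_params) step interface added above, in the
spirit of the deleted simple_pyrobot.py. It assumes habitat's get_config() exposes the new
PYROBOT config node; the keyword-argument names (xyt_position, smooth, close_loop, tilt)
are assumed to match PyRobot's LoCoBot base/camera API and should be checked against the
installed pyrobot version before running on a robot.

    from habitat.config.default import get_config
    from habitat.sims import make_sim


    def main():
        config = get_config()
        # The simulator consumes the PYROBOT config node introduced in this patch.
        reality = make_sim(id_sim="PyRobot-v0", config=config.PYROBOT)

        observations = reality.reset()
        print("RGB:", observations["rgb"].shape)
        print("Depth:", observations["depth"].shape)

        # Base action: drive ~0.25 m forward; the kwargs are forwarded verbatim to
        # pyrobot's base.go_to_relative via getattr(robot.base, action)(**action_params).
        observations = reality.step(
            "go_to_relative",
            {"xyt_position": [0.25, 0.0, 0.0], "smooth": False, "close_loop": True},
        )

        # Camera action: tilt the camera via camera.set_tilt (assumed kwarg name "tilt").
        observations = reality.step("set_tilt", {"tilt": 0.2})


    if __name__ == "__main__":
        main()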