diff --git a/omnigibson/__init__.py b/omnigibson/__init__.py index b25689a46e..3412380d80 100644 --- a/omnigibson/__init__.py +++ b/omnigibson/__init__.py @@ -86,6 +86,7 @@ def create_app(): from omni.isaac.core.utils.extensions import enable_extension enable_extension("omni.flowusd") enable_extension("omni.particle.system.bundle") + enable_extension("omni.kit.xr.profile.vr") # Additional import for windows if os.name == "nt": diff --git a/omnigibson/examples/xr/hand_tracking_demo.py b/omnigibson/examples/xr/hand_tracking_demo.py new file mode 100644 index 0000000000..a5b556d5f9 --- /dev/null +++ b/omnigibson/examples/xr/hand_tracking_demo.py @@ -0,0 +1,105 @@ +""" +Example script for vr system. +""" +import omnigibson as og +from omnigibson.utils.xr_utils import VRSys +from omnigibson.utils.ui_utils import choose_from_options + +# You can set this to True to visualize the landmarks of the hands! +DEBUG_MODE = True + +ROBOTS = { + "Behaviorbot": "Humanoid robot with two hands (default)", + "FrankaAllegro": "Franka Panda with Allegro hand", +} + +def main(): + robot_name = choose_from_options(options=ROBOTS, name="robot") + + # Create the config for generating the environment we want + scene_cfg = {"type": "Scene"} + robot0_cfg = { + "type": robot_name, + "obs_modalities": ["rgb", "depth", "normal", "scan", "occupancy_grid"], + "action_normalize": False, + } + object_cfg = [ + { + "type": "DatasetObject", + "prim_path": "/World/breakfast_table", + "name": "breakfast_table", + "category": "breakfast_table", + "model": "kwmfdg", + "bounding_box": [2, 1, 0.4], + "position": [0.8, 0, 0.3], + "orientation": [0, 0, 0.707, 0.707], + }, + { + "type": "DatasetObject", + "prim_path": "/World/apple", + "name": "apple", + "category": "apple", + "model": "omzprq", + "position": [0.6, 0.1, 0.5], + }, + { + "type": "DatasetObject", + "prim_path": "/World/banana", + "name": "banana", + "category": "banana", + "model": "znakxm", + "position": [0.6, -0.1, 0.5], + }, + ] + if DEBUG_MODE: + # Add the marker to visualize hand tracking waypoints + object_cfg.extend([{ + "type": "PrimitiveObject", + "prim_path": f"/World/marker_{i}", + "name": f"marker_{i}", + "primitive_type": "Cube", + "size": 0.01, + "visual_only": True, + "rgba": [0.0, 1.0, 0.0, 1.0], + } for i in range(52)]) + + cfg = dict(scene=scene_cfg, robots=[robot0_cfg], objects=object_cfg) + # Create the environment + env = og.Environment(configs=cfg, action_timestep=1/60., physics_timestep=1/240.) 
+ env.reset() + # Start vrsys + vr_robot = env.robots[0] + vrsys = VRSys(vr_robot=vr_robot, use_hand_tracking=True) + vrsys.start() + # set headset position to be 1m above ground and facing +x direction + head_init_transform = vrsys.og2xr(pos=[0, 0, 1], orn=[0, 0, 0, 1]) + vrsys.vr_profile.set_physical_world_to_world_anchor_transform_to_match_xr_device(head_init_transform, vrsys.hmd) + + # get the objects + if DEBUG_MODE: + markers = [env.scene.object_registry("name", f"marker_{i}") for i in range(52)] + + # main simulation loop + for _ in range(10000): + if og.sim.is_playing(): + vr_data = vrsys.step() + if DEBUG_MODE: + if "left" in vr_data["hand_data"] and "raw" in vr_data["hand_data"]["left"]: + for i in range(26): + pos = vr_data["hand_data"]["left"]["raw"]["pos"][i] + orn = vr_data["hand_data"]["left"]["raw"]["orn"][i] + markers[i].set_position_orientation(pos, orn) + if "right" in vr_data["hand_data"] and "raw" in vr_data["hand_data"]["right"]: + for i in range(26): + pos = vr_data["hand_data"]["right"]["raw"]["pos"][i] + orn = vr_data["hand_data"]["right"]["raw"]["orn"][i] + markers[i + 26].set_position_orientation(pos, orn) + action = vr_robot.gen_action_from_vr_data(vr_data) + env.step(action) + + # Shut down the environment cleanly at the end + vrsys.stop() + env.close() + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/omnigibson/examples/xr/robot_teleoperate_demo.py b/omnigibson/examples/xr/robot_teleoperate_demo.py new file mode 100644 index 0000000000..41997bf322 --- /dev/null +++ b/omnigibson/examples/xr/robot_teleoperate_demo.py @@ -0,0 +1,140 @@ +""" +Example script for using VR controller to teleoperate a robot. +""" +import omnigibson as og +from omnigibson.utils.xr_utils import VRSys +from omnigibson.utils.ui_utils import choose_from_options + +ROBOTS = { + "FrankaPanda": "Franka Emika Panda (default)", + "Fetch": "Mobile robot with one arm", + "Tiago": "Mobile robot with two arms", +} + + +def main(): + robot_name = choose_from_options(options=ROBOTS, name="robot") + # Create the config for generating the environment we want + scene_cfg = dict() + scene_cfg["type"] = "Scene" + # Add the robot we want to load + robot0_cfg = { + "type": robot_name, + "obs_modalities": ["rgb", "depth", "seg_instance", "normal", "scan", "occupancy_grid"], + "action_normalize": False, + "grasping_mode": "assisted", + "controller_config": { + "arm_0": { + "name": "InverseKinematicsController", + "mode": "pose_absolute_ori", + "motor_type": "position", + }, + "gripper_0": { + "name": "MultiFingerGripperController", + "command_input_limits": (0.0, 1.0), + "mode": "smooth", + "inverted": True + } + } + } + object_cfg = [ + { + "type": "DatasetObject", + "prim_path": "/World/breakfast_table", + "name": "breakfast_table", + "category": "breakfast_table", + "model": "kwmfdg", + "bounding_box": [2, 1, 0.4], + "position": [0.8, 0, 0.3], + "orientation": [0, 0, 0.707, 0.707], + }, + { + "type": "DatasetObject", + "prim_path": "/World/frail", + "name": "frail", + "category": "frail", + "model": "zmjovr", + "scale": [2, 2, 2], + "position": [0.6, -0.3, 0.5], + }, + { + "type": "DatasetObject", + "prim_path": "/World/toy_figure1", + "name": "toy_figure1", + "category": "toy_figure", + "model": "issvzv", + "scale": [0.75, 0.75, 0.75], + "position": [0.6, 0, 0.5], + }, + { + "type": "DatasetObject", + "prim_path": "/World/toy_figure2", + "name": "toy_figure2", + "category": "toy_figure", + "model": "nncqfn", + "scale": [0.75, 0.75, 0.75], + "position": [0.6, 0.1, 0.5], + }, 
+ { + "type": "DatasetObject", + "prim_path": "/World/toy_figure3", + "name": "toy_figure3", + "category": "toy_figure", + "model": "eulekw", + "scale": [0.25, 0.25, 0.25], + "position": [0.6, 0.2, 0.5], + }, + { + "type": "DatasetObject", + "prim_path": "/World/toy_figure4", + "name": "toy_figure4", + "category": "toy_figure", + "model": "yxiksm", + "scale": [0.25, 0.25, 0.25], + "position": [0.6, 0.3, 0.5], + }, + { + "type": "DatasetObject", + "prim_path": "/World/toy_figure5", + "name": "toy_figure5", + "category": "toy_figure", + "model": "wvpqbf", + "scale": [0.75, 0.75, 0.75], + "position": [0.6, 0.4, 0.5], + }, + ] + cfg = dict(scene=scene_cfg, robots=[robot0_cfg], objects=object_cfg) + + # Create the environment + env = og.Environment(configs=cfg, action_timestep=1/60., physics_timestep=1/240.) + env.reset() + # update viewer camera pose + og.sim.viewer_camera.set_position_orientation([-0.22, 0.99, 1.09], [-0.14, 0.47, 0.84, -0.23]) + + # Start vrsys + vr_robot = env.robots[0] + vrsys = VRSys(system="SteamVR", vr_robot=vr_robot, show_controller=True, disable_display_output=True, align_anchor_to_robot_base=True) + vrsys.start() + # tracker variable of whether the robot is attached to the VR system + prev_robot_attached = False + # main simulation loop + for _ in range(10000): + if og.sim.is_playing(): + vr_data = vrsys.step() + if vr_data["robot_attached"] == True and prev_robot_attached == False: + if vr_robot.model_name == "Tiago": + robot_eef_position = vr_robot.links[vr_robot.eef_link_names["right"]].get_position() + else: + robot_eef_position = vr_robot.links[vr_robot.eef_link_names[vr_robot.default_arm]].get_position() + base_rotation = vr_robot.get_orientation() + vrsys.snap_device_to_robot_eef(robot_eef_position=robot_eef_position, base_rotation=base_rotation) + else: + action = vr_robot.gen_action_from_vr_data(vr_data) + env.step(action) + prev_robot_attached = vr_data["robot_attached"] + # Shut down the environment cleanly at the end + vrsys.stop() + env.close() + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/omnigibson/examples/xr/vr_simple_demo.py b/omnigibson/examples/xr/vr_simple_demo.py new file mode 100644 index 0000000000..fae9dc43bd --- /dev/null +++ b/omnigibson/examples/xr/vr_simple_demo.py @@ -0,0 +1,45 @@ +""" +Example script for interacting with OmniGibson scenes with VR and Behaviorbot. +""" +import omnigibson as og +from omnigibson.utils.xr_utils import VRSys + +def main(): + # Create the config for generating the environment we want + scene_cfg = dict() + scene_cfg["type"] = "InteractiveTraversableScene" + scene_cfg["scene_model"] = "Rs_int" + robot0_cfg = dict() + robot0_cfg["type"] = "Behaviorbot" + robot0_cfg["controller_config"] = { + "gripper_0": {"command_input_limits": "default"}, + "gripper_1": {"command_input_limits": "default"}, + } + cfg = dict(scene=scene_cfg, robots=[robot0_cfg]) + + # Create the environment + env = og.Environment(configs=cfg, action_timestep=1/60., physics_timestep=1/240.) 
+ env.reset() + # start vrsys + vr_robot = env.robots[0] + vrsys = VRSys(system="SteamVR", vr_robot=vr_robot, enable_touchpad_movement=True) + vrsys.start() + # set headset position to be 1m above ground and facing +x + head_init_transform = vrsys.og2xr(pos=[0, 0, 1], orn=[0, 0, 0, 1]) + vrsys.vr_profile.set_physical_world_to_world_anchor_transform_to_match_xr_device(head_init_transform, vrsys.hmd) + + # main simulation loop + for _ in range(10000): + if og.sim.is_playing(): + # step the VR system to get the latest data from VR runtime + vr_data = vrsys.step() + # generate robot action and step the environment + action = vr_robot.gen_action_from_vr_data(vr_data) + env.step(action) + + # Shut down the environment cleanly at the end + vrsys.stop() + env.close() + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/omnigibson/robots/__init__.py b/omnigibson/robots/__init__.py index 17b4af9a08..1ea44d8cb4 100644 --- a/omnigibson/robots/__init__.py +++ b/omnigibson/robots/__init__.py @@ -11,3 +11,4 @@ from omnigibson.robots.two_wheel_robot import TwoWheelRobot from omnigibson.robots.franka import FrankaPanda from omnigibson.robots.franka_allegro import FrankaAllegro +from omnigibson.robots.behaviorbot import Behaviorbot diff --git a/omnigibson/robots/behaviorbot.py b/omnigibson/robots/behaviorbot.py new file mode 100644 index 0000000000..dfdc49ea0f --- /dev/null +++ b/omnigibson/robots/behaviorbot.py @@ -0,0 +1,693 @@ +import os +import itertools +from typing import List, Tuple, Iterable +from collections import OrderedDict +from abc import ABC + +import numpy as np +from scipy.spatial.transform import Rotation as R +from pxr import Gf + +import omnigibson as og +from omnigibson.macros import gm +from omnigibson.robots.locomotion_robot import LocomotionRobot +from omnigibson.robots.manipulation_robot import ManipulationRobot, GraspingPoint +from omnigibson.robots.active_camera_robot import ActiveCameraRobot +from omnigibson.objects.usd_object import USDObject +import omnigibson.utils.transform_utils as T +from omnigibson.controllers.controller_base import ControlType + +COMPONENT_SUFFIXES = ['x', 'y', 'z', 'rx', 'ry', 'rz'] + +HEAD_TO_BODY_OFFSET = ([0, 0, -0.4], [0, 0, 0, 1]) +RH_TO_BODY_OFFSET = ([0, 0.15, -0.4], T.euler2quat([0, np.pi, np.pi / 2])) +LH_TO_BODY_OFFSET = ([0, -0.15, -0.4], T.euler2quat([0, np.pi, -np.pi / 2])) + +# Body parameters +BODY_HEIGHT_RANGE = (0, 2) # meters. The body is allowed to clip the floor by about a half. + +# Hand parameters +HAND_GHOST_HAND_APPEAR_THRESHOLD = 0.15 +THUMB_2_POS = [0, -0.02, -0.05] +THUMB_1_POS = [0, -0.015, -0.02] +PALM_CENTER_POS = [0, -0.04, 0.01] +PALM_BASE_POS = [0, 0, 0.015] +FINGER_TIP_POS = [0, -0.025, -0.055] + +# Assisted grasping parameters +ASSIST_FRACTION = 1.0 +ARTICULATED_ASSIST_FRACTION = 0.7 +MIN_ASSIST_FORCE = 0 +MAX_ASSIST_FORCE = 500 +ASSIST_FORCE = MIN_ASSIST_FORCE + (MAX_ASSIST_FORCE - MIN_ASSIST_FORCE) * ASSIST_FRACTION +TRIGGER_FRACTION_THRESHOLD = 0.5 +CONSTRAINT_VIOLATION_THRESHOLD = 0.1 +ATTACHMENT_BUTTON_TIME_THRESHOLD = 1 + +# Hand link index constants +PALM_LINK_NAME = "palm" +FINGER_MID_LINK_NAMES = ("Tproximal", "Iproximal", "Mproximal", "Rproximal", "Pproximal") +FINGER_TIP_LINK_NAMES = ("Tmiddle", "Imiddle", "Mmiddle", "Rmiddle", "Pmiddle") +THUMB_LINK_NAME = "Tmiddle" + + +class Behaviorbot(ManipulationRobot, LocomotionRobot, ActiveCameraRobot): + """ + A humanoid robot that can be used in VR as an avatar. It has two hands, a body and a head with two cameras. 
+ """ + + def __init__( + self, + # Shared kwargs in hierarchy + name, + prim_path=None, + class_id=None, + uuid=None, + scale=None, + visible=True, + fixed_base=True, + visual_only=False, + self_collisions=True, + load_config=None, + + # Unique to USDObject hierarchy + abilities=None, + + # Unique to ControllableObject hierarchy + control_freq=None, + controller_config=None, + action_type="continuous", + action_normalize=False, + reset_joint_pos=None, + + # Unique to BaseRobot + obs_modalities="rgb", + proprio_obs="default", + + # Unique to ManipulationRobot + grasping_mode="assisted", + + # unique to Behaviorbot + agent_id=1, + use_body=True, + use_ghost_hands=True, + + **kwargs + ): + """ + Initializes Behaviorbot + Args: + agent_id (int): unique id of the agent - used in multi-user VR + image_width (int): width of each camera + image_height (int): height of each camera + use_body (bool): whether to use body + use_ghost_hands (bool): whether to use ghost hand + """ + + super(Behaviorbot, self).__init__( + prim_path=prim_path, + name=name, + class_id=class_id, + uuid=uuid, + scale=scale, + visible=visible, + fixed_base=fixed_base, + visual_only=visual_only, + self_collisions=self_collisions, + load_config=load_config, + abilities=abilities, + control_freq=control_freq, + controller_config=controller_config, + action_type=action_type, + action_normalize=action_normalize, + reset_joint_pos=reset_joint_pos, + obs_modalities=obs_modalities, + proprio_obs=proprio_obs, + grasping_mode=grasping_mode, + grasping_direction="upper", + **kwargs, + ) + + # Basic parameters + self.agent_id = agent_id + self.use_body = use_body + self.use_ghost_hands = use_ghost_hands + self._world_base_fixed_joint_prim = None + + # Activation parameters + self.first_frame = True + # whether hand or body is in contact with other objects (we need this since checking contact list is costly) + self.part_is_in_contact = {hand_name: False for hand_name in self.arm_names + ["body"]} + + # Whether the VR system is actively hooked up to the VR agent. 
+ self.vr_attached = False + self._vr_attachment_button_press_timestamp = None + + # setup eef parts + self.parts = OrderedDict() + self.parts["lh"] = BRPart( + name="lh", parent=self, prim_path="lh_palm", eef_type="hand", + offset_to_body=LH_TO_BODY_OFFSET, **kwargs + ) + + self.parts["rh"] = BRPart( + name="rh", parent=self, prim_path="rh_palm", eef_type="hand", + offset_to_body=RH_TO_BODY_OFFSET, **kwargs + ) + + self.parts["head"] = BRPart( + name="head", parent=self, prim_path="eye", eef_type="head", + offset_to_body=HEAD_TO_BODY_OFFSET, **kwargs + ) + + @property + def usd_path(self): + return os.path.join(gm.ASSET_PATH, "models/behaviorbot/usd/Behaviorbot.usd") + + @property + def model_name(self): + return "Behaviorbot" + + @property + def n_arms(self): + return 2 + + @property + def arm_names(self): + return ["lh", "rh"] + + @property + def eef_link_names(self): + dic = {arm: f"{arm}_{PALM_LINK_NAME}" for arm in self.arm_names} + dic["head"] = "head" + return dic + + @property + def arm_link_names(self): + """The head counts as a arm since it has the same 33 joint configuration""" + return {arm: [f"{arm}_{component}" for component in COMPONENT_SUFFIXES] for arm in self.arm_names + ['head']} + + @property + def finger_link_names(self): + return { + arm: [f"{arm}_{link_name}" for link_name in itertools.chain(FINGER_MID_LINK_NAMES, FINGER_TIP_LINK_NAMES)] + for arm in self.arm_names + } + + @property + def base_joint_names(self): + return [f"base_{component}_joint" for component in COMPONENT_SUFFIXES] + + @property + def arm_joint_names(self): + """The head counts as a arm since it has the same 33 joint configuration""" + return {eef: [f"{eef}_{component}_joint" for component in COMPONENT_SUFFIXES] for eef in self.arm_names + ["head"]} + + @property + def finger_joint_names(self): + return { + arm: ( + # palm-to-proximal joints. + [f"{arm}_{to_link}__{arm}_{PALM_LINK_NAME}" for to_link in FINGER_MID_LINK_NAMES] + + + # proximal-to-tip joints. 
+ [f"{arm}_{to_link}__{arm}_{from_link}" for from_link, to_link in zip(FINGER_MID_LINK_NAMES, FINGER_TIP_LINK_NAMES)] + ) + for arm in self.arm_names + } + + @property + def base_control_idx(self): + # TODO: might need to refactor out joints + joints = list(self.joints.keys()) + return tuple(joints.index(joint) for joint in self.base_joint_names) + + @property + def arm_control_idx(self): + joints = list(self.joints.keys()) + return { + arm: [joints.index(f"{arm}_{component}_joint") for component in COMPONENT_SUFFIXES] + for arm in self.arm_names + } + + @property + def gripper_control_idx(self): + joints = list(self.joints.values()) + return {arm: [joints.index(joint) for joint in arm_joints] for arm, arm_joints in self.finger_joints.items()} + + @property + def camera_control_idx(self): + joints = list(self.joints.keys()) + return [joints.index(f"head_{component}_joint") for component in COMPONENT_SUFFIXES] + + @property + def default_joint_pos(self): + return np.zeros(self.n_joints) + + @property + def controller_order(self): + controllers = ["base", "camera"] + for arm_name in self.arm_names: + controllers += [f"arm_{arm_name}", f"gripper_{arm_name}"] + return controllers + + @property + def _default_controllers(self): + controllers = { + "base": "JointController", + "camera": "JointController" + } + controllers.update({f"arm_{arm_name}": "JointController" for arm_name in self.arm_names}) + controllers.update({f"gripper_{arm_name}": "MultiFingerGripperController" for arm_name in self.arm_names}) + return controllers + + @property + def _default_base_joint_controller_config(self): + return { + "name": "JointController", + "control_freq": self._control_freq, + "control_limits": self.control_limits, + "use_delta_commands": False, + "motor_type": "position", + "dof_idx": self.base_control_idx, + "command_input_limits": None, + } + + @property + def _default_arm_joint_controller_configs(self): + dic = {} + for arm in self.arm_names: + dic[arm] = { + "name": "JointController", + "control_freq": self._control_freq, + "motor_type": "position", + "control_limits": self.control_limits, + "dof_idx": self.arm_control_idx[arm], + "command_input_limits": None, + "use_delta_commands": False, + } + return dic + + @property + def _default_gripper_multi_finger_controller_configs(self): + dic = {} + for arm in self.arm_names: + dic[arm] = { + "name": "MultiFingerGripperController", + "control_freq": self._control_freq, + "motor_type": "position", + "control_limits": self.control_limits, + "dof_idx": self.gripper_control_idx[arm], + "command_input_limits": None, + "mode": "independent", + } + return dic + + @property + def _default_camera_joint_controller_config(self): + return { + "name": "JointController", + "control_freq": self._control_freq, + "motor_type": "position", + "control_limits": self.control_limits, + "dof_idx": self.camera_control_idx, + "command_input_limits": None, + "use_delta_commands": False, + } + + @property + def _default_controller_config(self): + controllers = { + "base": {"JointController": self._default_base_joint_controller_config}, + "camera": {"JointController": self._default_camera_joint_controller_config}, + } + controllers.update( + { + f"arm_{arm_name}": {"JointController": self._default_arm_joint_controller_configs[arm_name]} + for arm_name in self.arm_names + } + ) + controllers.update( + { + f"gripper_{arm_name}": { + "MultiFingerGripperController": self._default_gripper_multi_finger_controller_configs[arm_name], + "JointController": 
self._default_gripper_joint_controller_configs[arm_name], + } + for arm_name in self.arm_names + } + ) + return controllers + + def load(self): + prim = super(Behaviorbot, self).load() + for part in self.parts.values(): + part.load() + return prim + + def _post_load(self): + super()._post_load() + + def _create_discrete_action_space(self): + raise ValueError("Behaviorbot does not support discrete actions!") + + def update_controller_mode(self): + super().update_controller_mode() + # overwrite robot params (e.g. damping, stiffess, max_effort) here + # set joint mass and rigid body properties + for link in self.links.values(): + link.ccd_enabled = True + for arm in self.arm_names: + for link in self.finger_link_names[arm]: + self.links[link].mass = 0.02 + self.links["base"].mass = 15 + self.links["lh_palm"].mass = 0.3 + self.links["rh_palm"].mass = 0.3 + + # set base joint properties + for joint_name in self.base_joint_names: + self.joints[joint_name].stiffness = 1e8 + self.joints[joint_name].max_effort = 7500 + + # set arm joint properties + for arm in self.arm_joint_names: + for joint_name in self.arm_joint_names[arm]: + self.joints[joint_name].stiffness = 1e6 + self.joints[joint_name].damping = 1 + self.joints[joint_name].max_effort = 300 + + # set finger joint properties + for arm in self.finger_joint_names: + for joint_name in self.finger_joint_names[arm]: + self.joints[joint_name].stiffness = 1e2 + self.joints[joint_name].damping = 1 + self.joints[joint_name].max_effort = 5 + + @property + def base_footprint_link_name(self): + """ + Name of the actual root link that we are interested in. + """ + return "base" + + @property + def base_footprint_link(self): + """ + Returns: + RigidPrim: base footprint link of this object prim + """ + return self._links[self.base_footprint_link_name] + + def get_position_orientation(self): + # If the simulator is playing, return the pose of the base_footprint link frame + if self._dc is not None and self._dc.is_simulating(): + return self.base_footprint_link.get_position_orientation() + + # Else, return the pose of the robot frame + else: + return super().get_position_orientation() + + def set_position_orientation(self, position=None, orientation=None): + super().set_position_orientation(position, orientation) + # Move the joint frame for the world_base_joint + if self._world_base_fixed_joint_prim is not None: + if position is not None: + self._world_base_fixed_joint_prim.GetAttribute("physics:localPos0").Set(tuple(position)) + if orientation is not None: + self._world_base_fixed_joint_prim.GetAttribute("physics:localRot0").Set(Gf.Quatf(*np.float_(orientation)[[3, 0, 1, 2]])) + + @property + def assisted_grasp_start_points(self): + side_coefficients = {"lh": np.array([1, -1, 1]), "rh": np.array([1, 1, 1])} + return { + arm: [ + GraspingPoint(link_name=f"{arm}_{PALM_LINK_NAME}", position=PALM_BASE_POS), + GraspingPoint(link_name=f"{arm}_{PALM_LINK_NAME}", position=PALM_CENTER_POS * side_coefficients[arm]), + GraspingPoint( + link_name=f"{arm}_{THUMB_LINK_NAME}", position=THUMB_1_POS * side_coefficients[arm] + ), + GraspingPoint( + link_name=f"{arm}_{THUMB_LINK_NAME}", position=THUMB_2_POS * side_coefficients[arm] + ), + ] + for arm in self.arm_names + } + + @property + def assisted_grasp_end_points(self): + side_coefficients = {"lh": np.array([1, -1, 1]), "rh": np.array([1, 1, 1])} + return { + arm: [ + GraspingPoint(link_name=f"{arm}_{finger}", position=FINGER_TIP_POS * side_coefficients[arm]) + for finger in FINGER_TIP_LINK_NAMES + ] + for arm in 
self.arm_names + } + + def update_hand_contact_info(self): + self.part_is_in_contact["body"] = len(self.links["body"].contact_list()) + for hand_name in self.arm_names: + self.part_is_in_contact[hand_name] = len(self.eef_links[hand_name].contact_list()) \ + or np.any([len(finger.contact_list()) for finger in self.finger_links[hand_name]]) + + def deploy_control(self, control, control_type, indices=None, normalized=False): + """ + overwrites controllable_object.deploy_control to make arm revolute joints set_target=False + """ + # Run sanity check + if indices is None: + assert len(control) == len(control_type) == self.n_dof, ( + "Control signals, control types, and number of DOF should all be the same!" + "Got {}, {}, and {} respectively.".format(len(control), len(control_type), self.n_dof) + ) + # Set indices manually so that we're standardized + indices = np.arange(self.n_dof) + else: + assert len(control) == len(control_type) == len(indices), ( + "Control signals, control types, and indices should all be the same!" + "Got {}, {}, and {} respectively.".format(len(control), len(control_type), len(indices)) + ) + + # Standardize normalized input + n_indices = len(indices) + normalized = normalized if isinstance(normalized, Iterable) else [normalized] * n_indices + + # Loop through controls and deploy + # We have to use delicate logic to account for the edge cases where a single joint may contain > 1 DOF + # (e.g.: spherical joint) + cur_indices_idx = 0 + while cur_indices_idx != n_indices: + # Grab the current DOF index we're controlling and find the corresponding joint + joint = self._dof_to_joints[indices[cur_indices_idx]] + cur_ctrl_idx = indices[cur_indices_idx] + joint_dof = joint.n_dof + if joint_dof > 1: + # Run additional sanity checks since the joint has more than one DOF to make sure our controls, + # control types, and indices all match as expected + + # Make sure the indices are mapped correctly + assert indices[cur_indices_idx + joint_dof] == cur_ctrl_idx + joint_dof, \ + "Got mismatched control indices for a single joint!" + # Check to make sure all joints, control_types, and normalized as all the same over n-DOF for the joint + for group_name, group in zip( + ("joints", "control_types", "normalized"), + (self._dof_to_joints, control_type, normalized), + ): + assert len({group[indices[cur_indices_idx + i]] for i in range(joint_dof)}) == 1, \ + f"Not all {group_name} were the same when trying to deploy control for a single joint!" + # Assuming this all passes, we grab the control subvector, type, and normalized value accordingly + ctrl = control[cur_ctrl_idx: cur_ctrl_idx + joint_dof] + else: + # Grab specific control. 
No need to do checks since this is a single value + ctrl = control[cur_ctrl_idx] + + # Deploy control based on type + ctrl_type, norm = control_type[cur_ctrl_idx], normalized[cur_ctrl_idx] # In multi-DOF joint case all values were already checked to be the same + if ctrl_type == ControlType.EFFORT: + joint.set_effort(ctrl, normalized=norm) + elif ctrl_type == ControlType.VELOCITY: + joint.set_vel(ctrl, normalized=norm, drive=True) + elif ctrl_type == ControlType.POSITION: + if "rx" in joint.joint_name or "ry" in joint.joint_name or "rz" in joint.joint_name: + joint.set_pos(ctrl, normalized=norm, drive=False) + else: + joint.set_pos(ctrl, normalized=norm, drive=True) + elif ctrl_type == ControlType.NONE: + # Do nothing + pass + else: + raise ValueError("Invalid control type specified: {}".format(ctrl_type)) + + # Finally, increment the current index based on how many DOFs were just controlled + cur_indices_idx += joint_dof + + def gen_action_from_vr_data(self, vr_data): + """ + Generates an action for the Behaviorbot to perform based on vr data dict. + + Action space (all non-normalized values that will be clipped if they are too large) + Body: + - 6DOF pose - relative to body frame from previous frame + Eye: + - 6DOF pose - relative to body frame (where the body will be after applying this frame's action) + Left hand, right hand (in that order): + - 6DOF pose - relative to body frame (same as above) + - 10DOF gripper joint rotation + + Total size: 44 + """ + # Actions are stored as 1D numpy array + action = np.zeros(44) + # Update body action space + hmd_pos, hmd_orn = vr_data["transforms"]["hmd"] + if np.all(np.equal(hmd_pos, np.zeros(3))) and np.all(np.equal(hmd_orn, np.array([0, 0, 0, 1]))): + des_body_pos, des_body_orn = self.get_position_orientation() + des_body_rpy = R.from_quat(des_body_orn).as_euler("XYZ") + else: + des_body_pos = hmd_pos - np.array([0, 0, 0.45]) + des_body_rpy = np.array([0, 0, R.from_quat(hmd_orn).as_euler("XYZ")[2]]) + des_body_orn = T.euler2quat(des_body_rpy) + action[self.controller_action_idx["base"]] = np.r_[des_body_pos, des_body_rpy] + # Update action space for other VR objects + for part_name, eef_part in self.parts.items(): + # Process local transform adjustments + prev_local_pos, prev_local_orn = eef_part.local_position_orientation + reset = 0 + hand_data = None + if part_name == "head": + part_pos, part_orn = vr_data["transforms"]["hmd"] + else: + hand_name, hand_index = ("left", 0) if part_name == "lh" else ("right", 1) + if "hand_data" in vr_data: + if "raw" in vr_data["hand_data"][hand_name]: + part_pos = vr_data["hand_data"][hand_name]["raw"]["pos"][0] + part_orn = vr_data["hand_data"][hand_name]["raw"]["orn"][0] + hand_data = vr_data["hand_data"][hand_name]["angles"] + else: + part_pos, part_orn = np.zeros(3), np.array([0, 0, 0, 1]) + else: + part_pos, part_orn = vr_data["transforms"]["controllers"][hand_index] + hand_data = vr_data["button_data"][hand_index]["axis"]["trigger"] + reset = vr_data["button_data"][hand_index]["press"]["grip"] + + if np.all(np.equal(part_pos, np.zeros(3))) and np.all(np.equal(part_orn, np.array([0, 0, 0, 1]))): + des_world_part_pos, des_world_part_orn = prev_local_pos, prev_local_orn + else: + # apply eef rotation offset to part transform first + des_world_part_pos = part_pos + des_world_part_orn = T.quat_multiply(part_orn, eef_part.offset_to_body[1]) + + # Get local pose with respect to the new body frame + des_local_part_pos, des_local_part_orn = T.relative_pose_transform( + des_world_part_pos, des_world_part_orn, 
des_body_pos, des_body_orn + ) + # apply shoulder position offset to the part transform to get final destination pose + des_local_part_pos, des_local_part_orn = T.pose_transform( + eef_part.offset_to_body[0], [0, 0, 0, 1], des_local_part_pos, des_local_part_orn + ) + des_part_rpy = R.from_quat(des_local_part_orn).as_euler("XYZ") + controller_name = "camera" if part_name == "head" else "arm_" + part_name + action[self.controller_action_idx[controller_name]] = np.r_[des_local_part_pos, des_part_rpy] + # Process trigger fraction and reset for controllers + if isinstance(hand_data, float): + # controller mode, map trigger fraction from [0, 1] to [-1, 1] range. + action[self.controller_action_idx[f"gripper_{part_name}"]] = hand_data * 2 - 1 + elif hand_data is not None: + # hand tracking mode, compute joint rotations for each independent hand joint + action[self.controller_action_idx[f"gripper_{part_name}"]] = hand_data[:, :2].T.reshape(-1) + # If we reset, action is 1, otherwise 0 + if reset: + self.parts[part_name].set_position_orientation(des_local_part_pos, des_part_rpy) + # update ghost hand if necessary + if self.use_ghost_hands and part_name is not "head": + self.parts[part_name].update_ghost_hands(des_world_part_pos, des_world_part_orn) + return action + + +class BRPart(ABC): + """This is the interface that all Behaviorbot eef parts must implement.""" + + def __init__(self, name: str, parent: Behaviorbot, prim_path: str, eef_type: str, offset_to_body: List[float]) -> None: + """ + Create an object instance with the minimum information of class ID and rendering parameters. + + Args: + name (str): unique name of this BR part + parent (Behaviorbot): the parent BR object + prim_path (str): prim path to the root link of the eef + eef_type (str): type of eef. One of hand, head + offset_to_body (List[float]): relative rotation offset between the shoulder_rz link and the eef link. + """ + self.name = name + self.parent = parent + self.prim_path = prim_path + self.eef_type = eef_type + self.offset_to_body = offset_to_body + + self.ghost_hand = None + self._root_link = None + self._world_position_orientation = ([0, 0, 0], [0, 0, 0, 1]) + + def load(self) -> None: + self._root_link = self.parent.links[self.prim_path] + # setup ghost hand + if self.eef_type == "hand": + gh_name = f"ghost_hand_{self.name}" + self.ghost_hand = USDObject( + prim_path=f"/World/{gh_name}", + usd_path=os.path.join(gm.ASSET_PATH, f"models/behaviorbot/usd/{gh_name}.usd"), + name=gh_name, + scale=0.001, + visible=False, + visual_only=True, + ) + og.sim.import_object(self.ghost_hand) + + @property + def local_position_orientation(self) -> Tuple[List[float], List[float]]: + """ + Get local position and orientation w.r.t. 
to the body + Return: + Tuple[Array[x, y, z], Array[x, y, z, w]] + + """ + return T.relative_pose_transform(*self.world_position_orientation, *self.parent.get_position_orientation()) + + @property + def world_position_orientation(self) -> None: + """ + Get position and orientation in the world space + Return: + Tuple[Array[x, y, z], Array[x, y, z, w]] + """ + return self._root_link.get_position_orientation() + + def set_position_orientation(self, pos: List[float], orn: List[float]) -> None: + """ + Call back function to set the base's position + """ + self.parent.joints[f"{self.name}_x_joint"].set_pos(pos[0], drive=False) + self.parent.joints[f"{self.name}_y_joint"].set_pos(pos[1], drive=False) + self.parent.joints[f"{self.name}_z_joint"].set_pos(pos[2], drive=False) + self.parent.joints[f"{self.name}_rx_joint"].set_pos(orn[0], drive=False) + self.parent.joints[f"{self.name}_ry_joint"].set_pos(orn[1], drive=False) + self.parent.joints[f"{self.name}_rz_joint"].set_pos(orn[2], drive=False) + + def update_ghost_hands(self, pos: List[float], orn: List[float]) -> None: + """ + Updates ghost hand to track real hand and displays it if the real and virtual hands are too far apart. + Args: + pos (List[float]): list of positions [x, y, z] + orn (List[float]): list of rotations [x, y, z, w] + """ + assert self.eef_type == "hand", "ghost hand is only valid for BR hand!" + # Ghost hand tracks real hand whether it is hidden or not + self.ghost_hand.set_position_orientation(pos, orn) + + # If distance between hand and controller is greater than threshold, + # ghost hand appears + dist_to_real_controller = np.linalg.norm(pos - self.world_position_orientation[0]) + should_visible = dist_to_real_controller > HAND_GHOST_HAND_APPEAR_THRESHOLD + + # Only toggle visibility if we are transition from hidden to unhidden, or the other way around + if self.ghost_hand.visible is not should_visible: + self.ghost_hand.visible = should_visible diff --git a/omnigibson/robots/fetch.py b/omnigibson/robots/fetch.py index 5e41af749e..ec14acc86c 100644 --- a/omnigibson/robots/fetch.py +++ b/omnigibson/robots/fetch.py @@ -8,6 +8,7 @@ from omnigibson.robots.two_wheel_robot import TwoWheelRobot from omnigibson.utils.python_utils import assert_valid_key from omnigibson.utils.ui_utils import create_module_logger +from omnigibson.utils.transform_utils import euler2quat from omnigibson.utils.usd_utils import JointType log = create_module_logger(module_name=__name__) @@ -356,10 +357,8 @@ def finger_lengths(self): def assisted_grasp_start_points(self): return { self.default_arm: [ - GraspingPoint(link_name="r_gripper_finger_link", position=[0.04, -0.012, 0.014]), - GraspingPoint(link_name="r_gripper_finger_link", position=[0.04, -0.012, -0.014]), - GraspingPoint(link_name="r_gripper_finger_link", position=[-0.04, -0.012, 0.014]), - GraspingPoint(link_name="r_gripper_finger_link", position=[-0.04, -0.012, -0.014]), + GraspingPoint(link_name="r_gripper_finger_link", position=[0.025, -0.012, 0.0]), + GraspingPoint(link_name="r_gripper_finger_link", position=[-0.025, -0.012, 0.0]), ] } @@ -367,10 +366,8 @@ def assisted_grasp_start_points(self): def assisted_grasp_end_points(self): return { self.default_arm: [ - GraspingPoint(link_name="l_gripper_finger_link", position=[0.04, 0.012, 0.014]), - GraspingPoint(link_name="l_gripper_finger_link", position=[0.04, 0.012, -0.014]), - GraspingPoint(link_name="l_gripper_finger_link", position=[-0.04, 0.012, 0.014]), - GraspingPoint(link_name="l_gripper_finger_link", position=[-0.04, 0.012, -0.014]), 
+ GraspingPoint(link_name="l_gripper_finger_link", position=[0.025, 0.012, 0.0]), + GraspingPoint(link_name="l_gripper_finger_link", position=[-0.025, 0.012, 0.0]), ] } @@ -501,3 +498,13 @@ def arm_workspace_range(self): return { self.default_arm : [np.deg2rad(-45), np.deg2rad(45)] } + + @property + def vr_rotation_offset(self): + return {self.default_arm: euler2quat([0, np.pi / 2, np.pi])} + + def gen_action_from_vr_data(self, vr_data: dict): + action = np.zeros(11) + action[:2] = TwoWheelRobot.gen_action_from_vr_data(self, vr_data) + action[4:] = ManipulationRobot.gen_action_from_vr_data(self, vr_data) + return action diff --git a/omnigibson/robots/franka.py b/omnigibson/robots/franka.py index c4e2f9a900..fbe746907d 100644 --- a/omnigibson/robots/franka.py +++ b/omnigibson/robots/franka.py @@ -2,7 +2,8 @@ import numpy as np from omnigibson.macros import gm -from omnigibson.robots.manipulation_robot import ManipulationRobot +from omnigibson.robots.manipulation_robot import ManipulationRobot, GraspingPoint +from omnigibson.utils.transform_utils import euler2quat class FrankaPanda(ManipulationRobot): @@ -129,18 +130,6 @@ def _create_discrete_action_space(self): # Fetch does not support discrete actions raise ValueError("Franka does not support discrete actions!") - def tuck(self): - """ - Immediately set this robot's configuration to be in tucked mode - """ - self.set_joint_positions(self.tucked_default_joint_pos) - - def untuck(self): - """ - Immediately set this robot's configuration to be in untucked mode - """ - self.set_joint_positions(self.untucked_default_joint_pos) - def update_controller_mode(self): super().update_controller_mode() # overwrite joint params (e.g. damping, stiffess, max_effort) here @@ -203,4 +192,19 @@ def robot_arm_descriptor_yamls(self): @property def urdf_path(self): return os.path.join(gm.ASSET_PATH, "models/franka/franka_panda.urdf") - \ No newline at end of file + + @property + def vr_rotation_offset(self): + return {self.default_arm: euler2quat([-np.pi, 0, 0])} + + @property + def assisted_grasp_start_points(self): + return {self.default_arm: [ + GraspingPoint(link_name="panda_rightfinger", position=[0.0, 0.001, 0.045]), + ]} + + @property + def assisted_grasp_end_points(self): + return {self.default_arm: [ + GraspingPoint(link_name="panda_leftfinger", position=[0.0, 0.001, 0.045]), + ]} \ No newline at end of file diff --git a/omnigibson/robots/franka_allegro.py b/omnigibson/robots/franka_allegro.py index 9303d313c1..4a8224f584 100644 --- a/omnigibson/robots/franka_allegro.py +++ b/omnigibson/robots/franka_allegro.py @@ -1,6 +1,9 @@ import os import numpy as np +from typing import Dict, Iterable, Union +from omni.isaac.motion_generation import LulaKinematicsSolver +import omnigibson.utils.transform_utils as T from omnigibson.macros import gm from omnigibson.robots.manipulation_robot import ManipulationRobot @@ -114,9 +117,12 @@ def __init__( proprio_obs=proprio_obs, sensor_config=sensor_config, grasping_mode=grasping_mode, + grasping_direction="upper", **kwargs, ) + self.allegro_ik_controller = AllegroIKController(self) + @property def model_name(self): return "FrankaAllegro" @@ -130,28 +136,16 @@ def _create_discrete_action_space(self): # Fetch does not support discrete actions raise ValueError("Franka does not support discrete actions!") - def tuck(self): - """ - Immediately set this robot's configuration to be in tucked mode - """ - self.set_joint_positions(self.tucked_default_joint_pos) - - def untuck(self): - """ - Immediately set this robot's 
configuration to be in untucked mode - """ - self.set_joint_positions(self.untucked_default_joint_pos) - def update_controller_mode(self): super().update_controller_mode() # overwrite joint params here for i in range(7): - self.joints[f"panda_joint{i+1}"].damping = 1000 - self.joints[f"panda_joint{i+1}"].stiffness = 1000 + self.joints[f"panda_joint{i+1}"].damping = 10 + self.joints[f"panda_joint{i+1}"].stiffness = 1e5 + self.joints[f"panda_joint{i+1}"].max_effort = 100 for i in range(16): - self.joints[f"joint_{i}_0"].damping = 100 - self.joints[f"joint_{i}_0"].stiffness = 300 - self.joints[f"joint_{i}_0"].max_effort = 15 + self.joints[f"joint_{i}_0"].damping = 10 + self.joints[f"joint_{i}_0"].stiffness = 1e3 @property def controller_order(self): @@ -161,8 +155,30 @@ def controller_order(self): def _default_controllers(self): controllers = super()._default_controllers controllers["arm_{}".format(self.default_arm)] = "InverseKinematicsController" + controllers["gripper_{}".format(self.default_arm)] = "MultiFingerGripperController" return controllers + @property + def _default_arm_inverse_kinematics_controller(self): + return {self.default_arm: { + "name": "InverseKinematicsController", + "mode": "pose_absolute_ori", + "command_input_limits": None, + "motor_type": "position", + }} + + @property + def _default_gripper_multi_finger_controller_configs(self): + return {self.default_arm: { + "name": "MultiFingerGripperController", + "control_freq": self._control_freq, + "motor_type": "position", + "control_limits": self.control_limits, + "dof_idx": self.gripper_control_idx[self.default_arm], + "command_input_limits": None, + "mode": "independent", + }} + @property def default_joint_pos(self): # position where the hand is parallel to the ground @@ -178,7 +194,8 @@ def arm_control_idx(self): @property def gripper_control_idx(self): - return {self.default_arm: np.arange(7, 23)} + # thumb.proximal, ..., thumb.tip, ..., ring.tip + return {self.default_arm: np.array([8, 12, 16, 20, 10, 14, 18, 22, 9, 13, 17, 21, 7, 11, 15, 19])} @property def arm_link_names(self): @@ -228,3 +245,89 @@ def disabled_collision_pairs(self): return [ ["link_12_0", "part_studio_link"], ] + + @property + def vr_rotation_offset(self): + return {self.default_arm: T.euler2quat(np.array([0, np.pi / 2, 0]))} + + def gen_action_from_vr_data(self, vr_data: dict): + action = np.zeros(22) + if "hand_data" in vr_data: + hand_data = vr_data["hand_data"] + if "right" in hand_data and "raw" in hand_data["right"]: + target_pos = hand_data["right"]["raw"]["pos"][0] + target_orn = T.quat_multiply(hand_data["right"]["raw"]["orn"][0], self.vr_rotation_offset[self.default_arm]) + cur_robot_eef_pos, cur_robot_eef_orn = self.links[self.eef_link_names[self.default_arm]].get_position_orientation() + base_pos, base_orn = self.get_position_orientation() + rel_target_pos, rel_target_orn = T.relative_pose_transform(target_pos, target_orn, base_pos, base_orn) + rel_cur_pos, _ = T.relative_pose_transform(cur_robot_eef_pos, cur_robot_eef_orn, base_pos, base_orn) + action[:6] = np.r_[rel_target_pos - rel_cur_pos, T.quat2axisangle(rel_target_orn)] + # joint order: index, middle, pinky + angles = hand_data["right"]["angles"] + for f_idx in range(3): + for j_idx in range(3): + action[11 + f_idx * 4 + j_idx] = angles[f_idx + 1][j_idx] + # specifically, use ik for thumb + thumb_tip_pos = hand_data["right"]["raw"]["pos"][5] + action[6: 10] = self.allegro_ik_controller.solve({"thumb": thumb_tip_pos})["thumb"] + + else: + action_from_controller = 
super().gen_action_from_vr_data(vr_data) + action[:6] = action_from_controller[:6] + action[6:] = action_from_controller[6] + return action + + +class AllegroIKController: + """ + IK controller for Allegro hand, based on the LulaKinematicsSolver + """ + def __init__(self, robot: FrankaAllegro, max_iter=100) -> None: + """ + Initializes the IK controller + Args: + robot (FrankaAllegro): the Franka Allegro robot + max_iter (int): maximum number of iterations for the IK solver, default is 100. + """ + self.robot = robot + self.fingers = { + "ring": ("link_3_0_tip", np.arange(4) + 19), # finger name, finger tip link name, joint indices + "middle": ("link_7_0_tip", np.arange(4) + 15), + "index": ("link_11_0_tip", np.arange(4) + 11), + "thumb": ("link_15_0_tip", np.arange(4) + 7), + } + self.finger_ik_solvers = {} + for finger in self.fingers.keys(): + self.finger_ik_solvers[finger] = LulaKinematicsSolver( + robot_description_path = robot.robot_gripper_descriptor_yamls[finger], + urdf_path = robot.gripper_urdf_path + ) + self.finger_ik_solvers[finger].ccd_max_iterations = max_iter + + def solve(self, target_gripper_pos: Dict[str, Iterable[float]]) -> Dict[str, np.ndarray]: + """ + compute the joint positions given the position of each finger tip + Args: + target_gripper_pos (Dict[str, Iterable[float]]): (finger name, the 3-array of target positions of the finger tips) + Returns: + Dict[str, np.ndarray]: (finger name, 4-array of joint positions in order of proximal to tip) + """ + target_joint_positions = {} + # get the current finger joint positions + finger_joint_positions = self.robot.get_joint_positions() + # get current hand base pose + hand_base_pos, hand_base_orn = self.robot.links["base_link"].get_position_orientation() + for finger_name, pos in target_gripper_pos.items(): + target_joint_positions[finger_name] = finger_joint_positions[self.fingers[finger_name][1]] + # Grab the finger joint positions in order to reach the desired finger pose + self.finger_ik_solvers[finger_name].set_robot_base_pose(hand_base_pos, T.convert_quat(hand_base_orn, "wxyz")) + finger_joint_pos, success = self.finger_ik_solvers[finger_name].compute_inverse_kinematics( + frame_name=self.fingers[finger_name][0], + target_position=pos, + target_orientation=None, + warm_start=finger_joint_positions[self.fingers[finger_name][1]] + ) + if success: + target_joint_positions[finger_name] = finger_joint_pos + + return target_joint_positions diff --git a/omnigibson/robots/locomotion_robot.py b/omnigibson/robots/locomotion_robot.py index 89a3c11499..f475eb8603 100644 --- a/omnigibson/robots/locomotion_robot.py +++ b/omnigibson/robots/locomotion_robot.py @@ -197,3 +197,21 @@ def _do_not_register_classes(cls): classes = super()._do_not_register_classes classes.add("LocomotionRobot") return classes + + + def gen_action_from_vr_data(self, vr_data: dict) -> np.ndarray: + """ + Generate action data from VR input for robot teleoperation + NOTE: This implementation only supports DifferentialDriveController. + Overwrite this function if the robot is using a different base controller. 
+ Args: + vr_data (dict): dictionary containing vr_data from VRSys.step() + Returns: + np.ndarray: array of action data + """ + if vr_data["robot_attached"]: + translation_offset = vr_data["button_data"][1]["axis"]["touchpad_y"] + rotation_offset = vr_data["button_data"][1]["axis"]["touchpad_x"] + else: + translation_offset, rotation_offset = 0, 0 + return np.array([translation_offset, -rotation_offset]) * 0.5 diff --git a/omnigibson/robots/manipulation_robot.py b/omnigibson/robots/manipulation_robot.py index 1468bbf0fb..fec604ff66 100644 --- a/omnigibson/robots/manipulation_robot.py +++ b/omnigibson/robots/manipulation_robot.py @@ -1,6 +1,9 @@ from abc import abstractmethod +import carb from collections import namedtuple import numpy as np +from omni.physx import get_physx_scene_query_interface +from pxr import Gf import omnigibson as og from omnigibson.macros import gm, create_module_macros @@ -12,15 +15,11 @@ ManipulationController, GripperController, ) -from omnigibson.objects.dataset_object import DatasetObject from omnigibson.robots.robot_base import BaseRobot from omnigibson.utils.python_utils import classproperty, assert_valid_key from omnigibson.utils.geometry_utils import generate_points_in_volume_checker_function from omnigibson.utils.constants import JointType, PrimType from omnigibson.utils.usd_utils import create_joint -from omnigibson.utils.ui_utils import suppress_omni_log - -from pxr import Gf # Create settings for this module @@ -89,6 +88,7 @@ def __init__( # Unique to ManipulationRobot grasping_mode="physical", + grasping_direction="lower", disable_grasp_handling=False, **kwargs, @@ -140,6 +140,8 @@ def __init__( at least two "fingers" need to touch the object. If "sticky", will magnetize any object touching the gripper's fingers. In this mode, only one finger needs to touch the object. + grasping_direction (str): One of {"lower", "upper"}. If "lower", joint lower limit is grasp, + otherwise joint upper limit is grasp. disable_grasp_handling (bool): If True, the robot will not automatically handle assisted or sticky grasps. Instead, you will need to call the grasp handling methods yourself. 
kwargs (dict): Additional keyword arguments that are used for other super() calls from subclasses, allowing @@ -148,6 +150,8 @@ def __init__( # Store relevant internal vars assert_valid_key(key=grasping_mode, valid_keys=AG_MODES, name="grasping_mode") self._grasping_mode = grasping_mode + self._grasping_direction = grasping_direction + assert self._grasping_direction in ["lower", "upper"], "grasping_direction must be one of 'lower' or 'upper'" self._disable_grasp_handling = disable_grasp_handling # Initialize other variables used for assistive grasping @@ -726,7 +730,8 @@ def _calculate_in_hand_object_rigid(self, arm="default"): candidates_set, robot_contact_links = self._find_gripper_contacts(arm=arm) # If we're using assisted grasping, we further filter candidates via ray-casting if self.grasping_mode == "assisted": - raise NotImplementedError("Assisted grasp not yet available in OmniGibson!") + candidates_set_raycast = self._find_gripper_raycast_collisions(arm=arm) + candidates_set = candidates_set.intersection(candidates_set_raycast) else: raise ValueError("Invalid grasping mode for calculating in hand object: {}".format(self.grasping_mode)) @@ -765,10 +770,74 @@ def _calculate_in_hand_object_rigid(self, arm="default"): # Return None if object cannot be assisted grasped or not touching at least two fingers if ag_obj is None or not touching_at_least_two_fingers: return None - + # Get object and its contacted link return ag_obj, ag_obj.links[ag_obj_link_name] + def _find_gripper_raycast_collisions(self, arm="default"): + """ + For arm @arm, calculate any prims that are not part of the robot + itself that intersect with rays cast between any of the gripper's start and end points + + Args: + arm (str): specific arm whose gripper will be checked for raycast collisions. Default is "default" + which corresponds to the first entry in self.arm_names + + Returns: + set[str]: set of prim path of detected raycast intersections that + are not the robot itself. Note: if no objects that are not the robot itself are intersecting, + the set will be empty. + """ + arm = self.default_arm if arm == "default" else arm + # First, make sure start and end grasp points exist (i.e.: aren't None) + assert ( + self.assisted_grasp_start_points[arm] is not None + ), "In order to use assisted grasping, assisted_grasp_start_points must not be None!" + assert ( + self.assisted_grasp_end_points[arm] is not None + ), "In order to use assisted grasping, assisted_grasp_end_points must not be None!" 
+ + # Iterate over all start and end grasp points and calculate their x,y,z positions in the world frame + # (per arm appendage) + # Since we'll be calculating the cartesian cross product between start and end points, we stack the start points + # by the number of end points and repeat the individual elements of the end points by the number of start points + startpoints = [] + endpoints = [] + for grasp_start_point in self.assisted_grasp_start_points[arm]: + # Get world coordinates of link base frame + link_pos, link_orn = self.links[grasp_start_point.link_name].get_position_orientation() + # Calculate grasp start point in world frame and add to startpoints + start_point, _ = T.pose_transform(link_pos, link_orn, grasp_start_point.position, [0, 0, 0, 1]) + startpoints.append(start_point) + # Repeat for end points + for grasp_end_point in self.assisted_grasp_end_points[arm]: + # Get world coordinates of link base frame + link_pos, link_orn = self.links[grasp_end_point.link_name].get_position_orientation() + # Calculate grasp start point in world frame and add to endpoints + end_point, _ = T.pose_transform(link_pos, link_orn, grasp_end_point.position, [0, 0, 0, 1]) + endpoints.append(end_point) + # Stack the start points and repeat the end points, and add these values to the raycast dicts + n_startpoints, n_endpoints = len(startpoints), len(endpoints) + raycast_startpoints = startpoints * n_endpoints + raycast_endpoints = [] + for endpoint in endpoints: + raycast_endpoints += [endpoint] * n_startpoints + + # Calculate raycasts from each start point to end point -- this is n_startpoints * n_endpoints total rays + ray_data = set() + # Repeat twice, so that we avoid collisions with the fingers of each gripper themself + for raycast_startpoint, raycast_endpoint in zip(raycast_startpoints, raycast_endpoints): + origin = carb.Float3(raycast_startpoint) + dir = carb.Float3(raycast_endpoint - raycast_startpoint) + distance = np.linalg.norm(dir) + hit = get_physx_scene_query_interface().raycast_closest(origin, dir, distance) + if hit["hit"]: + # filter out self body parts (we currently assume that the robot cannot grasp itself) + if self.prim_path not in hit["rigidBody"]: + ray_data.add(hit["rigidBody"]) + return ray_data + + def _handle_release_window(self, arm="default"): """ Handles releasing an object from arm @arm @@ -1019,7 +1088,6 @@ def _establish_grasp_rigid(self, arm="default", ag_data=None, contact_pos=None): if ag_data is None: return ag_obj, ag_link = ag_data - # Get the appropriate joint type joint_type = self._get_assisted_grasp_joint_type(ag_obj, ag_link) if joint_type is None: @@ -1101,8 +1169,10 @@ def _handle_assisted_grasping(self, action): controller = self._controllers[f"gripper_{arm}"] controlled_joints = controller.dof_idx threshold = np.mean(np.array(self.control_limits["position"])[:, controlled_joints], axis=0) - applying_grasp = np.any(controller.control < threshold) - + if self._grasping_direction == "lower": + applying_grasp = np.any(controller.control < threshold) + else: + applying_grasp = np.any(controller.control > threshold) # Execute gradual release of object if self._ag_obj_in_hand[arm]: if self._ag_release_counter[arm] is not None: @@ -1322,3 +1392,38 @@ def _do_not_register_classes(cls): classes = super()._do_not_register_classes classes.add("ManipulationRobot") return classes + + @property + def vr_rotation_offset(self): + """ + Rotational offset that will be applied for VR teleoperation + """ + return {arm: np.array([0, 0, 0, 1]) for arm in self.arm_names} + + 
def gen_action_from_vr_data(self, vr_data: dict) -> np.ndarray: + """ + Generate action data from VR input for robot teleoperation + NOTE: This implementation only supports InverseKinematicsController for arm and MultiFingerGripperController for gripper. + Overwrite this function if the robot is using a different controller. + Args: + vr_data (dict): dictionary containing vr_data from VRSys.step() + Returns: + np.ndarray: array of action data for arm and gripper + """ + action = np.zeros(self.n_arms * 7) + for i in range(self.n_arms): + arm_name = self.arm_names[self.n_arms - i - 1] + cur_robot_eef_pos, cur_robot_eef_orn = self.links[self.eef_link_names[arm_name]].get_position_orientation() + if vr_data["robot_attached"]: + target_pos, target_orn = vr_data["transforms"]["controllers"][1 - i] + target_orn = T.quat_multiply(target_orn, self.vr_rotation_offset[arm_name]) + else: + target_pos, target_orn = cur_robot_eef_pos, cur_robot_eef_orn + # get orientation relative to robot base + base_pos, base_orn = self.get_position_orientation() + rel_target_pos, rel_target_orn = T.relative_pose_transform(target_pos, target_orn, base_pos, base_orn) + rel_cur_pos, _ = T.relative_pose_transform(cur_robot_eef_pos, cur_robot_eef_orn, base_pos, base_orn) + trigger_fraction = vr_data["button_data"][1 - i]["axis"]["trigger"] + action[i*7 : i*7+6] = np.r_[rel_target_pos - rel_cur_pos, T.quat2axisangle(rel_target_orn)] + action[i*7+6] = trigger_fraction + return action \ No newline at end of file diff --git a/omnigibson/robots/robot_base.py b/omnigibson/robots/robot_base.py index 3fe65ea877..44c0fc6a6d 100644 --- a/omnigibson/robots/robot_base.py +++ b/omnigibson/robots/robot_base.py @@ -414,6 +414,16 @@ def remove(self): # Run super super().remove() + def gen_action_from_vr_data(self, vr_data: dict) -> np.ndarray: + """ + Generate action data from VR input for robot teleoperation + Args: + vr_data (dict): dictionary containing vr_data from VRSys.step() + Returns: + np.ndarray: array of action data filled with update value + """ + raise NotImplementedError() + @property def sensors(self): """ diff --git a/omnigibson/robots/tiago.py b/omnigibson/robots/tiago.py index d727ee429f..5432190521 100644 --- a/omnigibson/robots/tiago.py +++ b/omnigibson/robots/tiago.py @@ -447,10 +447,8 @@ def default_joint_pos(self): def assisted_grasp_start_points(self): return { arm: [ - GraspingPoint(link_name="gripper_{}_right_finger_link".format(arm), position=[0.04, -0.012, 0.014]), - GraspingPoint(link_name="gripper_{}_right_finger_link".format(arm), position=[0.04, -0.012, -0.014]), - GraspingPoint(link_name="gripper_{}_right_finger_link".format(arm), position=[-0.04, -0.012, 0.014]), - GraspingPoint(link_name="gripper_{}_right_finger_link".format(arm), position=[-0.04, -0.012, -0.014]), + GraspingPoint(link_name="gripper_{}_right_finger_link".format(arm), position=[0.002, 0.0, -0.2]), + GraspingPoint(link_name="gripper_{}_right_finger_link".format(arm), position=[0.002, 0.0, -0.13]), ] for arm in self.arm_names } @@ -459,10 +457,8 @@ def assisted_grasp_start_points(self): def assisted_grasp_end_points(self): return { arm: [ - GraspingPoint(link_name="gripper_{}_left_finger_link".format(arm), position=[0.04, 0.012, 0.014]), - GraspingPoint(link_name="gripper_{}_left_finger_link".format(arm), position=[0.04, 0.012, -0.014]), - GraspingPoint(link_name="gripper_{}_left_finger_link".format(arm), position=[-0.04, 0.012, 0.014]), - GraspingPoint(link_name="gripper_{}_left_finger_link".format(arm), position=[-0.04, 0.012, -0.014]), 
+ GraspingPoint(link_name="gripper_{}_left_finger_link".format(arm), position=[-0.002, 0.0, -0.2]), + GraspingPoint(link_name="gripper_{}_left_finger_link".format(arm), position=[-0.002, 0.0, -0.13]), ] for arm in self.arm_names } @@ -765,3 +761,12 @@ def set_angular_velocity(self, velocity: np.ndarray) -> None: def get_angular_velocity(self) -> np.ndarray: # Note that the link we are interested in is self.base_footprint_link, not self.root_link return self.base_footprint_link.get_angular_velocity() + + def gen_action_from_vr_data(self, vr_data: dict): + action = np.zeros(19) + x_offset = vr_data["button_data"][1]["axis"]["touchpad_y"] + y_offset = -vr_data["button_data"][1]["axis"]["touchpad_x"] + theta_offset = -vr_data["button_data"][0]["axis"]["touchpad_x"] + action[:3] = np.array([x_offset, y_offset, theta_offset]) * 0.3 + action[5:] = ManipulationRobot.gen_action_from_vr_data(self, vr_data) + return action diff --git a/omnigibson/utils/xr_utils.py b/omnigibson/utils/xr_utils.py new file mode 100644 index 0000000000..99ec0e711a --- /dev/null +++ b/omnigibson/utils/xr_utils.py @@ -0,0 +1,282 @@ +import carb +import numpy as np +from omni.kit.xr.core import XRCore, XRDeviceClass, XRCoreEventType +from omni.kit.xr.ui.stage.common import XRAvatarManager +from typing import Iterable, List, Optional, Tuple + +import omnigibson as og +import omnigibson.utils.transform_utils as T +from omnigibson.robots.robot_base import BaseRobot + +class VRSys(): + def __init__( + self, + system: str="OpenXR", + vr_robot: Optional[BaseRobot]=None, + show_controller: bool=False, + disable_display_output: bool=False, + enable_touchpad_movement: bool=False, + align_anchor_to_robot_base: bool=False, + use_hand_tracking: bool=False, + ) -> None: + """ + Initializes the VR system + Args: + vr_robot (None of BaseRobot): the robot that VR will control. + system (str): the VR system to use, one of ["OpenXR", "SteamVR"], default is "OpenXR". + show_controller (bool): whether to show the controller model in the scene, default is False. + disable_display_output (bool): whether we will not display output to the VR headset (only use controller tracking), default is False. + enable_touchpad_movement (bool): whether to enable VR system anchor movement by controller, default is False. + align_anchor_to_robot_base (bool): whether to align VR anchor to robot base, default is False. + use_hand_tracking (bool): whether to use hand tracking instead of controllers, default is False. + """ + self.xr_core = XRCore.get_singleton() + self.vr_profile = self.xr_core.get_profile("vr") + self.disable_display_output = disable_display_output + self.enable_touchpad_movement = enable_touchpad_movement + self.align_anchor_to_robot_base = align_anchor_to_robot_base + assert not (self.enable_touchpad_movement and self.align_anchor_to_robot_base), "enable_touchpad_movement and align_anchor_to_robot_base cannot be True at the same time!" 
diff --git a/omnigibson/utils/xr_utils.py b/omnigibson/utils/xr_utils.py
new file mode 100644
index 0000000000..99ec0e711a
--- /dev/null
+++ b/omnigibson/utils/xr_utils.py
@@ -0,0 +1,282 @@
+import carb
+import numpy as np
+from omni.kit.xr.core import XRCore, XRDeviceClass, XRCoreEventType
+from omni.kit.xr.ui.stage.common import XRAvatarManager
+from typing import Iterable, List, Optional, Tuple
+
+import omnigibson as og
+import omnigibson.utils.transform_utils as T
+from omnigibson.robots.robot_base import BaseRobot
+
+class VRSys():
+    def __init__(
+        self,
+        system: str="OpenXR",
+        vr_robot: Optional[BaseRobot]=None,
+        show_controller: bool=False,
+        disable_display_output: bool=False,
+        enable_touchpad_movement: bool=False,
+        align_anchor_to_robot_base: bool=False,
+        use_hand_tracking: bool=False,
+    ) -> None:
+        """
+        Initializes the VR system
+        Args:
+            vr_robot (None or BaseRobot): the robot that VR will control.
+            system (str): the VR system to use, one of ["OpenXR", "SteamVR"], default is "OpenXR".
+            show_controller (bool): whether to show the controller model in the scene, default is False.
+            disable_display_output (bool): whether to disable display output to the VR headset and only use controller tracking, default is False.
+            enable_touchpad_movement (bool): whether to enable VR system anchor movement via controller touchpad input, default is False.
+            align_anchor_to_robot_base (bool): whether to align the VR anchor to the robot base, default is False.
+            use_hand_tracking (bool): whether to use hand tracking instead of controllers, default is False.
+        """
+        self.xr_core = XRCore.get_singleton()
+        self.vr_profile = self.xr_core.get_profile("vr")
+        self.disable_display_output = disable_display_output
+        self.enable_touchpad_movement = enable_touchpad_movement
+        self.align_anchor_to_robot_base = align_anchor_to_robot_base
+        assert not (self.enable_touchpad_movement and self.align_anchor_to_robot_base), "enable_touchpad_movement and align_anchor_to_robot_base cannot be True at the same time!"
+        # robot info
+        self.vr_robot = vr_robot
+        self.robot_attached = False
+        self.reset_button_pressed = False
+        # set avatar
+        if show_controller:
+            self.vr_profile.set_avatar(XRAvatarManager.get_singleton().create_avatar("basic_avatar", {}))
+        else:
+            self.vr_profile.set_avatar(XRAvatarManager.get_singleton().create_avatar("empty_avatar", {}))
+        # set anchor mode to "scene origin"
+        carb.settings.get_settings().set(self.vr_profile.get_scene_persistent_path() + "anchorMode", "scene origin")
+        # set vr system
+        carb.settings.get_settings().set(self.vr_profile.get_persistent_path() + "system/display", system)
+        # set display mode
+        carb.settings.get_settings().set(self.vr_profile.get_persistent_path() + "disableDisplayOutput", disable_display_output)
+        carb.settings.get_settings().set('/rtx/rendermode', "RaytracedLighting")
+        # devices info
+        self.hmd = None
+        self.controllers = {}
+        self.trackers = {}
+        # setup event subscriptions
+        self.use_hand_tracking = use_hand_tracking
+        if use_hand_tracking:
+            self.hand_data = {}
+            self._hand_tracking_subscription = self.xr_core.get_event_stream().create_subscription_to_pop_by_type(
+                XRCoreEventType.hand_joints, self.get_hand_tracking_data, name="hand tracking"
+            )
+
+    def xr2og(self, transform: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
+        """
+        Apply the orientation offset from the Omniverse XR coordinate system to the OmniGibson coordinate system
+        Args:
+            transform (np.ndarray): the transform matrix in the Omniverse XR coordinate system
+        Returns:
+            tuple(np.ndarray, np.ndarray): the position and orientation in the OmniGibson coordinate system
+        """
+        pos, orn = T.mat2pose(np.array(transform).T)
+        orn = T.quat_multiply(orn, np.array([0.5, -0.5, -0.5, -0.5]))
+        return pos, orn
+
+    def og2xr(self, pos: np.ndarray, orn: np.ndarray) -> np.ndarray:
+        """
+        Apply the orientation offset from the OmniGibson coordinate system to the Omniverse XR coordinate system
+        Args:
+            pos (np.ndarray): the position in the OmniGibson coordinate system
+            orn (np.ndarray): the orientation in the OmniGibson coordinate system
+        Returns:
+            np.ndarray: the transform matrix in the Omniverse XR coordinate system
+        """
+        orn = T.quat_multiply(np.array([-0.5, 0.5, 0.5, -0.5]), orn)
+        return T.pose2mat((pos, orn)).T.astype(np.float64)
+
+    @property
+    def is_enabled(self) -> bool:
+        """
+        Checks whether the VR system is enabled
+        Returns:
+            bool: whether the VR system is enabled
+        """
+        return self.vr_profile.is_enabled()
+
+    def start(self) -> None:
+        """
+        Starts the VR system by enabling the VR profile
+        """
+        self.vr_profile.request_enable_profile()
+        for _ in range(100):
+            og.sim.step()
+        assert self.vr_profile.is_enabled(), "[VRSys] VR profile not enabled!"
+        self.update_devices()
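One detail worth calling out in og2xr/xr2og above is the transpose: the XR side consumes row-major 4x4 matrices, so the translation ends up in the last row rather than the last column. A small sketch of what that looks like in practice (assuming a VRSys instance named vrsys):

import numpy as np

pos = np.array([0.5, 0.0, 1.0])
orn = np.array([0.0, 0.0, 0.0, 1.0])      # identity rotation, (x, y, z, w)
xr_mat = vrsys.og2xr(pos=pos, orn=orn)    # 4x4, row-major, ready for the XR runtime
print(xr_mat[3, :3])                      # translation lives in the last row: [0.5, 0.0, 1.0]
pos_back, _ = vrsys.xr2og(xr_mat)         # the position survives the round trip
assert np.allclose(pos_back, pos)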
+
+    def step(self) -> dict:
+        """
+        Steps the VR system
+        Returns:
+            dict: a dictionary of VR data containing device transforms, controller button data, and (optionally) hand tracking data
+        """
+        vr_data = {}
+        # Optionally move the anchor
+        if self.enable_touchpad_movement:
+            self.move_anchor(pos_offset=self.compute_anchor_offset_with_controller_input())
+        if self.align_anchor_to_robot_base:
+            robot_base_pos, robot_base_orn = self.vr_robot.get_position_orientation()
+            self.vr_profile.set_virtual_world_anchor_transform(self.og2xr(robot_base_pos, robot_base_orn[[0, 2, 1, 3]]))
+        # get transforms
+        vr_data["transforms"] = self.get_device_transforms()
+        # get controller button data
+        vr_data["button_data"] = self.get_controller_button_data()
+        # update robot attachment info
+        if vr_data["button_data"][1]["press"]["grip"]:
+            if not self.reset_button_pressed:
+                self.reset_button_pressed = True
+                self.robot_attached = not self.robot_attached
+        else:
+            self.reset_button_pressed = False
+        vr_data["robot_attached"] = self.robot_attached
+        # Optionally get hand tracking data
+        if self.use_hand_tracking:
+            vr_data["hand_data"] = self.hand_data
+        return vr_data
+
+    def stop(self) -> None:
+        """
+        Stops the VR system by disabling the VR profile
+        """
+        self.xr_core.request_disable_profile()
+        og.sim.step()
+        assert not self.vr_profile.is_enabled(), "[VRSys] VR profile not disabled!"
+
+    def update_devices(self) -> None:
+        """
+        Update the VR device list
+        """
+        for device in self.vr_profile.get_device_list():
+            if device.get_class() == XRDeviceClass.xrdisplaydevice:
+                self.hmd = device
+            elif device.get_class() == XRDeviceClass.xrcontroller:
+                self.controllers[device.get_index()] = device
+            elif device.get_class() == XRDeviceClass.xrtracker:
+                self.trackers[device.get_index()] = device
+        assert self.hmd is not None, "[VRSys] HMD not detected! Please make sure you have a VR headset connected to your computer."
+
+    def compute_anchor_offset_with_controller_input(self) -> np.ndarray:
+        """
+        Compute the desired anchor translational offset based on controller touchpad input
+        Returns:
+            np.ndarray: 3d translational offset *in hmd frame*
+        """
+        offset = np.zeros(3)
+        if 1 in self.controllers:
+            right_axis_state = self.controllers[1].get_axis_state()
+            offset = np.array([right_axis_state["touchpad_x"], right_axis_state["touchpad_y"], 0])
+        if 0 in self.controllers:
+            offset[2] = self.controllers[0].get_axis_state()["touchpad_y"]
+        # normalize offset
+        length = np.linalg.norm(offset)
+        if length != 0:
+            offset *= 0.03 / length
+        # NOTE: in xr, the 3 axes are right, up, and back
+        return np.array(offset)
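To make the dictionary returned by step() above concrete, a caller can pull the head pose and the right-controller state out of it like this (a sketch; it assumes a started system with a right controller reported at index 1):

vr_data = vrsys.step()

# Head pose in the OmniGibson frame: position and (x, y, z, w) quaternion.
hmd_pos, hmd_orn = vr_data["transforms"]["hmd"]

# Right-controller state: presses, touches, and analog axes.
right = vr_data["button_data"].get(1)
if right is not None:
    grip_pressed = right["press"]["grip"]         # the grip toggles robot attachment inside step()
    trigger_fraction = right["axis"]["trigger"]   # analog trigger value, used as the gripper command

# Whether the controllers currently drive the robot.
attached = vr_data["robot_attached"]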
+ """ + if pos_offset is not None: + # note that x is right, y is up, z is back for ovxr, but x is forward, y is left, z is up for og + pos_offset = np.array([pos_offset[0], pos_offset[2], -pos_offset[1]]).astype(np.float64) + self.vr_profile.add_move_physical_world_relative_to_device(pos_offset) + if rot_offset is not None: + rot_offset = np.array(rot_offset).astype(np.float64) + self.vr_profile.add_rotate_physical_world_around_device(rot_offset) + + def get_device_transforms(self) -> dict: + """ + Get the transform matrix of each VR device + Note that we have to transpose the transform matrix because Omniverse uses row-major matrices while OmniGibson uses column-major matrices + Returns: + dict: a dictionary of device transforms, with keys "hmd", "controllers", "trackers" + """ + transforms = {} + transforms["hmd"] = self.xr2og(self.hmd.get_virtual_world_pose()) + transforms["controllers"] = {} + transforms["trackers"] = {} + for controller_index in self.controllers: + transforms["controllers"][controller_index] = self.xr2og(self.controllers[controller_index].get_virtual_world_pose()) + for tracker_index in self.trackers: + transforms["trackers"][tracker_index] = self.xr2og(self.trackers[tracker_index].get_virtual_world_pose()) + return transforms + + def get_controller_button_data(self) -> dict: + """ + Get the button data for each controller + Returns: + dict: a dictionary of whether each button is pressed or touched, and the axis state for touchpad and joysticks + """ + button_data = {} + for controller_index in self.controllers: + button_data[controller_index] = {} + button_data[controller_index]["press"] = self.controllers[controller_index].get_button_press_state() + button_data[controller_index]["touch"] = self.controllers[controller_index].get_button_touch_state() + button_data[controller_index]["axis"] = self.controllers[controller_index].get_axis_state() + return button_data + + def get_hand_tracking_data(self, e: carb.events.IEvent) -> None: + """ + Get hand tracking data, see https://registry.khronos.org/OpenXR/specs/1.0/html/xrspec.html#convention-of-hand-joints for joint indices + Args: + e (carb.events.IEvent): event that contains hand tracking data as payload + """ + e.consume() + data_dict = e.payload + for hand in ["left", "right"]: + self.hand_data[hand] = {} + if data_dict[f"joint_count_{hand}"] != 0: + self.hand_data[hand]["raw"] = {"pos": [], "orn": []} + hand_joint_matrices = data_dict[f"joint_matrices_{hand}"] + for i in range(26): + pos, orn = self.xr2og(np.reshape(hand_joint_matrices[16 * i: 16 * (i + 1)], (4, 4))) + self.hand_data[hand]["raw"]["pos"].append(pos) + self.hand_data[hand]["raw"]["orn"].append(orn) + self.hand_data[hand]["angles"] = self.get_joint_angle_from_hand_data(self.hand_data[hand]["raw"]["pos"]) + + def get_joint_angle_from_hand_data(self, raw_hand_data: List[np.ndarray]) -> np.ndarray: + """ + Get each finger joint's rotation angle from hand tracking data + Each finger has 3 joints + Args: + raw_hand_data (List[np.ndarray]): a list of 26 matrices representing the hand tracking data + Returns: + np.ndarray: a 5 x 3 array of joint rotations (from thumb to pinky, from base to tip) + """ + joint_angles = np.zeros((5, 3)) + for i in range(5): + for j in range(3): + # get the 3 related joints + prev_joint_idx, cur_joint_idx, next_joint_idx = i * 5 + j + 1, i * 5 + j + 2, i * 5 + j + 3 + # get the 3 related joints' positions + prev_joint_pos = raw_hand_data[prev_joint_idx] + cur_joint_pos = raw_hand_data[cur_joint_idx] + next_joint_pos = 
raw_hand_data[next_joint_idx] + # calculate the angle formed by 3 points + v1 = cur_joint_pos - prev_joint_pos + v2 = next_joint_pos - cur_joint_pos + v1 /= np.linalg.norm(v1) + v2 /= np.linalg.norm(v2) + joint_angles[i, j] = np.arccos(v1 @ v2) + return joint_angles + + def snap_device_to_robot_eef(self, robot_eef_position: Iterable[float], base_rotation: Iterable[float], device: Optional[XRDeviceClass]=None) -> None: + """ + Snap device to the robot end effector (ManipulationRobot only) + Args: + robot_eef_position (Iterable[float]): the position of the robot end effector + base_rotation (Iterable[float]): the rotation of the robot base + device (Optional[XRDeviceClass]): the device to snap to the robot end effector, default is the right controller + """ + if device is None: + device = self.controllers[1] + target_transform = self.og2xr(pos=robot_eef_position, orn=base_rotation) + self.vr_profile.set_physical_world_to_world_anchor_transform_to_match_xr_device(target_transform, device)
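Since get_joint_angle_from_hand_data only consumes joint positions, it is easy to sanity-check in isolation. A minimal sketch with synthetic data (perfectly straight fingers, so every bend angle should come out as zero; vrsys is an existing VRSys instance):

import numpy as np

# 26 synthetic joint positions on a straight line along +x, standing in for a fully extended hand.
raw_positions = [np.array([0.01 * i, 0.0, 0.0]) for i in range(26)]
angles = vrsys.get_joint_angle_from_hand_data(raw_positions)
assert angles.shape == (5, 3)
assert np.allclose(angles, 0.0)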