Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

VR System #318

Merged
merged 45 commits into from
Jan 13, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
45 commits
Select commit Hold shift + click to select a range
ea71cad
[xr] Initial commit: add xr_utils
wensi-ai Nov 8, 2023
8f178c1
[xr] Add Behaviorbot
wensi-ai Nov 8, 2023
ef5fc07
[xr] Add VR plugin on OG startup
wensi-ai Nov 8, 2023
66df756
[xr] Add assisted grasping, update grasping mode example
wensi-ai Nov 8, 2023
23a39ca
[xr] Add VR teleoperate functionality to robots, update assisted gras…
wensi-ai Nov 8, 2023
a58d2c5
[xr] Add robot teleoperate demo
wensi-ai Nov 8, 2023
1e4e97d
[xr] Add VR tour demo with Behaviorbot
wensi-ai Nov 8, 2023
1ef965a
[xr] Add hand tracking demo
wensi-ai Nov 8, 2023
2a5c460
Refine allegro hand tracking and assisted grasping
wensi-ai Nov 9, 2023
c62b79e
Update comments, clean up code
wensi-ai Nov 9, 2023
d1e4a89
Update IK controller
wensi-ai Nov 10, 2023
2f3fa3f
Address review comments
wensi-ai Nov 11, 2023
e7478ae
Add franka with leap hand
wensi-ai Nov 15, 2023
03717fb
Clean up gripper IK code. Fix bugs
wensi-ai Nov 17, 2023
3261cdb
Update xr import logic, update FrankaLeap
wensi-ai Nov 20, 2023
5a6b698
Update Frankas
wensi-ai Nov 21, 2023
1d513c2
Fix bugs
wensi-ai Nov 22, 2023
74ccb4f
Add assisted grasping for leap hand
wensi-ai Nov 30, 2023
9493543
Update to og-dev branch
wensi-ai Dec 22, 2023
21c78a7
Refactor xr to general teleop
wensi-ai Dec 22, 2023
2eb2320
Remove unnecessary comments
wensi-ai Dec 22, 2023
2d49b09
Update teleop
wensi-ai Dec 26, 2023
1b4928e
Update teleoperation robustness
wensi-ai Jan 3, 2024
bea5632
Make space mouse teleoperation work
wensi-ai Jan 4, 2024
2bc4873
Fix tiago teleoperation bug, update code structure
wensi-ai Jan 4, 2024
2d5664f
Modify control marker logic
wensi-ai Jan 5, 2024
516e293
Fix Tiago & Oculus bug
wensi-ai Jan 5, 2024
18bd960
Clean up code, address comments
wensi-ai Jan 6, 2024
39b32fb
Update Behaviorbot
wensi-ai Jan 6, 2024
448ccd8
Update behaviorbot, fix bugs in teleoperation examples
wensi-ai Jan 7, 2024
46d3dea
Address macros issue
wensi-ai Jan 8, 2024
621f76b
Change teleop data to dataclass
wensi-ai Jan 9, 2024
eff6a31
Update teleop data and corresponding examples
wensi-ai Jan 9, 2024
6d28bca
Code formatting
wensi-ai Jan 9, 2024
7238cfb
Fix bugs in teleoperation with spacemouse
wensi-ai Jan 9, 2024
aa0e158
Add keyboard teleop interface
wensi-ai Jan 9, 2024
8a0db68
Fix oculus teleop bug
wensi-ai Jan 10, 2024
ddb151e
Fix bugs in ovxr teleoperation
wensi-ai Jan 12, 2024
db6a6c8
Add unittest for robot teleoperation
wensi-ai Jan 12, 2024
4a232ba
Rebase assisted grasping raycasting to og
wensi-ai Jan 12, 2024
e9c04b3
Fix bugs in OVXRSystem and Behaviorbot
wensi-ai Jan 12, 2024
9e4a3c8
Fix bugs to get AG working
wensi-ai Jan 13, 2024
83690e8
Rename Behaviorbot -> BehaviorRobot. Fix teleop unittest
wensi-ai Jan 12, 2024
8220000
Fix robot init file
wensi-ai Jan 13, 2024
c37c441
Rename vr_origin -> device_origin
wensi-ai Jan 13, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 12 additions & 3 deletions omnigibson/controllers/ik_controller.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import numpy as np
from omnigibson.macros import create_module_macros
from omnigibson.macros import gm, create_module_macros

import omnigibson.utils.transform_utils as T
from omnigibson.controllers import ControlType, ManipulationController
Expand All @@ -15,6 +15,8 @@
m = create_module_macros(module_path=__file__)
m.IK_POS_TOLERANCE = 0.002
m.IK_POS_WEIGHT = 20.0
m.IK_ORN_TOLERANCE = 0.01
m.IK_ORN_WEIGHT = 0.05
m.IK_MAX_ITERATIONS = 100

# Different modes
Expand Down Expand Up @@ -92,6 +94,8 @@ def __init__(
using @motor_type = velocity
mode (str): mode to use when computing IK. In all cases, position commands are 3DOF delta (dx,dy,dz)
cartesian values, relative to the robot base frame. Valid options are:
- "absolute_pose": 6DOF (dx,dy,dz,ax,ay,az) control over pose,
where both the position and the orientation is given in absolute axis-angle coordinates
- "pose_absolute_ori": 6DOF (dx,dy,dz,ax,ay,az) control over pose,
where the orientation is given in absolute axis-angle coordinates
- "pose_delta_ori": 6DOF (dx,dy,dz,dax,day,daz) control over pose
Expand Down Expand Up @@ -122,7 +126,7 @@ def limiter(command_pos: Array[float], command_quat: Array[float], control_dict:
if smoothing_filter_size in {None, 0}
else MovingAverageFilter(obs_dim=control_dim, filter_width=smoothing_filter_size)
)
assert mode in IK_MODES, "Invalid ik mode specified! Valid options are: {IK_MODES}, got: {mode}"
assert mode in IK_MODES, f"Invalid ik mode specified! Valid options are: {IK_MODES}, got: {mode}"
self.mode = mode
self.kv = kv
self.workspace_pose_limiter = workspace_pose_limiter
Expand Down Expand Up @@ -320,7 +324,9 @@ def _command_to_control(self, command, control_dict):
target_pos=target_pos,
target_quat=target_quat,
tolerance_pos=m.IK_POS_TOLERANCE,
tolerance_quat=m.IK_ORN_TOLERANCE,
weight_pos=m.IK_POS_WEIGHT,
weight_quat=m.IK_ORN_WEIGHT,
max_iterations=m.IK_MAX_ITERATIONS,
initial_joint_pos=current_joint_pos,
)
Expand All @@ -329,14 +335,17 @@ def _command_to_control(self, command, control_dict):
target_pos=target_pos,
target_quat=target_quat,
tolerance_pos=m.IK_POS_TOLERANCE,
tolerance_quat=m.IK_ORN_TOLERANCE,
weight_pos=m.IK_POS_WEIGHT,
weight_quat=m.IK_ORN_WEIGHT,
max_iterations=m.IK_MAX_ITERATIONS,
)

if target_joint_pos is None:
# Print warning that we couldn't find a valid solution, and return the current joint configuration
# instead so that we execute a no-op control
log.warning(f"Could not find valid IK configuration! Returning no-op control instead.")
if gm.DEBUG:
log.warning(f"Could not find valid IK configuration! Returning no-op control instead.")
target_joint_pos = current_joint_pos

# Optionally pass through smoothing filter for better stability
Expand Down
1 change: 0 additions & 1 deletion omnigibson/controllers/multi_finger_gripper_controller.py
Original file line number Diff line number Diff line change
Expand Up @@ -131,7 +131,6 @@ def _command_to_control(self, command, control_dict):

Args:
command (Array[float]): desired (already preprocessed) command to convert into control signals.
This should always be 2D command for each gripper joint
control_dict (Dict[str, Any]): dictionary that should include any relevant keyword-mapped
states necessary for controller computation. Must include the following keys:
joint_position: Array of current joint positions
Expand Down
1 change: 1 addition & 0 deletions omnigibson/examples/robots/grasping_mode_example.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@

GRASPING_MODES = dict(
sticky="Sticky Mitten - Objects are magnetized when they touch the fingers and a CLOSE command is given",
assisted="Assisted Grasping - Objects are magnetized when they touch the fingers, are within the hand, and a CLOSE command is given",
physical="Physical Grasping - No additional grasping assistance applied",
)

Expand Down
98 changes: 98 additions & 0 deletions omnigibson/examples/teleoperation/hand_tracking_demo.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,98 @@
"""
Example script for using hand tracking (OpenXR only) with dexterous hand.
You can set DEBUG_MODE to True to visualize the landmarks of the hands!
"""
import omnigibson as og
from omnigibson.utils.teleop_utils import OVXRSystem

DEBUG_MODE = True # set to True to visualize the landmarks of the hands

def main():
    """Drive a BehaviorRobot's hands with OpenXR hand tracking.

    Builds a small tabletop scene (table, apple, banana), starts the OVXR
    system with hand tracking enabled, and runs a fixed-length simulation
    loop that converts the tracked hand poses into robot actions.  With
    DEBUG_MODE on, 52 small green cubes (26 joints per hand) mirror the raw
    landmark poses every frame.
    """
    # Environment / scene / robot configuration.
    env_cfg = {"action_timestep": 1 / 60., "physics_timestep": 1 / 120.}
    scene_cfg = {"type": "Scene"}
    robot0_cfg = {
        "type": "BehaviorRobot",
        "obs_modalities": ["rgb", "depth", "normal", "scan", "occupancy_grid"],
        "action_normalize": False,
        "grasping_mode": "assisted"
    }

    # A table plus two graspable fruits.
    object_cfg = [
        {
            "type": "DatasetObject",
            "prim_path": "/World/breakfast_table",
            "name": "breakfast_table",
            "category": "breakfast_table",
            "model": "kwmfdg",
            "bounding_box": [2, 1, 0.4],
            "position": [0.8, 0, 0.3],
            "orientation": [0, 0, 0.707, 0.707],
        },
    ]
    for fruit, model, y in (("apple", "omzprq", 0.1), ("banana", "znakxm", -0.1)):
        object_cfg.append({
            "type": "DatasetObject",
            "prim_path": f"/World/{fruit}",
            "name": fruit,
            "category": fruit,
            "model": model,
            "position": [0.6, y, 0.5],
        })

    if DEBUG_MODE:
        # One small green cube per hand-tracking landmark (26 joints x 2 hands).
        for i in range(52):
            object_cfg.append({
                "type": "PrimitiveObject",
                "prim_path": f"/World/marker_{i}",
                "name": f"marker_{i}",
                "primitive_type": "Cube",
                "size": 0.01,
                "visual_only": True,
                "rgba": [0.0, 1.0, 0.0, 1.0],
            })

    # Build the environment.
    env = og.Environment(configs=dict(env=env_cfg, scene=scene_cfg, robots=[robot0_cfg], objects=object_cfg))
    env.reset()

    if DEBUG_MODE:
        landmark_markers = [env.scene.object_registry("name", f"marker_{i}") for i in range(52)]

    # Start the VR system with hand tracking enabled.
    vr_sys = OVXRSystem(robot=env.robots[0], show_control_marker=False, system="OpenXR", use_hand_tracking=True)
    vr_sys.start()
    # Headset starts 1 m above the ground, facing +x.
    vr_sys.set_initial_transform(pos=[0, 0, 1], orn=[0, 0, 0, 1])

    # Main simulation loop.
    for _ in range(10000):
        # Pull the latest tracking data from the VR runtime.
        vr_sys.update()
        if DEBUG_MODE:
            # Mirror each tracked joint pose onto its marker: left hand uses
            # markers 0-25, right hand markers 26-51.
            for hand_idx, hand in enumerate(("left", "right")):
                if vr_sys.teleop_data.is_valid[hand]:
                    hand_data = vr_sys.raw_data["hand_data"][hand]
                    for joint in range(26):
                        landmark_markers[hand_idx * 26 + joint].set_position_orientation(
                            hand_data["pos"][joint], hand_data["orn"][joint]
                        )
        env.step(vr_sys.teleop_data_to_action())

    # Shut everything down cleanly.
    vr_sys.stop()
    env.close()


if __name__ == "__main__":
    main()
155 changes: 155 additions & 0 deletions omnigibson/examples/teleoperation/robot_teleoperate_demo.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,155 @@
"""
Example script for using external devices to teleoperate a robot.
"""
import omnigibson as og
from omnigibson.utils.ui_utils import choose_from_options

# Robot choices offered to the user; keys are robot type names passed into
# the environment config, values are the descriptions shown in the menu.
ROBOTS = {
    "FrankaPanda": "Franka Emika Panda (default)",
    "Fetch": "Mobile robot with one arm",
    "Tiago": "Mobile robot with two arms",
}

# Teleoperation-device choices; keys select which TeleopSystem class gets
# imported in main(), values are the descriptions shown in the menu.
SYSTEMS = {
    "Keyboard": "Keyboard (default)",
    "SteamVR": "SteamVR with HTC VIVE through OmniverseXR plugin",
    "Oculus": "Oculus Reader with Quest 2",
    "SpaceMouse": "Space Mouse",
}


def main():
    """Teleoperate a user-chosen robot with a user-chosen input device.

    Prompts for a teleoperation system and a robot, builds a tabletop scene
    with several graspable toys, then runs a simulation loop that converts
    device input into robot actions via the selected TeleopSystem backend.
    """
    teleop_system = choose_from_options(options=SYSTEMS, name="system")
    robot_name = choose_from_options(options=ROBOTS, name="robot")

    # Environment / scene configuration.
    env_cfg = {"action_timestep": 1 / 60., "physics_timestep": 1 / 300.}
    scene_cfg = {"type": "Scene"}

    # Robot configuration: IK-controlled arm(s) plus smooth gripper control.
    robot_cfg = {
        "type": robot_name,
        "obs_modalities": ["rgb"],
        "action_normalize": False,
        "grasping_mode": "assisted",
    }
    robot_cfg["controller_config"] = {}
    # Tiago is bimanual; every other robot exposes a single arm named "0".
    for arm in (["left", "right"] if robot_name == "Tiago" else ["0"]):
        robot_cfg["controller_config"][f"arm_{arm}"] = {
            "name": "InverseKinematicsController",
            "mode": "pose_absolute_ori",
            "motor_type": "position"
        }
        robot_cfg["controller_config"][f"gripper_{arm}"] = {
            "name": "MultiFingerGripperController",
            "command_input_limits": (0.0, 1.0),
            "mode": "smooth",
            "inverted": True
        }

    # Scene objects: a table, a basket ("frail"), and five toy figures.
    object_cfg = [
        {
            "type": "DatasetObject",
            "prim_path": "/World/breakfast_table",
            "name": "breakfast_table",
            "category": "breakfast_table",
            "model": "kwmfdg",
            "bounding_box": [2, 1, 0.4],
            "position": [0.8, 0, 0.3],
            "orientation": [0, 0, 0.707, 0.707],
        },
        {
            "type": "DatasetObject",
            "prim_path": "/World/frail",
            "name": "frail",
            "category": "frail",
            "model": "zmjovr",
            "scale": [2, 2, 2],
            "position": [0.6, -0.3, 0.5],
        },
    ]
    # (model, uniform scale, y offset) for each toy figure, in order.
    toy_specs = [
        ("issvzv", 0.75, 0),
        ("nncqfn", 0.75, 0.1),
        ("eulekw", 0.25, 0.2),
        ("yxiksm", 0.25, 0.3),
        ("wvpqbf", 0.75, 0.4),
    ]
    object_cfg += [
        {
            "type": "DatasetObject",
            "prim_path": f"/World/toy_figure{idx}",
            "name": f"toy_figure{idx}",
            "category": "toy_figure",
            "model": model,
            "scale": [s, s, s],
            "position": [0.6, y, 0.5],
        }
        for idx, (model, s, y) in enumerate(toy_specs, start=1)
    ]

    # Build the environment and point the viewer camera at the table.
    env = og.Environment(configs=dict(env=env_cfg, scene=scene_cfg, robots=[robot_cfg], objects=object_cfg))
    env.reset()
    og.sim.viewer_camera.set_position_orientation([-0.22, 0.99, 1.09], [-0.14, 0.47, 0.84, -0.23])

    robot = env.robots[0]

    # Lazily import only the teleoperation backend the user selected.
    if teleop_system == "SteamVR":
        from omnigibson.utils.teleop_utils import OVXRSystem as TeleopSystem
    elif teleop_system == "Oculus":
        from omnigibson.utils.teleop_utils import OculusReaderSystem as TeleopSystem
    elif teleop_system == "SpaceMouse":
        from omnigibson.utils.teleop_utils import SpaceMouseSystem as TeleopSystem
    elif teleop_system == "Keyboard":
        from omnigibson.utils.teleop_utils import KeyboardSystem as TeleopSystem
    teleop_sys = TeleopSystem(robot=robot, disable_display_output=True, align_anchor_to_robot_base=True)
    teleop_sys.start()

    # Tracks whether the robot was attached to the device on the previous frame.
    prev_robot_attached = False
    # Main simulation loop.
    for _ in range(10000):
        if og.sim.is_playing():
            teleop_sys.update()
            if teleop_sys.teleop_data.robot_attached and not prev_robot_attached:
                # Just (re-)attached: re-anchor the device frame to the robot.
                teleop_sys.reset_transform_mapping()
                if robot_name == "Tiago":
                    teleop_sys.reset_transform_mapping("left")
            else:
                env.step(teleop_sys.teleop_data_to_action())
            prev_robot_attached = teleop_sys.teleop_data.robot_attached
    # Shut everything down cleanly.
    teleop_sys.stop()
    env.close()


if __name__ == "__main__":
    main()
44 changes: 44 additions & 0 deletions omnigibson/examples/teleoperation/vr_simple_demo.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
"""
Example script for interacting with OmniGibson scenes with VR and BehaviorRobot.
"""
import omnigibson as og
from omnigibson.utils.teleop_utils import OVXRSystem

def main():
    """VR walkthrough of an interactive scene with a BehaviorRobot.

    Loads the Rs_int scene, attaches a SteamVR headset through the OVXR
    system, and runs a simulation loop mapping headset/controller input to
    BehaviorRobot actions, with touchpad-based locomotion enabled.
    """
    # Environment / scene / robot configuration.
    env_cfg = {"action_timestep": 1 / 60., "physics_timestep": 1 / 120.}
    scene_cfg = {"type": "InteractiveTraversableScene", "scene_model": "Rs_int"}
    robot0_cfg = {
        "type": "BehaviorRobot",
        "controller_config": {
            "gripper_0": {"command_input_limits": "default"},
            "gripper_1": {"command_input_limits": "default"},
        }
    }

    # Build the environment.
    env = og.Environment(configs=dict(env=env_cfg, scene=scene_cfg, robots=[robot0_cfg]))
    env.reset()

    # Start the VR system; touchpad movement lets the user translate the robot.
    vr_sys = OVXRSystem(robot=env.robots[0], show_control_marker=False, system="SteamVR", enable_touchpad_movement=True)
    # Slow base movement down for comfortable control from inside the headset.
    vr_sys.base_movement_speed = 0.03
    vr_sys.start()
    # Headset starts 1 m above the ground, facing +x.
    vr_sys.set_initial_transform(pos=[0, 0, 1], orn=[0, 0, 0, 1])

    # Main simulation loop.
    for _ in range(10000):
        # Pull the latest data from the VR runtime, then act on it.
        vr_sys.update()
        env.step(vr_sys.teleop_data_to_action())

    # Shut everything down cleanly.
    vr_sys.stop()
    env.close()


if __name__ == "__main__":
    main()
2 changes: 2 additions & 0 deletions omnigibson/robots/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,3 +11,5 @@
from omnigibson.robots.two_wheel_robot import TwoWheelRobot
from omnigibson.robots.franka import FrankaPanda
from omnigibson.robots.franka_allegro import FrankaAllegro
from omnigibson.robots.franka_leap import FrankaLeap
wensi-ai marked this conversation as resolved.
Show resolved Hide resolved
from omnigibson.robots.behavior_robot import BehaviorRobot
Loading
Loading