[add] support panda gripper teleop retargeting #25

Merged · 4 commits · Jul 19, 2024
2 changes: 1 addition & 1 deletion in assets
Submodule assets updated 60 files
+2 −0 .gitignore
+24 −0 CITATION.cff
+21 −1 README.md
+ doc/gallery/inspire-collision.png
+ doc/gallery/inspire_rt.webp
+ doc/gallery/panda-collision.png
+ doc/gallery/panda_rt.webp
+1,932 −0 robots/arms/rm75/meshes/collision/rm75_link_0.obj
+3,235 −0 robots/arms/rm75/meshes/collision/rm75_link_1.obj
+3,139 −0 robots/arms/rm75/meshes/collision/rm75_link_2.obj
+3,073 −0 robots/arms/rm75/meshes/collision/rm75_link_3.obj
+3,661 −0 robots/arms/rm75/meshes/collision/rm75_link_4.obj
+3,286 −0 robots/arms/rm75/meshes/collision/rm75_link_5.obj
+3,367 −0 robots/arms/rm75/meshes/collision/rm75_link_6.obj
+1,254 −0 robots/arms/rm75/meshes/collision/rm75_link_7.obj
+ robots/arms/rm75/meshes/visual/rm75_link_0.glb
+62 −0 robots/arms/rm75/meshes/visual/rm75_link_0.mtl
+15,400 −0 robots/arms/rm75/meshes/visual/rm75_link_0.obj
+ robots/arms/rm75/meshes/visual/rm75_link_1.glb
+22 −0 robots/arms/rm75/meshes/visual/rm75_link_1.mtl
+4,879 −0 robots/arms/rm75/meshes/visual/rm75_link_1.obj
+ robots/arms/rm75/meshes/visual/rm75_link_2.glb
+22 −0 robots/arms/rm75/meshes/visual/rm75_link_2.mtl
+7,297 −0 robots/arms/rm75/meshes/visual/rm75_link_2.obj
+ robots/arms/rm75/meshes/visual/rm75_link_3.glb
+22 −0 robots/arms/rm75/meshes/visual/rm75_link_3.mtl
+4,833 −0 robots/arms/rm75/meshes/visual/rm75_link_3.obj
+ robots/arms/rm75/meshes/visual/rm75_link_4.glb
+22 −0 robots/arms/rm75/meshes/visual/rm75_link_4.mtl
+7,002 −0 robots/arms/rm75/meshes/visual/rm75_link_4.obj
+ robots/arms/rm75/meshes/visual/rm75_link_5.glb
+22 −0 robots/arms/rm75/meshes/visual/rm75_link_5.mtl
+5,119 −0 robots/arms/rm75/meshes/visual/rm75_link_5.obj
+ robots/arms/rm75/meshes/visual/rm75_link_6.glb
+22 −0 robots/arms/rm75/meshes/visual/rm75_link_6.mtl
+6,888 −0 robots/arms/rm75/meshes/visual/rm75_link_6.obj
+ robots/arms/rm75/meshes/visual/rm75_link_7.glb
+32 −0 robots/arms/rm75/meshes/visual/rm75_link_7.mtl
+8,526 −0 robots/arms/rm75/meshes/visual/rm75_link_7.obj
+204 −0 robots/arms/rm75/rm75.urdf
+204 −0 robots/arms/rm75/rm75_glb.urdf
+622 −0 robots/assembly/rm75_inspire/rm75_inspire_left_hand.urdf
+622 −0 robots/assembly/rm75_inspire/rm75_inspire_right_hand.urdf
+1 −1 robots/assembly/xarm7_ability/xarm7_ability_left_hand.urdf
+1 −1 robots/assembly/xarm7_ability/xarm7_ability_left_hand_glb.urdf
+1 −1 robots/assembly/xarm7_ability/xarm7_ability_right_hand.urdf
+1 −1 robots/assembly/xarm7_ability/xarm7_ability_right_hand_glb.urdf
+6 −0 robots/hands/ability_hand/LICENSE.txt
+2 −3 robots/hands/allegro_hand/LICENSE
+2 −3 robots/hands/barrett_hand/LICENSE
+2 −3 robots/hands/dclaw_gripper/LICENSE
+6 −0 robots/hands/inspire_hand/LICENSE.txt
+3 −0 robots/hands/leap_hand/LICENSE.txt
+ robots/hands/panda_gripper/meshes/visual/finger.glb
+ robots/hands/panda_gripper/meshes/visual/hand.glb
+125 −0 robots/hands/panda_gripper/panda_gripper_glb.urdf
+2 −3 robots/hands/schunk_hand/LICENSE
+2 −3 robots/hands/shadow_hand/LICENSE
+8 −6 tools/generate_urdf_animation_sapien.py
+14 −6 tools/generate_urdf_collision_figure_sapien.py
16 changes: 16 additions & 0 deletions in dex_retargeting/configs/teleop/panda_gripper_both.yml
@@ -0,0 +1,16 @@
retargeting:
type: vector
urdf_path: panda_gripper/panda_gripper_glb.urdf

# Target refers to the retargeting target, which is the robot hand
target_joint_names: [ "panda_finger_joint1" ]
target_origin_link_names: [ "panda_leftfinger" ]
target_task_link_names: [ "panda_rightfinger" ]
scaling_factor: 1.5

# Source refers to the retargeting input, which usually corresponds to the human hand
# The indices of the human hand joints that correspond to each link in target_link_names
target_link_human_indices: [ [ 4 ], [ 8 ] ]

# A smaller alpha means stronger filtering, i.e. smoother output but larger latency
low_pass_alpha: 0.2
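
Below is a minimal usage sketch (not part of this PR) showing how this config could drive the gripper from 21 MediaPipe-style hand keypoints. The `retarget` call, the `optimizer.target_link_human_indices` attribute, and `set_default_urdf_dir` follow the patterns in the existing dex_retargeting examples; the asset directory and keypoint layout here are assumptions.

```python
import numpy as np

from dex_retargeting.retargeting_config import RetargetingConfig

# Assumption: the robot URDFs live in the assets submodule, so the relative urdf_path above
# ("panda_gripper/panda_gripper_glb.urdf") resolves from this directory.
RetargetingConfig.set_default_urdf_dir("assets/robots/hands")

retargeting = RetargetingConfig.load_from_file(
    "dex_retargeting/configs/teleop/panda_gripper_both.yml"
).build()

# 21 wrist-relative hand keypoints in meters (MediaPipe ordering assumed); zeros as a placeholder.
joint_pos = np.zeros((21, 3))

# For a vector-type config, row 0 of target_link_human_indices holds the origin indices and
# row 1 the task indices, so [[4], [8]] selects the thumb-tip -> index-tip vector.
indices = np.asarray(retargeting.optimizer.target_link_human_indices)
ref_value = joint_pos[indices[1], :] - joint_pos[indices[0], :]

qpos = retargeting.retarget(ref_value)  # optimized gripper joint value(s)
print(qpos)
```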
3 changes: 3 additions & 0 deletions in dex_retargeting/constants.py
@@ -9,6 +9,7 @@ class RobotName(enum.Enum):
leap = enum.auto()
ability = enum.auto()
inspire = enum.auto()
panda = enum.auto()


class RetargetingType(enum.Enum):
@@ -20,6 +21,7 @@ class RetargetingType(enum.Enum):
class HandType(enum.Enum):
right = enum.auto()
left = enum.auto()
both = enum.auto()


ROBOT_NAME_MAP = {
@@ -29,6 +31,7 @@ class HandType(enum.Enum):
RobotName.leap: "leap_hand",
RobotName.ability: "ability_hand",
RobotName.inspire: "inspire_hand",
RobotName.panda: "panda_gripper",
}

ROBOT_NAMES = list(ROBOT_NAME_MAP.keys())
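
With the new enum members, the default-config lookup used elsewhere in the repo can address the gripper directly. A small sketch, assuming `get_default_config_path` resolves gripper teleop configs to the `_both.yml` file added in this PR:

```python
from dex_retargeting.constants import (
    ROBOT_NAME_MAP,
    HandType,
    RetargetingType,
    RobotName,
    get_default_config_path,
)

# Grippers ship a single "both"-handed config instead of left/right variants.
hand_type = HandType.both if "gripper" in ROBOT_NAME_MAP[RobotName.panda] else HandType.right
config_path = get_default_config_path(RobotName.panda, RetargetingType.vector, hand_type)
print(config_path)  # expected: .../configs/teleop/panda_gripper_both.yml
```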
9 changes: 8 additions & 1 deletion in example/profiling/profile_online_retargeting.py
@@ -48,7 +48,11 @@ def main():
# Vector retargeting
print(f"Being retargeting profiling with a trajectory of {data_len} hand poses.")
for robot_name in ROBOT_NAMES:
config_path = get_default_config_path(robot_name, RetargetingType.vector, HandType.right)
config_path = get_default_config_path(
robot_name,
RetargetingType.vector,
HandType.right if "gripper" not in ROBOT_NAME_MAP[robot_name] else HandType.both,
)
retargeting = RetargetingConfig.load_from_file(config_path).build()
total_time = profile_retargeting(retargeting, joint_data)
print(
@@ -57,6 +61,9 @@

# DexPilot retargeting
for robot_name in ROBOT_NAMES:
if "gripper" in ROBOT_NAME_MAP[robot_name]:
print(f"Skip {ROBOT_NAME_MAP[robot_name]} for DexPilot retargeting.")
continue
config_path = get_default_config_path(robot_name, RetargetingType.dexpilot, HandType.right)
retargeting = RetargetingConfig.load_from_file(config_path).build()
total_time = profile_retargeting(retargeting, joint_data)
4 changes: 2 additions & 2 deletions in example/vector_retargeting/README.md
@@ -41,10 +41,10 @@ This command uses the data saved from the previous step to create a rendered vid
*The following instructions assume that your computer has a webcam connected.*

```bash
python3 capture_webcam.py --video-path example/vector_retargeting/data/my_human_hand_video.mp4
python3 capture_webcam.py --video-path data/my_human_hand_video.mp4
```

This command enables you to use your webcam to record a video saved in MP4 format. To stop recording, press `q` on your
This command enables you to use your webcam to record a video saved in MP4 format. To stop recording, press `Esc` on your
keyboard.

### Real-time Visualization of Hand Retargeting via Webcam
1 change: 1 addition & 0 deletions in example/vector_retargeting/capture_webcam.py
@@ -30,6 +30,7 @@ def main(video_path: str, video_capture_device: Union[str, int] = 0):
if cv2.waitKey(1) & 0xFF == 27:
break

print('Recording finished')
cap.release()
writer.release()
cv2.destroyAllWindows()
18 changes: 15 additions & 3 deletions in tests/test_optimizer.py
@@ -67,6 +67,10 @@ def generate_position_retargeting_data_gt(robot: RobotWrapper, optimizer: Positi
def test_position_optimizer(self, robot_name, hand_type):
config_path = get_default_config_path(robot_name, RetargetingType.position, hand_type)

if not config_path.exists():
print(f"Skip the test for {robot_name.name} as the config file does not exist.")
return

# Note: The parameters below are adjusted solely for this test
# The smoothness penalty is deactivated here, meaning no low pass filter and no continuous joint value
# This is because the test is focused solely on the efficiency of single step optimization
Expand Down Expand Up @@ -97,7 +101,7 @@ def test_position_optimizer(self, robot_name, hand_type):
computed_target_pos = np.array([robot.get_link_pose(i)[:3, 3] for i in optimizer.target_link_indices])

# Position difference
error = np.mean(np.linalg.norm(computed_target_pos - random_target_pos, axis=1))
error = np.mean(np.linalg.norm(computed_target_pos - random_target_pos, axis=-1))
errors["pos"].append(error)

tac = time()
@@ -110,6 +114,10 @@ def test_position_optimizer(self, robot_name, hand_type):
def test_vector_optimizer(self, robot_name, hand_type):
config_path = get_default_config_path(robot_name, RetargetingType.vector, hand_type)

if not config_path.exists():
print(f"Skip the test for {robot_name.name} as the config file does not exist.")
return

# Note: The parameters below are adjusted solely for this test
# For retargeting from human to robot, their values should remain the default in the retargeting config
# The smoothness penalty is deactivated here, meaning no low pass filter and no continuous joint value
@@ -146,7 +154,7 @@ def test_vector_optimizer(self, robot_name, hand_type):
computed_target_vector = computed_task_pos - computed_origin_pos

# Vector difference
error = np.mean(np.linalg.norm(computed_target_vector - random_target_vector, axis=1))
error = np.mean(np.linalg.norm(computed_target_vector - random_target_vector, axis=-1))
errors["pos"].append(error)

tac = time()
@@ -159,6 +167,10 @@ def test_dexpilot_optimizer(self, robot_name, hand_type):
def test_dexpilot_optimizer(self, robot_name, hand_type):
config_path = get_default_config_path(robot_name, RetargetingType.dexpilot, hand_type)

if not config_path.exists():
print(f"Skip the test for {robot_name.name} as the config file does not exist.")
return

# Note: The parameters below are adjusted solely for this test
# For retargeting from human to robot, their values should remain the default in the retargeting config
# The smoothness penalty is deactivated here, meaning no low pass filter and no continuous joint value
@@ -194,7 +206,7 @@ def test_dexpilot_optimizer(self, robot_name, hand_type):
computed_target_vector = computed_task_pos - computed_origin_pos

# Vector difference
error = np.mean(np.linalg.norm(computed_target_vector - random_target_vector, axis=1))
error = np.mean(np.linalg.norm(computed_target_vector - random_target_vector, axis=-1))
errors["pos"].append(error)

tac = time()
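
A hedged aside on the `axis=1` to `axis=-1` change above: both give the same per-vector norms for the usual `(N, 3)` arrays, but `axis=-1` also keeps working if a single-vector setup such as the gripper's one thumb-to-index vector ever yields a flat `(3,)` array, where `axis=1` would raise. A quick numpy illustration:

```python
import numpy as np

batch = np.random.rand(5, 3)           # several fingertip vectors, shape (N, 3)
single = np.array([0.02, 0.0, 0.01])   # one gripper vector, shape (3,)

# Identical results for 2-D input.
assert np.allclose(np.linalg.norm(batch, axis=1), np.linalg.norm(batch, axis=-1))

print(np.linalg.norm(single, axis=-1))  # fine: a single scalar norm
# np.linalg.norm(single, axis=1)        # AxisError: axis 1 is out of bounds for dimension 1
```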