Merging action-primitives-moma branch with additional features #273

Merged · 18 commits · Sep 25, 2023
538 changes: 424 additions & 114 deletions omnigibson/action_primitives/starter_semantic_action_primitives.py

Large diffs are not rendered by default.

7 changes: 5 additions & 2 deletions omnigibson/controllers/ik_controller.py
@@ -123,7 +123,7 @@ def limiter(command_pos: Array[float], command_quat: Array[float], control_dict:
robot_description_path=robot_description_path,
robot_urdf_path=robot_urdf_path,
eef_name=eef_name,
default_joint_pos=default_joint_pos,
default_joint_pos=self.default_joint_pos,
)

# Other variables that will be filled in at runtime
@@ -270,7 +270,10 @@ def _command_to_control(self, command, control_dict):
target_joint_pos = self.solver.solve(
target_pos=target_pos,
target_quat=target_quat,
initial_joint_pos=current_joint_pos,
tolerance_pos=0.002,
weight_pos=20.0,
max_iterations=2000,
# initial_joint_pos=current_joint_pos,
)

if target_joint_pos is None:
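For readers following the IK change above: the PR drops the warm start from the current joint configuration and instead passes explicit solver settings. A minimal sketch of the resulting calling pattern, assuming a `solver` object whose `solve()` accepts these keyword arguments and returns `None` when no solution is found (names are illustrative, not necessarily OmniGibson's exact API):

```python
def compute_ik_joint_targets(solver, target_pos, target_quat, fallback_joint_pos):
    # Explicit tolerance, weight, and iteration budget replace the warm start
    # from the current joint positions (left commented out, as in the diff).
    target_joint_pos = solver.solve(
        target_pos=target_pos,
        target_quat=target_quat,
        tolerance_pos=0.002,   # 2 mm position tolerance
        weight_pos=20.0,       # weight position error more heavily
        max_iterations=2000,
        # initial_joint_pos=current_joint_pos,
    )
    # Fall back gracefully if the solver reports failure.
    return fallback_joint_pos if target_joint_pos is None else target_joint_pos
```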
19 changes: 16 additions & 3 deletions omnigibson/robots/manipulation_robot.py
@@ -278,6 +278,9 @@ def is_grasping(self, arm="default", candidate_obj=None):
finger_links = {link for link in self.finger_links[arm]}
is_grasping = len(candidate_obj.states[ContactBodies].get_value().intersection(finger_links)) > 0

if arm == self.default_arm:
pass
# print("GRASPING:", is_grasping)
return is_grasping

def _find_gripper_contacts(self, arm="default", return_contact_positions=False):
@@ -1260,8 +1263,8 @@ def _dump_state(self):
if self.grasping_mode == "physical":
return state

# TODO: Include AG_state

# Include AG_state
state["ag_obj_constraint_params"] = self._ag_obj_constraint_params
return state

def _load_state(self, state):
@@ -1271,7 +1274,17 @@
if self.grasping_mode == "physical":
return

# TODO: Include AG_state
# Include AG_state
# TODO: currently does not take care of cloth objects
self._ag_obj_constraint_params = state["ag_obj_constraint_params"]
for arm in self._ag_obj_constraint_params.keys():
if len(self._ag_obj_constraint_params[arm]) > 0:
data = self._ag_obj_constraint_params[arm]
obj = og.sim.scene.object_registry("prim_path", data["ag_obj_prim_path"])
link = obj.links[data["ag_link_prim_path"].split("/")[-1]]
self._ag_data[arm] = (obj, link)
self._ag_obj_in_hand[arm] = obj
self._establish_grasp(arm=arm, ag_data=self._ag_data[arm], contact_pos=data["contact_pos"])

def _serialize(self, state):
# Call super first
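To make the new assisted-grasping (AG) state round-trip easier to follow, here is a condensed sketch of the dump/restore logic added above, rewritten as free functions. The `_ag_*` attributes, `_establish_grasp`, and the constraint-parameter keys come from the diff; `object_registry` stands in for `og.sim.scene.object_registry` and is an assumption about how objects are looked up:

```python
def dump_ag_state(robot):
    # Persist the per-arm assisted-grasping constraint parameters.
    return {"ag_obj_constraint_params": robot._ag_obj_constraint_params}

def load_ag_state(robot, state, object_registry):
    robot._ag_obj_constraint_params = state["ag_obj_constraint_params"]
    for arm, data in robot._ag_obj_constraint_params.items():
        if not data:
            continue  # nothing was grasped with this arm
        # Resolve the grasped object and link from their prim paths ...
        obj = object_registry("prim_path", data["ag_obj_prim_path"])
        link = obj.links[data["ag_link_prim_path"].split("/")[-1]]
        # ... then re-create the grasp bookkeeping and constraint.
        robot._ag_data[arm] = (obj, link)
        robot._ag_obj_in_hand[arm] = obj
        robot._establish_grasp(arm=arm, ag_data=robot._ag_data[arm],
                               contact_pos=data["contact_pos"])
```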
1 change: 1 addition & 0 deletions omnigibson/robots/robot_base.py
@@ -288,6 +288,7 @@ def _get_proprioception_dict(self):
joint_qvel=joint_velocities,
joint_qeffort=joint_efforts,
robot_pos=pos,
robot_pose=np.append(pos[:2], ori[2]),
robot_ori_cos=np.cos(ori),
robot_ori_sin=np.sin(ori),
robot_lin_vel=self.get_linear_velocity(),
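The single addition above packs a planar pose into the proprioception dict. A small sketch with made-up values, assuming `pos` is the world-frame position and `ori` is a roll/pitch/yaw Euler triple (consistent with the neighboring `robot_ori_cos`/`robot_ori_sin` entries):

```python
import numpy as np

pos = np.array([1.0, 2.0, 0.05])         # example world position (x, y, z)
ori = np.array([0.0, 0.0, np.pi / 2.0])  # example Euler angles (roll, pitch, yaw)

# "robot_pose" = planar pose (x, y, yaw)
robot_pose = np.append(pos[:2], ori[2])  # -> array([1.0, 2.0, 1.5708])
```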
7 changes: 7 additions & 0 deletions omnigibson/robots/tiago.py
@@ -665,6 +665,7 @@ def finger_joint_names(self):

@property
def usd_path(self):
# return os.path.join(gm.ASSET_PATH, "models/tiago/tiago_dual_omnidirectional_stanford/tiago_dual_omnidirectional_stanford_33_with_wrist_cam.usd")
return os.path.join(gm.ASSET_PATH, "models/tiago/tiago_dual_omnidirectional_stanford/tiago_dual_omnidirectional_stanford_33.usd")

@property
@@ -674,6 +675,7 @@ def simplified_mesh_usd_path(self):
@property
def robot_arm_descriptor_yamls(self):
return {"left": os.path.join(gm.ASSET_PATH, "models/tiago/tiago_dual_omnidirectional_stanford_left_arm_descriptor.yaml"),
"left_fixed": os.path.join(gm.ASSET_PATH, "models/tiago/tiago_dual_omnidirectional_stanford_left_arm_fixed_trunk_descriptor.yaml"),
"right": os.path.join(gm.ASSET_PATH, "models/tiago/tiago_dual_omnidirectional_stanford_right_arm_fixed_trunk_descriptor.yaml"),
"combined": os.path.join(gm.ASSET_PATH, "models/tiago/tiago_dual_omnidirectional_stanford.yaml")}

@@ -727,6 +729,11 @@ def set_position_orientation(self, position=None, orientation=None):
super().set_position_orientation(position, orientation)
# Move the joint frame for the world_base_joint
if self._world_base_fixed_joint_prim is not None:
# Position and orientation are lists when restoring scene from json. Cast them to np.array
if isinstance(position, list):
position = np.array(position)
if isinstance(orientation, list):
orientation = np.array(orientation)
self._world_base_fixed_joint_prim.GetAttribute("physics:localPos0").Set(tuple(position))
self._world_base_fixed_joint_prim.GetAttribute("physics:localRot0").Set(Gf.Quatf(*orientation[[3, 0, 1, 2]]))

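An aside on the list-to-array cast added in `set_position_orientation`: the quaternion reorder on the following line relies on numpy fancy indexing, which plain Python lists (as loaded from a JSON scene file) do not support. A minimal illustration with an example value:

```python
import numpy as np

orientation = [0.0, 0.0, 0.7071068, 0.7071068]  # example (x, y, z, w) from JSON
if isinstance(orientation, list):
    orientation = np.array(orientation)

# Reorder (x, y, z, w) -> (w, x, y, z), the order expected by USD's Gf.Quatf.
wxyz = orientation[[3, 0, 1, 2]]  # works on an ndarray; a plain list would raise
                                  # TypeError: list indices must be integers or slices, not list
```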
67 changes: 42 additions & 25 deletions omnigibson/utils/grasping_planning_utils.py
@@ -8,9 +8,6 @@
from omnigibson.utils.constants import JointType, JointAxis
from omni.isaac.core.utils.rotations import gf_quat_to_np_array


p = None

REVOLUTE_JOINT_FRACTION_ACROSS_SURFACE_AXIS_BOUNDS = (0.4, 0.6)
PRISMATIC_JOINT_FRACTION_ACROSS_SURFACE_AXIS_BOUNDS = (0.2, 0.8)
GRASP_OFFSET = np.array([0, 0.05, -0.08])
@@ -79,7 +76,7 @@ def get_grasp_poses_for_object_sticky(target_obj):

# return grasp_candidate

def get_grasp_position_for_open(robot, target_obj, should_open, relevant_joint=None):
def get_grasp_position_for_open(robot, target_obj, should_open, relevant_joint=None, num_waypoints="default"):
# Pick a moving link of the object.
relevant_joints = [relevant_joint] if relevant_joint is not None else _get_relevant_joints(target_obj)[1]
if len(relevant_joints) == 0:
@@ -89,24 +86,34 @@ def get_grasp_position_for_open(robot, target_obj, should_open, relevant_joint=N
random.shuffle(relevant_joints)
selected_joint = None
for joint in relevant_joints:
# from IPython import embed; embed()
# if joint.name == "drawer:joint_j_link_2":
# selected_joint = joint
# break
current_position = joint.get_state()[0][0]
joint_range = joint.upper_limit - joint.lower_limit
openness_fraction = (current_position - joint.lower_limit) / joint_range
if (should_open and openness_fraction < 0.8) or (not should_open and openness_fraction > 0.05):
selected_joint = joint
break
# current_position = joint.get_state()[0][0]
# joint_range = joint.upper_limit - joint.lower_limit
# openness_fraction = (current_position - joint.lower_limit) / joint_range
# if (should_open and openness_fraction < 0.8) or (not should_open and openness_fraction > 0.05):
# selected_joint = joint

if selected_joint is None:
return None

if selected_joint.joint_type == JointType.JOINT_REVOLUTE:
return grasp_position_for_open_on_revolute_joint(robot, target_obj, selected_joint, should_open)
return grasp_position_for_open_on_revolute_joint(robot, target_obj, selected_joint, should_open, num_waypoints=num_waypoints)
elif selected_joint.joint_type == JointType.JOINT_PRISMATIC:
return grasp_position_for_open_on_prismatic_joint(robot, target_obj, selected_joint, should_open)
return grasp_position_for_open_on_prismatic_joint(robot, target_obj, selected_joint, should_open, num_waypoints=num_waypoints)
else:
raise ValueError("Unknown joint type encountered while generating joint position.")


def grasp_position_for_open_on_prismatic_joint(robot, target_obj, relevant_joint, should_open):
def grasp_position_for_open_on_prismatic_joint(robot, target_obj, relevant_joint, should_open, num_waypoints="default"):
link_name = relevant_joint.body1.split("/")[-1]

# Get the bounding box of the child link.
@@ -118,17 +125,15 @@
) = target_obj.get_base_aligned_bbox(link_name=link_name, visual=False)

# Match the push axis to one of the bb axes.
push_axis_idx = JointAxis.index(relevant_joint.axis)
canonical_push_axis = np.eye(3)[push_axis_idx]
joint_orientation = gf_quat_to_np_array(relevant_joint.get_attribute("physics:localRot0"))[[1, 2, 3, 0]]
push_axis = R.from_quat(joint_orientation).apply([1, 0, 0])
assert np.isclose(np.max(np.abs(push_axis)), 1.0) # Make sure we're aligned with a bb axis.
push_axis_idx = np.argmax(np.abs(push_axis))
canonical_push_axis = np.eye(3)[push_axis_idx]


# TODO: Need to figure out how to get the correct push direction.
canonical_push_direction = canonical_push_axis * np.sign(push_axis[push_axis_idx])
push_direction = np.sign(push_axis[push_axis_idx]) if should_open else -1 * np.sign(push_axis[push_axis_idx])
canonical_push_direction = canonical_push_axis * push_direction

# Pick the closer of the two faces along the push axis as our favorite.
points_along_push_axis = (
@@ -158,11 +163,15 @@ def grasp_position_for_open_on_prismatic_joint(robot, target_obj, relevant_joint
PRISMATIC_JOINT_FRACTION_ACROSS_SURFACE_AXIS_BOUNDS[1] * diff_lateral_pos_wrt_surface_center,
)
lateral_pos_wrt_surface_center = min_lateral_pos_wrt_surface_center + sampled_lateral_pos_wrt_min
grasp_position_in_bbox_frame = center_of_selected_surface_along_push_axis + lateral_pos_wrt_surface_center + canonical_push_direction * 0.2
grasp_position_in_bbox_frame = center_of_selected_surface_along_push_axis + lateral_pos_wrt_surface_center
grasp_quat_in_bbox_frame = T.quat_inverse(joint_orientation)
grasp_pose_in_world_frame = T.pose_transform(
bbox_center_in_world, bbox_quat_in_world, grasp_position_in_bbox_frame, grasp_quat_in_bbox_frame
)

# Now apply the grasp offset.
offset_grasp_pose_in_bbox_frame = (grasp_position_in_bbox_frame, grasp_quat_in_bbox_frame)
dist_from_grasp_pos = robot.finger_lengths[robot.default_arm] + 0.05
offset_grasp_pose_in_bbox_frame = (grasp_position_in_bbox_frame + canonical_push_axis * push_axis_closer_side_sign * dist_from_grasp_pos, grasp_quat_in_bbox_frame)
offset_grasp_pose_in_world_frame = T.pose_transform(
bbox_center_in_world, bbox_quat_in_world, *offset_grasp_pose_in_bbox_frame
)
@@ -172,7 +181,7 @@ def grasp_position_for_open_on_prismatic_joint(robot, target_obj, relevant_joint
current_joint_pos = relevant_joint.get_state()[0][0]

required_pos_change = target_joint_pos - current_joint_pos
push_vector_in_bbox_frame = canonical_push_direction * required_pos_change
push_vector_in_bbox_frame = canonical_push_direction * abs(required_pos_change)
target_hand_pos_in_bbox_frame = grasp_position_in_bbox_frame + push_vector_in_bbox_frame
target_hand_pose_in_world_frame = T.pose_transform(
bbox_center_in_world, bbox_quat_in_world, target_hand_pos_in_bbox_frame, grasp_quat_in_bbox_frame
@@ -183,7 +192,13 @@ def grasp_position_for_open_on_prismatic_joint(robot, target_obj, relevant_joint

# Decide whether a grasp is required. If approach direction and displacement are similar, no need to grasp.
grasp_required = np.dot(push_vector_in_bbox_frame, canonical_push_axis * -push_axis_closer_side_sign) < 0
waypoints = interpolate_waypoints(offset_grasp_pose_in_world_frame, target_hand_pose_in_world_frame)
# TODO: Need to find a better way of getting the predicted eef position for the start point of the interpolated waypoints. Maybe
# break this into another function that is called after the grasp is executed, so we know the eef position?
waypoint_start_offset = -0.05 * approach_direction_in_world_frame if should_open else 0.05 * approach_direction_in_world_frame
waypoint_start_pose = (grasp_pose_in_world_frame[0] + -1 * approach_direction_in_world_frame * (robot.finger_lengths[robot.default_arm] + waypoint_start_offset), grasp_pose_in_world_frame[1])
waypoint_end_pose = (target_hand_pose_in_world_frame[0] + -1 * approach_direction_in_world_frame * (robot.finger_lengths[robot.default_arm]), target_hand_pose_in_world_frame[1])
waypoints = interpolate_waypoints(waypoint_start_pose, waypoint_end_pose, num_waypoints=num_waypoints)

return (
offset_grasp_pose_in_world_frame,
waypoints,
@@ -193,16 +208,19 @@ def grasp_position_for_open_on_prismatic_joint(robot, target_obj, relevant_joint
required_pos_change
)

def interpolate_waypoints(start_pose, end_pose):

def interpolate_waypoints(start_pose, end_pose, num_waypoints="default"):
start_pos, start_orn = start_pose
travel_distance = np.linalg.norm(end_pose[0] - start_pos)
num_poses = np.max([2, int(travel_distance / 0.01) + 1])
pos_waypoints = np.linspace(start_pos, end_pose[0], num_poses)

if num_waypoints == "default":
num_waypoints = np.max([2, int(travel_distance / 0.01) + 1])
pos_waypoints = np.linspace(start_pos, end_pose[0], num_waypoints)

# Also interpolate the rotations
combined_rotation = R.from_quat(np.array([start_orn, end_pose[1]]))
slerp = Slerp([0, 1], combined_rotation)
orn_waypoints = slerp(np.linspace(0, 1, num_poses))
orn_waypoints = slerp(np.linspace(0, 1, num_waypoints))
quat_waypoints = [x.as_quat() for x in orn_waypoints]
return [waypoint for waypoint in zip(pos_waypoints, quat_waypoints)]
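The helper above pairs `np.linspace` for positions with scipy's `Slerp` for orientations, spacing waypoints roughly 1 cm apart unless `num_waypoints` is given. A self-contained sketch of that pattern (a paraphrase, not the module's code):

```python
import numpy as np
from scipy.spatial.transform import Rotation as R, Slerp

def interpolate_pose_waypoints(start_pose, end_pose, num_waypoints="default"):
    start_pos, start_quat = start_pose
    end_pos, end_quat = end_pose
    travel_distance = np.linalg.norm(np.asarray(end_pos) - np.asarray(start_pos))
    if num_waypoints == "default":
        # Roughly one waypoint per centimeter, never fewer than two.
        num_waypoints = max(2, int(travel_distance / 0.01) + 1)
    pos_waypoints = np.linspace(start_pos, end_pos, num_waypoints)
    # SLERP between the two endpoint orientations (xyzw quaternions).
    slerp = Slerp([0, 1], R.from_quat([start_quat, end_quat]))
    quat_waypoints = [r.as_quat() for r in slerp(np.linspace(0, 1, num_waypoints))]
    return list(zip(pos_waypoints, quat_waypoints))

# Example: a 10 cm straight-line motion with a 90-degree yaw change -> 11 waypoints.
waypoints = interpolate_pose_waypoints(
    ([0.0, 0.0, 0.5], [0.0, 0.0, 0.0, 1.0]),
    ([0.1, 0.0, 0.5], [0.0, 0.0, 0.7071068, 0.7071068]),
)
```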

@@ -279,7 +297,8 @@ def grasp_position_for_open_on_revolute_joint(robot, target_obj, relevant_joint,
grasp_quat_in_bbox_frame = get_orientation_facing_vector_with_random_yaw(canonical_open_direction * open_axis_closer_side_sign * -1)

# Now apply the grasp offset.
offset_in_bbox_frame = canonical_open_direction * open_axis_closer_side_sign * 0.1
dist_from_grasp_pos = robot.finger_lengths[robot.default_arm] + 0.05
offset_in_bbox_frame = canonical_open_direction * open_axis_closer_side_sign * dist_from_grasp_pos
offset_grasp_pose_in_bbox_frame = (grasp_position + offset_in_bbox_frame, grasp_quat_in_bbox_frame)
offset_grasp_pose_in_world_frame = T.pose_transform(
bbox_center_in_world, bbox_quat_in_world, *offset_grasp_pose_in_bbox_frame
@@ -302,7 +321,7 @@ def grasp_position_for_open_on_revolute_joint(robot, target_obj, relevant_joint,
for i in range(turn_steps):
partial_yaw_change = (i + 1) / turn_steps * required_yaw_change
rotated_grasp_pose_in_bbox_frame = rotate_point_around_axis(
offset_grasp_pose_in_bbox_frame, bbox_wrt_origin, joint_axis, partial_yaw_change
(offset_grasp_pose_in_bbox_frame[0], offset_grasp_pose_in_bbox_frame[1]), bbox_wrt_origin, joint_axis, partial_yaw_change
)
rotated_grasp_pose_in_world_frame = T.pose_transform(
bbox_center_in_world, bbox_quat_in_world, *rotated_grasp_pose_in_bbox_frame
@@ -332,7 +351,7 @@ def get_orientation_facing_vector_with_random_yaw(vector):
side = np.cross(rand_vec, forward)
side /= np.linalg.norm(3)
up = np.cross(forward, side)
assert np.isclose(np.linalg.norm(up), 1, atol=1e-3)
# assert np.isclose(np.linalg.norm(up), 1, atol=1e-3)
rotmat = np.array([forward, side, up]).T
return R.from_matrix(rotmat).as_quat()

@@ -361,6 +380,4 @@ def get_closest_point_to_point_in_world_frame(
vector_in_arbitrary_frame = vectors_in_arbitrary_frame[closer_option_idx]
vector_in_world_frame = vectors_in_world[closer_option_idx]

return closer_option_idx, vector_in_arbitrary_frame, vector_in_world_frame
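Finally, a self-contained sketch of the push-axis selection used in the prismatic-joint hunk above: the joint's local x-axis is rotated by the joint frame orientation and then snapped to the nearest bounding-box axis, whose sign gives the push direction. This is a paraphrase carrying over the diff's own assumption that the joint axis is (nearly) aligned with a bbox axis:

```python
import numpy as np
from scipy.spatial.transform import Rotation as R

def match_push_axis(joint_quat_xyzw):
    # Rotate the joint's local x-axis into the bbox-aligned frame.
    push_axis = R.from_quat(joint_quat_xyzw).apply([1.0, 0.0, 0.0])
    # Assumption from the diff: the joint axis lines up with one bbox axis.
    assert np.isclose(np.max(np.abs(push_axis)), 1.0, atol=1e-3)
    push_axis_idx = int(np.argmax(np.abs(push_axis)))
    canonical_push_axis = np.eye(3)[push_axis_idx]
    push_sign = float(np.sign(push_axis[push_axis_idx]))
    return canonical_push_axis, push_axis_idx, push_sign

# Example: an identity joint orientation pushes along the bbox +x axis.
axis, idx, sign = match_push_axis([0.0, 0.0, 0.0, 1.0])  # ([1., 0., 0.], 0, 1.0)
```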