More fixes

cgokmen committed Oct 25, 2023
1 parent 10af25a commit 2dc78d7
Showing 5 changed files with 26 additions and 39 deletions.
48 changes: 18 additions & 30 deletions omnigibson/action_primitives/starter_semantic_action_primitives.py
@@ -88,6 +88,8 @@
TORSO_FIXED = False
JOINT_POS_DIFF_THRESHOLD = 0.005

MAX_ALLOWED_JOINT_ERROR_FOR_LINEAR_MOTION = np.deg2rad(45)

log = create_module_log(module_name=__name__)


@@ -567,7 +569,7 @@ def _move_base_backward(self, steps=5, speed=0.2):

def _move_hand_backward(self, steps=5, speed=0.2):
"""
Yields action for the robot to move hand so the eef is in the target pose using the planner
Yields action for the robot to move its hand backward.
Args:
steps (int): steps to move eef
@@ -577,17 +579,15 @@ def _move_hand_backward(self, steps=5, speed=0.2):
np.array or None: Action array for one step for the robot to move hand or None if it's at the target pose
"""
initial_eef_pos = self.robot.get_eef_position()
print("moving hand backward")
for _ in range(steps):
action = self._empty_action(arm_pose_to_keep=initial_eef_pos)
action[self.robot.controller_action_idx["gripper_{}".format(self.arm)]] = 1.0
action[self.robot.controller_action_idx["arm_{}".format(self.arm)][0]] = -speed
yield self._postprocess_action(action)
print("moving hand backward done")

def _move_hand_upward(self, steps=5, speed=0.1):
"""
Yields action for the robot to move hand so the eef is in the target pose using the planner
Yields action for the robot to move hand upward.
Args:
steps (int): steps to move eef
@@ -596,14 +596,13 @@ def _move_hand_upward(self, steps=5, speed=0.1):
Returns:
np.array or None: Action array for one step for the robot to move hand or None if it's at the target pose
"""
# TODO: Combine these movement functions.
initial_eef_pos = self.robot.get_eef_position()
print("moving hand backward")
for _ in range(steps):
action = self._empty_action(arm_pose_to_keep=initial_eef_pos)
action[self.robot.controller_action_idx["gripper_{}".format(self.arm)]] = 1.0
action[self.robot.controller_action_idx["arm_{}".format(self.arm)][2]] = speed
yield self._postprocess_action(action)
print("moving hand backward done")

def _grasp(self, obj):
"""
@@ -1062,9 +1061,7 @@ def _move_hand_direct_ik(self, target_pose, stop_on_contact=False, ignore_failur
pos_diff = np.linalg.norm(prev_pos - current_pos)
orn_diff = (Rotation.from_quat(prev_orn) * Rotation.from_quat(current_orn).inv()).magnitude()
print(pos_diff, orn_diff)
if pos_diff < 0.0003 and orn_diff < 0.01:
raise ActionPrimitiveError(
ActionPrimitiveError.Reason.EXECUTION_ERROR,
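For context on the stuck check above (a standalone sketch with made-up poses, not code from the commit): the orientation error between two quaternions is the magnitude of their relative rotation, and the primitive treats the arm as stuck when both the position and orientation deltas between consecutive steps stay below the thresholds shown.

import numpy as np
from scipy.spatial.transform import Rotation

prev_pos, current_pos = np.array([0.30, 0.00, 0.50]), np.array([0.3001, 0.00, 0.50])
prev_orn = Rotation.from_euler("z", 0.000).as_quat()
current_orn = Rotation.from_euler("z", 0.005).as_quat()   # ~0.005 rad apart

pos_diff = np.linalg.norm(prev_pos - current_pos)          # 1e-4 m of motion
orn_diff = (Rotation.from_quat(prev_orn) * Rotation.from_quat(current_orn).inv()).magnitude()

# Same thresholds as in the diff: under 0.3 mm and 0.01 rad per step counts as stuck.
print(pos_diff < 0.0003 and orn_diff < 0.01)  # True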
@@ -1113,8 +1110,17 @@ def _move_hand_direct_cartesian(self, target_pose, stop_on_contact=False, ignore
if controller_config["name"] == "InverseKinematicsController":
waypoints = list(zip(pos_waypoints, quat_waypoints))

for waypoint in waypoints[:-1]:
yield from self._move_hand_direct_ik(waypoint, stop_on_contact=stop_on_contact, ignore_failure=ignore_failure, stop_if_stuck=stop_if_stuck)
for i, waypoint in enumerate(waypoints):
if i < len(waypoints) - 1:
yield from self._move_hand_direct_ik(waypoint, stop_on_contact=stop_on_contact, ignore_failure=ignore_failure, stop_if_stuck=stop_if_stuck)
else:
yield from self._move_hand_direct_ik(
waypoints[-1],
pos_thresh=0.01, ori_thresh=0.1,
stop_on_contact=stop_on_contact,
ignore_failure=ignore_failure,
stop_if_stuck=stop_if_stuck
)

# Also decide if we can stop early.
current_pos, current_orn = self.robot.eef_links[self.arm].get_position_orientation()
@@ -1125,24 +1131,6 @@ def _move_hand_direct_cartesian(self, target_pose, stop_on_contact=False, ignore

if stop_on_contact and detect_robot_collision_in_sim(self.robot, ignore_obj_in_hand=False):
return
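For context on the waypoint loop above (a sketch under the assumption that pos_waypoints and quat_waypoints interpolate between the current and target eef pose; their actual construction lives elsewhere in this file): positions can be interpolated linearly and orientations slerped, after which intermediate waypoints are tracked with the default thresholds and only the final waypoint with the tight 0.01 / 0.1 thresholds shown above.

import numpy as np
from scipy.spatial.transform import Rotation, Slerp

# Hypothetical endpoints standing in for the current eef pose and target_pose.
start_pos, end_pos = np.array([0.3, 0.0, 0.5]), np.array([0.5, 0.2, 0.7])
key_rots = Rotation.from_quat([
    Rotation.identity().as_quat(),
    Rotation.from_euler("z", np.pi / 4).as_quat(),
])

fractions = np.linspace(0.0, 1.0, 5)
pos_waypoints = start_pos + fractions[:, None] * (end_pos - start_pos)   # linear in position
quat_waypoints = Slerp([0.0, 1.0], key_rots)(fractions).as_quat()        # slerp in orientation

waypoints = list(zip(pos_waypoints, quat_waypoints))   # what the loop above iterates over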

yield from self._move_hand_direct_ik(
waypoints[-1],
pos_thresh=0.01, ori_thresh=0.1,
stop_on_contact=stop_on_contact,
ignore_failure=ignore_failure,
stop_if_stuck=stop_if_stuck
)

# Also decide if we can stop early.
current_pos, current_orn = self.robot.eef_links[self.arm].get_position_orientation()
pos_diff = np.linalg.norm(np.array(current_pos) - np.array(target_pose[0]))
orn_diff = (Rotation.from_quat(current_orn) * Rotation.from_quat(target_pose[1]).inv()).magnitude()
if pos_diff < 0.005 and orn_diff < np.deg2rad(0.1):
return

if stop_on_contact and detect_robot_collision_in_sim(self.robot, ignore_obj_in_hand=False):
return

if not ignore_failure:
raise ActionPrimitiveError(
@@ -1160,7 +1148,7 @@ def _move_hand_direct_cartesian(self, target_pose, stop_on_contact=False, ignore

failed_joints = []
for joint_idx, target_joint_pos, current_joint_pos in zip(control_idx, joint_pos, current_joint_positions):
if np.abs(target_joint_pos - current_joint_pos) > np.rad2deg(45):
if np.abs(target_joint_pos - current_joint_pos) > MAX_ALLOWED_JOINT_ERROR_FOR_LINEAR_MOTION:
failed_joints.append(joints[joint_idx].joint_name)

if failed_joints:
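A quick check of the threshold fix above (standalone arithmetic, not code from the commit): the old expression np.rad2deg(45) evaluates to roughly 2578, so a joint error measured in radians could never exceed it, whereas np.deg2rad(45) is about 0.785 rad, the intended 45-degree bound.

import numpy as np

MAX_ALLOWED_JOINT_ERROR_FOR_LINEAR_MOTION = np.deg2rad(45)   # ~0.785 rad, as added above

joint_error = np.abs(2.0 - 0.5)   # hypothetical target vs. current joint position, in radians
print(joint_error > np.rad2deg(45))                              # False: old check never fires (~2578.3)
print(joint_error > MAX_ALLOWED_JOINT_ERROR_FOR_LINEAR_MOTION)   # True: 1.5 rad is well past 45 degrees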
@@ -1296,7 +1284,7 @@ def _get_head_goal_q(self, target_obj_pose):
if head2_joint_limits[0] < phi < head2_joint_limits[1]:
head2_joint_goal = phi

# if not possible to look at object, return default head joint positions
# if not possible to look at object, return current head joint positions
else:
default_head_pos = self._get_reset_joint_pos()[self.robot.controller_action_idx["camera"]]
head1_joint_goal = default_head_pos[0]
@@ -1,9 +1,7 @@
"""
WARNING!
The StarterSemanticActionPrimitive is a work-in-progress and is only provided as an example.
It currently only works with BehaviorRobot with its JointControllers set to absolute mode.
See provided behavior_robot_mp_behavior_task.yaml config file for an example. See examples/action_primitives for
runnable examples.
A set of action primitives that work without executing low-level physics but instead teleporting
objects directly into their post-condition states. Useful for learning high-level methods.
"""

from aenum import IntEnum, auto
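To make the new module docstring concrete (a toy sketch only; these names are invented and are not the OmniGibson API): a "symbolic" primitive skips motion planning and physics and instead writes the desired post-condition directly into the scene state.

from dataclasses import dataclass, field

@dataclass
class ToyObject:                        # hypothetical stand-in for a scene object
    name: str
    pose: tuple = (0.0, 0.0, 0.0)
    states: dict = field(default_factory=dict)

def symbolic_place_on_top(obj, surface, surface_height):
    """Teleport obj into the post-condition of a successful 'place on top'."""
    x, y, _ = surface.pose
    obj.pose = (x, y, surface_height)     # no controller rollout, just set the pose
    obj.states["OnTop"] = surface.name    # record the symbolic post-condition

mug, table = ToyObject("mug"), ToyObject("table", pose=(1.0, 2.0, 0.0))
symbolic_place_on_top(mug, table, surface_height=0.8)
print(mug.pose, mug.states)   # (1.0, 2.0, 0.8) {'OnTop': 'table'}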
1 change: 0 additions & 1 deletion omnigibson/controllers/ik_controller.py
@@ -273,7 +273,6 @@ def _command_to_control(self, command, control_dict):
tolerance_pos=0.002,
weight_pos=20.0,
max_iterations=100,
# initial_joint_pos=current_joint_pos,
)

if target_joint_pos is None:
3 changes: 1 addition & 2 deletions omnigibson/envs/env_base.py
@@ -1,4 +1,3 @@
import copy
import gym
import numpy as np

@@ -329,7 +328,7 @@ def load(self):
self._scene_graph_builder = SceneGraphBuilder(**self.config["scene_graph"])
# Here we can directly start it because we have already loaded everything & played
self._scene_graph_builder.start(self.scene)
self._scene_graph_builder.step(self.scene) # let's take a step too.
self._scene_graph_builder.step(self.scene) # let's take a step too.

# Denote that the scene is loaded
self._loaded = True
7 changes: 5 additions & 2 deletions omnigibson/objects/dataset_object.py
@@ -499,8 +499,11 @@ def get_base_aligned_bbox(self, link_name=None, visual=False, xy_aligned=False,
if self.prim_type == PrimType.CLOTH:
particle_contact_offset = self.root_link.cloth_system.particle_contact_offset
particle_positions = self.root_link.compute_particle_positions()
points.extend(particle_positions - particle_contact_offset)
points.extend(particle_positions + particle_contact_offset)
particles_in_world_frame = np.concatenate([
particle_positions - particle_contact_offset,
particle_positions + particle_contact_offset
], axis=0)
points.extend(trimesh.transformations.transform_points(particles_in_world_frame, world_to_base_frame))
else:
links = {link_name: self._links[link_name]} if link_name is not None else self._links
for link_name, link in links.items():
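For context on the cloth branch above (a standalone sketch with made-up values): trimesh.transformations.transform_points applies a 4x4 homogeneous transform to an (N, 3) point array, which is what moves the offset particle positions from the world frame into the object's base frame before they join the bounding-box point set.

import numpy as np
import trimesh.transformations as tt

# Hypothetical world-to-base transform: a 90-degree yaw plus a translation.
world_to_base_frame = tt.rotation_matrix(np.pi / 2, [0, 0, 1])
world_to_base_frame[:3, 3] = [1.0, 0.0, 0.0]

particle_positions = np.array([[0.0, 0.0, 0.0], [0.5, 0.5, 0.0]])   # fake cloth particles (world frame)
particle_contact_offset = 0.02
particles_in_world_frame = np.concatenate([
    particle_positions - particle_contact_offset,
    particle_positions + particle_contact_offset,
], axis=0)

points_in_base_frame = tt.transform_points(particles_in_world_frame, world_to_base_frame)
print(points_in_base_frame.shape)   # (4, 3): same points, expressed in the base frame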
