Commit

Start addressing comments
cgokmen committed Oct 24, 2023
1 parent 9b8aafb commit ab6efe6
Showing 6 changed files with 84 additions and 57 deletions.
2 changes: 1 addition & 1 deletion docker/sbatch_example.sh
@@ -65,7 +65,7 @@ enroot start \
${ENV_KWARGS} \
${MOUNT_KWARGS} \
${CONTAINER_NAME} \
source /isaac-sim/setup_conda_env.sh && pytest tests/test_object_states.py
micromamba run -n omnigibson /bin/bash --login -c "source /isaac-sim/setup_conda_env.sh && pytest tests/test_object_states.py"

# Clean up the image if possible.
enroot remove -f ${CONTAINER_NAME}
3 changes: 2 additions & 1 deletion docs/getting_started/slurm.md
@@ -14,7 +14,8 @@ With enroot installed, you can follow the below steps to run OmniGibson on SLURM

1. Download the dataset to a location that is accessible by cluster nodes. To do this, you can use
the download_dataset.py script inside OmniGibson's scripts directory, and move it to the right spot
later. **This step is already done for SVL and Viscam nodes**
later. In the below example, /cvgl/ is a networked drive that is accessible by the cluster nodes.
**For Stanford users, this step is already done for SVL and Viscam nodes**
```{.shell .annotate}
OMNIGIBSON_NO_OMNIVERSE=1 python scripts/download_dataset.py
mv omnigibson/data /cvgl/group/Gibson/og-data-0-2-1
56 changes: 39 additions & 17 deletions omnigibson/action_primitives/action_primitive_set_base.py
@@ -4,6 +4,7 @@
from typing import List

from future.utils import with_metaclass
from omnigibson import Environment

from omnigibson.robots import BaseRobot
from omnigibson.scenes.interactive_traversable_scene import InteractiveTraversableScene
@@ -13,18 +14,19 @@

class ActionPrimitiveError(ValueError):
class Reason(IntEnum):
# The planning for a primitive was successfully completed, but an error occurred during execution.
EXECUTION_ERROR = 0

# The planning for a primitive failed possibly due to not being able to find a path.
PLANNING_ERROR = 1

# A primitive could not be executed because a precondition was not satisfied, e.g. PLACE was called without an
# object currently in hand.
PRE_CONDITION_ERROR = 2
PRE_CONDITION_ERROR = 0

# A sampling error occurred: a position to place an object could not be found.
SAMPLING_ERROR = 3
# A sampling error occurred: e.g. a position to place an object could not be found, or the robot could not
# find a pose near the object to navigate to.
SAMPLING_ERROR = 1

# The planning for a primitive failed possibly due to not being able to find a path.
PLANNING_ERROR = 2

# The planning for a primitive was successfully completed, but an error occurred during execution.
EXECUTION_ERROR = 3

# The execution of the primitive happened correctly, but while checking post-conditions, an error was found.
POST_CONDITION_ERROR = 4
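
The reordering above groups the failure reasons roughly by when they can occur (pre-conditions, sampling, planning, execution, post-conditions), which is also how a caller would typically branch on them. A minimal sketch, assuming OmniGibson is importable and that the exception exposes its chosen reason on a `reason` attribute (the constructor is not shown in this excerpt):

```python
# Hedged sketch: consuming a primitive's low-level actions and classifying failures.
# The `reason` attribute read below is an assumption for illustration only.
from omnigibson.action_primitives.action_primitive_set_base import ActionPrimitiveError


def run_primitive(primitive_generator):
    """Yield a primitive's low-level actions, treating sampling/planning failures as retryable."""
    retryable = {
        ActionPrimitiveError.Reason.SAMPLING_ERROR,  # e.g. no placement pose found
        ActionPrimitiveError.Reason.PLANNING_ERROR,  # e.g. no collision-free path found
    }
    try:
        for low_level_action in primitive_generator:
            yield low_level_action
    except ActionPrimitiveError as err:
        if getattr(err, "reason", None) in retryable:
            print(f"Primitive failed but can be retried: {err}")
        else:
            raise  # pre-/post-condition and execution errors go back to the caller
```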
@@ -52,18 +54,22 @@ class BaseActionPrimitiveSet(with_metaclass(ABCMeta, object)):
def __init_subclass__(cls, **kwargs):
"""
Registers all subclasses as part of this registry. This is useful to decouple internal codebase from external
user additions. This way, users can add their custom robot by simply extending this Robot class,
and it will automatically be registered internally. This allows users to then specify their robot
user additions. This way, users can add their custom primitive set by simply extending this class,
and it will automatically be registered internally. This allows users to then specify their primitive set
directly in string form in, e.g., their config files, without having to manually set the str-to-class mapping
in our code.
"""
if not inspect.isabstract(cls):
REGISTERED_PRIMITIVE_SETS[cls.__name__] = cls

def __init__(self, task, scene, robot):
self.task: BaseTask = task
self.scene: InteractiveTraversableScene = scene
self.robot: BaseRobot = robot
def __init__(self, env):
self.env: Environment = env

@property
def robot(self):
# Currently returns the first robot in the environment, but can be scaled to multiple robots
# by creating multiple action generators and passing in a robot index etc.
return self.env.robots[0]

@abstractmethod
def get_action_space(self):
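
Taken together, the registration hook and the env-based constructor mean a user-defined primitive set only has to subclass `BaseActionPrimitiveSet` to become addressable by its string name. A minimal sketch, assuming `REGISTERED_PRIMITIVE_SETS` is importable from the same module and that `get_action_space`, `apply`, and `apply_ref` are the only abstract members (as in this excerpt); `NoOpPrimitives` itself is hypothetical:

```python
from omnigibson.action_primitives.action_primitive_set_base import (
    REGISTERED_PRIMITIVE_SETS,
    BaseActionPrimitiveSet,
)


class NoOpPrimitives(BaseActionPrimitiveSet):
    """Hypothetical primitive set that emits no actions, used only to illustrate auto-registration."""

    def get_action_space(self):
        return None  # a real implementation would return its high-level action space here

    def apply(self, action):
        return iter(())  # no low-level actions to execute

    def apply_ref(self, action, *args):
        return iter(())


# __init_subclass__ ran at class-creation time, so the subclass can already be looked
# up by name, e.g. when resolving a string from a config file.
assert REGISTERED_PRIMITIVE_SETS["NoOpPrimitives"] is NoOpPrimitives
```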
@@ -72,5 +78,21 @@ def get_action_space(self):

@abstractmethod
def apply(self, action):
"""Given a higher-level action, generates a sequence of lower level actions (or raise ActionPrimitiveError)"""
pass
"""
Apply a primitive action.
Given a higher-level action in the same format as the action space (e.g. as a number),
generates a sequence of lower-level actions (or raises ActionPrimitiveError). The action
will get resolved and passed into apply_ref.
"""
pass

@abstractmethod
def apply_ref(self, action, *args):
"""
Apply a primitive action by reference.
Given a higher-level action from the corresponding action set enum and any necessary arguments,
generates a sequence of lower-level actions (or raises ActionPrimitiveError)
"""
pass
64 changes: 34 additions & 30 deletions omnigibson/action_primitives/starter_semantic_action_primitives.py
@@ -1,14 +1,14 @@
"""
WARNING!
The StarterSemanticActionPrimitive is a work-in-progress and is only provided as an example.
It currently only works with BehaviorRobot with its JointControllers set to absolute mode.
See provided behavior_robot_mp_behavior_task.yaml config file for an example. See examples/action_primitives for
It currently only works with Fetch and Tiago with their JointControllers set to delta mode.
See provided tiago_primitives.yaml config file for an example. See examples/action_primitives for
runnable examples.
"""
import inspect
import logging
import random
from enum import IntEnum
from aenum import IntEnum, auto
from math import ceil
import cv2
from matplotlib import pyplot as plt
@@ -94,7 +94,8 @@
def indented_print(msg, *args, **kwargs):
logger.debug(" " * len(inspect.stack()) + str(msg), *args, **kwargs)

class RobotCopy():
class RobotCopy:
"""A data structure for storing information about a robot copy, used for collision checking in planning."""
def __init__(self):
self.prims = {}
self.meshes = {}
@@ -105,7 +106,10 @@ def __init__(self):
"simplified": ([5, 0, -5.0], [0, 0, 0, 1]),
}

class UndoableContext(object):
class PlanningContext(object):
"""
A context manager that sets up a robot copy for collision checking in planning.
"""
def __init__(self, robot, robot_copy, robot_copy_type="original"):
self.robot = robot
self.robot_copy = robot_copy
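
`PlanningContext` (the renamed `UndoableContext`) is used the same way everywhere in this commit: enter the context to get a collision-checkable robot copy, then hand both the robot and the context to a planner. A condensed sketch of that pattern, with the planning time as a placeholder value:

```python
import numpy as np

from omnigibson.action_primitives.starter_semantic_action_primitives import PlanningContext
from omnigibson.utils.motion_planning_utils import plan_arm_motion


def plan_to_joint_positions(robot, robot_copy, target_joint_pos, planning_time=15.0):
    """Plan an arm trajectory to `target_joint_pos` against the copy-based collision checker.

    `planning_time` is a placeholder; the callers in this file pass their own values.
    """
    with PlanningContext(robot, robot_copy, "original") as context:
        plan = plan_arm_motion(
            robot=robot,
            end_conf=np.asarray(target_joint_pos),
            context=context,
            planning_time=planning_time,
        )
    # `plan` may be None if no collision-free path was found.
    return plan
```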
@@ -205,15 +209,16 @@ def _construct_disabled_collision_pairs(self):
colliders += disabled_colliders

class StarterSemanticActionPrimitiveSet(IntEnum):
GRASP = 0
PLACE_ON_TOP = 1
PLACE_INSIDE = 2
OPEN = 3
CLOSE = 4
NAVIGATE_TO = 5 # For mostly debugging purposes.
RELEASE = 6 # For reorienting grasp
TOGGLE_ON = 7
TOGGLE_OFF = 8
_init_ = 'value __doc__'
GRASP = auto(), "Grasp an object"
PLACE_ON_TOP = auto(), "Place the currently grasped object on top of another object"
PLACE_INSIDE = auto(), "Place the currently grasped object inside another object"
OPEN = auto(), "Open an object"
CLOSE = auto(), "Close an object"
NAVIGATE_TO = auto(), "Navigate to an object (mostly for debugging purposes - other primitives also navigate first)"
RELEASE = auto(), "Release an object, letting it fall to the ground. You can then grasp it again, as a way of reorienting your grasp of the object."
TOGGLE_ON = auto(), "Toggle an object on"
TOGGLE_OFF = auto(), "Toggle an object off"

class StarterSemanticActionPrimitives(BaseActionPrimitiveSet):
def __init__(self, task, scene, robot, add_context=False, enable_head_tracking=True, always_track_eef=False):
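
With `aenum`, the `_init_ = 'value __doc__'` line makes the second element of each assignment the member's docstring, so the primitive set is self-describing at runtime. The sketch below prints that listing and shows the `apply_ref` call pattern described in the base class; the environment, controller, and `target_obj` handle are assumed to exist already, and passing the grasp target as an extra argument is an assumption based on the "any necessary arguments" wording above:

```python
from omnigibson.action_primitives.starter_semantic_action_primitives import (
    StarterSemanticActionPrimitives,
    StarterSemanticActionPrimitiveSet,
)


def describe_primitives():
    # Each member carries its own docstring thanks to `_init_ = 'value __doc__'`.
    for primitive in StarterSemanticActionPrimitiveSet:
        print(f"{primitive.value}: {primitive.name} - {primitive.__doc__}")


def grasp_object(env, controller: StarterSemanticActionPrimitives, target_obj):
    """Hedged sketch: resolve GRASP by reference and step its low-level actions through the env."""
    for action in controller.apply_ref(StarterSemanticActionPrimitiveSet.GRASP, target_obj):
        env.step(action)
```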
Expand Down Expand Up @@ -346,10 +351,10 @@ def _load_robot_copy(robot):

def get_action_space(self):
if ACTIVITY_RELEVANT_OBJECTS_ONLY:
assert isinstance(self.task, BehaviorTask), "Activity relevant objects can only be used for BEHAVIOR tasks"
self.addressable_objects = sorted(set(self.task.object_scope.values()), key=lambda obj: obj.name)
assert isinstance(self.env.task, BehaviorTask), "Activity relevant objects can only be used for BEHAVIOR tasks"
self.addressable_objects = sorted(set(self.env.task.object_scope.values()), key=lambda obj: obj.name)
else:
self.addressable_objects = sorted(set(self.scene.objects_by_name.values()), key=lambda obj: obj.name)
self.addressable_objects = sorted(set(self.env.scene.objects_by_name.values()), key=lambda obj: obj.name)

# Filter out the robots.
self.addressable_objects = [obj for obj in self.addressable_objects if not isinstance(obj, BaseRobot)]
@@ -838,8 +843,7 @@ def _target_in_reach_of_robot(self, target_pose):
bool: Whether eef can reach the target pose
"""
relative_target_pose = self._get_pose_in_robot_frame(target_pose)
joint_pos, _ = self._ik_solver_cartesian_to_joint_space(relative_target_pose)
return False if joint_pos is None else True
return self._target_in_reach_of_robot_relative(relative_target_pose)

def _target_in_reach_of_robot_relative(self, relative_target_pose):
"""
@@ -923,7 +927,7 @@ def _move_hand_joint(self, joint_pos, control_idx):
Returns:
np.array or None: Action array for one step for the robot to move arm or None if its at the joint positions
"""
with UndoableContext(self.robot, self.robot_copy, "original") as context:
with PlanningContext(self.robot, self.robot_copy, "original") as context:
plan = plan_arm_motion(
robot=self.robot,
end_conf=joint_pos,
@@ -958,7 +962,7 @@ def _move_hand_ik(self, eef_pose, stop_if_stuck=False):
eef_ori = T.quat2axisangle(eef_pose[1])
end_conf = np.append(eef_pos, eef_ori)

with UndoableContext(self.robot, self.robot_copy, "original") as context:
with PlanningContext(self.robot, self.robot_copy, "original") as context:
plan = plan_arm_motion_ik(
robot=self.robot,
end_conf=end_conf,
@@ -1506,7 +1510,7 @@ def _navigate_to_pose(self, pose_2d):
Returns:
np.array or None: Action array for one step for the robot to navigate or None if it is done navigating
"""
with UndoableContext(self.robot, self.robot_copy, "simplified") as context:
with PlanningContext(self.robot, self.robot_copy, "simplified") as context:
plan = plan_base_motion(
robot=self.robot,
end_conf=pose_2d,
@@ -1530,7 +1534,7 @@ def _navigate_to_pose(self, pose_2d):

def _draw_plan(self, plan):
SEARCHED = []
trav_map = self.scene._trav_map
trav_map = self.env.scene._trav_map
for q in plan:
# The below code is useful for plotting the RRT tree.
SEARCHED.append(np.flip(trav_map.world_to_map((q[0], q[1]))))
@@ -1673,10 +1677,10 @@ def _sample_pose_near_object(self, obj, pose_on_obj=None, **kwargs):
- 3-array: (x,y,z) Position in the world frame
- 4-array: (x,y,z,w) Quaternion orientation in the world frame
"""
with UndoableContext(self.robot, self.robot_copy, "simplified") as context:
with PlanningContext(self.robot, self.robot_copy, "simplified") as context:
for _ in range(MAX_ATTEMPTS_FOR_SAMPLING_POSE_NEAR_OBJECT):
if pose_on_obj is None:
pos_on_obj = self._sample_position_on_aabb_face(obj)
pos_on_obj = self._sample_position_on_aabb_side(obj)
pose_on_obj = [pos_on_obj, np.array([0, 0, 0, 1])]

distance = np.random.uniform(0.0, 5.0)
@@ -1686,8 +1690,8 @@ def _sample_pose_near_object(self, obj, pose_on_obj=None, **kwargs):
[pose_on_obj[0][0] + distance * np.cos(yaw), pose_on_obj[0][1] + distance * np.sin(yaw), yaw + np.pi - avg_arm_workspace_range]
)
# Check room
obj_rooms = obj.in_rooms if obj.in_rooms else [self.scene._seg_map.get_room_instance_by_point(pose_on_obj[0][:2])]
if self.scene._seg_map.get_room_instance_by_point(pose_2d[:2]) not in obj_rooms:
obj_rooms = obj.in_rooms if obj.in_rooms else [self.env.scene._seg_map.get_room_instance_by_point(pose_on_obj[0][:2])]
if self.env.scene._seg_map.get_room_instance_by_point(pose_2d[:2]) not in obj_rooms:
indented_print("Candidate position is in the wrong room.")
continue

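
The candidate above is just a point at a random distance and heading around the sampled spot on the object, with the yaw flipped so the robot faces back toward it. The same geometry in isolation; the uniform yaw sampling and the zero default for the robot-specific `avg_arm_workspace_range` are assumptions, since those lines are collapsed in this view:

```python
import numpy as np


def sample_candidate_base_pose(point_on_obj_xy, max_distance=5.0, arm_workspace_offset=0.0):
    """Return an (x, y, yaw) candidate base pose around `point_on_obj_xy`, facing back toward it.

    `arm_workspace_offset` stands in for the robot-specific average arm workspace range used
    above; 0.0 means the robot faces the object head-on.
    """
    distance = np.random.uniform(0.0, max_distance)
    yaw = np.random.uniform(-np.pi, np.pi)  # assumed: the actual sampling is collapsed above
    x = point_on_obj_xy[0] + distance * np.cos(yaw)
    y = point_on_obj_xy[1] + distance * np.sin(yaw)
    # Adding pi turns the robot around so it looks back at the point it moved away from.
    return np.array([x, y, yaw + np.pi - arm_workspace_offset])
```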
@@ -1702,9 +1706,9 @@ def _sample_pose_near_object(self, obj, pose_on_obj=None, **kwargs):
)

@staticmethod
def _sample_position_on_aabb_face(target_obj):
def _sample_position_on_aabb_side(target_obj):
"""
Returns a position on the axis-aligned bounding box (AABB) faces of the target object.
Returns a position on one of the axis-aligned bounding box (AABB) side faces of the target object.
Args:
target_obj (StatefulObject): Object to sample a position on
Expand Down Expand Up @@ -1738,7 +1742,7 @@ def _sample_pose_in_room(self, room: str):
"""
# TODO(MP): Bias the sampling near the agent.
for _ in range(MAX_ATTEMPTS_FOR_SAMPLING_POSE_IN_ROOM):
_, pos = self.scene.get_random_point_by_room_instance(room)
_, pos = self.env.scene.get_random_point_by_room_instance(room)
yaw = np.random.uniform(-np.pi, np.pi)
pose = (pos[0], pos[1], yaw)
if self._test_pose(pose):
4 changes: 2 additions & 2 deletions omnigibson/tasks/grasp_task.py
@@ -1,7 +1,7 @@
import random
import numpy as np
import omnigibson as og
from omnigibson.action_primitives.starter_semantic_action_primitives import UndoableContext
from omnigibson.action_primitives.starter_semantic_action_primitives import PlanningContext
from omnigibson.reward_functions.grasp_reward import GraspReward

from omnigibson.tasks.task_base import BaseTask
@@ -62,7 +62,7 @@ def _reset_agent(self, env):
initial_joint_pos = np.array(robot.get_joint_positions()[joint_combined_idx])
control_idx_in_joint_pos = np.where(np.in1d(joint_combined_idx, joint_control_idx))[0]

with UndoableContext(env._primitive_controller.robot, env._primitive_controller.robot_copy, "original") as context:
with PlanningContext(env._primitive_controller.robot, env._primitive_controller.robot_copy, "original") as context:
for _ in range(MAX_JOINT_RANDOMIZATION_ATTEMPTS):
joint_pos, joint_control_idx = self._get_random_joint_position(robot)
initial_joint_pos[control_idx_in_joint_pos] = joint_pos
12 changes: 6 additions & 6 deletions omnigibson/utils/motion_planning_utils.py
@@ -20,7 +20,7 @@ def plan_base_motion(
Args:
robot (omnigibson.object_states.Robot): Robot object to plan for
end_conf (Iterable): [x, y, yaw] 2d pose to plan to
context (UndoableContext): Context to plan in that includes the robot copy
context (PlanningContext): Context to plan in that includes the robot copy
planning_time (float): Time to plan for
Returns:
@@ -204,7 +204,7 @@ def plan_arm_motion(
Args:
robot (BaseRobot): Robot object to plan for
end_conf (Iterable): Final joint position to plan to
context (UndoableContext): Context to plan in that includes the robot copy
context (PlanningContext): Context to plan in that includes the robot copy
planning_time (float): Time to plan for
Returns:
@@ -305,7 +305,7 @@ def plan_arm_motion_ik(
Args:
robot (BaseRobot): Robot object to plan for
end_conf (Iterable): Final end effector pose to plan to
context (UndoableContext): Context to plan in that includes the robot copy
context (PlanningContext): Context to plan in that includes the robot copy
planning_time (float): Time to plan for
Returns:
@@ -425,7 +425,7 @@ def set_base_and_detect_collision(context, pose):
Moves the robot and detects robot collisions with the environment and itself
Args:
context (UndoableContext): Context to plan in that includes the robot copy
context (PlanningContext): Context to plan in that includes the robot copy
pose (Array): Pose in the world frame to check for collisions at
Returns:
@@ -450,7 +450,7 @@ def set_arm_and_detect_collision(context, joint_pos):
Sets joint positions of the robot and detects robot collisions with the environment and itself
Args:
context (UndoableContext): Context to plan in that includes the robot copy
context (PlanningContext): Context to plan in that includes the robot copy
joint_pos (Array): Joint positions to set the robot to
Returns:
Expand Down Expand Up @@ -481,7 +481,7 @@ def detect_robot_collision(context):
Detects robot collisions
Args:
context (UndoableContext): Context to plan in that includes the robot copy
context (PlanningContext): Context to plan in that includes the robot copy
Returns:
bool: Whether the robot is in collision
