From d1a86a1e399c93a6ce7f58a0b8fc2055b3c9b48e Mon Sep 17 00:00:00 2001 From: tthielke Date: Wed, 27 Sep 2023 18:11:18 +0200 Subject: [PATCH 01/11] [donbot-pms] first concept new process modules --- doc/source/notebooks/bullet_world.ipynb | 1 - resources/{donbot.urdf => iai_donbot.urdf} | 0 .../process_modules/donbot_process_modules.py | 154 +++++++----------- .../process_modules/pr2_process_modules.py | 1 - src/pycram/robot_descriptions/__init__.py | 2 +- .../robot_descriptions/donbot_description.py | 4 +- 6 files changed, 59 insertions(+), 103 deletions(-) delete mode 120000 doc/source/notebooks/bullet_world.ipynb rename resources/{donbot.urdf => iai_donbot.urdf} (100%) diff --git a/doc/source/notebooks/bullet_world.ipynb b/doc/source/notebooks/bullet_world.ipynb deleted file mode 120000 index bdb627d54..000000000 --- a/doc/source/notebooks/bullet_world.ipynb +++ /dev/null @@ -1 +0,0 @@ -../../../examples/bullet_world.ipynb \ No newline at end of file diff --git a/resources/donbot.urdf b/resources/iai_donbot.urdf similarity index 100% rename from resources/donbot.urdf rename to resources/iai_donbot.urdf diff --git a/src/pycram/process_modules/donbot_process_modules.py b/src/pycram/process_modules/donbot_process_modules.py index 98d45f430..da063fe11 100644 --- a/src/pycram/process_modules/donbot_process_modules.py +++ b/src/pycram/process_modules/donbot_process_modules.py @@ -1,14 +1,18 @@ import time from threading import Lock +import numpy as np import pybullet as p import pycram.bullet_world_reasoning as btr import pycram.helper as helper -from ..bullet_world import BulletWorld +from ..bullet_world import BulletWorld, Object +from ..external_interfaces.ik import request_ik from ..local_transformer import LocalTransformer +from ..pose import Pose from ..process_module import ProcessModule, ProcessModuleManager from ..robot_descriptions import robot_description +from tf.transformations import euler_from_quaternion, quaternion_from_euler def _park_arms(arm): @@ -30,16 +34,8 @@ class DonbotNavigation(ProcessModule): """ def _execute(self, desig): - solution = desig.reference() - if solution['cmd'] == 'navigate': - robot = BulletWorld.robot - # Reset odom joints to zero - for joint_name in robot_description.odom_joints: - robot.set_joint_state(joint_name, 0.0) - # Set actual goal pose - robot.set_position_and_orientation(solution['target'], solution['orientation']) - time.sleep(0.5) - local_transformer.update_from_btr() + robot = BulletWorld.robot + robot.set_pose(desig.target) class DonbotPickUp(ProcessModule): """ @@ -48,16 +44,18 @@ class DonbotPickUp(ProcessModule): """ def _execute(self, desig): - solution = desig.reference() - if solution['cmd'] == 'pick': - object = solution['object'] - robot = BulletWorld.robot - target = object.get_position() - inv = p.calculateInverseKinematics(robot.id, robot.get_link_id(solution['gripper']), target, - maxNumIterations=100) - helper._apply_ik(robot, inv) - robot.attach(object, solution['gripper']) - time.sleep(0.5) + object = desig.object_desig.bullet_world_object + robot = BulletWorld.robot + grasp = robot_description.grasps.get_orientation_for_grasp(desig.grasp) + target = object.get_pose() + target.orientation.x = grasp[0] + target.orientation.y = grasp[1] + target.orientation.z = grasp[2] + target.orientation.w = grasp[3] + + _move_arm_tcp(target, robot, "left") + tool_frame = robot_description.get_tool_frame("left") + robot.attach(object, tool_frame) class DonbotPlace(ProcessModule): @@ -66,57 +64,20 @@ class DonbotPlace(ProcessModule): """ 
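The rewritten _execute just below converts the designator's target (the pose where the object should end up) into a goal for the tool center point by folding in the current grasp offset, i.e. the object's pose expressed in the TCP frame. The hunk does this with PyCRAM's Pose/Transform helpers visible in the diff (transform_pose, to_transform, inverse_times); the sketch here only illustrates the underlying homogeneous-transform idea with plain numpy and makes no claim about the library's exact frame conventions.

import numpy as np

def tcp_goal_for_object_goal(T_map_object_goal: np.ndarray, T_tcp_object: np.ndarray) -> np.ndarray:
    """Return the 4x4 TCP goal that places a grasped object at its goal pose.

    T_map_object_goal: desired object pose in the map frame (4x4 homogeneous matrix).
    T_tcp_object: object pose expressed in the TCP frame; constant while the object is grasped.
    """
    # T_map_object = T_map_tcp @ T_tcp_object, so solving for the TCP goal gives:
    return T_map_object_goal @ np.linalg.inv(T_tcp_object)

# Example: the object is held 10 cm in front of the TCP and should end up 1 m ahead of the
# map origin, so the TCP has to stop 10 cm short of the object's goal.
T_tcp_object = np.eye(4); T_tcp_object[0, 3] = 0.10
T_goal = np.eye(4); T_goal[0, 3] = 1.0
print(round(tcp_goal_for_object_goal(T_goal, T_tcp_object)[0, 3], 3))  # 0.9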
def _execute(self, desig): - solution = desig.reference() - if solution['cmd'] == 'place': - object = solution['object'] - robot = BulletWorld.robot - inv = p.calculateInverseKinematics(robot.id, robot.get_link_id(solution['gripper']), solution['target'], - maxNumIterations=100) - helper._apply_ik(robot, inv) - robot.detach(object) - time.sleep(0.5) + object = desig.object.bullet_world_object + robot = BulletWorld.robot + # Transformations such that the target position is the position of the object and not the tcp + object_pose = object.get_pose() + local_tf = LocalTransformer() + tcp_to_object = local_tf.transform_pose(object_pose, + robot.get_link_tf_frame(robot_description.get_tool_frame("left"))) + target_diff = desig.target.to_transform("target").inverse_times(tcp_to_object.to_transform("object")).to_pose() -class DonbotAccessing(ProcessModule): - """ - This process module responsible for opening drawers to access the objects inside. This works by firstly moving - the end effector to the handle of the drawer. Next, the end effector is moved the respective distance to the back. - This provides the illusion the robot would open the drawer by himself. - Then the drawer will be opened by setting the joint pose of the drawer joint. - """ - - def _execute(self, desig): - solution = desig.reference() - if solution['cmd'] == 'access': - kitchen = solution['part_of'] - robot = BulletWorld.robot - gripper = solution['gripper'] - drawer_handle = solution['drawer_handle'] - drawer_joint = solution['drawer_joint'] - dis = solution['distance'] - inv = p.calculateInverseKinematics(robot.id, robot.get_link_id(gripper), - kitchen.get_link_position(drawer_handle)) - helper._apply_ik(robot, inv) - time.sleep(0.2) - han_pose = kitchen.get_link_position(drawer_handle) - new_p = [han_pose[0] - dis, han_pose[1], han_pose[2]] - inv = p.calculateInverseKinematics(robot.id, robot.get_link_id(gripper), new_p) - helper._apply_ik(robot, inv) - kitchen.set_joint_state(drawer_joint, 0.3) - time.sleep(0.5) + _move_arm_tcp(target_diff, robot, "left") + robot.detach(object) -class DonbotParkArms(ProcessModule): - """ - This process module is for moving the arms in a parking position. - It is currently not used. 
- """ - - def _execute(self, desig): - solutions = desig.reference() - if solutions['cmd'] == 'park': - _park_arms() - class DonbotMoveHead(ProcessModule): """ @@ -125,36 +86,25 @@ class DonbotMoveHead(ProcessModule): """ def _execute(self, desig): - solutions = desig.reference() - if solutions['cmd'] == 'looking': - robot = BulletWorld.robot - neck_base_frame = local_transformer.projection_namespace + '/' + robot_description.chains["neck"].base_link if \ - local_transformer.projection_namespace else \ - robot_description.chains["neck"].base_link - if type(solutions['target']) is str: - target = local_transformer.projection_namespace + '/' + solutions['target'] if \ - local_transformer.projection_namespace else \ - solutions['target'] - pose_in_neck_base = local_transformer.tf_transform(neck_base_frame, target) - elif helper_deprecated.is_list_pose(solutions['target']) or helper_deprecated.is_list_position(solutions['target']): - pose = helper_deprecated.ensure_pose(solutions['target']) - pose_in_neck_base = local_transformer.tf_pose_transform(local_transformer.map_frame, neck_base_frame, pose) - - vector = pose_in_neck_base[0] - # +x as forward - # +y as left - # +z as up - x = vector[0] - y = vector[1] - z = vector[2] - conf = None - if y > 0: - conf = "left" - else: - conf = "right" - for joint, state in robot_description.get_static_joint_chain("neck", conf).items(): - robot.set_joint_state(joint, state) + target = desig.target + robot = BulletWorld.robot + + local_transformer = LocalTransformer() + + pose_in_shoulder = local_transformer.transform_pose(target, robot.get_link_tf_frame("ur5_shoulder_link")) + pose_in_shoulder.position.z += 0.5 + + new_pan = np.arctan2(pose_in_shoulder.position.y, pose_in_shoulder.position.x) + new_tilt = np.arctan2(pose_in_shoulder.position.z, pose_in_shoulder.position.x ** 2 + pose_in_shoulder.position.y ** 2) * -1 + + tcp_rotation = quaternion_from_euler(new_tilt, 0, new_pan) + shoulder_pose = robot.get_link_pose("ur5_shoulder_link") + pose_in_shoulder.position.z += 0.5 + shoulder_pose.rotation = list(tcp_rotation) + print(pose_in_shoulder) + + _move_arm_tcp(shoulder_pose, robot, "left") class DonbotMoveGripper(ProcessModule): """ @@ -242,11 +192,19 @@ def _execute(self, desig): obj_type = solution['object_type'] return list(filter(lambda obj: obj.type == obj_type, BulletWorld.current_bullet_world.objects))[0] +def _move_arm_tcp(target: Pose, robot: Object, arm: str) -> None: + gripper = robot_description.get_tool_frame(arm) + + joints = robot_description.chains[arm].joints + + inv = request_ik(target, robot, joints, gripper) + helper._apply_ik(robot, inv, joints) + class DonbotManager(ProcessModuleManager): def __init__(self): - super().__init__("donbot") + super().__init__("iai_donbot") self._navigate_lock = Lock() self._pick_up_lock = Lock() self._place_lock = Lock() diff --git a/src/pycram/process_modules/pr2_process_modules.py b/src/pycram/process_modules/pr2_process_modules.py index f93544d70..3930a8a9b 100644 --- a/src/pycram/process_modules/pr2_process_modules.py +++ b/src/pycram/process_modules/pr2_process_modules.py @@ -72,7 +72,6 @@ def _execute(self, desig: PickUpMotion.Motion): target.orientation.w = grasp[3] arm = desig.arm - arm_short = "r" if arm == "right" else "l" _move_arm_tcp(target, robot, arm) tool_frame = robot_description.get_tool_frame(arm) diff --git a/src/pycram/robot_descriptions/__init__.py b/src/pycram/robot_descriptions/__init__.py index a03f390ad..897e86d4d 100644 --- a/src/pycram/robot_descriptions/__init__.py +++ 
b/src/pycram/robot_descriptions/__init__.py @@ -49,7 +49,7 @@ def update_robot_description(robot_name=None, from_ros=None): return None # Choose Description based on robot name - if 'donbot' in robot: + if 'iai_donbot' in robot: description = DonbotDescription elif 'pr2' in robot: description = PR2Description diff --git a/src/pycram/robot_descriptions/donbot_description.py b/src/pycram/robot_descriptions/donbot_description.py index 839b4e351..728a9e324 100644 --- a/src/pycram/robot_descriptions/donbot_description.py +++ b/src/pycram/robot_descriptions/donbot_description.py @@ -4,7 +4,7 @@ class DonbotDescription(RobotDescription): def __init__(self): - super().__init__("donbot", "base_footprint", "base_link", "ur5_base_link", + super().__init__("iai_donbot", "base_footprint", "base_link", "ur5_base_link", "arm_base_mounting_joint", odom_frame="odom", odom_joints=["odom_x_joint", "odom_y_joint", "odom_z_joint"]) # Camera @@ -14,7 +14,7 @@ def __init__(self): rs_camera = CameraDescription("rs_camera_link", horizontal_angle=0.99483, vertical_angle=0.75049, minimal_height=0.5, maximal_height=1.2) - # not in the donbot.urdf, although used in cram + # not in the iai_donbot.urdf, although used in cram # realsense_camera = CameraDescription("rs_camera_depth_optical_frame", # horizontal_angle=0.99483, vertical_angle=0.75049, # minimal_height=0.5, maximal_height=1.2) From 10788440a7527e42c9957650daa8d912a4ca618b Mon Sep 17 00:00:00 2001 From: tthielke Date: Fri, 29 Sep 2023 11:42:46 +0200 Subject: [PATCH 02/11] [donbot-pms] new process module donbot --- launch/ik_and_description.launch | 4 +- src/pycram/bullet_world.py | 4 +- .../process_modules/donbot_process_modules.py | 79 +++++++------------ 3 files changed, 34 insertions(+), 53 deletions(-) diff --git a/launch/ik_and_description.launch b/launch/ik_and_description.launch index 61f2315da..8baf63449 100644 --- a/launch/ik_and_description.launch +++ b/launch/ik_and_description.launch @@ -1,5 +1,5 @@ - + @@ -30,7 +30,7 @@ + textfile="$(find pycram)/resources/iai_donbot.urdf"/> diff --git a/src/pycram/bullet_world.py b/src/pycram/bullet_world.py index 23c40d591..69a47c0da 100644 --- a/src/pycram/bullet_world.py +++ b/src/pycram/bullet_world.py @@ -274,7 +274,9 @@ def add_vis_axis(self, pose: Pose, :param length: Optional parameter to configure the length of the axes """ - position, orientation = pose.to_list() + pose_in_map = self.local_transformer.transform_pose(pose, "map") + + position, orientation = pose_in_map.to_list() vis_x = p.createVisualShape(p.GEOM_BOX, halfExtents=[length, 0.01, 0.01], rgbaColor=[1, 0, 0, 0.8], visualFramePosition=[length, 0.01, 0.01]) diff --git a/src/pycram/process_modules/donbot_process_modules.py b/src/pycram/process_modules/donbot_process_modules.py index da063fe11..bf001f6d5 100644 --- a/src/pycram/process_modules/donbot_process_modules.py +++ b/src/pycram/process_modules/donbot_process_modules.py @@ -7,6 +7,7 @@ import pycram.bullet_world_reasoning as btr import pycram.helper as helper from ..bullet_world import BulletWorld, Object +from ..designators.motion_designator import MoveArmJointsMotion, WorldStateDetectingMotion from ..external_interfaces.ik import request_ik from ..local_transformer import LocalTransformer from ..pose import Pose @@ -92,17 +93,14 @@ def _execute(self, desig): local_transformer = LocalTransformer() pose_in_shoulder = local_transformer.transform_pose(target, robot.get_link_tf_frame("ur5_shoulder_link")) - pose_in_shoulder.position.z += 0.5 new_pan = np.arctan2(pose_in_shoulder.position.y, 
pose_in_shoulder.position.x) new_tilt = np.arctan2(pose_in_shoulder.position.z, pose_in_shoulder.position.x ** 2 + pose_in_shoulder.position.y ** 2) * -1 tcp_rotation = quaternion_from_euler(new_tilt, 0, new_pan) - shoulder_pose = robot.get_link_pose("ur5_shoulder_link") - pose_in_shoulder.position.z += 0.5 - shoulder_pose.rotation = list(tcp_rotation) + shoulder_pose = Pose([-0.2, 0.3, 1.31], [-0.31, 0.63, 0.70, -0.02], "map") - print(pose_in_shoulder) + print(shoulder_pose) _move_arm_tcp(shoulder_pose, robot, "left") @@ -113,15 +111,11 @@ class DonbotMoveGripper(ProcessModule): """ def _execute(self, desig): - solution = desig.reference() - if solution['cmd'] == "move-gripper": - robot = BulletWorld.robot - gripper = solution['gripper'] - motion = solution['motion'] - for joint, state in robot_description.get_static_gripper_chain(gripper, motion).items(): - # TODO: Test this, add gripper-opening/-closing to the demo.py - robot.set_joint_state(joint, state) - time.sleep(0.5) + robot = BulletWorld.robot + gripper = desig.gripper + motion = desig.motion + for joint, state in robot_description.get_static_gripper_chain(gripper, motion).items(): + robot.set_joint_state(joint, state) class DonbotDetecting(ProcessModule): @@ -131,17 +125,17 @@ class DonbotDetecting(ProcessModule): """ def _execute(self, desig): - solution = desig.reference() - if solution['cmd'] == "detecting": - robot = BulletWorld.robot - object_type = solution['object_type'] - cam_frame_name = solution['cam_frame'] - front_facing_axis = solution['front_facing_axis'] + robot = BulletWorld.robot + object_type = desig.object_type + # Should be "wide_stereo_optical_frame" + cam_frame_name = robot_description.get_camera_frame() + # should be [0, 0, 1] + front_facing_axis = robot_description.front_facing_axis - objects = BulletWorld.current_bullet_world.get_objects_by_type(object_type) - for obj in objects: - if btr.visible(obj, robot.get_link_position_and_orientation(cam_frame_name), front_facing_axis, 0.5): - return obj + objects = BulletWorld.current_bullet_world.get_objects_by_type(object_type) + for obj in objects: + if btr.visible(obj, robot.get_link_pose(cam_frame_name), front_facing_axis): + return obj class DonbotMoveTCP(ProcessModule): @@ -150,14 +144,10 @@ class DonbotMoveTCP(ProcessModule): """ def _execute(self, desig): - solution = desig.reference() - if solution['cmd'] == "move-tcp": - target = solution['target'] - gripper = solution['gripper'] - robot = BulletWorld.robot - inv = p.calculateInverseKinematics(robot.id, robot.get_link_id(gripper), target) - helper._apply_ik(robot, inv) - time.sleep(0.5) + target = desig.target + robot = BulletWorld.robot + + _move_arm_tcp(target, robot, desig.arm) class DonbotMoveJoints(ProcessModule): @@ -166,19 +156,10 @@ class DonbotMoveJoints(ProcessModule): list that should be applied or a pre-defined position can be used, such as "parking" """ - def _execute(self, desig): - solution = desig.reference() - if solution['cmd'] == "move-arm-joints": - robot = BulletWorld.robot - left_arm_poses = solution['left_arm_poses'] - - if type(left_arm_poses) == dict: - for joint, pose in left_arm_poses.items(): - robot.set_joint_state(joint, pose) - elif type(left_arm_poses) == str and left_arm_poses == "park": - _park_arms("left") - - time.sleep(0.5) + def _execute(self, desig: MoveArmJointsMotion.Motion): + robot = BulletWorld.robot + if desig.left_arm_poses: + robot.set_joint_states(desig.left_arm_poses) class DonbotWorldStateDetecting(ProcessModule): @@ -186,11 +167,9 @@ class 
DonbotWorldStateDetecting(ProcessModule): This process module detectes an object even if it is not in the field of view of the robot. """ - def _execute(self, desig): - solution = desig.reference() - if solution['cmd'] == "world-state-detecting": - obj_type = solution['object_type'] - return list(filter(lambda obj: obj.type == obj_type, BulletWorld.current_bullet_world.objects))[0] + def _execute(self, desig: WorldStateDetectingMotion.Motion): + obj_type = desig.object_type + return list(filter(lambda obj: obj.type == obj_type, BulletWorld.current_bullet_world.objects))[0] def _move_arm_tcp(target: Pose, robot: Object, arm: str) -> None: gripper = robot_description.get_tool_frame(arm) From a7ff795059365d404182a2448b231feba9a31767 Mon Sep 17 00:00:00 2001 From: tthielke Date: Tue, 17 Oct 2023 14:16:23 +0200 Subject: [PATCH 03/11] [boxy-pms] new process module boxy started --- .../process_modules/boxy_process_modules.py | 92 ++++++++++--------- .../robot_descriptions/donbot_description.py | 1 + 2 files changed, 48 insertions(+), 45 deletions(-) diff --git a/src/pycram/process_modules/boxy_process_modules.py b/src/pycram/process_modules/boxy_process_modules.py index 7879654ca..1870b449d 100644 --- a/src/pycram/process_modules/boxy_process_modules.py +++ b/src/pycram/process_modules/boxy_process_modules.py @@ -6,8 +6,9 @@ import pycram.bullet_world_reasoning as btr import pycram.helper as helper from ..bullet_world import BulletWorld +from ..designators.motion_designator import * from ..external_interfaces.ik import request_ik -from ..local_transformer import LocalTransformer as local_tf +from ..local_transformer import LocalTransformer as local_tf, LocalTransformer from ..process_module import ProcessModule, ProcessModuleManager from ..robot_descriptions import robot_description @@ -33,18 +34,9 @@ class BoxyNavigation(ProcessModule): The process module to move the robot from one position to another. """ - def _execute(self, desig): - solution = desig.reference() - if solution['cmd'] == 'navigate': - robot = BulletWorld.robot - # Reset odom joints to zero - for joint_name in robot_description.odom_joints: - robot.set_joint_state(joint_name, 0.0) - # Set actual goal pose - robot.set_position_and_orientation(solution['target'], solution['orientation']) - time.sleep(0.5) - local_tf.update_from_btr() - + def _execute(self, desig: MoveMotion.Motion): + robot = BulletWorld.robot + robot.set_pose(desig.target) class BoxyPickUp(ProcessModule): @@ -53,23 +45,21 @@ class BoxyPickUp(ProcessModule): The object has to be reachable for this process module to succeed. 
""" - def _execute(self, desig): - solution = desig.reference() - if solution['cmd'] == 'pick': - object = solution['object'] - robot = BulletWorld.robot - grasp = robot_description.grasps.get_orientation_for_grasp(solution['grasp']) - target = [object.get_position(), grasp] - target = helper._transform_to_torso(target, robot) - arm = "left" if solution['gripper'] == robot_description.get_tool_frame("left") else "right" - joints = robot_description._safely_access_chains(arm).joints - #tip = "r_wrist_roll_link" if solution['gripper'] == "r_gripper_tool_frame" else "l_wrist_roll_link" - inv = request_ik(robot_description.base_frame, solution['gripper'], target, robot, joints) - helper._apply_ik(robot, inv, solution['gripper']) - #inv = p.calculateInverseKinematics(robot.id, robot.get_link_id(solution['gripper']), target, - # maxNumIterations=100) - robot.attach(object, solution['gripper']) - time.sleep(0.5) + def _execute(self, desig: PickUpMotion.Motion): + object = desig.object_desig.bullet_world_object + robot = BulletWorld.robot + grasp = robot_description.grasps.get_orientation_for_grasp(desig.grasp) + target = object.get_pose() + target.orientation.x = grasp[0] + target.orientation.y = grasp[1] + target.orientation.z = grasp[2] + target.orientation.w = grasp[3] + + arm = desig.arm + + _move_arm_tcp(target, robot, arm) + tool_frame = robot_description.get_tool_frame(arm) + robot.attach(object, tool_frame) class BoxyPlace(ProcessModule): @@ -77,20 +67,25 @@ class BoxyPlace(ProcessModule): This process module places an object at the given position in world coordinate frame. """ - def _execute(self, desig): - solution = desig.reference() - if solution['cmd'] == 'place': - object = solution['object'] - robot = BulletWorld.robot - target = object.get_position_and_orientation() - target = helper._transform_to_torso(target, robot) - arm = "left" if solution['gripper'] == robot_description.get_tool_frame("left") else "right" - joints = robot_description._safely_access_chains(arm).joints - #tip = "r_wrist_roll_link" if solution['gripper'] == "r_gripper_tool_frame" else "l_wrist_roll_link" - inv = request_ik(robot_description.base_frame, solution['gripper'], target, robot, joints) - helper._apply_ik(robot, inv, solution['gripper']) - robot.detach(object) - time.sleep(0.5) + def _execute(self, desig: PlaceMotion.Motion): + """ + + :param desig: A PlaceMotion + :return: + """ + object = desig.object.bullet_world_object + robot = BulletWorld.robot + arm = desig.arm + + # Transformations such that the target position is the position of the object and not the tcp + object_pose = object.get_pose() + local_tf = LocalTransformer() + tcp_to_object = local_tf.transform_pose(object_pose, + robot.get_link_tf_frame(robot_description.get_tool_frame(arm))) + target_diff = desig.target.to_transform("target").inverse_times(tcp_to_object.to_transform("object")).to_pose() + + _move_arm_tcp(target_diff, robot, arm) + robot.detach(object) class BoxyAccessing(ProcessModule): @@ -142,7 +137,6 @@ def _execute(self, desig): _park_arms() - class BoxyMoveHead(ProcessModule): """ This process module moves the head to look at a specific point in the world coordinate frame. 
@@ -286,6 +280,14 @@ def _execute(self, desig): return list(filter(lambda obj: obj.type == obj_type, BulletWorld.current_bullet_world.objects))[0] +def _move_arm_tcp(target: Pose, robot: Object, arm: str) -> None: + gripper = robot_description.get_tool_frame(arm) + + joints = robot_description.chains[arm].joints + + inv = request_ik(target, robot, joints, gripper) + helper._apply_ik(robot, inv, joints) + class BoxyManager(ProcessModuleManager): def __init__(self): diff --git a/src/pycram/robot_descriptions/donbot_description.py b/src/pycram/robot_descriptions/donbot_description.py index 728a9e324..40d535171 100644 --- a/src/pycram/robot_descriptions/donbot_description.py +++ b/src/pycram/robot_descriptions/donbot_description.py @@ -45,6 +45,7 @@ def __init__(self): arm_manip = ManipulatorDescription(arm_inter, tool_frame="gripper_tool_frame", gripper_description=gripper) # or ur5_tool0 self.add_chain("left", arm_manip) + self.add_chain("right", arm_manip) # Neck neck_base_link = "ur5_base_link" neck = ChainDescription("neck", arm_joints, arm_links, base_link=neck_base_link) From e3a11400e3b5d239771bc5668ff87846b5626bfe Mon Sep 17 00:00:00 2001 From: tthielke Date: Wed, 25 Oct 2023 09:46:51 +0200 Subject: [PATCH 04/11] [boxy-pms] process module ready to test --- .../process_modules/boxy_process_modules.py | 158 +++++++----------- 1 file changed, 61 insertions(+), 97 deletions(-) diff --git a/src/pycram/process_modules/boxy_process_modules.py b/src/pycram/process_modules/boxy_process_modules.py index 1870b449d..762b52197 100644 --- a/src/pycram/process_modules/boxy_process_modules.py +++ b/src/pycram/process_modules/boxy_process_modules.py @@ -1,6 +1,7 @@ import time from threading import Lock +import numpy as np import pybullet as p import pycram.bullet_world_reasoning as btr @@ -11,6 +12,7 @@ from ..local_transformer import LocalTransformer as local_tf, LocalTransformer from ..process_module import ProcessModule, ProcessModuleManager from ..robot_descriptions import robot_description +from tf.transformations import euler_from_quaternion, quaternion_from_euler def _park_arms(arm): @@ -124,7 +126,9 @@ def _execute(self, desig): kitchen.set_joint_state(drawer_joint, 0.3) time.sleep(0.5) - +""" +Keine Entsprechung im PR2-Modul? Wird dies benötigt? ----- -> Open/Close +""" class BoxyParkArms(ProcessModule): """ This process module is for moving the arms in a parking position. @@ -136,7 +140,9 @@ def _execute(self, desig): if solutions['cmd'] == 'park': _park_arms() - +""" +Richtiger Link? Wie Arm ansprechen? ----- +""" class BoxyMoveHead(ProcessModule): """ This process module moves the head to look at a specific point in the world coordinate frame. 
@@ -144,65 +150,39 @@ class BoxyMoveHead(ProcessModule): """ def _execute(self, desig): - solutions = desig.reference() - if solutions['cmd'] == 'looking': - robot = BulletWorld.robot - neck_base_frame = local_tf.projection_namespace + '/' + robot_description.chains["neck"].base_link - if type(solutions['target']) is str: - target = local_tf.projection_namespace + '/' + solutions['target'] - pose_in_neck_base = local_tf.tf_transform(neck_base_frame, target) - elif helper_deprecated.is_list_pose(solutions['target']) or helper_deprecated.is_list_position(solutions['target']): - pose = helper_deprecated.ensure_pose(solutions['target']) - pose_in_neck_base = local_tf.tf_pose_transform(local_tf.map_frame, neck_base_frame, pose) - - vector = pose_in_neck_base[0] - # +x as forward - # +y as left - # +z as up - x = vector[1] - y = -vector[0] - z = vector[2] - conf = None - if x > 0: - if z < 0.5: - if y > 0.4: - conf = "down_left" - elif y < -0.4: - conf = "down_right" - else: - conf = "down" - else: - if y > 0.4: - conf = "left" - elif y < -0.4: - conf = "right" - else: - conf = "forward" - else: - if z < 0.5: - conf = "behind" - else: - conf = "behind_up" - for joint, state in robot_description.get_static_joint_chain("neck", conf).items(): - robot.set_joint_state(joint, state) + target = desig.target + robot = BulletWorld.robot + + local_transformer = LocalTransformer() + + pose_in_neck = local_transformer.transform_pose(target, robot.get_link_tf_frame("neck_base_link")) + + new_pan = np.arctan2(pose_in_neck.position.y, pose_in_neck.position.x) + new_tilt = np.arctan2(pose_in_neck.position.z, + pose_in_neck.position.x ** 2 + pose_in_neck.position.y ** 2) * -1 + + tcp_rotation = quaternion_from_euler(new_tilt, 0, new_pan) + shoulder_pose = Pose([-0.2, 0.3, 1.31], [-0.31, 0.63, 0.70, -0.02], "map") + + print(shoulder_pose) + """ + Wie spreche ich den Arm an? Geht das so überhaupt? ----- -> neck + """ + _move_arm_tcp(shoulder_pose, robot, "neck") class BoxyMoveGripper(ProcessModule): """ This process module controls the gripper of the robot. They can either be opened or closed. - Furthermore, it can only moved one gripper at a time. + Furthermore, it can only move one gripper at a time. 
""" def _execute(self, desig): - solution = desig.reference() - if solution['cmd'] == "move-gripper": - robot = BulletWorld.robot - gripper = solution['gripper'] - motion = solution['motion'] - for joint, state in robot_description.get_static_gripper_chain(gripper, motion).items(): - # TODO: Test this, add gripper-opening/-closing to the demo.py - robot.set_joint_state(joint, state) - time.sleep(0.5) + robot = BulletWorld.robot + gripper = desig.gripper + motion = desig.motion + for joint, state in robot_description.get_static_gripper_chain(gripper, motion).items(): + robot.set_joint_state(joint, state) class BoxyDetecting(ProcessModule): @@ -212,17 +192,17 @@ class BoxyDetecting(ProcessModule): """ def _execute(self, desig): - solution = desig.reference() - if solution['cmd'] == "detecting": - robot = BulletWorld.robot - object_type = solution['object_type'] - cam_frame_name = solution['cam_frame'] - front_facing_axis = solution['front_facing_axis'] + robot = BulletWorld.robot + object_type = desig.object_type + # Should be "wide_stereo_optical_frame" + cam_frame_name = robot_description.get_camera_frame() + # should be [0, 0, 1] + front_facing_axis = robot_description.front_facing_axis - objects = BulletWorld.current_bullet_world.get_objects_by_type(object_type) - for obj in objects: - if btr.visible(obj, robot.get_link_position_and_orientation(cam_frame_name), front_facing_axis, 0.5): - return obj + objects = BulletWorld.current_bullet_world.get_objects_by_type(object_type) + for obj in objects: + if btr.visible(obj, robot.get_link_pose(cam_frame_name), front_facing_axis): + return obj class BoxyMoveTCP(ProcessModule): @@ -230,42 +210,28 @@ class BoxyMoveTCP(ProcessModule): This process moves the tool center point of either the right or the left arm. """ - def _execute(self, desig): - solution = desig.reference() - if solution['cmd'] == "move-tcp": - target = solution['target'] - gripper = solution['gripper'] - robot = BulletWorld.robot - inv = p.calculateInverseKinematics(robot.id, robot.get_link_id(gripper), target) - helper._apply_ik(robot, inv) - time.sleep(0.5) + def _execute(self, desig: MoveTCPMotion.Motion): + target = desig.target + robot = BulletWorld.robot + _move_arm_tcp(target, robot, desig.arm) -class BoxyMoveJoints(ProcessModule): +""" +MoveArmJoints? PR2 hat beides, wird hier auch beides benötigt?----- -> so lassen +""" +class BoxyMoveArmJoints(ProcessModule): """ This process modules moves the joints of either the right or the left arm. 
The joint states can be given as list that should be applied or a pre-defined position can be used, such as "parking" """ - def _execute(self, desig): - solution = desig.reference() - if solution['cmd'] == "move-arm-joints": - robot = BulletWorld.robot - right_arm_poses = solution['right_arm_poses'] - left_arm_poses = solution['left_arm_poses'] - if type(right_arm_poses) == dict: - for joint, pose in right_arm_poses.items(): - robot.set_joint_state(joint, pose) - elif type(right_arm_poses) == str and right_arm_poses == "park": - _park_arms("right") - - if type(left_arm_poses) == dict: - for joint, pose in left_arm_poses.items(): - robot.set_joint_state(joint, pose) - elif type(right_arm_poses) == str and left_arm_poses == "park": - _park_arms("left") + def _execute(self, desig: MoveArmJointsMotion.Motion): - time.sleep(0.5) + robot = BulletWorld.robot + if desig.right_arm_poses: + robot.set_joint_states(desig.right_arm_poses) + if desig.left_arm_poses: + robot.set_joint_states(desig.left_arm_poses) class BoxyWorldStateDetecting(ProcessModule): @@ -273,11 +239,9 @@ class BoxyWorldStateDetecting(ProcessModule): This process module detectes an object even if it is not in the field of view of the robot. """ - def _execute(self, desig): - solution = desig.reference() - if solution['cmd'] == "world-state-detecting": - obj_type = solution['object_type'] - return list(filter(lambda obj: obj.type == obj_type, BulletWorld.current_bullet_world.objects))[0] + def _execute(self, desig: WorldStateDetectingMotion.Motion): + obj_type = desig.object_type + return list(filter(lambda obj: obj.type == obj_type, BulletWorld.current_bullet_world.objects))[0] def _move_arm_tcp(target: Pose, robot: Object, arm: str) -> None: @@ -331,7 +295,7 @@ def move_tcp(self): def move_arm_joints(self): if ProcessModuleManager.execution_type == "simulated": - return BoxyMoveJoints(self._move_arm_joints_lock) + return BoxyMoveArmJoints(self._move_arm_joints_lock) def world_state_detecting(self): if ProcessModuleManager.execution_type == "simulated": From 97de9dcf54797802269be9df8a4fdaa12f546b74 Mon Sep 17 00:00:00 2001 From: tthielke Date: Thu, 23 Nov 2023 12:43:25 +0100 Subject: [PATCH 05/11] [donbot-pms] process module done --- .../process_modules/donbot_process_modules.py | 27 ++++++++++++++----- .../robot_descriptions/donbot_description.py | 6 +++++ 2 files changed, 26 insertions(+), 7 deletions(-) diff --git a/src/pycram/process_modules/donbot_process_modules.py b/src/pycram/process_modules/donbot_process_modules.py index bf001f6d5..59788e1c5 100644 --- a/src/pycram/process_modules/donbot_process_modules.py +++ b/src/pycram/process_modules/donbot_process_modules.py @@ -90,24 +90,37 @@ def _execute(self, desig): target = desig.target robot = BulletWorld.robot + _park_arms("left") + local_transformer = LocalTransformer() pose_in_shoulder = local_transformer.transform_pose(target, robot.get_link_tf_frame("ur5_shoulder_link")) - new_pan = np.arctan2(pose_in_shoulder.position.y, pose_in_shoulder.position.x) - new_tilt = np.arctan2(pose_in_shoulder.position.z, pose_in_shoulder.position.x ** 2 + pose_in_shoulder.position.y ** 2) * -1 + if pose_in_shoulder.position.x >= 0 and pose_in_shoulder.position.x >= abs(pose_in_shoulder.position.y): + for joint, pose in robot_description.get_static_joint_chain("left", "front").items(): + robot.set_joint_state(joint, pose) + if pose_in_shoulder.position.y >= 0 and pose_in_shoulder.position.y >= abs(pose_in_shoulder.position.x): + for joint, pose in 
robot_description.get_static_joint_chain("left", "arm_right").items(): + robot.set_joint_state(joint, pose) + if pose_in_shoulder.position.x <= 0 and abs(pose_in_shoulder.position.x) > abs(pose_in_shoulder.position.y): + for joint, pose in robot_description.get_static_joint_chain("left", "back").items(): + robot.set_joint_state(joint, pose) + if pose_in_shoulder.position.y <= 0 and abs(pose_in_shoulder.position.y) > abs(pose_in_shoulder.position.x): + for joint, pose in robot_description.get_static_joint_chain("left", "arm_left").items(): + robot.set_joint_state(joint, pose) + + new_pan = (np.arctan2(pose_in_shoulder.position.y, pose_in_shoulder.position.x) + np.pi) - tcp_rotation = quaternion_from_euler(new_tilt, 0, new_pan) - shoulder_pose = Pose([-0.2, 0.3, 1.31], [-0.31, 0.63, 0.70, -0.02], "map") + print(new_pan) - print(shoulder_pose) + print(pose_in_shoulder) - _move_arm_tcp(shoulder_pose, robot, "left") + robot.set_joint_state("ur5_shoulder_pan_joint", new_pan) class DonbotMoveGripper(ProcessModule): """ This process module controls the gripper of the robot. They can either be opened or closed. - Furthermore, it can only moved one gripper at a time. + Furthermore, it can only move one gripper at a time. """ def _execute(self, desig): diff --git a/src/pycram/robot_descriptions/donbot_description.py b/src/pycram/robot_descriptions/donbot_description.py index 40d535171..474cf7775 100644 --- a/src/pycram/robot_descriptions/donbot_description.py +++ b/src/pycram/robot_descriptions/donbot_description.py @@ -72,6 +72,12 @@ def __init__(self): park = [3.234022855758667, -1.5068710486041468, -0.7870314756976526, -2.337625328694479, 1.5699548721313477, -1.6504042784320276] self.add_static_joint_chain("left", "park", park) + front = [-1.5, -1.5, 0, 1.5, -1.5, 1.5] + arm_right = [-3, -1.5, 0, 1.5, -1.5, 1.5] + back = [-4.7, -1.5, 0, 1.5, -1.5, 1.5] + arm_left = [0, -1.5, 0, 1.5, -1.5, 1.5] + self.add_static_joint_chains("left", {"front": front, "arm_right": arm_right, "back": back, + "arm_left": arm_left}) def get_camera_frame(self, name="rgb_camera"): # TODO: Hacky since only one optical camera frame from pr2 is used From 328b5198e2cef61b8e9e18a4bf7e82197ee6d9f6 Mon Sep 17 00:00:00 2001 From: tthielke Date: Thu, 23 Nov 2023 12:44:43 +0100 Subject: [PATCH 06/11] [boxy-pms] process module progress --- .../process_modules/boxy_process_modules.py | 109 ++++++++++-------- .../robot_descriptions/boxy_description.py | 12 +- 2 files changed, 68 insertions(+), 53 deletions(-) diff --git a/src/pycram/process_modules/boxy_process_modules.py b/src/pycram/process_modules/boxy_process_modules.py index 762b52197..7963024d9 100644 --- a/src/pycram/process_modules/boxy_process_modules.py +++ b/src/pycram/process_modules/boxy_process_modules.py @@ -8,6 +8,7 @@ import pycram.helper as helper from ..bullet_world import BulletWorld from ..designators.motion_designator import * +from ..enums import JointType from ..external_interfaces.ik import request_ik from ..local_transformer import LocalTransformer as local_tf, LocalTransformer from ..process_module import ProcessModule, ProcessModuleManager @@ -90,44 +91,46 @@ def _execute(self, desig: PlaceMotion.Motion): robot.detach(object) -class BoxyAccessing(ProcessModule): +class BoxyOpen(ProcessModule): """ - This process module responsible for opening drawers to access the objects inside. This works by firstly moving - the end effector to the handle of the drawer. Next, the end effector is moved the respective distance to the back. 
- This provides the illusion the robot would open the drawer by himself. - Then the drawer will be opened by setting the joint pose of the drawer joint. + Low-level implementation of opening a container in the simulation. Assumes the handle is already grasped. """ - def _execute(self, desig): - solution = desig.reference() - if solution['cmd'] == 'access': - kitchen = solution['part_of'] - robot = BulletWorld.robot - gripper = solution['gripper'] - drawer_handle = solution['drawer_handle'] - drawer_joint = solution['drawer_joint'] - dis = solution['distance'] - robot.set_joint_state(robot_description.torso_joint, -0.1) - arm = "left" if solution['gripper'] == robot_description.get_tool_frame("left") else "right" - joints = robot_description._safely_access_chains(arm).joints - #inv = p.calculateInverseKinematics(robot.id, robot.get_link_id(gripper), kitchen.get_link_position(drawer_handle)) - target = helper._transform_to_torso(kitchen.get_link_position_and_orientation(drawer_handle), robot) - #target = (target[0], [0, 0, 0, 1]) - inv = request_ik(robot_description.base_frame, gripper, target , robot, joints ) - helper._apply_ik(robot, inv, gripper) - time.sleep(0.2) - cur_pose = robot.get_pose() - robot.set_position([cur_pose[0]-dis, cur_pose[1], cur_pose[2]]) - han_pose = kitchen.get_link_position(drawer_handle) - new_p = [[han_pose[0] - dis, han_pose[1], han_pose[2]], kitchen.get_link_orientation(drawer_handle)] - new_p = helper._transform_to_torso(new_p, robot) - inv = request_ik(robot_description.base_frame, gripper, new_p, robot, joints) - helper._apply_ik(robot, inv, gripper) - kitchen.set_joint_state(drawer_joint, 0.3) - time.sleep(0.5) + def _execute(self, desig: OpeningMotion.Motion): + part_of_object = desig.object_part.bullet_world_object + + container_joint = part_of_object.find_joint_above(desig.object_part.name, JointType.PRISMATIC) + + goal_pose = btr.link_pose_for_joint_config(part_of_object, { + container_joint: part_of_object.get_joint_limits(container_joint)[1] - 0.05}, desig.object_part.name) + + _move_arm_tcp(goal_pose, BulletWorld.robot, desig.arm) + + desig.object_part.bullet_world_object.set_joint_state(container_joint, + part_of_object.get_joint_limits( + container_joint)[1]) + + +class BoxyClose(ProcessModule): + """ + Low-level implementation that lets the robot close a grasped container, in simulation + """ + def _execute(self, desig: ClosingMotion.Motion): + part_of_object = desig.object_part.bullet_world_object + + container_joint = part_of_object.find_joint_above(desig.object_part.name, JointType.PRISMATIC) + + goal_pose = btr.link_pose_for_joint_config(part_of_object, { + container_joint: part_of_object.get_joint_limits(container_joint)[0]}, desig.object_part.name) + + _move_arm_tcp(goal_pose, BulletWorld.robot, desig.arm) + + desig.object_part.bullet_world_object.set_joint_state(container_joint, + part_of_object.get_joint_limits( + container_joint)[0]) """ -Keine Entsprechung im PR2-Modul? Wird dies benötigt? ----- -> Open/Close +Keine Entsprechung im PR2-Modul? Wird dies benötigt? ----- -> Open/Close --> Done, noch nicht getestet """ class BoxyParkArms(ProcessModule): """ @@ -140,9 +143,7 @@ def _execute(self, desig): if solutions['cmd'] == 'park': _park_arms() -""" -Richtiger Link? Wie Arm ansprechen? ----- -""" + class BoxyMoveHead(ProcessModule): """ This process module moves the head to look at a specific point in the world coordinate frame. 
@@ -153,22 +154,32 @@ def _execute(self, desig): target = desig.target robot = BulletWorld.robot + _park_arms("left") + local_transformer = LocalTransformer() - pose_in_neck = local_transformer.transform_pose(target, robot.get_link_tf_frame("neck_base_link")) + pose_in_shoulder = local_transformer.transform_pose(target, robot.get_link_tf_frame("neck_shoulder_link")) - new_pan = np.arctan2(pose_in_neck.position.y, pose_in_neck.position.x) - new_tilt = np.arctan2(pose_in_neck.position.z, - pose_in_neck.position.x ** 2 + pose_in_neck.position.y ** 2) * -1 + if pose_in_shoulder.position.x >= 0 and pose_in_shoulder.position.x >= abs(pose_in_shoulder.position.y): + for joint, pose in robot_description.get_static_joint_chain("left", "front").items(): + robot.set_joint_state(joint, pose) + if pose_in_shoulder.position.y >= 0 and pose_in_shoulder.position.y >= abs(pose_in_shoulder.position.x): + for joint, pose in robot_description.get_static_joint_chain("left", "neck_right").items(): + robot.set_joint_state(joint, pose) + if pose_in_shoulder.position.x <= 0 and abs(pose_in_shoulder.position.x) > abs(pose_in_shoulder.position.y): + for joint, pose in robot_description.get_static_joint_chain("left", "back").items(): + robot.set_joint_state(joint, pose) + if pose_in_shoulder.position.y <= 0 and abs(pose_in_shoulder.position.y) > abs(pose_in_shoulder.position.x): + for joint, pose in robot_description.get_static_joint_chain("left", "neck_left").items(): + robot.set_joint_state(joint, pose) - tcp_rotation = quaternion_from_euler(new_tilt, 0, new_pan) - shoulder_pose = Pose([-0.2, 0.3, 1.31], [-0.31, 0.63, 0.70, -0.02], "map") + new_pan = (np.arctan2(pose_in_shoulder.position.y, pose_in_shoulder.position.x) + np.pi) - print(shoulder_pose) - """ - Wie spreche ich den Arm an? Geht das so überhaupt? ----- -> neck - """ - _move_arm_tcp(shoulder_pose, robot, "neck") + print(new_pan) + + print(pose_in_shoulder) + + robot.set_joint_state("neck_shoulder_pan_joint", new_pan) class BoxyMoveGripper(ProcessModule): @@ -216,9 +227,7 @@ def _execute(self, desig: MoveTCPMotion.Motion): _move_arm_tcp(target, robot, desig.arm) -""" -MoveArmJoints? PR2 hat beides, wird hier auch beides benötigt?----- -> so lassen -""" + class BoxyMoveArmJoints(ProcessModule): """ This process modules moves the joints of either the right or the left arm. 
The joint states can be given as diff --git a/src/pycram/robot_descriptions/boxy_description.py b/src/pycram/robot_descriptions/boxy_description.py index dc459d492..d41867466 100644 --- a/src/pycram/robot_descriptions/boxy_description.py +++ b/src/pycram/robot_descriptions/boxy_description.py @@ -28,12 +28,18 @@ def __init__(self): neck_behind = [-1.68, -0.26, -0.12, -0.24, 1.49, 3.09] # neck_forward = [-1.39, -3.09, -0.78, 1.91, 1.51, -0.14] neck_forward = [-1.20, 2.39, 0.17, 1.77, 1.44, -0.35] - neck_right = [-1.20, 2.39, 0.17, 1.77, 1.44, 0.0] - neck_left = [-1.20, 2.39, 0.17, 1.77, 1.44, -0.7] + # neck_right = [-1.20, 2.39, 0.17, 1.77, 1.44, 0.0] + # neck_left = [-1.20, 2.39, 0.17, 1.77, 1.44, -0.7] neck = ChainDescription("neck", neck_joints, neck_links, base_link=neck_base) neck.add_static_joint_chains({"away": neck_away, "down": neck_down, "down_left": neck_down_left, "down_right": neck_down_right, "behind_up": neck_behind_up, "behind": neck_behind, - "forward": neck_forward, "right": neck_right, "left": neck_left}) + "forward": neck_forward}) + front = [-1.5, -1.5, 0, 1.5, -1.5, 1.5] + neck_right = [-3, -1.5, 0, 1.5, -1.5, 1.5] + back = [-4.7, -1.5, 0, 1.5, -1.5, 1.5] + neck_left = [0, -1.5, 0, 1.5, -1.5, 1.5] + self.add_static_joint_chains("left", {"front": front, "arm_right": neck_right, "back": back, + "arm_left": neck_left}) self.add_chain("neck", neck) # Arms left_joints = ["left_arm_0_joint", "left_arm_1_joint", "left_arm_2_joint", From 565b09a8d57ed5ccdf59e9f291b73e524bd6a846 Mon Sep 17 00:00:00 2001 From: tthielke Date: Mon, 27 Nov 2023 10:34:49 +0100 Subject: [PATCH 07/11] [boxy-pms] process module preliminary state --- launch/ik_and_description.launch | 2 +- resources/boxy.urdf | 2 +- .../process_modules/boxy_process_modules.py | 20 +++++++++++++------ .../robot_descriptions/boxy_description.py | 13 ++++++------ 4 files changed, 23 insertions(+), 14 deletions(-) diff --git a/launch/ik_and_description.launch b/launch/ik_and_description.launch index 8baf63449..160635333 100644 --- a/launch/ik_and_description.launch +++ b/launch/ik_and_description.launch @@ -1,5 +1,5 @@ - + diff --git a/resources/boxy.urdf b/resources/boxy.urdf index 2c7661b8a..40af20fc3 100644 --- a/resources/boxy.urdf +++ b/resources/boxy.urdf @@ -3,7 +3,7 @@ - + diff --git a/src/pycram/process_modules/boxy_process_modules.py b/src/pycram/process_modules/boxy_process_modules.py index 7963024d9..bb372b62f 100644 --- a/src/pycram/process_modules/boxy_process_modules.py +++ b/src/pycram/process_modules/boxy_process_modules.py @@ -154,33 +154,41 @@ def _execute(self, desig): target = desig.target robot = BulletWorld.robot - _park_arms("left") + _park_arms("neck") local_transformer = LocalTransformer() pose_in_shoulder = local_transformer.transform_pose(target, robot.get_link_tf_frame("neck_shoulder_link")) + BulletWorld.current_bullet_world.add_vis_axis(pose_in_shoulder) + if pose_in_shoulder.position.x >= 0 and pose_in_shoulder.position.x >= abs(pose_in_shoulder.position.y): - for joint, pose in robot_description.get_static_joint_chain("left", "front").items(): + for joint, pose in robot_description.get_static_joint_chain("neck", "front").items(): robot.set_joint_state(joint, pose) if pose_in_shoulder.position.y >= 0 and pose_in_shoulder.position.y >= abs(pose_in_shoulder.position.x): - for joint, pose in robot_description.get_static_joint_chain("left", "neck_right").items(): + for joint, pose in robot_description.get_static_joint_chain("neck", "neck_right").items(): robot.set_joint_state(joint, pose) if 
pose_in_shoulder.position.x <= 0 and abs(pose_in_shoulder.position.x) > abs(pose_in_shoulder.position.y): - for joint, pose in robot_description.get_static_joint_chain("left", "back").items(): + for joint, pose in robot_description.get_static_joint_chain("neck", "back").items(): robot.set_joint_state(joint, pose) if pose_in_shoulder.position.y <= 0 and abs(pose_in_shoulder.position.y) > abs(pose_in_shoulder.position.x): - for joint, pose in robot_description.get_static_joint_chain("left", "neck_left").items(): + for joint, pose in robot_description.get_static_joint_chain("neck", "neck_left").items(): robot.set_joint_state(joint, pose) - new_pan = (np.arctan2(pose_in_shoulder.position.y, pose_in_shoulder.position.x) + np.pi) + pose_in_shoulder = local_transformer.transform_pose(target, robot.get_link_tf_frame("neck_shoulder_link")) + + new_pan = np.arctan2(pose_in_shoulder.position.y, pose_in_shoulder.position.x) + np.pi print(new_pan) print(pose_in_shoulder) + print(robot.get_joint_state("neck_shoulder_pan_joint")) + robot.set_joint_state("neck_shoulder_pan_joint", new_pan) + print(robot.get_joint_state("neck_shoulder_pan_joint")) + class BoxyMoveGripper(ProcessModule): """ diff --git a/src/pycram/robot_descriptions/boxy_description.py b/src/pycram/robot_descriptions/boxy_description.py index d41867466..b03a7d15f 100644 --- a/src/pycram/robot_descriptions/boxy_description.py +++ b/src/pycram/robot_descriptions/boxy_description.py @@ -34,13 +34,14 @@ def __init__(self): neck.add_static_joint_chains({"away": neck_away, "down": neck_down, "down_left": neck_down_left, "down_right": neck_down_right, "behind_up": neck_behind_up, "behind": neck_behind, "forward": neck_forward}) - front = [-1.5, -1.5, 0, 1.5, -1.5, 1.5] - neck_right = [-3, -1.5, 0, 1.5, -1.5, 1.5] - back = [-4.7, -1.5, 0, 1.5, -1.5, 1.5] - neck_left = [0, -1.5, 0, 1.5, -1.5, 1.5] - self.add_static_joint_chains("left", {"front": front, "arm_right": neck_right, "back": back, - "arm_left": neck_left}) self.add_chain("neck", neck) + front = [0, -1.57, 0, 1.58, -1.5, 1.5] + neck_right = [-1.5, -1.57, 0, 1.58, -1.5, 1.5] + back = [3.14, -1.57, 0, 1.58, -1.5, 1.5] + neck_left = [1.5, -1.57, 0, 1.5, 1.58, 1.5] + self.add_static_joint_chains("neck", {"front": front, "neck_right": neck_right, "back": back, + "neck_left": neck_left}) + # Arms left_joints = ["left_arm_0_joint", "left_arm_1_joint", "left_arm_2_joint", "left_arm_3_joint", "left_arm_4_joint", "left_arm_5_joint", From 3a63a89a082fb552f5e20d17904fbd3bbd2868fb Mon Sep 17 00:00:00 2001 From: tthielke Date: Mon, 27 Nov 2023 13:46:28 +0100 Subject: [PATCH 08/11] [donbot-pms] process module tested --- launch/ik_and_description.launch | 2 +- .../process_modules/donbot_process_modules.py | 22 ++++++------------- 2 files changed, 8 insertions(+), 16 deletions(-) diff --git a/launch/ik_and_description.launch b/launch/ik_and_description.launch index 160635333..8baf63449 100644 --- a/launch/ik_and_description.launch +++ b/launch/ik_and_description.launch @@ -1,5 +1,5 @@ - + diff --git a/src/pycram/process_modules/donbot_process_modules.py b/src/pycram/process_modules/donbot_process_modules.py index 59788e1c5..791ba3af4 100644 --- a/src/pycram/process_modules/donbot_process_modules.py +++ b/src/pycram/process_modules/donbot_process_modules.py @@ -90,32 +90,24 @@ def _execute(self, desig): target = desig.target robot = BulletWorld.robot - _park_arms("left") - local_transformer = LocalTransformer() pose_in_shoulder = local_transformer.transform_pose(target, 
robot.get_link_tf_frame("ur5_shoulder_link")) if pose_in_shoulder.position.x >= 0 and pose_in_shoulder.position.x >= abs(pose_in_shoulder.position.y): - for joint, pose in robot_description.get_static_joint_chain("left", "front").items(): - robot.set_joint_state(joint, pose) + robot.set_joint_states(robot_description.get_static_joint_chain("left", "front")) if pose_in_shoulder.position.y >= 0 and pose_in_shoulder.position.y >= abs(pose_in_shoulder.position.x): - for joint, pose in robot_description.get_static_joint_chain("left", "arm_right").items(): - robot.set_joint_state(joint, pose) + robot.set_joint_states(robot_description.get_static_joint_chain("left", "arm_right")) if pose_in_shoulder.position.x <= 0 and abs(pose_in_shoulder.position.x) > abs(pose_in_shoulder.position.y): - for joint, pose in robot_description.get_static_joint_chain("left", "back").items(): - robot.set_joint_state(joint, pose) + robot.set_joint_states(robot_description.get_static_joint_chain("left", "back")) if pose_in_shoulder.position.y <= 0 and abs(pose_in_shoulder.position.y) > abs(pose_in_shoulder.position.x): - for joint, pose in robot_description.get_static_joint_chain("left", "arm_left").items(): - robot.set_joint_state(joint, pose) - - new_pan = (np.arctan2(pose_in_shoulder.position.y, pose_in_shoulder.position.x) + np.pi) + robot.set_joint_states(robot_description.get_static_joint_chain("left", "arm_left")) - print(new_pan) + pose_in_shoulder = local_transformer.transform_pose(target, robot.get_link_tf_frame("ur5_shoulder_link")) - print(pose_in_shoulder) + new_pan = np.arctan2(pose_in_shoulder.position.y, pose_in_shoulder.position.x) - robot.set_joint_state("ur5_shoulder_pan_joint", new_pan) + robot.set_joint_state("ur5_shoulder_pan_joint", new_pan + robot.get_joint_state("ur5_shoulder_pan_joint")) class DonbotMoveGripper(ProcessModule): """ From cba2665f6f12b4bf8056992e49bb8e86f5158393 Mon Sep 17 00:00:00 2001 From: tthielke Date: Mon, 27 Nov 2023 13:46:59 +0100 Subject: [PATCH 09/11] [boxy-pms] process module tested --- .../process_modules/boxy_process_modules.py | 28 ++++--------------- .../robot_descriptions/boxy_description.py | 8 +++--- 2 files changed, 10 insertions(+), 26 deletions(-) diff --git a/src/pycram/process_modules/boxy_process_modules.py b/src/pycram/process_modules/boxy_process_modules.py index bb372b62f..6e095d248 100644 --- a/src/pycram/process_modules/boxy_process_modules.py +++ b/src/pycram/process_modules/boxy_process_modules.py @@ -154,40 +154,24 @@ def _execute(self, desig): target = desig.target robot = BulletWorld.robot - _park_arms("neck") - local_transformer = LocalTransformer() pose_in_shoulder = local_transformer.transform_pose(target, robot.get_link_tf_frame("neck_shoulder_link")) - BulletWorld.current_bullet_world.add_vis_axis(pose_in_shoulder) - if pose_in_shoulder.position.x >= 0 and pose_in_shoulder.position.x >= abs(pose_in_shoulder.position.y): - for joint, pose in robot_description.get_static_joint_chain("neck", "front").items(): - robot.set_joint_state(joint, pose) + robot.set_joint_states(robot_description.get_static_joint_chain("neck", "front")) if pose_in_shoulder.position.y >= 0 and pose_in_shoulder.position.y >= abs(pose_in_shoulder.position.x): - for joint, pose in robot_description.get_static_joint_chain("neck", "neck_right").items(): - robot.set_joint_state(joint, pose) + robot.set_joint_states(robot_description.get_static_joint_chain("neck", "neck_right")) if pose_in_shoulder.position.x <= 0 and abs(pose_in_shoulder.position.x) > 
abs(pose_in_shoulder.position.y): - for joint, pose in robot_description.get_static_joint_chain("neck", "back").items(): - robot.set_joint_state(joint, pose) + robot.set_joint_states(robot_description.get_static_joint_chain("neck", "back")) if pose_in_shoulder.position.y <= 0 and abs(pose_in_shoulder.position.y) > abs(pose_in_shoulder.position.x): - for joint, pose in robot_description.get_static_joint_chain("neck", "neck_left").items(): - robot.set_joint_state(joint, pose) + robot.set_joint_states(robot_description.get_static_joint_chain("neck", "neck_left")) pose_in_shoulder = local_transformer.transform_pose(target, robot.get_link_tf_frame("neck_shoulder_link")) - new_pan = np.arctan2(pose_in_shoulder.position.y, pose_in_shoulder.position.x) + np.pi - - print(new_pan) - - print(pose_in_shoulder) - - print(robot.get_joint_state("neck_shoulder_pan_joint")) - - robot.set_joint_state("neck_shoulder_pan_joint", new_pan) + new_pan = np.arctan2(pose_in_shoulder.position.y, pose_in_shoulder.position.x) - print(robot.get_joint_state("neck_shoulder_pan_joint")) + robot.set_joint_state("neck_shoulder_pan_joint", new_pan + robot.get_joint_state("neck_shoulder_pan_joint")) class BoxyMoveGripper(ProcessModule): diff --git a/src/pycram/robot_descriptions/boxy_description.py b/src/pycram/robot_descriptions/boxy_description.py index b03a7d15f..87ad6b0ab 100644 --- a/src/pycram/robot_descriptions/boxy_description.py +++ b/src/pycram/robot_descriptions/boxy_description.py @@ -35,10 +35,10 @@ def __init__(self): "down_right": neck_down_right, "behind_up": neck_behind_up, "behind": neck_behind, "forward": neck_forward}) self.add_chain("neck", neck) - front = [0, -1.57, 0, 1.58, -1.5, 1.5] - neck_right = [-1.5, -1.57, 0, 1.58, -1.5, 1.5] - back = [3.14, -1.57, 0, 1.58, -1.5, 1.5] - neck_left = [1.5, -1.57, 0, 1.5, 1.58, 1.5] + front = [-1.5, -1.57, 0, 1.58, -1.5, 0] + neck_right = [3.14, -1.57, 0, 1.58, -1.5, 0] + back = [1.5, -1.57, 0, 1.58, -1.5, 0] + neck_left = [0, -1.57, 0, 1.58, -1.5, 0] self.add_static_joint_chains("neck", {"front": front, "neck_right": neck_right, "back": back, "neck_left": neck_left}) From 7bd05630772d33a8119657f0d76fe21148ed074e Mon Sep 17 00:00:00 2001 From: tthielke Date: Tue, 19 Dec 2023 19:10:53 +0100 Subject: [PATCH 10/11] [doc] re-added symbolic link --- doc/source/notebooks/bullet_world.ipynb | 1 + 1 file changed, 1 insertion(+) create mode 120000 doc/source/notebooks/bullet_world.ipynb diff --git a/doc/source/notebooks/bullet_world.ipynb b/doc/source/notebooks/bullet_world.ipynb new file mode 120000 index 000000000..bdb627d54 --- /dev/null +++ b/doc/source/notebooks/bullet_world.ipynb @@ -0,0 +1 @@ +../../../examples/bullet_world.ipynb \ No newline at end of file From 8202199930f78a5bebc3f99d12fca456b14f042b Mon Sep 17 00:00:00 2001 From: tthielke Date: Tue, 19 Dec 2023 19:19:59 +0100 Subject: [PATCH 11/11] [pms] cleanup --- launch/ik_and_description.launch | 2 +- src/pycram/process_modules/boxy_process_modules.py | 8 +++----- src/pycram/process_modules/donbot_process_modules.py | 4 ++-- 3 files changed, 6 insertions(+), 8 deletions(-) diff --git a/launch/ik_and_description.launch b/launch/ik_and_description.launch index 8baf63449..c9816dc5d 100644 --- a/launch/ik_and_description.launch +++ b/launch/ik_and_description.launch @@ -1,5 +1,5 @@ - + diff --git a/src/pycram/process_modules/boxy_process_modules.py b/src/pycram/process_modules/boxy_process_modules.py index 6e095d248..4ce16bf96 100644 --- a/src/pycram/process_modules/boxy_process_modules.py +++ 
b/src/pycram/process_modules/boxy_process_modules.py @@ -129,9 +129,7 @@ def _execute(self, desig: ClosingMotion.Motion): part_of_object.get_joint_limits( container_joint)[0]) -""" -Keine Entsprechung im PR2-Modul? Wird dies benötigt? ----- -> Open/Close --> Done, noch nicht getestet -""" + class BoxyParkArms(ProcessModule): """ This process module is for moving the arms in a parking position. @@ -184,8 +182,7 @@ def _execute(self, desig): robot = BulletWorld.robot gripper = desig.gripper motion = desig.motion - for joint, state in robot_description.get_static_gripper_chain(gripper, motion).items(): - robot.set_joint_state(joint, state) + robot.set_joint_states(robot_description.get_static_gripper_chain(gripper, motion)) class BoxyDetecting(ProcessModule): @@ -253,6 +250,7 @@ def _move_arm_tcp(target: Pose, robot: Object, arm: str) -> None: inv = request_ik(target, robot, joints, gripper) helper._apply_ik(robot, inv, joints) + class BoxyManager(ProcessModuleManager): def __init__(self): diff --git a/src/pycram/process_modules/donbot_process_modules.py b/src/pycram/process_modules/donbot_process_modules.py index 791ba3af4..ed5c8ae80 100644 --- a/src/pycram/process_modules/donbot_process_modules.py +++ b/src/pycram/process_modules/donbot_process_modules.py @@ -109,6 +109,7 @@ def _execute(self, desig): robot.set_joint_state("ur5_shoulder_pan_joint", new_pan + robot.get_joint_state("ur5_shoulder_pan_joint")) + class DonbotMoveGripper(ProcessModule): """ This process module controls the gripper of the robot. They can either be opened or closed. @@ -119,8 +120,7 @@ def _execute(self, desig): robot = BulletWorld.robot gripper = desig.gripper motion = desig.motion - for joint, state in robot_description.get_static_gripper_chain(gripper, motion).items(): - robot.set_joint_state(joint, state) + robot.set_joint_states(robot_description.get_static_gripper_chain(gripper, motion)) class DonbotDetecting(ProcessModule):
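By the end of the series, the Donbot and Boxy MoveHead modules converge on the same look-at scheme: transform the target into the frame of the pan link, move the arm or neck into a coarse pre-defined configuration chosen by the quadrant the target falls into, then turn the pan joint by the remaining yaw offset relative to its current state. The sketch below reproduces that geometry with plain numpy; the configuration names are the ones added to the donbot description in this series ("front", "arm_right", "back", "arm_left"; the Boxy description uses "neck_right" and "neck_left" instead of the arm names), and the first-match tie-breaking and joint handling are illustrative simplifications rather than PyCRAM API calls.

import numpy as np

def coarse_configuration(x: float, y: float) -> str:
    """Quadrant test for the coarse pre-positioning step; the first matching case wins here."""
    if x >= 0 and x >= abs(y):
        return "front"
    if y >= 0 and y >= abs(x):
        return "arm_right"
    if x <= 0 and abs(x) > abs(y):
        return "back"
    return "arm_left"

def pan_offset(x: float, y: float) -> float:
    """Yaw in radians that points the pan link's x-axis at a target given in that link's frame."""
    return float(np.arctan2(y, x))

# Example: a target 1 m ahead and 0.4 m to the left of the shoulder link, current pan at 0.3 rad.
x, y = 1.0, 0.4
delta = pan_offset(x, y)                       # about 0.38 rad, roughly 22 degrees to the left
new_pan = 0.3 + delta                          # the modules add the offset to the current joint state
print(coarse_configuration(x, y), round(np.degrees(delta), 1))  # front 21.8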