From f5f96fcb1e7eed2a39e0e912b7e4715d5418e8d7 Mon Sep 17 00:00:00 2001 From: Leon Okrusch Date: Fri, 12 Jan 2024 15:58:51 +0100 Subject: [PATCH 01/12] feat(145): distance of objects complete --- code/agent/config/dev_objects.json | 14 ++-- code/agent/src/agent/agent.py | 6 +- code/perception/launch/perception.launch | 6 +- code/perception/src/lidar_distance.py | 90 ++++++++++++++++-------- code/perception/src/vision_node.py | 65 ++++++++++++++++- 5 files changed, 136 insertions(+), 45 deletions(-) diff --git a/code/agent/config/dev_objects.json b/code/agent/config/dev_objects.json index 90b24361..e6fbdeba 100644 --- a/code/agent/config/dev_objects.json +++ b/code/agent/config/dev_objects.json @@ -104,23 +104,23 @@ "type": "sensor.lidar.ray_cast", "id": "LIDAR", "spawn_point": { - "x": 0.7, - "y": 0.4, - "z": 1.60, + "x": 0.0, + "y": 0.0, + "z": 1.70, "roll": 0.0, "pitch": 0.0, "yaw": 0.0 }, - "range": 85, - "rotation_frequency": 10, + "range": 50, + "rotation_frequency": 15, "channels": 64, "upper_fov": 10, "lower_fov": -30, - "points_per_second": 600000, + "points_per_second": 1000000, "atmosphere_attenuation_rate": 0.004, "dropoff_general_rate": 0.45, "dropoff_intensity_limit": 0.8, - "dropoff_zero_intensity": 0.4 + "dropoff_zero_intensity": 0.2 }, { "type": "sensor.other.radar", diff --git a/code/agent/src/agent/agent.py b/code/agent/src/agent/agent.py index ad8d076c..2076bef3 100755 --- a/code/agent/src/agent/agent.py +++ b/code/agent/src/agent/agent.py @@ -23,10 +23,10 @@ def get_ros_entrypoint(self): def sensors(self): sensors = [ {'type': 'sensor.camera.rgb', 'id': 'Center', - 'x': 0.7, 'y': 0.0, 'z': 1.60, 'roll': 0.0, 'pitch': 0.0, - 'yaw': 0.0, 'width': 300, 'height': 200, 'fov': 100}, + 'x': 0.0, 'y': 0.0, 'z': 1.70, 'roll': 0.0, 'pitch': 0.0, + 'yaw': 0.0, 'width': 1280, 'height': 720, 'fov': 100}, {'type': 'sensor.lidar.ray_cast', 'id': 'LIDAR', - 'x': 0.7, 'y': 0.0, 'z': 1.60, 'roll': 0.0, 'pitch': 0.0, + 'x': 0.0, 'y': 0.0, 'z': 1.70, 'roll': 0.0, 'pitch': 0.0, 'yaw': 0.0}, {'type': 'sensor.other.radar', 'id': 'RADAR', 'x': 0.7, 'y': -0.4, 'z': 1.60, 'roll': 0.0, 'pitch': 0.0, diff --git a/code/perception/launch/perception.launch b/code/perception/launch/perception.launch index be3c7d27..53c1c74b 100644 --- a/code/perception/launch/perception.launch +++ b/code/perception/launch/perception.launch @@ -54,7 +54,7 @@ - deeplabv3_resnet101 - yolov8x-seg --> - + @@ -66,8 +66,8 @@ - - + + diff --git a/code/perception/src/lidar_distance.py b/code/perception/src/lidar_distance.py index 1228c607..a8c4b35b 100755 --- a/code/perception/src/lidar_distance.py +++ b/code/perception/src/lidar_distance.py @@ -4,17 +4,16 @@ import numpy as np import lidar_filter_utility from sensor_msgs.msg import PointCloud2 - +# from perception.msg import MinDistance from sklearn.cluster import DBSCAN from sklearn.preprocessing import StandardScaler import matplotlib.pyplot as plt -from perception.msg import MinDistance # from mpl_toolkits.mplot3d import Axes3D # from itertools import combinations import cv2 from sensor_msgs.msg import Image as ImageMsg from cv_bridge import CvBridge -from matplotlib.colors import LinearSegmentedColormap +# from matplotlib.colors import LinearSegmentedColormap class LidarDistance(): @@ -41,10 +40,9 @@ def callback(self, data): coordinates, max_x=rospy.get_param('~max_x', np.inf), min_x=rospy.get_param('~min_x', -np.inf), - max_y=rospy.get_param('~max_y', np.inf), - min_y=rospy.get_param('~min_y', -np.inf), - max_z=rospy.get_param('~max_z', np.inf), 
min_z=rospy.get_param('~min_z', -np.inf), + min_y=rospy.get_param('~min_y', -np.inf), + max_y=rospy.get_param('~max_y', np.inf), ) reconstruct_bit_mask = lidar_filter_utility.bounding_box( @@ -81,15 +79,19 @@ def callback(self, data): ) # handle minimum distance - plot = self.plot_blob(min_dist_coordinates_xyz) - img_msg = self.bridge.cv2_to_imgmsg(plot, - encoding="passthrough") - img_msg.header = data.header - self.min_dist_img_publisher.publish(img_msg) + if min_dist_coordinates_xyz.shape[0] > 0: + plot = self.plot_blob(min_dist_coordinates_xyz) + img_msg = self.bridge.cv2_to_imgmsg(plot, + encoding="passthrough") + img_msg.header = data.header + self.min_dist_img_publisher.publish(img_msg) + # else: + # self.pub_min_dist.publish(np.inf) # handle reconstruction of lidar points rainbow_cloud = self.reconstruct_img_from_lidar( reconstruct_coordinates_xyz) + img_msg = self.bridge.cv2_to_imgmsg(rainbow_cloud, encoding="passthrough") img_msg.header = data.header @@ -107,25 +109,28 @@ def listener(self): '~point_cloud_topic', '/carla/hero/' + rospy.get_namespace() + '_filtered' ), - PointCloud2 + PointCloud2, + queue_size=10 ) # publisher for the closest blob in the lidar point cloud - self.pub_min_dist = rospy.Publisher( + """self.pub_min_dist = rospy.Publisher( rospy.get_param( '~range_topic', '/paf/hero/Center/min_distance' ), - MinDistance + MinDistance, + queue_size=10 ) - + """ # publisher for reconstructed lidar image self.rainbow_publisher = rospy.Publisher( rospy.get_param( '~image_distance_topic', '/paf/hero/Center/rainbow_image' ), - ImageMsg + ImageMsg, + queue_size=10 ) # publisher for 3d blob graph @@ -134,7 +139,8 @@ def listener(self): '~image_distance_topic', '/paf/hero/Center/min_dist_image' ), - ImageMsg + ImageMsg, + queue_size=10 ) rospy.Subscriber(rospy.get_param('~source_topic', "/carla/hero/LIDAR"), @@ -145,19 +151,43 @@ def listener(self): def plot_blob(self, xyz_coords): # creates a 3d graph thazt highlights blobs of points xyz_coords_standardized = StandardScaler().fit_transform(xyz_coords) - pairwise_distances_x = np.abs(np.subtract.outer(xyz_coords[:, 0], - xyz_coords[:, 0])) - min_x_distance = np.min(pairwise_distances_x[pairwise_distances_x > 0]) - self.pub_min_dist.publish(min_x_distance) + # pairwise_distances_x = np.abs(np.subtract.outer(xyz_coords[:, 0], + # xyz_coords[:, 0])) + + # publish minimum distance + # min_x_distance = np.min(p + # airwise_distances_x[pairwise_distances_x > 0]) + eps = 0.2 min_samples = 5 dbscan = DBSCAN(eps=eps, min_samples=min_samples) labels = dbscan.fit_predict(xyz_coords_standardized) + min_distances_within_clusters = [] + + # Iterate through each cluster + for label in set(labels): + if label != -1: # Ignore noise points + cluster_points = xyz_coords[labels == label] + pairwise_distances = np.linalg.norm( + cluster_points - + np.mean(cluster_points, axis=0), axis=1) + min_distance_within_cluster = np.min( + pairwise_distances) + min_distances_within_clusters.append( + min_distance_within_cluster) + + # Find the overall minimum distance within clusters + # min_distance_within_clusters = np.min(min_distances_within_clusters) + + # Publish the minimum distance within clusters + # self.pub_min_dist.publish(min_distance_within_clusters) + fig = plt.figure() ax = fig.add_subplot(111, projection='3d') for label in set(labels): + # print(label) if label == -1: ax.scatter(xyz_coords[labels == label][:, 0], xyz_coords[labels == label][:, 1], @@ -186,9 +216,9 @@ def reconstruct_img_from_lidar(self, coordinates_xyz): # intrinsic matrix 
for camera:
         # width -> 1280, height -> 720, fov -> 100 (agent.py)
         im = np.identity(3)
-        im[0, 2] = 300 / 2.0
-        im[1, 2] = 200 / 2.0
-        im[0, 0] = im[1, 1] = 300 / (2.0 * np.tan(100 * np.pi / 360.0))
+        im[0, 2] = 1280 / 2.0
+        im[1, 2] = 720 / 2.0
+        im[0, 0] = im[1, 1] = 1280 / (2.0 * np.tan(100 * np.pi / 360.0))
 
         # extrinsic matrix for camera
         ex = np.zeros(shape=(3, 4))
@@ -198,16 +228,16 @@ def reconstruct_img_from_lidar(self, coordinates_xyz):
         m = np.matmul(im, ex)
 
         # reconstruct camera image with LIDAR-Data
-        img = np.zeros(shape=(200, 300), dtype=np.uint8)
+        img = np.zeros(shape=(720, 1280), dtype=np.float32)
         for c in coordinates_xyz:
             point = np.array([c[1], c[2], c[0], 1])
             pixel = np.matmul(m, point)
             x, y = int(pixel[0]/pixel[2]), int(pixel[1]/pixel[2])
-            if x >= 0 and x <= 300 and y >= 0 and y <= 200:
-                img[199-y][299-x] = 255 - c[0]
+            if x >= 0 and x <= 1280 and y >= 0 and y <= 720:
+                img[719-y][1279-x] = c[0]
 
         # Rainbow color mapping to highlight distances
-        colors = [(0, 0, 0)] + [(1, 0, 0), (1, 1, 0),
+        """colors = [(0, 0, 0)] + [(1, 0, 0), (1, 1, 0),
                                 (0, 1, 0), (0, 1, 1), (0, 0, 1)]
         cmap_name = 'rainbow'
@@ -216,9 +246,9 @@ def reconstruct_img_from_lidar(self, coordinates_xyz):
                                 N=256)
         img_colored = (rainbow_cmap(img / np.max(img)) * 255).astype(np.uint8)
-        img_bgr = cv2.cvtColor(img_colored, cv2.COLOR_RGBA2BGR)
+        img_bgr = cv2.cvtColor(img_colored, cv2.COLOR_RGBA2BGR)"""
 
-        return img_bgr
+        return img
 
 
 if __name__ == '__main__':

diff --git a/code/perception/src/vision_node.py b/code/perception/src/vision_node.py
index 06dab72a..e21d3656 100755
--- a/code/perception/src/vision_node.py
+++ b/code/perception/src/vision_node.py
@@ -14,11 +14,12 @@
 import cv2
 from rospy.numpy_msg import numpy_msg
 from sensor_msgs.msg import Image as ImageMsg
-from std_msgs.msg import Header
+from std_msgs.msg import Header, Float32MultiArray
 from cv_bridge import CvBridge
 from torchvision.utils import draw_bounding_boxes, draw_segmentation_masks
 import numpy as np
 from ultralytics import NAS, YOLO, RTDETR, SAM, FastSAM
+
 
 """
 VisionNode:
@@ -81,10 +82,13 @@ def __init__(self, name, **kwargs):
         self.side = self.get_param("side", "Center")
         self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+        self.depth_images = []
 
         # publish / subscribe setup
         self.setup_camera_subscriptions()
+        self.setup_rainbow_subscription()
         self.setup_camera_publishers()
+        self.setup_object_distance_publishers()
 
         self.image_msg_header = Header()
         self.image_msg_header.frame_id = "segmented_image_frame"
@@ -94,6 +98,7 @@ def __init__(self, name, **kwargs):
         self.weights = model_info[1]
         self.type = model_info[2]
         self.framework = model_info[3]
+        self.save = True
         print("Vision Node Configuration:")
         print("Device -> ", self.device)
         print(f"Model -> {self.get_param('model')},")
@@ -120,6 +125,14 @@ def setup_camera_subscriptions(self):
             qos_profile=1
         )
 
+    def setup_rainbow_subscription(self):
+        self.new_subscription(
+            msg_type=numpy_msg(ImageMsg),
+            callback=self.handle_rainbow_image,
+            topic='/paf/hero/Center/rainbow_image',
+            qos_profile=1
+        )
+
     def setup_camera_publishers(self):
         self.publisher = self.new_publisher(
             msg_type=numpy_msg(ImageMsg),
             topic=f"/paf/{self.role_name}/{self.side}/segmented_image",
             qos_profile=1
         )
 
+    def setup_object_distance_publishers(self):
+        self.distance_publisher = self.new_publisher(
+            msg_type=numpy_msg(Float32MultiArray),
+            topic=f"/paf/{self.role_name}/{self.side}/object_distance",
+            qos_profile=1
+        )
+
     def handle_camera_image(self, image):
         # free up cuda memory
         if self.device == "cuda":
@@ -146,6
+166,24 @@ def handle_camera_image(self, image): pass + def handle_rainbow_image(self, image): + cv_image = self.bridge.imgmsg_to_cv2(img_msg=image, + desired_encoding='passthrough') + # cv_image = cv2.cvtColor(cv_image, cv2.COLOR_RGB2BGR) + self.depth_images.append(cv_image) + if len(self.depth_images) > 1: + self.depth_images.pop(0) + """if self.save: + for i in range(len(self.depth_images)): + cv2.imshow(f"{i}", self.depth_images[i]) + + self.save = False + cv2.waitKey(0) # Wait for any key press + cv2.destroyAllWindows()""" + + else: + self.logerr("Depth-Fiel build up! No distances available yet!") + def predict_torch(self, image): self.model.eval() cv_image = self.bridge.imgmsg_to_cv2(img_msg=image, @@ -175,7 +213,30 @@ def predict_ultralytics(self, image): cv_image = cv2.cvtColor(cv_image, cv2.COLOR_RGB2BGR) output = self.model(cv_image) - + distance_output = [] + for r in output: + boxes = r.boxes + for box in boxes: + cls = box.cls.item() + pixels = box.xyxy[0] + if len(self.depth_images) > 0: + distances = np.asarray( + [self.depth_images[i][int(pixels[1]):int(pixels[3]):1, + int(pixels[0]):int(pixels[2]):1] + for i in range(len(self.depth_images))]) + non_zero_filter = distances[distances != 0] + + if len(non_zero_filter) > 0: + obj_dist = np.min(non_zero_filter) + else: + obj_dist = np.inf + + distance_output.append([cls, obj_dist]) + + # print(distance_output) + # self.logerr(distance_output) + self.distance_publisher.publish( + Float32MultiArray(data=distance_output)) return output[0].plot() def create_mask(self, input_image, model_output): From acc6896b83c305180d2d37ae08278c85e2287f86 Mon Sep 17 00:00:00 2001 From: Leon Okrusch Date: Fri, 12 Jan 2024 18:10:10 +0100 Subject: [PATCH 02/12] feat(145): visualization of distance --- code/perception/launch/perception.launch | 36 ++++++++--------- code/perception/src/lidar_distance.py | 45 +++++++++------------ code/perception/src/lidar_filter_utility.py | 4 ++ code/perception/src/vision_node.py | 31 ++++++++++++-- 4 files changed, 68 insertions(+), 48 deletions(-) diff --git a/code/perception/launch/perception.launch b/code/perception/launch/perception.launch index 53c1c74b..a85d012a 100644 --- a/code/perception/launch/perception.launch +++ b/code/perception/launch/perception.launch @@ -34,25 +34,25 @@ - + Image-Segmentation: + - deeplabv3_resnet101 + - yolov8x-seg --> diff --git a/code/perception/src/lidar_distance.py b/code/perception/src/lidar_distance.py index a8c4b35b..6a1caa4e 100755 --- a/code/perception/src/lidar_distance.py +++ b/code/perception/src/lidar_distance.py @@ -4,7 +4,7 @@ import numpy as np import lidar_filter_utility from sensor_msgs.msg import PointCloud2 -# from perception.msg import MinDistance +from std_msgs.msg import Float32 from sklearn.cluster import DBSCAN from sklearn.preprocessing import StandardScaler import matplotlib.pyplot as plt @@ -38,11 +38,11 @@ def callback(self, data): # ndarray-made-up-of-numpy-void-numbers min_dist_bit_mask = lidar_filter_utility.bounding_box( coordinates, - max_x=rospy.get_param('~max_x', np.inf), - min_x=rospy.get_param('~min_x', -np.inf), + max_x=50., + min_x=2., min_z=rospy.get_param('~min_z', -np.inf), - min_y=rospy.get_param('~min_y', -np.inf), - max_y=rospy.get_param('~max_y', np.inf), + min_y=-2.5, + max_y=2.5, ) reconstruct_bit_mask = lidar_filter_utility.bounding_box( @@ -85,8 +85,8 @@ def callback(self, data): encoding="passthrough") img_msg.header = data.header self.min_dist_img_publisher.publish(img_msg) - # else: - # self.pub_min_dist.publish(np.inf) + else: 
+ self.pub_min_dist.publish(np.inf) # handle reconstruction of lidar points rainbow_cloud = self.reconstruct_img_from_lidar( @@ -114,15 +114,15 @@ def listener(self): ) # publisher for the closest blob in the lidar point cloud - """self.pub_min_dist = rospy.Publisher( + self.pub_min_dist = rospy.Publisher( rospy.get_param( '~range_topic', '/paf/hero/Center/min_distance' ), - MinDistance, + Float32, queue_size=10 ) - """ + # publisher for reconstructed lidar image self.rainbow_publisher = rospy.Publisher( rospy.get_param( @@ -151,37 +151,28 @@ def listener(self): def plot_blob(self, xyz_coords): # creates a 3d graph thazt highlights blobs of points xyz_coords_standardized = StandardScaler().fit_transform(xyz_coords) - # pairwise_distances_x = np.abs(np.subtract.outer(xyz_coords[:, 0], - # xyz_coords[:, 0])) + """pairwise_distances_x = np.abs(np.subtract.outer(xyz_coords[:, 0], + xyz_coords[:, 0]))""" # publish minimum distance - # min_x_distance = np.min(p - # airwise_distances_x[pairwise_distances_x > 0]) - eps = 0.2 min_samples = 5 dbscan = DBSCAN(eps=eps, min_samples=min_samples) labels = dbscan.fit_predict(xyz_coords_standardized) - min_distances_within_clusters = [] + min_distance_cluster = [] # Iterate through each cluster for label in set(labels): if label != -1: # Ignore noise points cluster_points = xyz_coords[labels == label] - pairwise_distances = np.linalg.norm( - cluster_points - - np.mean(cluster_points, axis=0), axis=1) - min_distance_within_cluster = np.min( - pairwise_distances) - min_distances_within_clusters.append( - min_distance_within_cluster) - - # Find the overall minimum distance within clusters - # min_distance_within_clusters = np.min(min_distances_within_clusters) + min_distance_cluster.append(np.min(cluster_points[:, 0])) # Publish the minimum distance within clusters - # self.pub_min_dist.publish(min_distance_within_clusters) + if len(min_distance_cluster) > 0: + self.pub_min_dist.publish(min(min_distance_cluster)) + else: + self.pub_min_dist.publish(np.inf) fig = plt.figure() ax = fig.add_subplot(111, projection='3d') diff --git a/code/perception/src/lidar_filter_utility.py b/code/perception/src/lidar_filter_utility.py index 4cd50260..5871ba35 100755 --- a/code/perception/src/lidar_filter_utility.py +++ b/code/perception/src/lidar_filter_utility.py @@ -31,6 +31,10 @@ def bounding_box(points, min_x=-np.inf, max_x=np.inf, min_y=-np.inf, """ + """print(min_x, max_x, "X") + print(min_y, max_y, "Y") + print(min_z, max_z, "Z")""" + bound_x = np.logical_and(points['x'] > min_x, points['x'] < max_x) bound_y = np.logical_and(points['y'] > min_y, points['y'] < max_y) bound_z = np.logical_and(points['z'] > min_z, points['z'] < max_z) diff --git a/code/perception/src/vision_node.py b/code/perception/src/vision_node.py index e21d3656..2d9ea6ae 100755 --- a/code/perception/src/vision_node.py +++ b/code/perception/src/vision_node.py @@ -214,6 +214,8 @@ def predict_ultralytics(self, image): output = self.model(cv_image) distance_output = [] + c_boxes = [] + c_labels = [] for r in output: boxes = r.boxes for box in boxes: @@ -231,13 +233,34 @@ def predict_ultralytics(self, image): else: obj_dist = np.inf + c_boxes.append(torch.tensor(pixels)) + c_labels.append(f"Class: {cls}, Meters: {obj_dist}") distance_output.append([cls, obj_dist]) # print(distance_output) # self.logerr(distance_output) self.distance_publisher.publish( Float32MultiArray(data=distance_output)) - return output[0].plot() + + transposed_image = np.transpose(cv_image, (2, 0, 1)) + image_np_with_detections = 
torch.tensor(transposed_image,
+                                                 dtype=torch.uint8)
+
+        c_boxes = torch.stack(c_boxes)
+        print(image_np_with_detections.shape, c_boxes.shape, c_labels)
+        box = draw_bounding_boxes(image_np_with_detections,
+                                  c_boxes,
+                                  c_labels,
+                                  colors='blue',
+                                  width=3,
+                                  font_size=12)
+        # print(box.shape)
+        np_box_img = np.transpose(box.detach().numpy(),
+                                  (1, 2, 0))
+        box_img = cv2.cvtColor(np_box_img, cv2.COLOR_BGR2RGB)
+        return box_img
+
+        # return output[0].plot()
 
     def create_mask(self, input_image, model_output):
         output_predictions = torch.argmax(model_output, dim=0)
@@ -268,8 +291,10 @@ def apply_bounding_boxes(self, input_image, model_output):
         box = draw_bounding_boxes(image_np_with_detections,
                                   boxes,
                                   labels,
-                                  colors='red',
-                                  width=2)
+                                  colors='blue',
+                                  width=3,
+                                  font_size=24)
+
         np_box_img = np.transpose(box.detach().numpy(),
                                   (1, 2, 0))
         box_img = cv2.cvtColor(np_box_img, cv2.COLOR_BGR2RGB)

From 6e57d225d5bb21eae0acf2eabe762307f98b8a1f Mon Sep 17 00:00:00 2001
From: okrusch <102369315+okrusch@users.noreply.github.com>
Date: Sat, 13 Jan 2024 08:23:17 +0100
Subject: [PATCH 03/12] Add files via upload

---
 doc/00_assets/2_15_layover.png | Bin 0 -> 14721 bytes
 1 file changed, 0 insertions(+), 0 deletions(-)
 create mode 100644 doc/00_assets/2_15_layover.png

diff --git a/doc/00_assets/2_15_layover.png b/doc/00_assets/2_15_layover.png
new file mode 100644
index 0000000000000000000000000000000000000000..d28807844a4b147ce3ccf39b823c60deb37b8dbd
GIT binary patch
literal 14721
[... base85-encoded PNG payload omitted ...]

literal 0
HcmV?d00001
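Downstream consumers of the `object_distance` topic introduced in the patches above could read the published pairs roughly like the sketch below. This is not part of the series: it assumes the default `hero`/`Center` names, and it assumes the `[class, distance]` pairs arrive flattened — `Float32MultiArray.data` is a flat float32 sequence, so the nested lists built in `predict_ultralytics` would need an `np.ravel` (or similar) before publishing.

```python
import rospy
from std_msgs.msg import Float32MultiArray


def on_object_distance(msg):
    # msg.data is a flat sequence: [cls0, dist0, cls1, dist1, ...]
    data = list(msg.data)
    pairs = [(data[i], data[i + 1]) for i in range(0, len(data) - 1, 2)]
    for cls, dist in pairs:
        rospy.loginfo("class %.0f at %.2f m", cls, dist)


if __name__ == '__main__':
    rospy.init_node('object_distance_listener')
    rospy.Subscriber('/paf/hero/Center/object_distance',
                     Float32MultiArray, on_object_distance, queue_size=1)
    rospy.spin()
```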

From 8ba38c51a507d5019d08d5700aba8863e245caef Mon Sep 17 00:00:00 2001
From: okrusch <102369315+okrusch@users.noreply.github.com>
Date: Sat, 13 Jan 2024 08:58:36 +0100
Subject: [PATCH 04/12] Update 12_distance_to_objects.md

---
 doc/06_perception/12_distance_to_objects.md | 65 +++++++++++++++------
 1 file changed, 48 insertions(+), 17 deletions(-)

diff --git a/doc/06_perception/12_distance_to_objects.md b/doc/06_perception/12_distance_to_objects.md
index 59863390..721e9eeb 100644
--- a/doc/06_perception/12_distance_to_objects.md
+++ b/doc/06_perception/12_distance_to_objects.md
@@ -39,14 +39,14 @@ The goal is to calculate the projection of point P and find its Pixel-Coordinates
 To do this you need a couple of things:
 
 1. Camera-Calibration
-   1. Width
-   2. Height
-   3. Field-of-View
+    1. Width
+    2. Height
+    3. Field-of-View
 2. LIDAR-Sensor-Position
-   1. Origin of 3D-World-Coordinates
+    1. Origin of 3D-World-Coordinates
 3. Camera-Sensor
-   1. Position
-   2. Orientation
+    1. Position
+    2. Orientation
 
 The formula for this projection proposed by the literature looks like this:
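The formula image itself is not reproduced in this patch series. As a reference sketch (not the author's figure), the pinhole projection that `reconstruct_img_from_lidar` implements in patch 01 above, with image width w, height h and field of view fov, is:

```latex
% homogeneous pixel coordinates (u, v) of a world point (X, Y, Z)
\begin{pmatrix} u \\ v \\ 1 \end{pmatrix}
\sim
\underbrace{\begin{pmatrix} f & 0 & w/2 \\ 0 & f & h/2 \\ 0 & 0 & 1 \end{pmatrix}}_{\text{intrinsic matrix}}
\;
\underbrace{\left[\, R \mid t \,\right]}_{\text{extrinsic matrix}}
\begin{pmatrix} X \\ Y \\ Z \\ 1 \end{pmatrix},
\qquad
f = \frac{w}{2\,\tan\!\left(\frac{\mathrm{fov}}{2}\right)}
```

With the extrinsic matrix reduced to the identity (co-located sensors, as described below), this matches the code: `im[0, 2] = w / 2`, `im[1, 2] = h / 2`, and `im[0, 0] = im[1, 1] = w / (2 * tan(fov * pi / 360))`.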
@@ -55,11 +55,11 @@ To get the camera-intrinsic matrix we need the width, height and fov of the image produced by the camera.
 Luckily we can easily get these values from the sensor configuration (agent.py).
-In our case we use the following configuration: Width: 300, Height: 200, FOV: 100
+In our case we use the following configuration: Width: 1280, Height: 720, FOV: 100
-The Intrinsic Matrix is calculated within the code using these values and a piece of code from Pylot (insert link)
+The Intrinsic Matrix is calculated within the code using these values and a piece of code from Pylot.
-Next up we need the extrinsic Camera-Matrix. We can set this Matrix to the Identity-Matrix, if both LIDAR and Camera are in the exact same position (e.g. (0, 0, 0)) in the world.
+Next up we need the extrinsic Camera-Matrix. We can set this Matrix to the Identity-Matrix, if both LIDAR and Camera are in the exact same position (e.g. (0, 0, 0)) in the world.
 
 ### Purpose
 
@@ -72,16 +72,47 @@ You can imagine putting both images on top of each other.
 
 ### Implementation
 
-All that is left now is the calculation of the Pixel-Coordinates for every LIDAR-Point in front of the vehicle and some nice Visualization.
+To reconstruct the depth image, we simply implement the above formulas using numpy. In order to get better results we had to adjust the LIDAR-Sensor setup. Check the topic below for more details.
 
-LIDAR-Points that are not within the field of view of the camera would be projected to (u, v)-Points that don't match the Camera-Image
-(for example: (-100, 23))
+The resulting image takes the distance in meters as the value of each pixel. It is therefore a grayscale image.
 
-To visualize the reconstructed Image we create a cv2 Image where the RGB-Values are mapped to a rainbow-color scheme according to the distance at each pixel.
+![Grayscale Depth Image](../00_assets/3d_2d_formula.png)
 
-insert image
+In the next step we want to get the distance for every bounding box the object-detection found.
 
-## Next Steps
+We want to return a list of tuples, each containing a class and a distance, like this:
 
-- Combine Object-Detection with LIDAR-Reconstruction to get a depth image
-- Provide List of Objects with distance in a publisher
+[
+  [class_id, distance_meters],
+  [12.0, 3.456],
+  ...,
+  [2.0, np.inf],
+  [2.0, 7.890],
+]
+
+Since we can't be certain we will find a Lidar-Point in the depth image for every pixel in the bounding box of the original image, 
+we will check for the minimum value within the bounding box in the depth image.
+
+This makes sense, since the LIDAR doesn't recognize points behind any object.
+
+If there is no distance found in the depth image, we will return infinity for this bounding box.
+
+## LIDAR-Configuration
+
+This topic came to our attention, as we realised that the LIDAR was flickering, as you can see in the following image series.
+
+![Grayscale Depth Image](../00_assets/3d_2d_formula.png)
+![Grayscale Depth Image](../00_assets/3d_2d_formula.png)
+![Grayscale Depth Image](../00_assets/3d_2d_formula.png)
+
+These are the Grayscale-Depth Images reconstructed within 600 milliseconds.
+
+We can see that we usually only see one half of the LIDAR-Points.
+
+The simple reason for this is a lower spin rate in the LIDAR-Configuration.
+
+By adjusting the spin rate we can achieve a fully reconstructed image. However, a higher spin rate results in a lower resolution of LIDAR-Points.
+
+We now use a slightly higher spin rate that consistently produces a full depth image, but receives only slightly fewer LIDAR-Points.
+
+Doing this, we noticed that the LIDAR-Sensor wasn't positioned correctly.
+For some reason it was offset to the front left.
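As a concrete illustration of the minimum-distance lookup described above, here is a minimal numpy sketch (the helper name is hypothetical; `depth_image` stands for the reconstructed grayscale image, `box` for the xyxy pixel coordinates of one detection, mirroring the logic added to `vision_node.py` in patch 01):

```python
import numpy as np


def min_distance_in_box(depth_image, box):
    """Smallest non-zero depth inside a bounding box, or np.inf.

    depth_image: 2D float array; pixel value = distance in meters
                 (0 means no LIDAR point was projected onto that pixel)
    box: (x_min, y_min, x_max, y_max) pixel coordinates of a detection
    """
    x_min, y_min, x_max, y_max = (int(v) for v in box)
    patch = depth_image[y_min:y_max, x_min:x_max]
    lidar_hits = patch[patch != 0]  # ignore pixels without a LIDAR hit
    return float(np.min(lidar_hits)) if lidar_hits.size > 0 else np.inf
```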

From c9d4c845e8b3797469ea0e53e94ca14ee9473645 Mon Sep 17 00:00:00 2001
From: okrusch <102369315+okrusch@users.noreply.github.com>
Date: Sat, 13 Jan 2024 09:01:03 +0100
Subject: [PATCH 05/12] Add files via upload

---
 doc/00_assets/2_layover.png | Bin 0 -> 11037 bytes
 doc/00_assets/3_layover.png | Bin 0 -> 15383 bytes
 doc/00_assets/4_layover.png | Bin 0 -> 11080 bytes
 3 files changed, 0 insertions(+), 0 deletions(-)
 create mode 100644 doc/00_assets/2_layover.png
 create mode 100644 doc/00_assets/3_layover.png
 create mode 100644 doc/00_assets/4_layover.png

diff --git a/doc/00_assets/2_layover.png b/doc/00_assets/2_layover.png
new file mode 100644
index 0000000000000000000000000000000000000000..462c87af9b6a7f64e3e9d231af743b4010b1011d
GIT binary patch
literal 11037
[... base85-encoded PNG payload omitted ...]

literal 0
HcmV?d00001

diff --git a/doc/00_assets/3_layover.png b/doc/00_assets/3_layover.png
new file mode 100644
index 0000000000000000000000000000000000000000..bb1cf8376e09ec3ff978303aa8d1a76d974210f4
GIT binary patch
literal 15383
[... base85-encoded PNG payload omitted ...]

literal 0
HcmV?d00001

diff --git a/doc/00_assets/4_layover.png b/doc/00_assets/4_layover.png
new file mode 100644
index 0000000000000000000000000000000000000000..ff8bd3d2b80569997c282e748e74adb0d388441f
GIT binary patch
literal 11080
[... base85-encoded PNG payload omitted ...]

literal 0
HcmV?d00001

From 594caee671f2220cad96919dc4099208a1079141 Mon Sep 17 00:00:00 2001
From: okrusch <102369315+okrusch@users.noreply.github.com>
Date: Sat, 13 Jan 2024 09:02:17 +0100
Subject: [PATCH 06/12] Update 12_distance_to_objects.md

---
 doc/06_perception/12_distance_to_objects.md | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/doc/06_perception/12_distance_to_objects.md b/doc/06_perception/12_distance_to_objects.md
index 721e9eeb..e909fede 100644
--- a/doc/06_perception/12_distance_to_objects.md
+++ b/doc/06_perception/12_distance_to_objects.md
@@ -76,7 +76,7 @@ To reconstruct the depth image, we simply implement the above formulas using numpy. In order to get better results we had to adjust the LIDAR-Sensor setup. Check the topic below for more details.
 
 The resulting image takes the distance in meters as the value of each pixel. It is therefore a grayscale image.
 
-![Grayscale Depth Image](../00_assets/3d_2d_formula.png)
+![Grayscale Depth Image](../00_assets/2_15_layover.png)
 
 In the next step we want to get the distance for every bounding box the object-detection found.
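The flickering addressed in the next hunk was traced back to the LIDAR spin rate. For reference, these are the LIDAR settings the series converged on, copied together from the values patch 01 already sets in `code/agent/config/dev_objects.json` (higher `rotation_frequency`, shorter `range`, more `points_per_second`):

```json
{
  "type": "sensor.lidar.ray_cast",
  "id": "LIDAR",
  "spawn_point": {
    "x": 0.0, "y": 0.0, "z": 1.70,
    "roll": 0.0, "pitch": 0.0, "yaw": 0.0
  },
  "range": 50,
  "rotation_frequency": 15,
  "channels": 64,
  "upper_fov": 10,
  "lower_fov": -30,
  "points_per_second": 1000000,
  "atmosphere_attenuation_rate": 0.004,
  "dropoff_general_rate": 0.45,
  "dropoff_intensity_limit": 0.8,
  "dropoff_zero_intensity": 0.2
}
```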
-![Grayscale Depth Image](../00_assets/3d_2d_formula.png) -![Grayscale Depth Image](../00_assets/3d_2d_formula.png) -![Grayscale Depth Image](../00_assets/3d_2d_formula.png) +![Grayscale Depth Image](../00_assets/2_layover.png) +![Grayscale Depth Image](../00_assets/3_layover.png) +![Grayscale Depth Image](../00_assets/4_layover.png) These are the Grayscale-Depth Images reconstructed within 600 milliseconds. From d6a66096793f11458187cd3632774dd45213d0ec Mon Sep 17 00:00:00 2001 From: okrusch <102369315+okrusch@users.noreply.github.com> Date: Mon, 15 Jan 2024 14:31:57 +0100 Subject: [PATCH 07/12] Update 12_distance_to_objects.md --- doc/06_perception/12_distance_to_objects.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/06_perception/12_distance_to_objects.md b/doc/06_perception/12_distance_to_objects.md index e909fede..0d6f1825 100644 --- a/doc/06_perception/12_distance_to_objects.md +++ b/doc/06_perception/12_distance_to_objects.md @@ -90,7 +90,7 @@ We want to return a list of tuple containing a class and a distance, like this: [2.0, 7.890], ] -Since we cant be certain we will find a Lidar-Point in the depth image for every Pixel in the bounding box of the original image, +Since we cant be certain we will find a Lidar-Point in the depth image for every Pixel in the bounding box of the original image, we will check for the minimum value within the bounding box in the depth image. This makes sense, since the LIDAR doesn´t recognize points behind any object. From 706d01870c6fba9310d9f7cab20f52599008ba6c Mon Sep 17 00:00:00 2001 From: Leon Okrusch Date: Wed, 24 Jan 2024 13:59:10 +0100 Subject: [PATCH 08/12] feat(145): resolve merge conflict --- code/perception/src/vision_node.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/code/perception/src/vision_node.py b/code/perception/src/vision_node.py index 820c4f69..c62212c1 100755 --- a/code/perception/src/vision_node.py +++ b/code/perception/src/vision_node.py @@ -145,6 +145,8 @@ def setup_object_distance_publishers(self): self.distance_publisher = self.new_publisher( msg_type=numpy_msg(Float32MultiArray), topic=f"/paf/{self.role_name}/{self.side}/object_distance", + qos_profile=1) + def setup_traffic_light_publishers(self): self.traffic_light_publisher = self.new_publisher( msg_type=numpy_msg(ImageMsg), @@ -215,7 +217,7 @@ def predict_ultralytics(self, image): cv_image = self.bridge.imgmsg_to_cv2(img_msg=image, desired_encoding='passthrough') cv_image = cv2.cvtColor(cv_image, cv2.COLOR_RGB2BGR) - + output = self.model(cv_image, half=True, verbose=False) distance_output = [] c_boxes = [] @@ -253,7 +255,6 @@ def predict_ultralytics(self, image): if 9 in output[0].boxes.cls: self.process_traffic_lights(output[0], cv_image, image.header) - c_boxes = torch.stack(c_boxes) print(image_np_with_detections.shape, c_boxes.shape, c_labels) box = draw_bounding_boxes(image_np_with_detections, From 9a2bd19ae48348ab29be5b265a954dda4eda8cc1 Mon Sep 17 00:00:00 2001 From: Leon Okrusch Date: Wed, 24 Jan 2024 16:55:07 +0100 Subject: [PATCH 09/12] feat(169): distance of objects publishes x,y,z --- code/perception/src/lidar_distance.py | 56 ++++++++++++++------- code/perception/src/vision_node.py | 72 ++++++++++++++++++++++----- 2 files changed, 96 insertions(+), 32 deletions(-) diff --git a/code/perception/src/lidar_distance.py b/code/perception/src/lidar_distance.py index 6a1caa4e..207de7d0 100755 --- a/code/perception/src/lidar_distance.py +++ b/code/perception/src/lidar_distance.py @@ -36,14 +36,14 @@ def 
callback(self, data): # https://stackoverflow.com/questions/44295375/how-to-slice-a-numpy- # ndarray-made-up-of-numpy-void-numbers - min_dist_bit_mask = lidar_filter_utility.bounding_box( + """min_dist_bit_mask = lidar_filter_utility.bounding_box( coordinates, max_x=50., min_x=2., min_z=rospy.get_param('~min_z', -np.inf), min_y=-2.5, max_y=2.5, - ) + )""" reconstruct_bit_mask = lidar_filter_utility.bounding_box( coordinates, @@ -53,24 +53,24 @@ def callback(self, data): ) # Filter coordinates based in generated bit_mask - min_dist_coordinates = coordinates[min_dist_bit_mask] + # min_dist_coordinates = coordinates[min_dist_bit_mask] reconstruct_coordinates = coordinates[reconstruct_bit_mask] # Create pointcloud from manipulated data - coordinates_manipulated = ros_numpy \ + """coordinates_manipulated = ros_numpy \ .point_cloud2.array_to_pointcloud2(min_dist_coordinates) - coordinates_manipulated.header = data.header + coordinates_manipulated.header = data.header""" # Publish manipulated pointCloud2 - self.pub_pointcloud.publish(coordinates_manipulated) + # self.pub_pointcloud.publish(coordinates_manipulated) # https://stackoverflow.com/questions/1401712/how-can-the-euclidean- # distance-be-calculated-with-numpy - min_dist_coordinates_xyz = np.array( + """min_dist_coordinates_xyz = np.array( lidar_filter_utility.remove_field_name(min_dist_coordinates, 'intensity') .tolist() - ) + )""" reconstruct_coordinates_xyz = np.array( lidar_filter_utility.remove_field_name(reconstruct_coordinates, @@ -79,23 +79,29 @@ def callback(self, data): ) # handle minimum distance - if min_dist_coordinates_xyz.shape[0] > 0: + """if min_dist_coordinates_xyz.shape[0] > 0: plot = self.plot_blob(min_dist_coordinates_xyz) img_msg = self.bridge.cv2_to_imgmsg(plot, encoding="passthrough") img_msg.header = data.header self.min_dist_img_publisher.publish(img_msg) else: - self.pub_min_dist.publish(np.inf) + self.pub_min_dist.publish(np.inf)""" # handle reconstruction of lidar points - rainbow_cloud = self.reconstruct_img_from_lidar( + dist_array = self.reconstruct_img_from_lidar( reconstruct_coordinates_xyz) - img_msg = self.bridge.cv2_to_imgmsg(rainbow_cloud, - encoding="passthrough") - img_msg.header = data.header - self.rainbow_publisher.publish(img_msg) + # img_msg = self.bridge.cv2_to_imgmsg(rainbow_cloud, + # encoding="passthrough") + # img_msg.header = data.header + # self.rainbow_publisher.publish(img_msg) + + dist_array_msg = \ + self.bridge.cv2_to_imgmsg(dist_array, + encoding="passthrough") + dist_array_msg.header = data.header + self.dist_array_publisher.publish(dist_array_msg) def listener(self): """ Initializes the node and it's publishers @@ -133,14 +139,24 @@ def listener(self): queue_size=10 ) - # publisher for 3d blob graph - self.min_dist_img_publisher = rospy.Publisher( + # publisher for 3d blob graph (Deprecated) + """self.min_dist_img_publisher = rospy.Publisher( rospy.get_param( '~image_distance_topic', '/paf/hero/Center/min_dist_image' ), ImageMsg, queue_size=10 + )""" + + # publisher for dist_array + self.dist_array_publisher = rospy.Publisher( + rospy.get_param( + '~image_distance_topic', + '/paf/hero/Center/dist_array' + ), + ImageMsg, + queue_size=10 ) rospy.Subscriber(rospy.get_param('~source_topic', "/carla/hero/LIDAR"), @@ -220,12 +236,15 @@ def reconstruct_img_from_lidar(self, coordinates_xyz): # reconstruct camera image with LIDAR-Data img = np.zeros(shape=(720, 1280), dtype=np.float32) + dist_array = np.zeros(shape=(720, 1280, 3), dtype=np.float32) for c in coordinates_xyz: point = 
np.array([c[1], c[2], c[0], 1]) pixel = np.matmul(m, point) x, y = int(pixel[0]/pixel[2]), int(pixel[1]/pixel[2]) if x >= 0 and x <= 1280 and y >= 0 and y <= 720: img[719-y][1279-x] = c[0] + dist_array[719-y][1279-x] = \ + np.array([c[0], c[1], c[2]], dtype=np.float32) # Rainbox color mapping to highlight distances """colors = [(0, 0, 0)] + [(1, 0, 0), (1, 1, 0), @@ -238,8 +257,7 @@ def reconstruct_img_from_lidar(self, coordinates_xyz): img_colored = (rainbow_cmap(img / np.max(img)) * 255).astype(np.uint8) img_bgr = cv2.cvtColor(img_colored, cv2.COLOR_RGBA2BGR)""" - - return img + return dist_array if __name__ == '__main__': diff --git a/code/perception/src/vision_node.py b/code/perception/src/vision_node.py index c62212c1..acbfd2ea 100755 --- a/code/perception/src/vision_node.py +++ b/code/perception/src/vision_node.py @@ -83,10 +83,12 @@ def __init__(self, name, **kwargs): self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu") self.depth_images = [] + self.dist_arrays = [] # publish / subscribe setup self.setup_camera_subscriptions() - self.setup_rainbow_subscription() + # self.setup_rainbow_subscription() + self.setup_dist_array_subscription() self.setup_camera_publishers() self.setup_object_distance_publishers() self.setup_traffic_light_publishers() @@ -134,6 +136,14 @@ def setup_rainbow_subscription(self): qos_profile=1 ) + def setup_dist_array_subscription(self): + self.new_subscription( + msg_type=numpy_msg(ImageMsg), + callback=self.handle_dist_array, + topic='/paf/hero/Center/dist_array', + qos_profile=1 + ) + def setup_camera_publishers(self): self.publisher = self.new_publisher( msg_type=numpy_msg(ImageMsg), @@ -190,6 +200,25 @@ def handle_rainbow_image(self, image): else: self.logerr("Depth-Fiel build up! No distances available yet!") + def handle_dist_array(self, dist_array): + dist_array = \ + self.bridge.imgmsg_to_cv2(img_msg=dist_array, + desired_encoding='passthrough') + print(dist_array.shape) + self.dist_arrays.append(dist_array) + # TODO: include buffer parameter into launch file + if len(self.depth_images) > 1: + self.dist_arrays.pop(0) + """if self.save: + for i in range(len(self.depth_images)): + cv2.imshow(f"{i}", self.depth_images[i]) + + self.save = False + cv2.waitKey(0) # Wait for any key press + cv2.destroyAllWindows()""" + else: + self.logerr("Depth-Fiel build up! 
No distances available yet!") + def predict_torch(self, image): self.model.eval() cv_image = self.bridge.imgmsg_to_cv2(img_msg=image, @@ -219,6 +248,8 @@ def predict_ultralytics(self, image): cv_image = cv2.cvtColor(cv_image, cv2.COLOR_RGB2BGR) output = self.model(cv_image, half=True, verbose=False) + + # handle distance of objects distance_output = [] c_boxes = [] c_labels = [] @@ -227,26 +258,41 @@ def predict_ultralytics(self, image): for box in boxes: cls = box.cls.item() pixels = box.xyxy[0] - if len(self.depth_images) > 0: + if len(self.dist_arrays) > 0: distances = np.asarray( - [self.depth_images[i][int(pixels[1]):int(pixels[3]):1, - int(pixels[0]):int(pixels[2]):1] - for i in range(len(self.depth_images))]) - non_zero_filter = distances[distances != 0] - + [self.dist_arrays[i] + [int(pixels[1]):int(pixels[3]):1, + int(pixels[0]):int(pixels[2]):1, ::] + for i in range(len(self.dist_arrays))]) + condition = distances[:, :, :, 0] != 0 + non_zero_filter = distances[condition] if len(non_zero_filter) > 0: - obj_dist = np.min(non_zero_filter) + obj_dist_index = np.argmin(non_zero_filter[:, 0]) + obj_dist = non_zero_filter[obj_dist_index] + abs_distance = np.sqrt( + obj_dist[0]**2 + + obj_dist[1]**2 + + obj_dist[2]**2) else: - obj_dist = np.inf + obj_dist = (np.inf, np.inf, np.inf) + abs_distance = np.inf c_boxes.append(torch.tensor(pixels)) - c_labels.append(f"Class: {cls}, Meters: {obj_dist}") - distance_output.append([cls, obj_dist]) + c_labels.append(f"Class: {cls}," + f"Meters: {round(abs_distance, 2)}," + f"({round(obj_dist[0], 2)}," + f"{round(obj_dist[1], 2)}," + f"{round(obj_dist[2], 2)})") + distance_output.append([cls, + abs_distance, + obj_dist[0], + obj_dist[1], + obj_dist[2]]) # print(distance_output) # self.logerr(distance_output) self.distance_publisher.publish( - Float32MultiArray(data=distance_output)) + Float32MultiArray(data=distance_output)) transposed_image = np.transpose(cv_image, (2, 0, 1)) image_np_with_detections = torch.tensor(transposed_image, @@ -256,7 +302,7 @@ def predict_ultralytics(self, image): self.process_traffic_lights(output[0], cv_image, image.header) c_boxes = torch.stack(c_boxes) - print(image_np_with_detections.shape, c_boxes.shape, c_labels) + # print(image_np_with_detections.shape, c_boxes.shape, c_labels) box = draw_bounding_boxes(image_np_with_detections, c_boxes, c_labels, From f2c3d8cf5e3aa7bc31743f90d2e2d0a8454c622e Mon Sep 17 00:00:00 2001 From: Leon Okrusch Date: Thu, 25 Jan 2024 10:14:11 +0100 Subject: [PATCH 10/12] feat(169): x,y,z distance publisher --- code/perception/launch/perception.launch | 4 +-- code/perception/src/vision_node.py | 39 ++++++--------------- doc/06_perception/12_distance_to_objects.md | 10 +++--- 3 files changed, 18 insertions(+), 35 deletions(-) diff --git a/code/perception/launch/perception.launch b/code/perception/launch/perception.launch index 3c463823..9711ae1b 100644 --- a/code/perception/launch/perception.launch +++ b/code/perception/launch/perception.launch @@ -86,7 +86,7 @@ - + diff --git a/code/perception/src/vision_node.py b/code/perception/src/vision_node.py index acbfd2ea..632b5059 100755 --- a/code/perception/src/vision_node.py +++ b/code/perception/src/vision_node.py @@ -83,7 +83,7 @@ def __init__(self, name, **kwargs): self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu") self.depth_images = [] - self.dist_arrays = [] + self.dist_arrays = None # publish / subscribe setup self.setup_camera_subscriptions() @@ -204,20 +204,7 @@ def handle_dist_array(self, dist_array): dist_array = 
\ self.bridge.imgmsg_to_cv2(img_msg=dist_array, desired_encoding='passthrough') - print(dist_array.shape) - self.dist_arrays.append(dist_array) - # TODO: include buffer parameter into launch file - if len(self.depth_images) > 1: - self.dist_arrays.pop(0) - """if self.save: - for i in range(len(self.depth_images)): - cv2.imshow(f"{i}", self.depth_images[i]) - - self.save = False - cv2.waitKey(0) # Wait for any key press - cv2.destroyAllWindows()""" - else: - self.logerr("Depth-Fiel build up! No distances available yet!") + self.dist_arrays = dist_array def predict_torch(self, image): self.model.eval() @@ -258,13 +245,12 @@ def predict_ultralytics(self, image): for box in boxes: cls = box.cls.item() pixels = box.xyxy[0] - if len(self.dist_arrays) > 0: + if self.dist_arrays is not None: distances = np.asarray( - [self.dist_arrays[i] - [int(pixels[1]):int(pixels[3]):1, - int(pixels[0]):int(pixels[2]):1, ::] - for i in range(len(self.dist_arrays))]) - condition = distances[:, :, :, 0] != 0 + self.dist_arrays[int(pixels[1]):int(pixels[3]):1, + int(pixels[0]):int(pixels[2]):1, + ::]) + condition = distances[:, :, 0] != 0 non_zero_filter = distances[condition] if len(non_zero_filter) > 0: obj_dist_index = np.argmin(non_zero_filter[:, 0]) @@ -280,17 +266,16 @@ def predict_ultralytics(self, image): c_boxes.append(torch.tensor(pixels)) c_labels.append(f"Class: {cls}," f"Meters: {round(abs_distance, 2)}," - f"({round(obj_dist[0], 2)}," - f"{round(obj_dist[1], 2)}," - f"{round(obj_dist[2], 2)})") + f"({round(float(obj_dist[0]), 2)}," + f"{round(float(obj_dist[1]), 2)}," + f"{round(float(obj_dist[2]), 2)})") distance_output.append([cls, abs_distance, obj_dist[0], obj_dist[1], obj_dist[2]]) - # print(distance_output) - # self.logerr(distance_output) + print(distance_output) self.distance_publisher.publish( Float32MultiArray(data=distance_output)) @@ -309,12 +294,10 @@ def predict_ultralytics(self, image): colors='blue', width=3, font_size=12) - # print(box.shape) np_box_img = np.transpose(box.detach().numpy(), (1, 2, 0)) box_img = cv2.cvtColor(np_box_img, cv2.COLOR_BGR2RGB) return box_img - # return output[0].plot() def process_traffic_lights(self, prediction, cv_image, image_header): diff --git a/doc/06_perception/12_distance_to_objects.md b/doc/06_perception/12_distance_to_objects.md index 0d6f1825..82568f0f 100644 --- a/doc/06_perception/12_distance_to_objects.md +++ b/doc/06_perception/12_distance_to_objects.md @@ -80,14 +80,14 @@ The resulting Image takes the distance in meters as values for its pixels. It th In the next step we want to get the distance for every bounding box the object-detection found. 
-We want to return a list of tuple containing a class and a distance, like this: +We want to return a list of tuple containing a class, an absolut distance and X, Y, Z coordinates of the objcet, like this: [ - [class_id, distance_meters], - [12.0, 3.456], + [class_id, abs_distance, X, Y, Z], + [12.0, 7.970812491638262, 5.6549606, -5.5982423, -0.4636328], + [12.0, 8.684970384807999, 5.6547265, 6.57918, -0.40886718], ..., - [2.0, np.inf], - [2.0, 7.890], + [2.0, 1.3798048392074562, 1.065586, -0.60292965, -0.63628906] ] Since we cant be certain we will find a Lidar-Point in the depth image for every Pixel in the bounding box of the original image, From 7c53ad82578ac4220e55b88da22b8c3a70b0c540 Mon Sep 17 00:00:00 2001 From: Leon Okrusch Date: Thu, 25 Jan 2024 23:20:15 +0100 Subject: [PATCH 11/12] feat(170): distance of objects for multiple cameras --- code/agent/config/dev_objects.json | 4 +- code/agent/launch/agent.launch | 1 - code/agent/src/agent/agent.py | 41 +++- code/perception/launch/perception.launch | 19 +- code/perception/src/lidar_distance.py | 273 +++++++++++------------ code/perception/src/vision_node.py | 68 +++++- 6 files changed, 228 insertions(+), 178 deletions(-) diff --git a/code/agent/config/dev_objects.json b/code/agent/config/dev_objects.json index e6fbdeba..c1d5e69b 100644 --- a/code/agent/config/dev_objects.json +++ b/code/agent/config/dev_objects.json @@ -45,7 +45,7 @@ }, { "type": "sensor.camera.rgb", - "id": "Left", + "id": "Right", "spawn_point": { "x": 0.0, "y": 0.0, @@ -64,7 +64,7 @@ }, { "type": "sensor.camera.rgb", - "id": "Right", + "id": "Left", "spawn_point": { "x": 0.0, "y": 0.0, diff --git a/code/agent/launch/agent.launch b/code/agent/launch/agent.launch index d076596d..fe19e7de 100644 --- a/code/agent/launch/agent.launch +++ b/code/agent/launch/agent.launch @@ -21,6 +21,5 @@ - diff --git a/code/agent/src/agent/agent.py b/code/agent/src/agent/agent.py index 2076bef3..72e1c106 100755 --- a/code/agent/src/agent/agent.py +++ b/code/agent/src/agent/agent.py @@ -1,5 +1,6 @@ from leaderboard.autoagents.ros1_agent import ROS1Agent from leaderboard.autoagents.autonomous_agent import Track +import math def get_entry_point(): @@ -22,12 +23,40 @@ def get_ros_entrypoint(self): def sensors(self): sensors = [ - {'type': 'sensor.camera.rgb', 'id': 'Center', - 'x': 0.0, 'y': 0.0, 'z': 1.70, 'roll': 0.0, 'pitch': 0.0, - 'yaw': 0.0, 'width': 1280, 'height': 720, 'fov': 100}, - {'type': 'sensor.lidar.ray_cast', 'id': 'LIDAR', - 'x': 0.0, 'y': 0.0, 'z': 1.70, 'roll': 0.0, 'pitch': 0.0, - 'yaw': 0.0}, + { + 'type': 'sensor.camera.rgb', + 'id': 'Center', + 'x': 0.0, 'y': 0.0, 'z': 1.70, + 'roll': 0.0, 'pitch': 0.0, 'yaw': 0.0, + 'width': 1280, 'height': 720, 'fov': 100 + }, + { + 'type': 'sensor.camera.rgb', + 'id': 'Back', + 'x': 0.0, 'y': 0.0, 'z': 1.70, + 'roll': 0.0, 'pitch': 0.0, 'yaw': math.radians(180.0), + 'width': 1280, 'height': 720, 'fov': 100 + }, + { + 'type': 'sensor.camera.rgb', + 'id': 'Left', + 'x': 0.0, 'y': 0.0, 'z': 1.70, + 'roll': 0.0, 'pitch': 0.0, 'yaw': math.radians(-90.0), + 'width': 1280, 'height': 720, 'fov': 100 + }, + { + 'type': 'sensor.camera.rgb', + 'id': 'Right', + 'x': 0.0, 'y': 0.0, 'z': 1.70, + 'roll': 0.0, 'pitch': 0.0, 'yaw': math.radians(90.0), + 'width': 1280, 'height': 720, 'fov': 100 + }, + { + 'type': 'sensor.lidar.ray_cast', + 'id': 'LIDAR', + 'x': 0.0, 'y': 0.0, 'z': 1.70, + 'roll': 0.0, 'pitch': 0.0, 'yaw': 0.0 + }, {'type': 'sensor.other.radar', 'id': 'RADAR', 'x': 0.7, 'y': -0.4, 'z': 1.60, 'roll': 0.0, 'pitch': 0.0, 'yaw': -45.0, 
'horizontal_fov': 30, 'vertical_fov': 30}, diff --git a/code/perception/launch/perception.launch b/code/perception/launch/perception.launch index 9711ae1b..47713eb3 100644 --- a/code/perception/launch/perception.launch +++ b/code/perception/launch/perception.launch @@ -38,9 +38,12 @@ - - + - yolov8x-seg --> + @@ -70,18 +73,12 @@ - - - - - - diff --git a/code/perception/src/lidar_distance.py b/code/perception/src/lidar_distance.py index 207de7d0..ff2d40df 100755 --- a/code/perception/src/lidar_distance.py +++ b/code/perception/src/lidar_distance.py @@ -4,13 +4,8 @@ import numpy as np import lidar_filter_utility from sensor_msgs.msg import PointCloud2 -from std_msgs.msg import Float32 -from sklearn.cluster import DBSCAN -from sklearn.preprocessing import StandardScaler -import matplotlib.pyplot as plt # from mpl_toolkits.mplot3d import Axes3D # from itertools import combinations -import cv2 from sensor_msgs.msg import Image as ImageMsg from cv_bridge import CvBridge # from matplotlib.colors import LinearSegmentedColormap @@ -34,74 +29,94 @@ def callback(self, data): """ coordinates = ros_numpy.point_cloud2.pointcloud2_to_array(data) - # https://stackoverflow.com/questions/44295375/how-to-slice-a-numpy- - # ndarray-made-up-of-numpy-void-numbers - """min_dist_bit_mask = lidar_filter_utility.bounding_box( + # Center + reconstruct_bit_mask_center = lidar_filter_utility.bounding_box( coordinates, - max_x=50., - min_x=2., - min_z=rospy.get_param('~min_z', -np.inf), - min_y=-2.5, - max_y=2.5, - )""" + max_x=np.inf, + min_x=0., + min_z=-1.6, + ) + reconstruct_coordinates_center = \ + coordinates[reconstruct_bit_mask_center] + reconstruct_coordinates_xyz_center = np.array( + lidar_filter_utility.remove_field_name( + reconstruct_coordinates_center, + 'intensity') + .tolist() + ) + dist_array_center = self.reconstruct_img_from_lidar( + reconstruct_coordinates_xyz_center, focus="Center") + dist_array_center_msg = \ + self.bridge.cv2_to_imgmsg(dist_array_center, + encoding="passthrough") + dist_array_center_msg.header = data.header + self.dist_array_center_publisher.publish(dist_array_center_msg) - reconstruct_bit_mask = lidar_filter_utility.bounding_box( + # Back + reconstruct_bit_mask_back = lidar_filter_utility.bounding_box( coordinates, - max_x=rospy.get_param('~max_x', np.inf), - min_x=rospy.get_param('~min_x', -np.inf), - min_z=rospy.get_param('~min_z', -np.inf), + max_x=0.0, + min_x=-np.inf, + min_z=-1.6, ) - - # Filter coordinates based in generated bit_mask - # min_dist_coordinates = coordinates[min_dist_bit_mask] - reconstruct_coordinates = coordinates[reconstruct_bit_mask] - - # Create pointcloud from manipulated data - """coordinates_manipulated = ros_numpy \ - .point_cloud2.array_to_pointcloud2(min_dist_coordinates) - coordinates_manipulated.header = data.header""" - - # Publish manipulated pointCloud2 - # self.pub_pointcloud.publish(coordinates_manipulated) - - # https://stackoverflow.com/questions/1401712/how-can-the-euclidean- - # distance-be-calculated-with-numpy - """min_dist_coordinates_xyz = np.array( - lidar_filter_utility.remove_field_name(min_dist_coordinates, - 'intensity') + reconstruct_coordinates_back = coordinates[reconstruct_bit_mask_back] + reconstruct_coordinates_xyz_back = np.array( + lidar_filter_utility.remove_field_name( + reconstruct_coordinates_back, + 'intensity') .tolist() - )""" + ) + dist_array_back = self.reconstruct_img_from_lidar( + reconstruct_coordinates_xyz_back, focus="Back") + dist_array_back_msg = \ + self.bridge.cv2_to_imgmsg(dist_array_back, + 
encoding="passthrough") + dist_array_back_msg.header = data.header + self.dist_array_back_publisher.publish(dist_array_back_msg) - reconstruct_coordinates_xyz = np.array( - lidar_filter_utility.remove_field_name(reconstruct_coordinates, - 'intensity') + # Left + reconstruct_bit_mask_left = lidar_filter_utility.bounding_box( + coordinates, + max_y=np.inf, + min_y=0.0, + min_z=-1.6, + ) + reconstruct_coordinates_left = coordinates[reconstruct_bit_mask_left] + reconstruct_coordinates_xyz_left = np.array( + lidar_filter_utility.remove_field_name( + reconstruct_coordinates_left, + 'intensity') .tolist() ) + dist_array_left = self.reconstruct_img_from_lidar( + reconstruct_coordinates_xyz_left, focus="Left") + dist_array_left_msg = \ + self.bridge.cv2_to_imgmsg(dist_array_left, + encoding="passthrough") + dist_array_left_msg.header = data.header + self.dist_array_left_publisher.publish(dist_array_left_msg) - # handle minimum distance - """if min_dist_coordinates_xyz.shape[0] > 0: - plot = self.plot_blob(min_dist_coordinates_xyz) - img_msg = self.bridge.cv2_to_imgmsg(plot, - encoding="passthrough") - img_msg.header = data.header - self.min_dist_img_publisher.publish(img_msg) - else: - self.pub_min_dist.publish(np.inf)""" - - # handle reconstruction of lidar points - dist_array = self.reconstruct_img_from_lidar( - reconstruct_coordinates_xyz) - - # img_msg = self.bridge.cv2_to_imgmsg(rainbow_cloud, - # encoding="passthrough") - # img_msg.header = data.header - # self.rainbow_publisher.publish(img_msg) - - dist_array_msg = \ - self.bridge.cv2_to_imgmsg(dist_array, + # Right + reconstruct_bit_mask_right = lidar_filter_utility.bounding_box( + coordinates, + max_y=-0.0, + min_y=-np.inf, + min_z=-1.6 + ) + reconstruct_coordinates_right = coordinates[reconstruct_bit_mask_right] + reconstruct_coordinates_xyz_right = np.array( + lidar_filter_utility.remove_field_name( + reconstruct_coordinates_right, + 'intensity') + .tolist() + ) + dist_array_right = self.reconstruct_img_from_lidar( + reconstruct_coordinates_xyz_right, focus="Right") + dist_array_right_msg = \ + self.bridge.cv2_to_imgmsg(dist_array_right, encoding="passthrough") - dist_array_msg.header = data.header - self.dist_array_publisher.publish(dist_array_msg) + dist_array_right_msg.header = data.header + self.dist_array_right_publisher.publish(dist_array_right_msg) def listener(self): """ Initializes the node and it's publishers @@ -119,41 +134,41 @@ def listener(self): queue_size=10 ) - # publisher for the closest blob in the lidar point cloud - self.pub_min_dist = rospy.Publisher( + # publisher for dist_array + self.dist_array_center_publisher = rospy.Publisher( rospy.get_param( - '~range_topic', - '/paf/hero/Center/min_distance' + '~image_distance_topic', + '/paf/hero/Center/dist_array' ), - Float32, + ImageMsg, queue_size=10 ) - # publisher for reconstructed lidar image - self.rainbow_publisher = rospy.Publisher( + # publisher for dist_array + self.dist_array_back_publisher = rospy.Publisher( rospy.get_param( '~image_distance_topic', - '/paf/hero/Center/rainbow_image' + '/paf/hero/Back/dist_array' ), ImageMsg, queue_size=10 ) - # publisher for 3d blob graph (Deprecated) - """self.min_dist_img_publisher = rospy.Publisher( + # publisher for dist_array + self.dist_array_left_publisher = rospy.Publisher( rospy.get_param( '~image_distance_topic', - '/paf/hero/Center/min_dist_image' + '/paf/hero/Left/dist_array' ), ImageMsg, queue_size=10 - )""" + ) # publisher for dist_array - self.dist_array_publisher = rospy.Publisher( + 
self.dist_array_right_publisher = rospy.Publisher( rospy.get_param( '~image_distance_topic', - '/paf/hero/Center/dist_array' + '/paf/hero/Right/dist_array' ), ImageMsg, queue_size=10 @@ -164,59 +179,7 @@ def listener(self): rospy.spin() - def plot_blob(self, xyz_coords): - # creates a 3d graph thazt highlights blobs of points - xyz_coords_standardized = StandardScaler().fit_transform(xyz_coords) - """pairwise_distances_x = np.abs(np.subtract.outer(xyz_coords[:, 0], - xyz_coords[:, 0]))""" - - # publish minimum distance - eps = 0.2 - min_samples = 5 - dbscan = DBSCAN(eps=eps, min_samples=min_samples) - labels = dbscan.fit_predict(xyz_coords_standardized) - - min_distance_cluster = [] - - # Iterate through each cluster - for label in set(labels): - if label != -1: # Ignore noise points - cluster_points = xyz_coords[labels == label] - min_distance_cluster.append(np.min(cluster_points[:, 0])) - - # Publish the minimum distance within clusters - if len(min_distance_cluster) > 0: - self.pub_min_dist.publish(min(min_distance_cluster)) - else: - self.pub_min_dist.publish(np.inf) - - fig = plt.figure() - ax = fig.add_subplot(111, projection='3d') - - for label in set(labels): - # print(label) - if label == -1: - ax.scatter(xyz_coords[labels == label][:, 0], - xyz_coords[labels == label][:, 1], - xyz_coords[labels == label][:, 2], - c='gray', marker='o', label='Noise') - else: - ax.scatter(xyz_coords[labels == label][:, 0], - xyz_coords[labels == label][:, 1], - xyz_coords[labels == label][:, 2], - label=f'Cluster {label + 1}', s=50) - - ax.set_xlabel('X') - ax.set_ylabel('Y') - ax.set_zlabel('Z') - - fig.canvas.draw() - img = np.fromstring(fig.canvas.tostring_rgb(), dtype=np.uint8, sep='') - img = img.reshape(fig.canvas.get_width_height()[::-1] + (3,)) - img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR) - return img - - def reconstruct_img_from_lidar(self, coordinates_xyz): + def reconstruct_img_from_lidar(self, coordinates_xyz, focus): # reconstruct 3d LIDAR-Data and calculate 2D Pixel # according to Camera-World @@ -238,25 +201,39 @@ def reconstruct_img_from_lidar(self, coordinates_xyz): img = np.zeros(shape=(720, 1280), dtype=np.float32) dist_array = np.zeros(shape=(720, 1280, 3), dtype=np.float32) for c in coordinates_xyz: - point = np.array([c[1], c[2], c[0], 1]) - pixel = np.matmul(m, point) - x, y = int(pixel[0]/pixel[2]), int(pixel[1]/pixel[2]) - if x >= 0 and x <= 1280 and y >= 0 and y <= 720: - img[719-y][1279-x] = c[0] - dist_array[719-y][1279-x] = \ - np.array([c[0], c[1], c[2]], dtype=np.float32) - - # Rainbox color mapping to highlight distances - """colors = [(0, 0, 0)] + [(1, 0, 0), (1, 1, 0), - (0, 1, 0), (0, 1, 1), - (0, 0, 1)] - cmap_name = 'rainbow' - rainbow_cmap = LinearSegmentedColormap.from_list(cmap_name, - colors, - N=256) + if focus == "Center": + point = np.array([c[1], c[2], c[0], 1]) + pixel = np.matmul(m, point) + x, y = int(pixel[0]/pixel[2]), int(pixel[1]/pixel[2]) + if x >= 0 and x <= 1280 and y >= 0 and y <= 720: + img[719-y][1279-x] = c[0] + dist_array[719-y][1279-x] = \ + np.array([c[0], c[1], c[2]], dtype=np.float32) + if focus == "Back": + point = np.array([c[1], c[2], c[0], 1]) + pixel = np.matmul(m, point) + x, y = int(pixel[0]/pixel[2]), int(pixel[1]/pixel[2]) + if x >= 0 and x <= 1280 and y >= 0 and y < 720: + img[y][1279-x] = -c[0] + dist_array[y][1279-x] = \ + np.array([-c[0], c[1], c[2]], dtype=np.float32) + if focus == "Left": + point = np.array([c[0], c[2], c[1], 1]) + pixel = np.matmul(m, point) + x, y = int(pixel[0]/pixel[2]), int(pixel[1]/pixel[2]) + if x 
>= 0 and x <= 1280 and y >= 0 and y <= 720: + img[719-y][1279-x] = c[1] + dist_array[y][1279-x] = \ + np.array([c[0], c[1], c[2]], dtype=np.float32) + if focus == "Right": + point = np.array([c[0], c[2], c[1], 1]) + pixel = np.matmul(m, point) + x, y = int(pixel[0]/pixel[2]), int(pixel[1]/pixel[2]) + if x >= 0 and x < 1280 and y >= 0 and y < 720: + img[y][1279-x] = -c[1] + dist_array[y][1279-x] = \ + np.array([c[0], c[1], c[2]], dtype=np.float32) - img_colored = (rainbow_cmap(img / np.max(img)) * 255).astype(np.uint8) - img_bgr = cv2.cvtColor(img_colored, cv2.COLOR_RGBA2BGR)""" return dist_array diff --git a/code/perception/src/vision_node.py b/code/perception/src/vision_node.py index 632b5059..a4ceea97 100755 --- a/code/perception/src/vision_node.py +++ b/code/perception/src/vision_node.py @@ -19,6 +19,7 @@ from torchvision.utils import draw_bounding_boxes, draw_segmentation_masks import numpy as np from ultralytics import NAS, YOLO, RTDETR, SAM, FastSAM +import rospy """ VisionNode: @@ -80,13 +81,26 @@ def __init__(self, name, **kwargs): self.bridge = CvBridge() self.role_name = self.get_param("role_name", "hero") self.side = self.get_param("side", "Center") + self.center = self.get_param("center") + self.back = self.get_param("back") + self.left = self.get_param("left") + self.right = self.get_param("right") + self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu") self.depth_images = [] self.dist_arrays = None # publish / subscribe setup - self.setup_camera_subscriptions() + if self.center: + self.setup_camera_subscriptions("Center") + if self.back: + self.setup_camera_subscriptions("Back") + if self.left: + self.setup_camera_subscriptions("Left") + if self.right: + self.setup_camera_subscriptions("Right") + # self.setup_rainbow_subscription() self.setup_dist_array_subscription() self.setup_camera_publishers() @@ -120,13 +134,14 @@ def __init__(self, name, **kwargs): # tensorflow setup - def setup_camera_subscriptions(self): + def setup_camera_subscriptions(self, side): self.new_subscription( msg_type=numpy_msg(ImageMsg), callback=self.handle_camera_image, - topic=f"/carla/{self.role_name}/{self.side}/image", + topic=f"/carla/{self.role_name}/{side}/image", qos_profile=1 ) + print(f"Subscribed to Side: {side}") def setup_rainbow_subscription(self): self.new_subscription( @@ -145,11 +160,34 @@ def setup_dist_array_subscription(self): ) def setup_camera_publishers(self): - self.publisher = self.new_publisher( - msg_type=numpy_msg(ImageMsg), - topic=f"/paf/{self.role_name}/{self.side}/segmented_image", - qos_profile=1 - ) + if self.center: + self.publisher_center = self.new_publisher( + msg_type=numpy_msg(ImageMsg), + topic=f"/paf/{self.role_name}/Center/segmented_image", + qos_profile=1 + ) + print("Publisher to Center!") + if self.back: + self.publisher_back = self.new_publisher( + msg_type=numpy_msg(ImageMsg), + topic=f"/paf/{self.role_name}/Back/segmented_image", + qos_profile=1 + ) + print("Publisher to Back!") + if self.left: + self.publisher_left = self.new_publisher( + msg_type=numpy_msg(ImageMsg), + topic=f"/paf/{self.role_name}/Left/segmented_image", + qos_profile=1 + ) + print("Publisher to Left!") + if self.right: + self.publisher_right = self.new_publisher( + msg_type=numpy_msg(ImageMsg), + topic=f"/paf/{self.role_name}/Right/segmented_image", + qos_profile=1 + ) + print("Publisher to Right!") def setup_object_distance_publishers(self): self.distance_publisher = self.new_publisher( @@ -179,7 +217,17 @@ def handle_camera_image(self, image): img_msg = 
self.bridge.cv2_to_imgmsg(vision_result, encoding="rgb8") img_msg.header = image.header - self.publisher.publish(img_msg) + side = rospy.resolve_name(img_msg.header.frame_id).split('/')[2] + if side == "Center": + self.publisher_center.publish(img_msg) + if side == "Back": + self.publisher_back.publish(img_msg) + if side == "Left": + self.publisher_left.publish(img_msg) + if side == "Right": + self.publisher_right.publish(img_msg) + + print(f"Published Image on Side: {side}") pass def handle_rainbow_image(self, image): @@ -204,6 +252,7 @@ def handle_dist_array(self, dist_array): dist_array = \ self.bridge.imgmsg_to_cv2(img_msg=dist_array, desired_encoding='passthrough') + print("RECEIVED DIST") self.dist_arrays = dist_array def predict_torch(self, image): @@ -275,7 +324,6 @@ def predict_ultralytics(self, image): obj_dist[1], obj_dist[2]]) - print(distance_output) self.distance_publisher.publish( Float32MultiArray(data=distance_output)) From 1b838adcf7ce7e54d8952f443faf68ebf9719aee Mon Sep 17 00:00:00 2001 From: Leon Okrusch Date: Wed, 6 Mar 2024 13:44:33 +0100 Subject: [PATCH 12/12] feat(197): dimensions of objects --- code/perception/src/vision_node.py | 73 +++++++++++++++++++++--------- 1 file changed, 52 insertions(+), 21 deletions(-) diff --git a/code/perception/src/vision_node.py b/code/perception/src/vision_node.py index 98783d2e..7345d0c0 100755 --- a/code/perception/src/vision_node.py +++ b/code/perception/src/vision_node.py @@ -141,7 +141,7 @@ def setup_camera_subscriptions(self, side): topic=f"/carla/{self.role_name}/{side}/image", qos_profile=1 ) - print(f"Subscribed to Side: {side}") + # print(f"Subscribed to Side: {side}") def setup_rainbow_subscription(self): self.new_subscription( @@ -166,28 +166,28 @@ def setup_camera_publishers(self): topic=f"/paf/{self.role_name}/Center/segmented_image", qos_profile=1 ) - print("Publisher to Center!") + # print("Publisher to Center!") if self.back: self.publisher_back = self.new_publisher( msg_type=numpy_msg(ImageMsg), topic=f"/paf/{self.role_name}/Back/segmented_image", qos_profile=1 ) - print("Publisher to Back!") + # print("Publisher to Back!") if self.left: self.publisher_left = self.new_publisher( msg_type=numpy_msg(ImageMsg), topic=f"/paf/{self.role_name}/Left/segmented_image", qos_profile=1 ) - print("Publisher to Left!") + # print("Publisher to Left!") if self.right: self.publisher_right = self.new_publisher( msg_type=numpy_msg(ImageMsg), topic=f"/paf/{self.role_name}/Right/segmented_image", qos_profile=1 ) - print("Publisher to Right!") + # print("Publisher to Right!") def setup_object_distance_publishers(self): self.distance_publisher = self.new_publisher( @@ -227,7 +227,7 @@ def handle_camera_image(self, image): if side == "Right": self.publisher_right.publish(img_msg) - print(f"Published Image on Side: {side}") + # print(f"Published Image on Side: {side}") pass def handle_rainbow_image(self, image): @@ -252,7 +252,7 @@ def handle_dist_array(self, dist_array): dist_array = \ self.bridge.imgmsg_to_cv2(img_msg=dist_array, desired_encoding='passthrough') - print("RECEIVED DIST") + # print("RECEIVED DIST") self.dist_arrays = dist_array def predict_torch(self, image): @@ -284,7 +284,8 @@ def predict_ultralytics(self, image): cv_image = cv2.cvtColor(cv_image, cv2.COLOR_RGB2BGR) output = self.model(cv_image, half=True, verbose=False) - distance_output = [] + + """distance_output = [] c_boxes = [] c_labels = [] for r in output: @@ -292,6 +293,7 @@ def predict_ultralytics(self, image): for box in boxes: cls = box.cls.item() pixels = 
box.xyxy[0] + print(pixels) if len(self.depth_images) > 0: distances = np.asarray( [self.depth_images[i][int(pixels[1]):int(pixels[3]):1, @@ -315,7 +317,7 @@ def predict_ultralytics(self, image): transposed_image = np.transpose(cv_image, (2, 0, 1)) image_np_with_detections = torch.tensor(transposed_image, - dtype=torch.uint8) + dtype=torch.uint8)""" # handle distance of objects distance_output = [] @@ -333,28 +335,57 @@ def predict_ultralytics(self, image): ::]) condition = distances[:, :, 0] != 0 non_zero_filter = distances[condition] + distances_copy = distances.copy() + distances_copy[distances_copy == 0] = np.inf + if len(non_zero_filter) > 0: - obj_dist_index = np.argmin(non_zero_filter[:, 0]) - obj_dist = non_zero_filter[obj_dist_index] + sorted_indices = np.argsort(distances_copy[:, :, 0], + axis=None) + x1, y1 = np.unravel_index(sorted_indices[0], + distances_copy.shape[:2]) + x2, y2 = np.unravel_index(sorted_indices[1], + distances_copy.shape[:2]) + obj_dist1 = distances_copy[x1][y1].copy() + obj_dist2 = distances_copy[x2][y2].copy() + abs_distance = np.sqrt( - obj_dist[0]**2 + - obj_dist[1]**2 + - obj_dist[2]**2) + obj_dist1[0]**2 + + obj_dist1[1]**2 + + obj_dist1[2]**2) + + # create 2d glass plane at object + # with box dimension + scale_width = abs(obj_dist1[1] - obj_dist2[1])\ + / abs(y1-y2) + scale_height = abs(obj_dist1[2] - obj_dist2[2])\ + / abs(x1-x2) + width = distances_copy.shape[1] * scale_width + height = distances_copy.shape[0] * scale_height + + # upper left + ul_x = obj_dist1[0] + ul_y = obj_dist1[1] - (-y1 + scale_width) + ul_z = obj_dist1[2] - (-x1 + scale_height) + + # lower right + lr_x = obj_dist1[0] + lr_y = ul_y + width + lr_z = ul_z + height + else: - obj_dist = (np.inf, np.inf, np.inf) + obj_dist1 = (np.inf, np.inf, np.inf) abs_distance = np.inf c_boxes.append(torch.tensor(pixels)) c_labels.append(f"Class: {cls}," f"Meters: {round(abs_distance, 2)}," - f"({round(float(obj_dist[0]), 2)}," - f"{round(float(obj_dist[1]), 2)}," - f"{round(float(obj_dist[2]), 2)})") + f"({round(float(obj_dist1[0]), 2)}," + f"{round(float(obj_dist1[1]), 2)}," + f"{round(float(obj_dist1[2]), 2)})") distance_output.append([cls, abs_distance, - obj_dist[0], - obj_dist[1], - obj_dist[2]]) + ul_x, ul_y, ul_z, + lr_x, lr_y, lr_z]) self.distance_publisher.publish( Float32MultiArray(data=distance_output))
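
For orientation, the bounding-box distance lookup that patches 09 and 10 introduce (and that patch 12 extends to the two closest points to estimate object dimensions) boils down to a short numpy routine. The sketch below is illustrative only and not part of any patch above: the dist_array shape (720x1280x3, holding x/y/z in meters per pixel, zeros where no LiDAR point projects) follows the published topic, while the helper name and the sample data are invented for the example.

```python
import numpy as np


def box_distance(dist_array, pixels):
    """Closest LiDAR point inside a detection box (illustrative helper).

    dist_array: (720, 1280, 3) float32, x/y/z per pixel, zeros = no LiDAR hit.
    pixels: (x1, y1, x2, y2) box corners, as in box.xyxy[0].
    Returns (abs_distance, (x, y, z)).
    """
    x1, y1, x2, y2 = (int(p) for p in pixels)
    crop = dist_array[y1:y2, x1:x2, :]          # slice the box region, keep x/y/z
    points = crop[crop[:, :, 0] != 0]           # drop pixels without a LiDAR hit
    if points.shape[0] == 0:
        return np.inf, (np.inf, np.inf, np.inf)
    nearest = points[np.argmin(points[:, 0])]   # smallest forward (x) distance
    return float(np.linalg.norm(nearest)), tuple(nearest)


# toy example: one synthetic LiDAR hit 8 m ahead, inside the box
dist = np.zeros((720, 1280, 3), dtype=np.float32)
dist[400, 650] = (8.0, 0.5, -0.4)
print(box_distance(dist, (600, 350, 700, 450)))  # ~8.03 m, (8.0, 0.5, -0.4)
```

Taking the minimum over the forward (x) component mirrors the reasoning in the patched doc: the LiDAR cannot see behind the closest surface inside a box, so the nearest hit is taken as the object's distance.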
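
The projection that reconstruct_img_from_lidar() uses to place LiDAR points into the camera image deserves the same caveat. The committed intrinsic matrix is defined outside the hunks quoted above, so the sketch below rebuilds a plain pinhole intrinsic from the camera parameters the patches configure (1280x720, fov 100) and leaves out the per-camera image flips and bounding-box masks; it shows the idea, not the committed values.

```python
import numpy as np

WIDTH, HEIGHT, FOV_DEG = 1280, 720, 100.0
f = WIDTH / (2.0 * np.tan(np.radians(FOV_DEG) / 2.0))  # focal length in pixels
K = np.array([[f,   0.0, WIDTH / 2.0],
              [0.0, f,   HEIGHT / 2.0],
              [0.0, 0.0, 1.0]])                        # assumed pinhole intrinsic


def project(points_xyz):
    """Project LiDAR points (x forward, y left, z up) to pixel coordinates,
    mirroring the (y, z, x) reordering seen in reconstruct_img_from_lidar."""
    cam = points_xyz[:, [1, 2, 0]]       # reorder into the camera frame
    pix = cam @ K.T                      # apply the intrinsic matrix
    return pix[:, :2] / pix[:, 2:3]      # perspective divide


# a point 10 m straight ahead should land at the image center (640, 360)
print(project(np.array([[10.0, 0.0, 0.0]])))
```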