From a5644d4b8bf8cea993eb1065619dc8a1d6842084 Mon Sep 17 00:00:00 2001
From: Frank_ZhaodongJiang <69261064+FrankZhaodong@users.noreply.github.com>
Date: Wed, 30 Aug 2023 23:17:41 -0400
Subject: [PATCH 01/22] Add trail_detection_node package

Add the trail_detection_node package. The models are too large to commit,
so they are uploaded to Google Drive.
---
 trail_detection_node/package.xml              |  21 +
 .../resource/trail_detection_node             |   0
 trail_detection_node/setup.cfg                |   4 +
 trail_detection_node/setup.py                 |  28 ++
 trail_detection_node/test.py                  |   3 +
 trail_detection_node/test/test_copyright.py   |  25 +
 trail_detection_node/test/test_flake8.py      |  25 +
 trail_detection_node/test/test_pep257.py      |  23 +
 .../trail_detection_node/GANav_visualizer.py  | 412 +++++++++++++++++
 .../trail_detection_node/__init__.py          |   0
 .../GANav_visualizer.cpython-310.pyc          | Bin 0 -> 8527 bytes
 .../GANav_visualizer.cpython-37.pyc           | Bin 0 -> 8540 bytes
 .../__pycache__/__init__.cpython-310.pyc      | Bin 0 -> 173 bytes
 .../__pycache__/__init__.cpython-37.pyc       | Bin 0 -> 167 bytes
 .../__pycache__/model_loader.cpython-310.pyc  | Bin 0 -> 3390 bytes
 .../__pycache__/model_loader.cpython-39.pyc   | Bin 0 -> 2460 bytes
 .../v1_trailDetectionNode.cpython-310.pyc     | Bin 0 -> 7419 bytes
 .../v2_trailDetectionNode.cpython-310.pyc     | Bin 0 -> 6580 bytes
 .../__pycache__/vgg.cpython-310.pyc           | Bin 0 -> 5456 bytes
 .../__pycache__/vgg.cpython-39.pyc            | Bin 0 -> 5926 bytes
 ...izer_v1_trailDetectionNode.cpython-310.pyc | Bin 0 -> 9034 bytes
 ...izer_v2_trailDetectionNode.cpython-310.pyc | Bin 0 -> 8399 bytes
 .../trail_detection_node/model_loader.py      |  99 ++++
 .../trail_detection_node/setup.py             | 125 +++++
 .../v1_trailDetectionNode.py                  | 278 +++++++++++
 .../v2_trailDetectionNode.py                  | 233 ++++++++++
 .../trail_detection_node/vgg.py               | 191 ++++++++
 .../visualizer_v1_trailDetectionNode.py       | 431 ++++++++++++++++++
 .../visualizer_v2_trailDetectionNode.py       | 392 ++++++++++++++++
 29 files changed, 2290 insertions(+)
 create mode 100644 trail_detection_node/package.xml
 create mode 100644 trail_detection_node/resource/trail_detection_node
 create mode 100644 trail_detection_node/setup.cfg
 create mode 100644 trail_detection_node/setup.py
 create mode 100644 trail_detection_node/test.py
 create mode 100644 trail_detection_node/test/test_copyright.py
 create mode 100644 trail_detection_node/test/test_flake8.py
 create mode 100644 trail_detection_node/test/test_pep257.py
 create mode 100644 trail_detection_node/trail_detection_node/GANav_visualizer.py
 create mode 100644 trail_detection_node/trail_detection_node/__init__.py
 create mode 100644 trail_detection_node/trail_detection_node/__pycache__/GANav_visualizer.cpython-310.pyc
 create mode 100644 trail_detection_node/trail_detection_node/__pycache__/GANav_visualizer.cpython-37.pyc
 create mode 100644 trail_detection_node/trail_detection_node/__pycache__/__init__.cpython-310.pyc
 create mode 100644 trail_detection_node/trail_detection_node/__pycache__/__init__.cpython-37.pyc
 create mode 100644 trail_detection_node/trail_detection_node/__pycache__/model_loader.cpython-310.pyc
 create mode 100644 trail_detection_node/trail_detection_node/__pycache__/model_loader.cpython-39.pyc
 create mode 100644 trail_detection_node/trail_detection_node/__pycache__/v1_trailDetectionNode.cpython-310.pyc
 create mode 100644 trail_detection_node/trail_detection_node/__pycache__/v2_trailDetectionNode.cpython-310.pyc
 create mode 100644 trail_detection_node/trail_detection_node/__pycache__/vgg.cpython-310.pyc
 create mode 100644 trail_detection_node/trail_detection_node/__pycache__/vgg.cpython-39.pyc
 create mode 100644 trail_detection_node/trail_detection_node/__pycache__/visualizer_v1_trailDetectionNode.cpython-310.pyc
 create mode 100644 trail_detection_node/trail_detection_node/__pycache__/visualizer_v2_trailDetectionNode.cpython-310.pyc
 create mode 100644 trail_detection_node/trail_detection_node/model_loader.py
 create mode 100644 trail_detection_node/trail_detection_node/setup.py
 create mode 100644 trail_detection_node/trail_detection_node/v1_trailDetectionNode.py
 create mode 100644 trail_detection_node/trail_detection_node/v2_trailDetectionNode.py
 create mode 100644 trail_detection_node/trail_detection_node/vgg.py
 create mode 100644 trail_detection_node/trail_detection_node/visualizer_v1_trailDetectionNode.py
 create mode 100644 trail_detection_node/trail_detection_node/visualizer_v2_trailDetectionNode.py

diff --git a/trail_detection_node/package.xml b/trail_detection_node/package.xml
new file mode 100644
index 00000000..45159f4b
--- /dev/null
+++ b/trail_detection_node/package.xml
@@ -0,0 +1,21 @@
+<?xml version="1.0"?>
+<?xml-model href="http://download.ros.org/schema/package_format3.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
+<package format="3">
+  <name>trail_detection_node</name>
+  <version>0.0.0</version>
+  <description>TODO: Package description</description>
+  <maintainer email="tom2352759619@hotmail.com">zhaodong</maintainer>
+  <license>TODO: License declaration</license>
+
+  <depend>sensor_msgs</depend>
+  <depend>cv_bridge</depend>
+
+  <test_depend>ament_copyright</test_depend>
+  <test_depend>ament_flake8</test_depend>
+  <test_depend>ament_pep257</test_depend>
+  <test_depend>python3-pytest</test_depend>
+
+  <export>
+    <build_type>ament_python</build_type>
+  </export>
+</package>
diff --git a/trail_detection_node/resource/trail_detection_node b/trail_detection_node/resource/trail_detection_node
new file mode 100644
index 00000000..e69de29b
diff --git a/trail_detection_node/setup.cfg b/trail_detection_node/setup.cfg
new file mode 100644
index 00000000..3cedd943
--- /dev/null
+++ b/trail_detection_node/setup.cfg
@@ -0,0 +1,4 @@
+[develop]
+script_dir=$base/lib/trail_detection_node
+[install]
+install_scripts=$base/lib/trail_detection_node
diff --git a/trail_detection_node/setup.py b/trail_detection_node/setup.py
new file mode 100644
index 00000000..511d6ad0
--- /dev/null
+++ b/trail_detection_node/setup.py
@@ -0,0 +1,28 @@
+from setuptools import setup
+
+package_name = 'trail_detection_node'
+
+setup(
+    name=package_name,
+    version='0.0.0',
+    packages=[package_name],
+    data_files=[
+        ('share/ament_index/resource_index/packages',
+            ['resource/' + package_name]),
+        ('share/' + package_name, ['package.xml']),
+    ],
+    install_requires=['setuptools'],
+    zip_safe=True,
+    maintainer='zhaodong',
+    maintainer_email='tom2352759619@hotmail.com',
+    description='TODO: Package description',
+    license='TODO: License declaration',
+    tests_require=['pytest'],
+    entry_points={
+        'console_scripts': [
+            'trail_detection = trail_detection_node.v2_trailDetectionNode:main',
+            'visualizer = trail_detection_node.visualizer_v2_trailDetectionNode:main',
+            'original_visualizer = trail_detection_node.visualizer_v1_trailDetectionNode:main',
+        ],
+    },
+)
diff --git a/trail_detection_node/test.py b/trail_detection_node/test.py
new file mode 100644
index 00000000..5d715cab
--- /dev/null
+++ b/trail_detection_node/test.py
@@ -0,0 +1,3 @@
+from trail_detection_node.visualizer_v2_trailDetectionNode import *
+
+main()
diff --git a/trail_detection_node/test/test_copyright.py b/trail_detection_node/test/test_copyright.py
new file mode 100644
index 00000000..97a39196
--- /dev/null
+++ b/trail_detection_node/test/test_copyright.py
@@ -0,0 +1,25 @@
+# Copyright 2015 Open Source Robotics Foundation, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ament_copyright.main import main
+import pytest
+
+
+# Remove the `skip` decorator once the source file(s) have a copyright header
+@pytest.mark.skip(reason='No copyright header has been placed in the generated source file.')
+@pytest.mark.copyright
+@pytest.mark.linter
+def test_copyright():
+    rc = main(argv=['.', 'test'])
+    assert rc == 0, 'Found errors'
diff --git a/trail_detection_node/test/test_flake8.py b/trail_detection_node/test/test_flake8.py
new file mode 100644
index 00000000..27ee1078
--- /dev/null
+++ b/trail_detection_node/test/test_flake8.py
@@ -0,0 +1,25 @@
+# Copyright 2017 Open Source Robotics Foundation, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ament_flake8.main import main_with_errors
+import pytest
+
+
+@pytest.mark.flake8
+@pytest.mark.linter
+def test_flake8():
+    rc, errors = main_with_errors(argv=[])
+    assert rc == 0, \
+        'Found %d code style errors / warnings:\n' % len(errors) + \
+        '\n'.join(errors)
diff --git a/trail_detection_node/test/test_pep257.py b/trail_detection_node/test/test_pep257.py
new file mode 100644
index 00000000..b234a384
--- /dev/null
+++ b/trail_detection_node/test/test_pep257.py
@@ -0,0 +1,23 @@
+# Copyright 2015 Open Source Robotics Foundation, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ament_pep257.main import main
+import pytest
+
+
+@pytest.mark.linter
+@pytest.mark.pep257
+def test_pep257():
+    rc = main(argv=['.', 'test'])
+    assert rc == 0, 'Found code style errors / warnings'
diff --git a/trail_detection_node/trail_detection_node/GANav_visualizer.py b/trail_detection_node/trail_detection_node/GANav_visualizer.py
new file mode 100644
index 00000000..9919553f
--- /dev/null
+++ b/trail_detection_node/trail_detection_node/GANav_visualizer.py
@@ -0,0 +1,412 @@
+import torch
+from .mmseg.apis import inference_segmentor, init_segmentor
+from PIL import Image as ImagePIL
+from torchvision import transforms
+import cv2
+import os
+
+import rclpy
+from rclpy.node import Node
+from geometry_msgs.msg import PoseStamped
+from sensor_msgs.msg import Image, PointCloud2
+import sensor_msgs_py.point_cloud2 as pc2
+import numpy as np
+import math
+from cv_bridge import CvBridge
+
+'''
+    The transformation matrix, the coordinate-conversion functions, and the depth-estimation function are copied from human_detection_node.
+'''
+camera_transformation_k = np.array([
+    [628.5359544,0,676.9575694],
+    [0,627.7249542,532.7206716],
+    [0,0,1]])
+
+rotation_matrix = np.array([
+    [-0.007495781893,-0.0006277316155,0.9999717092],
+    [-0.9999516401,-0.006361853422,-0.007499625104],
+    [0.006366381192,-0.9999795662,-0.0005800141927]])
+
+rotation_matrix = rotation_matrix.T
+
+translation_vector = np.array([-0.06024059837, -0.08180891509, -0.3117851288])
+image_width=1280
+image_height=1024
+
+def convert_to_lidar_frame(uv_coordinate):
+    """
+    convert a 2d camera coordinate + depth into the 3d lidar frame
+    """
+    point_cloud = np.empty( (3,) , dtype=float)
+    point_cloud[2] = uv_coordinate[2]
+    point_cloud[1] = ( image_height - uv_coordinate[1] )*point_cloud[2]
+    point_cloud[0] = uv_coordinate[0]*point_cloud[2]
+
+    inverse_camera_transformation_k = np.linalg.inv(camera_transformation_k)
+    inverse_rotation_matrix = np.linalg.inv(rotation_matrix)
+    point_cloud = inverse_camera_transformation_k @ point_cloud
+    point_cloud = inverse_rotation_matrix @ (point_cloud-translation_vector)
+    return point_cloud
+
+def convert_to_camera_frame(point_cloud):
+    """
+    convert 3d lidar data into a 2d camera-frame coordinate + depth
+    """
+    length = point_cloud.shape[0]
+    translation = np.tile(translation_vector, (length, 1)).T
+
+    point_cloud = point_cloud.T
+    point_cloud = rotation_matrix@point_cloud + translation
+    point_cloud = camera_transformation_k @ point_cloud
+
+    uv_coordinate = np.empty_like(point_cloud)
+
+    """
+    uv = [x/z, y/z, z]; the image v axis points the opposite way, hence the image_height offset
+    """
+    uv_coordinate[0] = point_cloud[0] / point_cloud[2]
+    uv_coordinate[1] = image_height - point_cloud[1] / point_cloud[2]
+    uv_coordinate[2] = point_cloud[2]
+
+    uv_depth = uv_coordinate[2, :]
+    filtered_uv_coordinate = uv_coordinate[:, uv_depth >= 0]
+    return filtered_uv_coordinate
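+
+# Quick sanity check of the conventions above (illustrative sketch, not called
+# by the node): the two converters are exact inverses for points with positive
+# depth, so a round trip should return the original lidar point.
+#
+#   p = np.array([[5.0, 0.5, -0.2]])            # one lidar-frame point
+#   u, v, z = convert_to_camera_frame(p)[:, 0]  # [x/z, image_height - y/z, z]
+#   assert np.allclose(convert_to_lidar_frame(np.array([u, v, z])), p[0])
+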
+def estimate_depth(x, y, np_2d_array):
+    """
+    estimate the depth by finding the points closest to (x, y) in the 2d array
+    """
+    # Calculate the distance between each point and the target coordinates (x, y)
+    distances_sq = (np_2d_array[0,:] - x) ** 2 + (np_2d_array[1,:] - y) ** 2
+
+    # Find the indices of the k nearest points
+    k = 5 # Number of nearest neighbors we want
+    closest_indices = np.argpartition(distances_sq, k)[:k]
+    pixel_distance_threshold = 2000
+
+    valid_indices = [idx for idx in closest_indices if distances_sq[idx]<=pixel_distance_threshold]
+    if len(valid_indices) == 0:
+        # lidar points usually disappear at around 0.4 m
+        distance_where_lidar_stops_working = -1
+        return distance_where_lidar_stops_working
+
+    filtered_indices = np.array(valid_indices)
+    # Get the depth values of the closest points
+    closest_depths = np_2d_array[2,filtered_indices]
+
+    return np.mean(closest_depths)
+
+def load_model(device):
+
+    config = "./trail_detection_node/trained_models/rugd_group6/ganav_rugd_6.py"
+    checkpoint = "./trail_detection_node/trained_models/rugd_group6/ganav_rugd_6.pth"
+    model = init_segmentor(config, checkpoint, device=device)
+    print('Finished loading model!')
+
+    return model
+
+def find_route(model, device, cv_image):
+    result = inference_segmentor(model, cv_image)
+    result_np = np.vstack(result).astype(np.uint8)
+    pred = np.where(result_np == 0, 255, 0)
+
+    # # prediction visualize
+    # cv2.imshow('prediction',pred)
+    # cv2.waitKey(25)
+
+    route = np.zeros_like(pred)
+    # calculate the center line by averaging the leftmost and rightmost trail pixels in each row
+    row_num = 0
+    for row in pred:
+        white_pixels = list(np.nonzero(row)[0])
+        if white_pixels:
+            average = (white_pixels[0] + white_pixels[-1]) / 2
+            route[row_num][round(average)] = 255
+        row_num = row_num + 1
+    return route, pred
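+
+# Worked example for the centerline rule above (illustrative): for a mask row
+# [0, 0, 255, 255, 255, 0], np.nonzero gives columns [2, 3, 4], so the
+# centerline pixel for that row is round((2 + 4) / 2) = 3.
+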
+def equalize_hist_rgb(img):
+    # Split the image into its color channels
+    r, g, b = cv2.split(img)
+
+    # Equalize the histograms for each channel
+    r_eq = cv2.equalizeHist(r)
+    g_eq = cv2.equalizeHist(g)
+    b_eq = cv2.equalizeHist(b)
+
+    # Merge the channels
+    img_eq = cv2.merge((r_eq, g_eq, b_eq))
+    # img_eq = cv2.fastNlMeansDenoisingColored(img_eq,None,10,20,7,42)
+    # img_eq = cv2.GaussianBlur(img_eq, (25,25), 0)
+    return img_eq
+
+'''
+    The trail detector node has two subscriptions (lidar and camera) and one publisher (trail position). After it receives msgs from both lidar and camera,
+    it detects the trail in the image and publishes the corresponding lidar position as the trail location.
+    The msgs are synchronized before processing, using the buffers and the sync function.
+    To find the path, the node processes the image, finds a line to follow (by taking the average of the left and right edges of the path), estimates the depth of the lidar points,
+    and chooses the closest point as the goal.
+    V1_traildetection assumes that the path points frontward and that there is only one path in front.
+'''
+class trailDetector(Node):
+    def __init__(self, model, device):
+        super().__init__('trail_detector')
+        # define trail publisher
+        self.trail_publisher = self.create_publisher(
+            PoseStamped,
+            'trail_location',
+            10)
+
+        # define camera subscription
+        self.camera_subscription = self.create_subscription(
+            Image,
+            'camera',
+            self.camera_callback,
+            10)
+        self.camera_subscription
+
+        # define lidar subscription
+        self.lidar_subscription = self.create_subscription(
+            PointCloud2,
+            'velodyne_points',
+            self.lidar_callback,
+            10)
+        self.lidar_subscription
+
+        self.bridge = CvBridge()
+
+        # load model and device
+        self.model = model
+        self.device = device
+
+        # buffers to hold msgs for sync
+        self.buffer_size = 50 # 50 is an arbitrary choice, to be discussed
+        self.lidar_buffer = []
+        self.camera_buffer = []
+        self.only_camera_mode = True
+
+    def camera_callback(self, msg):
+        if len(self.camera_buffer) >= self.buffer_size:
+            self.camera_buffer.pop(0)
+        self.camera_buffer.append(msg)
+        self.sync()
+
+    def lidar_callback(self, msg):
+        if len(self.lidar_buffer) >= self.buffer_size:
+            self.lidar_buffer.pop(0)
+        self.lidar_buffer.append(msg)
+        self.sync()
+
+    def sync(self):
+        # if one of the sensors has no msg, then return
+        if self.only_camera_mode and self.camera_buffer:
+            camera_msg = self.camera_buffer[0]
+            self.camera_buffer.pop(0)
+            self.only_camera_callback(camera_msg)
+        elif not self.lidar_buffer or not self.camera_buffer:
+            return
+
+        while self.lidar_buffer and self.camera_buffer:
+            lidar_msg = self.lidar_buffer[0]
+            camera_msg = self.camera_buffer[0]
+
+            time_tolerance = 20000000 # nanosec (20 ms), arbitrary choice to be discussed
+            # compare full timestamps (sec + nanosec); nanosec alone would wrap every second
+            lidar_time = lidar_msg.header.stamp.sec * 1000000000 + lidar_msg.header.stamp.nanosec
+            camera_time = camera_msg.header.stamp.sec * 1000000000 + camera_msg.header.stamp.nanosec
+            time_difference = abs(lidar_time - camera_time)
+            # print(time_difference)
+
+            # compare the time difference; if it is within tolerance, pass the msgs to trail_callback, otherwise discard the older msg
+            if time_difference <= time_tolerance:
+                self.lidar_buffer.pop(0)
+                self.camera_buffer.pop(0)
+                self.get_logger().info("msgs received!")
+                self.trail_callback(lidar_msg, camera_msg)
+            elif lidar_time > camera_time:
+                self.camera_buffer.pop(0)
+            else:
+                self.lidar_buffer.pop(0)
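+
+    # Worked example of the pairing rule above (values illustrative): with the
+    # 20 ms tolerance, stamps 10.010 s and 10.025 s differ by 15 ms and are
+    # paired; 10.010 s and 10.500 s differ by 490 ms, so the older lidar msg
+    # is dropped and the next candidate pair is tried.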
+
+    def trail_callback(self, lidar_msg, camera_msg):
+        # process lidar msg
+        point_gen = pc2.read_points(
+            lidar_msg, field_names=(
+                "x", "y", "z"), skip_nans=True)
+        points = [[x, y, z] for x, y, z in point_gen]
+        points = np.array(points)
+        points2d = convert_to_camera_frame(points)
+
+        # process camera msg
+        cv_image = self.bridge.imgmsg_to_cv2(camera_msg, desired_encoding='passthrough')
+
+        # increase brightness
+        M = np.ones(cv_image.shape, dtype = 'uint8') * 50 # increase brightness by 50
+        cv_image = cv2.add(cv_image, M)
+
+        # # image visualizer
+        # cv2.imshow('raw image',cv_image)
+        # cv2.waitKey(25)
+
+        route, _ = find_route(self.model, self.device, cv_image)
+
+        # # visualize route purely
+        # cv2.imshow("white_route",route)
+        # cv2.waitKey(25)
+
+        route_indices = list(zip(*np.nonzero(route)))
+        if not route_indices:
+            print("No centerline found!")
+            return
+
+        # # filter out points that have no lidar points near them
+        # filtered_route_indices = []
+        # for index in route_indices:
+        #     point = []
+        #     point.append(index[0])
+        #     point.append(index[1])
+        #     point.append(estimate_depth(index[0], index[1], points2d))
+        #     if point[2] == -1:
+        #         continue
+        #     else:
+        #         filtered_route_indices.append(point)
+
+        # find the corresponding lidar points using the center line pixels
+        filtered_3dPoints = []
+        # for index in filtered_route_indices:
+        for index in route_indices:
+            point = []
+            # point.append(index[0])
+            # point.append(index[1])
+            # point.append(index[2])
+
+            point.append(index[0])
+            point.append(index[1])
+            point.append(estimate_depth(index[0], index[1], points2d))
+
+            point_3d = convert_to_lidar_frame(point)
+            filtered_3dPoints.append(point_3d)
+
+        filtered_3dPoints = np.array(filtered_3dPoints)
+        # find the nearest 3d point and set that as the goal
+        distances_sq = filtered_3dPoints[:,0]**2 + filtered_3dPoints[:,1]**2 + filtered_3dPoints[:,2]**2
+        smallest_index = np.argmin(distances_sq)
+        # print(math.sqrt(distances_sq[smallest_index]))
+        # smallest_index = np.argmin(filtered_3dPoints[:,2])
+
+        # visualize the post-processed image
+        visualize_cv_image = cv_image
+        print(f"{visualize_cv_image.shape[0]}")
+        circle_x = route_indices[smallest_index][0]
+        circle_y = route_indices[smallest_index][1]
+
+        # # visualize the lidar points in the image
+        # uv_x, uv_y, uv_z = points2d
+        # for index_lidar in range(points2d.shape[1]):
+        #     # print(f"{int(uv_x[index_lidar])},{int(uv_y[index_lidar])}")
+        #     cv2.circle(visualize_cv_image, (int(uv_x[index_lidar]), int(image_height - uv_y[index_lidar])), radius=5, color=(255, 0, 0), thickness=-1)
+
+        # visualize the center line in the image
+        for index_circle in range(len(route_indices)):
+            if index_circle == smallest_index:
+                continue
+            else:
+                red_circle_x = route_indices[index_circle][0]
+                red_circle_y = route_indices[index_circle][1]
+                cv2.circle(visualize_cv_image, (red_circle_y, red_circle_x), radius=5, color=(0, 0, 255), thickness=-1)
+
+        # visualize the chosen point in the image
+        cv2.circle(visualize_cv_image, (circle_y, circle_x), radius=7, color=(0, 255, 0), thickness=-1)
+        cv2.circle(visualize_cv_image, (0, 0), radius=12, color=(0, 255, 0), thickness=-1)
+
+        cv2.imshow('circled image',visualize_cv_image)
+        cv2.waitKey(25)
+
+        # publish message
+        trail_location_msg = PoseStamped()
+        trail_location_msg.header.stamp = lidar_msg.header.stamp
+        trail_location_msg.header.frame_id = "velodyne"
+        # position
+        trail_location_msg.pose.position.x = filtered_3dPoints[smallest_index][0]
+        trail_location_msg.pose.position.y = filtered_3dPoints[smallest_index][1]
+        trail_location_msg.pose.position.z = filtered_3dPoints[smallest_index][2]
+        # orientation
+        yaw = math.atan2(filtered_3dPoints[smallest_index][1], filtered_3dPoints[smallest_index][0])
+        trail_location_msg.pose.orientation.x = 0.0
+        trail_location_msg.pose.orientation.y = 0.0
+        trail_location_msg.pose.orientation.z = math.sin(yaw/2)
+        trail_location_msg.pose.orientation.w = math.cos(yaw / 2)
+        self.get_logger().info("location published!")
+        self.trail_publisher.publish(trail_location_msg)
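+
+    # Note on the orientation fields above: a yaw-only rotation about the z
+    # axis corresponds to the quaternion (x, y, z, w) = (0, 0, sin(yaw/2),
+    # cos(yaw/2)), which is what is encoded here.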
+
+    def only_camera_callback(self, camera_msg):
+        # process camera msg
+        cv_image = self.bridge.imgmsg_to_cv2(camera_msg, desired_encoding='passthrough')
+
+        # cv_image = cv2.fastNlMeansDenoisingColored(cv_image,None,5,10,7,14)
+
+        # # histogram equalization method 1
+        # cv_image = cv2.cvtColor(cv_image, cv2.COLOR_BGR2YCrCb)
+        # cv_image[:,:,0] = cv2.equalizeHist(cv_image[:,:,0])
+        # cv_image = cv2.cvtColor(cv_image, cv2.COLOR_YCrCb2BGR)
+
+        # # histogram equalization method 2
+        # cv_image = equalize_hist_rgb(cv_image)
+
+        # # histogram equalization method 3 CLAHE
+        # image_bw = cv2.cvtColor(cv_image, cv2.COLOR_BGR2GRAY)
+        # clahe = cv2.createCLAHE(clipLimit=5)
+        # cv_image = clahe.apply(image_bw) + 30
+
+        # # histogram equalization method 4 CLAHE
+        # cla = cv2.createCLAHE(clipLimit=4.0)
+        # H, S, V = cv2.split(cv2.cvtColor(cv_image, cv2.COLOR_BGR2HSV))
+        # eq_V = cla.apply(V)
+        # cv_image = cv2.cvtColor(cv2.merge([H, S, eq_V]), cv2.COLOR_HSV2BGR)
+
+        # # increase brightness
+        # M = np.ones(cv_image.shape, dtype = 'uint8') * 50 # increase brightness by 50
+        # cv_image = cv2.add(cv_image, M)
+
+        # # image visualizer
+        # cv2.imshow('raw image',cv_image)
+        # cv2.waitKey(25)
+
+        route, pred = find_route(self.model, self.device, cv_image)
+
+        # # visualize route purely
+        # cv2.imshow("white_route",route)
+        # cv2.waitKey(25)
+
+        # increase the brightness of the image where the road is predicted
+        sign = cv2.cvtColor(pred, cv2.COLOR_GRAY2BGR)
+        cv_image = cv2.add(cv_image, sign)
+
+        route_indices = list(zip(*np.nonzero(route)))
+        if not route_indices:
+            print("No centerline found!")
+            return
+
+        for index_circle in range(len(route_indices)):
+            red_circle_x = route_indices[index_circle][0]
+            red_circle_y = route_indices[index_circle][1]
+            cv2.circle(cv_image, (red_circle_y, red_circle_x), radius=5, color=(0, 0, 255), thickness=-1)
+
+        cv2.imshow('circled image',cv_image)
+        cv2.waitKey(25)
+
+
+def main(args=None):
+    print(torch.cuda.is_available())
+    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+    model = load_model(device)
+
+    rclpy.init(args=args)
+    trailDetectorNode = trailDetector(model, device)
+    rclpy.spin(trailDetectorNode)
+
+    trailDetectorNode.destroy_node()
+    rclpy.shutdown()
+    cv2.destroyAllWindows()
+
+if __name__ == '__main__':
+    main()
\ No newline at end of file
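Usage sketch (assumes a built and sourced colcon workspace): the console_scripts declared in trail_detection_node/setup.py expose the nodes as

    ros2 run trail_detection_node trail_detection
    ros2 run trail_detection_node visualizer
    ros2 run trail_detection_node original_visualizer

Note that GANav_visualizer.py above defines a main() but is not registered as an entry point.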
diff --git a/trail_detection_node/trail_detection_node/__init__.py b/trail_detection_node/trail_detection_node/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/trail_detection_node/trail_detection_node/__pycache__/GANav_visualizer.cpython-310.pyc b/trail_detection_node/trail_detection_node/__pycache__/GANav_visualizer.cpython-310.pyc
new file mode 100644
GIT binary patch: literal 8527 [base85-encoded .pyc data omitted]
diff --git a/trail_detection_node/trail_detection_node/__pycache__/GANav_visualizer.cpython-37.pyc b/trail_detection_node/trail_detection_node/__pycache__/GANav_visualizer.cpython-37.pyc
new file mode 100644
GIT binary patch: literal 8540 [base85-encoded .pyc data omitted]
diff --git a/trail_detection_node/trail_detection_node/__pycache__/__init__.cpython-310.pyc b/trail_detection_node/trail_detection_node/__pycache__/__init__.cpython-310.pyc
new file mode 100644
GIT binary patch: literal 173 [base85-encoded .pyc data omitted]
diff --git a/trail_detection_node/trail_detection_node/__pycache__/__init__.cpython-37.pyc b/trail_detection_node/trail_detection_node/__pycache__/__init__.cpython-37.pyc
new file mode 100644
GIT binary patch: literal 167 [base85-encoded .pyc data omitted]
diff --git a/trail_detection_node/trail_detection_node/__pycache__/model_loader.cpython-310.pyc b/trail_detection_node/trail_detection_node/__pycache__/model_loader.cpython-310.pyc
new file mode 100644
GIT binary patch: literal 3390 [base85-encoded .pyc data omitted]
diff --git a/trail_detection_node/trail_detection_node/__pycache__/model_loader.cpython-39.pyc b/trail_detection_node/trail_detection_node/__pycache__/model_loader.cpython-39.pyc
new file mode 100644
GIT binary patch: literal 2460 [base85-encoded .pyc data omitted]
diff --git a/trail_detection_node/trail_detection_node/__pycache__/v1_trailDetectionNode.cpython-310.pyc b/trail_detection_node/trail_detection_node/__pycache__/v1_trailDetectionNode.cpython-310.pyc
new file mode 100644
GIT binary patch: literal 7419 [base85-encoded .pyc data omitted]
diff --git a/trail_detection_node/trail_detection_node/__pycache__/v2_trailDetectionNode.cpython-310.pyc b/trail_detection_node/trail_detection_node/__pycache__/v2_trailDetectionNode.cpython-310.pyc
new file mode 100644
GIT binary patch: literal 6580 [base85-encoded .pyc data omitted]
diff --git a/trail_detection_node/trail_detection_node/__pycache__/vgg.cpython-310.pyc b/trail_detection_node/trail_detection_node/__pycache__/vgg.cpython-310.pyc
new file mode 100644
GIT binary patch: literal 5456 [base85-encoded .pyc data omitted]
diff --git a/trail_detection_node/trail_detection_node/__pycache__/vgg.cpython-39.pyc b/trail_detection_node/trail_detection_node/__pycache__/vgg.cpython-39.pyc
new file mode 100644
GIT binary patch: literal 5926 [base85-encoded .pyc data omitted]
diff --git a/trail_detection_node/trail_detection_node/__pycache__/visualizer_v1_trailDetectionNode.cpython-310.pyc b/trail_detection_node/trail_detection_node/__pycache__/visualizer_v1_trailDetectionNode.cpython-310.pyc
new file mode 100644
GIT binary patch: literal 9034 [base85-encoded .pyc data omitted]
diff --git a/trail_detection_node/trail_detection_node/__pycache__/visualizer_v2_trailDetectionNode.cpython-310.pyc b/trail_detection_node/trail_detection_node/__pycache__/visualizer_v2_trailDetectionNode.cpython-310.pyc
new file mode 100644
GIT binary patch: literal 8399 [base85-encoded .pyc data omitted]
zEacL*_4UTujq>)ZSk7PKA8#NbFIO7ex5~h+HgM~dGhrFz{lYj?TE_1n@Vo{*FDV*P z%BpP@5luM*PuVbm^$GFkZP}`b_Opn(0RO|tQ>b_XUiE}}7-a{Zz!ZAJq#yoqlHR1C z)#xE1+i?+S)iA~Qd;tRu$PBj?t~V&auWcjMq=5#uOyGp46r9OH(V8gPP*NuHT9Qv3 za<&n3%uF}pi1DeU#Nd(-k*x`L8e>VrU=GF@;)_GIYAy0Dtbkvoa7(z;?lzRRLRQy9EwVdQ{ z{%1tkVOSobNIq{X;j2u4s)uI?pjB0=Ftk$V~cm{6My zSZE)wbU9JqW_`#4El|@ok_4XzC=$0NQ3XB~7^5<;Ff{~I0D-9ZtN=uT>n1FR2*?#& zA&?w=$xZSqtwEj`0H{PCI9n7?u?7YCE)GicmR8hQ6b41RcmwlNcma-mTq|dn^M26y z>qsD_vD;Y1HS&kQG-=^HIFv=Y9ys*z?;zLrY#H0~ygdItJ z+Vn%1T82(!2{e@95y4ZEzaJUqC}){vh%p3Nv&ZaN{7QCJ8>`sL&pGGp5|r}w{{@l} B*2e$< literal 0 HcmV?d00001 diff --git a/trail_detection_node/trail_detection_node/model_loader.py b/trail_detection_node/trail_detection_node/model_loader.py new file mode 100644 index 00000000..908c74f6 --- /dev/null +++ b/trail_detection_node/trail_detection_node/model_loader.py @@ -0,0 +1,99 @@ +import torch.nn as nn +import torch.nn.functional as F +from .vgg import vgg16 + +class FCN8s(nn.Module): + def __init__(self, nclass, backbone='vgg16', aux=False, pretrained_base=True, norm_layer=nn.BatchNorm2d, **kwargs): + super(FCN8s, self).__init__() + self.aux = aux + if backbone == 'vgg16': + self.pretrained = vgg16(pretrained=pretrained_base).features + else: + raise RuntimeError('unknown backbone: {}'.format(backbone)) + self.pool3 = nn.Sequential(*self.pretrained[:17]) + self.pool4 = nn.Sequential(*self.pretrained[17:24]) + self.pool5 = nn.Sequential(*self.pretrained[24:]) + self.head = _FCNHead(512, nclass, norm_layer) + self.score_pool3 = nn.Conv2d(256, nclass, 1) + self.score_pool4 = nn.Conv2d(512, nclass, 1) + if aux: + self.auxlayer = _FCNHead(512, nclass, norm_layer) + + self.__setattr__('exclusive', + ['head', 'score_pool3', 'score_pool4', 'auxlayer'] if aux else ['head', 'score_pool3', + 'score_pool4']) + + def forward(self, x): + pool3 = self.pool3(x) + pool4 = self.pool4(pool3) + pool5 = self.pool5(pool4) + + outputs = [] + score_fr = self.head(pool5) + + score_pool4 = self.score_pool4(pool4) + score_pool3 = self.score_pool3(pool3) + + upscore2 = F.interpolate(score_fr, score_pool4.size()[2:], mode='bilinear', align_corners=True) + fuse_pool4 = upscore2 + score_pool4 + + upscore_pool4 = F.interpolate(fuse_pool4, score_pool3.size()[2:], mode='bilinear', align_corners=True) + fuse_pool3 = upscore_pool4 + score_pool3 + + out = F.interpolate(fuse_pool3, x.size()[2:], mode='bilinear', align_corners=True) + outputs.append(out) + + if self.aux: + auxout = self.auxlayer(pool5) + auxout = F.interpolate(auxout, x.size()[2:], mode='bilinear', align_corners=True) + outputs.append(auxout) + + return tuple(outputs) + +class FCN32s(nn.Module): + """There are some difference from original fcn""" + + def __init__(self, nclass, backbone='vgg16', aux=False, pretrained_base=True, + norm_layer=nn.BatchNorm2d, **kwargs): + super(FCN32s, self).__init__() + self.aux = aux + if backbone == 'vgg16': + self.pretrained = vgg16(pretrained=pretrained_base).features + else: + raise RuntimeError('unknown backbone: {}'.format(backbone)) + self.head = _FCNHead(512, nclass, norm_layer) + if aux: + self.auxlayer = _FCNHead(512, nclass, norm_layer) + + self.__setattr__('exclusive', ['head', 'auxlayer'] if aux else ['head']) + + def forward(self, x): + size = x.size()[2:] + pool5 = self.pretrained(x) + + outputs = [] + out = self.head(pool5) + out = F.interpolate(out, size, mode='bilinear', align_corners=True) + outputs.append(out) + + if self.aux: + auxout = self.auxlayer(pool5) + auxout = F.interpolate(auxout, size, mode='bilinear', align_corners=True) 
diff --git a/trail_detection_node/trail_detection_node/setup.py b/trail_detection_node/trail_detection_node/setup.py
new file mode 100644
index 00000000..ff6a39fd
--- /dev/null
+++ b/trail_detection_node/trail_detection_node/setup.py
@@ -0,0 +1,125 @@
+from setuptools import find_packages, setup
+
+
+def readme():
+    with open('README.md', encoding='utf-8') as f:
+        content = f.read()
+    return content
+
+
+version_file = 'mmseg/version.py'
+
+
+def get_version():
+    with open(version_file, 'r') as f:
+        exec(compile(f.read(), version_file, 'exec'))
+    return locals()['__version__']
+
+
+def parse_requirements(fname='requirements.txt', with_version=True):
+    """Parse the package dependencies listed in a requirements file but strip
+    specific versioning information.
+
+    Args:
+        fname (str): path to requirements file
+        with_version (bool, default=True): if True include version specs
+
+    Returns:
+        List[str]: list of requirements items
+
+    CommandLine:
+        python -c "import setup; print(setup.parse_requirements())"
+    """
+    import sys
+    from os.path import exists
+    import re
+    require_fpath = fname
+
+    def parse_line(line):
+        """Parse information from a line in a requirements text file."""
+        if line.startswith('-r '):
+            # Allow specifying requirements in other files
+            target = line.split(' ')[1]
+            for info in parse_require_file(target):
+                yield info
+        else:
+            info = {'line': line}
+            if line.startswith('-e '):
+                info['package'] = line.split('#egg=')[1]
+            else:
+                # Remove versioning from the package
+                pat = '(' + '|'.join(['>=', '==', '>']) + ')'
+                parts = re.split(pat, line, maxsplit=1)
+                parts = [p.strip() for p in parts]
+
+                info['package'] = parts[0]
+                if len(parts) > 1:
+                    op, rest = parts[1:]
+                    if ';' in rest:
+                        # Handle platform specific dependencies
+                        # http://setuptools.readthedocs.io/en/latest/setuptools.html#declaring-platform-specific-dependencies
+                        version, platform_deps = map(str.strip,
+                                                     rest.split(';'))
+                        info['platform_deps'] = platform_deps
+                    else:
+                        version = rest  # NOQA
+                    info['version'] = (op, version)
+            yield info
+
+    def parse_require_file(fpath):
+        with open(fpath, 'r') as f:
+            for line in f.readlines():
+                line = line.strip()
+                if line and not line.startswith('#'):
+                    for info in parse_line(line):
+                        yield info
+
+    def gen_packages_items():
+        if exists(require_fpath):
+            for info in parse_require_file(require_fpath):
+                parts = [info['package']]
+                if with_version and 'version' in info:
+                    parts.extend(info['version'])
+                    if not sys.version.startswith('3.4'):
+                        # apparently package_deps are broken in 3.4
+                        platform_deps = info.get('platform_deps')
+                        if platform_deps is not None:
+                            parts.append(';' + platform_deps)
+                item = ''.join(parts)
+                yield item
+
+    packages = list(gen_packages_items())
+    return packages
+
+
+if __name__ == '__main__':
+    setup(
+        name='mmsegmentation',
+        version=get_version(),
+        description='Open MMLab Semantic Segmentation Toolbox and Benchmark',
+        long_description_content_type='text/markdown',
+        author='MMSegmentation Authors',
+        author_email='openmmlab@gmail.com',
+        keywords='computer vision, semantic segmentation',
+        url='http://github.com/open-mmlab/mmsegmentation',
+        packages=find_packages(exclude=('configs', 'tools', 'demo')),
+        classifiers=[
+            'Development Status :: 4 - Beta',
+            'License :: OSI Approved :: Apache Software License',
+            'Operating System :: OS Independent',
+            'Programming Language :: Python :: 3.6',
+            'Programming Language :: Python :: 3.7',
+            'Programming Language :: Python :: 3.8',
+        ],
+        license='Apache License 2.0',
+        setup_requires=parse_requirements('requirements/build.txt'),
+        tests_require=parse_requirements('requirements/tests.txt'),
+        install_requires=parse_requirements('requirements/runtime.txt'),
+        extras_require={
+            'all': parse_requirements('requirements.txt'),
+            'tests': parse_requirements('requirements/tests.txt'),
+            'build': parse_requirements('requirements/build.txt'),
+            'optional': parse_requirements('requirements/optional.txt'),
+        },
+        ext_modules=[],
+        zip_safe=False)
diff --git a/trail_detection_node/trail_detection_node/v1_trailDetectionNode.py b/trail_detection_node/trail_detection_node/v1_trailDetectionNode.py
new file mode 100644
index 00000000..c20f7df5
--- /dev/null
+++ b/trail_detection_node/trail_detection_node/v1_trailDetectionNode.py
@@ -0,0 +1,278 @@
+import torch
+from .model_loader import FCN8s
+from PIL import Image as ImagePIL
+from torchvision import transforms
+import cv2
+
+import rclpy
+from rclpy.node import Node
+from geometry_msgs.msg import PoseStamped
+from sensor_msgs.msg import Image, PointCloud2
+import sensor_msgs_py.point_cloud2 as pc2
+import numpy as np
+import math
+from cv_bridge import CvBridge
+
+'''
+    The transformation matrix as well as the coordinate conversion and depth estimation functions are copied from human_detection_node
+'''
+camera_transformation_k = np.array([
+    [628.5359544,0,676.9575694],
+    [0,627.7249542,532.7206716],
+    [0,0,1]])
+
+rotation_matrix = np.array([
+    [-0.007495781893,-0.0006277316155,0.9999717092],
+    [-0.9999516401,-0.006361853422,-0.007499625104],
+    [0.006366381192,-0.9999795662,-0.0005800141927]])
+
+rotation_matrix = rotation_matrix.T
+
+translation_vector = np.array([-0.06024059837, -0.08180891509, -0.3117851288])
+image_width=1280
+image_height=1024
+
+def convert_to_lidar_frame(uv_coordinate):
+    """
+    convert 2d camera coordinate + depth into 3d lidar frame
+    """
+    point_cloud = np.empty( (3,) , dtype=float)
+    point_cloud[2] = uv_coordinate[2]
+    point_cloud[1] = ( image_height - uv_coordinate[1] )*point_cloud[2]
+    point_cloud[0] = uv_coordinate[0]*point_cloud[2]
+
+    inverse_camera_transformation_k = np.linalg.inv(camera_transformation_k)
+    inverse_rotation_matrix = np.linalg.inv(rotation_matrix)
+    point_cloud = inverse_camera_transformation_k @ point_cloud
+    point_cloud = inverse_rotation_matrix @ (point_cloud-translation_vector)
+    return point_cloud
+
+def convert_to_camera_frame(point_cloud):
+    """
+    convert 3d lidar data into 2d coordinate of the camera frame + depth
+    """
+    length = point_cloud.shape[0]
+    translation = np.tile(translation_vector, (length, 1)).T
+
+    point_cloud = point_cloud.T
+    point_cloud = rotation_matrix@point_cloud + translation
+    point_cloud = camera_transformation_k @ point_cloud
+
+    uv_coordinate = np.empty_like(point_cloud)
+
+    """
+    uv = [x/z, y/z, z], and y is opposite so the minus imageheight
+    """
+    uv_coordinate[0] = point_cloud[0] / point_cloud[2]
+    uv_coordinate[1] = image_height - point_cloud[1] / point_cloud[2]
+    uv_coordinate[2] = point_cloud[2]
+
+    uv_depth = uv_coordinate[2, :]
+    filtered_uv_coordinate = uv_coordinate[:, uv_depth >= 0]
+    return filtered_uv_coordinate
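+# The two conversion helpers above are exact inverses for points with positive
+# depth, which can be sanity-checked numerically (illustrative comment only):
+#
+#   p = np.array([[5.0, 1.0, -0.5]])         # one lidar point, shape (1, 3)
+#   uv = convert_to_camera_frame(p)          # shape (3, 1): u, v, depth
+#   back = convert_to_lidar_frame(uv[:, 0])  # back into the lidar frame
+#   assert np.allclose(back, p[0])
+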
+def estimate_depth(x, y, np_2d_array):
+    """
+    estimate the depth by finding points closest to x,y from the 2d array
+    """
+    # Calculate the distance between each point and the target coordinates (x, y)
+    distances_sq = (np_2d_array[0,:] - x) ** 2 + (np_2d_array[1,:] - y) ** 2
+
+    # Find the indices of the k nearest points
+    k = 5  # Number of nearest neighbors we want
+    closest_indices = np.argpartition(distances_sq, k)[:k]
+    pixel_distance_threshold = 2000
+
+    valid_indices = [idx for idx in closest_indices if distances_sq[idx]<=pixel_distance_threshold]
+    if len(valid_indices) == 0:
+        # lidar points usually disappear around 0.4 m
+        distance_where_lidar_stops_working = -1
+        return distance_where_lidar_stops_working
+
+    filtered_indices = np.array(valid_indices)
+    # Get the depth values of the closest points
+    closest_depths = np_2d_array[2,filtered_indices]
+
+    return np.mean(closest_depths)
+
+def load_model(device):
+    model = FCN8s(nclass=6, backbone='vgg16', pretrained_base=True, pretrained=True)
+    model.load_state_dict(torch.load('src/trail_detection_node/trail_detection_node/model/fcn8s_vgg16_pascal_voc.pth'))
+    model = model.to(device)
+    print('Finished loading model!')
+
+    return model
+
+def find_route(model, device, cv_image):
+    PIL_image = ImagePIL.fromarray(cv2.cvtColor(cv_image, cv2.COLOR_BGR2RGB))
+    transform = transforms.Compose([
+        transforms.ToTensor(),
+        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
+    ])
+    image = transform(PIL_image).unsqueeze(0).to(device)
+    with torch.no_grad():
+        output = model(image)
+    # pred is the prediction generated by the model, route holds the center line
+    pred = torch.argmax(output[0], 1).squeeze(0).cpu().data.numpy()
+    pred[pred != 1] = 0  # only keep the trail class
+    pred[pred == 1] = 255
+    pred = np.array(pred, dtype=np.uint8)
+    route = np.zeros_like(pred)
+    # calculate the center line by taking the average
+    row_num = 0
+    for row in pred:
+        white_pixels = list(np.nonzero(row)[0])
+        if white_pixels:
+            average = (white_pixels[0] + white_pixels[-1]) / 2
+            route[row_num][round(average)] = 255
+        row_num = row_num + 1
+    return route
+
+'''
+    The trailDetector node has two subscriptions (lidar and camera) and one publisher (trail position).
+    After it receives msgs from both lidar and camera, it detects the trail in the image and publishes
+    the corresponding lidar position as the trail location.
+    The msgs are synchronized before processing, using buffers and the sync function.
+    To find the path, the node processes the image, finds a line to follow (by taking the average of the
+    left and right edges of the path), estimates the depth of the lidar points, and chooses the closest
+    point as the goal.
+    v1 assumes that the path points forward and that there is only one path in front.
+'''
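+# Note: ROS header stamps carry sec and nanosec separately, so comparing the
+# nanosec fields alone wraps around every second. sync() below therefore
+# flattens both stamps to total nanoseconds first, along the lines of
+# (illustrative comment only):
+#
+#   def stamp_to_ns(stamp):
+#       return stamp.sec * 1_000_000_000 + stamp.nanosec
+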
+class trailDetector(Node):
+    def __init__(self, model, device):
+        super().__init__('trail_detector')
+        # define trail publisher
+        self.trail_publisher = self.create_publisher(
+            PoseStamped,
+            'trail_location',
+            10)
+
+        # define camera subscription
+        self.camera_subscription = self.create_subscription(
+            Image,
+            'camera',
+            self.camera_callback,
+            10)
+        self.camera_subscription  # prevent unused variable warning
+
+        # define lidar subscription
+        self.lidar_subscription = self.create_subscription(
+            PointCloud2,
+            'velodyne_points',
+            self.lidar_callback,
+            10)
+        self.lidar_subscription  # prevent unused variable warning
+
+        self.bridge = CvBridge()
+
+        # load model and device
+        self.model = model
+        self.device = device
+
+        # buffers to hold msgs for sync
+        self.buffer_size = 30  # 30 is a random choice, to be discussed
+        self.lidar_buffer = []
+        self.camera_buffer = []
+
+    def camera_callback(self, msg):
+        if len(self.camera_buffer) >= self.buffer_size:
+            self.camera_buffer.pop(0)
+        self.camera_buffer.append(msg)
+        self.sync()
+
+    def lidar_callback(self, msg):
+        if len(self.lidar_buffer) >= self.buffer_size:
+            self.lidar_buffer.pop(0)
+        self.lidar_buffer.append(msg)
+        self.sync()
+
+    def sync(self):
+        # if one of the sensors has no msg yet, return
+        if not self.lidar_buffer or not self.camera_buffer:
+            return
+
+        while self.lidar_buffer and self.camera_buffer:
+            lidar_msg = self.lidar_buffer[0]
+            camera_msg = self.camera_buffer[0]
+
+            time_tolerance = 5000000  # nanosec, random choice to be discussed
+            # compare full timestamps (sec + nanosec); nanosec alone wraps every second
+            lidar_time = lidar_msg.header.stamp.sec * 1_000_000_000 + lidar_msg.header.stamp.nanosec
+            camera_time = camera_msg.header.stamp.sec * 1_000_000_000 + camera_msg.header.stamp.nanosec
+            time_difference = abs(lidar_time - camera_time)
+            print(time_difference)
+
+            # if the time difference is within tolerance, pass the pair to trail_callback; otherwise discard the older msg
+            if time_difference <= time_tolerance:
+                self.lidar_buffer.pop(0)
+                self.camera_buffer.pop(0)
+                self.get_logger().info("msgs received!")
+                self.trail_callback(lidar_msg, camera_msg)
+            elif lidar_time > camera_time:
+                self.camera_buffer.pop(0)
+            else:
+                self.lidar_buffer.pop(0)
+
+    def trail_callback(self, lidar_msg, camera_msg):
+        # process lidar msg
+        point_gen = pc2.read_points(
+            lidar_msg, field_names=(
+                "x", "y", "z"), skip_nans=True)
+        points = [[x, y, z] for x, y, z in point_gen]
+        points = np.array(points)
+        points2d = convert_to_camera_frame(points)
+
+        # process camera msg
+        cv_image = self.bridge.imgmsg_to_cv2(camera_msg, desired_encoding='passthrough')
+        route = find_route(self.model, self.device, cv_image)
+        route_indices = list(zip(*np.nonzero(route)))
+
+        if not route_indices:
+            print("No centerline found!")
+            return
+
+        # find the corresponding lidar points using the center line pixels
+        filtered_3dPoints = []
+        for index in route_indices:
+            point = []
+            point.append(index[0])
+            point.append(index[1])
+            point.append(estimate_depth(index[0], index[1], points2d))
+            point_3d = convert_to_lidar_frame(point)
+            filtered_3dPoints.append(point_3d)
+
+        filtered_3dPoints = np.array(filtered_3dPoints)
+        # find the nearest 3d point and set that as the goal
+        distances_sq = filtered_3dPoints[:,0]**2 + filtered_3dPoints[:,1]**2 + filtered_3dPoints[:,2]**2
+        smallest_index = np.argmin(distances_sq)
+
+        # publish message
+        trail_location_msg = PoseStamped()
+        trail_location_msg.header.stamp = lidar_msg.header.stamp
+        trail_location_msg.header.frame_id = "velodyne"
+        # position
+        trail_location_msg.pose.position.x = filtered_3dPoints[smallest_index][0]
+        trail_location_msg.pose.position.y = filtered_3dPoints[smallest_index][1]
+        trail_location_msg.pose.position.z =
filtered_3dPoints[smallest_index][2] + #orientation + yaw = math.atan2(filtered_3dPoints[smallest_index][1], filtered_3dPoints[smallest_index][0]) + trail_location_msg.pose.orientation.x = 0.0 + trail_location_msg.pose.orientation.y = 0.0 + trail_location_msg.pose.orientation.z = math.sin(yaw/2) + trail_location_msg.pose.orientation.w = math.cos(yaw / 2) + self.get_logger().info("location published!") + self.trail_publisher.publish(trail_location_msg) + + + + + +def main(args=None): + print(torch.cuda.is_available()) + device = torch.device("cuda" if torch.cuda.is_available() else "cpu") + model = load_model(device) + + rclpy.init(args=args) + trailDetectorNode = trailDetector(model, device) + rclpy.spin(trailDetectorNode) + + trailDetectorNode.destroy_node() + rclpy.shutdown() + +if __name__ == '__main__': + main() \ No newline at end of file diff --git a/trail_detection_node/trail_detection_node/v2_trailDetectionNode.py b/trail_detection_node/trail_detection_node/v2_trailDetectionNode.py new file mode 100644 index 00000000..309ab765 --- /dev/null +++ b/trail_detection_node/trail_detection_node/v2_trailDetectionNode.py @@ -0,0 +1,233 @@ +import torch +from .model_loader import FCN8s +from PIL import Image as ImagePIL +from torchvision import transforms +import cv2 + +import rclpy +from rclpy.node import Node +from geometry_msgs.msg import PoseStamped +from sensor_msgs.msg import Image, PointCloud2 +import sensor_msgs_py.point_cloud2 as pc2 +import numpy as np +import math +from cv_bridge import CvBridge + +import message_filters + +''' + The transformation matrix as well as the coordinate conversion and depth estimation functions are copied from human_detection_node +''' +camera_transformation_k = np.array([ + [628.5359544,0,676.9575694], + [0,627.7249542,532.7206716], + [0,0,1]]) + +rotation_matrix = np.array([ + [-0.007495781893,-0.0006277316155,0.9999717092], + [-0.9999516401,-0.006361853422,-0.007499625104], + [0.006366381192,-0.9999795662,-0.0005800141927]]) + +rotation_matrix = rotation_matrix.T + +translation_vector = np.array([-0.06024059837, -0.08180891509, -0.3117851288]) +image_width=1280 +image_height=1024 + +def convert_to_lidar_frame(uv_coordinate): + """ + convert 2d camera coordinate + depth into 3d lidar frame + """ + point_cloud = np.empty( (3,) , dtype=float) + point_cloud[2] = uv_coordinate[2] + point_cloud[1] = ( image_height - uv_coordinate[1] )*point_cloud[2] + point_cloud[0] = uv_coordinate[0]*point_cloud[2] + + inverse_camera_transformation_k = np.linalg.inv(camera_transformation_k) + inverse_rotation_matrix = np.linalg.inv(rotation_matrix) + point_cloud = inverse_camera_transformation_k @ point_cloud + point_cloud = inverse_rotation_matrix @ (point_cloud-translation_vector) + return point_cloud + +def convert_to_camera_frame(point_cloud): + """ + convert 3d lidar data into 2d coordinate of the camera frame + depth + """ + length = point_cloud.shape[0] + translation = np.tile(translation_vector, (length, 1)).T + + point_cloud = point_cloud.T + point_cloud = rotation_matrix@point_cloud + translation + point_cloud = camera_transformation_k @ point_cloud + + uv_coordinate = np.empty_like(point_cloud) + + """ + uv = [x/z, y/z, z], and y is opposite so the minus imageheight + """ + uv_coordinate[0] = point_cloud[0] / point_cloud[2] + uv_coordinate[1] = image_height - point_cloud[1] / point_cloud[2] + uv_coordinate[2] = point_cloud[2] + + uv_depth = uv_coordinate[2, :] + filtered_uv_coordinate = uv_coordinate[:, uv_depth >= 0] + return filtered_uv_coordinate + +def 
estimate_depth(x, y, np_2d_array):
+    """
+    estimate the depth by finding points closest to x,y from the 2d array
+    """
+    # Calculate the distance between each point and the target coordinates (x, y)
+    distances_sq = (np_2d_array[0,:] - x) ** 2 + (np_2d_array[1,:] - y) ** 2
+
+    # Find the indices of the k nearest points
+    k = 5  # Number of nearest neighbors we want
+    closest_indices = np.argpartition(distances_sq, k)[:k]
+    pixel_distance_threshold = 2000
+
+    valid_indices = [idx for idx in closest_indices if distances_sq[idx]<=pixel_distance_threshold]
+    if len(valid_indices) == 0:
+        # lidar points usually disappear around 0.4 m
+        distance_where_lidar_stops_working = -1
+        return distance_where_lidar_stops_working
+
+    filtered_indices = np.array(valid_indices)
+    # Get the depth values of the closest points
+    closest_depths = np_2d_array[2,filtered_indices]
+
+    return np.mean(closest_depths)
+
+def load_model(device):
+    model = FCN8s(nclass=6, backbone='vgg16', pretrained_base=True, pretrained=True)
+    model.load_state_dict(torch.load('src/trail_detection_node/trail_detection_node/model/fcn8s_vgg16_pascal_voc.pth'))
+    model = model.to(device)
+    print('Finished loading model!')
+
+    return model
+
+def find_route(model, device, cv_image):
+    PIL_image = ImagePIL.fromarray(cv2.cvtColor(cv_image, cv2.COLOR_BGR2RGB))
+    transform = transforms.Compose([
+        transforms.ToTensor(),
+        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
+    ])
+    image = transform(PIL_image).unsqueeze(0).to(device)
+    with torch.no_grad():
+        output = model(image)
+    # pred is the prediction generated by the model, route holds the center line
+    pred = torch.argmax(output[0], 1).squeeze(0).cpu().data.numpy()
+    pred[pred != 1] = 0  # only keep the trail class
+    pred[pred == 1] = 255
+    pred = np.array(pred, dtype=np.uint8)
+    route = np.zeros_like(pred)
+    # calculate the center line by taking the average
+    row_num = 0
+    for row in pred:
+        white_pixels = list(np.nonzero(row)[0])
+        if white_pixels:
+            average = (white_pixels[0] + white_pixels[-1]) / 2
+            route[row_num][round(average)] = 255
+        row_num = row_num + 1
+    return route
+
+'''
+    The trailDetector node has two subscriptions (lidar and camera) and one publisher (trail position).
+    After it receives msgs from both lidar and camera, it detects the trail in the image and publishes
+    the corresponding lidar position as the trail location.
+    The msgs are synchronized before processing by a message_filters.ApproximateTimeSynchronizer.
+    To find the path, the node processes the image, finds a line to follow (by taking the average of the
+    left and right edges of the path), estimates the depth of the lidar points, and chooses the closest
+    point as the goal.
+    v2 assumes that the path points forward and that there is only one path in front.
+'''
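+# estimate_depth above relies on np.argpartition, which returns the indices of
+# the k smallest squared distances in arbitrary order; a small numeric
+# illustration (comment only, hypothetical values):
+#
+#   pts = np.array([[10.0, 11.0, 50.0, 12.0],   # u
+#                   [10.0, 12.0, 50.0,  7.0],   # v
+#                   [ 2.0,  2.1,  7.0,  2.2]])  # depth
+#   d2 = (pts[0] - 10.0) ** 2 + (pts[1] - 10.0) ** 2   # [0, 5, 3200, 13]
+#   nearest = np.argpartition(d2, 2)[:2]               # columns 0 and 1
+#   depth = pts[2, nearest].mean()                     # (2.0 + 2.1) / 2
+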
+class trailDetector(Node):
+    def __init__(self, model, device):
+        super().__init__('trail_detector')
+        # define trail publisher
+        self.trail_publisher = self.create_publisher(
+            PoseStamped,
+            'trail_location',
+            10)
+
+        # create subscribers
+        self.image_sub = message_filters.Subscriber(self, Image, 'camera')
+        self.lidar_sub = message_filters.Subscriber(self, PointCloud2, 'velodyne_points')
+
+        self.bridge = CvBridge()
+
+        # load model and device
+        self.model = model
+        self.device = device
+
+        # create the synchronized callback; keep a reference so the synchronizer is not garbage-collected
+        queue_size = 30
+        self.ts = message_filters.ApproximateTimeSynchronizer([self.image_sub, self.lidar_sub], queue_size, 0.5)
+        self.ts.registerCallback(self.trail_callback)
+
+    def trail_callback(self, camera_msg, lidar_msg):
+        print("Message received!")
+        # process lidar msg
+        point_gen = pc2.read_points(
+            lidar_msg, field_names=(
+                "x", "y", "z"), skip_nans=True)
+        points = [[x, y, z] for x, y, z in point_gen]
+        points = np.array(points)
+        points2d = convert_to_camera_frame(points)
+
+        # process camera msg
+        cv_image = self.bridge.imgmsg_to_cv2(camera_msg, desired_encoding='passthrough')
+        route = find_route(self.model, self.device, cv_image)
+        route_indices = list(zip(*np.nonzero(route)))
+
+        if not route_indices:
+            print("No centerline found!")
+            return
+
+        # find the corresponding lidar points using the center line pixels
+        filtered_3dPoints = []
+        for index in route_indices:
+            point = []
+            point.append(index[0])
+            point.append(index[1])
+            point.append(estimate_depth(index[0], index[1], points2d))
+            point_3d = convert_to_lidar_frame(point)
+            filtered_3dPoints.append(point_3d)
+
+        filtered_3dPoints = np.array(filtered_3dPoints)
+        # find the nearest 3d point and set that as the goal
+        distances_sq = filtered_3dPoints[:,0]**2 + filtered_3dPoints[:,1]**2 + filtered_3dPoints[:,2]**2
+        smallest_index = np.argmin(distances_sq)
+
+        # publish message
+        trail_location_msg = PoseStamped()
+        trail_location_msg.header.stamp = lidar_msg.header.stamp
+        trail_location_msg.header.frame_id = "velodyne"
+        # position
+        trail_location_msg.pose.position.x = filtered_3dPoints[smallest_index][0]
+        trail_location_msg.pose.position.y = filtered_3dPoints[smallest_index][1]
+        trail_location_msg.pose.position.z = filtered_3dPoints[smallest_index][2]
+        # orientation
+        yaw = math.atan2(filtered_3dPoints[smallest_index][1], filtered_3dPoints[smallest_index][0])
+        trail_location_msg.pose.orientation.x = 0.0
+        trail_location_msg.pose.orientation.y = 0.0
+        trail_location_msg.pose.orientation.z = math.sin(yaw / 2)
+        trail_location_msg.pose.orientation.w = math.cos(yaw / 2)
+        self.get_logger().info("location published!")
+        self.trail_publisher.publish(trail_location_msg)
+
+
+def main(args=None):
+    print(torch.cuda.is_available())
+    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+    model = load_model(device)
+
+    rclpy.init(args=args)
+    trailDetectorNode = trailDetector(model, device)
+    rclpy.spin(trailDetectorNode)
+
+    trailDetectorNode.destroy_node()
+    rclpy.shutdown()
+
+if __name__ == '__main__':
+    main()
\ No newline at end of file
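Both nodes publish the goal orientation as a pure yaw rotation about z, for which the quaternion is (x, y, z, w) = (0, 0, sin(yaw/2), cos(yaw/2)), exactly what trail_callback fills in. A quick numeric check, not part of this patch:

    import math

    yaw = math.atan2(1.0, 5.0)                    # goal at (x, y) = (5, 1)
    qz, qw = math.sin(yaw / 2), math.cos(yaw / 2)
    assert abs(qz * qz + qw * qw - 1.0) < 1e-12   # unit quaternion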
diff --git a/trail_detection_node/trail_detection_node/vgg.py b/trail_detection_node/trail_detection_node/vgg.py
new file mode 100644
index 00000000..fe5c163c
--- /dev/null
+++ b/trail_detection_node/trail_detection_node/vgg.py
@@ -0,0 +1,191 @@
+import torch
+import torch.nn as nn
+import torch.utils.model_zoo as model_zoo
+
+__all__ = [
+    'VGG', 'vgg11', 'vgg11_bn', 'vgg13', 'vgg13_bn', 'vgg16', 'vgg16_bn',
+    'vgg19_bn', 'vgg19',
+]
+
+model_urls = {
+    'vgg11': 'https://download.pytorch.org/models/vgg11-bbd30ac9.pth',
+    'vgg13': 'https://download.pytorch.org/models/vgg13-c768596a.pth',
+    'vgg16': 'https://download.pytorch.org/models/vgg16-397923af.pth',
+    'vgg19': 'https://download.pytorch.org/models/vgg19-dcbb9e9d.pth',
+    'vgg11_bn': 'https://download.pytorch.org/models/vgg11_bn-6002323d.pth',
+    'vgg13_bn': 'https://download.pytorch.org/models/vgg13_bn-abd245e5.pth',
+    'vgg16_bn': 'https://download.pytorch.org/models/vgg16_bn-6c64b313.pth',
+    'vgg19_bn': 'https://download.pytorch.org/models/vgg19_bn-c79401a0.pth',
+}
+
+
+class VGG(nn.Module):
+    def __init__(self, features, num_classes=1000, init_weights=True):
+        super(VGG, self).__init__()
+        self.features = features
+        self.avgpool = nn.AdaptiveAvgPool2d((7, 7))
+        self.classifier = nn.Sequential(
+            nn.Linear(512 * 7 * 7, 4096),
+            nn.ReLU(True),
+            nn.Dropout(),
+            nn.Linear(4096, 4096),
+            nn.ReLU(True),
+            nn.Dropout(),
+            nn.Linear(4096, num_classes)
+        )
+        if init_weights:
+            self._initialize_weights()
+
+    def forward(self, x):
+        x = self.features(x)
+        x = self.avgpool(x)
+        x = x.view(x.size(0), -1)
+        x = self.classifier(x)
+        return x
+
+    def _initialize_weights(self):
+        for m in self.modules():
+            if isinstance(m, nn.Conv2d):
+                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
+                if m.bias is not None:
+                    nn.init.constant_(m.bias, 0)
+            elif isinstance(m, nn.BatchNorm2d):
+                nn.init.constant_(m.weight, 1)
+                nn.init.constant_(m.bias, 0)
+            elif isinstance(m, nn.Linear):
+                nn.init.normal_(m.weight, 0, 0.01)
+                nn.init.constant_(m.bias, 0)
+
+
+def make_layers(cfg, batch_norm=False):
+    layers = []
+    in_channels = 3
+    for v in cfg:
+        if v == 'M':
+            layers += [nn.MaxPool2d(kernel_size=2, stride=2)]
+        else:
+            conv2d = nn.Conv2d(in_channels, v, kernel_size=3, padding=1)
+            if batch_norm:
+                layers += [conv2d, nn.BatchNorm2d(v), nn.ReLU(inplace=True)]
+            else:
+                layers += [conv2d, nn.ReLU(inplace=True)]
+            in_channels = v
+    return nn.Sequential(*layers)
+
+
+cfg = {
+    'A': [64, 'M', 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'],
+    'B': [64, 64, 'M', 128, 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'],
+    'D': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512, 'M', 512, 512, 512, 'M'],
+    'E': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 256, 'M', 512, 512, 512, 512, 'M', 512, 512, 512, 512, 'M'],
+}
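+# make_layers flattens a cfg list into one nn.Sequential of Conv/ReLU/MaxPool
+# modules. For cfg['D'] (VGG-16 without batch norm) the max-pools land at
+# indices 16, 23 and 30, which is what FCN8s slices on ([:17], [17:24], [24:]).
+# A quick check (illustrative comment only):
+#
+#   feats = vgg16(pretrained=False).features
+#   print(type(feats[16]).__name__, type(feats[23]).__name__)  # MaxPool2d MaxPool2d
+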
+ """VGG 13-layer model (configuration "B") with batch normalization + Args: + pretrained (bool): If True, returns a model pre-trained on ImageNet + """ + if pretrained: + kwargs['init_weights'] = False + model = VGG(make_layers(cfg['B'], batch_norm=True), **kwargs) + if pretrained: + model.load_state_dict(model_zoo.load_url(model_urls['vgg13_bn'])) + return model + + +def vgg16(pretrained=False, **kwargs): + """VGG 16-layer model (configuration "D") + Args: + pretrained (bool): If True, returns a model pre-trained on ImageNet + """ + if pretrained: + kwargs['init_weights'] = False + model = VGG(make_layers(cfg['D']), **kwargs) + if pretrained: + model.load_state_dict(model_zoo.load_url(model_urls['vgg16'])) + return model + + +def vgg16_bn(pretrained=False, **kwargs): + """VGG 16-layer model (configuration "D") with batch normalization + Args: + pretrained (bool): If True, returns a model pre-trained on ImageNet + """ + if pretrained: + kwargs['init_weights'] = False + model = VGG(make_layers(cfg['D'], batch_norm=True), **kwargs) + if pretrained: + model.load_state_dict(model_zoo.load_url(model_urls['vgg16_bn'])) + return model + + +def vgg19(pretrained=False, **kwargs): + """VGG 19-layer model (configuration "E") + Args: + pretrained (bool): If True, returns a model pre-trained on ImageNet + """ + if pretrained: + kwargs['init_weights'] = False + model = VGG(make_layers(cfg['E']), **kwargs) + if pretrained: + model.load_state_dict(model_zoo.load_url(model_urls['vgg19'])) + return model + + +def vgg19_bn(pretrained=False, **kwargs): + """VGG 19-layer model (configuration 'E') with batch normalization + Args: + pretrained (bool): If True, returns a model pre-trained on ImageNet + """ + if pretrained: + kwargs['init_weights'] = False + model = VGG(make_layers(cfg['E'], batch_norm=True), **kwargs) + if pretrained: + model.load_state_dict(model_zoo.load_url(model_urls['vgg19_bn'])) + return model + + +if __name__ == '__main__': + img = torch.randn((4, 3, 480, 480)) + model = vgg16(pretrained=False) + out = model(img) diff --git a/trail_detection_node/trail_detection_node/visualizer_v1_trailDetectionNode.py b/trail_detection_node/trail_detection_node/visualizer_v1_trailDetectionNode.py new file mode 100644 index 00000000..616b689d --- /dev/null +++ b/trail_detection_node/trail_detection_node/visualizer_v1_trailDetectionNode.py @@ -0,0 +1,431 @@ +import torch +from .model_loader import FCN8s, FCN32s +from PIL import Image as ImagePIL +from torchvision import transforms +import cv2 +import os + +import rclpy +from rclpy.node import Node +from geometry_msgs.msg import PoseStamped +from sensor_msgs.msg import Image, PointCloud2 +import sensor_msgs_py.point_cloud2 as pc2 +import numpy as np +import math +from cv_bridge import CvBridge + +''' + The transformation matrix as well as the coordinate conversion and depth estimation functions are copied from human_detection_node +''' +camera_transformation_k = np.array([ + [628.5359544,0,676.9575694], + [0,627.7249542,532.7206716], + [0,0,1]]) + +rotation_matrix = np.array([ + [-0.007495781893,-0.0006277316155,0.9999717092], + [-0.9999516401,-0.006361853422,-0.007499625104], + [0.006366381192,-0.9999795662,-0.0005800141927]]) + +rotation_matrix = rotation_matrix.T + +translation_vector = np.array([-0.06024059837, -0.08180891509, -0.3117851288]) +image_width=1280 +image_height=1024 + +def convert_to_lidar_frame(uv_coordinate): + """ + convert 2d camera coordinate + depth into 3d lidar frame + """ + point_cloud = np.empty( (3,) , dtype=float) + 
+    point_cloud[2] = uv_coordinate[2]
+    point_cloud[1] = ( image_height - uv_coordinate[1] )*point_cloud[2]
+    point_cloud[0] = uv_coordinate[0]*point_cloud[2]
+
+    inverse_camera_transformation_k = np.linalg.inv(camera_transformation_k)
+    inverse_rotation_matrix = np.linalg.inv(rotation_matrix)
+    point_cloud = inverse_camera_transformation_k @ point_cloud
+    point_cloud = inverse_rotation_matrix @ (point_cloud-translation_vector)
+    return point_cloud
+
+def convert_to_camera_frame(point_cloud):
+    """
+    convert 3d lidar data into 2d coordinate of the camera frame + depth
+    """
+    length = point_cloud.shape[0]
+    translation = np.tile(translation_vector, (length, 1)).T
+
+    point_cloud = point_cloud.T
+    point_cloud = rotation_matrix@point_cloud + translation
+    point_cloud = camera_transformation_k @ point_cloud
+
+    uv_coordinate = np.empty_like(point_cloud)
+
+    """
+    uv = [x/z, y/z, z], and y is opposite so the minus imageheight
+    """
+    uv_coordinate[0] = point_cloud[0] / point_cloud[2]
+    uv_coordinate[1] = image_height - point_cloud[1] / point_cloud[2]
+    uv_coordinate[2] = point_cloud[2]
+
+    uv_depth = uv_coordinate[2, :]
+    filtered_uv_coordinate = uv_coordinate[:, uv_depth >= 0]
+    return filtered_uv_coordinate
+
+def estimate_depth(x, y, np_2d_array):
+    """
+    estimate the depth by finding points closest to x,y from the 2d array
+    """
+    # Calculate the distance between each point and the target coordinates (x, y)
+    distances_sq = (np_2d_array[0,:] - x) ** 2 + (np_2d_array[1,:] - y) ** 2
+
+    # Find the indices of the k nearest points
+    k = 5  # Number of nearest neighbors we want
+    closest_indices = np.argpartition(distances_sq, k)[:k]
+    pixel_distance_threshold = 2000
+
+    valid_indices = [idx for idx in closest_indices if distances_sq[idx]<=pixel_distance_threshold]
+    if len(valid_indices) == 0:
+        # lidar points usually disappear around 0.4 m
+        distance_where_lidar_stops_working = -1
+        return distance_where_lidar_stops_working
+
+    filtered_indices = np.array(valid_indices)
+    # Get the depth values of the closest points
+    closest_depths = np_2d_array[2,filtered_indices]
+
+    return np.mean(closest_depths)
+
+def load_model(device):
+    model = FCN32s(nclass=2, backbone='vgg16', pretrained_base=True, pretrained=True)
+    # model_location = '32s_batch8/fcn32s_vgg16_pascal_voc_best_model.pth'
+    model_location = '500epoch/fcn32s_vgg16_pascal_voc.pth'
+    # map the checkpoint onto whatever device is in use instead of hard-coding cuda:0
+    if os.path.isfile(f'src/trail_detection_node/trail_detection_node/model/{model_location}'):
+        model.load_state_dict(torch.load(f'src/trail_detection_node/trail_detection_node/model/{model_location}', map_location=device))
+    else:
+        model.load_state_dict(torch.load(f'trail_detection_node/model/{model_location}', map_location=device))
+    model = model.to(device)
+    print('Finished loading model!')
+
+    return model
+
+def find_route(model, device, cv_image):
+    PIL_image = ImagePIL.fromarray(cv2.cvtColor(cv_image, cv2.COLOR_BGR2RGB))
+    transform = transforms.Compose([
+        transforms.ToTensor(),
+        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
+    ])
+    image = transform(PIL_image).unsqueeze(0).to(device)
+    with torch.no_grad():
+        output = model(image)
+    # pred is the prediction generated by the model, route holds the center line
+    pred = torch.argmax(output[0], 1).squeeze(0).cpu().data.numpy()
+    pred[pred == 0] = 255  # class 0 is the trail here
+    pred[pred == 1] = 0
+    pred[pred == 2] = 0
+    pred[pred == 3] = 0
+    pred[pred == 4] = 0
+    pred[pred == 5] = 0
+    pred = np.array(pred, dtype=np.uint8)
+
+    # # prediction visualize
+    # cv2.imshow('prediction',pred)
+    # cv2.waitKey(25)
+
+    route = np.zeros_like(pred)
+    # calculate the center line by taking the average
+    row_num = 0
+    for row in pred:
+        white_pixels = list(np.nonzero(row)[0])
+        if white_pixels:
+            average = (white_pixels[0] + white_pixels[-1]) / 2
+            route[row_num][round(average)] = 255
+        row_num = row_num + 1
+    return route, pred
+
+def equalize_hist_rgb(img):
+    # Split the image into its color channels
+    r, g, b = cv2.split(img)
+
+    # Equalize the histograms for each channel
+    r_eq = cv2.equalizeHist(r)
+    g_eq = cv2.equalizeHist(g)
+    b_eq = cv2.equalizeHist(b)
+
+    # Merge the channels
+    img_eq = cv2.merge((r_eq, g_eq, b_eq))
+    # img_eq = cv2.fastNlMeansDenoisingColored(img_eq,None,10,20,7,42)
+    # img_eq = cv2.GaussianBlur(img_eq, (25,25), 0)
+    return img_eq
+
+'''
+    The trailDetector node has two subscriptions (lidar and camera) and one publisher (trail position).
+    After it receives msgs from both lidar and camera, it detects the trail in the image and publishes
+    the corresponding lidar position as the trail location.
+    The msgs are synchronized before processing, using buffers and the sync function; an only-camera
+    mode that just visualizes the prediction is also available.
+    To find the path, the node processes the image, finds a line to follow (by taking the average of the
+    left and right edges of the path), estimates the depth of the lidar points, and chooses the closest
+    point as the goal.
+    v1 assumes that the path points forward and that there is only one path in front.
+'''
+class trailDetector(Node):
+    def __init__(self, model, device):
+        super().__init__('trail_detector')
+        # define trail publisher
+        self.trail_publisher = self.create_publisher(
+            PoseStamped,
+            'trail_location',
+            10)
+
+        # define camera subscription
+        self.camera_subscription = self.create_subscription(
+            Image,
+            'camera',
+            self.camera_callback,
+            10)
+        self.camera_subscription  # prevent unused variable warning
+
+        # define lidar subscription
+        self.lidar_subscription = self.create_subscription(
+            PointCloud2,
+            'velodyne_points',
+            self.lidar_callback,
+            10)
+        self.lidar_subscription  # prevent unused variable warning
+
+        self.bridge = CvBridge()
+
+        # load model and device
+        self.model = model
+        self.device = device
+
+        # buffers to hold msgs for sync
+        self.buffer_size = 50  # an arbitrary choice, to be discussed
+        self.lidar_buffer = []
+        self.camera_buffer = []
+        self.only_camera_mode = False
+
+    def camera_callback(self, msg):
+        if len(self.camera_buffer) >= self.buffer_size:
+            self.camera_buffer.pop(0)
+        self.camera_buffer.append(msg)
+        self.sync()
+
+    def lidar_callback(self, msg):
+        if len(self.lidar_buffer) >= self.buffer_size:
+            self.lidar_buffer.pop(0)
+        self.lidar_buffer.append(msg)
+        self.sync()
+
+    def sync(self):
+        # in only-camera mode, process camera msgs directly; otherwise wait until both sensors have msgs
+        if self.only_camera_mode and self.camera_buffer:
+            camera_msg = self.camera_buffer[0]
+            self.camera_buffer.pop(0)
+            self.only_camera_callback(camera_msg)
+        elif not self.lidar_buffer or not self.camera_buffer:
+            return
+
+        while self.lidar_buffer and self.camera_buffer:
+            lidar_msg = self.lidar_buffer[0]
+            camera_msg = self.camera_buffer[0]
+
+            time_tolerance = 20000000  # nanosec, random choice to be discussed
+            # compare full timestamps (sec + nanosec); nanosec alone wraps every second
+            lidar_time = lidar_msg.header.stamp.sec * 1_000_000_000 + lidar_msg.header.stamp.nanosec
+            camera_time = camera_msg.header.stamp.sec * 1_000_000_000 + camera_msg.header.stamp.nanosec
+            time_difference = abs(lidar_time - camera_time)
+            # print(time_difference)
+
+            # if the time difference is within tolerance, pass the pair to trail_callback; otherwise discard the older msg
+            if time_difference <= time_tolerance:
+                self.lidar_buffer.pop(0)
+                self.camera_buffer.pop(0)
+                self.get_logger().info("msgs received!")
+                self.trail_callback(lidar_msg, camera_msg)
+            elif lidar_time > camera_time:
+                self.camera_buffer.pop(0)
+            else:
+                self.lidar_buffer.pop(0)
+
+    def trail_callback(self, lidar_msg, camera_msg):
+        # process lidar msg
+        point_gen = pc2.read_points(
+            lidar_msg, field_names=(
+                "x", "y", "z"), skip_nans=True)
+        points = [[x, y, z] for x, y, z in point_gen]
+        points = np.array(points)
+        points2d = convert_to_camera_frame(points)
+
+        # process camera msg
+        cv_image = self.bridge.imgmsg_to_cv2(camera_msg, desired_encoding='passthrough')
+
+        # increase brightness
+        M = np.ones(cv_image.shape, dtype = 'uint8') * 50  # increase brightness by 50
+        cv_image = cv2.add(cv_image, M)
+
+        # # image visualizer
+        # cv2.imshow('raw image',cv_image)
+        # cv2.waitKey(25)
+
+        # find_route returns (route, pred) in this file, so unpack the pair
+        route, _ = find_route(self.model, self.device, cv_image)
+
+        # # visualize route purely
+        # cv2.imshow("white_route",route)
+        # cv2.waitKey(25)
+
+        route_indices = list(zip(*np.nonzero(route)))
+        if not route_indices:
+            print("No centerline found!")
+            return
+
+        # # filter points that have no lidar points near them
+        # filtered_route_indices = []
+        # for index in route_indices:
+        #     point = []
+        #     point.append(index[0])
+        #     point.append(index[1])
+        #     point.append(estimate_depth(index[0], index[1], points2d))
+        #     if point[2] == -1:
+        #         continue
+        #     else:
+        #         filtered_route_indices.append(point)
+
+        # find the corresponding lidar points using the center line pixels
+        filtered_3dPoints = []
+        # for index in filtered_route_indices:
+        for index in route_indices:
+            point = []
+            # point.append(index[0])
+            # point.append(index[1])
+            # point.append(index[2])
+
+            point.append(index[0])
+            point.append(index[1])
+            point.append(estimate_depth(index[0], index[1], points2d))
+
+            point_3d = convert_to_lidar_frame(point)
+            filtered_3dPoints.append(point_3d)
+
+        filtered_3dPoints = np.array(filtered_3dPoints)
+        # find the nearest 3d point and set that as the goal
+        distances_sq = filtered_3dPoints[:,0]**2 + filtered_3dPoints[:,1]**2 + filtered_3dPoints[:,2]**2
+        smallest_index = np.argmin(distances_sq)
+        # print(math.sqrt(distances_sq[smallest_index]))
+        # smallest_index = np.argmin(filtered_3dPoints[:,2])
+
+        # visualize the processed image
+        visualize_cv_image = cv_image
+        print(f"{visualize_cv_image.shape[0]}")
+        circle_x = route_indices[smallest_index][0]
+        circle_y = route_indices[smallest_index][1]
+
+        # # visualize the lidar points in image
+        # uv_x, uv_y, uv_z = points2d
+        # for index_lidar in range(points2d.shape[1]):
+        #     # print(f"{int(uv_x[index_lidar])},{int(uv_y[index_lidar])}")
+        #     cv2.circle(visualize_cv_image, (int(uv_x[index_lidar]), int(image_height - uv_y[index_lidar])), radius=5, color=(255, 0, 0), thickness=-1)
+
+        # visualize center line in image
+        for index_circle in range(len(route_indices)):
+            if index_circle == smallest_index:
+                continue
+            else:
+                red_circle_x = route_indices[index_circle][0]
+                red_circle_y = route_indices[index_circle][1]
+                cv2.circle(visualize_cv_image, (red_circle_y, red_circle_x), radius=5, color=(0, 0, 255), thickness=-1)
+
+        # visualize the chosen point in image
+        cv2.circle(visualize_cv_image, (circle_y, circle_x), radius=7, color=(0, 255, 0), thickness=-1)
+        cv2.circle(visualize_cv_image, (0, 0), radius=12, color=(0, 255, 0), thickness=-1)
+
+        cv2.imshow('circled image',visualize_cv_image)
+        cv2.waitKey(25)
+
+        # publish message
+        trail_location_msg = PoseStamped()
+        trail_location_msg.header.stamp = lidar_msg.header.stamp
+        trail_location_msg.header.frame_id = "velodyne"
+        # position
+        trail_location_msg.pose.position.x = filtered_3dPoints[smallest_index][0]
+        trail_location_msg.pose.position.y = filtered_3dPoints[smallest_index][1]
+        trail_location_msg.pose.position.z = filtered_3dPoints[smallest_index][2]
+        # orientation
+        yaw = math.atan2(filtered_3dPoints[smallest_index][1], filtered_3dPoints[smallest_index][0])
+        trail_location_msg.pose.orientation.x = 0.0
+        trail_location_msg.pose.orientation.y = 0.0
+        trail_location_msg.pose.orientation.z = math.sin(yaw / 2)
+        trail_location_msg.pose.orientation.w = math.cos(yaw / 2)
+        self.get_logger().info("location published!")
+        self.trail_publisher.publish(trail_location_msg)
+
+    def only_camera_callback(self, camera_msg):
+        # process camera msg
+        cv_image = self.bridge.imgmsg_to_cv2(camera_msg, desired_encoding='passthrough')
+        # cv_image = cv2.resize(cv_image,(480,480))
+
+        # cv_image = cv2.fastNlMeansDenoisingColored(cv_image,None,5,5,5,11)
+
+        # # histogram equalization method 1
+        # cv_image = cv2.cvtColor(cv_image, cv2.COLOR_BGR2YCrCb)
+        # cv_image[:,:,0] = cv2.equalizeHist(cv_image[:,:,0])
+        # cv_image = cv2.cvtColor(cv_image, cv2.COLOR_YCrCb2BGR)
+
+        # # histogram equalization method 2
+        # cv_image = equalize_hist_rgb(cv_image)
+
+        # # histogram equalization method 3 CLAHE
+        # image_bw = cv2.cvtColor(cv_image, cv2.COLOR_BGR2GRAY)
+        # clahe = cv2.createCLAHE(clipLimit=5)
+        # cv_image = clahe.apply(image_bw) + 30
+
+        # # histogram equalization method 4 CLAHE
+        # cla = cv2.createCLAHE(clipLimit=4.0)
+        # H, S, V = cv2.split(cv2.cvtColor(cv_image, cv2.COLOR_BGR2HSV))
+        # eq_V = cla.apply(V)
+        # cv_image = cv2.cvtColor(cv2.merge([H, S, eq_V]), cv2.COLOR_HSV2BGR)
+
+        # # increase brightness
+        # M = np.ones(cv_image.shape, dtype = 'uint8') * 50  # increase brightness by 50
+        # cv_image = cv2.add(cv_image, M)
+
+        # # image visualizer
+        # cv2.imshow('raw image',cv_image)
+        # cv2.waitKey(25)
+
+        route, pred = find_route(self.model, self.device, cv_image)
+
+        # # visualize route purely
+        # cv2.imshow("white_route",route)
+        # cv2.waitKey(25)
+
+        # brighten the pixels that are predicted as road
+        sign = cv2.cvtColor(pred, cv2.COLOR_GRAY2BGR)
+        cv_image = cv2.add(cv_image, sign)
+
+        route_indices = list(zip(*np.nonzero(route)))
+        if not route_indices:
+            print("No centerline found!")
+            return
+
+        # for index_circle in range(len(route_indices)):
+        #     red_circle_x = route_indices[index_circle][0]
+        #     red_circle_y = route_indices[index_circle][1]
+        #     cv2.circle(cv_image, (red_circle_y, red_circle_x), radius=5, color=(0, 0, 255), thickness=-1)
+
+        cv2.imshow('circled image',cv_image)
+        cv2.waitKey(25)
+
+
+def main(args=None):
+    print(torch.cuda.is_available())
+    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+    model = load_model(device)
+
+    rclpy.init(args=args)
+    trailDetectorNode = trailDetector(model, device)
+    rclpy.spin(trailDetectorNode)
+
+    trailDetectorNode.destroy_node()
+    rclpy.shutdown()
+    cv2.destroyAllWindows()
+
+if __name__ == '__main__':
+    main()
diff --git a/trail_detection_node/trail_detection_node/visualizer_v2_trailDetectionNode.py b/trail_detection_node/trail_detection_node/visualizer_v2_trailDetectionNode.py
new file mode 100644
index 00000000..5055d3f4
--- /dev/null
+++ b/trail_detection_node/trail_detection_node/visualizer_v2_trailDetectionNode.py
@@ -0,0 +1,392 @@
+import torch
+from .model_loader import FCN8s, FCN32s
+from PIL import Image as ImagePIL
+from torchvision import transforms
+import cv2
+import os
+
+import rclpy
+from rclpy.node import Node
+from geometry_msgs.msg import PoseStamped
+from sensor_msgs.msg import Image, PointCloud2
+import sensor_msgs_py.point_cloud2 as pc2
+import numpy as np
+import math
+from cv_bridge import CvBridge
+
+import message_filters
+from scipy.ndimage import grey_erosion
+'''
+    The transformation matrix as well as the coordinate conversion and depth estimation functions are copied from human_detection_node
+'''
+camera_transformation_k = np.array([
+    [628.5359544,0,676.9575694],
+    [0,627.7249542,532.7206716],
+    [0,0,1]])
+
+rotation_matrix = np.array([
+    [-0.007495781893,-0.0006277316155,0.9999717092],
+    [-0.9999516401,-0.006361853422,-0.007499625104],
+    [0.006366381192,-0.9999795662,-0.0005800141927]])
+
+rotation_matrix = rotation_matrix.T
+
+translation_vector = np.array([-0.06024059837, -0.08180891509, -0.3117851288])
+image_width=1280
+image_height=1024
+
+def convert_to_lidar_frame(uv_coordinate):
+    """
+    convert 2d camera coordinate + depth into 3d lidar frame
+    """
+    point_cloud = np.empty( (3,) , dtype=float)
+    point_cloud[2] = uv_coordinate[2]
+    point_cloud[1] = ( image_height - uv_coordinate[1] )*point_cloud[2]
+    point_cloud[0] = uv_coordinate[0]*point_cloud[2]
+
+    inverse_camera_transformation_k = np.linalg.inv(camera_transformation_k)
+    inverse_rotation_matrix = np.linalg.inv(rotation_matrix)
+    point_cloud = inverse_camera_transformation_k @ point_cloud
+    point_cloud = inverse_rotation_matrix @ (point_cloud-translation_vector)
+    return point_cloud
+
+def convert_to_camera_frame(point_cloud):
+    """
+    convert 3d lidar data into 2d coordinate of the camera frame + depth
+    """
+    length = point_cloud.shape[0]
+    translation = np.tile(translation_vector, (length, 1)).T
+
+    point_cloud = point_cloud.T
+    point_cloud = rotation_matrix@point_cloud + translation
+    point_cloud = camera_transformation_k @ point_cloud
+
+    uv_coordinate = np.empty_like(point_cloud)
+
+    """
+    uv = [x/z, y/z, z], and y is opposite so the minus imageheight
+    """
+    uv_coordinate[0] = point_cloud[0] / point_cloud[2]
+    uv_coordinate[1] = image_height - point_cloud[1] / point_cloud[2]
+    uv_coordinate[2] = point_cloud[2]
+
+    uv_depth = uv_coordinate[2, :]
+    filtered_uv_coordinate = uv_coordinate[:, uv_depth >= 0]
+    return filtered_uv_coordinate
+
+def estimate_depth(x, y, np_2d_array):
+    """
+    estimate the depth by finding points closest to x,y from the 2d array
+    """
+    # Calculate the distance between each point and the target coordinates (x, y)
+    distances_sq = (np_2d_array[0,:] - x) ** 2 + (np_2d_array[1,:] - y) ** 2
+
+    # Find the indices of the k nearest points
+    k = 5  # Number of nearest neighbors we want
+    closest_indices = np.argpartition(distances_sq, k)[:k]
+    pixel_distance_threshold = 2000
+
+    valid_indices = [idx for idx in closest_indices if distances_sq[idx]<=pixel_distance_threshold]
+    if len(valid_indices) == 0:
+        # lidar points usually disappear around 0.4 m
+        distance_where_lidar_stops_working = -1
+        return distance_where_lidar_stops_working
+
+    filtered_indices = np.array(valid_indices)
+    # Get the depth values of the closest points
+    closest_depths = np_2d_array[2,filtered_indices]
+
+    return np.mean(closest_depths)
+
+def load_model(device):
+    model = FCN32s(nclass=2, backbone='vgg16', pretrained_base=True, pretrained=True)
+    # model_location = '32s_batch8/fcn32s_vgg16_pascal_voc_best_model.pth'
+    model_location = '500epoch/fcn32s_vgg16_pascal_voc.pth'
+    if os.path.isfile(f'src/trail_detection_node/trail_detection_node/model/{model_location}'):
+        # map the checkpoint onto whatever device is in use instead of hard-coding cuda:0
+        model.load_state_dict(torch.load(f'src/trail_detection_node/trail_detection_node/model/{model_location}', map_location=device))
+    else:
+        model.load_state_dict(torch.load(f'trail_detection_node/model/{model_location}', map_location=device))
+    model = model.to(device)
+    print('Finished loading model!')
+
+    return model
+
+def find_route(model, device, cv_image):
+    PIL_image = ImagePIL.fromarray(cv2.cvtColor(cv_image, cv2.COLOR_BGR2RGB))
+    transform = transforms.Compose([
+        transforms.ToTensor(),
+        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
+    ])
+    image = transform(PIL_image).unsqueeze(0).to(device)
+    with torch.no_grad():
+        output = model(image)
+    # pred is the prediction generated by the model, route holds the center line
+    pred = torch.argmax(output[0], 1).squeeze(0).cpu().data.numpy()
+    pred[pred == 0] = 255  # class 0 is the trail here
+    pred[pred == 1] = 0
+    pred = np.array(pred, dtype=np.uint8)
+
+    # erode the mask to remove small speckles in the prediction
+    pred = grey_erosion(pred, size=(4,4))
+
+    # # prediction visualize
+    # cv2.imshow('prediction',pred)
+    # cv2.waitKey(25)
+
+    route = np.zeros_like(pred)
+    # calculate the center line by taking the average
+    row_num = 0
+    for row in pred:
+        white_pixels = list(np.nonzero(row)[0])
+        if white_pixels:
+            average = (white_pixels[0] + white_pixels[-1]) / 2
+            route[row_num][round(average)] = 255
+        row_num = row_num + 1
+    return route, pred
+
+def equalize_hist_rgb(img):
+    # Split the image into its color channels
+    r, g, b = cv2.split(img)
+
+    # Equalize the histograms for each channel
+    r_eq = cv2.equalizeHist(r)
+    g_eq = cv2.equalizeHist(g)
+    b_eq = cv2.equalizeHist(b)
+
+    # Merge the channels
+    img_eq = cv2.merge((r_eq, g_eq, b_eq))
+    # img_eq = cv2.fastNlMeansDenoisingColored(img_eq,None,10,20,7,42)
+    # img_eq = cv2.GaussianBlur(img_eq, (25,25), 0)
+    return img_eq
+
+'''
+    The trailDetector node has two subscriptions (lidar and camera) and one publisher (trail position).
+    After it receives msgs from both lidar and camera, it detects the trail in the image and publishes
+    the corresponding lidar position as the trail location.
+    In lidar-and-camera mode the msgs are synchronized by a message_filters.ApproximateTimeSynchronizer;
+    an only-camera mode that just visualizes the prediction is also available.
+    To find the path, the node processes the image, finds a line to follow (by taking the average of the
+    left and right edges of the path), estimates the depth of the lidar points, and chooses the closest
+    point as the goal.
+    v2 assumes that the path points forward and that there is only one path in front.
+'''
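+# find_route above walks the mask row by row and marks the midpoint between the
+# leftmost and rightmost trail pixels. The same center line can be computed
+# without the Python loop; a vectorized sketch (illustrative comment only):
+#
+#   rows = np.nonzero(pred.any(axis=1))[0]       # rows containing trail pixels
+#   first = pred[rows].argmax(axis=1)            # leftmost nonzero column
+#   last = pred.shape[1] - 1 - pred[rows, ::-1].argmax(axis=1)
+#   route = np.zeros_like(pred)
+#   route[rows, np.rint((first + last) / 2).astype(int)] = 255
+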
+class trailDetector(Node):
+    def __init__(self, model, device):
+        super().__init__('trail_detector')
+        self.only_camera_mode = True
+        self.bridge = CvBridge()
+
+        # load model and device
+        self.model = model
+        self.device = device
+
+        # define trail publisher
+        self.trail_publisher = self.create_publisher(
+            PoseStamped,
+            'trail_location',
+            10)
+
+        if self.only_camera_mode:
+            # define camera subscription
+            print("Mode: Only Camera")
+            self.camera_subscription = self.create_subscription(
+                Image,
+                'camera',
+                self.only_camera_callback,
+                10)
+            self.camera_subscription  # prevent unused variable warning
+        else:
+            # create subscribers and synchronizer; keep a reference so it is not garbage-collected
+            print("Mode: Lidar and Camera")
+            self.image_sub = message_filters.Subscriber(self, Image, 'camera')
+            self.lidar_sub = message_filters.Subscriber(self, PointCloud2, 'velodyne_points')
+            queue_size = 30
+            self.ts = message_filters.ApproximateTimeSynchronizer([self.image_sub, self.lidar_sub], queue_size, 0.5)
+            self.ts.registerCallback(self.trail_callback)
+
+    def trail_callback(self, camera_msg, lidar_msg):
+        print("Message received!")
+        # process lidar msg
+        point_gen = pc2.read_points(
+            lidar_msg, field_names=(
+                "x", "y", "z"), skip_nans=True)
+        points = [[x, y, z] for x, y, z in point_gen]
+        points = np.array(points)
+        points2d = convert_to_camera_frame(points)
+
+        # process camera msg
+        cv_image = self.bridge.imgmsg_to_cv2(camera_msg, desired_encoding='passthrough')
+
+        # # increase brightness
+        # M = np.ones(cv_image.shape, dtype = 'uint8') * 50  # increase brightness by 50
+        # cv_image = cv2.add(cv_image, M)
+
+        # # image visualizer
+        # cv2.imshow('raw image',cv_image)
+        # cv2.waitKey(25)
+
+        route, _ = find_route(self.model, self.device, cv_image)
+
+        # # visualize route purely
+        # cv2.imshow("white_route",route)
+        # cv2.waitKey(25)
+
+        route_indices = list(zip(*np.nonzero(route)))
+        if not route_indices:
+            print("No centerline found!")
+            return
+
+        # filter points that have no lidar points near them
+        filtered_route_indices = []
+        for index in route_indices:
+            point = []
+            point.append(index[0])
+            point.append(index[1])
+            point.append(estimate_depth(index[0], index[1], points2d))
+            if point[2] == -1:
+                continue
+            else:
+                filtered_route_indices.append(point)
+
+        if not filtered_route_indices:
+            print("No centerline points with lidar support!")
+            return
+
+        # find the corresponding lidar points using the center line pixels
+        filtered_3dPoints = []
+        for index in filtered_route_indices:
+            # for index in route_indices:
+            point = []
+            point.append(index[0])
+            point.append(index[1])
+            point.append(index[2])
+
+            # point.append(index[0])
+            # point.append(index[1])
+            # point.append(estimate_depth(index[0], index[1], points2d))
+
+            point_3d = convert_to_lidar_frame(point)
+            filtered_3dPoints.append(point_3d)
+
+        filtered_3dPoints = np.array(filtered_3dPoints)
+        # find the nearest 3d point and set that as the goal
+        distances_sq = filtered_3dPoints[:,0]**2 + filtered_3dPoints[:,1]**2 + filtered_3dPoints[:,2]**2
+        smallest_index = np.argmin(distances_sq)
+        # print(math.sqrt(distances_sq[smallest_index]))
+        # smallest_index = np.argmin(filtered_3dPoints[:,2])
+
+        # visualize the processed image; index into filtered_route_indices so the
+        # chosen pixel matches filtered_3dPoints[smallest_index]
+        visualize_cv_image = cv_image
+        print(f"{visualize_cv_image.shape[0]}")
+        circle_x = filtered_route_indices[smallest_index][0]
+        circle_y = filtered_route_indices[smallest_index][1]
+
+        # # visualize the lidar points in image
+        # uv_x, uv_y, uv_z = points2d
+        # for index_lidar in range(points2d.shape[1]):
+        #     # print(f"{int(uv_x[index_lidar])},{int(uv_y[index_lidar])}")
+        #     cv2.circle(visualize_cv_image, (int(uv_x[index_lidar]), int(image_height - uv_y[index_lidar])), radius=5, color=(255, 0, 0), thickness=-1)
+
+        # visualize center line in image
+        for index_circle in range(len(filtered_route_indices)):
+            if index_circle == smallest_index:
+                continue
+            else:
+                red_circle_x = filtered_route_indices[index_circle][0]
+                red_circle_y = filtered_route_indices[index_circle][1]
+                # cv2.circle expects (x, y) = (column, row)
+                cv2.circle(visualize_cv_image, (red_circle_y, red_circle_x), radius=5, color=(0, 0, 255), thickness=-1)
+
+        # visualize the chosen point in image
+        cv2.circle(visualize_cv_image, (circle_y, circle_x), radius=7, color=(0, 255, 0), thickness=-1)
+        cv2.circle(visualize_cv_image, (0, 0), radius=12, color=(0, 255, 0), thickness=-1)
+
+        cv2.imshow('circled image',visualize_cv_image)
+        cv2.waitKey(25)
+
+        # publish message
+        trail_location_msg = PoseStamped()
+        trail_location_msg.header.stamp = lidar_msg.header.stamp
+        trail_location_msg.header.frame_id = "velodyne"
+        # position
+        trail_location_msg.pose.position.x = filtered_3dPoints[smallest_index][0]
+        trail_location_msg.pose.position.y = filtered_3dPoints[smallest_index][1]
+        trail_location_msg.pose.position.z = filtered_3dPoints[smallest_index][2]
+        # orientation
+        yaw = math.atan2(filtered_3dPoints[smallest_index][1], filtered_3dPoints[smallest_index][0])
+        trail_location_msg.pose.orientation.x = 0.0
+        trail_location_msg.pose.orientation.y = 0.0
+        trail_location_msg.pose.orientation.z = math.sin(yaw / 2)
+        trail_location_msg.pose.orientation.w = math.cos(yaw / 2)
+        self.get_logger().info("location published!")
+        self.trail_publisher.publish(trail_location_msg)
+
+    def only_camera_callback(self, camera_msg):
+        # process camera msg
+        cv_image = self.bridge.imgmsg_to_cv2(camera_msg, desired_encoding='passthrough')
+        # cv_image = cv2.resize(cv_image,(480,480))
+
+        # cv_image = cv2.fastNlMeansDenoisingColored(cv_image,None,5,5,5,11)
+
+        # # histogram equalization method 1
+        # cv_image = cv2.cvtColor(cv_image, cv2.COLOR_BGR2YCrCb)
+        # cv_image[:,:,0] = cv2.equalizeHist(cv_image[:,:,0])
+        # cv_image = cv2.cvtColor(cv_image, cv2.COLOR_YCrCb2BGR)
+
+        # # histogram equalization method 2
+        # cv_image = equalize_hist_rgb(cv_image)
+
+        # # histogram equalization method 3 CLAHE
+        # image_bw = cv2.cvtColor(cv_image, cv2.COLOR_BGR2GRAY)
+        # clahe = cv2.createCLAHE(clipLimit=5)
+        # cv_image = clahe.apply(image_bw) + 30
+
+        # # histogram equalization method 4 CLAHE
+        # cla = cv2.createCLAHE(clipLimit=4.0)
+        # H, S, V = cv2.split(cv2.cvtColor(cv_image, cv2.COLOR_BGR2HSV))
+        # eq_V = cla.apply(V)
+        # cv_image = cv2.cvtColor(cv2.merge([H, S, eq_V]), cv2.COLOR_HSV2BGR)
+
+        # # increase brightness
+        # M = np.ones(cv_image.shape, dtype = 'uint8') * 50  # increase brightness by 50
+        # cv_image = cv2.add(cv_image, M)
+
+        # # image visualizer
+        # cv2.imshow('raw image',cv_image)
+        # cv2.waitKey(25)
+
+        route, pred = find_route(self.model, self.device, cv_image)
+
+        # # visualize route purely
+        # cv2.imshow("white_route",route)
+        # cv2.waitKey(25)
+
+        # brighten the pixels that are predicted as road
+        sign = cv2.cvtColor(pred, cv2.COLOR_GRAY2BGR)
+        cv_image = cv2.add(cv_image, sign)
+
+        route_indices = list(zip(*np.nonzero(route)))
+        if not route_indices:
+            print("No centerline found!")
+            return
+
+        # for index_circle in range(len(route_indices)):
+        #     red_circle_x = route_indices[index_circle][0]
+        #     red_circle_y = route_indices[index_circle][1]
+        #     cv2.circle(cv_image, (red_circle_y, red_circle_x), radius=5, color=(0, 0, 255), thickness=-1)
+
+        cv2.imshow('circled image',cv_image)
+        cv2.waitKey(25)
+
+
+def main(args=None):
+    print(torch.cuda.is_available())
+    device = torch.device("cuda" if
def main(args=None):
    print(torch.cuda.is_available())
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    model = load_model(device)

    rclpy.init(args=args)
    trailDetectorNode = trailDetector(model, device)
    rclpy.spin(trailDetectorNode)

    trailDetectorNode.destroy_node()
    rclpy.shutdown()
    cv2.destroyAllWindows()

if __name__ == '__main__':
    main()

From efd93a159fb62fd79194cfa2e0d93fe21319a426 Mon Sep 17 00:00:00 2001
From: Frank_ZhaodongJiang <69261064+FrankZhaodong@users.noreply.github.com>
Date: Thu, 31 Aug 2023 16:33:30 -0400
Subject: [PATCH 02/22] add psp model, change the coordinate conversion

---
 .../__pycache__/jpu.cpython-310.pyc           | Bin 0 -> 2452 bytes
 .../__pycache__/jpu.cpython-37.pyc            | Bin 0 -> 2441 bytes
 .../__pycache__/model_loader.cpython-310.pyc  | Bin 3390 -> 6824 bytes
 .../__pycache__/model_loader.cpython-37.pyc   | Bin 0 -> 6861 bytes
 .../__pycache__/model_store.cpython-310.pyc   | Bin 0 -> 1425 bytes
 .../__pycache__/segbase.cpython-310.pyc       | Bin 0 -> 2194 bytes
 .../__pycache__/segbase.cpython-37.pyc        | Bin 0 -> 2155 bytes
 .../v2_trailDetectionNode.cpython-310.pyc     | Bin 6580 -> 7265 bytes
 .../__pycache__/vgg.cpython-37.pyc            | Bin 0 -> 5925 bytes
 ...izer_v2_trailDetectionNode.cpython-310.pyc | Bin 8399 -> 8830 bytes
 ...lizer_v2_trailDetectionNode.cpython-37.pyc | Bin 0 -> 8356 bytes
 .../__pycache__/__init__.cpython-310.pyc      | Bin 0 -> 330 bytes
 .../__pycache__/__init__.cpython-39.pyc       | Bin 0 -> 328 bytes
 .../__pycache__/densenet.cpython-310.pyc      | Bin 0 -> 8119 bytes
 .../__pycache__/densenet.cpython-39.pyc       | Bin 0 -> 8190 bytes
 .../__pycache__/eespnet.cpython-310.pyc       | Bin 0 -> 6119 bytes
 .../__pycache__/eespnet.cpython-39.pyc        | Bin 0 -> 6085 bytes
 .../__pycache__/resnet.cpython-310.pyc        | Bin 0 -> 6189 bytes
 .../__pycache__/resnet.cpython-39.pyc         | Bin 0 -> 6424 bytes
 .../__pycache__/resnetv1b.cpython-310.pyc     | Bin 0 -> 7205 bytes
 .../__pycache__/resnetv1b.cpython-37.pyc      | Bin 0 -> 8293 bytes
 .../__pycache__/resnetv1b.cpython-39.pyc      | Bin 0 -> 7945 bytes
 .../__pycache__/vgg.cpython-310.pyc           | Bin 0 -> 5493 bytes
 .../__pycache__/vgg.cpython-39.pyc            | Bin 0 -> 5965 bytes
 .../__pycache__/xception.cpython-310.pyc      | Bin 0 -> 9518 bytes
 .../__pycache__/xception.cpython-39.pyc       | Bin 0 -> 10103 bytes
 .../base_models/resnetv1b.py                  | 264 ++++++++++++++++++
 .../trail_detection_node/jpu.py               |  68 +++++
 .../trail_detection_node/model_loader.py      |  96 ++++++-
 .../trail_detection_node/model_store.py       |  36 +++
 .../trail_detection_node/segbase.py           |  60 ++++
 .../v2_trailDetectionNode.py                  |  43 ++-
 .../visualizer_v2_trailDetectionNode.py       |  61 ++--
 33 files changed, 597 insertions(+), 31 deletions(-)
 create mode 100644 trail_detection_node/trail_detection_node/__pycache__/jpu.cpython-310.pyc
 create mode 100644 trail_detection_node/trail_detection_node/__pycache__/jpu.cpython-37.pyc
 create mode 100644 trail_detection_node/trail_detection_node/__pycache__/model_loader.cpython-37.pyc
 create mode 100644 trail_detection_node/trail_detection_node/__pycache__/model_store.cpython-310.pyc
 create mode 100644 trail_detection_node/trail_detection_node/__pycache__/segbase.cpython-310.pyc
 create mode 100644 trail_detection_node/trail_detection_node/__pycache__/segbase.cpython-37.pyc
 create mode 100644 trail_detection_node/trail_detection_node/__pycache__/vgg.cpython-37.pyc
 create mode 100644 trail_detection_node/trail_detection_node/__pycache__/visualizer_v2_trailDetectionNode.cpython-37.pyc
 create mode 100644 trail_detection_node/trail_detection_node/base_models/__pycache__/__init__.cpython-310.pyc
 create mode 100644 trail_detection_node/trail_detection_node/base_models/__pycache__/__init__.cpython-39.pyc
 create mode 100644 trail_detection_node/trail_detection_node/base_models/__pycache__/densenet.cpython-310.pyc
 create mode 100644 trail_detection_node/trail_detection_node/base_models/__pycache__/densenet.cpython-39.pyc
 create mode 100644 trail_detection_node/trail_detection_node/base_models/__pycache__/eespnet.cpython-310.pyc
 create mode 100644 trail_detection_node/trail_detection_node/base_models/__pycache__/eespnet.cpython-39.pyc
 create mode 100644 trail_detection_node/trail_detection_node/base_models/__pycache__/resnet.cpython-310.pyc
 create mode 100644 trail_detection_node/trail_detection_node/base_models/__pycache__/resnet.cpython-39.pyc
 create mode 100644 trail_detection_node/trail_detection_node/base_models/__pycache__/resnetv1b.cpython-310.pyc
 create mode 100644 trail_detection_node/trail_detection_node/base_models/__pycache__/resnetv1b.cpython-37.pyc
 create mode 100644 trail_detection_node/trail_detection_node/base_models/__pycache__/resnetv1b.cpython-39.pyc
 create mode 100644 trail_detection_node/trail_detection_node/base_models/__pycache__/vgg.cpython-310.pyc
 create mode 100644 trail_detection_node/trail_detection_node/base_models/__pycache__/vgg.cpython-39.pyc
 create mode 100644 trail_detection_node/trail_detection_node/base_models/__pycache__/xception.cpython-310.pyc
 create mode 100644 trail_detection_node/trail_detection_node/base_models/__pycache__/xception.cpython-39.pyc
 create mode 100644 trail_detection_node/trail_detection_node/base_models/resnetv1b.py
 create mode 100644 trail_detection_node/trail_detection_node/jpu.py
 create mode 100644 trail_detection_node/trail_detection_node/model_store.py
 create mode 100644 trail_detection_node/trail_detection_node/segbase.py
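The subject line says this commit adds a "psp" model, but only the recompiled __pycache__ artifacts are visible at this point in the patch; the model_loader.py, segbase.py, and jpu.py sources follow later in the series. For orientation, the core of a PSP-style segmentation head is a pyramid pooling module. A minimal PyTorch sketch of that idea, illustrative only (PyramidPoolingSketch and bin_sizes are assumed names, not anything from this repository), looks like:

import torch
import torch.nn as nn
import torch.nn.functional as F

class PyramidPoolingSketch(nn.Module):
    """Pool the feature map at several grid sizes, project each pooled map
    with a 1x1 conv, upsample back to the input size, and concatenate."""
    def __init__(self, in_channels, bin_sizes=(1, 2, 3, 6)):
        super().__init__()
        out_channels = in_channels // len(bin_sizes)
        self.stages = nn.ModuleList([
            nn.Sequential(
                nn.AdaptiveAvgPool2d(size),  # pool to a size x size grid
                nn.Conv2d(in_channels, out_channels, 1, bias=False),
                nn.BatchNorm2d(out_channels),
                nn.ReLU(inplace=True),
            )
            for size in bin_sizes
        ])

    def forward(self, x):
        h, w = x.shape[2:]
        feats = [x]
        for stage in self.stages:
            y = stage(x)
            feats.append(F.interpolate(y, size=(h, w), mode='bilinear', align_corners=False))
        return torch.cat(feats, dim=1)  # in_channels + len(bin_sizes) * out_channels

On a 2048-channel ResNet stage-4 feature map, this returns 2048 + 4 * 512 = 4096 channels for the classifier to consume, which is the usual PSPNet arrangement.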
diff --git a/trail_detection_node/trail_detection_node/__pycache__/jpu.cpython-310.pyc b/trail_detection_node/trail_detection_node/__pycache__/jpu.cpython-310.pyc
new file mode 100644
Binary files /dev/null and b/trail_detection_node/trail_detection_node/__pycache__/jpu.cpython-310.pyc differ
diff --git a/trail_detection_node/trail_detection_node/__pycache__/jpu.cpython-37.pyc b/trail_detection_node/trail_detection_node/__pycache__/jpu.cpython-37.pyc
new file mode 100644
Binary files /dev/null and b/trail_detection_node/trail_detection_node/__pycache__/jpu.cpython-37.pyc differ
diff --git a/trail_detection_node/trail_detection_node/__pycache__/model_loader.cpython-310.pyc b/trail_detection_node/trail_detection_node/__pycache__/model_loader.cpython-310.pyc
Binary files a/trail_detection_node/trail_detection_node/__pycache__/model_loader.cpython-310.pyc and b/trail_detection_node/trail_detection_node/__pycache__/model_loader.cpython-310.pyc differ
diff --git a/trail_detection_node/trail_detection_node/__pycache__/model_loader.cpython-37.pyc b/trail_detection_node/trail_detection_node/__pycache__/model_loader.cpython-37.pyc
new file mode 100644
Binary files /dev/null and b/trail_detection_node/trail_detection_node/__pycache__/model_loader.cpython-37.pyc differ
diff --git a/trail_detection_node/trail_detection_node/__pycache__/model_store.cpython-310.pyc b/trail_detection_node/trail_detection_node/__pycache__/model_store.cpython-310.pyc
new file mode 100644
Binary files /dev/null and b/trail_detection_node/trail_detection_node/__pycache__/model_store.cpython-310.pyc differ
diff --git a/trail_detection_node/trail_detection_node/__pycache__/segbase.cpython-310.pyc b/trail_detection_node/trail_detection_node/__pycache__/segbase.cpython-310.pyc
new file mode 100644
Binary files /dev/null and b/trail_detection_node/trail_detection_node/__pycache__/segbase.cpython-310.pyc differ
diff --git a/trail_detection_node/trail_detection_node/__pycache__/segbase.cpython-37.pyc b/trail_detection_node/trail_detection_node/__pycache__/segbase.cpython-37.pyc
new file mode 100644
Binary files /dev/null and b/trail_detection_node/trail_detection_node/__pycache__/segbase.cpython-37.pyc differ
diff --git a/trail_detection_node/trail_detection_node/__pycache__/v2_trailDetectionNode.cpython-310.pyc b/trail_detection_node/trail_detection_node/__pycache__/v2_trailDetectionNode.cpython-310.pyc
Binary files a/trail_detection_node/trail_detection_node/__pycache__/v2_trailDetectionNode.cpython-310.pyc and b/trail_detection_node/trail_detection_node/__pycache__/v2_trailDetectionNode.cpython-310.pyc differ
diff --git a/trail_detection_node/trail_detection_node/__pycache__/vgg.cpython-37.pyc b/trail_detection_node/trail_detection_node/__pycache__/vgg.cpython-37.pyc
new file mode 100644
Binary files /dev/null and b/trail_detection_node/trail_detection_node/__pycache__/vgg.cpython-37.pyc differ
diff --git a/trail_detection_node/trail_detection_node/__pycache__/visualizer_v2_trailDetectionNode.cpython-310.pyc b/trail_detection_node/trail_detection_node/__pycache__/visualizer_v2_trailDetectionNode.cpython-310.pyc
Binary files a/trail_detection_node/trail_detection_node/__pycache__/visualizer_v2_trailDetectionNode.cpython-310.pyc and b/trail_detection_node/trail_detection_node/__pycache__/visualizer_v2_trailDetectionNode.cpython-310.pyc differ
diff --git a/trail_detection_node/trail_detection_node/__pycache__/visualizer_v2_trailDetectionNode.cpython-37.pyc b/trail_detection_node/trail_detection_node/__pycache__/visualizer_v2_trailDetectionNode.cpython-37.pyc
new file mode 100644
Binary files /dev/null and b/trail_detection_node/trail_detection_node/__pycache__/visualizer_v2_trailDetectionNode.cpython-37.pyc differ
diff --git a/trail_detection_node/trail_detection_node/base_models/__pycache__/__init__.cpython-310.pyc b/trail_detection_node/trail_detection_node/base_models/__pycache__/__init__.cpython-310.pyc
new file mode 100644
Binary files /dev/null and b/trail_detection_node/trail_detection_node/base_models/__pycache__/__init__.cpython-310.pyc differ
diff --git a/trail_detection_node/trail_detection_node/base_models/__pycache__/__init__.cpython-39.pyc b/trail_detection_node/trail_detection_node/base_models/__pycache__/__init__.cpython-39.pyc
new file mode 100644
Binary files /dev/null and b/trail_detection_node/trail_detection_node/base_models/__pycache__/__init__.cpython-39.pyc differ
diff --git a/trail_detection_node/trail_detection_node/base_models/__pycache__/densenet.cpython-310.pyc b/trail_detection_node/trail_detection_node/base_models/__pycache__/densenet.cpython-310.pyc
new file mode 100644
Binary files /dev/null and b/trail_detection_node/trail_detection_node/base_models/__pycache__/densenet.cpython-310.pyc differ
diff --git a/trail_detection_node/trail_detection_node/base_models/__pycache__/densenet.cpython-39.pyc b/trail_detection_node/trail_detection_node/base_models/__pycache__/densenet.cpython-39.pyc
new file mode 100644
Binary files /dev/null and b/trail_detection_node/trail_detection_node/base_models/__pycache__/densenet.cpython-39.pyc differ
diff --git a/trail_detection_node/trail_detection_node/base_models/__pycache__/eespnet.cpython-310.pyc b/trail_detection_node/trail_detection_node/base_models/__pycache__/eespnet.cpython-310.pyc
new file mode 100644
Binary files /dev/null and b/trail_detection_node/trail_detection_node/base_models/__pycache__/eespnet.cpython-310.pyc differ
diff --git a/trail_detection_node/trail_detection_node/base_models/__pycache__/eespnet.cpython-39.pyc b/trail_detection_node/trail_detection_node/base_models/__pycache__/eespnet.cpython-39.pyc
new file mode 100644
Binary files /dev/null and b/trail_detection_node/trail_detection_node/base_models/__pycache__/eespnet.cpython-39.pyc differ
diff --git a/trail_detection_node/trail_detection_node/base_models/__pycache__/resnet.cpython-310.pyc b/trail_detection_node/trail_detection_node/base_models/__pycache__/resnet.cpython-310.pyc
new file mode 100644
Binary files /dev/null and b/trail_detection_node/trail_detection_node/base_models/__pycache__/resnet.cpython-310.pyc differ
diff --git a/trail_detection_node/trail_detection_node/base_models/__pycache__/resnet.cpython-39.pyc b/trail_detection_node/trail_detection_node/base_models/__pycache__/resnet.cpython-39.pyc
new file mode 100644
Binary files /dev/null and b/trail_detection_node/trail_detection_node/base_models/__pycache__/resnet.cpython-39.pyc differ
diff --git a/trail_detection_node/trail_detection_node/base_models/__pycache__/resnetv1b.cpython-310.pyc b/trail_detection_node/trail_detection_node/base_models/__pycache__/resnetv1b.cpython-310.pyc
new file mode 100644
Binary files /dev/null and b/trail_detection_node/trail_detection_node/base_models/__pycache__/resnetv1b.cpython-310.pyc differ
diff --git a/trail_detection_node/trail_detection_node/base_models/__pycache__/resnetv1b.cpython-37.pyc b/trail_detection_node/trail_detection_node/base_models/__pycache__/resnetv1b.cpython-37.pyc
new file mode 100644
Binary files /dev/null and b/trail_detection_node/trail_detection_node/base_models/__pycache__/resnetv1b.cpython-37.pyc differ
diff --git a/trail_detection_node/trail_detection_node/base_models/__pycache__/resnetv1b.cpython-39.pyc b/trail_detection_node/trail_detection_node/base_models/__pycache__/resnetv1b.cpython-39.pyc
new file mode 100644
Binary files /dev/null and b/trail_detection_node/trail_detection_node/base_models/__pycache__/resnetv1b.cpython-39.pyc differ
diff --git a/trail_detection_node/trail_detection_node/base_models/__pycache__/vgg.cpython-310.pyc b/trail_detection_node/trail_detection_node/base_models/__pycache__/vgg.cpython-310.pyc
new file mode 100644
Binary files /dev/null and b/trail_detection_node/trail_detection_node/base_models/__pycache__/vgg.cpython-310.pyc differ
diff --git a/trail_detection_node/trail_detection_node/base_models/__pycache__/vgg.cpython-39.pyc b/trail_detection_node/trail_detection_node/base_models/__pycache__/vgg.cpython-39.pyc
new file mode 100644
Binary files /dev/null and b/trail_detection_node/trail_detection_node/base_models/__pycache__/vgg.cpython-39.pyc differ
zvOHPam8hBsd*^Y81MF&@hZa#ke$GKa=gk8G|x2d-$L(C$ModtPWDVz z|94|rQ%MR#N8VC+e%Ad9!;=1|)B25_A9j-CV5cXB<)3pmYXbSjM3t6w4aap$Bos)0 z0T0bdJ&7OvDhbg{69x1P2oU>TcLRtF3NUv*2)_IM*wI06Gj@92FpAyw#s>5#u@ME` z?ym)L`DSo`lLooni)|fjG$#^uJ;zb!WmIrOP0dPBinE9(!>Fwz|6w<}#n-*8Qy$C=bn_VrsFak1}KQ$=)7D&yio3m!sbS-?Lw*0G_^JWdd zbtp5-^_0h_xzSS%rLCIC{dnl6Hf7NL^bI^D&<#+w2;a)sH1#xicp|p#<8RyULmjql zAfpX5#L?e0^h@Y^V9T=fBFg$TGMpUXVjC7RsXC6?0u;Z4)=!B}MC*p>SRR(MHIYm_ z#p$M-L(%fBTT2J7Zt_jU0sS=+Ids9DU*}`~o*ZZ>9K>5O4xV9|mRWmNe_4d)RH%7+ z1wAsTQ6{`3=zQSZEv(uCIgTnkDYc+ss`$ic6$Nq#`<~|Ze)ht91Jzya25RAYue;XA zZ|2^W7w|d!cc9MU0$LukM1&{8l0eeGj)5dVk)>ypwG9Z`E+~5&zR`9Oss>d=%}Y{a zr!_8WCQ(yF%_X#T@P+k49}hKFWPfZ%SPDsWbk z)+W5=658gi%V<3ztpQW?r7;%d1fxyLXcOSpRDHy)s;S)~l^guJYM`xy)>ZkN7C&$@ zsslK^XiCkfGdmU_Z)&?V^!6I)(ZC)HuERiT#z>BMs4P$&BZa}oJX6B zGR_6;;6-K24!(f$i|QhD3cBSl5Zn%4Sk~Xhk?UV2p*?C^xy&C@yfX${9MV{VA!SOM zpka!Phi|dPPe7WE{yKgVoFYVNgk#1bJaIVCI`B%ujA}|oUEA$gCr@vRy->dp6yIdoo zkE@B6adEw?R(peoaZx&5c`Y>|j2y0V%&W=xue1DmQohFWwRq~fY9q*d5L|z-_Woed zyQ1P^`@z}c>G^4h!M$!xW5xQhFY~yX z)`0m4(M~$IjClr=R`A<|?PsA+YbI|yVutz$T6z%)&ntW+7~ftTJlDv(x{a8fTNeTgHxB5^4jW%H!&*Z zwY3p%a#YG|>nYFNn;M#X4TaH_g-C;|ozCijC5Sn&8OGr=d=6$r^7Z$4j1+qP0m)60 zTO{8n;bA7n{UJ+#K++=lA<0K1KO*@YYW6s+3t75)S^$PSmXb&4t;nVMuERhhvPdDWM_5a6^J9{+R&l&T~ zkf&x}47`}Q$<$y!Z{q9Gl$iL&2@^k^_84d4e*qI;`@d)6H9g(TY>DKE)A~LoXB4g+ zQPz=}I-;=>;E3AN1Rc@c69!6bgO&F{a#+FeDXi!oYjXh00|%bf-{X4}Ag+n- zp9&Bm9kWr`;2jAS>@-wxM%jr{?7mH}<9eE&=r9SDeRzBf2W))9?~Fl^G&=!D{X}*` zB9$~dQTW{KM3JEb5?bC6e&4WoBs+n!WG6gK961jpzKUsff~^WeFNxO2^g#mbbNVE+ zeloVNrQq7j+mft8^0qWL&_8!tA7&IV!!do3PpA%y5*SQKKEXSlPoP7aq`jCUZ`Kn3 zazL&qYObTBC^vT{6AzJQPP{M-Xh7BVu6oe z0r>_dqTfa#O)*^0)CeI*g^fs-Z)q7b@1H9b_w_ti18-l_e)ShLd8ew>(_<>-ib^3^ zMf8meAyq*1ovTz)(=}Y~a8=odT19OVO(Mn}&+|n#>=4qrd$^d$^Lx9FWTF^k+H;0v z-6ojyl~&Ar@$$k-THQY%74Le(f=F0$N#QIf@f-z8cBENOq>9r^q)LEWy#x3H(Nv0(8 zsnbkP(@fs?GU~?(#(R7p2PvCd6}}~d*O%CVqaNv%ivxSSb4(mOIl~V4$37HRe)N41 zY>bVcJxu~K>MH;D*343Vi#YyhscHP)z&E7Zfgr-zH$HNKxjivDeOtuG{Z|d1RhHmHxYGL-(~qWNtZ-c9-$PM zf=3(ezVh24k_WQrSI9Q|vDt}Dzh$?h=!lpsus2cXhg?0)VIA?|EFy#&qQs-?{$j$$ z1Nz=EVJnFob-+LN$%HLrQgLL?%>Z`*tJV<&Dx?SmRC167#7?+zDT`dL%bq+Zq9d#R zR~Y+w5rrTk2ag>WLF64Rl1|uv5k_PiQ(-Jd9@==Sgt~HaJqnJ;%k2@T3{HjVx)Dbb zbn-;OpMZGR6p#+(NtEk~p($&oMAU1`iCO#(CYtA`vSib12HSt8;jL!O#m=Mi^urJ6D8HBH{qXOll(yp-HZCzsI2E|9siL~BaWQ)V9{ z{69w*cdvhA{+u@r$QDu!dic+fZHNCsuxBHCiVG*0UK0pyK@o6SG_L z4+?R4LkGN=M@C=&2|Kv=aQD0x#swKl203{mREm$WK~Kvl@cxEAnqzA6Pu&u|gW7Ci z`UxIdjcT!3V;Kkl= u;qY-O{w*xU2wws53?htYHSETP#-+weW3AC_Of(wh7t3DR^E{-up8XF@yM9#w literal 0 HcmV?d00001 diff --git a/trail_detection_node/trail_detection_node/base_models/__pycache__/xception.cpython-39.pyc b/trail_detection_node/trail_detection_node/base_models/__pycache__/xception.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..85ec7e51a85d7127e84efe3e6a46a28b659b8906 GIT binary patch literal 10103 zcmeHN>u(&_b)VPH&OW##mlQ=&4>PgjcH3ICD9KLUCN(9QX;GVXETu^|b|@CW@D|EPwaO(&7`z?eI<&5PSoml zlH&K8!4^|*Tpgz0yp)tTg2;cA6M#~6(!Uod**vWM^@5`j@Sl~K1;=_kgz zDN|Rd+v$YWophS&xt|!0QFqbZax!@{*lOs;?RIdz+j+38j%>7YWP+@&HNC89{xQvU zXMXrSkjRM4*x0r9tpluMY{!Og$2m|Z%H4Hi6F)n4lnI_0yUN=0ke841vAK(SzL%A< zl#N_x&jGKX40I~DruqJwmAD(ayS)`A&h1ts)Fm(vO+CT9Vv-BDw*svTcuG9qZ*^Lc z?b5%3!&Vs7yu=BE_Igrk zb^PX?MhA_ANx8cn4W9HgU*(38ZmB?5@gQAEik+_B^xKWcfj%^og6}umjWG26L*uiD zOLw}P!P3s1Mptz^8%ys8I_!2DZNJlvf^aF)jaJ)NK@>FE%caJ{AVm4s!eFxjk!d2a zu?gXkF}$|*IO^)=ouy`12TPk>g$^#=ZiIoKrTY?eaqF?3Lhm>;ArppKF%i{ef8UPH15;AYAcX}N9u(rx zT&uZBX}$Hx-)bn;>TJj~rqg!{jp{j)(;$bIp2uHeKGH9dCuEVF{CZbEY-lxuhfuJ` 
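The next diff adds resnetv1b.py, the ResNet backbone module the segmentation models are built on. Its key feature is the dilated mode: with ```dilated=True```, ```layer3``` and ```layer4``` keep stride 1 and use dilations 2 and 4 instead, so the backbone output stays at 1/8 of the input resolution rather than 1/32. A minimal sketch of that effect (assuming the package root is on PYTHONPATH; ```pretrained=False``` so no checkpoint is needed):

```
import torch
from trail_detection_node.base_models.resnetv1b import resnet50_v1b

model = resnet50_v1b(pretrained=False, dilated=True)

x = torch.randn(1, 3, 224, 224)
x = model.maxpool(model.relu(model.bn1(model.conv1(x))))
c1 = model.layer1(x)   # (1, 256, 56, 56)  -> stride 4
c2 = model.layer2(c1)  # (1, 512, 28, 28)  -> stride 8
c3 = model.layer3(c2)  # (1, 1024, 28, 28) -> dilation 2, resolution kept
c4 = model.layer4(c3)  # (1, 2048, 28, 28) -> dilation 4, resolution kept
print(c4.shape)        # torch.Size([1, 2048, 28, 28])
```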
diff --git a/trail_detection_node/trail_detection_node/base_models/resnetv1b.py b/trail_detection_node/trail_detection_node/base_models/resnetv1b.py
new file mode 100644
index 00000000..21d67b7a
--- /dev/null
+++ b/trail_detection_node/trail_detection_node/base_models/resnetv1b.py
@@ -0,0 +1,264 @@
+import torch
+import torch.nn as nn
+import torch.utils.model_zoo as model_zoo
+
+__all__ = ['ResNetV1b', 'resnet18_v1b', 'resnet34_v1b', 'resnet50_v1b',
+           'resnet101_v1b', 'resnet152_v1b', 'resnet152_v1s', 'resnet101_v1s', 'resnet50_v1s']
+
+model_urls = {
+    'resnet18': 'https://download.pytorch.org/models/resnet18-5c106cde.pth',
+    'resnet34': 'https://download.pytorch.org/models/resnet34-333f7ec4.pth',
+    'resnet50': 'https://download.pytorch.org/models/resnet50-19c8e357.pth',
+    'resnet101': 'https://download.pytorch.org/models/resnet101-5d3b4d8f.pth',
+    'resnet152': 'https://download.pytorch.org/models/resnet152-b121ed2d.pth',
+}
+
+
+class BasicBlockV1b(nn.Module):
+    expansion = 1
+
+    def __init__(self, inplanes, planes, stride=1, dilation=1, downsample=None,
+                 previous_dilation=1, norm_layer=nn.BatchNorm2d):
+        super(BasicBlockV1b, self).__init__()
+        self.conv1 =
nn.Conv2d(inplanes, planes, 3, stride, + dilation, dilation, bias=False) + self.bn1 = norm_layer(planes) + self.relu = nn.ReLU(True) + self.conv2 = nn.Conv2d(planes, planes, 3, 1, previous_dilation, + dilation=previous_dilation, bias=False) + self.bn2 = norm_layer(planes) + self.downsample = downsample + self.stride = stride + + def forward(self, x): + identity = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + + if self.downsample is not None: + identity = self.downsample(x) + + out += identity + out = self.relu(out) + + return out + + +class BottleneckV1b(nn.Module): + expansion = 4 + + def __init__(self, inplanes, planes, stride=1, dilation=1, downsample=None, + previous_dilation=1, norm_layer=nn.BatchNorm2d): + super(BottleneckV1b, self).__init__() + self.conv1 = nn.Conv2d(inplanes, planes, 1, bias=False) + self.bn1 = norm_layer(planes) + self.conv2 = nn.Conv2d(planes, planes, 3, stride, + dilation, dilation, bias=False) + self.bn2 = norm_layer(planes) + self.conv3 = nn.Conv2d(planes, planes * self.expansion, 1, bias=False) + self.bn3 = norm_layer(planes * self.expansion) + self.relu = nn.ReLU(True) + self.downsample = downsample + self.stride = stride + + def forward(self, x): + identity = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + out = self.relu(out) + + out = self.conv3(out) + out = self.bn3(out) + + if self.downsample is not None: + identity = self.downsample(x) + + out += identity + out = self.relu(out) + + return out + + +class ResNetV1b(nn.Module): + + def __init__(self, block, layers, num_classes=1000, dilated=True, deep_stem=False, + zero_init_residual=False, norm_layer=nn.BatchNorm2d): + self.inplanes = 128 if deep_stem else 64 + super(ResNetV1b, self).__init__() + if deep_stem: + self.conv1 = nn.Sequential( + nn.Conv2d(3, 64, 3, 2, 1, bias=False), + norm_layer(64), + nn.ReLU(True), + nn.Conv2d(64, 64, 3, 1, 1, bias=False), + norm_layer(64), + nn.ReLU(True), + nn.Conv2d(64, 128, 3, 1, 1, bias=False) + ) + else: + self.conv1 = nn.Conv2d(3, 64, 7, 2, 3, bias=False) + self.bn1 = norm_layer(self.inplanes) + self.relu = nn.ReLU(True) + self.maxpool = nn.MaxPool2d(3, 2, 1) + self.layer1 = self._make_layer(block, 64, layers[0], norm_layer=norm_layer) + self.layer2 = self._make_layer(block, 128, layers[1], stride=2, norm_layer=norm_layer) + if dilated: + self.layer3 = self._make_layer(block, 256, layers[2], stride=1, dilation=2, norm_layer=norm_layer) + self.layer4 = self._make_layer(block, 512, layers[3], stride=1, dilation=4, norm_layer=norm_layer) + else: + self.layer3 = self._make_layer(block, 256, layers[2], stride=2, norm_layer=norm_layer) + self.layer4 = self._make_layer(block, 512, layers[3], stride=2, norm_layer=norm_layer) + self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) + self.fc = nn.Linear(512 * block.expansion, num_classes) + + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + elif isinstance(m, nn.BatchNorm2d): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + + if zero_init_residual: + for m in self.modules(): + if isinstance(m, BottleneckV1b): + nn.init.constant_(m.bn3.weight, 0) + elif isinstance(m, BasicBlockV1b): + nn.init.constant_(m.bn2.weight, 0) + + def _make_layer(self, block, planes, blocks, stride=1, dilation=1, norm_layer=nn.BatchNorm2d): + downsample = None + if stride != 1 or self.inplanes != planes * 
block.expansion: + downsample = nn.Sequential( + nn.Conv2d(self.inplanes, planes * block.expansion, 1, stride, bias=False), + norm_layer(planes * block.expansion), + ) + + layers = [] + if dilation in (1, 2): + layers.append(block(self.inplanes, planes, stride, dilation=1, downsample=downsample, + previous_dilation=dilation, norm_layer=norm_layer)) + elif dilation == 4: + layers.append(block(self.inplanes, planes, stride, dilation=2, downsample=downsample, + previous_dilation=dilation, norm_layer=norm_layer)) + else: + raise RuntimeError("=> unknown dilation size: {}".format(dilation)) + self.inplanes = planes * block.expansion + for _ in range(1, blocks): + layers.append(block(self.inplanes, planes, dilation=dilation, + previous_dilation=dilation, norm_layer=norm_layer)) + + return nn.Sequential(*layers) + + def forward(self, x): + x = self.conv1(x) + x = self.bn1(x) + x = self.relu(x) + x = self.maxpool(x) + + x = self.layer1(x) + x = self.layer2(x) + x = self.layer3(x) + x = self.layer4(x) + + x = self.avgpool(x) + x = x.view(x.size(0), -1) + x = self.fc(x) + + return x + + +def resnet18_v1b(pretrained=False, **kwargs): + model = ResNetV1b(BasicBlockV1b, [2, 2, 2, 2], **kwargs) + if pretrained: + old_dict = model_zoo.load_url(model_urls['resnet18']) + model_dict = model.state_dict() + old_dict = {k: v for k, v in old_dict.items() if (k in model_dict)} + model_dict.update(old_dict) + model.load_state_dict(model_dict) + return model + + +def resnet34_v1b(pretrained=False, **kwargs): + model = ResNetV1b(BasicBlockV1b, [3, 4, 6, 3], **kwargs) + if pretrained: + old_dict = model_zoo.load_url(model_urls['resnet34']) + model_dict = model.state_dict() + old_dict = {k: v for k, v in old_dict.items() if (k in model_dict)} + model_dict.update(old_dict) + model.load_state_dict(model_dict) + return model + + +def resnet50_v1b(pretrained=False, **kwargs): + model = ResNetV1b(BottleneckV1b, [3, 4, 6, 3], **kwargs) + if pretrained: + old_dict = model_zoo.load_url(model_urls['resnet50']) + model_dict = model.state_dict() + old_dict = {k: v for k, v in old_dict.items() if (k in model_dict)} + model_dict.update(old_dict) + model.load_state_dict(model_dict) + return model + + +def resnet101_v1b(pretrained=False, **kwargs): + model = ResNetV1b(BottleneckV1b, [3, 4, 23, 3], **kwargs) + if pretrained: + old_dict = model_zoo.load_url(model_urls['resnet101']) + model_dict = model.state_dict() + old_dict = {k: v for k, v in old_dict.items() if (k in model_dict)} + model_dict.update(old_dict) + model.load_state_dict(model_dict) + return model + + +def resnet152_v1b(pretrained=False, **kwargs): + model = ResNetV1b(BottleneckV1b, [3, 8, 36, 3], **kwargs) + if pretrained: + old_dict = model_zoo.load_url(model_urls['resnet152']) + model_dict = model.state_dict() + old_dict = {k: v for k, v in old_dict.items() if (k in model_dict)} + model_dict.update(old_dict) + model.load_state_dict(model_dict) + return model + + +def resnet50_v1s(pretrained=False, root='~/.torch/models', **kwargs): + model = ResNetV1b(BottleneckV1b, [3, 4, 6, 3], deep_stem=True, **kwargs) + if pretrained: + from ..model_store import get_resnet_file + model.load_state_dict(torch.load(get_resnet_file('resnet50', root=root)), strict=False) + return model + + +def resnet101_v1s(pretrained=False, root='~/.torch/models', **kwargs): + model = ResNetV1b(BottleneckV1b, [3, 4, 23, 3], deep_stem=True, **kwargs) + if pretrained: + from ..model_store import get_resnet_file + model.load_state_dict(torch.load(get_resnet_file('resnet101', root=root)), 
strict=False) + return model + + +def resnet152_v1s(pretrained=False, root='~/.torch/models', **kwargs): + model = ResNetV1b(BottleneckV1b, [3, 8, 36, 3], deep_stem=True, **kwargs) + if pretrained: + from ..model_store import get_resnet_file + model.load_state_dict(torch.load(get_resnet_file('resnet152', root=root)), strict=False) + return model + + +if __name__ == '__main__': + import torch + + img = torch.randn(4, 3, 224, 224) + model = resnet50_v1b(True) + output = model(img) diff --git a/trail_detection_node/trail_detection_node/jpu.py b/trail_detection_node/trail_detection_node/jpu.py new file mode 100644 index 00000000..db23bab8 --- /dev/null +++ b/trail_detection_node/trail_detection_node/jpu.py @@ -0,0 +1,68 @@ +"""Joint Pyramid Upsampling""" +import torch +import torch.nn as nn +import torch.nn.functional as F + +__all__ = ['JPU'] + + +class SeparableConv2d(nn.Module): + def __init__(self, inplanes, planes, kernel_size=3, stride=1, padding=1, + dilation=1, bias=False, norm_layer=nn.BatchNorm2d): + super(SeparableConv2d, self).__init__() + self.conv = nn.Conv2d(inplanes, inplanes, kernel_size, stride, padding, dilation, groups=inplanes, bias=bias) + self.bn = norm_layer(inplanes) + self.pointwise = nn.Conv2d(inplanes, planes, 1, bias=bias) + + def forward(self, x): + x = self.conv(x) + x = self.bn(x) + x = self.pointwise(x) + return x + + +# copy from: https://github.com/wuhuikai/FastFCN/blob/master/encoding/nn/customize.py +class JPU(nn.Module): + def __init__(self, in_channels, width=512, norm_layer=nn.BatchNorm2d, **kwargs): + super(JPU, self).__init__() + + self.conv5 = nn.Sequential( + nn.Conv2d(in_channels[-1], width, 3, padding=1, bias=False), + norm_layer(width), + nn.ReLU(True)) + self.conv4 = nn.Sequential( + nn.Conv2d(in_channels[-2], width, 3, padding=1, bias=False), + norm_layer(width), + nn.ReLU(True)) + self.conv3 = nn.Sequential( + nn.Conv2d(in_channels[-3], width, 3, padding=1, bias=False), + norm_layer(width), + nn.ReLU(True)) + + self.dilation1 = nn.Sequential( + SeparableConv2d(3 * width, width, 3, padding=1, dilation=1, bias=False), + norm_layer(width), + nn.ReLU(True)) + self.dilation2 = nn.Sequential( + SeparableConv2d(3 * width, width, 3, padding=2, dilation=2, bias=False), + norm_layer(width), + nn.ReLU(True)) + self.dilation3 = nn.Sequential( + SeparableConv2d(3 * width, width, 3, padding=4, dilation=4, bias=False), + norm_layer(width), + nn.ReLU(True)) + self.dilation4 = nn.Sequential( + SeparableConv2d(3 * width, width, 3, padding=8, dilation=8, bias=False), + norm_layer(width), + nn.ReLU(True)) + + def forward(self, *inputs): + feats = [self.conv5(inputs[-1]), self.conv4(inputs[-2]), self.conv3(inputs[-3])] + size = feats[-1].size()[2:] + feats[-2] = F.interpolate(feats[-2], size, mode='bilinear', align_corners=True) + feats[-3] = F.interpolate(feats[-3], size, mode='bilinear', align_corners=True) + feat = torch.cat(feats, dim=1) + feat = torch.cat([self.dilation1(feat), self.dilation2(feat), self.dilation3(feat), self.dilation4(feat)], + dim=1) + + return inputs[0], inputs[1], inputs[2], feat diff --git a/trail_detection_node/trail_detection_node/model_loader.py b/trail_detection_node/trail_detection_node/model_loader.py index 908c74f6..6c8e2399 100644 --- a/trail_detection_node/trail_detection_node/model_loader.py +++ b/trail_detection_node/trail_detection_node/model_loader.py @@ -2,6 +2,9 @@ import torch.nn.functional as F from .vgg import vgg16 +import torch +from .segbase import SegBaseModel + class FCN8s(nn.Module): def __init__(self, nclass, 
backbone='vgg16', aux=False, pretrained_base=True, norm_layer=nn.BatchNorm2d, **kwargs):
         super(FCN8s, self).__init__()
@@ -96,4 +99,95 @@ def __init__(self, in_channels, channels, norm_layer=nn.BatchNorm2d, **kwargs):
         )
 
     def forward(self, x):
-        return self.block(x)
\ No newline at end of file
+        return self.block(x)
+
+'''
+PSP NET
+'''
+
+def _PSP1x1Conv(in_channels, out_channels, norm_layer, norm_kwargs):
+    return nn.Sequential(
+        nn.Conv2d(in_channels, out_channels, 1, bias=False),
+        norm_layer(out_channels, **({} if norm_kwargs is None else norm_kwargs)),
+        nn.ReLU(True)
+    )
+
+class _PyramidPooling(nn.Module):
+    def __init__(self, in_channels, **kwargs):
+        super(_PyramidPooling, self).__init__()
+        out_channels = int(in_channels / 4)
+        self.avgpool1 = nn.AdaptiveAvgPool2d(1)
+        self.avgpool2 = nn.AdaptiveAvgPool2d(2)
+        self.avgpool3 = nn.AdaptiveAvgPool2d(3)
+        self.avgpool4 = nn.AdaptiveAvgPool2d(6)
+        self.conv1 = _PSP1x1Conv(in_channels, out_channels, **kwargs)
+        self.conv2 = _PSP1x1Conv(in_channels, out_channels, **kwargs)
+        self.conv3 = _PSP1x1Conv(in_channels, out_channels, **kwargs)
+        self.conv4 = _PSP1x1Conv(in_channels, out_channels, **kwargs)
+
+    def forward(self, x):
+        size = x.size()[2:]
+        feat1 = F.interpolate(self.conv1(self.avgpool1(x)), size, mode='bilinear', align_corners=True)
+        feat2 = F.interpolate(self.conv2(self.avgpool2(x)), size, mode='bilinear', align_corners=True)
+        feat3 = F.interpolate(self.conv3(self.avgpool3(x)), size, mode='bilinear', align_corners=True)
+        feat4 = F.interpolate(self.conv4(self.avgpool4(x)), size, mode='bilinear', align_corners=True)
+        return torch.cat([x, feat1, feat2, feat3, feat4], dim=1)
+
+class _PSPHead(nn.Module):
+    def __init__(self, nclass, norm_layer=nn.BatchNorm2d, norm_kwargs=None, **kwargs):
+        super(_PSPHead, self).__init__()
+        self.psp = _PyramidPooling(2048, norm_layer=norm_layer, norm_kwargs=norm_kwargs)
+        self.block = nn.Sequential(
+            nn.Conv2d(4096, 512, 3, padding=1, bias=False),
+            norm_layer(512, **({} if norm_kwargs is None else norm_kwargs)),
+            nn.ReLU(True),
+            nn.Dropout(0.1),
+            nn.Conv2d(512, nclass, 1)
+        )
+
+    def forward(self, x):
+        x = self.psp(x)
+        return self.block(x)
+
+class PSPNet(SegBaseModel):
+    r"""Pyramid Scene Parsing Network
+
+    Parameters
+    ----------
+    nclass : int
+        Number of categories for the training dataset.
+    backbone : string
+        Pre-trained dilated backbone network type (default: 'resnet50'; 'resnet50',
+        'resnet101' or 'resnet152').
+    norm_layer : object
+        Normalization layer used in backbone network (default: :class:`nn.BatchNorm`;
+        for Synchronized Cross-GPU BatchNormalization).
+    aux : bool
+        Auxiliary loss.
+
+    Reference:
+        Zhao, Hengshuang, Jianping Shi, Xiaojuan Qi, Xiaogang Wang, and Jiaya Jia.
+        "Pyramid scene parsing network."
*CVPR*, 2017 + """ + + def __init__(self, nclass, backbone='resnet50', aux=False, pretrained_base=True, **kwargs): + super(PSPNet, self).__init__(nclass, aux, backbone, pretrained_base=pretrained_base, **kwargs) + self.head = _PSPHead(nclass, **kwargs) + if self.aux: + self.auxlayer = _FCNHead(1024, nclass, **kwargs) + + self.__setattr__('exclusive', ['head', 'auxlayer'] if aux else ['head']) + + def forward(self, x): + size = x.size()[2:] + _, _, c3, c4 = self.base_forward(x) + outputs = [] + x = self.head(c4) + x = F.interpolate(x, size, mode='bilinear', align_corners=True) + outputs.append(x) + + if self.aux: + auxout = self.auxlayer(c3) + auxout = F.interpolate(auxout, size, mode='bilinear', align_corners=True) + outputs.append(auxout) + return tuple(outputs) \ No newline at end of file diff --git a/trail_detection_node/trail_detection_node/model_store.py b/trail_detection_node/trail_detection_node/model_store.py new file mode 100644 index 00000000..a0193c79 --- /dev/null +++ b/trail_detection_node/trail_detection_node/model_store.py @@ -0,0 +1,36 @@ +"""Model store which provides pretrained models.""" +from __future__ import print_function + +import os +import zipfile + +__all__ = ['get_model_file', 'get_resnet_file'] + +_model_sha1 = {name: checksum for checksum, name in [ + ('25c4b50959ef024fcc050213a06b614899f94b3d', 'resnet50'), + ('2a57e44de9c853fa015b172309a1ee7e2d0e4e2a', 'resnet101'), + ('0d43d698c66aceaa2bc0309f55efdd7ff4b143af', 'resnet152'), +]} + +encoding_repo_url = 'https://hangzh.s3.amazonaws.com/' +_url_format = '{repo_url}encoding/models/{file_name}.zip' + + +def short_hash(name): + if name not in _model_sha1: + raise ValueError('Pretrained model for {name} is not available.'.format(name=name)) + return _model_sha1[name][:8] + + +def get_resnet_file(name, root='~/.torch/models'): + file_name = '{name}-{short_hash}'.format(name=name, short_hash=short_hash(name)) + root = os.path.expanduser(root) + + file_path = os.path.join(root, file_name + '.pth') + sha1_hash = _model_sha1[name] + if os.path.exists(file_path): + return file_path + + else: + print('Model file {} is not found.'.format(file_path)) + diff --git a/trail_detection_node/trail_detection_node/segbase.py b/trail_detection_node/trail_detection_node/segbase.py new file mode 100644 index 00000000..ae417652 --- /dev/null +++ b/trail_detection_node/trail_detection_node/segbase.py @@ -0,0 +1,60 @@ +"""Base Model for Semantic Segmentation""" +import torch.nn as nn + +from .jpu import JPU +from .base_models.resnetv1b import resnet50_v1s, resnet101_v1s, resnet152_v1s + +__all__ = ['SegBaseModel'] + + +class SegBaseModel(nn.Module): + r"""Base Model for Semantic Segmentation + + Parameters + ---------- + backbone : string + Pre-trained dilated backbone network type (default:'resnet50'; 'resnet50', + 'resnet101' or 'resnet152'). 
+ """ + + def __init__(self, nclass, aux, backbone='resnet50', jpu=False, pretrained_base=True, **kwargs): + super(SegBaseModel, self).__init__() + dilated = False if jpu else True + self.aux = aux + self.nclass = nclass + if backbone == 'resnet50': + self.pretrained = resnet50_v1s(pretrained=pretrained_base, dilated=dilated, **kwargs) + elif backbone == 'resnet101': + self.pretrained = resnet101_v1s(pretrained=pretrained_base, dilated=dilated, **kwargs) + elif backbone == 'resnet152': + self.pretrained = resnet152_v1s(pretrained=pretrained_base, dilated=dilated, **kwargs) + else: + raise RuntimeError('unknown backbone: {}'.format(backbone)) + + self.jpu = JPU([512, 1024, 2048], width=512, **kwargs) if jpu else None + + def base_forward(self, x): + """forwarding pre-trained network""" + x = self.pretrained.conv1(x) + x = self.pretrained.bn1(x) + x = self.pretrained.relu(x) + x = self.pretrained.maxpool(x) + c1 = self.pretrained.layer1(x) + c2 = self.pretrained.layer2(c1) + c3 = self.pretrained.layer3(c2) + c4 = self.pretrained.layer4(c3) + + if self.jpu: + return self.jpu(c1, c2, c3, c4) + else: + return c1, c2, c3, c4 + + def evaluate(self, x): + """evaluating network with inputs and targets""" + return self.forward(x)[0] + + def demo(self, x): + pred = self.forward(x) + if self.aux: + pred = pred[0] + return pred diff --git a/trail_detection_node/trail_detection_node/v2_trailDetectionNode.py b/trail_detection_node/trail_detection_node/v2_trailDetectionNode.py index 309ab765..d3f96edf 100644 --- a/trail_detection_node/trail_detection_node/v2_trailDetectionNode.py +++ b/trail_detection_node/trail_detection_node/v2_trailDetectionNode.py @@ -1,5 +1,5 @@ import torch -from .model_loader import FCN8s +from .model_loader import FCN8s, PSPNet from PIL import Image as ImagePIL from torchvision import transforms import cv2 @@ -98,9 +98,12 @@ def estimate_depth(x, y, np_2d_array): return np.mean(closest_depths) def load_model(device): - model = FCN8s(nclass=6, backbone='vgg16', pretrained_base=True, pretrained=True) - model.load_state_dict(torch.load('src/trail_detection_node/trail_detection_node/model/fcn8s_vgg16_pascal_voc.pth')) + # model = FCN8s(nclass=6, backbone='vgg16', pretrained_base=True, pretrained=True) + model = PSPNet(nclass=2, backbone='resnet50', pretrained_base=True) + model_location = 'psp2/psp_resnet50_pascal_voc_best_model.pth' + model.load_state_dict(torch.load(f'src/trail_detection_node/trail_detection_node/model/{model_location}',map_location=torch.device('cuda:0'))) model = model.to(device) + model.eval() print('Finished loading model!') return model @@ -116,8 +119,8 @@ def find_route(model, device, cv_image): output = model(image) # pred is prediction generated by the model, route is the variable for the center line pred = torch.argmax(output[0], 1).squeeze(0).cpu().data.numpy() - pred[pred != 1] = 0 # only see the trail - pred[pred == 1] = 255 + pred[pred == 0] = 255 # only see the trail + pred[pred == 1] = 0 pred = np.array(pred, dtype=np.uint8) route = np.zeros_like(pred) # calculate the center line by taking the average @@ -181,13 +184,31 @@ def trail_callback(self, camera_msg, lidar_msg): print("No centerline found!") return + #filter points that have no lidar points near it + filtered_route_indices = [] + for index in route_indices: + point = [] + u = index[1] + v = image_height - index[0] + point.append(u) + point.append(v) + point.append(estimate_depth(u, v, points2d)) + if point[2] == -1: + continue + else: + filtered_route_indices.append(point) + + if not 
filtered_route_indices: + print("No usable centerline found!") + return + # find the corresponding lidar points using the center line pixels filtered_3dPoints = [] - for index in route_indices: + for index in filtered_route_indices: point = [] point.append(index[0]) point.append(index[1]) - point.append(estimate_depth(index[0], index[1], points2d)) + point.append(index[2]) point_3d = convert_to_lidar_frame(point) filtered_3dPoints.append(point_3d) @@ -200,19 +221,21 @@ def trail_callback(self, camera_msg, lidar_msg): trail_location_msg = PoseStamped() trail_location_msg.header.stamp = lidar_msg.header.stamp trail_location_msg.header.frame_id = "velodyne" - #position + # position trail_location_msg.pose.position.x = filtered_3dPoints[smallest_index][0] trail_location_msg.pose.position.y = filtered_3dPoints[smallest_index][1] trail_location_msg.pose.position.z = filtered_3dPoints[smallest_index][2] - #orientation + # orientation yaw = math.atan2(filtered_3dPoints[smallest_index][1], filtered_3dPoints[smallest_index][0]) trail_location_msg.pose.orientation.x = 0.0 trail_location_msg.pose.orientation.y = 0.0 trail_location_msg.pose.orientation.z = math.sin(yaw/2) trail_location_msg.pose.orientation.w = math.cos(yaw / 2) - self.get_logger().info("location published!") self.trail_publisher.publish(trail_location_msg) + # logging + self.get_logger().info("location published!") + self.get_logger().info(f"Point: {filtered_3dPoints[smallest_index][0]}, {filtered_3dPoints[smallest_index][1]}, {filtered_3dPoints[smallest_index][2]}") diff --git a/trail_detection_node/trail_detection_node/visualizer_v2_trailDetectionNode.py b/trail_detection_node/trail_detection_node/visualizer_v2_trailDetectionNode.py index 5055d3f4..da2f0696 100644 --- a/trail_detection_node/trail_detection_node/visualizer_v2_trailDetectionNode.py +++ b/trail_detection_node/trail_detection_node/visualizer_v2_trailDetectionNode.py @@ -1,5 +1,5 @@ import torch -from .model_loader import FCN8s, FCN32s +from .model_loader import FCN8s, FCN32s, PSPNet from PIL import Image as ImagePIL from torchvision import transforms import cv2 @@ -99,14 +99,20 @@ def estimate_depth(x, y, np_2d_array): return np.mean(closest_depths) def load_model(device): - model = FCN32s(nclass=2, backbone='vgg16', pretrained_base=True, pretrained=True) + # model = FCN32s(nclass=2, backbone='vgg16', pretrained_base=True, pretrained=True) + model = PSPNet(nclass=2, backbone='resnet50', pretrained_base=True) + # model_location = '32s_batch8/fcn32s_vgg16_pascal_voc_best_model.pth' - model_location = '500epoch/fcn32s_vgg16_pascal_voc.pth' + # model_location = '32s_self_labelled/fcn32s_vgg16_pascal_voc_best_model.pth' + model_location = 'psp2/psp_resnet50_pascal_voc_best_model.pth' + if os.path.isfile(f'src/trail_detection_node/trail_detection_node/model/{model_location}'): model.load_state_dict(torch.load(f'src/trail_detection_node/trail_detection_node/model/{model_location}',map_location=torch.device('cuda:0'))) else: model.load_state_dict(torch.load(f'trail_detection_node/model/{model_location}',map_location=torch.device('cuda:0'))) model = model.to(device) + + model.eval() print('Finished loading model!') return model @@ -169,7 +175,7 @@ def equalize_hist_rgb(img): class trailDetector(Node): def __init__(self, model, device): super().__init__('trail_detector') - self.only_camera_mode = True + self.only_camera_mode = False self.bridge = CvBridge() # load model and device @@ -239,14 +245,20 @@ def trail_callback(self, camera_msg, lidar_msg): filtered_route_indices = [] 
for index in route_indices:
             point = []
-            point.append(index[0])
-            point.append(index[1])
-            point.append(estimate_depth(index[0], index[1], points2d))
+            u = index[1]
+            v = image_height - index[0]
+            point.append(u)
+            point.append(v)
+            point.append(estimate_depth(u, v, points2d))
             if point[2] == -1:
                 continue
             else:
                 filtered_route_indices.append(point)
 
+        if not filtered_route_indices:
+            print("No usable centerline found!")
+            return
+
         # find the corresponding lidar points using the center line pixels
         filtered_3dPoints = []
         for index in filtered_route_indices:
@@ -273,27 +285,35 @@ def trail_callback(self, camera_msg, lidar_msg):
         # visualize after-processed image
         visualize_cv_image = cv_image
         print(f"{visualize_cv_image.shape[0]}")
-        circle_x = route_indices[smallest_index][0]
-        circle_y = route_indices[smallest_index][1]
+        circle_y = route_indices[smallest_index][0]
+        circle_x = route_indices[smallest_index][1]
 
-        # # visualize the lidar points in image
-        # uv_x, uv_y, uv_z = points2d
-        # for index_lidar in range(points2d.shape[1]):
-        #     # print(f"{int(uv_x[index_lidar])},{int(uv_y[index_lidar])}")
-        #     cv2.circle(visualize_cv_image, (int(uv_x[index_lidar]), int(image_height - uv_y[index_lidar])), radius=5, color=(255, 0, 0), thickness=-1)
+        # visualize the lidar points in image
+        uv_x, uv_y, uv_z = points2d
+        for index_lidar in range(points2d.shape[1]):
+            # print(f"{int(uv_x[index_lidar])},{int(uv_y[index_lidar])}")
+            cv2.circle(visualize_cv_image, (int(uv_x[index_lidar]), int(image_height - uv_y[index_lidar])), radius=5, color=(255, 0, 0), thickness=-1)
 
         # visualize center line in image
         for index_circle in range(len(route_indices)):
             if index_circle == smallest_index:
                 continue
             else:
-                red_circle_x = route_indices[index_circle][0]
-                red_circle_y = route_indices[index_circle][1]
-                cv2.circle(visualize_cv_image, (red_circle_x, red_circle_y), radius=5, color=(0, 0, 255), thickness=-1)
-
+                green_circle_row = route_indices[index_circle][0]
+                green_circle_column = route_indices[index_circle][1]
+                cv2.circle(visualize_cv_image, (green_circle_column, green_circle_row), radius=5, color=(0, 125, 125), thickness=-1)
+
+        # visualize filtered center line in image
+        for index_circle in range(len(filtered_route_indices)):
+            if index_circle == smallest_index:
+                continue
+            else:
+                red_circle_row = filtered_route_indices[index_circle][1]
+                red_circle_column = filtered_route_indices[index_circle][0]
+                cv2.circle(visualize_cv_image, (red_circle_column, image_height - red_circle_row), radius=5, color=(0, 0, 255), thickness=-1)
+
         # visualize the chosen point in image
-        cv2.circle(visualize_cv_image, (circle_x, circle_y), radius=7, color=(0, 255, 0), thickness=-1)
-        cv2.circle(visualize_cv_image, (0, 0), radius=12, color=(0, 255, 0), thickness=-1)
+        cv2.circle(visualize_cv_image, (circle_x, circle_y), radius=7, color=(255, 0, 255), thickness=-1)
 
         cv2.imshow('circled image',visualize_cv_image)
         cv2.waitKey(25)
@@ -313,6 +333,7 @@ def trail_callback(self, camera_msg, lidar_msg):
         trail_location_msg.pose.orientation.z = math.sin(yaw/2)
         trail_location_msg.pose.orientation.w = math.cos(yaw / 2)
         self.get_logger().info("location published!")
+        self.get_logger().info(f"Point: {filtered_3dPoints[smallest_index][0]}, {filtered_3dPoints[smallest_index][1]}, {filtered_3dPoints[smallest_index][2]}")
         self.trail_publisher.publish(trail_location_msg)
 
     def only_camera_callback(self, camera_msg):
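Both nodes publish the selected trail point as a PoseStamped in the velodyne frame and encode the heading as a pure-yaw quaternion: z = sin(yaw/2), w = cos(yaw/2), with yaw = atan2(y, x). A short worked example of that conversion (the point coordinates are hypothetical):

```
import math

# Hypothetical trail point 2 m ahead and 2 m to the left in the velodyne frame
x_t, y_t = 2.0, 2.0
yaw = math.atan2(y_t, x_t)  # 0.7854 rad (45 degrees)

# Quaternion (x, y, z, w) as set in trail_callback: rotation about +z only
q = (0.0, 0.0, math.sin(yaw / 2), math.cos(yaw / 2))
print(q)  # approximately (0.0, 0.0, 0.3827, 0.9239)
```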
From e51e85e6d2db72a1363bbbe501326ca8e5725059 Mon Sep 17 00:00:00 2001
From: Frank_ZhaodongJiang <69261064+FrankZhaodong@users.noreply.github.com>
Date: Wed, 6 Sep 2023 19:45:46 -0400
Subject: [PATCH 03/22] Create README.md

---
 trail_detection_node/README.md | 33 +++++++++++++++++++++++++++++++++
 1 file changed, 33 insertions(+)
 create mode 100644 trail_detection_node/README.md

diff --git a/trail_detection_node/README.md b/trail_detection_node/README.md
new file mode 100644
index 00000000..ceda4310
--- /dev/null
+++ b/trail_detection_node/README.md
@@ -0,0 +1,33 @@
+SETUP GUIDE
+=============
+1. Create a new folder named ```model``` in trail_detection_node/trail_detection_node
+2. Download ```psp_resnet50_pascal_voc_best_model.pth``` from the NCRN Google Drive (Trail detection model folder) and put the file in the ```model``` folder
+3. Download ```resnet50-25c4b509.pth``` from the NCRN Google Drive (Trail detection model folder) and put the file in the folder ```~/.torch/models```
+4. Build the package
+
+RUNNING THE NODES
+=============
+The package contains two nodes:
+1. The node that takes the camera and lidar data and publishes the PoseStamped message
+```
+ros2 run trail_detection_node trail_detection
+```
+2. The node that takes the camera and lidar data and shows a visualization of the output
+```
+ros2 run trail_detection_node visualizer
+```
+
+POSSIBLE ISSUES
+=============
+1. **Model doesn't load successfully:**
+   Change the directory in the ```load_model``` function in both ```trail_detection_node/trail_detection_node/v2_trailDetectionNode.py``` (lines 103 and 104) and ```trail_detection_node/trail_detection_node/visualizer_v2_trailDetectionNode.py``` (lines 107 and 110)
+2. **Subfolder package relative import error:**
+   Temporary fix: move the script into the same folder and adjust the import statements
+
+SCRIPTS EXPLANATION
+=============
+1. ```v1_trailDetectionNode.py``` and ```v2_trailDetectionNode.py```: two versions of the ROS2 node that publishes the PoseStamped message
+2. ```visualizer_v1_trailDetectionNode.py``` and ```visualizer_v2_trailDetectionNode.py```: two versions of the ROS2 visualization node
+3. ```GANav_visualizer.py```: ROS2 visualization node for the GANav model
+4. ```jpu.py```, ```model_loader.py```, ```model_store.py```, ```segbase.py```, ```vgg.py```, and the ```base_models``` folder: model loaders for FCN32s, FCN8s, and PSPNet. Copied from: https://github.com/Tramac/awesome-semantic-segmentation-pytorch
+
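For the first issue above (the model failing to load), a quick sanity check is to load the checkpoint outside ROS before launching the node. This is a hedged sketch, assuming it is run from the workspace root with the package importable; the path is the one ```load_model``` uses in ```v2_trailDetectionNode.py```:

```
import os
import torch
from trail_detection_node.model_loader import PSPNet

path = 'src/trail_detection_node/trail_detection_node/model/psp2/psp_resnet50_pascal_voc_best_model.pth'
assert os.path.isfile(path), f'checkpoint not found: {path}'

# pretrained_base=False so the check does not require resnet50-25c4b509.pth
model = PSPNet(nclass=2, backbone='resnet50', pretrained_base=False)
missing, unexpected = model.load_state_dict(torch.load(path, map_location='cpu'), strict=False)
print('missing keys:', missing)
print('unexpected keys:', unexpected)
```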
From 9beb822852de9e0bbe84f5a887a9af261540be00 Mon Sep 17 00:00:00 2001
From: Frank_ZhaodongJiang <69261064+FrankZhaodong@users.noreply.github.com>
Date: Thu, 7 Sep 2023 20:14:47 -0400
Subject: [PATCH 04/22] Delete trail_detection_node/trail_detection_node/__pycache__ directory

---
 .../__pycache__/GANav_visualizer.cpython-310.pyc | Bin 8527 -> 0 bytes
 .../__pycache__/GANav_visualizer.cpython-37.pyc  | Bin 8540 -> 0 bytes
 .../__pycache__/__init__.cpython-310.pyc         | Bin 173 -> 0 bytes
 .../__pycache__/__init__.cpython-37.pyc          | Bin 167 -> 0 bytes
 .../__pycache__/jpu.cpython-310.pyc              | Bin 2452 -> 0 bytes
 .../__pycache__/jpu.cpython-37.pyc               | Bin 2441 -> 0 bytes
 .../__pycache__/model_loader.cpython-310.pyc     | Bin 6824 -> 0 bytes
 .../__pycache__/model_loader.cpython-37.pyc      | Bin 6861 -> 0 bytes
 .../__pycache__/model_loader.cpython-39.pyc      | Bin 2460 -> 0 bytes
 .../__pycache__/model_store.cpython-310.pyc      | Bin 1425 -> 0 bytes
 .../__pycache__/segbase.cpython-310.pyc          | Bin 2194 -> 0 bytes
 .../__pycache__/segbase.cpython-37.pyc           | Bin 2155 -> 0 bytes
 .../v1_trailDetectionNode.cpython-310.pyc        | Bin 7419 -> 0 bytes
 .../v2_trailDetectionNode.cpython-310.pyc        | Bin 7265 -> 0 bytes
 .../__pycache__/vgg.cpython-310.pyc              | Bin 5456 -> 0 bytes
 .../__pycache__/vgg.cpython-37.pyc               | Bin 5925 -> 0 bytes
 .../__pycache__/vgg.cpython-39.pyc               | Bin 5926 -> 0 bytes
 ...ualizer_v1_trailDetectionNode.cpython-310.pyc | Bin 9034 -> 0 bytes
 ...ualizer_v2_trailDetectionNode.cpython-310.pyc | Bin 8830 -> 0 bytes
 ...sualizer_v2_trailDetectionNode.cpython-37.pyc | Bin 8356 -> 0 bytes
 20 files changed, 0 insertions(+), 0 deletions(-)
 delete mode 100644 trail_detection_node/trail_detection_node/__pycache__/GANav_visualizer.cpython-310.pyc
 delete mode 100644 trail_detection_node/trail_detection_node/__pycache__/GANav_visualizer.cpython-37.pyc
 delete mode 100644 trail_detection_node/trail_detection_node/__pycache__/__init__.cpython-310.pyc
 delete mode 100644 trail_detection_node/trail_detection_node/__pycache__/__init__.cpython-37.pyc
 delete mode 100644 trail_detection_node/trail_detection_node/__pycache__/jpu.cpython-310.pyc
 delete mode 100644 trail_detection_node/trail_detection_node/__pycache__/jpu.cpython-37.pyc
 delete mode 100644 trail_detection_node/trail_detection_node/__pycache__/model_loader.cpython-310.pyc
 delete mode 100644 trail_detection_node/trail_detection_node/__pycache__/model_loader.cpython-37.pyc
 delete mode 100644 trail_detection_node/trail_detection_node/__pycache__/model_loader.cpython-39.pyc
 delete mode 100644 trail_detection_node/trail_detection_node/__pycache__/model_store.cpython-310.pyc
 delete mode 100644 trail_detection_node/trail_detection_node/__pycache__/segbase.cpython-310.pyc
 delete mode 100644 trail_detection_node/trail_detection_node/__pycache__/segbase.cpython-37.pyc
 delete mode 100644 trail_detection_node/trail_detection_node/__pycache__/v1_trailDetectionNode.cpython-310.pyc
 delete mode 100644 trail_detection_node/trail_detection_node/__pycache__/v2_trailDetectionNode.cpython-310.pyc
 delete mode 100644 trail_detection_node/trail_detection_node/__pycache__/vgg.cpython-310.pyc
 delete mode 100644 trail_detection_node/trail_detection_node/__pycache__/vgg.cpython-37.pyc
 delete mode 100644 trail_detection_node/trail_detection_node/__pycache__/vgg.cpython-39.pyc
 delete mode 100644 trail_detection_node/trail_detection_node/__pycache__/visualizer_v1_trailDetectionNode.cpython-310.pyc
 delete mode 100644 trail_detection_node/trail_detection_node/__pycache__/visualizer_v2_trailDetectionNode.cpython-310.pyc
 delete mode 100644 trail_detection_node/trail_detection_node/__pycache__/visualizer_v2_trailDetectionNode.cpython-37.pyc
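The diffs below remove the bytecode caches that were committed earlier in the series. To keep them from being staged again, the caches can be purged before committing; a small helper sketch (not part of the patch; run from the repository root, assuming the layout above):

```
import shutil
from pathlib import Path

# Remove every __pycache__ directory under the package tree
for cache_dir in Path('trail_detection_node').rglob('__pycache__'):
    shutil.rmtree(cache_dir)
    print(f'removed {cache_dir}')
```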
diff --git a/trail_detection_node/trail_detection_node/__pycache__/GANav_visualizer.cpython-310.pyc b/trail_detection_node/trail_detection_node/__pycache__/GANav_visualizer.cpython-310.pyc
deleted file mode 100644
index 23211bce82b256058757eb70014e788a5350c843..0000000000000000000000000000000000000000
GIT binary patch
[compiled-bytecode binary data omitted]

diff --git a/trail_detection_node/trail_detection_node/__pycache__/GANav_visualizer.cpython-37.pyc b/trail_detection_node/trail_detection_node/__pycache__/GANav_visualizer.cpython-37.pyc
deleted file mode 100644
index 5e803d19b28ae02db23c1203a34cd91860783dc4..0000000000000000000000000000000000000000
GIT binary patch
[compiled-bytecode binary data omitted]

diff --git a/trail_detection_node/trail_detection_node/__pycache__/__init__.cpython-310.pyc b/trail_detection_node/trail_detection_node/__pycache__/__init__.cpython-310.pyc
deleted file mode 100644
index a29346dca3da9405d20064e434c475fcbd687b4f..0000000000000000000000000000000000000000
GIT binary patch
[compiled-bytecode binary data omitted]

diff --git a/trail_detection_node/trail_detection_node/__pycache__/__init__.cpython-37.pyc b/trail_detection_node/trail_detection_node/__pycache__/__init__.cpython-37.pyc
deleted file mode 100644
index 108db98bb6506399a21ca2fadf9221c73d5f37da..0000000000000000000000000000000000000000
GIT binary patch
[compiled-bytecode binary data omitted]

diff --git a/trail_detection_node/trail_detection_node/__pycache__/jpu.cpython-310.pyc b/trail_detection_node/trail_detection_node/__pycache__/jpu.cpython-310.pyc
deleted file mode 100644
index 96f976eaeff4e2dbe8bba73d606c4b100bf72eb2..0000000000000000000000000000000000000000
GIT binary patch
[compiled-bytecode binary data omitted]

diff --git a/trail_detection_node/trail_detection_node/__pycache__/jpu.cpython-37.pyc b/trail_detection_node/trail_detection_node/__pycache__/jpu.cpython-37.pyc
deleted file mode 100644
index ba624d87e89303e64e1f29b1fb1b3dbf84126f4f..0000000000000000000000000000000000000000
GIT binary patch
[compiled-bytecode binary data omitted]

diff --git a/trail_detection_node/trail_detection_node/__pycache__/model_loader.cpython-310.pyc b/trail_detection_node/trail_detection_node/__pycache__/model_loader.cpython-310.pyc
deleted file mode 100644
index 40bfbde6c5e6e7fef1fc9362c1c3bcd7a5d67f7a..0000000000000000000000000000000000000000
GIT binary patch
[compiled-bytecode binary data omitted]

diff --git a/trail_detection_node/trail_detection_node/__pycache__/model_loader.cpython-37.pyc b/trail_detection_node/trail_detection_node/__pycache__/model_loader.cpython-37.pyc
deleted file mode 100644
index 382efff7be9cea03567df6d79de8c371fa85d9b9..0000000000000000000000000000000000000000
GIT binary patch
[compiled-bytecode binary data omitted]
z6+dootUzm^i6q2oiOBU_W7>^~Htb_Uv6?{=-0r_{TIjWJ}P$#K4MaAP(usYCe1%E|r zmRP>klSKFy6}*H?wWp{!jbePmg-G&dub?NQZ8Vs)v1x0tLED+h?&qjABP1`Z88IC; zK%3a1+@_IrGhoR~FfOxeRt)bUfdwlpm6e#VXjX1rY&FE#65BeP$J&R++C0`~i{d0^ zZc)^;*k7GN0b`<`qec@&v#iciotekndY+muP;rh5<`8pm9?dZa&r=sqWUE8|k6NH& zkqTzrgny(^-^Uw0j6wqpuriA|d`W@n%YXnGpC4;*S&_o1i&VUbA}x8I9JHZ|q_yfL zDhh2?FH^JFb$o-Yqa-fW9g(6s!o-$ybrz!I^^|`8SWnrB6YRvnsbeP&!0~_+XR(~M zP0XT)*hpNYH)^4nQZpgN3x|Eb9dv*9TYCQTJzO~} z5-SMvkXcHXqn<#Kn%9GO-cL(!sKJm7wYAyF^UVFn2!=MjuWL%r5PpiRr zQ;Zp3#zY)wOJYq#74j5tD-Z$_1xM_h82d;^)rl~Xe(Y7e5g7+40Xqk%Zk zP0*oOLJN_|>@^8ufH$I*G_f;V>@^lUXG-k;Uuh$r{)l#UEGorH#@WZn3xzzNxgzZer#iP85f^1pjk&ZFOOLfxOc`=CkH#xT9Hka_TKm+IWk{ z$ZHJNW6dda#sSi0I)h*cfsaL}PiMTletUvcrc~F01gV^+h zb{7^ihDDU{2cZu(*WnSOUgA0!MS8+@SWVQ+aRu6Dbo4~OPwnbdyEfIXPqiDYbM}fN zBw+4m7WGv$4@m9dJh586P5mU=sR2#MoQxBfr9X@jm|VWqrJuYg5rN$5QjD-r)E0}{ zQc+uGey3I&cd?Lt7PHS%_F2Xyv%J$yGk&Qz=`gvQ!V184kWLiijR+h`>uSXxdFHcX z=h3N4IjVpU;MpFQnZ~nNkSN0+Cr=!>2F_0~%*cDmV@J$5)obRxUg*ovkc2a$H?vQ*nj*>r`b<6aM8uP^J`)m3(a~A20Yr?2l-M|&(6Hfk z!*o!9yHX&cg;fef4AGd@XbBmCvD5ESM!}4&Jv+8Gk;S0A3E`l0fQ-7V<_7VN!XR3E zjw}Nm5qHT7e@oN1UEG_nqPoW%jY#LxUCW){Fm0rMQfWhVp1oVQ2y zlHiQ2CfNYiZ)gbv<1Q>6qGsqz29dfVaQaF(`72tBWdXPhq#SQPmcOryl*GzkjQ4QF zsj4E%@sfDhCzAq*Qf4{KA|^ABm@LK@8$Yy?+BoeBawrKnlOv70)(*nJUG-H2z;V%g zd!TMsDlQ%n5i24}j<|WQc(|EmUffGA%sCTi%F1YKJy7nT<3jg?E<&#$a={xn-UwXI zU(qhokCBv#=Xu&VbAy$lSfRUEY*hv4@|+ifIttWfk@9lG?4)jdXBfE8$)Mwp`thYR z$VZ{);)OG>xRd(XV#%DgaADyL;==Keg^P=4n%QY4c_N%=u)c|ePcfOIVdNQhe98g2 zoJ?{N*h_HW=(&m`xtBP{bm`+ToG*gvk6$TPCGM=<3ELZL5F!g9-K)qvMd#jHec!#} zXCZj8X?{qg2ynV}z+|z?<gy`0y_!A^o{|WX!Z~r{2IOr z@;1F8uw;)Q~wA351Ey+#Pb1I{zG!S%uKz5UZZu3yk4Zp?3PCd9_+H_oZ|vqV`8oy7si#?YrR@_5hV59nc+T@xwxakO(NN+F<4h1b&COL&*n1+>2~kp2@oi$Hn~ z0FedIc_59ZV^$dOVs>IUIU57Z}3JWy3R>cZ#ar*&AHTJ z4`@CNGe`I2xH;b$g`7f0E*x3Gaa|EtS_b~Lk|l25r)DV%y2L+4aFg2jKMS&ToJz}* zJuH9}cd)a+VaM&5*!&cl0y6 z3fvzkEDi^Rk6^01Kp0^(BYhfCI^nH8wFo7BJ93Q1%G|yed6axkn9ZCgggKlZ_$PK0 zfW~DWXuPSW#^^oL_Q$X<*=bYlKInF@-%e3n9OwWJbqxq75oIK@7-bf-xqaYtoTsEi znZw*CwFnMi{u2^;Pl-b!AGQy)yS=sZo{|<=Z||laZZ#b~JYAS|nCf>R1u1AnPOPCd zf{&`~io|vWXB}Gd?ZT-XW}VUl7qp&e6+W|vL1m5T3H_E9wS%xC2Ob2-`ObQcd{1Ci zeXa%X=Gw+w>oPLeHj5VXDq6WGc_; za@kJ`Z3&^nd;EUM;boF(cjAS4Y)+(Gusa~&3vksqp}nnKJh;K0J0lx-Ks}BXFOs5= zaoqN`qj=WQo=CGqDILtBBbRXP((?eudx_#Y6yQ&6zEyjN4-?r{ &u`!S1XXNkQ zjs3jOH;(p`oaLgsA#-&jeyBE-OgHc{vY7FLr$sLpvB(*JwXzRyvN+2V#^w6pv21}m z2x+wjgwPiCXh4^(7Cmo%ExHUphqf$_uKat{GO4gWi*|}RF&p?{xYHI)^%oET6shQl z9$Uas1o|;RacEUGa88#TLm~|A$^pp(&M|r)IM=KV3h%(LyviR2Bd8h^sAJMH=hQi_ zRkg~$K#oJRDy)K6uBun{Du7iDv#L?iSFUPS?yy!hM^KX>1w;8K_)s@~wTxZsf9zVu zu7!3*0}>Cbk-Ia-pumOqGRTia?zS!MVAORp$$DK81K0(Zs@;^Uuuz^uavq66ID*?C zOVe##?}>uTL7pWA2SD|Xxb~94fD2||G{K!)l!J`7m*gcd)%4Lz+iE{Ai$PhaD466~ zN16;WheSa+FxneY*eTT{2{)tWRBLi_bK%uZZNqMn2T2A``35>`$Q3->hol1NGIT_} z353uPf0hFipOC7DW#LU3EHLW57=A~9a8NCDzzuyU=Cw=z!d7*&KQp#+dyOhWE(K(4-&?DqN|;~ zUZQrPiu*k=$dZ(IfBXT*FCVsRvWX_>{3MfMZ!m(lulAW_SpdA>%j;hQ;n%z?K-rSMR!L;=*D6{trcaE*VC z&djRttIH2&9e(f@Ooe>{lGv0VNJ?v^+m= z3Qc((^b1yNC>m1?*e#e-IIYx82-~3!id?4q>p~d7MrUW@dZ!eoQbWsu#GCQ*E!ZTo q;ctv-VcPr3px=OVaw{fW&k3HcgAXQ2at9l%!uxJO+#1%(wf_K_V@JaP diff --git a/trail_detection_node/trail_detection_node/__pycache__/model_store.cpython-310.pyc b/trail_detection_node/trail_detection_node/__pycache__/model_store.cpython-310.pyc deleted file mode 100644 index 4409cd4cf47bbd56aed593a3913a42d34600acbb..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1425 zcmZuxzjNC*6b3*_qGU%-oHnVi3x^KTV2b)d+v??Vy=yxrm%FqbbnWpV0yODRgbF}* 
zDpMV5ckI-EARhBa^dG_6DO=}EeV}Y7%@sU6;sJQXd*Ang;?A88f>!&ZGHtLXYh5DdeS}@wCI77pQl4wf|HwUieB2$^Ek|PcnZY#c9sCf&tf(<|SwT-0o7r zTSx@;u<0&jUg|hqmWj?q`OI{WxsK~iahez0>{wrNRl>_oB-ln?H(M9fi@t`g9)QVF zg??)NNY;2wDpKJ;$ti5lXNnkjhOK^Kty33MWaJL9y2~k zj-pXY!$C9~91cg}gbp}A@ZEov%z>o)7!9Uxbict7wXG?KcnUGDht#opmVwsB|TFC6xLt$&Lv-ncqxlDFB8FX zfNw)}|J>3RTd3E;DqsAEKuhK=fseqckVdCiZfnf&38@exf8Y!J8`iM02hwHE?_>{w z;M`}c{pTOa%1?##&+C)?T=}KYK0O7}=&azu8!SCI;H@>Fgsp3lsf-Rx=LcFW`L|LE zX`1knIo0wW?0c^bX_+$}l?kcijH>KD_^JuU!*}74R(d zYq3sY02SJLaRnC)>LhET$%*sYt?(6I<9ghva7CE&5M7bAqnj3kwOg;WD(4EzFDkfv z+GFl(Wc>>a*V&@ec(%H~0b>9Bea`D1sWGUNUAe(RXRFVj*4Nf@F`$zTC%0$(zqugFQo;;H2JGsX=NJqmF_j_ql_zs(58K>f$L%Y zp|%CTM)M<}rPah9bkzm3i?{JE@xZ!x*Xd#T5GH$DrWMCbBynsyG5kREQt~*K7IFC* z7}El=*<FE;x{a@Sw?x+Ge~^=Ju<7 S+QuGa=aDXPn=ba8Zu4IxGJEL& diff --git a/trail_detection_node/trail_detection_node/__pycache__/segbase.cpython-310.pyc b/trail_detection_node/trail_detection_node/__pycache__/segbase.cpython-310.pyc deleted file mode 100644 index c382683ae99ab55aac4a2c2309d76d554616133a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 2194 zcma)7&2Jnv6t_K}yO~W>NWa8~MruVEXp(NyNGL@JQBfpRqC&!DIE*H1XEV)=XYqKG zc9~q-O1<$Ha7YlB{taCCC*a^Kr`$Mm;@La9*;GB?m7m}9duu=c-p7=kP8-2^`1?Qf zPYQ&2TZ?UT&6NB`5D}cw$3on z2rKLBN8d-53IwY-lN)!E!)~QIW2t+mdqLb-GhzUz>5I%gJ$>5j(C^cu+62N-jNy%h z;@F~{muL@D2Rv`ZHoU-pfntZYXd70S&d~r??`s)@xIwL{}XIkiZtVlhBV7l3G37vKshSKK`5UNSok4j zd+9Kjn@bb8mhOe;{O#$H%^m=_6ha&(;>Oxiv|@~uKRMySPS4eNHN%J1fjI}W`U8kW z64&TCIU%PAquLg86mv+KW3MJPN~{{RiCtUNIw0bvbm&~|)E>2eA+`G({#k2Xz65#) z^y|`lqy8G`UC`f`{u}k4Y*Fu|U0bL4Iev!0iy!1%A5s5T3?YROW_WsAK+!fNHr#8t zDA;Kj7sNpN3*H{OLHd&JaN;gCqe0hbreN zqjFz#;QUu8>L{lg4wx{xqx>YvcqWrXS?TbJa(ORLt4in$6)jCP&_biHg71bS#YLY03MmqO8`EqiS_`nB{a;W0WTplDVf#HsMgxZ{Dm} zUthN}cq%T#xr;EX>pT|1}t zHk$Bzz(;uWJ_H3IrBJ#dfIThQrb45(iK%jWB|q#cYnOMGEm%HOUXeZ-lx42mJblVU zw^`Skb-h{dnm|UbxCSp)_=zIDF3dHU>oD({xQ)qs+9J@nCyi6LG*mhf0uli7=RcP$ z;DC&7TtI#HpBIqVQP4?eke>$zFhLqeif=$wMP&35J52K-G;%;;(wmMlxgTbHFqBo8 zavDlVJSHpAw0N)aF(T$^;sZGM^~d!~uaxZtpMcIxoEAWR%S)`7<& zsG;}YKp^Qa6xka4M8s_{HFXqO=T0db;!0n4&8DmvVM-|`pyg#|kb#5!y#UzVb`zrwj9oSW<#ca$q8{@@kMtCex{ yJM8X``_g$!0{2pVw{qb_3;vt?X28*H#5kh-PscC(Ob6U}VA>v6rY*>yz`wCAPKRA&;mhb5wUYlEFpss3c^SzI1w?;Hmmi{+@J+=<9tMR9eRESb1I;+a(@GjYTPJxHC zvA(|dU1&8uDO4%+=8gDppql<{HMlW&VY#_smOZ!)A2RQxc-wrSZ#dKzFhL>;{xMD? 
zi}#|HO-HB|l-hV|)#x`-DHAIJe(0#bhf(?rvn zJZ2~Clu%OJ@(QOVy|s9cR)YX z-YfNMpm#z4RQs>gFY6BXPP(;qN*~i7DOmBFUgACmZe&3~LC)yxJ+O_yn>|h-fW4jV zU5PG*tm%%WXd!OA7UQV1$gqXDxiP&oDG$o(s9c0#EBN6!BaDy?QUe%V**HfT*ZX0= zanxiiq)C;=kK?S&bR0KUGI`jzWtt~SNlZxDL5A2$1iR^dJAs-g#FtW5vT>nKibP8b zRn~*?B=j3wiF_z8;1|od?awnpzK+WaXAZ@CPzQ~BaFocAl34UmQk^(9ugCGP7kmJq1>i2jqm-^pqY$=77x3$&hP{efym3 z7#Xq=&bu{(GnSD(%$H-^$bM~u?10>nhc!9rK=xX-T|1}teKMDaAae?N!=`UTJOE7& zMH&Fki{fl+aVPAWcs5R2m4}1K+A9Z*Ek!^UZd?Wr7&G z@(pmip${9D4uCd*ZUWtsi0h24;E5grAGcOrgB^7R7-2mC{@jnb^dg(Cya4<8FBi~b z2r$_U^q5`%3PB(_UBS{}(+|aAl24#l0|fIHbd>4+AS=fct%9WFfrf+=TFJJHd#!yT zGj7SZ;NF)V@!M;~El?_x2`}f#{KUdoG<0DsUOG4S7^6<*)f(FS3V{{=0kNH#Jxsof z@1t{}b!JdwLqPeYm#~T+6zeH8Mq2hZ1090EcX)nJ(PPlj*E&< za+G^<{QV@!Ta6dTyh`KPRE&HFK9%nwdmmWSZ)-Fac@9SwF%PMH6AxkNO=rQHR>*wc zX3H1-i@qK9WC$CLuPd4Eua#xP%F<+&=_1i(6L+vmy^ g9|il(YiEYcB$I*ur!x;f!(ba*Y}$kTyTn}UKT@J0bN~PV diff --git a/trail_detection_node/trail_detection_node/__pycache__/v1_trailDetectionNode.cpython-310.pyc b/trail_detection_node/trail_detection_node/__pycache__/v1_trailDetectionNode.cpython-310.pyc deleted file mode 100644 index 8d94f2c6747331e53919f20480d27449e3c971e8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 7419 zcmZ`-ZH!#kS-#(M=W}O2?A^83&e)FA4lV0=Q;JKS*vh6&Ov1*B4JZTUYIe@u**kmZ z-toD2cD=)06S{VtsJIXsfk3I4HS~uCqCZ;HA5`@ZDnUdlKvi2ffGQ{mEfO*){liCN zp7-3D{YpBcd+s^!`Ml?S&+|U-35&&?g5RUR^MIYND$4h%u=i)7a1l>*UR4yPFx6LT z3e%YGt8J~OsU$Oey=~M?nKyl_ovCGHUiIyoE&F6^S-dSj*Us1SvM%En+QnK?=54=( z{ziMGHZtf}ZjaW+2Kn*YgdF2g?GWBs|8RSYtK3v;r`^Zci4~2%>7L$H@D1t}`BYDB z)4ncjV;_~OD$GyLFPw`gbE#c#y2#{WUJs(Bkhe)}EQE}t`O9JCUWx1Nj?1bVH_)48 zUJ%dwVVBLJi9NsiH1`+=(VMgX)V+J@4>m7ynx^@YPn>BRzxnT#k6Yg__3k(S=0E@8x&Qd;jr*wl+Ry%E=k9p#Li5e-FP;82wYro0 z{JWo4=c#h-;h&HE{dfQPe)Fxf-~LYfzrJ$6dG4?N<;_b!_||<-F%`vA@M#TN_3%fZ zO3yr=XcbAUv{a_X8q@A*8)~93eOgIWrk_`09VKI0nUdvcWrE(uc?D84Z8MOWXsfi0 zz8PCv>V`JRXOPzuEm6w~#?=!wwx<-Chw`dc@9A6xzeX6Wx;(DTu}Y)fc6l9=kTWl+ z$8P0A73Oy0k9YJ6F|h9iMOy`eyNZ$K9sQmOC!@zM2(U&=IEF z?!@cDT=K(uED9c3hO_K?&E;5FK4$WpLid7IF`2Hw*)f@V?1h1|B1U)^r}=h0=H7KN zCY$)FY}IYVA+H*u*j;sYS1mdt+<{2JFbIw|HX0hw}>_abdtSsAEWixSXt~~Tc?m^f#Gdc+!<1F8@*nV$L9DX)!48%c|gSBCC`st?lNbe2};XJlNstTxktqVJCw_x zLuGUXNzNEkEves}rfC!U6!cuyCP>$hp}Ik*ZTQO~L%Jqe^ahedW6&YC@ZR9Du~G-_t~ac~M*s8gAr7*ThuE zyYAwc4`fcf%-v`?^w}OA=g#}6ZiMa5Q$L4E-=Ss9D7}-z4M;6!_Pif9>VEVTGQ;NE z5Amy-sg)M$yxFPq*dsU*I&4swatnmfcIyFbTD>mRb&(G`&Kz^nDiu&U*h7bQ7RwgJ z)jH5*r+9K_@oUR4*a6Z+aoCBRwUDns8=|~p&BH-T!%?N?qV#ir3}42jU6=NM+)tw- zq67B0Y3O9U6EJB*Gt`_`&~h4o5>@+Pi}r}1YaUPZQ6w=yhxiyl$Q=XjOYOr&_H_nR z+fr315C%p7rN$<+Hl+l{mRZ=QVT8HbY@Yq-Rk#EzX!!Lg682)fv9cHjt{CZXmjF1x zVO^|8uE^~QdJ7S6ygzKeFTl3}-Ts-SMsO~2(bI?mlMDiI<{hj#11Shc41YGSeWo+jb&kwTxfA9%npYWgHc+{U4=r0DMv^(K#nQZ zKu!)Ms}>=Qi2s(=vYLadFf{%=iu=HWfYHVycf10ELO?(}oMpE4u)?m;3JTcb?3B??Usf)vy2Fku(2zq4`_$ zpY{LkfB%kH`AhHB{>1oy*t8>C`tj6=9A97KgfANJEUvi#)_NBR(o_(4V=!wica0Zwu?5p8b zH;B+Iy8y0@Bj0mHwi`s(x~|)E`Li_HCuEl(bebI2WdUK@_3I)tXe0DSrz;Hdy21>) z?asPHB2Lapm|a-mIg!IqVI+MdY!tvHGC>%SP?$XI2CQ1-bo%)v>V&mBlI@8sK5dwl zZj=H7VWyQ<*o`~gSQt2nOpX(~BE!Qq2dfY|GNQ1y?BR$=_>DxS4k`x{`JO{S?!>`5 zVz-6)qYon?__0)Y)psY19FD`x!-`%YuZp~(*{A_Lr6ra7=+MBk??(rfe$ja(u2NH( zQqyiLOYnqDyJ^=XNFjVO7@!DmQ+iFMX0a^Gp`5u5)31>U-el zu!&lym>|e-N(c-TXM}<9Ic0K;&Ztbsm?Cjhl=cE1%xX|3uSFhdn*c^wDTxsytF9li z^#B?o${lSVM{RW+w5to&C2R-o$BP|DlpEY7tm||aVLfm_99|nRm`=7M72|_;QFk$F zaIZsCrcvU^fHwhUeZq7!m0^psuPH&BD5WfVr{!2$-q(;wl&}_Mpt;9#^NZc3C6_x9 z9iu|pFBOU6V7|0i)rAqc{?cw=1VdQ|K?l5!s{E!t)NPql`mG|K73vpK(B+Kk{q%O$@?sA%XLh%lZ`((6abvD7j6^tCYM(iS&&- 
z4wC;eWPd{P&m!r~>|euvL)k}72xfQUb%6plP~kW>Ej7Y5(3P~@q%!xYE{(RREOQ#u z;7GwUTYAfAnTeLZr^poEywY zR}B6#%|09_ptbHDRpBdCAC+kJd!MS2s@=Qz{0mC21cgQw?lxR+)nzBDc`o<+6>Ndf z>x+@FmfbqT={7-M+8sE8dH_$<;Lo9j)QHG6-PrNNCXiYfUa%AnQDk@|L3X9_WK#uT zMv)uX1LaEy8C-$116}kmTBvaJd%GkesJLXL7d4(hRz#MaTly^=!ZhcB!Pi$lx33 zE{a~eiE~aDUZA>?eeL>D{!7$E?|B{WAeX|5#CQrH<<(b8VZA_-2!4mepQBE;yi{>K zxTusIBb9OVfn7!@UB|_a3C0^>xgqzk1CrhdJ@OEBH^NA$Yy3B8o8CA!5v*G&6B2g%o@2aD&1mODQBu^MkbHC}+xt3bP5s6 zdYPCo_^_cGI;8}ZN8~+YPCE}6wE&-B%i0*)%}7|CkU&~i4?5HR=7@_5&_6mkpno~X zL5SM*DPM4$Yu&n^)<||F`GR!RsT-&DisFDvHC*2x@+o;ZK8;!#`|>t&VG9-h2_wW88JN5cX~XeSwppL&GvJGc^Fi2e#poApwQL&AQbBf;0UN zwYBMPeUrSq)z9Jv4YEY<=ZHi>k{^;OcvWFEy0|!h8S~-r8Hl^0Y9Tdf3duP6EZmDn zPJI>rsJ;k)NAZ)4+7S;meB2Jhe|oX}An>jZjI)3<9P@BpUNs9lTJFXyTnnlO{~Gm` z1CXM{=svWd+vWp?LC3%gvhZ(F868TvWybxo)Ul0=8~zO%1xln=j#E_^svk&0$%4FZ zfZ-7jNG~*W1bULF^Piz!d3l|&>P5ug2_5;1lzg4)a`MAw>86~^Tbt3QiyQWsuagBw zvq%QfK_nyFU8zmokM5B=o%LCHRMN}-IZ@bK;$NUSe}NLZ0Ont$?peg91KLk9fQ)kG zJ#cy>Tp<}4O__Z@JpfN( zx0PrC2^lQ?!BDW-6qw39QcW|6{L}x3hB?exW(h~vKx=!%p2FYOrptEjhFt*DdHDYa CJ*Tw* diff --git a/trail_detection_node/trail_detection_node/__pycache__/v2_trailDetectionNode.cpython-310.pyc b/trail_detection_node/trail_detection_node/__pycache__/v2_trailDetectionNode.cpython-310.pyc deleted file mode 100644 index 0b071b712413254dc114f8b93e995c0381dd323d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 7265 zcmaJ_Ym6LMR<3tfS3jobJ!6mUwiCx`62{IX3nA-`vvGElu-S=;jev%z=;>S2Q{(EY z_O0p}Pg5Pij*T}$1|nAcV0T520YL;>e(Xy80W4r82xJ!!%C5M87PKHVNGOZQA3z)P zom<`Wa01<`Tlb#(tb1PHIZ>(PDER&6d*5L{`>>*Xn;Hjy78=jsiC<6^g(*x8l)Az+ zrUzJHvkP-qwHMcJ1NO6_vJEbDeK zhWTc@Qm+i>jkhQ2lf(K{eOj(DQ=h>*AI!Gr>T|=s`TBfb@umkyS_*USs3;4vETYtR z)cVn3UrAESvK$-sEqDv-$Lb5i5tTbC#vfV{H3$)?#1-ly0so5TA7 zHqVaW{UAHa7VtiuSDshcF?Rfh!j5~zZN2^wJH<}0>J7Di#(S8ZysGg#-kEI$=T$Gu zr}n@u-SP4+?o_F1!u;gL))y$wZ2{M-EIB#cmDo`fB(JL?xOSefB5@5 zcP4umT3^`x-P7;TsN2pjz5f~YB6V)w`_r+%`{tMKw%&T|oo}@N<12StPyXdUf8oXV z{^qW);H!MaP?RQ}dh|n^Xv#%A@dk=SSy!2wXiU4U_0?2i`n-~=Oh2zAI$FlOGAG;f z$`rkg^9rbDHfBgOF;-bO24-UIsC{i%&!MiTTB=qQtgEMLV$UhE4kp&D-m^{>zh)F} zcs!{tv1+r?_ILx7h%-NIBwqEAD)TzYS`~bWs*kg3;Ijs=u5t{NlWXd(j!$?K{AS5k z$J?dLmNzc%zLpnS*b%1J?j&2nTn(Z|B8omqhP&qZt+hm00d@*nLifWBF`J#h-4mHc z;zyx-RgCc{$?EM!!u{)FLJkQs+J@ImB3?5@sk`CspI#I?Bv5XXBubpX1|E++_y5{s zb~NC=z1~##0>0|M-Lq>^+dJD^YeXyxTW5I`FS(oX*_Eyzu(LRiAGpj*a27gq2s!_7 z;>HqvpC3Fo8cQm%*x3>@Lsqy+rGN$I+SN~)|wVdo(Mi8|0AQ7Y3`wH-)Gr2sfq&ia}Z8qB4J{1zGGfhe+Nv}@QOC>XyaTEJte*P=d<;!tB!1xW zS*og6_zd2Bni`H2Q(XO3kJEkEEIvZ``1+R86FV$~KPBH6s^_Fr=UDa5Q z**EOecuv_?`(~o0<~D2<+`&^U-?vihGVIPkPfbWl4*Te7PV&_T)ky6fvNKNal4Ob( zCs2IY6k=UwRaUmDt9}Sm)2d2MjjK@A7z3-YHrF3TFOS+7j|mWf2Iq~fef};>-b!1u zOSj75!~ zKqfJZ=Yy!(2;!$v84cgPhhNRiq_o)Jtxkg{KA97tLk5K@uRs`WuMt9~)$2mt5{0ni zE-^RjQei5Gcj(g1;@G0J(SSACYo6LW{N@@2c4%qhB_Yyi{Qmk{Pp_A}VL8J}MP#vwPIU0W&U5CsTi7?ognXOBxBrrOJ z(U@^t@2e10FlqoNxvn!S)jqF$QA@Oe!QkSUeOud6Rj_mbLk2-qQL}b{RgxUbZ_6GM z!%SX;4TgnY@{w~_YPzt(X3&UZVXrirS68CY6Js6jk#!EC$t#W6>pjwmJ4U`gq!R=5%cDU8~qP;4|3w!p4IHK##842d5Y~u$0fNSp7&-y z>4$#2=E3bo4XGt^!pWM=Nvy>fkzGh<;z1&ti7?>>n`^=#y0DnH0saduiG<-n287w+ zaEAa*&(myEE+)$IwYO=;SW=dZv5wqBj$t6iWKYXrWL0(GgKRLBe;TAicBuq{jYs;R zDzFdP7_bXi1PnBGz%JMwb70}g?ZE#GY(QRB!dmT)x}&vdgptByY^#Gj0|Xv8DS$03 zP+?HqQGNm7J5|UQ^{pQPbT{zd)>S36A5-EdH^FlH{}_otVNgmfLR1X<4$5+^u^nZ- zG8o@c`Z;E%xirW0tMFZ9y-+u(o=mJyra6>T%mOMyZ7`h@p0b(V&!@TPlvf|C7#z4(&IlReE0h2Gp)5_WSVxflfz7p03Y zzx47e?z7Lovh>RH&vHT$B6krc4CYwaSE4Ik7-Lv|889B!r00ozH;k`!J+J5S7ihCj z$SGmuwm1nNj90sHUF3#igx>6Qg+UHom|?fw*^)+%KTfmEF2wRl;b5sKmhKlK44Wu& 
zA+!KQVe+UOvRa9MinfyCi?h2DNQyi@ZB&&Wnqm!MW}Q~lO*-8~7|?4b*GW8){VBtNLM+^?>P2$c zs2iG%9$3DtrScFHns^TV$nMcEeh`JH)K#X`wOh(6oGjCB*mVh^5TO~jAmViimPAR~ zk_&QrN~E zx0oU(aZ89J6t~PQskvi7jzECxP79a;Mp!3p9aKOWr=6GrnYNcj`JmN<_zc@41Gh_Y zyRb4GB*r$pAYxk~I6grdSF2Z z%GCATm~=}^VnkGV7m*9Rlk4t-3%-fty@5yK8S<%`N`LFS6|17!nx$K+q0d0(dNYT_ zWN~zBhvbDoEPb%F)b9bPklQ7fnW*a;@Z87&>&#@<4WLAc1C2xnx--Z|BX%$d8G`fH zdJeG#@FMJ`Me)IUo+Ko-wsqLgLQ=%664}t7CrfFkcKk8yCLJ`%OAFR8wP9Dkx9^}{ zNFC-NYA7aU)Cv>>_e*JEFqRgjEK{vCswI`Qn3muV;3**I1y+Q;F9D_c*FK)+F|t5( zs>hqyvrJz&+_RkKvFEAbp0ujmZM1TBiUTdIOs!-*E#NE0(lTa@$<=>K-bxv_QclZA z@hCSnRv&f(BmQAD+I~u-W zdN9W(<=4z_EB%>NTR*aXG@V&L;vQvF^1YCj({b#AmC-)7e%w7Vq+s76r8=bGW`>lL z$*I8u2gdEKv7_~8*|Z$>pnGDkR>kF8SrUW= zXyA8aVKyb`$R}%l^J<8j@0FT9Zw5fR(h%>;u-hIvgrO6_u9G9zQ5{)#cCwd~X~Un zsL6}~a80My$c0h{e)mZPV!sA55hid{h!2dokIR?Qg3x1}LT^T~P&fG& zogg>7B;mBY1PH%{^9jQbSEJe-{|t?AhB*ja!pP^zYL+)^A>_0&XDsaOy~H?S81B>T zl0-v$f#rS*s4PTLgY2ihcXzgU(tRBC-^Nq zaf*V19D{u9`(>Sio{C-}WCwg_z}Heq$vDD)6L@P#&mHV5tMr#6wDTHpA!gfhR8c*S zv5sm1GXg7W6PUk%wgb$FBp+s-S9=TppOV)sLPKPafQB5G0?}?j^`h%u!)0YX5;Ji6 zBvBedil!F?BUmBL%T4skfR7Wn{Y5-6#Q?SfFgq)0AFy&{=_GDgK_!uhVW988w-b68 z7=$3G%_~d|0cPRRRG82mPkFjwDMH#tTuw63L zM*4&QIjsVI(J0rgsmrxm7Bv+|AvjqCPIx|x?{x$=5})#yX=*`6_fP}m24sXL?F|1a z^*QqE7Afh>b3_<#c}P|#e2Zi}UPLhj$FYp$_Nfx@9z8&HJ6nrVRkOVLk|-W*aRQ(G zRVv1)7@!b&Xygzf6I|vLdC1UC=2f!{>q(-?oNFNi+nYXvE22EBG^CkXro@osS7wY! z80Ee3KcL#aJKXb$!ako#{nIK&#FtRmN(I_OIz`t`(U`1LiIl1Vge?ExHOx`XGAX42 WJ=W|odk+7CHea!w&@Mt>@BJ@GjkpH@ diff --git a/trail_detection_node/trail_detection_node/__pycache__/vgg.cpython-310.pyc b/trail_detection_node/trail_detection_node/__pycache__/vgg.cpython-310.pyc deleted file mode 100644 index bc47269785bcb8b26e33c6f6351334c0c72468f8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 5456 zcmc&&&2JmW72nxiE|(N#$+B$OjhnEJ8+(gLvP4@BU8A)ef7Gq5rm>QsY=iBRv!qrg zm-6gVvS|uPVWY`Kx%E=yAR0vh_t5?~J^FV#6f(FQHj-?+-#Mmff2YRICxgZ%ywO(xT;r^+m69)O=p~&cxY00mjcJWk zDaEv!#`VXnWO0L=JjE@Z<~GmpEFb#9E~T$)JjaJ0X?)n%m+gl}DZ?E;!bcymQWiZA zK4ySp3La8${4wK)zc5QfkUPRBAeRGnlpg~&4D4I{HDCvT9p~Q$HUjK*{syp7V0r!> zUNLzi}a+nYb38By@%q4_5A}HBnpH*znEyeLlbYXg$ zH`l^?)8kVs_v5ChEKM~=6sK0=r5Ep4n5>+id*@tn&f8zV zxyeHDd~vqmRrlAgILRyJa?vls66jfcVf(NslXGXz%ob(~`|L_fJ?WMC?Adevx&6&b zPMMsm%$+S4W(xc3N?yd;7SEoU@y<|J#})>{L!=0#2~buElYm9QCP2AV!Mp1r8G52A z0ADLHu9bA2`BbYiN}xxXl0ixINGq9?Ncg5c)l98qAtDG8MH}e4%WRw(ACA5WB9ltG*Yv zgda&g4CTRj?ybbZsz1M4d#~B7&vKdeR%iZA95h2WL@qy@ zi1bF*ivyT_0&ny>zyW5l95Y##*}!_x`nQ<@_3L&H^=GQLS``X_-a|M=;JbLEvjFf0 zeBEX1@bMc9wc2AJn33!7j>1tD~d4iupBce>m!i+JzCTkCo`rG^pP1Jixd&(81{U3pl_Fe6Y z_VL0xL-ev(Up5}<;v~N2a;n3YEe;E~v0+d|FQ>WIV&YBWZrZACcCkO*HMjd_(D(a& zuEkTt?YGm2_np#NNWZVRLuw|ITV2fa|MDuz(+jZK3tb91Q~33kqf3)2xXgvku&zQg zi0>y0OGeiojQ{ES-`bCU=;UM?f~`6tE*nHa7{y*#@dZ*ulh(VT= z$RPm7;jdAqf06?nLwiiM%zw5gQ~;7ndvZw^L+HEcq$!(2ui+z`v#y(%2R!GxcUxXP z;i#AwC(%JoEr++$tK!$|uDe4!(nnBOsX}oCNJQ1tMxNNCqiIFGwkr=Pim2qMNcs(s znzpWUwr+GNn$EAA9h2+W`6oJhtSwWSMWNl$YwSGa?pmPf9Ro6q3h#!wj#v{j9leu^ zwN21b7qw+oj5k=-pgv(X;r6ICov0mi-B6>YI{N2S3o{E_RFEaRwMD^>7#X_bi_ovT zlvC1*Vi9m(IEp7Ol4yFx;~YL;beK#B;Y!`B_>PrCN9LAyFDZ%QZODpQ0*4740&py8 zRH`+ZEmJJ0n2}c9yYGug4h5lGS@J?m9Z9w-Erbr@Y{#OjG#+YrcYHVLL)t{N_$HFr z#_tFl(@pj|8`r5uiR(1w@YW@K#07w@Px=_P15Nz+X%n`KpY)mu)^9o^;zz_%fW{Fw z`am_+IQ_x)^xo5S#$I@^XN}lbYR9mBnsa!6V-K95KjPk5`V8z*Y+kaaLne z;Y86+vR_^T=-|A;Q7gHAQSJFL>f&IsRp%{@Mu5FpO`=4Wb=6DMJ>9ZFlwy`pLV{`=lRoR zY|zey{EcdUQMCNG^I)|^80Ed~5g|L-6UK-)8eYv`@MAUBHSrou5jP2tg-?X!0?&E;sdZ72$^LTGP>6|)nJ;i(A_5-r;+|pbL0THL?>Tfjf6}p=2UKGD1i>TO$fz}iE0ZDw^lO1gRF6YiY 
z*SS|-hI1E4Wpz;*XdZDtBQfP%Y7I7Tmvg_-yjSR4vU}UPbgUZeT;l$gB$RWhHQ4%H z&MiLIxmRC?bAL*DX9)~+E^!}|gmNym2Aj9bxz9E4)YVhYD|9Z|z3p83^I@>{#Qif# zDCbg(&hQOv=C13QTU4h-5n$`DbeQ`UFo%hc)aROWOs3TTR?<%Xz6!(SfOrEWs%NdI)VPySj_DyZVCYLLoQ6|l;!%DQtjMoM@g z52X<_YU1ZaFmNf`u_LLyNgZ$Pi_i%duOGl9EuwwQ#>Hoh*}87&Z0FDV*H|w5mUYm= WeH_34WbN$m&YwL@7hK%V|NB2(_ce6@ diff --git a/trail_detection_node/trail_detection_node/__pycache__/vgg.cpython-37.pyc b/trail_detection_node/trail_detection_node/__pycache__/vgg.cpython-37.pyc deleted file mode 100644 index f9ab1214c86a1fa1160d2075f0d950f2c09ada13..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 5925 zcmc&&-ESMm5x>1V-jPS@!?I*siQ}?PYx|n$leTQSMr%3#KoVO`W48tf6o(^cNu5L< z>Aj;QGZfIm22vDI(3idxeh~W*G%v}2(}#U((Uc)<~67 zn4^OMCZxau8`6*gr=ZCV?^vZVCY}lZ--=QRrvPB^h&+p=68wqGD%mF^*f&^{FXnu7>rd50kB@aZ^+lC!3-+ z)o22*M^mbUiEL?3B?ekahs>8+m5~cS&y);u zPyYkN*2P=5js}d`VaQ|sIt=#y>pmInJ7jQYccC;&-6{bhw-6{yl2Gb zW(ru3cnf(OQ<`E%Pe(2TW{=_9>1it8_`dwE-qU+Fwvp``qSe(lbJPQ9s9I{l)wh%t zNS8n>6B$N5-2s`3+AS`W?lSFpK^VlIC(~8#$8EtQsfVFFF%Nz#4p#X5O6{{|vpx&b z_E%~xWTo?vKWXzY4*a^b?gt_Fg)~3s_kSbp+oIWOwqxm3>V6aj)qo3mQuTnsL5F** zJg6zW=Y6C7_tUAxW`j?47X2nP!`hT+ zMzh{(G!==;R4n|U?g5W^B@UXQ7ozvRIujil{ag%U-={G}?;z3GAhTGGnap8n>FNU4B~7w5@gBYta*lHBYE!KG0xsf5q*gIh z-eDt{I$2VEySzdj(LEaW7n@g!KTe1N4 z2V=t^1TWd3Eoa2nRJxf~-)0Zj!6(jk-3-=U>z7)PqSF4ijriUx?Vz-yO0%jb6RaNg z^RoQo1l#rkj@b`A0-7m!z3uAKq*yI;VKb~NKnC&C#IQ8z+83kW-uqYQ`7hm^v{A5K zN1Qo96ogUihZQc+F*Ip?)C^Y;=OzJA4lVmZBM55@7d8EH0qfEQnER$=k+ITN&{pnwydLPF5Xu~9W=IW~+lm&4RKtzbvW9Pk}o zWUaRYl6P4i|MW&YfBVIF_LcR zHFgyxpA2H2-ZfB$QDogP*AZX>x_UPiYnzyl!B1OK(RPDX4XP84CZ#*MHJx(1=DP7@ z@G%BhgsA=%g}uzemVp{c_F#)pj=;z+a}n~oM~)<|C>8;5aY>blpOdKF@&Rz7AGu80 zLD;JM74BLIBr*^DwIuq94^URj5E&zK5{YX`qf)I&r%V7)K#^A6f67H9vq9)p7X1*r zjwD-=7UBiLwQFx>Xqj#J%iK%q+`uNc@nCL;p;P(@a_96>3`ycHmT%oeB8tTQo(P6} zjO#d545(~gvRjhfmh4X7)yWt%U!P)%sDrL1a)1sF`VF-(#pu-I+A~(gyssGAR1?>_ z`lec-VhhZ1t%sotonEI!rSk{GWPWC5LTQ{=`=k6N99lJ~wS`Y%H-CQqyzAugnHRO_ zy5i`oB{&7>kVF1b8CMbadj8{T{*h?&5A#^m7Gae4w_8Nni5+2V@#BVH;|n}iZQT{` z;xNP~L}+xbtt5YcE7R5?$$oL+nv9@ zncefNLA;n(aLgw$DCnqJIi3kY7ZUS0V72LbX=20z75$3HXGDID#5Kg{#6KiLW<6+b z+D-YG_L%$3Ys`IQ5HGwI;r*q`k%;_gnJVfct~ie2c&(Gxvc@hufnuH?{Pa z{pLOv;1*vC+&jks+y^*pb!%`mmZY=T-}YPb*ns<5YhJu_(S3`+B{TPdOBXUnV{U5c zyZz=)fJ?`u25vn)&o#GbLm{pq3I0iYXCk<3p-qU(?tbroT&C2upiC#XgJGx+2&J*) zvmFQZXmWR(nNk^QuOn^G^VLy{_B+ygppKsh_N>Xxl~RqC>Eymj-QJKlCKp*WJVEyi zG9~;FLTLnzn%Xl-15d(xHcsk(KwS-_Z=g1=5Xvb3qQy}z_Ma=|%q TmgU(6HfuTq2Yz@qb=dy_j`gzeBRBykksE>Y@Ev#qxkE4sr{KHDjlvY1hBuKLgK5aY z_mDdbGjImJkK8!S!dvhIXnOLt@XJ@6QUQQ{93-WP6zF#mO7tB+Es$1xb3hnuoC~Ah6 zr>CK@5!4zkOf~OE4N+d1YKZD|y#Zb=oK^!&mP(MHam$6NX0-C+?edf5OY`qsEX=zH zYd1fcFI+0j<=x7`+7%|DTq+g30?q{WEWGgi&?uAhGc$Afx%>f}lBp-%63kt==v_S6 zsN|H%`SSdQQhqjnu%_fiv~A(S%&a>@O>Il)2oI4Uk|aWFN*F{;A{G%^No7oX9;!o6 zIEBPxMGmZ}LF#i>;k0}nriwZ(p@*z!&@#ez;&W!OqKTkP3k@6X`~$~cF7I|koe`hK zL`sDbl88mT!yahcS{GMo2YE-2jO_&QF7YPvNlX@Kpm#OoQebpBzSCW%%JpI89j&W% zlh{Y5ql;#TZD(l&V5nYV$<}t1HAokqm57X?uJ(vbgsrA0l>X9k96#_Q$B|aWbEB5< zLa7CTJiG{QGxFEH#r5ikjYe$_WYS%)Hj$O-yWYK)7eu~Wljdzd@LVB{kG$I-%j7lD zXf|4rOqXkJ82T086Y_`}0W14$&)M+&>Pi&ahBQL2RuQA9oy-KSx}#d)=vk#tuig_b zQ%)>$oG0wxA55<_>fUsF#ce<%s7{MUIOlAH)1fF&N5b`M4tSAQj{HX81h|TyU5E=C z*Sk1`V;{#9o= z6HM|?#c6!Ut_aq1sh&3TZx$shuJ9d-5_3wEK|uv__Qne?*3~@3L#` zmyb3%qLxS6s{TL|@8jE8O?3FG2{;4Lw{$}CY7$tBi?2w!ZK=G`#a-#1)oYu=w(EVV z2?>(+^GU?{VC^*4wv;rZMl!(c;yB-|uMnPG!a2KvLvS;MS8LgtG$?Y*Y|sd53YLC! 
zKQ=5)y879~pKkuG{qV5qh&iuYdCQ09j`S8)*{jtYXFxAn3W5No!UO3Q5Pbv%@cuFb7(C|GmL}} zp0JfnT%oGTrVD_+UbeYR`a!eimOa~yQIWdiej0~CaUSc6IU+}i96@56QZHAkGF>7J zD6B}c=HB;2C^LTGlvmsUhYlrQmnK36@iky_wR|&mcg=I+Hl$4t5(dU23xCJ7apX>G z6BwDqO&W4!X8~X0GLoHN^*IdxO#I|&7UwQL*v-aVKWmSQ_erD(^&@WeiOQ;e`kn1- zy)SEzzi?;I>anlXK)3xe`{=>?9y&n3$Ni)91-LKqMai#9eogZ0dtxQQiO^1U!v!QA zCbEDr60|F7kB=}eo@5Vrh5HyAzv5qT#3d6~I@-3{?;{h8ldOw-0LD&@_UQJnadGEn zXD5~4bL!ZcJBI+N_|=whX&1|#Sv+H>bNDQZYIs?3lxqr)_PfA?+_@5N%J${ltx9fL zw7l~<6tzSU=G`+rNJ?C*(H#XeFmscrK3Yep|Wd)p%$5 z2^Uv!((%FeL*mG8`|U}mPZ!gGJx|55qf0DW$|;Q@nveLH7PWHhH1c>J>ZdKxHT70`qF?m z|6Jf*dlkT2!uiD4(4nTH^U`lI4VbD2JdzAI)k_E7x0>rU0*~z21D?9T549#qf4A3~ zJ>ZdKxHT70`qF?`crNg+zY5^}6z3Cv`WR}eG*)?x>6^eK$#7G>bl^SLTxYMJwO=Ff z$c{bW(MKaO)S4vyGo}G+_JBtcx|P-OfaW;1u|s>WC?MJSD;>~(ft=07$Le*%=0pA` zWkP+Elvey{G6>XVq*p!ka!K|*~t6BntEgm420r0&l`dyqV n3*SXz(N$KnOpWjPeQjpP-%cMk@lJ!k|Cm;KWZw@j!)g9MMQe(O diff --git a/trail_detection_node/trail_detection_node/__pycache__/visualizer_v1_trailDetectionNode.cpython-310.pyc b/trail_detection_node/trail_detection_node/__pycache__/visualizer_v1_trailDetectionNode.cpython-310.pyc deleted file mode 100644 index cd4e32d62bc9b3ba483c8792aec0f75c18dbad32..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 9034 zcmaJ`3yfUXS-$VN^W52o_hGN&v12EmCSBX>w1n2gX*Nx2HQAWhCF;=XYIe@u**iNk zcYN;MUGL~l5_O#>1zZRX6*P%5>(YX0MG>t8l!8GNK}5u0km1;3X*_bU6Rdllt7RN48nP`MXZa9&jurZBax zloX~hy{4BfnHlya&wXn_1dQ0>Ds~4bgxJLnu`9nu;J1nHc~3EQ8so(DII3}*f{P- z*aVxz{Z=-`_Tzq(9bnVAA7cmEEx6y7R322=Ay&Aeu!5W2&`YohA6`)^dIbB}-St2bXgi^d=Q-v2!Jshh9fsQzo=-PU()L4hZ1sh$FC6;@^}3q+#2X(`=csY*=AR7x?YDmYM)l=W-}q+ZKfiRN zdiP&^_l0xc`|BH?g17P%Ls2TU>i!RP;wf{uf>n@Esi{m2HKtwFI%=daeM*T`rk_zl z9W`T0nUwV@WsL5|83kN3?$hI$=&RI>t{GaJYDepp6DaGE7O8m!^Xicr+LMYb;}DBh z`@U2Gzlz^nb$M8rWra$);qo##K4)ID9J+-&3e0VVO9h;ZUpUPQb&r*KVUeSw99&el zbiBfKoHq;CTkaNZw%l>K_r;{pnk`|vjaIlW%*DE24n@Wz$#9ljueuZptBxntt3vmh zt70Nvfin=9a_IR@XITt!Ka9(ba>%{QVnlYS$Glay68gMoi0sO$v%PweZjnGa6_O}% z2oG?1;5t9-858{ud!BV&;nR4l*J~%1{Dym?y;Sy@->jbCelY8-1t%6(ygEC9^>}rM zxgpjf$-4VB*l_AWsq`$u{u1??INky3;1PC*z#wW~q6RHiLzn_A82XlOG! zN@zw9kVuze^|wBFD;MIhI0_(`^Cu7YpH%+m~FqNqwP}6K$IS zp2HT*Ln6&!soZk;5URomy}HXMsHk4#$hcqGP&;O*Mdk*~7CghdSh8b9)&-cLt{$0? 
zlmwomM+teZHn>J)Z<5JLwa?2#af1*x0Oo~`E;cX=>xD(H39C~rNTm%5(AoeU3otyF z??fy28|aS#5P&l0<@N3JotI}T&CV9>g7=q+J-R?Y>{1s;bDX7k-_5EmLRY_ zOA~~CD{$6)zKqil`GGX|C&~3^l_wX(uX_jHj7z!B?S8rUqah&0I%yg@3GWz0+RzL& zrDe2~#_vJXF0(}% ztklavAnb*5WqHAGx?-rsT{7fNDE&e?a7AjH(LVZ)lPBGlUs*b_SZM+rIdUSWRSt0I z&Z=LTfroA174XVi`-`{qEDbn{`iZtxSz+aOodjCSG|DZ9!XK4I9n7d3~ z77QEL9IoIZ5EhxQ5K<9oAza!fP8}xA>{@irn{aJi8vt9CAX)=l)~eLQh~SJi)NYai zaCcJ?U^I+dx|`Wlel`OMP{_=6tj7>282E1>WN6x_l;G|)$OrvDhztF*kwv(T;kn(M zoNH)PspY%Fn@T6a%qS5hn7$0RNQMz*V@KKAQBLeA+dIlhDu*Mr(I^2r#!|qFD0TNm zgz{{rcal-!e&xfrc2eOu_!ChIvX2#E_30?hH>0$~m1&lqQYH|@98fwLtjms)Q%X1) zWj5f-qa-un`jeMF0zS+)q(3tcjGOw_r&~Xk`u5rC%d5Zt(*6^Ff42I`xu358^Y{OT zp!2~Jd-t~=`P$j)`qpiy?9ab)w)(YKjyvN(jXRa**?cu1Iiy&A}>!j)KUnEqPEt>30H=C|vb&ZFIPu1qky2CY^v!~v0i5Xn^J>a6G#fo&pHKZILsFf{sGJdSk|h0 zhz{IKF*<%9ZLEoa1OcE87u=`{l?xSgK^WX|FA1Y6=>ky;?YQsVjN$%l&Jw0`cy;0D z@vMNPF0Z9@+Kaqy===#V$oA~|ktd>Ga6gEvlvJjaw5OFtz#gVuu}jh;A$&HN$t>JW z36YeN#o#|sPp~x0;BG$+%TY=~o3bJ+4Reh5VM6| z*vSoUA}DkW)Di44+-oo_o47l0UNvMcWJ?mNt<+MzmNd#~n5oPLTAgf!LvwNn3>9Yx z?ox5`god19NkQC- z^?~9T&UOWeTi6buhZj1I$XB>a&ZM=n00jd0;3QmPD1O?2D@J?$f|Z4!!o3zbw>Via z)Wgj1p>^`Z=qjUEXIEFk#Ud9+P=lT$aeY_E_&pY6zF~**rWaNg7ZIoh2-id=9v8Dj zw)ebVHSag;>;2f0d|6Q!M&Q;Lw`W6?oOBS40y{WPEUAQ;_%k5ChAVg{2!SI_rN8w9 z!=k%E<^Qz+pLW)4&C)GZUJ&B;_%6Ac>2Gv}5NH2KWfHDAT(6N?2yyH+bpzs1)0R!1 zqZ&|EPvB5CNSkAk+Jv7Yp}4Ni$DzH&>2&!gh&&4-^p@X(J#DqzCc~?*H!IgQ{tL3D z5mfhRCjS{U{5h`I=-~4`OVmEN`!)8Q@eh3){)?D|{}KoSHMuNObGa=3DJnfr0)c)8eY{afv4gNT_2ghjDJFlV<3x}t? z{gm3y;n;%$cPp;9>axSd41W$A6-x}AiWu9eGkx{sbaX;5FCd$^t zn{e|Lk*c~O&}0?PPZ(Zv(eIn3{u~ILq~N7`0)&N#WPFfR%pv@Aky=6MPvIQQ4%L}Q zs_ufZS3XE^1g_vX2%?srv>HgikOwieN!3O|CZp!?AH$W=reAGSimIR5@%aoX#4dYJ zX#)~|DLr3-a#A7?IT;!3T-7_2BQdWa9+wffA)&1yohwe8VsM-R0v^QL7E1sQT7bQY z$g0`QX347>ON{|~hDm^03m}o&$q!_30jzH59Xrg0Lzp#B(fP+oijq+>I2R7HGzC9& zu2rRzijshp$2)11Gf}2H0w9^0g42gth-=yHS~wcFf`SX^ETt_RBY4@#Mk$QW(#T5> z%eMhU#r%|8SDw;XhVlh0hb%>(lkIaRwt2a2UMGiH_C-0&G89>zd^jBC@%C8V^LQJq zFBuJCM$|jQy)stS8390^h>{yHe0Z}%ozZAC8bTi94=;Tgy+(T@#(HHMu`f*4CZl|` zFB(H0WQgV2FdMmQ;yu3HYZ-}#(Soddza{2V{(UE}JH;lVJZT-yWj~vwy$BC%D4lU= zGxGV-cx~D_$fo4|mMGO@ zcu+cm{RrPtuE?TW3xao;eW7VA*S-+zfUZQ8J)pNH5^pk(%jaN`Mt4qpJFwQZoiGV#M|% z`RA#+$Fk3W_kH4@0BN7xt&y?@mmLF$x5~mGgA8Yl%#OcOU4r}0GdJ)kvj;2TQ{>DS zXmWy+{G%YRkInmqirYl`4{2?;ut?Fs;q5pm4uQyuV6*6>Kums|N-_x2g@vfZTM2|& zk?~G4T=FW*O*aU(C@In(CP&Z&_;xawXN%VHdbZ;6N*%$yj4QTe+S2X=vnqPXUwgC< z;R=1U1q(d`CtjHqX@E1Fr~Nq<7G3XKh`6Q^7ZkzF;`yD)sg9+Fqsl6)fk!aRoIHLzDRQjhs$FV_qLa zV41f@EQ8WEsxf4vJ!j_B{i=<`UrIXzmuZ+W;L9g%U|afzv0t4;a**7qh4BbPk^C7^ zr=@!xlP)!{?geJM)pr**Bvm1sB3)}61vtHdbA_u!h6o%4QZ*yeHuMhxSRrObt|+1$ zA~{PFH5-=4L|F@yVTy7>NR7vi66x;XWSC{h9L44jy>kf0@G%7j-RurAq``ZJ(VFL9 zzzX!$V4eChh#Q%T@ zeaG0LQ(vX(AA=MVv8a-iNNOTv(J~3 zTt>gn^Gt05h5=Zp0NelykO6>h`tWs?TsBFDxLy7l05=@}@c6%gKve^$yb91*UH~+u?2$}+!XQ@a_$Uad>xJ^g zaDXiss|?&x$o+Nsu~FE;(n`quwPtLDhkK3hsn;Ka3GmkfWLmapPB}42ni$@7mh|y) zk35jM^qQL_5F;3n!$(JaZj@?iBVUwJ`G3(oIBn{OV+v3$zAa9&W%_Af0`Xi&OPpWU zkq47OJSU%zZ)ucEhBc$uo)RFj?7x>v|3)2B^7Uuviz+A07Q(oXBA! 
z@&jU8I$USXV*m`aThgn=UQ;@`^Td;C{sfhtBtqNAze{9W5oL)4T-aw~)wu@;fFw7F z-HVlNoC6tD7+YLa%cz=WP)dOQKQPRG$ue_LUnsC<580FWXYC`}P~KLaNUhsh7~`A& E2S8v-)Bpeg diff --git a/trail_detection_node/trail_detection_node/__pycache__/visualizer_v2_trailDetectionNode.cpython-310.pyc b/trail_detection_node/trail_detection_node/__pycache__/visualizer_v2_trailDetectionNode.cpython-310.pyc deleted file mode 100644 index e52eba826c071582ef28a26f2cb32d392f97438d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 8830 zcmaJ`dyE^$dEYme%g6C}r+Z7HBtJr1KFMbXQEkyD~{l2_}4#~36?<;wwm1C7sf5h+&72}VyV&w!I zsocRv*`aGnc(RlO1O#uBnxi-hJ$jRgM3FcXCU?8PrSiDc`k2G+5ZdohxNsm`}_voC$@6*8_8S zU$}H(!HdeOFrR8x>mDjj#H+1vDd5czEn^{I1kGFsLhn*kZMHpD);Q6UV16r_Zv-7S zhavX-+GE^j5=pksy$#pnLFfmqvRwNv(8&}`{HrN?j9XY$W|{ZDSa zeGd44>1Y4_g)iND`)2(orH8E_(^7L>#-KA1qFEZSdHunMb<&wvbFh`Q^KznwAMTxmF8HfR&9E` z3P`}2->ODl>E06a+R<_ebP7rjuu{WkRbE=+7$_%~)g2v2cop<#(MH?bq05#xF7Lja z7Fw$YbHS%R$pSwYgjkSkS7U0&~u}>*1-zj^ALXuphtSGB3hjXwNP1{rdyg{IFAP_?sSg z*XHP`e&g}MG?JFt_J$biQ^kz}SKhfR@BA+8BP^oG8(DQi9aXbhR<+fvYN)&f@Bv06 z83P9atn--tZ!G4}vB zN+wE*qcmZBABp=cDYySYbcTe|F^Z}s>Fa2kHmXm6&IN6hXkA8kjRbG-OQS-xCRq3d z6tTuYU8W^B9z%sNeT1OKKB$^N$Cb}Op>*{~>8YOvt+p{Q)}NP@V%BHW7nB#&C5@$+ zea()I$CWL$YergZZb5CqVm!psT`RUOK?U{n*aWAfu#O(5BwcMljo98MmE&w)kVNsq z2s!}jg-Dkun8l6KlHY>Xsh6bCh9yXCh=C<2o~tL(%Y!DyV*(hU%6WBTkG>0%wo>iv z&@K3Xk-AM5NG*}l9l;aMps0h3KjC!gyx{0htE(!9qI^laqFqYFT~ey732pW?bQ8P! zswT3`52I?U=7nx}MNG8)s~#l24|Ahs?uE-igYBcSbGCu*TF`7i@+=npPuj+evUz-P z0SU*RV*{vEn>v)H!CtwHgy-RAM#&95(mVf&gUjDmLPt_OS-)DVSTZXO`X53owg zg~@U6!^ybh>-@pPeH0zx7>d)Tp%e3tf~5`3P#rC+IT}BYt^;a|SQz9>)YdgrA}Ael z^O$j6@2X%_EmBvsp3baTds%r!+gACBSnU~+xnd1qk=aWz*gs}pN587H3cDlH$*kK) zpdX41%&Vf7-i8YlIn3F@+?ZH5lkk|ZLFASfeem>UI73#e)~JS|uotVf)y1IYiJ><4 zNTatP{fpJm+q}0Ow&zZva0e6Jb~OaG-L;_RF5<4-CUkyd7FKri!I0P9-)6k8b&(TK zZCbSst3GrZ9#pnjZM(!v#3!56zi86<6MoANmpz#Dpen^qVmnsOaFQUQ1)(t7Rj4}4 z55WRB8Vz(I0Fei#uSLRy&8;m9gW$qq-Wpg;Xi*>x4{RpPHiz+ri}p0(GbMVFm#^JD zCcHz|HQ^mct|7;G04AnRGmN5kTrFviYHK!lhhIeB0ri&yz{WFAlTO3iB2^ZFM`A64 zdD{jLK)sngi>SU0GuN}>gsalg*5FcWbsAyBFicyZK7&)xq z79+*OKO(2J0=LeINufuZ)vF@aA0zZyyCV$pEri+XG}{|e-*Pg+!t8*3&+shO9pDQ> z=?;M3p`%5r1qlOCm^|pTSUJZp(0Y>BaR68PxFU`74XV;(ps+}oNv9QbqIM^On88<& zcp^`vc(CqbD?&#_WY?E{h?}%Dp-5HXz`}KYkJOOmbg_@fyNXSOvnWW@S}GifcSj9J zb<~+#B|S?H1YAbPG9X7q(#2>)s#M;=lml9rq=^)Pl&Hs0A&DaW_;Kjqt|pxY7)+8H z^k^~3k11=AovwwN4rwxDYl+(6@=FL37Rmx;jQwL&6bL34Dvg@SJA{8Iqb z@ErJ&)uCT_5{0K!RHjt47nCKqI83`{SEK`r5ZYiSv+y=wfCi|*=_#i~PAb~OhlffW zpdxg1h2)15EPNeBq-^0L87>YnHoR^Ze)twti)+NVUe`pprnvCDd#MP*4i$nB&`rZT z*ZBP~NKineh6unzqcksr zNns_v?dDk0^|TBit1UL@A6oMo0o!PS%PCk3cgPyl@!cNQVD@NhV}$zmuu${zF?f{yeDhn>Z+#{D*# z&?HSUx_>=b?gm+VjF;zg_m3AtO)m^7xJ{g6U8fno57`=ay;1RiiqU*8e)Dtrws) z40R0U=GXzwpB<3sfCwA|YP$D%T1P+-$e)u83NH?U!oaiB6UmBo8DJQZjTCGqs{&ve?~1JcJnqiM=atW(YSk z6b~WAs64N+424=OivVu`aR!j_Jy>=R)*S|xVmu5i#n|eOL`poeGD7En8MlVBrQ;$n zqTL`9^SUeJq$7As%LE)d|<8i7_poIW( zh!t3l4PQ5LM?Z;1qlp+)k+Y9?r{bx&xT<|l<;BmbS3tFRs=xkpzfSAVBx}So@wCKO z9N?ol5+giziSdqo7#~LOTVO z9qLUooOfFTl@9f$*tn$E^p?^+8fz;vD~IEwD>Lq4HX+|f;-SHfqJ4CwBS8@G#NTkDqCq#SjZdu+FM9OLhfODsDL?&* zWmA&p2CeD6UB0ZanZ1?wwXi!xT~;!034>MF-xIPM z{9er1Tatf;nli@N4K(;^zy!hXN3nVOpc+f;!$7;fUv!SlB2+0N{-9G|hH@=1FZ3zn zM zulRLrL*(GfHN!fkIpO+CH+0X6aavaBn|_-wqb3t^@ZsBSuf_Ozf|O7sUcKMqWN6?< zS6lFqrI%?XN21hn#2=JH{U%a0>ms#Y^`l?)HWG)xTE;G6XiWsS&9<;*8qW0@JaZXn zpp==H+jt7e7mANfL=P<-HQ+wTCqr+chd%N&HN?>Bx^U`Vgow9}Z3)9~Aq04l_LAyf zVEGvT61APaLxJlUxCXMCPKA{Tu>f^QgEd(^{V&njXm>r=}o+Pcm0u378 z*X|FY&jU!u517@Zo76X|>$mHH-@)1b8c+B|6qH+#ZqB=TL<=^eg;A$y8K@a>iKwJh 
zJH-)2yfs8p6i4;@w7g1x+rkL#EOHfwiCBxq>qT`|D*&S*)AcjJJE)h`2_5kmoGwb^ z7qucV&!FufG8+Z9Gvs8={37jCqm<&{N0Pl8v~8jR=SW#WLW_bJBodGe0R$1niV zpUQ$B3GK6~yt;Y(AQj#(CO{~ND20p~q7=ua#9pUC^{nespqunar<)VwaavX6`i^xkoVB7u0b@(-pOoGI%Srq*M zHk5g82j_oyhsIQnq&$_l389Z|1rV# zBvRIh@r8Xhk)cP(g$YSF*vM>=R<-GmF9|ZEC0l@@jX)Ivto;AnFb6fu%tMwT&6+)A WPvAddm$adRt$fm{+BsL;B%|lH$u5Ie%ka9CS=kA@m zp1CtV_wG}7O6fWz6*9DVKFbfw zamV=_yN@4X_w%Fto|_6g!SCf2yif9D{5akZ@ca1vct6Nb@RN8~`2+kxyiYx+@G3ub zQ{kt)(vHp^;`96=e)^`$PJ0jYhp%g&Q$^D|y`$hf>J_DWW{>Eww1>M_s(NZZySVgt zlv;Q_x`6kkE0>nMxT>b+3$2>(q2k1%7Dg*=(TdPAmfD=4BbVBdcO|a1Iv%fT!oq9> z3&MD@*rNqo5u7^Z&5+l^1?(=V|}3kDNIC)dyaG+W(W@ zTiy&wNz=PMK2=lw73eeJ}z3Fnsc3;*>Q zb@8^p@!lVg{>^uO>$d;aBj0|f_3z)f?LYo!|M;aBe(+bf1H}ZZDu$xe=~n26x_lP# zj@D7c%BsrMSmW9)t*a&q*JqVP<@z}#*3mL%l^NNdRVL|eoKpZbGn_um1g^4b^vu}W zRl8chor}40ugcRs;~`;HtW(;|L&$$>Xh#x2K^cWFfgApK4(zHN93R-b&4t zX1f-rg@DNKHoU-Z#LQ}9rKX?iLAahyXFG6*d{K)*RQGy1D%x>YZ`EQEY^LK9rJ2Fj zy?WdhRU<8~t-JfXPe(e$HEx~QhTV$=JP~>B|F8y|9w6Mc)>jpA7^nJS<4mL7^3H5G zYHbcqI|Ghea5thek*J@+Zi1%Ey%;;89fx4$_Xn;A(ORt;YUgWYT%t%OlA|-$oKq5=t6(@AZ4GQF z8Fr4CROgyxPGVr47??_KaN~8XhSd$UWh*hbdfk*TGcm!8=JwJZ%s6BD zeS)CI0jQcmr>~;>8WpmO1l`B=&wr}aqF|{8_FB%ipF!?zG)}MMP*0rnz5Fc zJ5W^DH4*cC*GjA_P&z$5F~K1@tfMD6Nl_b6Be8c$$vE4WBssh&hSWo$5YaM~vb*1OSSp4&_sL_)dCNqxE9vE$c=8K zGo4`5gM{~CZrl)F)Mz*PK4pLCoNJ=H-fnfCdWBY|ZCJ|o@xkFUKAAt)Y}aee=qXeN z=zH(sS2Z)y!$(H7T1305BR@L-bCU{*=BsDDx7ewCQ8LjE+xIrI=@R1H5Z!li*?lZ$~R=;$id*sYM)2j+@k0H&Kb9ZOCck#x1?8f={(r zUDbLzw-W7(%9piWRXm%hJtH<(tpO}HdpQn+$L(9_SCz0b#K>o(?#4hrG#GeSMJ>Mz zCnk2dvxBh-aqdXQV$24eLofco*{fBZSz*0diy~$(*Xq}o+o8usJHjI+9zyJwYmv8o zsuOk2;Bg1z+)gb5z1{V8-Cf2txh-h<<~*$G_GAA)(;)|**|zFyy!OP|s>%wjTE``B zB2L+!dtW2eA2j`J5C&1hg9&ffq|nKZW7QEs@{?-qC^b5@xRF{x1SY`A2+@TI#2(nb z9;YU3Y`u{h1WzsQt%J!@EpDfV2S!WHj)1YP>f%wF&y>@%l6(#Km_A!!-z~3HEMyQRlIIB!6 zyfmkD3s~1q^0P`jn-q3nnv*=YVaW40{_>iF9;jt7^yrfBeCJm?A9lWb-hXTT_kZQ^ znZG;l|Kj4$HvjoQ|N6Xtl<_CO`_ebh`&)Y_AF+S)?eqRO-AuSC;!q5FS0x-@=QzV_4S3+uCK?7?PgoB;^Ip$zI55W z@Z9Bv%gNwW|BhE`51Xzh93p?P}$mOkibjnxqu2u52?Xdp=!IDbO{hKkaVC&OGr*kSwjTcwNTR` zHD+S1P#YW$&Qh1EhWIe%f{WE@1~DsmH!>ghdAdpyK?Nc;o9RKzPgRksD3_TbT<-=m zd|572X`vtQyN^FMn9XfqI#>A1zl60SBAlX?bqA+)^s+8Kj&_}ZgP-27M5j^l6sB^8 zX>TYiaALT26Z;_(gE&`e1-#8S;59I~HPu{Nl*%;o%%S!MQ_{`7}%0MO0jH@eGHcBiONf1bE~o_Lqld|9_#j0*7c$c2Wuf8^pCE4 z%{Jc(!P69aMSJ&PA|DTae}BF@4Gvj@51!gC+>#)6T~=;m`aw)amOzXEH?@{!e7`Ss ztXvl!*`m(cG9(lpj36tJj%G*MhyEC#Y@&}Gtu04&5p>8vW|@S^gX_UAH_4O(U!KoB zh@XzOyeOhjHFJt0=3L39TlS!oq>$49L6^nyS)hhCn|r3y5$#PN#NO4QkZKk zNky`<@O-ebCl>o_r+s1!_EI&WO}*yIfulQ6d260aIu-_(awT~eQ7t?X*UhAboG|eT z6u*rpnn$5P^exraE!Bbtrr~dE7R+f@HxM=2x(x+U24qUzp1OnI=Lh5&GCoB?q&pVz zMB`w8a?&JY!Zm~QKXBJ{vamN1Qpljeh;@VlhSVlzVo(?XR$e&U&LMEO!=gY8PV>J_v<)ucVhDs@kBhC;0||Goaou^ zWHOnQWft#SpHpxAL3kF}ll_s0`gIyP6(3riO2(2y$rOUZ5+6lwvV6-_6&i&zWxeCc z1bWAim+bduaLQ61g!QKRp=4}V!wntk&2Xuf;@KUgJDq5&hgav4>D9yT9G{l&BS~>^ z7POD9-s9fehY$@jNTmj_5c+1gS$$v%4a3-D66M>4<*x7C!VYxUV_TZR7OR3buU==_%S%dWUs|TC{lI% zDk95~mB4FqH-yTGSU$QQbWjhYp^YZTiVV#aJO{$pfrW-)fpYit9|>O#aU3J|mlG~E zrMEqFxdkPWQku9Qou0i@@H1^X&_(YTt0Jog#e?{_z2-Ndipt!J0?K-LVZBX}#rEV< zyHfW;=s4u|yvhnWP{;P&$Sic1zTz=#1Q#T8HM01S1TYKUT(B0UW?lM;`M43(uZOtW zJ@OF;BM4cJMV5Ft#UA+u+r@ey>P1`gYI}q&HKm*6Da5m*KC> z*kvAT5Q?MJZ6X^E@9#m>r8@;{D^G!ttc6^|Mw;8G1@T9{t<3US4eSjT#YBwO>ZG>J zBDn!)2KmpajZ&r%?qZ14fL&@JrGVrH4%ij}NF^i4RF48A9O?)K)eYwOUW~xa$Bt4X z2;tejjE%8e|E8+5;#JzT)3=m>d+WNdY$$l!>|)=VONgYGfEQLyoLr^jcix2)i?-3G5QbT+$$z zy-|*AVqcwFOQMDyAmCia>;1LLU4kxFXh2ztrf8skw?I;qBA!CQN?Gjr+~sFpSwH}q z>NU=#yo!KuC`&AbRw@J0q-G06H7D6#Y!G0`$KrW3DG2sMu|++1Dcx59y9gB(O`?LJ 
zr81dA1|MXSgwGY!?c;Zn=lz^iL?v*W$!fT6>bR6gT5D3h;JOrVWj)dh6vT6aR!xh2 zPqFSbn*(o=c86>INkWpI<7L#oh9{!f2Hz9zw>yGhcbFtHOVa&YMJ2}90m{kkN|ys) z3XDNpRJa<#A%uIP;;RCj4e$ZEe8@-N0&U^&A;8?BP}Az=i8J7`=sibf;H=(=d>48} zdP}ovETsmeyu^8|3xi`I%8#n0j4+e{5I>Cqf*ZNDbx34w8L52=)MefXTD9K9Hx;O` zAeM(>M$`#HeA9(;5bZ7bttPdj##+qV8)2q-$NHVmG@I9;5ZW6NLfp^LoN{8y1+cL@ zNJigo?jk#B+93Qi(OrJEff&v}I140}l-N?m&tM)RD&e^<1=90bN~1uL5a|}u@B;3( zB4oOSWRvO0I>=x~oTKK5%*#Q&QH(;xlV zF{%fc3W;j^7ATW2GAkp$!erlLC~}lvXxK;=YHb7@UdIH1DW{(qVag;+I%kqjIRHsy zd9|WT&3##vHDdVGK9@<)Qy`8Ky@C+vznUH&QMvQu=_{{{~wpojnf From 306fa0562158a9447c2ef924eea9b08e383c19d4 Mon Sep 17 00:00:00 2001 From: Frank_ZhaodongJiang <69261064+FrankZhaodong@users.noreply.github.com> Date: Thu, 7 Sep 2023 20:15:01 -0400 Subject: [PATCH 05/22] Delete trail_detection_node/trail_detection_node/base_models directory --- .../__pycache__/__init__.cpython-310.pyc | Bin 330 -> 0 bytes .../__pycache__/__init__.cpython-39.pyc | Bin 328 -> 0 bytes .../__pycache__/densenet.cpython-310.pyc | Bin 8119 -> 0 bytes .../__pycache__/densenet.cpython-39.pyc | Bin 8190 -> 0 bytes .../__pycache__/eespnet.cpython-310.pyc | Bin 6119 -> 0 bytes .../__pycache__/eespnet.cpython-39.pyc | Bin 6085 -> 0 bytes .../__pycache__/resnet.cpython-310.pyc | Bin 6189 -> 0 bytes .../__pycache__/resnet.cpython-39.pyc | Bin 6424 -> 0 bytes .../__pycache__/resnetv1b.cpython-310.pyc | Bin 7205 -> 0 bytes .../__pycache__/resnetv1b.cpython-37.pyc | Bin 8293 -> 0 bytes .../__pycache__/resnetv1b.cpython-39.pyc | Bin 7945 -> 0 bytes .../__pycache__/vgg.cpython-310.pyc | Bin 5493 -> 0 bytes .../__pycache__/vgg.cpython-39.pyc | Bin 5965 -> 0 bytes .../__pycache__/xception.cpython-310.pyc | Bin 9518 -> 0 bytes .../__pycache__/xception.cpython-39.pyc | Bin 10103 -> 0 bytes .../base_models/resnetv1b.py | 264 ------------------ 16 files changed, 264 deletions(-) delete mode 100644 trail_detection_node/trail_detection_node/base_models/__pycache__/__init__.cpython-310.pyc delete mode 100644 trail_detection_node/trail_detection_node/base_models/__pycache__/__init__.cpython-39.pyc delete mode 100644 trail_detection_node/trail_detection_node/base_models/__pycache__/densenet.cpython-310.pyc delete mode 100644 trail_detection_node/trail_detection_node/base_models/__pycache__/densenet.cpython-39.pyc delete mode 100644 trail_detection_node/trail_detection_node/base_models/__pycache__/eespnet.cpython-310.pyc delete mode 100644 trail_detection_node/trail_detection_node/base_models/__pycache__/eespnet.cpython-39.pyc delete mode 100644 trail_detection_node/trail_detection_node/base_models/__pycache__/resnet.cpython-310.pyc delete mode 100644 trail_detection_node/trail_detection_node/base_models/__pycache__/resnet.cpython-39.pyc delete mode 100644 trail_detection_node/trail_detection_node/base_models/__pycache__/resnetv1b.cpython-310.pyc delete mode 100644 trail_detection_node/trail_detection_node/base_models/__pycache__/resnetv1b.cpython-37.pyc delete mode 100644 trail_detection_node/trail_detection_node/base_models/__pycache__/resnetv1b.cpython-39.pyc delete mode 100644 trail_detection_node/trail_detection_node/base_models/__pycache__/vgg.cpython-310.pyc delete mode 100644 trail_detection_node/trail_detection_node/base_models/__pycache__/vgg.cpython-39.pyc delete mode 100644 trail_detection_node/trail_detection_node/base_models/__pycache__/xception.cpython-310.pyc delete mode 100644 trail_detection_node/trail_detection_node/base_models/__pycache__/xception.cpython-39.pyc delete mode 100644 
trail_detection_node/trail_detection_node/base_models/resnetv1b.py diff --git a/trail_detection_node/trail_detection_node/base_models/__pycache__/__init__.cpython-310.pyc b/trail_detection_node/trail_detection_node/base_models/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 7415bb9207466ed09c1dd577d6510c09b33f3166..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 330 zcmYk1%}T^D5XX~NYn9yxsj!BHJ&O1QA}9!Z3E5<518q_=ao6cHx zCw_U?RH-)L5FEr*1wi8VeCB?=OXa?8w`Tx2PL29S!x*i1A1qXK<4=ALqVofY*G_rk zT`TS(pm(a5&PPBIgR;Fe5J69?LLC5UeuXe77p0d13ikJ gLXbI@(mETZyc&!|)a6wYDn$N!AzgYHM@&Et; diff --git a/trail_detection_node/trail_detection_node/base_models/__pycache__/__init__.cpython-39.pyc b/trail_detection_node/trail_detection_node/base_models/__pycache__/__init__.cpython-39.pyc deleted file mode 100644 index 5e121f62b7173ff67fdf64749444896d236734b9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 328 zcmYk2T}s3-5XYNVYZdk&71kj5DB=l3P!RSdWRsx{q)ExdUDq3U5y1=jkXN6)f=^D| z2s>f^zxl`nGIFsvtH_={-}M{uD<_9mQ*uj=S0uX9%vO(kvckHsDLg7XCe{yC-+VI~ z?O9^ycYT{W4GzIUOl<%pu1@#dT&`2I8HUvf0M5D5y(pNY@$RDvExqw4KU+Tf3Ha+M zz4mV4_YlxKX@&Cw_)?R@IhjzJd^-O#E z&$nvE@u&vE+H7DKFYNNqE-LZ5gJ~$}DAjoLtKlPJSsb9INTI3QGmkST>aXQ*)`P9F43$^jT|;?+ePi^Fy)*_+K^4I%_Bi9{BIh zO2K~AC)9p5v23UVKQ-&NI;akzJSpXre|V^TM9NQKGzZjzdQv@g*IYV?($lJp(joPX zI(FAsnpDrK<9HrcC)9Iz9#PM$7w~*SRn&`k9#t=?m+^d3O{r--pHeew7SE@HvYK19 z^xp*L)-!5eom8jPscmQJSa4LGzGE&u8$25v!}GW@FGJgxjmnuF+GDX|CWQ~Q3Ur_@ zG-^>LpSTx-b{H%MQBqJWwS#DGer~9;u%~i%w`G2IF4?adO+N}$b@<(fnUL`a=Gljt zkh$^NN)&a&S7&Ba=T^Jf@zr$qcGS_emFbRNo@sSd&Mk`b*XPfiIa8bUzy8}8>{M-TZuaEqGjm`6?YV`ysaMpg({ujmS-xFyv;!0AED0S( zXFzJW_gw75E1U%h47$&frA#bYr*3^})Jx!FX!nJOo zHT96V)oP>Nh^p1Z^OdS%^{q97#A>$_=Uc(0ZztLFd{ZTPr90iKhA&idi4z9RdQxhy zweWIxEvnW7KU&j4m=u6%wy(3(C19g z+?W_zRMUNp)hu-FUsNsEvn;GPP}p5qvAJz-TCs5hOKa89hhXeXEOqd*%DQK7T2XGx z#bUCSvADM_nAus>T)Y)4F}sP&dAA$wWifIkuMeRqwO_8@(YJi9QXQKYlN=4zS&MW9 zeUgj-OJdjjsAB7<$TB}pitXUmz-Ib5g*`{|Jjsh7PvZ__vsz6@Aw!Wt z$#}R-CpaDghigoj6P`@HhU%VcfE;}H%aH?Uk)@LRXmPIDsjZ4~Yn*le;-=5K7C_pd1a6+Z#&8Fr(gYSu^;KY7=Igm-~!{AoD)RG<@P+ko+EvBC#DU;CS^f8iW zK@zi(WTfMR#^$o8mcvnyQLAVcVQ}Y*gO>Aof628;fFPS2yOK1p(lgyz+!NhnM& zkcbH%up{R~R?m)E>);ofyvN&-qrqaaEC?e*mk@yfiyCQ2t3b0_<0f4KTE z0*k#LW#u`%7E%|ZaOaYF)SM)&?dok{Vf`9E@F)v%kld9+s?{})n3|l2?8iNpSIDK% zx1W-II}KySmN~H{0$SQK4QGe;@q(twcIf-}ot-Qm`Ws|$iR}=gcitl5Km;du{u}-G z`0f~*_<8|%_+4P>UeibRmJ`7*BbyK$+sek+XyELv>?L0D#oi$$LUxtO|! 
z#5S{O+%k*UhMqE;;-V>QyN8(9iP$I9MiE?wm(b&*C~tJxlf>?{aW+ z4G}`aZ)!TAIGCoZCHeRLk5ey8+(AH-58z}0;~oJk!QaGg25njU*h%4ngU;DYjdtMc z#FK!gQEvn~$)zDxn0Sq_(GDZOT?-P2F;_CS>Ni^0)e+q27Gj(vdn;%xuS9~TNgi5a z_o%8%7+z&8_53o2T9oNm(WBm2PWEvS621M6vS!I1wTm`; z@Gc3pK5_ncW1mFmIFb!8XHO_ar)DU1o*!cjjK3hzSOb60)sI`kSHON0>Z+XJt}D3fs=VMXaOf^#p~3m7 zn2cRWPrUtM3hwHQ5USrJ`8g7*P+tPs;b&`kkp4xyWnFOhZ&BFAT?V*_j2wxL-0d8; z&>TWyD?Eay#pioScib_*V{)Hp+AU|(>-W(YfoPc<*{xjcq2$Wx$Rb7wKR@8RI5q9^ zy=w(&FJ6;~EB%hX`oKFpY3;t#>nmuLU({{PL4Olu<9M13sD?Ga8I)^&yWD9vZ4%9>C2+O%1DY$EsExrmgMVe?OKHO7U~X}TvkhI z!b05gWRmf_-R5nC)&$RLDr~sNsind$6wd3nMZhlSWTmi+AKK?Pu9G}NLZHxGQAu%l zifJstMVt;wR;Da-S)Lp?XAm9*8Mj=-jgyEVT|^d>plPXSI{&vZd13FG+Vy)b@&4tY z!1{UI;aecFdBeCNUc5!0M+AW#@hG_1t&tph3aiPgTd`SX?46b}v|9hhAu4gsGxl16 zG++RC93hpgKEX0W*~F|hK zh~^l##M7=h3ZIA5n$G9&{G$(PVaSvS1iw74rsT&5*R(^E-(`dcsDSnX6=Paf82xsq z3?gp4bB$HTahTXU`Yk02NKz&V33!a0E> z{C$uJ8aixTwKwfeXETHJ3jx8d+YKYO{@nb5xrwxj{$^w`6$22@F<}xhNW<~zumO%6 zV_Cx@sc}5h#|{JIys7KKy1G z+uE&;UJZ|x2YEhUf81D~W^%8q>N9f-v#(6gE}T7kavIe)u2sujAL*EBPd@OWud%EU zZG=*t8eCH4UY;KK%CD9i?MRp$*Wz01dZ5dldb!t(&4q(5ulu@z7=kUTm;RhCe}Fv> z!A05EK^e)=mjPQs&c0km&Tm?zc69*sKs=-Hsp~){!dn>W!6(mw7XSm-{pK3bu+xq- z;H!LVCBS#_@lKh!O&Np=>i$|YDi6s_Bb~6!af+VPuW~MzBMsN<$<+Z~Le@44Mbe5N zu0;N_yyll{fsQc5@;cOo32~ASecWj0r_@e5*mw<%p=ybWH%;dsa%NfA-NYy)gZvC|LA=RlRg|`h_=NWndTc5E#?d zD-|%o{l&TYk63*3)l>`db!6;fz1C)mr`m#22NjsFQaS#a*SB3SM<5Y%rI?KL`PHx+ z)Fd!SEFC1dTBp@*Ad#1s@m4oFoy4U%s%yHLcqtRbFmalVFiM=&;5KrGE#_#F403pD z-DFJqBLa;AY&XcI7t=V=+f}iay+QVoG8|jB6-Z%rjSw1P86(@bD5GOx`I3Dl^M>X`tAy2;@ z7l1fG8hK^}af&!~R4e2?V&B-E9@NAd#@L>LU}Q|Z6O(j!czf13?IB%y}%?~wd1$?uWK zPCwZ0lL@iR1AmA7B|4@SLWuvR7WxD9tl=J9CQ1%&TIhe6eu@lbVGl642LO-lMg7a* zLPdPsPi>5cOl0raeamFL0x zH}e0Cgtf1h_zAl{RwARKe`gPo$l6y+6eT}aVgWM#V<3?==E7UpjgZDZ#|8$QJO9a2 z8>C`htenu|$5W^xr7!>ZwcBZWe1E?>Ee8P!B@%D9EN9f@K-vqTMl+n=MfQv^1R9b; ztKG=mM|3D`~ZkBae->76}+Ttrn}vp@!_H zs(WUvL36WNY2&Ona-7X|H+h&@1OfCC1bN6G$U_k1ArAo(pz@FadCB|*V*}?qr$`P* zmj&dW>C;uG?sd-j>eOK{Hs%?4e&gT&MtpY4F#d-!qfY^uZ{kg)X&Ax~X4|OguURuW zZ*}aNZEBv=u`Rv9rqiG2H7iO(29FY@wQ4sE5IkmCx z7@{b=`-bpB`}X)fyEXw%NtD4U_c@c`jEQk@#`~ND;7o`~a3;Z-LaT$|91v6BOoa!L zpB4wjbkh)reqlCjaabHd`Y5M0|JX?SIH#Y(Yz~QK@w|B9zF9jg&WjgC1*s$AB{6f~ zs2vqAi<9^s6Q{&0_#PLpiqrT$C#vE#d{2ni#T)oOFJ{FYzAuP*v4HQ3VMQ#iSznkk z3eD}8#F988&Wf`;PHiSUA5O%G8gwsmKGM(LD6alX(;^BwvRK>#}k+rA7`S^tv6TG zw3}R-pBM3F)Q$r&*S(v@vc5VO%jSG17GXP?9~o~pSUz(xtY17o*G*TSvGwxB*;`BJ z&!4X^1kc_Ylbx+EE-suocYg8NTQ4pz&R!5_&n*V$7N~XAkq$&8^F+v8WDcZ`ci%&G z=t&kpLW7K_W(gAt>olxSjfN>KVc##-a-4=y+;?mFPYuVY6`%%rRq|Qj*MqyE+=VR~ zbsB5%P`ZrQ#+&>dNNS{JX56#3tsNL-W^CKS%<`Fe&)l&ZW}joF&UQ}NJEJ@&b#9|x zW@py40n=pTe2^Py<)R-h-Pc#~34M9o7uU@!m~k${9~t|DLcc#JGyH-uu)^|+%0;p4 zEUKIg+v|%rRIVOJ8;f!btGV{jRL-qdkmx!~x(+^6WoegIU3FD1S?`8Y(mp8H_ghgb z^?l_9Lio_W^>(PNC{oUM!|UHu`76{?sG^W@*O#z`YC$+K*(eE*^G<>vfq+zIEm zR)bi?QFH!-P$qE{wEZYf!(={{L8}d0Ov8GLp5}wiFhTvbL1R^oy#~6US3!a?4Fetwp}Qb)r4~ISP-+bLI;Ua zwUpUaF0DIiMNJlRwJ49ENXxI#h~;J=h2CTHnktZ>;`LNkQAgz%Sd?84(yA?AAeQ-w zDo5ewP-gNZb@mF8SBbm^@*>{E1u@J?)59KY9UR?({+JJ4QlBX3grq)xUwM!&`~IEv zpsh>fade|fmxHvvx&py33EpfTM~(G`Uk}@D-`BI?sqmzDKBT78coRYf*PQlv;dNy8 zJwx1}mcJS|fE9V#;sI7%ZpZaC-r72^x!*4x_>^ant{|}|Qo@SVCX~2pe7w|iQwIWd zGiTa3Vq^}iJ_p+`+{^chK$IL1#TC{O=k-cyDJx|~pnwS!D2Vcde6I|kFK(B`Smucm<3O2}u85i@VFqx*OUI}mQ95d@ z*>W``hnC^q>^on;@QQz<<`n4vojw1OTgj3s-D9i8Q$ng(A z(WiC{pqA{7rw6lf+J}4OW?w*Rdizi|e*5qa{GMo~UwVW~SGn{km0}OLSJbBbF)>N` z=!Jg$Nv?l*SifKXh1o~F1MpwSDi-8nNNAUoMqUQFrVWIBn+5H+EWo!Rds)!PXtIX) zG4mE-_9?Md#?jcf|5HMHJ?Hevd0g8U>65lEK8L&y!#nHn5L!W7k}|WINisN9d@uM! 
z+nRER4oxwHsfFbCFk~?TE4v*=Yy&t+VTi-Z`RlDH45aedp=mW*p;QI!QYFf3C9Nn) zgQy-VhkPzIu@TmT7oP$N^vugP*sj>~XuykfUK|+}t|AzBw*qL(JLdM3GsO z6Lp1#AtwsjYH0TRRT6rAG)`Ht z1|Cat0qiHCt|&0>x{SN7C^GH>hwj4*8s3}AYT~NC=?*5zxGS%rQ~55DUnfEvD6fO; z(y;X+$lxKor99*AKOnJ(yX4ryCvw;;a(4i4e7Jc7EkmH{DLzB*lw%u`h z-k^^B42YJyo!>5G9#SsfiY!7X<`;*27q_K7zIU_4<>i~D=(HY1{{15@=zg`=0yX&v zFRsW4vzNaHvUO5l|NW#Mw8Kh0h$?Z^zFSGcv=Sth3zb+_7MC^Ln5xhXB;yLf83GY` z-I#Z-I;PR6LtWEhrizXAsGdU45*ZVd)TyRJ6l{oRR4(Xt+jrqi6I}Ybv(Xx7Yo$H# zTaw@64)!=_s--=|kO7Txi^xkv2pS~mk}8ibQTq_2yn0fsk*waVmNao%K|)Y!Sd*p; zU-2mP+XWOH1@$fQbOnp2lZ8n&FHcdL+*ai@T9HttwGbhtHeXChWJ1ur zYK9v2J*+0R=u(R!<0f5WMy!-~B0+J6w{PPvV9p~OKk?YgM}!BL?^pZJ)a%ooXxjNU zjY+oWnt8MQ)%b*DjUo%(t$?t>?%t#<`D<9xcZkbA6btj6N4Ro}%888Dm9(}#clR#R zyE{Z468UG4Yx`iE+%1A_f{5hLKvL|@G2s%~-A_GnHW^FOZ+KHs&aN*A5W= zy0Q_-7Q6&15&Zz?T;+Y7WpE!Vfeb4McfJ9bVsi0~3Zi;*+-p~cKnr**0z=&b5)qsw zsT>Y@8N3jfw-L10fqrq6O2AcRb2Y@c7(A{}^pYnb!bY&(PAemQYF{R)&^&pc^{BL# z4^s)F>-Q@|G(uc9O}MAkAX!a=CN~Qz^-!jmVr2u{g$21{_G{3bl?9z;AKebK(%llH&y?^=fZ=arfjW?M0 z|0D7GM{}p&zC^BDMwh*e482+f6WrfgT>6-jZ(q{8kljK|E|cpK1#$ciHg%YR_^Q>D zUwQ-C_2U5S6IaV>Y{2)EZdhj*Kv^kEG1ieuFjKDL6 z8Y7;!*M+h%j%Mj!bXM1Dr( zk%8zC`zO4Efhhm11N{JPuqygIeV{x_{{`=0Aj&`MK);JN7>GVkAEvVF+a4l);&p5R5)!6=mfI-{_Fo;peiz5dt8D3qtDdIzT-0^*mE z)q#@#rWD;ws^&HNKP4G7X2HSl1N`%06zN#|!9jM8?-=a0C~q&#*Iso<*Me8mPUiLw zksLD^hg7K^x7+%smV~1vs^I$pN9q}!KBPI3_orhc6ctqY_xL9yKB+Xg-KPo!;H*CS zM?kWykzWEyPb7&^(k@Hd1nsj^@LNR6ZgrZBW~!j0p2>(#uy(wPCdz#i>%i|qv2_IzWS$M@E0G{w7*d0oHZ=hepp}CC?lhy2iAj z)w2~xPn@CKbBDEFZRqtprVTW1yvKU=FKFE4*4rAlB4fkfH+l_lZ0>;LRGcO_F0X-8 zt2lG~6t8ptZPsf=r+8yi7rls0+q}u=c#F3V&E9;}+Shsu(Lyx8&w8ghyN(@S*ShT^ z>|R^#8qyE0jMAOwREg*v)<9x7`M3ncCBY|hr4fUJ^LNa)Orp!RTDJB5hG#}aX_Sj z)%q?<$4?W523`Yib`!mGEoX(cuTS+uoONLoT43a6p%>e;7IH>VcW4t3(^c7Zm004vPG;nsnTnH|(27m@_`==6Ex zr|!hd-F*$MwcIPrefGTe^`%KYH_^{7>I>Q#+J|5Gg|*2M^Qhkoz*4{l;@`Pys^>3eqHY1s~h;aB>Mwb&A64H z!k3c(%Kf+0{@eLHxX!+g`4_O#=|Vrm=>>~8l~d55U-LFWoAOvg__Wl|K69Hz(arnDS1# zQkJvz(RRW+H=|CpwH+p%HFSPH8t3chcVo;(Iyp+V^LUhYpn-Oli<2t60nBaT3zP zf)q`fH{&q7t*eTlj=~4I(Or@jzBdv|XJIZuoW^+&2%1nDahgjT-!u$!K^v6rSd2D; zm0K&)OyVr>Ih#Qi??sukM3@fnW#M=nrCgdB4p8c8D!o@nd^?F=L2rEjcpUIuY16q} zwu>`lIc9Rx7`RZDNN(5)Wmah6ckZUmtOG5R#?X=t^&X7>)xG^&!1@Cki&{ zyCjRHrAp!)Hg=oIS`hS;Fw27Aw)W4R<@M1pTHad^M|_kHmaj!38>L|qq@z5_mU9ut zNx-8#>XRUr!`&!D|MOWi4AVUBqc9joX|BX{e*9)W68-h%{zyd2_*Q($^35=df~s7( zs|({d#RJ$22`Kv%NR!#jXD$4e*dklh-Ky4P9)3P+vWLO<@w=bxHOTv2IF@psWP1FN zmm)R0f;ammNTE%&f*rC6Y-|J8cUCLlPX=saOsv9! zC7WDdf|Z}qP&cM#VJUk)jd?A+cJ55w!r=NLEElWT-=Z07+<^67LCFKPuWN-pt;35j zqG)-ofH%X8@L^NGun#E;n5%-Etuk%VLaEyjA0@`4L>>Z>MiQmsVXA(D$R~+ssq>Zw zbWACUxIkQ!CK%>Un`HHm5f}qgQvq;Z2r3<}2{&*150tC{6FqW8(f0MN!Fi+p}m0+p?w_UQtG#o}jAiaPz049X89|YuP z$`(QM%KCF*-d|sZjl!TvF~keRZd7{OkCFuDd}n8$r7pBRW%1-ZGIBGnc1~}dRlGi` zcRgg2^kj6Nvw-NSAV{&j)-w=6D5kTXsluQIsra(~JWTC+I2=-j z3#pM_atPTikb7dGULfvGTma>&xVjz9vR5QmBK zxTTM*#VPrwI9(HD@`zsj95{p2Q#?jwg~)85@O7&1b>h-?dyQk~81Q&)O(-Q$I-xXy zcJ?{E*%C;TK_TH&+z;Bi2?cFJS?&L{jJ<^u;=JQRNMy$kod+q?6}(D}GZzA(Oqp^f zX0A^S65s$K{ZNN%(7CqF#HYzcz!|_1)HD((_<%b=UDkmK7o=CMPT>?b9KmwRw(f_U z(WVXwP&pY54h2pIVzkRZuVh&&oOl(R!kN)2i)%!lBJwnmXFw#oDm{ezU|dQ=o!t#I zblvitpQTRvt&2T}dcpL2E{Ty!f<9R4yDNAvp0G7_xMYr3@m3yyd;lG=&f1_Z{`G$~ zEqH+WlgC>*0%+j9cGG36U0Zws6N)bqd6CG>?#0Vg{V;1M75og|%G#TH+gEDY2k)+D zX6>X-Giz7ySAkzxIKVHVd;-6&Lig-0_)97f{}qa%Jo1e`>HP6S+DJ|Zty=aY(0eT1 z5OvOgD-ko0Q|RwO9q^CbV&ZUfz~BL#eYOB!z^yloNp0c*QPu{m@WAtdmOX`9&jVRD zIH}2MZWXmcUm)aK&uu769q7_NtKFvoFRBto*1@f9n&QQxmNJz6qVa^b^@*aM6DDo! 
zWBiP#a~J6MX5mk3yhaf%Z9-MO9Ze*PS64hU_qeanH@>IvX+w{Z?OwX3r@qfmBxS_5J( zXE>gRW^>e>CT|^-(fK!OHEQuy#K-r~dY#5V`XLi?t9h%Q*xM3Dqa6;8qRLq~rE`ZGaM9N$&HTQinYT9X z-`C$|;)f`GWaELNx$z*M=L`Jw0WxgVI^+V;{sU^CepGSnL!D{Zb6U;tF*U=lW<4I{ z4eBwgclf;{meh#ft2G}|YcBFL2N-i=Mb&!miYgK=t*SuXHA`@%0v_ceCD#&XsJuvo zqPj{`%G88BR@XS{5VuW4fip!2oV|}1&p)1ykuW0_6K|lV9CfV0Q|v9go^GZ94xOwY zCQ)Z6Otzy7W!lD6FoRQ^MLO|14W7lhu5qGC#p(O0opu2<7N+E^r75Ch8!4bkz*_q0 zC{5xt3Pqg1Ir!J@zy0ymo7bN1GHGNv?>=0{Z}MoRlYNp(4-vtw3L-f&qjhQaqd1Yq zc(ki9ru4o-n|?uvktow9JBfCpWJNh#>8i@b;IXQ7DR@j_0r3dUSa!WsME+83 zJ?q&jo5@P7@@eYLh98DRzKW*oyC8%-T>Oy97?jCSW^)d?%!0~ssOIYD@FT2p&LHaP zE!IIu_5M%YM>}Tz;2dTG&M|zw31CiHl=&9Ha{}q@s&4*$uT|X{eK_i&ucr`qh4$Tq zp5KFd09t`_kRVNQfuqWnN}kFKmQ!!&6MF(o!x_TWLqIncUG{7(qU2U3N|;D}>|$SJl;6%n8SI_tL>(jituLX3utlZ3w&B68 z;nwRwpEOXbBcxKA=L3-$6kw%Y&c%n4Uj84Rd5;=~Bj=LO+i_z|(DsH4G60BI_Ws7C{3e5#nYbCgui_O(F@A zA(5|u$oY_mWg346(c#iSjK+l4-l8t%P8{tj!d#Zaq$bwHaLe88(qoHVUoReZ39M zM=pP&Wa=LDv*ZHF5tP(@wl{aIr^oP--4FylPpok#{Ue`7hgAW>CDy(whhc_cVY9cXl@e1NnvOI+1AkU>X6`+p( zj8d0f?JkOQf|{h}L|3u#*BUh~J*3v1h{x}pga>D?(SUFtb>Awy&6vP7k0?t(tcH?=gMQ3nV$MU+L^-X>TZmuxJ?@l#P?_)25I3PkUmSqr29naz@H)!sN;4u Z@^fwSd3xJupKm|lZMaTL-T9dR{x2Muk&OTV diff --git a/trail_detection_node/trail_detection_node/base_models/__pycache__/eespnet.cpython-39.pyc b/trail_detection_node/trail_detection_node/base_models/__pycache__/eespnet.cpython-39.pyc deleted file mode 100644 index f39dc03db1d4fd716496a926ccd870fbd52f16de..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 6085 zcma)ATWlOx8J;uu-JM-;Y{zkuUbYv}ZfW8=P0KB9(_2b!CL(EzR;9ya&v-rd?Cko? z*iOtWs&GUlB%=uN0H~mKRUz_*P@fSGyzqv2Ko&Yyn4U%g+`{z{F*kB!E8yy+(ZT;nX(I>kThFxA%kM#o?U&FmYx z#l(L0W7XR+I(1NN?ttQylm;j+ zuYgi1DO3CiuW|1>>omh7yuPN3UxjQu%^Q4*H+gfU6c;Nr7)9D7^cG{%(^ti;m0Z) z=@}rQMn}=nIpaDvxQXqW%hm_lGUFDvuU9%2ces09>)0P?rq*$=sfwT(_89?_fCC^6 ztk!jb9Xt&nb-V`N^gVEATE=p1M<45ZIOp8RHQ&h0T+hvu+727JgmE^n>Pdg0G6A%!>wH#GdpNQCL{%3rIY9N z58Y8Ub9XfKRpxO0kG>$Qj~jb5+tg&Xv!hyGC62syTsy9f>cFSQwXD9{#C~1w;H&HSxg_^pSIzjB z>@CJ)E9Dhf_JSrr%)VGM~o&8lWBH8pJZQB1{=dv zR+E9~`*CnB6#F{%vQK&-DugNZA>$Nezx!&}t~f%3=?PVF(nsu*675eBAeIYQ%jHoV z(87Y0Oqy4sAib%ph9E}a0bFj+OAFr{3Z<_w6+cR%%=ZOND2*t|q>XRt2brJ^N_Qv* ztN!BEMQO%SnsuBtKaIA-R9YfPdib(nI1Cdm%@hYH^(2wiHwSzp4&MMbzJECM`IfZl zT+VPopDVpC8Z@OJUL6WD7U^9Pez2T{!tVx|vi=?G6aRM8=$z;X%yK#`FzJF8u*XF{?pdT)5uLJ`=NO}twLy-=WAoi0%7N!fC z2%^~MVHS2t5DURpn1cUQ8uo)Ei@HF1{V>Utm`)9^WdqS&S?CT#xPWiPmn>Wf($FvQ z(p8-vUK96WFC?J!aR7}qn9V%a#BZL>>2u62TMbsl&qM#iY!3e(+pd%6JAEMKF3I%Z zArD1r_9EW&mjJmo)^fJTMzF6{Sl)3hhcD@|k&)?K&)}O@*$&&$;UOL$nbgY7f@fsq z7OdFh`aCTBn1;47Hgika@?)6S!fR*F*v$>D@4;%Ziv1m$vBC}b{qF&*0@|0f+#c89 zK^Q^w{5*#z!;J7>V=uS&C<2&Ef*h?fZP5hQu8RkV@F;>48a==ZOEHMfCq3_G8PZ|Id0`B=f zxtXFz(7dwtLXdS=mSCeWC{hgZGLh@0o_51HhB@Eb*_Vigwx=wfoJUGF=jtbpD@qrQ z+aB^n^rUo}lTcVy;g2GErDGs=P$Xv^Qw2Z^GVx^XB^cVJpg)Y^UC2|3qX3tc4_mq| z3Q{M%1GPhsjuO8nU() z3b-BR2cG7M5q@dSZI0+W5h(<$CdM5XO^>IbgT=h5=AP%5A_Lg@kR z?8|u5c>wYzF4WZi%+;rviC=?x|FMnj*+b&IVe2M?VGMXVR`rkem02LhptnQ|p& zrjHF0U=P83PlsdBxwgSXOb!CB0B(T93912c>fGEl;ePb8&&i$Kh7)*t{k`?4;bOG0 zL*i2oMuRhfgMsQRG!_I?~>%Bze69C8@}8@>4GE&cFtP?}Wz7^n%qxiw+{^3dwDrq*Eu$o~zJu`-n$BIM zyT8l5afMeXj3xI#N2{Bf__Lzb6wS@5+*9dx4T-8pD?*DMUPmZwsFc;q>IEIJj$ogjO>DRMEx!8|mZ zqvkYub617r8*KesYCY=nRG`NRO!yA(Yj(2UQMDep4y|{zT#r(PgLw!?p#gY!y+5e+ z3O-62P?}8m4zD*iS?}0g);m_NM>BZ5n1>*=2JqH;`ucgC2602ms!u`4rViN@6q6s> zbx4Z;!#NEpf)C}7Th3c@VsG;pjehtx3Mj|nkxuMsNQiztYGfz#M%G-tXGgzbh=+mQ zyLw;VSiPUm@L7Iz7bP`XE%JWozd`-u2NcEL)1hB+IsQR4{_>>%eqN{kNxQ`h{&OY& zJot6-sao$LwcZ>*wu=#y)ym$(tKHDoKUD#6eo4jWwpnCU%6n1)Qcx{%jBxU1DwwO< 
zq^L*8U$u?>7ImotMP+0PP)7DQymR_+hx%z3NWp@H+_W@B7;m8NGbx*vUNT7H zCO1}$035bW_=(2`UlrL1yoIqhS3mWx!I+9iW1f?j3f;xrO z7SliT{*Shc#O4G8G^TYbjb<_DH2#|kPNTKmEU$;|p61YZQ*e6`Z`y>0-;w4ZOGRRX zdT5Nh8iiY`Rx0jN9(-3H*(0PeoFPzMKtf~Dz0UgMz}ym}`p8uyD{7>zTb0wn>S)yv zPN}Z*a9?Cla2?azK;ni>8{dd~mGM;GprngZI_$Z+Y$5?e7Of1Q5<=Ki4fcoPNhQd= zpZ<~mOu=Kl%u^*EaqkuZmE1v4A3t1TRcyF1}gCm)pmZoSpJ{sxT;Nts`S=U z_cUeilVvZgw4|IZDX`L#@|2>`m-jl=@(ggzaVSljB#A%6CQ>RtktFIK^l}<{Ue$q5 zFdHf30kr{a8d|?Sb)cySi6gr#DB(P`#;tmf{23iqkqWZ;3xX&Yx+JTEhcYzE1u36Q zDZR|JWi3K{`r317FAb7MGIeW)JD=82gi5F(hnJ@XLQhl^|5IRXcA-5d zt`LoKdngv&rJPQZqRyH*Xr5l)NYrIa5XCHKrRpB)@OCN Rrc=}RO|MoR7jDUB{{^Xxk~9DS diff --git a/trail_detection_node/trail_detection_node/base_models/__pycache__/resnet.cpython-310.pyc b/trail_detection_node/trail_detection_node/base_models/__pycache__/resnet.cpython-310.pyc deleted file mode 100644 index 5f38dcd9fc94b313126c383baa111a3da547799e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 6189 zcmcgwNsJ^%70t-4*4~#{JoeO#*8+N}u5QnQG2S%BV7DyKAfu8{l3fv9UDZ`tHIZ2} zJ*^sKsmJn#_6ZIM4&4?4^%c$>;KUt?L!6Mf&|H1P2=jhqR#lhXgBeREQ~C49ni2p1 z_y3PrsZ5R~O3V^5zv$U47J{Ys(9zYt^fwymmzoz5WObUM?-H z@$$whzq(N%B<8xjQYRCGEG|`PCTjjQe z3`Ell>$^g^VI=EZs8rHb*&B5~JTjFT1j>CC6Rz+hOJ(YT*J=8J2vxS-jS^nwG3AjV z=kWSxcWJBL5=*;Vew(+0+S1!XhV8&_dO;B?l7Cl(=)Vw(mLEj* zDwtYJ1d&gz3yJM4RohbZjc3CTg*ReIFfo;)O=I(uSVKsn%$qiIKb2>|eomGoWi88D zFrSs>z5_|xyA!3I(n(25Ew`d2a~Pv{EXRqPATmvawjUExQM@8dLWR%-nKk8EysCIc zE;0eS!W9r0p~@5d=kQsT_Plyf zk33InMLDFhlCm~}k|eb$`yKJ-_mwS0vl}+g^? zy0KNpdU={A%LZ-P@|ylVA+e&2=T)127~k$^aP2%&Ocf0F1la`Fw>G(ki z7{}Vpz2ZmJt#xQ+g=^7EvUfF>Xr?Nf&AxWDioT9ksr5w0^BAp@Aec66Hg8Vmv`IYz zmf$5%UU(kdx=JfZ~)yDr3m&M98GIr#WFN%_i|qurmrmmD1Nw8vmG9^uvw+Mf4vA5(E*eVUGk6hda8?q?{F5qs!Ag zGXjLHBv)7eyM<(7B$`m{Bpjl_2!Dc9Wz_hQkr2eM67)m^={AMOh=a# zdZZ~`*3l({QWpJep5fVjiY7S#AfMznxpM}_<@YJpj3xDB$a^y93=V+t@qGs^<4MZ| zTI`XQiKJzca;1@$$;dvK;!}sT=4tRVe6q(Hv$F>5hoCqIyO}>UUNe4JyzfS5dM@p2 z&y?@ueYSC~=Qhq$!p>*+EoR)7Nsq_S;}_K9;DYXvIW!Sv`{l=Zj>@-X&z652%zr7F z{|Su!-JtJ9%pdpuNY7nB>mLTKmxit62}iZ@1gc6|Pb(f2RXh<-C@^E#h#lTpiU* z$$KSvmy`FZntGZ0ov40CynLtDpUA&c(-XOG>a|Jj0nN=SKB2=G654uL55mX~szTY6 zr>Kc-zurRLjMP=OkijbVuBg|xB6$@{R~e)))IIXzL{R%)?II{%>5yDZ#nIT=pwui` zU#N0Hw-u)&$Xup(g>1)p5spQ5jsU8)pjhiPzbi0zNdAC4?Km@8`qQjA4&Qs0o!57s z&B7z+**r3tN!;dpwnI71toG0-ePXlSiFfn~Z{VA?LPuau1~f>Me~JP(?z=skvu!IR zs2PY+tifiDZA)H?%mYCFWNptzI!SR%_t|5I=6$zj+)wpVl;R{k?Vh`5AL3-)?+3^k zkqZ}M?U?{w?tc1Y|2>oX-o6%@4Xc;>kj&j&kIgwHwOx@?<76XyQ{3)S`U7cF(yd&- z(-8qzj`V|?C}y-`{3)(t{sb-w-12oIZxHz|k?#?qSSQy( zlzpcz?jj|}K~lbr3V4ZUsd0(ODN*ugsMnIy`DQ=o)bD9fWS!%Q4;_HzHR6Wf9L7-+Hbu-Njv|FDOit<`ccV(quYXZoqZ1SD@ngzRDZb%H z5BrF1nsun>SL{Qb=gTuaoS-7>z>W~>&KZXgEtxV2b9V`xtQ^s09G%GN z$!~%jlLf>@eFh+}D;q?HrlmJZt-q!EfC!bo+y>W{zxoZzHm#8yPTvxA$l9dZ8j&p` zbt0#2l?MKv8ueDG^o3i!cPa{&OJDt70fV73&(NxMQLFVf6;InCwf>13^$w}@C3m>C z@_*amF(K^uDO$DOVT!ni?vPslOg;1tsYFNl7W@v5-emZClHl3{;D6N7x5Wl7KE$xNk^Quk`$zz=MGgLc% z<>>F};LQVN)mt^$!T{wW{p^hV+|c+WXsYxJSQvgxyhz?%|69u&)A+4n+9uK_y4_E* cnXEhQOkbLQHv3vO7vJez8fjCS!uh}c15<{9 diff --git a/trail_detection_node/trail_detection_node/base_models/__pycache__/resnet.cpython-39.pyc b/trail_detection_node/trail_detection_node/base_models/__pycache__/resnet.cpython-39.pyc deleted file mode 100644 index bcd9afc6b852a07dd136dd5631f403293612cf59..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 6424 zcmcgwTW=f372erhF0Y~_>f%crhjuOwlj=fIiXAsi6DM#kO@ttEk%B<6U2#U#O5{?W zUD~oh7X{=ZPYQyfeJIceDJWX#weNlJdx1X8Q_%)R^%q*8>33$Aq?jmeUBIR^XV2Vc 
z&pF>YXVx1V%Nh86{HNdWUp{FVf2GRFpM%P4DB<5gxWQS=XtO$FF>kgl(_lv1uG`FL z8r=Mx)l=N!HcxSfr+MaAsk+0T;W?gv!0KuKEHCgeaGAFZUgYBs3_dQ{W_Hi2=g>02 zC($yQwB*rJ;!|juN?Hp1IX=T@F@KC-;b-_+a7F$+KgZ958|M}N1it`og1^9@ z4@I~tB9)OM3`A7D*5_)=eQsq@Ws}~;>VREY+I@K=inhb+3k$q+H)wS{KEHi0>d3~% zd`C7H+8r)h;X;4ixs^tB@udbA^V`wJ!%VF$&(&(R^{b+>{IFA37U!xjH?E1=%2hq} z`a>Leu{yWHYirB=+WJG!U0Ir2t1eXqUxG-YjY>*d5Vy36q==Bs$u!6@18Goi|3`cV ziH#)(cPOBUFouGWzk2cEN9_N10Jk~qX zLN||WmERIF5G^v+^2m@gc>JTgu+eFYh20IW!#hE9;R7MVPT;lNpc9F3A(Ecoa=C~^gU+|$-4!9m z&xNAx1(DwX(`<_%@~C$%v7LoRM~c4jta+huM+^xjrqcA%c=-vOAtX^6W`WsX$}`}; zCQFjCmgO{i1xebwar2ViC5*ZP!XU{w^58=ils|PFUS$kH`aP3~I zXYQfbO-0UTnoh7!wG2j?=rO@(2V-(Q8})pTW)`^dk#S_L-a29`?FX=uhNz6GRM_1X zQl7)MD&x9-;76{jwW3m_vZ}Jyf~q97Df>h5-uIO)MXMWkE>Y)_%8@S#z4mrX#HQvb zI~1*TmC^f+prJt1+Ttm`&fUKnC;ttL{6ya+-iq}>fxG>a_9 zimU+Yu-&PlQ0MyxZ)iQzX#Dyz)t2-lH1ybg$mamhLv|lBIbc02;UM3Hvj&{q=QdN^ z=rZ{(W~B67jP99N4ZTKxrUmITcxU+(c9LZxmx(-05v&|gdJV#WQ44%8C6H-W~9A!z9f!7wUt8y-3vD+fP;JUZF zUMuOzik)pQ2m#|*yZIYl)Yw>sR+hLHy(D{AV~J)OqSfkaN2};X^h&KKGM*>soMg!v zoO3p>4eB9~1Rr?>NXh_7ohWLFKs2_FNx%OMPCbk{4Ne*;BVZ!@?U1fCi>10nto z$oVl=`G1jf&_@R$r;jkF&%u8Yt~P+H^nb#YoWl_+nG*v03bI~7zx)mnEt+}aM!mhZ z<@eAYUH}=v($^6+lum*uSsPt(X!@j5+I?vHV_MDMZ2GTa^e~7ZfLNRMNB~Lklx{OA zS4Fny@C?t6z#u2d&G~<~kSL5q5{jD?2GL=JKSP=_YWm1V4A+Vfv5=GKc!ejT)xLvv zr=M2m%>{FZ>OWMbf^JLE%9Pr}B z{S-M9$Ud0llZSNX68I@T(PNv_(*~@EfH(uInLRY#G=6yN zz7w74IrLt8=4VV6z@6PZ*K;<{Q>xCV_bq1J{vsLk#GWa?pb-Zb^oZ=Ci5T0jKgsh{ zzx_qe#!8hWt(kIaG` z>vWQoc~jDb=awWls(so>A~QyXgHETl#8tZO z?QDZpdAIFti8!DuN5}PQ@?1)uwdA?1CST{?cI4j?uit6*7xL~j^+L{j`fO5rL36!< zEpz}wO6!My5Jq0m5Xz=ZMU8KHejAxHl2_SA4y&BI!f$Rw@*0k=vPfWPc;v>Zp!UDo zO;E(rLAjES<8f+@lC$J|p~?r{cAStPcPZ@(*@-hFT#S5<5URDHSo<{JE3kIB3-|ce{aK)`}bfIT_9%LH;=^ z+_>-bY|gf<@B-juAU?58Hf?NK@JeJ5A$$x|gQpCK+k> zoIU#x_v!vP0M3XUI1p>k1mtG#XHJgaGimH?8JU|_Fa0T*xw#sfaawA7B4x&DM(!hV zyGw}=q)6$ua=h(r5pb1~UeFYktX7Epm?lv2qa3Z8aA@qYlm7wru_Rp_M!&bw{8N;W z)`p*l%xy@%@Gm=MYk3zT``uDs%0rtSNgW9dDU>%*!d(zJf;er2gpf8;0!bXCm~cqa z6V4uRak5C`oIzV0=%_7|w6TM1j~(PV=5rI;puT*+50bQ2HS~|a#II5~fky(he2d6C zMBXLxJt7q6_l5t!i!uIo{F;5L^Lshp!_6tO4(teV?woN5!IB}9D0g>oRmN4i+la!l zSB~Fhu6nJk@2m5%Mn=PK9B@2r<^$*%ZT8C_i&&_a4T zjh*Wc#?}6vw$~K+PNUbV*zz@;{HuiC>d4Wr8{fgk>6?rTuN4WG`;ACt_1f-kr=v2u zuPa+B7dL_paqG_w52>6j+!~spO6d_NoNjT(Yh$KYCf? zl0{BZejDVNY$mSi`vRw`Ph@C{S~S%AJCsq;Q0wCo&C24V7maMwtCMT%Tb&Myf?C&! zG>L2wIZa43@%JdBLZa43Af$VzVs)|l=%oa(4h`rG2}4)4DRroNnrx`|4=AIuq1MMH zo0X-fl-an#@%7$9$Q$-v86`vLeu6nWKKZCo4T$lD3JZ@Il ziT|@I6Mwc2lFt`2aTQ-xTjBiBXQp(6wr!PmT~B|+(DtqM8fPHy>jYCf;P{fRvte?U zr+xp`5l^M`emZO6fwKH|Q?@WcImoxSN51E1M?*eFWnRI-@Ga(53Tz==QwywQW^9us cfB$r{IVYDYT`FBEy_w7R|1vCtJTb%m4Zd`;8vpg?$}uD*5{CA?mNe2#&-isq zwv|>$MS(1o!dAtB;vl6cs&wbVi3^7m7fx_^TvA0*g$gc|Pe~}u_y66qDRvTPRQu!597)KpIk1jk%;0HFM4s37)B0rXh@) zJ!cDJ!I0+rV$P8nX-Qi;GArF*+jH5c44ISpHw~Hh%_VmuGnWIUM;1USq?9};y|NFK zzLe4<@09~`@J%sS05&9V1J(;{SndPX2W&*{2i6blfIJ9nK;9=0%Ok)Bf!!{T0viH$ zha3fVn?Ee?TsBqQ+>38)?UQ%OWAbi!_qH`R;`hmWHpJY1f4@J1@_-aCVAn4g#rt=0 z0>*4Hr*r51aMq8$GkH-Llph9uG^~Iy3d@YN)%I!{#hJ zQn>ceViYyP(-RZ2emSVtJvrXE8r4;KalEb;CTex*SHp?sy2fV8lgA${OMko(E#6Ak z=@Vnq)6?@O{ql)h?K*ROZ1SPfbcxvTqrdG3P>dvby&%FRYXwtJcfQkegkBv1ZF|$2^4R@MZ9Y z3Lr8f5gQxkmbnepiZV-9EMiO-XJRY1FmYxx8yh7%&c@~jI7?2H*~&_5n{b!;P2!h! 
z?8FZE9p=xq`17$1&K`_slnT^yhYjwk}q@@PZmdwP~o|-HFN6J_4gO?O;RstCjjnSZXd#=YzVc zm8#xVUttTmQmI_^!mw1@F+RRBu~@J96KjiJUDktziRXP4)&s9v3hI#`PDIM9R7=v2 z{4#BG!n^E;;2#V9nioWsGLVIuA4GfzW2wGPl1X91!6c^qH(eSlc3o3XKla$Bt98n+-E zDQ32?ObfG`*pe-c6`}4y51adfJ2p=mJVvu;<^^L%%oZJW2s5d}1a2pAl)xPXMgev# zbqs|TS8!tzCsc{9>P&qlQePn!245wH?0j8a_LQVs3`YPgkvH?$IzwPCKFeI&*L@Ia zubmu;S>V-th*`cw2fb1yyih7#TJfrBOU}R2@Pd#8l}=mtJmy8^#aT$jl;n$0lSJ-K zC8O+DtEE!MO7BOjqUEw=lkkTH02hmOOh@!xAL!+lGKgKjn;Qv}G*y}%MgmTejRYKH zmyvK`B6v#VJUeElw|6lRma4{cKH~Szi1(bsOzt2;f3-K+)B!hX{~j@HMG} zgtf)2En#GG6N@oT7Gqi+Ax;*i8VLFX71V#yr5$IM$ z)`2HM59u_!)bP{F*mXduGODTWLEBnG_j-ax-lH{reB(FQnkUew?OIu5GCn+B>Q)nt zp-|FcYKB5e1anRjP7F@BdEGYs=H#7vkHbMz+ zn;~zDw#OoC%Z{ui3S&uw(@=KPiL(;^7OhyrHcD_LI&LSy?oBu-IB0QfljnAWLTmgS z#?MfwjNWL0o*7F9>sYQi6X<#1J;_?O*=vKVH>Mq!GJnv(%6c|Q-}_QJ#S8X91G=7- zg-!DI{jsHfmA3Yh6+o-a?R`A+K-$)iwhXropl{MP7#!Nuvpw$Ke(jb)ta(TdvW>V6 z_^|Ac#nQfE1CmcCKLUy0zim8Wyi#0uqXV%^XR=|czo9(1bSQS04pZ1Khc`3Axb#8F zaReOyA&#xvnZw>jD$=SSm3gXP`XF|GD%9cD49C(L?$|Iv?`iRmVveN8M?C(W+8lqMV z45CgayySj(Jorv8D3MPkT}L=(QJkR*K|QEe0^d{cxy57ZZsKS2$}4G^SDk;szqCTG z*{dd|maS;p-qNT#_ndd-`Fg#I&@Ef@t~7w^o>I+Q_LG20I~-R{rsY&xPN(GwJ@}~f z8c}7{e{^-BIgq!yzymqY@;VjUb926oDcFIBI$sGZK^S>K+1D0@k-C4`tJEsN0wOb2 zLx858%YJ2HF;ZmDbq--Au}38iMH5fL=8lw`~2L_}a9n-N?Os5A%iR0s2geYfOeEi&QzJLt$Il01Skgp_wfY@6%Z z1!LWfVHZ{n^>FM`keK$v=&()Tm-c3MTL>dK3_b@5^|$u$Q9RMfmyI0h!nYw>*br~P zG9_z+1u!C)vYHJOHlTk!*ENpKW8^l`3;OI~2btJ?k8+iKXLJG0Vr}_4PN8VElLUn) z4TCR{l}b_j+L5oF9bE~Q1LPi~&D3EutgQK`M_*lsN^*+P>Ii9K#Ujy7q7`zTtVi1M z8Vx^?x^R94+Ew$PRI0AD12x>OClMfxhYL-*y?I4NH z`(=D-kd!1ALk|ob;MnC{nLI}GM=1b(r03Vj;!v!Q2R8A(m^Ul9rQeA6I0;l^tl`k- zg9fL7=9ax}ykoSUCEPf(<&dA;<%du9Z<7 zMJuD={u)K>SdH;Y&&#+|B&FhQ1GWQYZ2rQ$gpu1Pv6Io}_;D?kwOHK|b9v60k;9kM zl#v8p=g$zKT(33G{+=eFGdpUmjlwXup4-OHz`lFc@a%Y-c_+t*m^@76lA+h?b?wqS zS!qR8bvhqVI3IYD$mhAWU4ak4W5Y^Af~NbqL%V&)(s?{M&^ti@x8W=!mt6?8%~Q;| z^{VXHLJg};(nva`-_ZuhJB>go``{u*NXCFGYVA7I&g)hSXvKvf!=~Jc4cIO{$0k*Z zu)F$pUWi&GzP9jHsLHEi8Xd6;0RBiw9iVx#$G6QEkx zx^A>5wN9+Pds)BC7XC~lb#5VfZ@tkLn!>@^=OyZ=2!sS80xJYo30x*{qs@|`{1Y|u zW~uhMo9((lj>`Bx`}X$zGP~Eb6yE)vscIze%QxD-`T;TDXj3Frf1yU+6xF`yrgmL0 z@7_&)c~kv+`+k{C{ey<<+EnsNerubeQ0YS&i8n>HkNaPn80VMr^wF-%4boi_*E9I8 zJq^%lYAFS*z?iYQMe3!FQ9@WXYngQmrW2`ZWW(`U63!1voC){l%G@TBkQnc16s)ks ze;31EVAG2C$^Hc&Q7jv@(tM@rvp&$%tE$%{dV}RB(Q>qtO>7#8t?o%|T8M4<>TOV1 zRJG09(iT~*9a~+chIa{&KUc33U}jcb25bZJX!sYtYlKjgS`bIbFj4iUhogRkR@&0* zC{=q)>Mg31SgUsc&Z!?0x=G+?1bA8oJU1oe*NFae0#y6_8Q&n*y)%-`eW4jiRzD$T zo{?(ws91wV$0OirY9JZj{Se?nQJgE9=hP2@7Z0kRpwMpePnIBkTilmaSE5Qa9B+FX zn+b}{i_Vrx9=|Y>iP6q;ob#}6O7a%+YU~8qYATXI#TK6AkS~GHMbT0@?%s2WerW>$SCY!rDE9W#O6hEJYOj?+e!`AY61!(=tW!=X^4h8#*?0fBvO>PwUUk OyZDI#)1@Hulm7!H6F@)! 
diff --git a/trail_detection_node/trail_detection_node/base_models/__pycache__/resnetv1b.cpython-37.pyc b/trail_detection_node/trail_detection_node/base_models/__pycache__/resnetv1b.cpython-37.pyc deleted file mode 100644 index cc7b1329892392085df974a496e69809b2460bf5..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 8293 zcmd5>OKjZ68Rqb{+|}x7S(Y5fkso<%66-BH2~sz45;t%YA$6QOSR`er9ZFhhm%AQP zvaM198OsiXE3ne-?xXCSU^Bi}0o)?z$Wt;c#BJbsWyq^#J)GFt`q47aJbW7tyzP{>g z8|6Go4)I}>45uXplpN+dO7yg(hd;`XaN`y$7x`m+l$*%)^2hlYw~*`OPw=DMMy{Wi z_(MF0+yH-)Kgah^wRh(FCA;SO?#{9!)6rr&15ZKMee6`WbhleK$MTc{e|$ z?t0UjQ`23y^D}qehO(=2*cwIk6mCAd5=G7Mxyec1xE|CR9-nC5j2fb{GSLvrll2Do zYvE*TUT5Yj(^F?F+@EMhEBDiMcJ9pV?CjDxzcP2fP3Nc1Og~$B#-E)(rL^vtx+XRsvEO39K=YZ+R(*|}D3eoE#um1NoZnF{H+^|R7U zcorF7D>JT@bjQ0x(eA%RJ01wQrIm9ZXr@+nuxoO%_ktHzD;H{w z${O~}J|LwnRGhSHx)R(Bq))Ti@090ItYbHDeZIsMv_j*d&O@R3q1wdJstarADwZ!=+Whby^yW!L&3l z`>(wvP2tzplkyoVpOFRHxuI8Y)_iG)k*IRN%Vwl7u zH0)qg_#4&6dg!($CksJC)ZLnQ(-(M!yz5qKUKqOWFSXC!om^?u{mIQ0ufZF^@}y{l zGw$_pG8C1`NO;wn%l*i&&^x*TpmX=aMKAQ-E@T1jfSnk@jA&{h2~F=~Mb^g(N)~ao z+2%->@+VqrR?(>eekKwv{!2*Vdk`_((qnBG*FClc9qmH%X->~_MwT;MxmAm6Yk6@A zEiBa*wPXF9rut~LG@xy^P_o4sMiNI!93$~CiQ^<_jZ3CDfs14}pbqW_Q8l_LjmCN; z9;YfwJV6z5OAT?|6P#*?v=$mO^#WeJKvE?K*UiJ-i;%V|ov+3 zUH96$S4*Gd{Tod$2x%7{p+S2tcu{3#0V<#2N&(_&D(+3yUh!)+*KN&^33d)9YqQ~^Dy^U(``sJ&Isp)#sf~O1JcWDsy)N84z^JnK_8H)%-AgUDi*5>LjJgi1fgS6uG60NUt&w zeab-eD+6IKsENZsC_^BIFx4xi0dbgKX_f>jU#UloQm(zd+B<6s4-zvlOJ-nJJVZ6s z_M=Uwa9(oM&J!o8;8d%$jec>O${vMisWsb-Y6|D@B-x0-;&6(@0z1kEfJueMPL+HC z8sA4}|0x=GTZkhxw(x7Wa;rIo)fHeh_eEHBIu`Q)R_AFk&XVXrtte5!lO&!Z(G6LJ zl~+(6QeRzgy%V%uYn6dkoS|+fNKBF-y(X%krkvUdgh(+%A{)M?Foi?fGjm9tIP6tB z>Fzju8FkzKS7Ffb;Tluln_!T*r>#m2;-Fx9Hu2CdRQ3omyw$%L*uWPvCrl2Qd>Tr% z@HNzkE#Yt$0PVmW9CCy!T(Utf8P9G-=B^c)t2WsJ^85+!-Lc~whf~H=jK7B~Y9toJ z`rI1C9Q=pRpjTy0fTsLi)VLU{rCo@6e2%C)0bZw$o4LKMa{ z%&cdJEJ$BkPNc#b(_lSvytqS@pg%UjiO;6bd&#okxut6Ksj&yrXZ?6)sAmIcoje;1 z<_@%M_k6Hj`^g|?J;Vo<-8qE(Fz=7q>fvDxP@%OSxxoRFBYWD*+PiOWInikB(7J3B zd;bk`W2;AFXZ0aIL{Y?!!L(o^t#eG(`IzeLKCJ3kdx%`J`{TSo_qW-#kFRR6eOni= z-gqM$>*aK;N6?EdPG_ygF_vl<99QFX(b~mP)vwQs~WlygrbtJI#U8Z*$Z6q)dX;G zIFopbEC!9BRt-`CctC<;qI8?K$--^RIh1-e@#x z;Cs2cccY1{>~ZVfnxC*RX)7`|onB|s>uh?RlY=jEuNhT0{1-QtTOD~D%c>*$H8oFV zSERmF!4QfVgtAZ#t3eofLB*FQaZlO5=2h#}U>V#^)V-Q3?dyJZc_k8Liew&Kl&VK= z;-B_1f{5l6(e$~@bKh^eVdU4#Lz}*6B>Wdq zM3sZCD78seZyCC7&-5LP7`_TYEU}N7_#1{ta#DHB7=?!_!cUFDTiNUwJFXu^uE>T} z{SmYoU?bow#4(4FHis^^!qCi*sU1+EtwcYH?|YD5LJI!|!L_X%LI}2IgvWqY4Ge&- zvSDq_5SJoYw@3%0=qN|fkpvtRN~CqS*`B_YTh_Lm7?xqp78|icgfnf2-eI3mpIbY* zJqA3UMLl~>7e6~tk621)y}roi4%{I)$Ts^vtWz>K*Z}Ru(-Ew3Y;5bW0>`)VT|HT< zm;4UeG40wXS2eta5j!6d@(SI&rG=Acu@>OC3Q3B3WUNBLNzn7nV_!Qzz8g zk;Zsf-SnRu|LzvRs9I+Yv5!uoRfjYDI^7Z(lD5}u`T>{4%j>{Z-G4=hhLAQKYTb*Z zCA?tSFBKBv^DSz1O0`luGCTl!<>|#`s=zu(Zo+Oap!kPKAx#Uu9ec?n?qitPXG7RI zN~$Pjfc6uJdB+}V!#4{~$TnC=Y6op11N)c+CeSwm6ADo%2VJE(Bg-LdP0NySgzDK@ zSwefL%t_1GZa!wa1&&r+#~!0+J*{WZrLmyQUq^rm?TJe$z($~uC80_gLrE5qsLRihLeiZ^u}mwTNv}f`(G0d1?w-Vmq7`L` z5>7g4$*pc9%KVUcC|D}UjlS!Ah!&K?qFW0J41&hXCSl-) zCUOWVq@b~ieev$dJJ1t3Z(n`;mUle6y1R8d9H_l59<~4Fu{16;UIMucUz` zQNzy)&r?CAQE#6AH4Q*3Vr!e@qT{4(;(1tWuNI!4XjcXTscLdIkx#nbY&4`pM`Z50 zsI|sJ7On(-A@bGO(yGFu z?bBWoMTl%V|AlhJd(@jpViBUX7HvPsQThqe0i7o2rC+2|;Zw&;B$i36kf@SaC9wtp zn_i=QokT#QL4qdGnv7BmE%|SezPgGLO`3Qw!~;{z?`c80!Im6df1Y9#qx#<|#=;za zNAvjVB1ReYz6Yro*`b&dsTEAVB0NCNkfHd)S5-6n4raE08bb6#JTS=onHD64OiMG# z2b~9~nH(nZDNW>VnE9;#{mBXSaVY&Tluf6W$RFGibf-%4-x9SKqV^W60c5%M)o9LJ;V$7VjU&Q>Pt(#pft&Em#oAE zY|zuTA5fLa4qPL;L*XwlK 
zu_EFIHOp2)+@xFL>G!b=boTXkq%V6J=-ZbFN!+;%gzEqEGLQ-R2hxFMpf4145I=lT zn4`JT$@l)J5LZeJ8tM4gFNz4|rGwWor4{KUpJ;>hBgWz6c0H=r!ihFnwN!zkIx^?F zp8AxrjBcg*{V>J2hyn4mlZvd zHqMEfU7vLnMhG6#c^=ck_n7k(dxUhB>i{1^y2JVmoh5&cIdp31iJ`X(J=vdApg-p5 I1E9M90`fKWO8@`> diff --git a/trail_detection_node/trail_detection_node/base_models/__pycache__/resnetv1b.cpython-39.pyc b/trail_detection_node/trail_detection_node/base_models/__pycache__/resnetv1b.cpython-39.pyc deleted file mode 100644 index 5996a2eb7ccda4dbdd2d28706da994d6f8e1b79d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 7945 zcmd5>TWlOx8J_#juJ4ZR*lC(NO?uh3w&Poph7fv5T}WH2N&>BxmSw!>*j{^gc5`Oz zBxYPt9cT-36%~mmgzO3-`JESdK|(z7!V^54r%DJ33*L~BNF~bmpP9YZshX;4S?B!o z&$-Wk{>%5DcwJo?4Zm0Z@+&s-u%`W$UYdUnUY=SDvBThc*kf#%jezTAkF)#OQE+|i z33fj_2Ckotu?N^FxI=t^9bYwWYvSLvui@IxAbXIVU?&ISi}8xrqr@x$c)L9viRT3@f6rJYEq?6XgoyrC_4Aud&&}cDS^1Te5o*A2-o?bk|3p4jRboSKPf&C+r%bSEXdp7DZW;h9pUunN7|*D=?8!EIWH z_%w=%5(tn`3-w6bGIoqz7*=SmSdkuKxyX#H$il+rb}G{Hc9e>YE%eMgp}CV{)-Lg` z>TghgW=BrspueN~r|bPQk&T`mm`}@hGL5!5zfa50?dj55D0;zF(TCTpAzfB1d10~Q zivgJFl$PzQ3cg;ftOfbn z=48gNh;qK<-Q)tFkk01|B`*l_`91BfYN%>7UaVJxw?i9$v2n!zo20nfJ>A3OpqQyj)- zNov6`h-P%_ox1apYh>{3)-A<3`o>U;evH>dS}awMEl=gc~1)wmU#X=}Q;gb}tHi{6oOMpJXtMw(Z&J$*jwh$C1@+(+boBFBh4 zKx7nT&k`r_kopar7=wh0;Z12))X2Hji0htUbc(?ch^AW*0{)1q;^Mh6 z*eZx*)Lxp@%=gM1YL>~9pw~*ocjoig*1S^ElIAz6o*$5+lC)*VGhSF&o`+UUF?A4P zlB&BB%_#6vDW7lJ>7!^Bv|Xjyr2GK|M;BXlaBAIMs=k1#mfzi7gh6)7j3yU>fT&yq zf?|t{aN!~|0*XPMlpSV`}n-2J!PG&2A)1`b#xAG-D%9l9vYV&jA5Pe5K$klAGpo14r5+Sot zha^UbYbaSmzsTXn{$iT^#k4p|J(UuU5~m!(gT$$GtLvJY`wZ&qCN}^*tC`&qy_a}0 zxn_BP3X^*SQ+`&H+o_oqnOx0Cof@~2QU|qyA^oT9oj$!UydJr(E_t>SV?fS#@ z`O}!+B#?~JXn~b!E1I&fbZsTnXTW#Fd)ZYH8@;+BSSPGGGooQ<9ouB>-AO%R1$#t; zrKecuHbwfL$hxhI(@A?5c>%QBsz+@&YMmA<{AJ=H;>T zpYjohzz?vVNM9Ko(4hGw`ypuj;a%;b_WG-vZa5sdB$X`#`}{4qk(DEnyK*0aem1ae z>Y6{E^f{{fyhnX@?pJ;6U8ExQ_hT$W@3-}9??ujSLp)Vq=VG$X1DL@O$Lb?Sv5p$! zA5&`_S1a79w?9~G$DDVRhZ~&V{{a+g|0xjV;Py%Xn!MiC{S;LY*30VR5qcQ0(;|K! 
zx~65#CRjtYMGcdPb8%k^Kx#fjPtVU&R3&={AhQT*$WFiFmx@031fp*Cgg8n4m5Wt7 zsk>O2xx%llQE>K3vAb2CwBd7UR+)auyYX_RQUZ8OmAxBPu(Bgx_EvcesidQj)nxLV zN}kinb4K<(&Ae(@T<1@(FV!aU)|b>o&WmcFLiu!Kv4AC12oGeY7!>^=^!x&s76D1w zv+5PgMSls{Oq2o9q;s7YmzG08K3%2(EU9~#S70=bCd%cJi7RB1O~;_bULd@k>@ScV z{#rSPgg})kg&H}NX~uaqAB4Ot`!~3##9$VwL6HGisAH3Q`JfQ<9jq5z0wGifEQh}V z1QxeO|Knzc5o$USaEA3GXgR7MGme1k)CUYge?us};2qU#0Cc1OaxJ&42PCBd1yZHj zk1$CG%CjiJKS7wbnLkfwM&3*y3sQb5;%Ui?)7*C7SznhYoWQ*MJnZ z^f%#`;=RH8wGdu7GPexa|FO+<%RIK4BfX7Lx_0gN_yRL>-=!=i(~K-&QOZib1}S8% zMtUG{m7%G_Q^q8yedFj?FO07Ft3I-h(OS|l8WcD9xzX=!!W30C7~cM zLD*I4(FPcJ$vBz7sUw-7w1POCSi+fg4%ui@7pD-^&#BkND2D3Xq)y*SNBT~NVH7i< zY1GzHYs0CU^U6`~e}rGQvmN0)kIjj15FvXLFA#Z=$UKO&){Fc)@0(4dErQ2ih=AGZDZMvqUJvP%bIPvUE}kT#2*9 zHI{7vB$D@QC^0~aQ;H}0hZSy_D^Q%M{RsS1AuA8a&k)MTwVM9EzGxJ2M!(eGRmq>o zDf@;sk7z3OGj{AQK?9m zZpZR#q9l{mz#`xvdW2l9EbSuvKCT$nstk3qM-6DS?^!Z~YX-U<@DVDURb;SBfwa{U zIk!?`O&_p|y~fES$>`T80YxMYZr*}fx_FYV32JQ-=@)<4$l56*Q?7BJE_mbD&i0*s z=ED|CG^vt^G6XtYQ3Ck`F=de2sm7P32c4?$sqZ=wHAh3fB(UG194udI{gUM?$eD_+ z_Kkf0;`|e>(=5*Tw)r4&){I1D-xV5HC31}j-8YJWNJwOj$U%aqVDU$kg9T5mU$Wp^ z6kFk6rRUx4JNx`kLSl*dB#tNUD!%DI=qN-)oevT)+2Ws34i+%A{(lH~-^Et;PlCjr zqo;jmpTB_rMvEy`uL(H*zmt0tFu~{#P!1L_wchD@V`5zWe@b$-+X9(L$Enu~G{+^7 z=DdpQ(?X!drABB2Bv}kV3F*E8DCDyAHWJkcx2*(hmEZU_ zroE~HF>bMYmN+sU6#(;##S&Lug)Zntr4rIjy}CFrgnOwt29kNpjyMJe_zW&?p+=ds zwAE)xi=y0~Ev{3;kBHnLa+8SatlX_4%5P`T@DCIPTQs0g3pn$rE~x&C6wW4Xupu0= zMek(V;)ft};w@s|A+k-R?JMXKi10-FS5W;I`4!)%y|sS@+0Uo?3bMNoQQE(P>gkHS z45Wf9?Q=9AxxW1mK(1!>xt4#%oOl!U+4kp%_z9||8~=BrpWLDk#;z diff --git a/trail_detection_node/trail_detection_node/base_models/__pycache__/vgg.cpython-310.pyc b/trail_detection_node/trail_detection_node/base_models/__pycache__/vgg.cpython-310.pyc deleted file mode 100644 index 60bfbec7ece515c2213c7247625275dee4c7797c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 5493 zcmc&&&2JmW72nxiEJ7c%BbFHu#`0moks6a+bS%hz~zzU&%HO-h+)$~NT5W1dW%S3ZgdIY$Ln!tLGabSV5wvWn$11JOEGEfvd~tG; zN2_5y^7+K_gCvsG#feDPCL0kK^>|WGFkY$f;#t31npjR2U%X#&yn12g?enD>e}DaE z#*3v3rRk!-u)lt#abB%dN}>cypl9iY?Zcvs&zwCwU7RlNvnwt2xL@JZ=gy1s`fE{pc;TZMM@G zW1wZpi!Ea_Pcv`>d4tzY&AH~5UJ=qKFe(Iwp|3lrQgL%xNNt14cwP_&iRY=zg7A~3 z6tObHP#v7*{&Etmh}o6eyHQl1<|^&4)Ruv&+#PYRDZ(W1>&m$mgu<7~eox$bU!||g zXgO*oDp#%haU3iJLaIZ01_%dh!dn$VZ83>mTiLOwFUTQirv}1i!_z%5dtU3aE%%hE ztk+?A-c#dCIk^}$#N^tdAMq%xO};B+9EE<}3!_BDlZo_$y2nK#s!0%qlm4oRAwM3A zh7WzKKxz$W?h|)>JMAZ{krds$ulTX>p49-926+G~j^T|@032Wr%QKthSO!=ZI{&t_ zpnkQMN0FN7E@_pbr27y~Vfqf<_#6OShJa|Zbp*mD+s4?Y*eKkJ{z5_8FVWXe&> zgD_6~uqq_-$WYEZQMiJ%uqj^E&|N=h1Yr$w}J<#Zxg*_FVLRkj-G&WWc6uJ}oH zaSmeBT#nL6>Fqu}pEdUQOdf?8)v5S6Kpp`&3V#hV^W!|=2-+jMW&d++Oa~yTv?t+A zIe@tdTKSeor6_TuY2AO?MNRP_r z8)l7NfZRO?G_z$vhEa9iu-6f5a;jyvQi-t%I*O;Uq$~CYTd=54nCo!6)S5}umc4H2 z(NZn*Gb)(bxh<;NirwC#U`LD$+!Znub&qmNIdLKbE~Kk@@)C)rmwnFR^Ldx4bPz7r z{i<-CPIP2%`}aF_Q@#aRIZfa&fkObUqpa#eP30;S3p!?$Q}-XBnyG;x^s0+~h^b@6 zR+NL#L7eSabe+Zn4gaq2I(?yTl*dEP2 zyuY#gPS6{1?<_qAHZnG=*cHXDDt2vG&7^RmXeZe(E(5f1DB-A;+`Oa@1_|opG2;^IoFKUch%Tmzp7z$oEQ^79oUM$>PD9p>Ic&h+bQ-*QD-yRXN<6Yq> zEZl7PH8Cd=J=S&k8cdP52#|$OJ4vo%OwNL!hUiS~p;RWq<3Tl1S*@qH7DXzfc};$QCox-D&( zdz?_%;RI)w6VhwHdod>zR)b`*pfkMCsqn#?cG1i6&iqp*ufXWeY4~S^eL!IMaX@o@ z(0l87yt|%snC-is@?CIy0a<)*Kwfzn0ePEj)AxYBrV=+Kv2s_ozj?oah}(7bH=6ef zT}^h+%U_a3RBU8l>xui2BtGiO_P2hQb7!9G+^a9cxpSnlzRmPCkGP+bn079;`kS}Q zx!-8sD|9Z|z3p5&R`qu-aeqq^+PT!~Z~ZRkmY(a}YcIpOKPA0$1o}FcxKBtzJC|Dh z&D-VN=bCr=+G+O{I+yI;b}s!x(cgOF{+T4SbE!pV_y#s}&vWf9s?)Lru=Q6u%>4?O z%jAdpbKO0rQu@y=mFfJ)6^5My>fuhanFRHC;@JkB(gb-2y;a)te0|=3pIXjseO1s$ 
z`e(a+=l>V2f-b(g2I)-I0ju4rtvgR+q@*A6P+38vCVx%@3zxDTJCfd;^zk-x2|D3s y@&lM;#I%oPaPb*o8Pjx3w)5xwYb2k0(>dtiK91jia+%!V&R=GbF1Waz|M!2+pF}SJ diff --git a/trail_detection_node/trail_detection_node/base_models/__pycache__/vgg.cpython-39.pyc b/trail_detection_node/trail_detection_node/base_models/__pycache__/vgg.cpython-39.pyc deleted file mode 100644 index 98225cbc36f655be44cd3ec2a071e38164b0bf57..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 5965 zcmc&&&2JmW6`$E%E|(v&WLf@dT!(ej*xQ<-B-(Ol7^&p=BW`WgNR+fCS}c~FA+-{@ zOV2K)n4y3cHkw`(};dPTnLe%f=oOHpaD8>w#WQF`JFEiHEeDhL(pP zQ=pTI9#M4aF=a;|8RZQ7K0C&aL%l5f0XxA?0yoCavv09CfIGzU>=gSpaN}&6on~(W zH^B-l$G!vHVK&3gujPqQFgXR%g6WxJH*ay(eiPA zoF98Y%O@E9kl1rOFnh9Or=zeqfaR{LIW;ajtm4rfBNCTVZn- zF7k^98x19DkR*|yPYDBwiNr#Jo>Yaj z=fOJogwsGcDN{zu8cTgfY82h)VXCa73q2%d1KkMTiO+~Z$|is^Ei`Df^A8w%xw_jC zc7}Ww5-Jr&Kq3;+E_tABYd!GNF7S>X8QTd)d&rx>Cm~r(V|q^mF2#%h05u$v$)vkpZviXQAM$%`?nj>6kmhaA=dO^(2mJO& zGI>n|t)LyrbhY7zp;z;`kVn)AP}%EnXM=n7l_<0gX@tB{6XT$r%=qo5qgue|S*1_E z-V-WQPAqbqC*zEBMW-}U*58*=BV|3g?1hd{;Skit_y2t7ninxzI!Qwz90 z{g;_CXeSHdX?oY7Rm8{cgFFKCJxJjNAh1{fg&y4m2yD|nN4K>u-PREp43<{Oj9nQl zs=g#BwB7kHJa$6aPE109G}b-70Tu-d3slwC1Z*cn(oe)`c*o>TEJ@Y^vEd4K7BZpQ zd|(P`Iti(hLG}CcDh{;!p!VoWO8Y+ogWC7VHS)_xn-suJBW+cGpo#b4ZLB7`bk$@q z1Ez232>8_`BW)_aMCrDr@JwJ?Gzsd!85s~J5fxiw;VIW%N_?qiAJ--ex6iq;*G-LcqqvE-tOjQHbIC5m> zsvA{TmY~`k6I0kzvfpN~X3c$TCQd*#j8Ne*AOygdrBiCnvUCDgE(@u1TtN?&+2?4Q z$UviIJBhCDyG;(Ro^}x8Ffrh>j&rZ=He!jwx_Ap3s6pje-}kD#(Quq0z0ef|0!l>@ z)GI`QSoF|@l~iRn_TV3d7>yKyegjOMY-)^d>Rm*UziD(0rmbmWx~oNG6{9c2-z}|9 zFG0O~St!%GI@F;UkGG6Xz?i6XwQeFJ+fWYSlB}v2y+vy}w(+N8>27IV!_uy?sjJ=+ zUF~xWlc}X0jMI|d*+Gy4HZp5m_`KnuFG(|ugvYqBl}ud0s>zni7_9ZOO=Z&aTMf6$ zZ8Jti>W=$K94*Cps4M1>97S>jh;2%}TC2-+1u>woBF%<-A7Yryc)n9zaeWv%lyqI1 z02#p7kja(%W}5CAcj7jvO&<~l#3KuS$Fxb{PHR&TnZ!*Ta%^V-Ug9#4onH+&*d>Vg z(bFuWw4`Sn& z^m7WhB;ra}+g8Vd$YjPz(t|w!VyA{%dFR*Q+_~A=yju61+MROe03bE5-WF;v%bi&~ zW2bZQEQ)$~S#g+a366WB&spwV1@>rcrx}t!w4VT^Pf;Zqy#Uvs_OllNZ%13&zC~CV zS_E^?A|yM%f3Zc#ZFtd2PI-7Pj_h7XEzzr4o#iJ~T!l%;=ij@?eS~E1F+fROC?B%t zso0))d>w7i1&}`5p8p4+i_Zo0)mH)NJ20R4<}lJ!cx>|_4VkJBJ(P?#)k}xoH=64; zLJ#e@EPf6&zH^PVCWgjOAq`ox4?L8Nw&nszUmEb{p9{QeuL5{WFrWCgI?_~lUivMh zAyf5%hmz5zdg;LXMsvMJ;GrG+z*G19k=8`%@Ag`=4?L8Nw&nszUmEaA&jsG~R{^}A zz+JQj_G<(l+OZEj{I(=US`(#zhBRc&KJZY2 zx3VT2&>Y7$c5v?%B_KP0#RK{;fU~LiSiNr8bj1IpOsG$n(u%)e`hI*FIT~l%k=F>P zpB}~%sz7`Iy~?EHxawwyXDeymQI|q>eS3Pci@&ZZ)o{a$<2y#6_`@1PLVaBm7qO3o zaDC=W-D}px&#*v;i}%p!L!Cp^RmQpkI>8n^gJ*a3_uVBNePPNq{fa2krXAzi5*dt;KZ>|vPj3uSWa^=w*d}0 zm;rA$_&^ODN1#iw61d{5vd98RrBcB@i!8IqZ%CzjnQVeZHsMZ{nEAfb-O~U#C`s8R zgPJq9&wccLob#Q>#b9EhY~b^QfBsuF@f(KmZ+tlVaq#dKzTtm@C_|Y&qm}%c>sHG$ zld`>TS%zt>JFS9gtQpGsYqRBk+fcT0o*2prth>b>yXApXP%b!b$|-?UR3129$|-|W zQe|+;DW?KXMODG6rkpA`6RHMIE#*vrGpXv})Kg9koGH}+r;&0d)dh7%%|0<(b(GGk zIh3YQI;ZAQY6R2j{5?xQ3>rH|YetzjvDTYL^Wq`aZ7es7vHfnp6IYgQT#urlA9V-) zxcp%!*kI|6YkB$2%W-uri2O%sT|9a8$wzhV7^B_c3yyxc<@|(~Kmwy>Dx+no$|uIE zDce_S*~zB0oMfw7g-;B}Xt|hfHJ-c~Y_xTIr5D^7^dDSNhc-qzG(nb@J05^k6;lz~YVmNnnsv|@Kn4>mW#*jeeeLtO#$z|<40E60U!b0g5Ygs0f^{cgV- z`F>ov7wCS_^TX~|5PPcIYs=PK{eJ8w3ycdLEc9}0t@Pv4#-Q7e9(Kc^>BUYM^j70a zx9@lEwEGw&jH`pqX!N9~_$oJybXNtsfd`pNT<#C_y5DO*4)lQ;mwdm|Ylorl9~hrK zT)Z<_4;HuXvAqzFr#b3j+Oz&CQ znlrcNPMrCrZ2LMEnr9!D3;5o^H+&srXzUp{O5-lS*1o-s-#vS1?we9_MkO4$@SqR} z=5o`GE34f{{zhA=ZhuX-G1~_ zw=SNGk+DZvb|Ja=t~&eHwm0-3r}nalojD?yez3aA zBYzOIO6cK7gQ(qW68=FyhVpsI9;05RL4XYqCrYC zTR{i>LS*k1m9w9KSZT-DE)UJ^%Fxrx!=nEBu)J4>et7#PKoRArB4ZW=2wmmwx>-#! 
zvOHPam8hBsd*^Y81MF&@hZa#ke$GKa=gk8G|x2d-$L(C$ModtPWDVz z|94|rQ%MR#N8VC+e%Ad9!;=1|)B25_A9j-CV5cXB<)3pmYXbSjM3t6w4aap$Bos)0 z0T0bdJ&7OvDhbg{69x1P2oU>TcLRtF3NUv*2)_IM*wI06Gj@92FpAyw#s>5#u@ME` z?ym)L`DSo`lLooni)|fjG$#^uJ;zb!WmIrOP0dPBinE9(!>Fwz|6w<}#n-*8Qy$C=bn_VrsFak1}KQ$=)7D&yio3m!sbS-?Lw*0G_^JWdd zbtp5-^_0h_xzSS%rLCIC{dnl6Hf7NL^bI^D&<#+w2;a)sH1#xicp|p#<8RyULmjql zAfpX5#L?e0^h@Y^V9T=fBFg$TGMpUXVjC7RsXC6?0u;Z4)=!B}MC*p>SRR(MHIYm_ z#p$M-L(%fBTT2J7Zt_jU0sS=+Ids9DU*}`~o*ZZ>9K>5O4xV9|mRWmNe_4d)RH%7+ z1wAsTQ6{`3=zQSZEv(uCIgTnkDYc+ss`$ic6$Nq#`<~|Ze)ht91Jzya25RAYue;XA zZ|2^W7w|d!cc9MU0$LukM1&{8l0eeGj)5dVk)>ypwG9Z`E+~5&zR`9Oss>d=%}Y{a zr!_8WCQ(yF%_X#T@P+k49}hKFWPfZ%SPDsWbk z)+W5=658gi%V<3ztpQW?r7;%d1fxyLXcOSpRDHy)s;S)~l^guJYM`xy)>ZkN7C&$@ zsslK^XiCkfGdmU_Z)&?V^!6I)(ZC)HuERiT#z>BMs4P$&BZa}oJX6B zGR_6;;6-K24!(f$i|QhD3cBSl5Zn%4Sk~Xhk?UV2p*?C^xy&C@yfX${9MV{VA!SOM zpka!Phi|dPPe7WE{yKgVoFYVNgk#1bJaIVCI`B%ujA}|oUEA$gCr@vRy->dp6yIdoo zkE@B6adEw?R(peoaZx&5c`Y>|j2y0V%&W=xue1DmQohFWwRq~fY9q*d5L|z-_Woed zyQ1P^`@z}c>G^4h!M$!xW5xQhFY~yX z)`0m4(M~$IjClr=R`A<|?PsA+YbI|yVutz$T6z%)&ntW+7~ftTJlDv(x{a8fTNeTgHxB5^4jW%H!&*Z zwY3p%a#YG|>nYFNn;M#X4TaH_g-C;|ozCijC5Sn&8OGr=d=6$r^7Z$4j1+qP0m)60 zTO{8n;bA7n{UJ+#K++=lA<0K1KO*@YYW6s+3t75)S^$PSmXb&4t;nVMuERhhvPdDWM_5a6^J9{+R&l&T~ zkf&x}47`}Q$<$y!Z{q9Gl$iL&2@^k^_84d4e*qI;`@d)6H9g(TY>DKE)A~LoXB4g+ zQPz=}I-;=>;E3AN1Rc@c69!6bgO&F{a#+FeDXi!oYjXh00|%bf-{X4}Ag+n- zp9&Bm9kWr`;2jAS>@-wxM%jr{?7mH}<9eE&=r9SDeRzBf2W))9?~Fl^G&=!D{X}*` zB9$~dQTW{KM3JEb5?bC6e&4WoBs+n!WG6gK961jpzKUsff~^WeFNxO2^g#mbbNVE+ zeloVNrQq7j+mft8^0qWL&_8!tA7&IV!!do3PpA%y5*SQKKEXSlPoP7aq`jCUZ`Kn3 zazL&qYObTBC^vT{6AzJQPP{M-Xh7BVu6oe z0r>_dqTfa#O)*^0)CeI*g^fs-Z)q7b@1H9b_w_ti18-l_e)ShLd8ew>(_<>-ib^3^ zMf8meAyq*1ovTz)(=}Y~a8=odT19OVO(Mn}&+|n#>=4qrd$^d$^Lx9FWTF^k+H;0v z-6ojyl~&Ar@$$k-THQY%74Le(f=F0$N#QIf@f-z8cBENOq>9r^q)LEWy#x3H(Nv0(8 zsnbkP(@fs?GU~?(#(R7p2PvCd6}}~d*O%CVqaNv%ivxSSb4(mOIl~V4$37HRe)N41 zY>bVcJxu~K>MH;D*343Vi#YyhscHP)z&E7Zfgr-zH$HNKxjivDeOtuG{Z|d1RhHmHxYGL-(~qWNtZ-c9-$PM zf=3(ezVh24k_WQrSI9Q|vDt}Dzh$?h=!lpsus2cXhg?0)VIA?|EFy#&qQs-?{$j$$ z1Nz=EVJnFob-+LN$%HLrQgLL?%>Z`*tJV<&Dx?SmRC167#7?+zDT`dL%bq+Zq9d#R zR~Y+w5rrTk2ag>WLF64Rl1|uv5k_PiQ(-Jd9@==Sgt~HaJqnJ;%k2@T3{HjVx)Dbb zbn-;OpMZGR6p#+(NtEk~p($&oMAU1`iCO#(CYtA`vSib12HSt8;jL!O#m=Mi^urJ6D8HBH{qXOll(yp-HZCzsI2E|9siL~BaWQ)V9{ z{69w*cdvhA{+u@r$QDu!dic+fZHNCsuxBHCiVG*0UK0pyK@o6SG_L z4+?R4LkGN=M@C=&2|Kv=aQD0x#swKl203{mREm$WK~Kvl@cxEAnqzA6Pu&u|gW7Ci z`UxIdjcT!3V;Kkl= u;qY-O{w*xU2wws53?htYHSETP#-+weW3AC_Of(wh7t3DR^E{-up8XF@yM9#w diff --git a/trail_detection_node/trail_detection_node/base_models/__pycache__/xception.cpython-39.pyc b/trail_detection_node/trail_detection_node/base_models/__pycache__/xception.cpython-39.pyc deleted file mode 100644 index 85ec7e51a85d7127e84efe3e6a46a28b659b8906..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 10103 zcmeHN>u(&_b)VPH&OW##mlQ=&4>PgjcH3ICD9KLUCN(9QX;GVXETu^|b|@CW@D|EPwaO(&7`z?eI<&5PSoml zlH&K8!4^|*Tpgz0yp)tTg2;cA6M#~6(!Uod**vWM^@5`j@Sl~K1;=_kgz zDN|Rd+v$YWophS&xt|!0QFqbZax!@{*lOs;?RIdz+j+38j%>7YWP+@&HNC89{xQvU zXMXrSkjRM4*x0r9tpluMY{!Og$2m|Z%H4Hi6F)n4lnI_0yUN=0ke841vAK(SzL%A< zl#N_x&jGKX40I~DruqJwmAD(ayS)`A&h1ts)Fm(vO+CT9Vv-BDw*svTcuG9qZ*^Lc z?b5%3!&Vs7yu=BE_Igrk zb^PX?MhA_ANx8cn4W9HgU*(38ZmB?5@gQAEik+_B^xKWcfj%^og6}umjWG26L*uiD zOLw}P!P3s1Mptz^8%ys8I_!2DZNJlvf^aF)jaJ)NK@>FE%caJ{AVm4s!eFxjk!d2a zu?gXkF}$|*IO^)=ouy`12TPk>g$^#=ZiIoKrTY?eaqF?3Lhm>;ArppKF%i{ef8UPH15;AYAcX}N9u(rx zT&uZBX}$Hx-)bn;>TJj~rqg!{jp{j)(;$bIp2uHeKGH9dCuEVF{CZbEY-lxuhfuJ` 
diff --git a/trail_detection_node/trail_detection_node/base_models/resnetv1b.py b/trail_detection_node/trail_detection_node/base_models/resnetv1b.py
deleted file mode 100644
index 21d67b7a..00000000
--- a/trail_detection_node/trail_detection_node/base_models/resnetv1b.py
+++ /dev/null
@@ -1,264 +0,0 @@
-import torch
-import torch.nn as nn
-import torch.utils.model_zoo as model_zoo
-
-__all__ = ['ResNetV1b', 'resnet18_v1b', 'resnet34_v1b', 'resnet50_v1b',
-           'resnet101_v1b', 'resnet152_v1b', 'resnet152_v1s', 'resnet101_v1s', 'resnet50_v1s']
-
-model_urls = {
-    'resnet18': 'https://download.pytorch.org/models/resnet18-5c106cde.pth',
-    'resnet34': 'https://download.pytorch.org/models/resnet34-333f7ec4.pth',
-    'resnet50': 'https://download.pytorch.org/models/resnet50-19c8e357.pth',
-    'resnet101': 'https://download.pytorch.org/models/resnet101-5d3b4d8f.pth',
-    'resnet152': 'https://download.pytorch.org/models/resnet152-b121ed2d.pth',
-}
-
-
-class BasicBlockV1b(nn.Module):
-    expansion = 1
-
-    def __init__(self, inplanes, planes, stride=1, dilation=1, downsample=None,
-                 previous_dilation=1, norm_layer=nn.BatchNorm2d):
-        super(BasicBlockV1b, self).__init__()
-        self.conv1 = nn.Conv2d(inplanes, planes, 3, stride,
-                               dilation, dilation, bias=False)
-        self.bn1 = norm_layer(planes)
-        self.relu = nn.ReLU(True)
-        self.conv2 = nn.Conv2d(planes, planes, 3, 1, previous_dilation,
-                               dilation=previous_dilation, bias=False)
-        self.bn2 = norm_layer(planes)
-        self.downsample = downsample
-        self.stride = stride
-
-    def forward(self, x):
-        identity = x
-
-        out = self.conv1(x)
-        out = self.bn1(out)
-        out = self.relu(out)
-
-        out = self.conv2(out)
-        out = self.bn2(out)
-
-        if self.downsample is not None:
-            identity = self.downsample(x)
-
-        out += identity
-        out = self.relu(out)
-
-        return out
-
-
-class BottleneckV1b(nn.Module):
-    expansion = 4
-
-    def __init__(self, inplanes, planes, stride=1, dilation=1, downsample=None,
-                 previous_dilation=1, norm_layer=nn.BatchNorm2d):
-        super(BottleneckV1b, self).__init__()
-        self.conv1 = nn.Conv2d(inplanes, planes, 1, bias=False)
-        self.bn1 = norm_layer(planes)
-        self.conv2 = nn.Conv2d(planes, planes, 3, stride,
-                               dilation, dilation, bias=False)
-        self.bn2 = norm_layer(planes)
-        self.conv3 = nn.Conv2d(planes, planes * self.expansion, 1, bias=False)
-        self.bn3 = norm_layer(planes * self.expansion)
-        self.relu = nn.ReLU(True)
-        self.downsample = downsample
-        self.stride = stride
-
-    def forward(self, x):
-        identity = x
-
-        out = self.conv1(x)
-        out = self.bn1(out)
-        out = self.relu(out)
-
-        out = self.conv2(out)
-        out = self.bn2(out)
-        out = self.relu(out)
-
-        out = self.conv3(out)
-        out = self.bn3(out)
-
-        if self.downsample is not None:
-            identity = self.downsample(x)
-
-        out += identity
-        out = self.relu(out)
-
-        return out
-
-
-class ResNetV1b(nn.Module):
-
-    def __init__(self, block, layers, num_classes=1000, dilated=True, deep_stem=False,
-                 zero_init_residual=False, norm_layer=nn.BatchNorm2d):
-        self.inplanes = 128 if deep_stem else 64
-        super(ResNetV1b, self).__init__()
-        if deep_stem:
-            self.conv1 = nn.Sequential(
-                nn.Conv2d(3, 64, 3, 2, 1, bias=False),
-                norm_layer(64),
-                nn.ReLU(True),
-                nn.Conv2d(64, 64, 3, 1, 1, bias=False),
-                norm_layer(64),
-                nn.ReLU(True),
-                nn.Conv2d(64, 128, 3, 1, 1, bias=False)
-            )
-        else:
-            self.conv1 = nn.Conv2d(3, 64, 7, 2, 3, bias=False)
-        self.bn1 = norm_layer(self.inplanes)
-        self.relu = nn.ReLU(True)
-        self.maxpool = nn.MaxPool2d(3, 2, 1)
-        self.layer1 = self._make_layer(block, 64, layers[0], norm_layer=norm_layer)
-        self.layer2 = self._make_layer(block, 128, layers[1], stride=2, norm_layer=norm_layer)
-        if dilated:
-            self.layer3 = self._make_layer(block, 256, layers[2], stride=1, dilation=2, norm_layer=norm_layer)
-            self.layer4 = self._make_layer(block, 512, layers[3], stride=1, dilation=4, norm_layer=norm_layer)
-        else:
-            self.layer3 = self._make_layer(block, 256, layers[2], stride=2, norm_layer=norm_layer)
-            self.layer4 = self._make_layer(block, 512, layers[3], stride=2, norm_layer=norm_layer)
-        self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
-        self.fc = nn.Linear(512 * block.expansion, num_classes)
-
-        for m in self.modules():
-            if isinstance(m, nn.Conv2d):
-                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
-            elif isinstance(m, nn.BatchNorm2d):
-                nn.init.constant_(m.weight, 1)
-                nn.init.constant_(m.bias, 0)
-
-        if zero_init_residual:
-            for m in self.modules():
-                if isinstance(m, BottleneckV1b):
-                    nn.init.constant_(m.bn3.weight, 0)
-                elif isinstance(m, BasicBlockV1b):
-                    nn.init.constant_(m.bn2.weight, 0)
-
-    def _make_layer(self, block, planes, blocks, stride=1, dilation=1, norm_layer=nn.BatchNorm2d):
-        downsample = None
-        if stride != 1 or self.inplanes != planes * block.expansion:
-            downsample = nn.Sequential(
-                nn.Conv2d(self.inplanes, planes * block.expansion, 1, stride, bias=False),
-                norm_layer(planes * block.expansion),
-            )
-
-        layers = []
-        if dilation in (1, 2):
-            layers.append(block(self.inplanes, planes, stride, dilation=1, downsample=downsample,
-                                previous_dilation=dilation, norm_layer=norm_layer))
-        elif dilation == 4:
-            layers.append(block(self.inplanes, planes, stride, dilation=2, downsample=downsample,
-                                previous_dilation=dilation, norm_layer=norm_layer))
-        else:
-            raise RuntimeError("=> unknown dilation size: {}".format(dilation))
-        self.inplanes = planes * block.expansion
-        for _ in range(1, blocks):
-            layers.append(block(self.inplanes, planes, dilation=dilation,
-                                previous_dilation=dilation, norm_layer=norm_layer))
-
-        return nn.Sequential(*layers)
-
-    def forward(self, x):
-        x = self.conv1(x)
-        x = self.bn1(x)
-        x = self.relu(x)
-        x = self.maxpool(x)
-
-        x = self.layer1(x)
-        x = self.layer2(x)
-        x = self.layer3(x)
-        x = self.layer4(x)
-
-        x = self.avgpool(x)
-        x = x.view(x.size(0), -1)
-        x = self.fc(x)
-
-        return x
-
-
-def resnet18_v1b(pretrained=False, **kwargs):
-    model = ResNetV1b(BasicBlockV1b, [2, 2, 2, 2], **kwargs)
-    if pretrained:
-        old_dict = model_zoo.load_url(model_urls['resnet18'])
-        model_dict = model.state_dict()
-        old_dict = {k: v for k, v in old_dict.items() if (k in model_dict)}
-        model_dict.update(old_dict)
-        model.load_state_dict(model_dict)
-    return model
-
-
-def resnet34_v1b(pretrained=False, **kwargs):
-    model = ResNetV1b(BasicBlockV1b, [3, 4, 6, 3], **kwargs)
-    if pretrained:
-        old_dict = model_zoo.load_url(model_urls['resnet34'])
-        model_dict = model.state_dict()
-        old_dict = {k: v for k, v in old_dict.items() if (k in model_dict)}
-        model_dict.update(old_dict)
-        model.load_state_dict(model_dict)
-    return model
-
-
-def resnet50_v1b(pretrained=False, **kwargs):
-    model = ResNetV1b(BottleneckV1b, [3, 4, 6, 3], **kwargs)
-    if pretrained:
-        old_dict = model_zoo.load_url(model_urls['resnet50'])
-        model_dict = model.state_dict()
-        old_dict = {k: v for k, v in old_dict.items() if (k in model_dict)}
-        model_dict.update(old_dict)
-        model.load_state_dict(model_dict)
-    return model
-
-
-def resnet101_v1b(pretrained=False, **kwargs):
-    model = ResNetV1b(BottleneckV1b, [3, 4, 23, 3], **kwargs)
-    if pretrained:
-        old_dict = model_zoo.load_url(model_urls['resnet101'])
-        model_dict = model.state_dict()
-        old_dict = {k: v for k, v in old_dict.items() if (k in model_dict)}
-        model_dict.update(old_dict)
-        model.load_state_dict(model_dict)
-    return model
-
-
-def resnet152_v1b(pretrained=False, **kwargs):
-    model = ResNetV1b(BottleneckV1b, [3, 8, 36, 3], **kwargs)
-    if pretrained:
-        old_dict = model_zoo.load_url(model_urls['resnet152'])
-        model_dict = model.state_dict()
-        old_dict = {k: v for k, v in old_dict.items() if (k in model_dict)}
-        model_dict.update(old_dict)
-        model.load_state_dict(model_dict)
-    return model
-
-
-def resnet50_v1s(pretrained=False, root='~/.torch/models', **kwargs):
-    model = ResNetV1b(BottleneckV1b, [3, 4, 6, 3], deep_stem=True, **kwargs)
-    if pretrained:
-        from ..model_store import get_resnet_file
-        model.load_state_dict(torch.load(get_resnet_file('resnet50', root=root)), strict=False)
-    return model
-
-
-def resnet101_v1s(pretrained=False, root='~/.torch/models', **kwargs):
-    model = ResNetV1b(BottleneckV1b, [3, 4, 23, 3], deep_stem=True, **kwargs)
-    if pretrained:
-        from ..model_store import get_resnet_file
-        model.load_state_dict(torch.load(get_resnet_file('resnet101', root=root)), strict=False)
-    return model
-
-
-def resnet152_v1s(pretrained=False, root='~/.torch/models', **kwargs):
-    model = ResNetV1b(BottleneckV1b, [3, 8, 36, 3], deep_stem=True, **kwargs)
-    if pretrained:
-        from ..model_store import get_resnet_file
-        model.load_state_dict(torch.load(get_resnet_file('resnet152', root=root)), strict=False)
-    return model
-
-
-if __name__ == '__main__':
-    import torch
-
-    img = torch.randn(4, 3, 224, 224)
-    model = resnet50_v1b(True)
-    output = model(img)

From b3034b06ec99b6ae198b4bf28d3767b141bbb1c8 Mon Sep 17 00:00:00 2001
From: Frank_ZhaodongJiang <69261064+FrankZhaodong@users.noreply.github.com>
Date: Thu, 7 Sep 2023 20:15:23 -0400
Subject: [PATCH 06/22] Delete trail_detection_node/trail_detection_node/GANav_visualizer.py

---
 .../trail_detection_node/GANav_visualizer.py  | 412 ------------------
 1 file changed, 412 deletions(-)
 delete mode 100644 trail_detection_node/trail_detection_node/GANav_visualizer.py

diff --git a/trail_detection_node/trail_detection_node/GANav_visualizer.py b/trail_detection_node/trail_detection_node/GANav_visualizer.py
deleted file mode 100644
index 9919553f..00000000
--- a/trail_detection_node/trail_detection_node/GANav_visualizer.py
+++ /dev/null
@@ -1,412 +0,0 @@
-import torch
-from .mmseg.apis import inference_segmentor, init_segmentor
-from PIL import Image as ImagePIL
-from torchvision import transforms
-import cv2
-import os
-
-import rclpy
-from rclpy.node import Node
-from geometry_msgs.msg import PoseStamped
-from sensor_msgs.msg import Image, PointCloud2
-import sensor_msgs_py.point_cloud2 as pc2
-import numpy as np
-import math
-from cv_bridge import CvBridge
-
-'''
-    The transformation matrix as well as the coordinate conversion and depth estimation functions are copied from human_detection_node
-'''
-camera_transformation_k = np.array([
-    [628.5359544,0,676.9575694],
-    [0,627.7249542,532.7206716],
-    [0,0,1]])
-
-rotation_matrix = np.array([
-    [-0.007495781893,-0.0006277316155,0.9999717092],
-    [-0.9999516401,-0.006361853422,-0.007499625104],
-    [0.006366381192,-0.9999795662,-0.0005800141927]])
-
-rotation_matrix = rotation_matrix.T
-
-translation_vector = np.array([-0.06024059837, -0.08180891509, -0.3117851288])
-image_width=1280
-image_height=1024
-
-def convert_to_lidar_frame(uv_coordinate):
-    """
-    convert 2d camera coordinate + depth into 3d lidar frame
-    """
-    point_cloud = np.empty( (3,) , dtype=float)
-    point_cloud[2] = uv_coordinate[2]
-    point_cloud[1] = ( image_height - uv_coordinate[1] )*point_cloud[2]
-    point_cloud[0] = uv_coordinate[0]*point_cloud[2]
-
-    inverse_camera_transformation_k = np.linalg.inv(camera_transformation_k)
-    inverse_rotation_matrix = np.linalg.inv(rotation_matrix)
-    point_cloud = inverse_camera_transformation_k @ point_cloud
-    point_cloud = inverse_rotation_matrix @ (point_cloud-translation_vector)
-    return point_cloud
-
-def convert_to_camera_frame(point_cloud):
-    """
-    convert 3d lidar data into 2d coordinate of the camera frame + depth
-    """
-    length = point_cloud.shape[0]
-    translation = np.tile(translation_vector, (length, 1)).T
-
-    point_cloud = point_cloud.T
-    point_cloud = rotation_matrix@point_cloud + translation
-    point_cloud = camera_transformation_k @ point_cloud
-
-    uv_coordinate = np.empty_like(point_cloud)
-
-    """
-    uv = [x/z, y/z, z], and y is opposite so the minus imageheight
-    """
-    uv_coordinate[0] = point_cloud[0] / point_cloud[2]
-    uv_coordinate[1] = image_height - point_cloud[1] / point_cloud[2]
-    uv_coordinate[2] = point_cloud[2]
-
-    uv_depth = uv_coordinate[2, :]
-    filtered_uv_coordinate = uv_coordinate[:, uv_depth >= 0]
-    return filtered_uv_coordinate
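The two helpers above are exact inverses of each other for points that survive the positive-depth filter, since convert_to_lidar_frame undoes the intrinsic (K) and extrinsic (R, t) transforms that convert_to_camera_frame applies. A minimal round-trip check, assuming the two functions and the calibration matrices above are in scope; the sample point is made up for illustration:

    import numpy as np

    # One hypothetical LiDAR point, roughly two metres ahead of the sensor.
    lidar_points = np.array([[2.0, 0.3, -0.2]])

    uv = convert_to_camera_frame(lidar_points)     # rows are [u, v, depth]
    recovered = convert_to_lidar_frame(uv[:, 0])   # back into the LiDAR frame

    # For a point with positive camera depth the round-trip error is near zero.
    print(np.abs(recovered - lidar_points[0]).max())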
-
-def estimate_depth(x, y, np_2d_array):
-    """
-    estimate the depth by finding points closest to x,y from thhe 2d array
-    """
-    # Calculate the distance between each point and the target coordinates (x, y)
-    distances_sq = (np_2d_array[0,:] - x) ** 2 + (np_2d_array[1,:] - y) ** 2
-
-    # Find the indices of the k nearest points
-    k = 5  # Number of nearest neighbors we want
-    closest_indices = np.argpartition(distances_sq, k)[:k]
-    pixel_distance_threshold = 2000
-
-    valid_indices = [idx for idx in closest_indices if distances_sq[idx]<=pixel_distance_threshold]
-    if len(valid_indices) == 0:
-        # lidar points disappears usually around 0.4m
-        distance_where_lidar_stops_working = -1
-        return distance_where_lidar_stops_working
-
-    filtered_indices = np.array(valid_indices)
-    # Get the depth value of the closest point
-    closest_depths = np_2d_array[2,filtered_indices]
-
-    return np.mean(closest_depths)
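A standalone usage sketch of the k-nearest averaging above, assuming estimate_depth is in scope; the projected points are synthetic, with rows u (px), v (px) and depth (m):

    import numpy as np

    points2d = np.array([
        [100.0, 101.0, 102.0, 103.0, 500.0, 501.0],  # u (px)
        [200.0, 201.0, 202.0, 203.0, 600.0, 601.0],  # v (px)
        [  1.5,   1.6,   1.4,   1.6,   4.0,   4.2],  # depth (m)
    ])

    # The far points are dropped by the pixel-distance threshold, so only the
    # nearby cluster is averaged: (1.5 + 1.6 + 1.4 + 1.6) / 4 = 1.525.
    print(estimate_depth(101, 201, points2d))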
-
-def load_model(device):
-
-    config = "./trail_detection_node/trained_models/rugd_group6/ganav_rugd_6.py"
-    checkpoint = "./trail_detection_node/trained_models/rugd_group6/ganav_rugd_6.pth"
-    model = init_segmentor(config, checkpoint, device="cuda:0")
-    print('Finished loading model!')
-
-    return model
-
-def find_route(model, device, cv_image):
-    result = inference_segmentor(model, cv_image)
-    result_np = np.vstack(result).astype(np.uint8)
-    pred = np.where(result_np == 0, 255, 0)
-
-    # # prediction visualize
-    # cv2.imshow('prediction',pred)
-    # cv2.waitKey(25)
-
-    route = np.zeros_like(pred)
-    # calculate the center line by taking the average
-    row_num = 0
-    for row in pred:
-        white_pixels = list(np.nonzero(row)[0])
-        if white_pixels:
-            average = (white_pixels[0] + white_pixels[-1]) / 2
-            route[row_num][round(average)] = 255
-        row_num = row_num + 1
-    return route, pred
-
-def equalize_hist_rgb(img):
-    # Split the image into its color channels
-    r, g, b = cv2.split(img)
-
-    # Equalize the histograms for each channel
-    r_eq = cv2.equalizeHist(r)
-    g_eq = cv2.equalizeHist(g)
-    b_eq = cv2.equalizeHist(b)
-
-    # Merge the channels
-    img_eq = cv2.merge((r_eq, g_eq, b_eq))
-    # img_eq = cv2.fastNlMeansDenoisingColored(img_eq,None,10,20,7,42)
-    # img_eq = cv2.GaussianBlur(img_eq, (25,25), 0)
-    return img_eq
-
-'''
-    The traildetector node has two subscriptions(lidar and camera) and one publisher(trail position). After it receives msgs from both lidar and camera,
-    it detects the trail in the image and sends the corresponding lidar position as the trail location.
-    The msgs are synchornized before processing, using buffer and sync function.
-    To find the path, the node will process the image, find a line to follow (by taking the average of left and right of the path), estimate the lidar points depth,
-    and choose to go to the closest point.
-    V1_traildetection assumes that the path is pointing frontward and has only one path in front.
-'''
-class trailDetector(Node):
-    def __init__(self, model, device):
-        super().__init__('trail_detector')
-        # define trail subscription
-        self.trail_publisher = self.create_publisher(
-            PoseStamped,
-            'trail_location',
-            10)
-
-        # define camera subscription
-        self.camera_subscription = self.create_subscription(
-            Image,
-            'camera',
-            self.camera_callback,
-            10)
-        self.camera_subscription
-
-        # define lidar subscription
-        self.lidar_subscription = self.create_subscription(
-            PointCloud2,
-            'velodyne_points',
-            self.lidar_callback,
-            10)
-        self.lidar_subscription
-
-        self.bridge = CvBridge()
-
-        # load model and device
-        self.model = model
-        self.device = device
-
-        # buffers to hold msgs for sync
-        self.buffer_size = 50 # 30 is a random choice, to be discussed
-        self.lidar_buffer = []
-        self.camera_buffer = []
-        self.only_camera_mode = True
-
-    def camera_callback(self, msg):
-        if len(self.camera_buffer) >= self.buffer_size:
-            self.camera_buffer.pop(0)
-        self.camera_buffer.append(msg)
-        self.sync()
-
-    def lidar_callback(self, msg):
-        if len(self.lidar_buffer) >= self.buffer_size:
-            self.lidar_buffer.pop(0)
-        self.lidar_buffer.append(msg)
-        self.sync()
-
-    def sync(self):
-        # if one of the sensor has no msg, then return
-        if self.only_camera_mode and self.camera_buffer:
-            camera_msg = self.camera_buffer[0]
-            self.camera_buffer.pop(0)
-            self.only_camera_callback(camera_msg)
-        elif not self.lidar_buffer or not self.camera_buffer:
-            return
-
-        while self.lidar_buffer and self.camera_buffer:
-            lidar_msg = self.lidar_buffer[0]
-            camera_msg = self.camera_buffer[0]
-
-            time_tolerance = 20000000 # nanosec, random choice to be discussed
-            time_difference = abs(lidar_msg.header.stamp.nanosec - camera_msg.header.stamp.nanosec)
-            # print(time_difference)
-
-            # compare the time difference, if it's within tolerance, then pass to the trail_callback, otherwise discard the older msg
-            if time_difference <= time_tolerance:
-                self.lidar_buffer.pop(0)
-                self.camera_buffer.pop(0)
-                self.get_logger().info("msgs received!")
-                self.trail_callback(lidar_msg, camera_msg)
-            elif lidar_msg.header.stamp.nanosec > camera_msg.header.stamp.nanosec:
-                self.camera_buffer.pop(0)
-            else:
-                self.lidar_buffer.pop(0)
-
-    def trail_callback(self, lidar_msg, camera_msg):
-        # process lidar msg
-        point_gen = pc2.read_points(
-            lidar_msg, field_names=(
-                "x", "y", "z"), skip_nans=True)
-        points = [[x, y, z] for x, y, z in point_gen]
-        points = np.array(points)
-        points2d = convert_to_camera_frame(points)
-
-        # process camera msg
-        cv_image = self.bridge.imgmsg_to_cv2(camera_msg, desired_encoding='passthrough')
-
-        # increase brightness
-        M = np.ones(cv_image.shape, dtype = 'uint8') * 50 # increase brightness by 50
-        cv_image = cv2.add(cv_image, M)
-
-        # # image visualizer
-        # cv2.imshow('raw image',cv_image)
-        # cv2.waitKey(25)
-
-        route = find_route(self.model, self.device, cv_image)
-
-        # # visualize route purely
-        # cv2.imshow("white_route",route)
-        # cv2.waitKey(25)
-
-        route_indices = list(zip(*np.nonzero(route)))
-        if not route_indices:
-            print("No centerline found!")
-            return
-
-        # #filter points that have no lidar points near it
-        # filtered_route_indices = []
-        # for index in route_indices:
-        #     point = []
-        #     point.append(index[0])
-        #     point.append(index[1])
-        #     point.append(estimate_depth(index[0], index[1], points2d))
-        #     if point[2] == -1:
-        #         continue
-        #     else:
-        #         filtered_route_indices.append(point)
-
-        # find the corresponding lidar points using the center line pixels
-        filtered_3dPoints = []
-        # for index in filtered_route_indices:
-        for index in route_indices:
-            point = []
-            # point.append(index[0])
-            # point.append(index[1])
-            # point.append(index[2])
-
-            point.append(index[0])
-            point.append(index[1])
-            point.append(estimate_depth(index[0], index[1], points2d))
-
-            point_3d = convert_to_lidar_frame(point)
-            filtered_3dPoints.append(point_3d)
-
-        filtered_3dPoints = np.array(filtered_3dPoints)
-        # find the nearest 3d point and set that as goal
-        distances_sq = filtered_3dPoints[:,0]**2 + filtered_3dPoints[:,1]**2 + filtered_3dPoints[:,2]**2
-        smallest_index = np.argmin(distances_sq)
-        # print(math.sqrt(distances_sq[smallest_index]))
-        # smallest_index = np.argmin(filtered_3dPoints[:,2])
-
-        # visualize after-pocessed image
-        visualize_cv_image = cv_image
-        print(f"{visualize_cv_image.shape[0]}")
-        circle_x = route_indices[smallest_index][0]
-        circle_y = route_indices[smallest_index][1]
-
-        # # visualize the lidar points in image
-        # uv_x, uv_y, uv_z = points2d
-        # for index_lidar in range(points2d.shape[1]):
-        #     # print(f"{int(uv_x[index_lidar])},{int(uv_y[index_lidar])}")
-        #     cv2.circle(visualize_cv_image, (int(uv_x[index_lidar]), int(image_height - uv_y[index_lidar])), radius=5, color=(255, 0, 0), thickness=-1)
-
-        # visualize center line in image
-        for index_circle in range(len(route_indices)):
-            if index_circle == smallest_index:
-                continue
-            else:
-                red_circle_x = route_indices[index_circle][0]
-                red_circle_y = route_indices[index_circle][1]
-                cv2.circle(visualize_cv_image, (red_circle_y, red_circle_x), radius=5, color=(0, 0, 255), thickness=-1)
-
-        # visualize the chosen point in image
-        cv2.circle(visualize_cv_image, (circle_y, circle_x), radius=7, color=(0, 255, 0), thickness=-1)
-        cv2.circle(visualize_cv_image, (0, 0), radius=12, color=(0, 255, 0), thickness=-1)
-
-        cv2.imshow('circled image',visualize_cv_image)
-        cv2.waitKey(25)
-
-        # publsih message
-        trail_location_msg = PoseStamped()
-        trail_location_msg.header.stamp = lidar_msg.header.stamp
-        trail_location_msg.header.frame_id = "velodyne"
-        #position
-        trail_location_msg.pose.position.x = filtered_3dPoints[smallest_index][0]
-        trail_location_msg.pose.position.y = filtered_3dPoints[smallest_index][1]
-        trail_location_msg.pose.position.z = filtered_3dPoints[smallest_index][2]
-        #orientation
-        yaw = math.atan2(filtered_3dPoints[smallest_index][1], filtered_3dPoints[smallest_index][0])
-        trail_location_msg.pose.orientation.x = 0.0
-        trail_location_msg.pose.orientation.y = 0.0
-        trail_location_msg.pose.orientation.z = math.sin(yaw/2)
-        trail_location_msg.pose.orientation.w = math.cos(yaw / 2)
-        self.get_logger().info("location published!")
-        self.trail_publisher.publish(trail_location_msg)
-
-    def only_camera_callback(self, camera_msg):
-        # process camera msg
-        cv_image = self.bridge.imgmsg_to_cv2(camera_msg, desired_encoding='passthrough')
-
-        # cv_image = cv2.fastNlMeansDenoisingColored(cv_image,None,5,10,7,14)
-
-        # # histogram equalization method 1
-        # cv_image = cv2.cvtColor(cv_image, cv2.COLOR_BGR2YCrCb)
-        # cv_image[:,:,0] = cv2.equalizeHist(cv_image[:,:,0])
-        # cv_image = cv2.cvtColor(cv_image, cv2.COLOR_YCrCb2BGR)
-
-        # # histogram equalization method 2
-        # cv_image = equalize_hist_rgb(cv_image)
-
-        # # histogram equalization method 3 CLAHE
-        # image_bw = cv2.cvtColor(cv_image, cv2.COLOR_BGR2GRAY)
-        # clahe = cv2.createCLAHE(clipLimit=5)
-        # cv_image = clahe.apply(image_bw) + 30
-
-        # # histogram equalization method 4 CLAHE
-        # cla = cv2.createCLAHE(clipLimit=4.0)
-        # H, S, V = cv2.split(cv2.cvtColor(cv_image, cv2.COLOR_BGR2HSV))
-        # eq_V = cla.apply(V)
-        # cv_image = cv2.cvtColor(cv2.merge([H, S, eq_V]), cv2.COLOR_HSV2BGR)
-
-        # # increase brightness
-        # M = np.ones(cv_image.shape, dtype = 'uint8') * 50 # increase brightness by 50
-        # cv_image = cv2.add(cv_image, M)
-
-        # # image visualizer
-        # cv2.imshow('raw image',cv_image)
-        # cv2.waitKey(25)
-
-        route, pred = find_route(self.model, self.device, cv_image)
-
-        # # visualize route purely
-        # cv2.imshow("white_route",route)
-        # cv2.waitKey(25)
-
-        # increase the brightness of image where is predicted as road
-        sign = cv2.cvtColor(pred, cv2.COLOR_GRAY2BGR)
-        cv_image = cv2.add(cv_image, sign)
-
-        route_indices = list(zip(*np.nonzero(route)))
-        if not route_indices:
-            print("No centerline found!")
-            return
-
-        for index_circle in range(len(route_indices)):
-            red_circle_x = route_indices[index_circle][0]
-            red_circle_y = route_indices[index_circle][1]
-            cv2.circle(cv_image, (red_circle_y, red_circle_x), radius=5, color=(0, 0, 255), thickness=-1)
-
-        cv2.imshow('circled image',cv_image)
-        cv2.waitKey(25)
-
-
-
-
-def main(args=None):
-    print(torch.cuda.is_available())
-    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
-    model = load_model(device)
-
-    rclpy.init(args=args)
-    trailDetectorNode = trailDetector(model, device)
-    rclpy.spin(trailDetectorNode)
-
-    trailDetectorNode.destroy_node()
-    rclpy.shutdown()
-    cv2.destroyAllWindows()
-
-if __name__ == '__main__':
-    main()
\ No newline at end of file
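The per-row centerline rule used by find_route in the file deleted above (the midpoint of the leftmost and rightmost trail pixels in each row) can be tried in isolation. A self-contained sketch on a toy mask, mirroring the deleted loop:

    import numpy as np

    # Toy 4x6 binary trail mask standing in for `pred` (255 = trail).
    pred = np.zeros((4, 6), dtype=np.uint8)
    pred[1, 1:4] = 255   # trail spans columns 1..3 -> centre column 2
    pred[2, 2:6] = 255   # trail spans columns 2..5 -> centre 3.5, rounds to 4

    route = np.zeros_like(pred)
    for row_num, row in enumerate(pred):
        cols = np.nonzero(row)[0]
        if cols.size:
            route[row_num, round((cols[0] + cols[-1]) / 2)] = 255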

From a5ea74b6ff3a1293d75d0adf19c8d9e332aad043 Mon Sep 17 00:00:00 2001
From: Frank_ZhaodongJiang <69261064+FrankZhaodong@users.noreply.github.com>
Date: Thu, 7 Sep 2023 20:15:45 -0400
Subject: [PATCH 07/22] Delete trail_detection_node/trail_detection_node/jpu.py

---
 .../trail_detection_node/jpu.py               | 68 ------------------
 1 file changed, 68 deletions(-)
 delete mode 100644 trail_detection_node/trail_detection_node/jpu.py

diff --git a/trail_detection_node/trail_detection_node/jpu.py b/trail_detection_node/trail_detection_node/jpu.py
deleted file mode 100644
index db23bab8..00000000
--- a/trail_detection_node/trail_detection_node/jpu.py
+++ /dev/null
@@ -1,68 +0,0 @@
-"""Joint Pyramid Upsampling"""
-import torch
-import torch.nn as nn
-import torch.nn.functional as F
-
-__all__ = ['JPU']
-
-
-class SeparableConv2d(nn.Module):
-    def __init__(self, inplanes, planes, kernel_size=3, stride=1, padding=1,
-                 dilation=1, bias=False, norm_layer=nn.BatchNorm2d):
-        super(SeparableConv2d, self).__init__()
-        self.conv = nn.Conv2d(inplanes, inplanes, kernel_size, stride, padding, dilation, groups=inplanes, bias=bias)
-        self.bn = norm_layer(inplanes)
-        self.pointwise = nn.Conv2d(inplanes, planes, 1, bias=bias)
-
-    def forward(self, x):
-        x = self.conv(x)
-        x = self.bn(x)
-        x = self.pointwise(x)
-        return x
-
-
-# copy from: https://github.com/wuhuikai/FastFCN/blob/master/encoding/nn/customize.py
-class JPU(nn.Module):
-    def __init__(self, in_channels, width=512, norm_layer=nn.BatchNorm2d, **kwargs):
-        super(JPU, self).__init__()
-
-        self.conv5 = nn.Sequential(
-            nn.Conv2d(in_channels[-1], width, 3, padding=1, bias=False),
-            norm_layer(width),
-            nn.ReLU(True))
-        self.conv4 = nn.Sequential(
-            nn.Conv2d(in_channels[-2], width, 3, padding=1, bias=False),
-            norm_layer(width),
-            nn.ReLU(True))
-        self.conv3 = nn.Sequential(
-            nn.Conv2d(in_channels[-3], width, 3, padding=1, bias=False),
-            norm_layer(width),
-            nn.ReLU(True))
-
-        self.dilation1 = nn.Sequential(
-            SeparableConv2d(3 * width, width, 3, padding=1, dilation=1, bias=False),
-            norm_layer(width),
-            nn.ReLU(True))
-        self.dilation2 = nn.Sequential(
-            SeparableConv2d(3 * width, width, 3, padding=2, dilation=2, bias=False),
-            norm_layer(width),
-            nn.ReLU(True))
-        self.dilation3 = nn.Sequential(
-            SeparableConv2d(3 * width, width, 3, padding=4, dilation=4, bias=False),
-            norm_layer(width),
-            nn.ReLU(True))
-        self.dilation4 = nn.Sequential(
-            SeparableConv2d(3 * width, width, 3, padding=8, dilation=8, bias=False),
-            norm_layer(width),
-            nn.ReLU(True))
-
-    def forward(self, *inputs):
-        feats = [self.conv5(inputs[-1]), self.conv4(inputs[-2]), self.conv3(inputs[-3])]
-        size = feats[-1].size()[2:]
-        feats[-2] = F.interpolate(feats[-2], size, mode='bilinear', align_corners=True)
-        feats[-3] = F.interpolate(feats[-3], size, mode='bilinear', align_corners=True)
-        feat = torch.cat(feats, dim=1)
-        feat = torch.cat([self.dilation1(feat), self.dilation2(feat), self.dilation3(feat), self.dilation4(feat)],
-                         dim=1)
-
-        return inputs[0], inputs[1], inputs[2], feat

From 4b7abe5485f4e4e0a6ca41d47296af7c068b9356 Mon Sep 17 00:00:00 2001
From: Frank_ZhaodongJiang <69261064+FrankZhaodong@users.noreply.github.com>
Date: Thu, 7 Sep 2023 20:16:16 -0400
Subject: [PATCH 08/22] Delete trail_detection_node/trail_detection_node/model_store.py

---
 .../trail_detection_node/model_store.py       | 36 -------------------
 1 file changed, 36 deletions(-)
 delete mode 100644 trail_detection_node/trail_detection_node/model_store.py

diff --git a/trail_detection_node/trail_detection_node/model_store.py b/trail_detection_node/trail_detection_node/model_store.py
deleted file mode 100644
index a0193c79..00000000
--- a/trail_detection_node/trail_detection_node/model_store.py
+++ /dev/null
@@ -1,36 +0,0 @@
-"""Model store which provides pretrained models."""
-from __future__ import print_function
-
-import os
-import zipfile
-
-__all__ = ['get_model_file', 'get_resnet_file']
-
-_model_sha1 = {name: checksum for checksum, name in [
-    ('25c4b50959ef024fcc050213a06b614899f94b3d', 'resnet50'),
-    ('2a57e44de9c853fa015b172309a1ee7e2d0e4e2a', 'resnet101'),
-    ('0d43d698c66aceaa2bc0309f55efdd7ff4b143af', 'resnet152'),
-]}
-
-encoding_repo_url = 'https://hangzh.s3.amazonaws.com/'
-_url_format = '{repo_url}encoding/models/{file_name}.zip'
-
-
-def short_hash(name):
-    if name not in _model_sha1:
-        raise ValueError('Pretrained model for {name} is not available.'.format(name=name))
-    return _model_sha1[name][:8]
-
-
-def get_resnet_file(name, root='~/.torch/models'):
-    file_name = '{name}-{short_hash}'.format(name=name, short_hash=short_hash(name))
-    root = os.path.expanduser(root)
-
-    file_path = os.path.join(root, file_name + '.pth')
-    sha1_hash = _model_sha1[name]
-    if os.path.exists(file_path):
-        return file_path
-
-    else:
-        print('Model file {} is not found.'.format(file_path))
-
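A quick shape check of the JPU module removed in the patch above; the channel list matches the [512, 1024, 2048] configuration that SegBaseModel (deleted in the next patch) passes in, the JPU class is assumed to be in scope, and the tensor sizes are illustrative:

    import torch

    # Backbone features c1..c4 for a hypothetical 480x480 input.
    c1 = torch.randn(1, 256, 120, 120)
    c2 = torch.randn(1, 512, 60, 60)
    c3 = torch.randn(1, 1024, 30, 30)
    c4 = torch.randn(1, 2048, 15, 15)

    jpu = JPU([512, 1024, 2048], width=512)
    _, _, _, feat = jpu(c1, c2, c3, c4)

    # Four dilation branches of 512 channels each, fused at c2's resolution.
    print(feat.shape)  # torch.Size([1, 2048, 60, 60])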

From f902b9ec500812f34de9710200072580d9addb8e Mon Sep 17 00:00:00 2001
From: Frank_ZhaodongJiang <69261064+FrankZhaodong@users.noreply.github.com>
Date: Thu, 7 Sep 2023 20:16:37 -0400
Subject: [PATCH 09/22] Delete trail_detection_node/trail_detection_node/segbase.py

---
 .../trail_detection_node/segbase.py           | 60 -------------------
 1 file changed, 60 deletions(-)
 delete mode 100644 trail_detection_node/trail_detection_node/segbase.py

diff --git a/trail_detection_node/trail_detection_node/segbase.py b/trail_detection_node/trail_detection_node/segbase.py
deleted file mode 100644
index ae417652..00000000
--- a/trail_detection_node/trail_detection_node/segbase.py
+++ /dev/null
@@ -1,60 +0,0 @@
-"""Base Model for Semantic Segmentation"""
-import torch.nn as nn
-
-from .jpu import JPU
-from .base_models.resnetv1b import resnet50_v1s, resnet101_v1s, resnet152_v1s
-
-__all__ = ['SegBaseModel']
-
-
-class SegBaseModel(nn.Module):
-    r"""Base Model for Semantic Segmentation
-
-    Parameters
-    ----------
-    backbone : string
-        Pre-trained dilated backbone network type (default:'resnet50'; 'resnet50',
-        'resnet101' or 'resnet152').
-    """
-
-    def __init__(self, nclass, aux, backbone='resnet50', jpu=False, pretrained_base=True, **kwargs):
-        super(SegBaseModel, self).__init__()
-        dilated = False if jpu else True
-        self.aux = aux
-        self.nclass = nclass
-        if backbone == 'resnet50':
-            self.pretrained = resnet50_v1s(pretrained=pretrained_base, dilated=dilated, **kwargs)
-        elif backbone == 'resnet101':
-            self.pretrained = resnet101_v1s(pretrained=pretrained_base, dilated=dilated, **kwargs)
-        elif backbone == 'resnet152':
-            self.pretrained = resnet152_v1s(pretrained=pretrained_base, dilated=dilated, **kwargs)
-        else:
-            raise RuntimeError('unknown backbone: {}'.format(backbone))
-
-        self.jpu = JPU([512, 1024, 2048], width=512, **kwargs) if jpu else None
-
-    def base_forward(self, x):
-        """forwarding pre-trained network"""
-        x = self.pretrained.conv1(x)
-        x = self.pretrained.bn1(x)
-        x = self.pretrained.relu(x)
-        x = self.pretrained.maxpool(x)
-        c1 = self.pretrained.layer1(x)
-        c2 = self.pretrained.layer2(c1)
-        c3 = self.pretrained.layer3(c2)
-        c4 = self.pretrained.layer4(c3)
-
-        if self.jpu:
-            return self.jpu(c1, c2, c3, c4)
-        else:
-            return c1, c2, c3, c4
-
-    def evaluate(self, x):
-        """evaluating network with inputs and targets"""
-        return self.forward(x)[0]
-
-    def demo(self, x):
-        pred = self.forward(x)
-        if self.aux:
-            pred = pred[0]
-        return pred

From 7fd0167d0ae7ffc59d342d4cc14e4ffd371f5669 Mon Sep 17 00:00:00 2001
From: Frank_ZhaodongJiang <69261064+FrankZhaodong@users.noreply.github.com>
Date: Thu, 7 Sep 2023 20:16:50 -0400
Subject: [PATCH 10/22] Delete trail_detection_node/trail_detection_node/v1_trailDetectionNode.py

---
 .../v1_trailDetectionNode.py                  | 278 ------------------
 1 file changed, 278 deletions(-)
 delete mode 100644 trail_detection_node/trail_detection_node/v1_trailDetectionNode.py

diff --git a/trail_detection_node/trail_detection_node/v1_trailDetectionNode.py b/trail_detection_node/trail_detection_node/v1_trailDetectionNode.py
deleted file mode 100644
index c20f7df5..00000000
--- a/trail_detection_node/trail_detection_node/v1_trailDetectionNode.py
+++ /dev/null
@@ -1,278 +0,0 @@
-import torch
-from .model_loader import FCN8s
-from PIL import Image as ImagePIL
-from torchvision import transforms
-import cv2
-
-import rclpy
-from rclpy.node import Node
-from geometry_msgs.msg import PoseStamped
-from sensor_msgs.msg import Image, PointCloud2
-import sensor_msgs_py.point_cloud2 as pc2
-import numpy as np
-import math
-from cv_bridge import CvBridge
-
-'''
-    The transformation matrix as well as the coordinate conversion and depth estimation functions are copied from human_detection_node
-'''
-camera_transformation_k = np.array([
-    [628.5359544,0,676.9575694],
-    [0,627.7249542,532.7206716],
-    [0,0,1]])
-
-rotation_matrix = np.array([
-    [-0.007495781893,-0.0006277316155,0.9999717092],
-    [-0.9999516401,-0.006361853422,-0.007499625104],
-    [0.006366381192,-0.9999795662,-0.0005800141927]])
-
-rotation_matrix = rotation_matrix.T
-
-translation_vector = np.array([-0.06024059837, -0.08180891509, -0.3117851288])
-image_width=1280
-image_height=1024
-
-def convert_to_lidar_frame(uv_coordinate):
-    """
-    convert 2d camera coordinate + depth into 3d lidar frame
-    """
-    point_cloud = np.empty( (3,) , dtype=float)
-    point_cloud[2] = uv_coordinate[2]
-    point_cloud[1] = ( image_height - uv_coordinate[1] )*point_cloud[2]
-    point_cloud[0] = uv_coordinate[0]*point_cloud[2]
-
-    inverse_camera_transformation_k = np.linalg.inv(camera_transformation_k)
-    inverse_rotation_matrix = np.linalg.inv(rotation_matrix)
-    point_cloud = inverse_camera_transformation_k @ point_cloud
-    point_cloud = inverse_rotation_matrix @ (point_cloud-translation_vector)
-    return point_cloud
-
-def convert_to_camera_frame(point_cloud):
-    """
-    convert 3d lidar data into 2d coordinate of the camera frame + depth
-    """
-    length = point_cloud.shape[0]
-    translation = np.tile(translation_vector, (length, 1)).T
-
-    point_cloud = point_cloud.T
-    point_cloud = rotation_matrix@point_cloud + translation
-    point_cloud = camera_transformation_k @ point_cloud
-
-    uv_coordinate = np.empty_like(point_cloud)
-
-    """
-    uv = [x/z, y/z, z], and y is opposite so the minus imageheight
-    """
-    uv_coordinate[0] = point_cloud[0] / point_cloud[2]
-    uv_coordinate[1] = image_height - point_cloud[1] / point_cloud[2]
-    uv_coordinate[2] = point_cloud[2]
-
-    uv_depth = uv_coordinate[2, :]
-    filtered_uv_coordinate = uv_coordinate[:, uv_depth >= 0]
-    return filtered_uv_coordinate
-
-def estimate_depth(x, y, np_2d_array):
-    """
-    estimate the depth by finding points closest to x,y from thhe 2d array
-    """
-    # Calculate the distance between each point and the target coordinates (x, y)
-    distances_sq = (np_2d_array[0,:] - x) ** 2 + (np_2d_array[1,:] - y) ** 2
-
-    # Find the indices of the k nearest points
-    k = 5  # Number of nearest neighbors we want
-    closest_indices = np.argpartition(distances_sq, k)[:k]
-    pixel_distance_threshold = 2000
-
-    valid_indices = [idx for idx in closest_indices if distances_sq[idx]<=pixel_distance_threshold]
-    if len(valid_indices) == 0:
-        # lidar points disappears usually around 0.4m
-        distance_where_lidar_stops_working = -1
-        return distance_where_lidar_stops_working
-
-    filtered_indices = np.array(valid_indices)
-    # Get the depth value of the closest point
-    closest_depths = np_2d_array[2,filtered_indices]
-
-    return np.mean(closest_depths)
-
-def load_model(device):
-    model = FCN8s(nclass=6, backbone='vgg16', pretrained_base=True, pretrained=True)
-    model.load_state_dict(torch.load('src/trail_detection_node/trail_detection_node/model/fcn8s_vgg16_pascal_voc.pth'))
-    model = model.to(device)
-    print('Finished loading model!')
-
-    return model
-
-def find_route(model, device, cv_image):
-    PIL_image = ImagePIL.fromarray(cv2.cvtColor(cv_image, cv2.COLOR_BGR2RGB))
-    transform = transforms.Compose([
-        transforms.ToTensor(),
-        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
-    ])
-    image = transform(PIL_image).unsqueeze(0).to(device)
-    with torch.no_grad():
-        output = model(image)
-    # pred is prediction generated by the model, route is the variable for the center line
-    pred = torch.argmax(output[0], 1).squeeze(0).cpu().data.numpy()
-    pred[pred != 1] = 0 # only see the trail
-    pred[pred == 1] = 255
-    pred = np.array(pred, dtype=np.uint8)
-    route = np.zeros_like(pred)
-    # calculate the center line by taking the average
-    row_num = 0
-    for row in pred:
-        white_pixels = list(np.nonzero(row)[0])
-        if white_pixels:
-            average = (white_pixels[0] + white_pixels[-1]) / 2
-            route[row_num][round(average)] = 255
-        row_num = row_num + 1
-    return route
After it receives msgs from both lidar and camera, - it detects the trail in the image and sends the corresponding lidar position as the trail location. - The msgs are synchornized before processing, using buffer and sync function. - To find the path, the node will process the image, find a line to follow (by taking the average of left and right of the path), estimate the lidar points depth, - and choose to go to the closest point. - V1_traildetection assumes that the path is pointing frontward and has only one path in front. -''' -class trailDetector(Node): - def __init__(self, model, device): - super().__init__('trail_detector') - # define trail subscription - self.trail_publisher = self.create_publisher( - PoseStamped, - 'trail_location', - 10) - - # define camera subscription - self.camera_subscription = self.create_subscription( - Image, - 'camera', - self.camera_callback, - 10) - self.camera_subscription - - # define lidar subscription - self.lidar_subscription = self.create_subscription( - PointCloud2, - 'velodyne_points', - self.lidar_callback, - 10) - self.lidar_subscription - - self.bridge = CvBridge() - - # load model and device - self.model = model - self.device = device - - # buffers to hold msgs for sync - self.buffer_size = 30 # 30 is a random choice, to be discussed - self.lidar_buffer = [] - self.camera_buffer = [] - - def camera_callback(self, msg): - if len(self.camera_buffer) >= self.buffer_size: - self.camera_buffer.pop(0) - self.camera_buffer.append(msg) - self.sync() - - def lidar_callback(self, msg): - if len(self.lidar_buffer) >= self.buffer_size: - self.lidar_buffer.pop(0) - self.lidar_buffer.append(msg) - self.sync() - - def sync(self): - # if one of the sensor has no msg, then return - if not self.lidar_buffer or not self.camera_buffer: - return - - while self.lidar_buffer and self.camera_buffer: - lidar_msg = self.lidar_buffer[0] - camera_msg = self.camera_buffer[0] - - time_tolerance = 5000000 # nanosec, random choice to be discussed - time_difference = abs(lidar_msg.header.stamp.nanosec - camera_msg.header.stamp.nanosec) - print(time_difference) - - # compare the time difference, if it's within tolerance, then pass to the trail_callback, otherwise discard the older msg - if time_difference <= time_tolerance: - self.lidar_buffer.pop(0) - self.camera_buffer.pop(0) - self.get_logger().info("msgs received!") - self.trail_callback(lidar_msg, camera_msg) - elif lidar_msg.header.stamp.nanosec > camera_msg.header.stamp.nanosec: - self.camera_buffer.pop(0) - else: - self.lidar_buffer.pop(0) - - def trail_callback(self, lidar_msg, camera_msg): - # process lidar msg - point_gen = pc2.read_points( - lidar_msg, field_names=( - "x", "y", "z"), skip_nans=True) - points = [[x, y, z] for x, y, z in point_gen] - points = np.array(points) - points2d = convert_to_camera_frame(points) - - # process camera msg - cv_image = self.bridge.imgmsg_to_cv2(camera_msg, desired_encoding='passthrough') - route = find_route(self.model, self.device, cv_image) - route_indices = list(zip(*np.nonzero(route))) - - if not route_indices: - print("No centerline found!") - return - - # find the corresponding lidar points using the center line pixels - filtered_3dPoints = [] - for index in route_indices: - point = [] - point.append(index[0]) - point.append(index[1]) - point.append(estimate_depth(index[0], index[1], points2d)) - point_3d = convert_to_lidar_frame(point) - filtered_3dPoints.append(point_3d) - - filtered_3dPoints = np.array(filtered_3dPoints) - # find the nearest 3d point and set that as 
goal - distances_sq = filtered_3dPoints[:,0]**2 + filtered_3dPoints[:,1]**2 + filtered_3dPoints[:,2]**2 - smallest_index = np.argmin(distances_sq) - - # publsih message - trail_location_msg = PoseStamped() - trail_location_msg.header.stamp = lidar_msg.header.stamp - trail_location_msg.header.frame_id = "velodyne" - #position - trail_location_msg.pose.position.x = filtered_3dPoints[smallest_index][0] - trail_location_msg.pose.position.y = filtered_3dPoints[smallest_index][1] - trail_location_msg.pose.position.z = filtered_3dPoints[smallest_index][2] - #orientation - yaw = math.atan2(filtered_3dPoints[smallest_index][1], filtered_3dPoints[smallest_index][0]) - trail_location_msg.pose.orientation.x = 0.0 - trail_location_msg.pose.orientation.y = 0.0 - trail_location_msg.pose.orientation.z = math.sin(yaw/2) - trail_location_msg.pose.orientation.w = math.cos(yaw / 2) - self.get_logger().info("location published!") - self.trail_publisher.publish(trail_location_msg) - - - - - -def main(args=None): - print(torch.cuda.is_available()) - device = torch.device("cuda" if torch.cuda.is_available() else "cpu") - model = load_model(device) - - rclpy.init(args=args) - trailDetectorNode = trailDetector(model, device) - rclpy.spin(trailDetectorNode) - - trailDetectorNode.destroy_node() - rclpy.shutdown() - -if __name__ == '__main__': - main() \ No newline at end of file From ff03ab6f656896e7bf39772d3da28169dddefa9b Mon Sep 17 00:00:00 2001 From: Frank_ZhaodongJiang <69261064+FrankZhaodong@users.noreply.github.com> Date: Thu, 7 Sep 2023 20:17:02 -0400 Subject: [PATCH 11/22] Delete trail_detection_node/trail_detection_node/visualizer_v1_trailDetectionNode.py --- .../visualizer_v1_trailDetectionNode.py | 431 ------------------ 1 file changed, 431 deletions(-) delete mode 100644 trail_detection_node/trail_detection_node/visualizer_v1_trailDetectionNode.py diff --git a/trail_detection_node/trail_detection_node/visualizer_v1_trailDetectionNode.py b/trail_detection_node/trail_detection_node/visualizer_v1_trailDetectionNode.py deleted file mode 100644 index 616b689d..00000000 --- a/trail_detection_node/trail_detection_node/visualizer_v1_trailDetectionNode.py +++ /dev/null @@ -1,431 +0,0 @@ -import torch -from .model_loader import FCN8s, FCN32s -from PIL import Image as ImagePIL -from torchvision import transforms -import cv2 -import os - -import rclpy -from rclpy.node import Node -from geometry_msgs.msg import PoseStamped -from sensor_msgs.msg import Image, PointCloud2 -import sensor_msgs_py.point_cloud2 as pc2 -import numpy as np -import math -from cv_bridge import CvBridge - -''' - The transformation matrix as well as the coordinate conversion and depth estimation functions are copied from human_detection_node -''' -camera_transformation_k = np.array([ - [628.5359544,0,676.9575694], - [0,627.7249542,532.7206716], - [0,0,1]]) - -rotation_matrix = np.array([ - [-0.007495781893,-0.0006277316155,0.9999717092], - [-0.9999516401,-0.006361853422,-0.007499625104], - [0.006366381192,-0.9999795662,-0.0005800141927]]) - -rotation_matrix = rotation_matrix.T - -translation_vector = np.array([-0.06024059837, -0.08180891509, -0.3117851288]) -image_width=1280 -image_height=1024 - -def convert_to_lidar_frame(uv_coordinate): - """ - convert 2d camera coordinate + depth into 3d lidar frame - """ - point_cloud = np.empty( (3,) , dtype=float) - point_cloud[2] = uv_coordinate[2] - point_cloud[1] = ( image_height - uv_coordinate[1] )*point_cloud[2] - point_cloud[0] = uv_coordinate[0]*point_cloud[2] - - inverse_camera_transformation_k = 
np.linalg.inv(camera_transformation_k) - inverse_rotation_matrix = np.linalg.inv(rotation_matrix) - point_cloud = inverse_camera_transformation_k @ point_cloud - point_cloud = inverse_rotation_matrix @ (point_cloud-translation_vector) - return point_cloud - -def convert_to_camera_frame(point_cloud): - """ - convert 3d lidar data into 2d coordinate of the camera frame + depth - """ - length = point_cloud.shape[0] - translation = np.tile(translation_vector, (length, 1)).T - - point_cloud = point_cloud.T - point_cloud = rotation_matrix@point_cloud + translation - point_cloud = camera_transformation_k @ point_cloud - - uv_coordinate = np.empty_like(point_cloud) - - """ - uv = [x/z, y/z, z], and y is opposite so the minus imageheight - """ - uv_coordinate[0] = point_cloud[0] / point_cloud[2] - uv_coordinate[1] = image_height - point_cloud[1] / point_cloud[2] - uv_coordinate[2] = point_cloud[2] - - uv_depth = uv_coordinate[2, :] - filtered_uv_coordinate = uv_coordinate[:, uv_depth >= 0] - return filtered_uv_coordinate - -def estimate_depth(x, y, np_2d_array): - """ - estimate the depth by finding points closest to x,y from thhe 2d array - """ - # Calculate the distance between each point and the target coordinates (x, y) - distances_sq = (np_2d_array[0,:] - x) ** 2 + (np_2d_array[1,:] - y) ** 2 - - # Find the indices of the k nearest points - k = 5 # Number of nearest neighbors we want - closest_indices = np.argpartition(distances_sq, k)[:k] - pixel_distance_threshold = 2000 - - valid_indices = [idx for idx in closest_indices if distances_sq[idx]<=pixel_distance_threshold] - if len(valid_indices) == 0: - # lidar points disappears usually around 0.4m - distance_where_lidar_stops_working = -1 - return distance_where_lidar_stops_working - - filtered_indices = np.array(valid_indices) - # Get the depth value of the closest point - closest_depths = np_2d_array[2,filtered_indices] - - return np.mean(closest_depths) - -def load_model(device): - model = FCN32s(nclass=2, backbone='vgg16', pretrained_base=True, pretrained=True) - # model_location = '32s_batch8/fcn32s_vgg16_pascal_voc_best_model.pth' - model_location = '500epoch/fcn32s_vgg16_pascal_voc.pth' - if os.path.isfile(f'src/trail_detection_node/trail_detection_node/model/{model_location}'): - model.load_state_dict(torch.load(f'src/trail_detection_node/trail_detection_node/model/{model_location}',map_location=torch.device('cuda:0'))) - else: - model.load_state_dict(torch.load(f'trail_detection_node/model/{model_location}',map_location=torch.device('cuda:0'))) - model = model.to(device) - print('Finished loading model!') - - return model - -def find_route(model, device, cv_image): - PIL_image = ImagePIL.fromarray(cv2.cvtColor(cv_image, cv2.COLOR_BGR2RGB)) - transform = transforms.Compose([ - transforms.ToTensor(), - transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), - ]) - image = transform(PIL_image).unsqueeze(0).to(device) - with torch.no_grad(): - output = model(image) - # pred is prediction generated by the model, route is the variable for the center line - pred = torch.argmax(output[0], 1).squeeze(0).cpu().data.numpy() - pred[pred == 0] = 255 # only see the trail - pred[pred == 1] = 0 - pred[pred == 2] = 0 - pred[pred == 3] = 0 - pred[pred == 4] = 0 - pred[pred == 5] = 0 - pred = np.array(pred, dtype=np.uint8) - - # # prediction visualize - # cv2.imshow('prediction',pred) - # cv2.waitKey(25) - - route = np.zeros_like(pred) - # calculate the center line by taking the average - row_num = 0 - for row in pred: - white_pixels = 
list(np.nonzero(row)[0]) - if white_pixels: - average = (white_pixels[0] + white_pixels[-1]) / 2 - route[row_num][round(average)] = 255 - row_num = row_num + 1 - return route, pred - -def equalize_hist_rgb(img): - # Split the image into its color channels - r, g, b = cv2.split(img) - - # Equalize the histograms for each channel - r_eq = cv2.equalizeHist(r) - g_eq = cv2.equalizeHist(g) - b_eq = cv2.equalizeHist(b) - - # Merge the channels - img_eq = cv2.merge((r_eq, g_eq, b_eq)) - # img_eq = cv2.fastNlMeansDenoisingColored(img_eq,None,10,20,7,42) - # img_eq = cv2.GaussianBlur(img_eq, (25,25), 0) - return img_eq - -''' - The traildetector node has two subscriptions(lidar and camera) and one publisher(trail position). After it receives msgs from both lidar and camera, - it detects the trail in the image and sends the corresponding lidar position as the trail location. - The msgs are synchornized before processing, using buffer and sync function. - To find the path, the node will process the image, find a line to follow (by taking the average of left and right of the path), estimate the lidar points depth, - and choose to go to the closest point. - V1_traildetection assumes that the path is pointing frontward and has only one path in front. -''' -class trailDetector(Node): - def __init__(self, model, device): - super().__init__('trail_detector') - # define trail subscription - self.trail_publisher = self.create_publisher( - PoseStamped, - 'trail_location', - 10) - - # define camera subscription - self.camera_subscription = self.create_subscription( - Image, - 'camera', - self.camera_callback, - 10) - self.camera_subscription - - # define lidar subscription - self.lidar_subscription = self.create_subscription( - PointCloud2, - 'velodyne_points', - self.lidar_callback, - 10) - self.lidar_subscription - - self.bridge = CvBridge() - - # load model and device - self.model = model - self.device = device - - # buffers to hold msgs for sync - self.buffer_size = 50 # 30 is a random choice, to be discussed - self.lidar_buffer = [] - self.camera_buffer = [] - self.only_camera_mode = False - - def camera_callback(self, msg): - if len(self.camera_buffer) >= self.buffer_size: - self.camera_buffer.pop(0) - self.camera_buffer.append(msg) - self.sync() - - def lidar_callback(self, msg): - if len(self.lidar_buffer) >= self.buffer_size: - self.lidar_buffer.pop(0) - self.lidar_buffer.append(msg) - self.sync() - - def sync(self): - # if one of the sensor has no msg, then return - if self.only_camera_mode and self.camera_buffer: - camera_msg = self.camera_buffer[0] - self.camera_buffer.pop(0) - self.only_camera_callback(camera_msg) - elif not self.lidar_buffer or not self.camera_buffer: - return - - while self.lidar_buffer and self.camera_buffer: - lidar_msg = self.lidar_buffer[0] - camera_msg = self.camera_buffer[0] - - time_tolerance = 20000000 # nanosec, random choice to be discussed - time_difference = abs(lidar_msg.header.stamp.nanosec - camera_msg.header.stamp.nanosec) - # print(time_difference) - - # compare the time difference, if it's within tolerance, then pass to the trail_callback, otherwise discard the older msg - if time_difference <= time_tolerance: - self.lidar_buffer.pop(0) - self.camera_buffer.pop(0) - self.get_logger().info("msgs received!") - self.trail_callback(lidar_msg, camera_msg) - elif lidar_msg.header.stamp.nanosec > camera_msg.header.stamp.nanosec: - self.camera_buffer.pop(0) - else: - self.lidar_buffer.pop(0) - - def trail_callback(self, lidar_msg, camera_msg): - # process lidar msg - 
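    # NOTE (editor's sketch): pc2.read_points yields one (x, y, z) tuple per
    # point, so building a Python list before np.array() costs Python-level
    # work on every scan. Recent sensor_msgs_py releases also ship a
    # vectorized reader (availability depends on the ROS 2 distro), e.g.:
    #   points = pc2.read_points_numpy(
    #       lidar_msg, field_names=("x", "y", "z"), skip_nans=True)
    # The list-based code below is the portable fallback.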
point_gen = pc2.read_points( - lidar_msg, field_names=( - "x", "y", "z"), skip_nans=True) - points = [[x, y, z] for x, y, z in point_gen] - points = np.array(points) - points2d = convert_to_camera_frame(points) - - # process camera msg - cv_image = self.bridge.imgmsg_to_cv2(camera_msg, desired_encoding='passthrough') - - # increase brightness - M = np.ones(cv_image.shape, dtype = 'uint8') * 50 # increase brightness by 50 - cv_image = cv2.add(cv_image, M) - - # # image visualizer - # cv2.imshow('raw image',cv_image) - # cv2.waitKey(25) - - route = find_route(self.model, self.device, cv_image) - - # # visualize route purely - # cv2.imshow("white_route",route) - # cv2.waitKey(25) - - route_indices = list(zip(*np.nonzero(route))) - if not route_indices: - print("No centerline found!") - return - - # #filter points that have no lidar points near it - # filtered_route_indices = [] - # for index in route_indices: - # point = [] - # point.append(index[0]) - # point.append(index[1]) - # point.append(estimate_depth(index[0], index[1], points2d)) - # if point[2] == -1: - # continue - # else: - # filtered_route_indices.append(point) - - # find the corresponding lidar points using the center line pixels - filtered_3dPoints = [] - # for index in filtered_route_indices: - for index in route_indices: - point = [] - # point.append(index[0]) - # point.append(index[1]) - # point.append(index[2]) - - point.append(index[0]) - point.append(index[1]) - point.append(estimate_depth(index[0], index[1], points2d)) - - point_3d = convert_to_lidar_frame(point) - filtered_3dPoints.append(point_3d) - - filtered_3dPoints = np.array(filtered_3dPoints) - # find the nearest 3d point and set that as goal - distances_sq = filtered_3dPoints[:,0]**2 + filtered_3dPoints[:,1]**2 + filtered_3dPoints[:,2]**2 - smallest_index = np.argmin(distances_sq) - # print(math.sqrt(distances_sq[smallest_index])) - # smallest_index = np.argmin(filtered_3dPoints[:,2]) - - # visualize after-pocessed image - visualize_cv_image = cv_image - print(f"{visualize_cv_image.shape[0]}") - circle_x = route_indices[smallest_index][0] - circle_y = route_indices[smallest_index][1] - - # # visualize the lidar points in image - # uv_x, uv_y, uv_z = points2d - # for index_lidar in range(points2d.shape[1]): - # # print(f"{int(uv_x[index_lidar])},{int(uv_y[index_lidar])}") - # cv2.circle(visualize_cv_image, (int(uv_x[index_lidar]), int(image_height - uv_y[index_lidar])), radius=5, color=(255, 0, 0), thickness=-1) - - # visualize center line in image - for index_circle in range(len(route_indices)): - if index_circle == smallest_index: - continue - else: - red_circle_x = route_indices[index_circle][0] - red_circle_y = route_indices[index_circle][1] - cv2.circle(visualize_cv_image, (red_circle_y, red_circle_x), radius=5, color=(0, 0, 255), thickness=-1) - - # visualize the chosen point in image - cv2.circle(visualize_cv_image, (circle_y, circle_x), radius=7, color=(0, 255, 0), thickness=-1) - cv2.circle(visualize_cv_image, (0, 0), radius=12, color=(0, 255, 0), thickness=-1) - - cv2.imshow('circled image',visualize_cv_image) - cv2.waitKey(25) - - # publsih message - trail_location_msg = PoseStamped() - trail_location_msg.header.stamp = lidar_msg.header.stamp - trail_location_msg.header.frame_id = "velodyne" - #position - trail_location_msg.pose.position.x = filtered_3dPoints[smallest_index][0] - trail_location_msg.pose.position.y = filtered_3dPoints[smallest_index][1] - trail_location_msg.pose.position.z = filtered_3dPoints[smallest_index][2] - #orientation - yaw = 
math.atan2(filtered_3dPoints[smallest_index][1], filtered_3dPoints[smallest_index][0]) - trail_location_msg.pose.orientation.x = 0.0 - trail_location_msg.pose.orientation.y = 0.0 - trail_location_msg.pose.orientation.z = math.sin(yaw/2) - trail_location_msg.pose.orientation.w = math.cos(yaw / 2) - self.get_logger().info("location published!") - self.trail_publisher.publish(trail_location_msg) - - def only_camera_callback(self, camera_msg): - # process camera msg - cv_image = self.bridge.imgmsg_to_cv2(camera_msg, desired_encoding='passthrough') - # cv_image = cv2.resize(cv_image,(480,480)) - - # cv_image = cv2.fastNlMeansDenoisingColored(cv_image,None,5,5,5,11) - - # # histogram equalization method 1 - # cv_image = cv2.cvtColor(cv_image, cv2.COLOR_BGR2YCrCb) - # cv_image[:,:,0] = cv2.equalizeHist(cv_image[:,:,0]) - # cv_image = cv2.cvtColor(cv_image, cv2.COLOR_YCrCb2BGR) - - # histogram equalization method 2 - # cv_image = equalize_hist_rgb(cv_image) - - # # histogram equalization method 3 CLAHE - # image_bw = cv2.cvtColor(cv_image, cv2.COLOR_BGR2GRAY) - # clahe = cv2.createCLAHE(clipLimit=5) - # cv_image = clahe.apply(image_bw) + 30 - - # # histogram equalization method 4 CLAHE - # cla = cv2.createCLAHE(clipLimit=4.0) - # H, S, V = cv2.split(cv2.cvtColor(cv_image, cv2.COLOR_BGR2HSV)) - # eq_V = cla.apply(V) - # cv_image = cv2.cvtColor(cv2.merge([H, S, eq_V]), cv2.COLOR_HSV2BGR) - - # # increase brightness - # M = np.ones(cv_image.shape, dtype = 'uint8') * 50 # increase brightness by 50 - # cv_image = cv2.add(cv_image, M) - - # # image visualizer - # cv2.imshow('raw image',cv_image) - # cv2.waitKey(25) - - route, pred = find_route(self.model, self.device, cv_image) - - # # visualize route purely - # cv2.imshow("white_route",route) - # cv2.waitKey(25) - - # increase the brightness of image where is predicted as road - sign = cv2.cvtColor(pred, cv2.COLOR_GRAY2BGR) - cv_image = cv2.add(cv_image, sign) - - route_indices = list(zip(*np.nonzero(route))) - if not route_indices: - print("No centerline found!") - return - - # for index_circle in range(len(route_indices)): - # red_circle_x = route_indices[index_circle][0] - # red_circle_y = route_indices[index_circle][1] - # cv2.circle(cv_image, (red_circle_y, red_circle_x), radius=5, color=(0, 0, 255), thickness=-1) - - cv2.imshow('circled image',cv_image) - cv2.waitKey(25) - - - - -def main(args=None): - print(torch.cuda.is_available()) - device = torch.device("cuda" if torch.cuda.is_available() else "cpu") - model = load_model(device) - - rclpy.init(args=args) - trailDetectorNode = trailDetector(model, device) - rclpy.spin(trailDetectorNode) - - trailDetectorNode.destroy_node() - rclpy.shutdown() - cv2.destroyAllWindows() - -if __name__ == '__main__': - main() From de2828e5b81e793d2650b9612555599fd1793edd Mon Sep 17 00:00:00 2001 From: Frank_ZhaodongJiang <69261064+FrankZhaodong@users.noreply.github.com> Date: Thu, 7 Sep 2023 20:17:10 -0400 Subject: [PATCH 12/22] Delete trail_detection_node/trail_detection_node/vgg.py --- .../trail_detection_node/vgg.py | 191 ------------------ 1 file changed, 191 deletions(-) delete mode 100644 trail_detection_node/trail_detection_node/vgg.py diff --git a/trail_detection_node/trail_detection_node/vgg.py b/trail_detection_node/trail_detection_node/vgg.py deleted file mode 100644 index fe5c163c..00000000 --- a/trail_detection_node/trail_detection_node/vgg.py +++ /dev/null @@ -1,191 +0,0 @@ -import torch -import torch.nn as nn -import torch.utils.model_zoo as model_zoo - -__all__ = [ - 'VGG', 'vgg11', 'vgg11_bn', 
'vgg13', 'vgg13_bn', 'vgg16', 'vgg16_bn', - 'vgg19_bn', 'vgg19', -] - -model_urls = { - 'vgg11': 'https://download.pytorch.org/models/vgg11-bbd30ac9.pth', - 'vgg13': 'https://download.pytorch.org/models/vgg13-c768596a.pth', - 'vgg16': 'https://download.pytorch.org/models/vgg16-397923af.pth', - 'vgg19': 'https://download.pytorch.org/models/vgg19-dcbb9e9d.pth', - 'vgg11_bn': 'https://download.pytorch.org/models/vgg11_bn-6002323d.pth', - 'vgg13_bn': 'https://download.pytorch.org/models/vgg13_bn-abd245e5.pth', - 'vgg16_bn': 'https://download.pytorch.org/models/vgg16_bn-6c64b313.pth', - 'vgg19_bn': 'https://download.pytorch.org/models/vgg19_bn-c79401a0.pth', -} - - -class VGG(nn.Module): - def __init__(self, features, num_classes=1000, init_weights=True): - super(VGG, self).__init__() - self.features = features - self.avgpool = nn.AdaptiveAvgPool2d((7, 7)) - self.classifier = nn.Sequential( - nn.Linear(512 * 7 * 7, 4096), - nn.ReLU(True), - nn.Dropout(), - nn.Linear(4096, 4096), - nn.ReLU(True), - nn.Dropout(), - nn.Linear(4096, num_classes) - ) - if init_weights: - self._initialize_weights() - - def forward(self, x): - x = self.features(x) - x = self.avgpool(x) - x = x.view(x.size(0), -1) - x = self.classifier(x) - return x - - def _initialize_weights(self): - for m in self.modules(): - if isinstance(m, nn.Conv2d): - nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') - if m.bias is not None: - nn.init.constant_(m.bias, 0) - elif isinstance(m, nn.BatchNorm2d): - nn.init.constant_(m.weight, 1) - nn.init.constant_(m.bias, 0) - elif isinstance(m, nn.Linear): - nn.init.normal_(m.weight, 0, 0.01) - nn.init.constant_(m.bias, 0) - - -def make_layers(cfg, batch_norm=False): - layers = [] - in_channels = 3 - for v in cfg: - if v == 'M': - layers += [nn.MaxPool2d(kernel_size=2, stride=2)] - else: - conv2d = nn.Conv2d(in_channels, v, kernel_size=3, padding=1) - if batch_norm: - layers += (conv2d, nn.BatchNorm2d(v), nn.ReLU(inplace=True)) - else: - layers += [conv2d, nn.ReLU(inplace=True)] - in_channels = v - return nn.Sequential(*layers) - - -cfg = { - 'A': [64, 'M', 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'], - 'B': [64, 64, 'M', 128, 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'], - 'D': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512, 'M', 512, 512, 512, 'M'], - 'E': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 256, 'M', 512, 512, 512, 512, 'M', 512, 512, 512, 512, 'M'], -} - - -def vgg11(pretrained=False, **kwargs): - """VGG 11-layer model (configuration "A") - Args: - pretrained (bool): If True, returns a model pre-trained on ImageNet - """ - if pretrained: - kwargs['init_weights'] = False - model = VGG(make_layers(cfg['A']), **kwargs) - if pretrained: - model.load_state_dict(model_zoo.load_url(model_urls['vgg11'])) - return model - - -def vgg11_bn(pretrained=False, **kwargs): - """VGG 11-layer model (configuration "A") with batch normalization - Args: - pretrained (bool): If True, returns a model pre-trained on ImageNet - """ - if pretrained: - kwargs['init_weights'] = False - model = VGG(make_layers(cfg['A'], batch_norm=True), **kwargs) - if pretrained: - model.load_state_dict(model_zoo.load_url(model_urls['vgg11_bn'])) - return model - - -def vgg13(pretrained=False, **kwargs): - """VGG 13-layer model (configuration "B") - Args: - pretrained (bool): If True, returns a model pre-trained on ImageNet - """ - if pretrained: - kwargs['init_weights'] = False - model = VGG(make_layers(cfg['B']), **kwargs) - if pretrained: - 
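        # NOTE (editor's sketch): model_zoo.load_url downloads the checkpoint
        # once and caches it on disk (under $TORCH_HOME, typically
        # ~/.cache/torch), so later constructions reuse the cached file;
        # init_weights is disabled above because the random initialization
        # would be overwritten by these weights anyway.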
model.load_state_dict(model_zoo.load_url(model_urls['vgg13'])) - return model - - -def vgg13_bn(pretrained=False, **kwargs): - """VGG 13-layer model (configuration "B") with batch normalization - Args: - pretrained (bool): If True, returns a model pre-trained on ImageNet - """ - if pretrained: - kwargs['init_weights'] = False - model = VGG(make_layers(cfg['B'], batch_norm=True), **kwargs) - if pretrained: - model.load_state_dict(model_zoo.load_url(model_urls['vgg13_bn'])) - return model - - -def vgg16(pretrained=False, **kwargs): - """VGG 16-layer model (configuration "D") - Args: - pretrained (bool): If True, returns a model pre-trained on ImageNet - """ - if pretrained: - kwargs['init_weights'] = False - model = VGG(make_layers(cfg['D']), **kwargs) - if pretrained: - model.load_state_dict(model_zoo.load_url(model_urls['vgg16'])) - return model - - -def vgg16_bn(pretrained=False, **kwargs): - """VGG 16-layer model (configuration "D") with batch normalization - Args: - pretrained (bool): If True, returns a model pre-trained on ImageNet - """ - if pretrained: - kwargs['init_weights'] = False - model = VGG(make_layers(cfg['D'], batch_norm=True), **kwargs) - if pretrained: - model.load_state_dict(model_zoo.load_url(model_urls['vgg16_bn'])) - return model - - -def vgg19(pretrained=False, **kwargs): - """VGG 19-layer model (configuration "E") - Args: - pretrained (bool): If True, returns a model pre-trained on ImageNet - """ - if pretrained: - kwargs['init_weights'] = False - model = VGG(make_layers(cfg['E']), **kwargs) - if pretrained: - model.load_state_dict(model_zoo.load_url(model_urls['vgg19'])) - return model - - -def vgg19_bn(pretrained=False, **kwargs): - """VGG 19-layer model (configuration 'E') with batch normalization - Args: - pretrained (bool): If True, returns a model pre-trained on ImageNet - """ - if pretrained: - kwargs['init_weights'] = False - model = VGG(make_layers(cfg['E'], batch_norm=True), **kwargs) - if pretrained: - model.load_state_dict(model_zoo.load_url(model_urls['vgg19_bn'])) - return model - - -if __name__ == '__main__': - img = torch.randn((4, 3, 480, 480)) - model = vgg16(pretrained=False) - out = model(img) From 7e21927f85bfab9068047782c082ef4dcf61fad3 Mon Sep 17 00:00:00 2001 From: Frank_ZhaodongJiang <69261064+FrankZhaodong@users.noreply.github.com> Date: Thu, 7 Sep 2023 20:17:46 -0400 Subject: [PATCH 13/22] merged all the model loading part into one file --- .../trail_detection_node/model_loader.py | 350 ++++++++++++++---- 1 file changed, 285 insertions(+), 65 deletions(-) diff --git a/trail_detection_node/trail_detection_node/model_loader.py b/trail_detection_node/trail_detection_node/model_loader.py index 6c8e2399..d8934ae3 100644 --- a/trail_detection_node/trail_detection_node/model_loader.py +++ b/trail_detection_node/trail_detection_node/model_loader.py @@ -1,90 +1,310 @@ import torch.nn as nn import torch.nn.functional as F -from .vgg import vgg16 import torch -from .segbase import SegBaseModel - -class FCN8s(nn.Module): - def __init__(self, nclass, backbone='vgg16', aux=False, pretrained_base=True, norm_layer=nn.BatchNorm2d, **kwargs): - super(FCN8s, self).__init__() - self.aux = aux - if backbone == 'vgg16': - self.pretrained = vgg16(pretrained=pretrained_base).features - else: - raise RuntimeError('unknown backbone: {}'.format(backbone)) - self.pool3 = nn.Sequential(*self.pretrained[:17]) - self.pool4 = nn.Sequential(*self.pretrained[17:24]) - self.pool5 = nn.Sequential(*self.pretrained[24:]) - self.head = _FCNHead(512, nclass, norm_layer) - 
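        # NOTE (editor's sketch): score_pool3/score_pool4 below are the 1x1
        # convolutions that score the pool3 (256-ch) and pool4 (512-ch)
        # feature maps; forward() fuses them with the upsampled head output,
        # which is the skip-connection scheme that gives FCN-8s its finer
        # spatial detail (Long et al., 2015).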
self.score_pool3 = nn.Conv2d(256, nclass, 1) - self.score_pool4 = nn.Conv2d(512, nclass, 1) - if aux: - self.auxlayer = _FCNHead(512, nclass, norm_layer) - - self.__setattr__('exclusive', - ['head', 'score_pool3', 'score_pool4', 'auxlayer'] if aux else ['head', 'score_pool3', - 'score_pool4']) +import os + +# all the code below are copied from https://github.com/Tramac/awesome-semantic-segmentation-pytorch + +class BasicBlockV1b(nn.Module): + expansion = 1 + + def __init__(self, inplanes, planes, stride=1, dilation=1, downsample=None, + previous_dilation=1, norm_layer=nn.BatchNorm2d): + super(BasicBlockV1b, self).__init__() + self.conv1 = nn.Conv2d(inplanes, planes, 3, stride, + dilation, dilation, bias=False) + self.bn1 = norm_layer(planes) + self.relu = nn.ReLU(True) + self.conv2 = nn.Conv2d(planes, planes, 3, 1, previous_dilation, + dilation=previous_dilation, bias=False) + self.bn2 = norm_layer(planes) + self.downsample = downsample + self.stride = stride def forward(self, x): - pool3 = self.pool3(x) - pool4 = self.pool4(pool3) - pool5 = self.pool5(pool4) + identity = x - outputs = [] - score_fr = self.head(pool5) + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) - score_pool4 = self.score_pool4(pool4) - score_pool3 = self.score_pool3(pool3) + out = self.conv2(out) + out = self.bn2(out) - upscore2 = F.interpolate(score_fr, score_pool4.size()[2:], mode='bilinear', align_corners=True) - fuse_pool4 = upscore2 + score_pool4 + if self.downsample is not None: + identity = self.downsample(x) - upscore_pool4 = F.interpolate(fuse_pool4, score_pool3.size()[2:], mode='bilinear', align_corners=True) - fuse_pool3 = upscore_pool4 + score_pool3 + out += identity + out = self.relu(out) - out = F.interpolate(fuse_pool3, x.size()[2:], mode='bilinear', align_corners=True) - outputs.append(out) + return out - if self.aux: - auxout = self.auxlayer(pool5) - auxout = F.interpolate(auxout, x.size()[2:], mode='bilinear', align_corners=True) - outputs.append(auxout) - return tuple(outputs) +class BottleneckV1b(nn.Module): + expansion = 4 -class FCN32s(nn.Module): - """There are some difference from original fcn""" + def __init__(self, inplanes, planes, stride=1, dilation=1, downsample=None, + previous_dilation=1, norm_layer=nn.BatchNorm2d): + super(BottleneckV1b, self).__init__() + self.conv1 = nn.Conv2d(inplanes, planes, 1, bias=False) + self.bn1 = norm_layer(planes) + self.conv2 = nn.Conv2d(planes, planes, 3, stride, + dilation, dilation, bias=False) + self.bn2 = norm_layer(planes) + self.conv3 = nn.Conv2d(planes, planes * self.expansion, 1, bias=False) + self.bn3 = norm_layer(planes * self.expansion) + self.relu = nn.ReLU(True) + self.downsample = downsample + self.stride = stride - def __init__(self, nclass, backbone='vgg16', aux=False, pretrained_base=True, - norm_layer=nn.BatchNorm2d, **kwargs): - super(FCN32s, self).__init__() - self.aux = aux - if backbone == 'vgg16': - self.pretrained = vgg16(pretrained=pretrained_base).features + def forward(self, x): + identity = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + out = self.relu(out) + + out = self.conv3(out) + out = self.bn3(out) + + if self.downsample is not None: + identity = self.downsample(x) + + out += identity + out = self.relu(out) + + return out + +class ResNetV1b(nn.Module): + + def __init__(self, block, layers, num_classes=1000, dilated=True, deep_stem=False, + zero_init_residual=False, norm_layer=nn.BatchNorm2d): + self.inplanes = 128 if deep_stem else 
64 + super(ResNetV1b, self).__init__() + if deep_stem: + self.conv1 = nn.Sequential( + nn.Conv2d(3, 64, 3, 2, 1, bias=False), + norm_layer(64), + nn.ReLU(True), + nn.Conv2d(64, 64, 3, 1, 1, bias=False), + norm_layer(64), + nn.ReLU(True), + nn.Conv2d(64, 128, 3, 1, 1, bias=False) + ) else: - raise RuntimeError('unknown backbone: {}'.format(backbone)) - self.head = _FCNHead(512, nclass, norm_layer) - if aux: - self.auxlayer = _FCNHead(512, nclass, norm_layer) + self.conv1 = nn.Conv2d(3, 64, 7, 2, 3, bias=False) + self.bn1 = norm_layer(self.inplanes) + self.relu = nn.ReLU(True) + self.maxpool = nn.MaxPool2d(3, 2, 1) + self.layer1 = self._make_layer(block, 64, layers[0], norm_layer=norm_layer) + self.layer2 = self._make_layer(block, 128, layers[1], stride=2, norm_layer=norm_layer) + if dilated: + self.layer3 = self._make_layer(block, 256, layers[2], stride=1, dilation=2, norm_layer=norm_layer) + self.layer4 = self._make_layer(block, 512, layers[3], stride=1, dilation=4, norm_layer=norm_layer) + else: + self.layer3 = self._make_layer(block, 256, layers[2], stride=2, norm_layer=norm_layer) + self.layer4 = self._make_layer(block, 512, layers[3], stride=2, norm_layer=norm_layer) + self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) + self.fc = nn.Linear(512 * block.expansion, num_classes) + + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + elif isinstance(m, nn.BatchNorm2d): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + + if zero_init_residual: + for m in self.modules(): + if isinstance(m, BottleneckV1b): + nn.init.constant_(m.bn3.weight, 0) + elif isinstance(m, BasicBlockV1b): + nn.init.constant_(m.bn2.weight, 0) + + def _make_layer(self, block, planes, blocks, stride=1, dilation=1, norm_layer=nn.BatchNorm2d): + downsample = None + if stride != 1 or self.inplanes != planes * block.expansion: + downsample = nn.Sequential( + nn.Conv2d(self.inplanes, planes * block.expansion, 1, stride, bias=False), + norm_layer(planes * block.expansion), + ) + + layers = [] + if dilation in (1, 2): + layers.append(block(self.inplanes, planes, stride, dilation=1, downsample=downsample, + previous_dilation=dilation, norm_layer=norm_layer)) + elif dilation == 4: + layers.append(block(self.inplanes, planes, stride, dilation=2, downsample=downsample, + previous_dilation=dilation, norm_layer=norm_layer)) + else: + raise RuntimeError("=> unknown dilation size: {}".format(dilation)) + self.inplanes = planes * block.expansion + for _ in range(1, blocks): + layers.append(block(self.inplanes, planes, dilation=dilation, + previous_dilation=dilation, norm_layer=norm_layer)) - self.__setattr__('exclusive', ['head', 'auxlayer'] if aux else ['head']) + return nn.Sequential(*layers) def forward(self, x): - size = x.size()[2:] - pool5 = self.pretrained(x) + x = self.conv1(x) + x = self.bn1(x) + x = self.relu(x) + x = self.maxpool(x) + + x = self.layer1(x) + x = self.layer2(x) + x = self.layer3(x) + x = self.layer4(x) + + x = self.avgpool(x) + x = x.view(x.size(0), -1) + x = self.fc(x) + + return x + +_model_sha1 = {name: checksum for checksum, name in [ + ('25c4b50959ef024fcc050213a06b614899f94b3d', 'resnet50'), + ('2a57e44de9c853fa015b172309a1ee7e2d0e4e2a', 'resnet101'), + ('0d43d698c66aceaa2bc0309f55efdd7ff4b143af', 'resnet152'), +]} + +def short_hash(name): + if name not in _model_sha1: + raise ValueError('Pretrained model for {name} is not available.'.format(name=name)) + return _model_sha1[name][:8] + +def get_resnet_file(name, 
root='~/.torch/models'): + file_name = '{name}-{short_hash}'.format(name=name, short_hash=short_hash(name)) + root = os.path.expanduser(root) + + file_path = os.path.join(root, file_name + '.pth') + sha1_hash = _model_sha1[name] + if os.path.exists(file_path): + return file_path + + else: + print('Model file {} is not found.'.format(file_path)) + +def resnet50_v1s(pretrained=False, root='~/.torch/models', **kwargs): + model = ResNetV1b(BottleneckV1b, [3, 4, 6, 3], deep_stem=True, **kwargs) + if pretrained: + model.load_state_dict(torch.load(get_resnet_file('resnet50', root=root)), strict=False) + return model + +class SeparableConv2d(nn.Module): + def __init__(self, inplanes, planes, kernel_size=3, stride=1, padding=1, + dilation=1, bias=False, norm_layer=nn.BatchNorm2d): + super(SeparableConv2d, self).__init__() + self.conv = nn.Conv2d(inplanes, inplanes, kernel_size, stride, padding, dilation, groups=inplanes, bias=bias) + self.bn = norm_layer(inplanes) + self.pointwise = nn.Conv2d(inplanes, planes, 1, bias=bias) - outputs = [] - out = self.head(pool5) - out = F.interpolate(out, size, mode='bilinear', align_corners=True) - outputs.append(out) + def forward(self, x): + x = self.conv(x) + x = self.bn(x) + x = self.pointwise(x) + return x + +# copy from: https://github.com/wuhuikai/FastFCN/blob/master/encoding/nn/customize.py +class JPU(nn.Module): + def __init__(self, in_channels, width=512, norm_layer=nn.BatchNorm2d, **kwargs): + super(JPU, self).__init__() + + self.conv5 = nn.Sequential( + nn.Conv2d(in_channels[-1], width, 3, padding=1, bias=False), + norm_layer(width), + nn.ReLU(True)) + self.conv4 = nn.Sequential( + nn.Conv2d(in_channels[-2], width, 3, padding=1, bias=False), + norm_layer(width), + nn.ReLU(True)) + self.conv3 = nn.Sequential( + nn.Conv2d(in_channels[-3], width, 3, padding=1, bias=False), + norm_layer(width), + nn.ReLU(True)) + + self.dilation1 = nn.Sequential( + SeparableConv2d(3 * width, width, 3, padding=1, dilation=1, bias=False), + norm_layer(width), + nn.ReLU(True)) + self.dilation2 = nn.Sequential( + SeparableConv2d(3 * width, width, 3, padding=2, dilation=2, bias=False), + norm_layer(width), + nn.ReLU(True)) + self.dilation3 = nn.Sequential( + SeparableConv2d(3 * width, width, 3, padding=4, dilation=4, bias=False), + norm_layer(width), + nn.ReLU(True)) + self.dilation4 = nn.Sequential( + SeparableConv2d(3 * width, width, 3, padding=8, dilation=8, bias=False), + norm_layer(width), + nn.ReLU(True)) + + def forward(self, *inputs): + feats = [self.conv5(inputs[-1]), self.conv4(inputs[-2]), self.conv3(inputs[-3])] + size = feats[-1].size()[2:] + feats[-2] = F.interpolate(feats[-2], size, mode='bilinear', align_corners=True) + feats[-3] = F.interpolate(feats[-3], size, mode='bilinear', align_corners=True) + feat = torch.cat(feats, dim=1) + feat = torch.cat([self.dilation1(feat), self.dilation2(feat), self.dilation3(feat), self.dilation4(feat)], + dim=1) + + return inputs[0], inputs[1], inputs[2], feat + + + +class SegBaseModel(nn.Module): + r"""Base Model for Semantic Segmentation - if self.aux: - auxout = self.auxlayer(pool5) - auxout = F.interpolate(auxout, size, mode='bilinear', align_corners=True) - outputs.append(auxout) + Parameters + ---------- + backbone : string + Pre-trained dilated backbone network type (default:'resnet50'; 'resnet50', + 'resnet101' or 'resnet152'). 
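    Note (editor's clarification): in this merged loader only 'resnet50' is
    actually constructed; any other backbone name raises the RuntimeError in
    __init__ below.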
+ """ - return tuple(outputs) + def __init__(self, nclass, aux, backbone='resnet50', jpu=False, pretrained_base=True, **kwargs): + super(SegBaseModel, self).__init__() + dilated = False if jpu else True + self.aux = aux + self.nclass = nclass + if backbone == 'resnet50': + self.pretrained = resnet50_v1s(pretrained=pretrained_base, dilated=dilated, **kwargs) + else: + raise RuntimeError('unknown backbone: {}'.format(backbone)) + + self.jpu = JPU([512, 1024, 2048], width=512, **kwargs) if jpu else None + + def base_forward(self, x): + """forwarding pre-trained network""" + x = self.pretrained.conv1(x) + x = self.pretrained.bn1(x) + x = self.pretrained.relu(x) + x = self.pretrained.maxpool(x) + c1 = self.pretrained.layer1(x) + c2 = self.pretrained.layer2(c1) + c3 = self.pretrained.layer3(c2) + c4 = self.pretrained.layer4(c3) + + if self.jpu: + return self.jpu(c1, c2, c3, c4) + else: + return c1, c2, c3, c4 + + def evaluate(self, x): + """evaluating network with inputs and targets""" + return self.forward(x)[0] + + def demo(self, x): + pred = self.forward(x) + if self.aux: + pred = pred[0] + return pred class _FCNHead(nn.Module): def __init__(self, in_channels, channels, norm_layer=nn.BatchNorm2d, **kwargs): From b27a0cc001b6f85c39bed5b0c9f073a3d4600fad Mon Sep 17 00:00:00 2001 From: Frank_ZhaodongJiang <69261064+FrankZhaodong@users.noreply.github.com> Date: Thu, 7 Sep 2023 20:19:43 -0400 Subject: [PATCH 14/22] deleted the fcn8s and fcn32s from the script --- .../trail_detection_node/v2_trailDetectionNode.py | 2 +- .../visualizer_v2_trailDetectionNode.py | 7 ++----- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/trail_detection_node/trail_detection_node/v2_trailDetectionNode.py b/trail_detection_node/trail_detection_node/v2_trailDetectionNode.py index d3f96edf..d70e89fc 100644 --- a/trail_detection_node/trail_detection_node/v2_trailDetectionNode.py +++ b/trail_detection_node/trail_detection_node/v2_trailDetectionNode.py @@ -1,5 +1,5 @@ import torch -from .model_loader import FCN8s, PSPNet +from .model_loader import PSPNet from PIL import Image as ImagePIL from torchvision import transforms import cv2 diff --git a/trail_detection_node/trail_detection_node/visualizer_v2_trailDetectionNode.py b/trail_detection_node/trail_detection_node/visualizer_v2_trailDetectionNode.py index da2f0696..7c2ceabc 100644 --- a/trail_detection_node/trail_detection_node/visualizer_v2_trailDetectionNode.py +++ b/trail_detection_node/trail_detection_node/visualizer_v2_trailDetectionNode.py @@ -1,5 +1,5 @@ import torch -from .model_loader import FCN8s, FCN32s, PSPNet +from .model_loader import PSPNet from PIL import Image as ImagePIL from torchvision import transforms import cv2 @@ -15,7 +15,6 @@ from cv_bridge import CvBridge import message_filters -from scipy.ndimage import grey_erosion ''' The transformation matrix as well as the coordinate conversion and depth estimation functions are copied from human_detection_node ''' @@ -131,8 +130,6 @@ def find_route(model, device, cv_image): pred[pred == 0] = 255 # only see the trail pred[pred == 1] = 0 pred = np.array(pred, dtype=np.uint8) - - pred = grey_erosion(pred, size=(4,4)) # # prediction visualize # cv2.imshow('prediction',pred) @@ -313,7 +310,7 @@ def trail_callback(self, camera_msg, lidar_msg): cv2.circle(visualize_cv_image, (red_circle_coloum, image_height - red_circle_row), radius=5, color=(0, 0, 255), thickness=-1) # visualize the chosen point in image - cv2.circle(visualize_cv_image, (circle_x, circle_y), radius=7, color=(255, 0, 255), 
thickness=-1) + cv2.circle(visualize_cv_image, (circle_x, circle_y), radius=7, color=(255, 255, 255), thickness=-1) cv2.imshow('circled image',visualize_cv_image) cv2.waitKey(25) From 6d7d9784260473a047f30c32d32257f977006d75 Mon Sep 17 00:00:00 2001 From: Frank_ZhaodongJiang <69261064+FrankZhaodong@users.noreply.github.com> Date: Mon, 11 Sep 2023 14:42:35 -0400 Subject: [PATCH 15/22] Delete trail_detection_node/trail_detection_node/v2_trailDetectionNode.py --- .../v2_trailDetectionNode.py | 256 ------------------ 1 file changed, 256 deletions(-) delete mode 100644 trail_detection_node/trail_detection_node/v2_trailDetectionNode.py diff --git a/trail_detection_node/trail_detection_node/v2_trailDetectionNode.py b/trail_detection_node/trail_detection_node/v2_trailDetectionNode.py deleted file mode 100644 index d70e89fc..00000000 --- a/trail_detection_node/trail_detection_node/v2_trailDetectionNode.py +++ /dev/null @@ -1,256 +0,0 @@ -import torch -from .model_loader import PSPNet -from PIL import Image as ImagePIL -from torchvision import transforms -import cv2 - -import rclpy -from rclpy.node import Node -from geometry_msgs.msg import PoseStamped -from sensor_msgs.msg import Image, PointCloud2 -import sensor_msgs_py.point_cloud2 as pc2 -import numpy as np -import math -from cv_bridge import CvBridge - -import message_filters - -''' - The transformation matrix as well as the coordinate conversion and depth estimation functions are copied from human_detection_node -''' -camera_transformation_k = np.array([ - [628.5359544,0,676.9575694], - [0,627.7249542,532.7206716], - [0,0,1]]) - -rotation_matrix = np.array([ - [-0.007495781893,-0.0006277316155,0.9999717092], - [-0.9999516401,-0.006361853422,-0.007499625104], - [0.006366381192,-0.9999795662,-0.0005800141927]]) - -rotation_matrix = rotation_matrix.T - -translation_vector = np.array([-0.06024059837, -0.08180891509, -0.3117851288]) -image_width=1280 -image_height=1024 - -def convert_to_lidar_frame(uv_coordinate): - """ - convert 2d camera coordinate + depth into 3d lidar frame - """ - point_cloud = np.empty( (3,) , dtype=float) - point_cloud[2] = uv_coordinate[2] - point_cloud[1] = ( image_height - uv_coordinate[1] )*point_cloud[2] - point_cloud[0] = uv_coordinate[0]*point_cloud[2] - - inverse_camera_transformation_k = np.linalg.inv(camera_transformation_k) - inverse_rotation_matrix = np.linalg.inv(rotation_matrix) - point_cloud = inverse_camera_transformation_k @ point_cloud - point_cloud = inverse_rotation_matrix @ (point_cloud-translation_vector) - return point_cloud - -def convert_to_camera_frame(point_cloud): - """ - convert 3d lidar data into 2d coordinate of the camera frame + depth - """ - length = point_cloud.shape[0] - translation = np.tile(translation_vector, (length, 1)).T - - point_cloud = point_cloud.T - point_cloud = rotation_matrix@point_cloud + translation - point_cloud = camera_transformation_k @ point_cloud - - uv_coordinate = np.empty_like(point_cloud) - - """ - uv = [x/z, y/z, z], and y is opposite so the minus imageheight - """ - uv_coordinate[0] = point_cloud[0] / point_cloud[2] - uv_coordinate[1] = image_height - point_cloud[1] / point_cloud[2] - uv_coordinate[2] = point_cloud[2] - - uv_depth = uv_coordinate[2, :] - filtered_uv_coordinate = uv_coordinate[:, uv_depth >= 0] - return filtered_uv_coordinate - -def estimate_depth(x, y, np_2d_array): - """ - estimate the depth by finding points closest to x,y from thhe 2d array - """ - # Calculate the distance between each point and the target coordinates (x, y) - distances_sq = 
(np_2d_array[0,:] - x) ** 2 + (np_2d_array[1,:] - y) ** 2 - - # Find the indices of the k nearest points - k = 5 # Number of nearest neighbors we want - closest_indices = np.argpartition(distances_sq, k)[:k] - pixel_distance_threshold = 2000 - - valid_indices = [idx for idx in closest_indices if distances_sq[idx]<=pixel_distance_threshold] - if len(valid_indices) == 0: - # lidar points disappears usually around 0.4m - distance_where_lidar_stops_working = -1 - return distance_where_lidar_stops_working - - filtered_indices = np.array(valid_indices) - # Get the depth value of the closest point - closest_depths = np_2d_array[2,filtered_indices] - - return np.mean(closest_depths) - -def load_model(device): - # model = FCN8s(nclass=6, backbone='vgg16', pretrained_base=True, pretrained=True) - model = PSPNet(nclass=2, backbone='resnet50', pretrained_base=True) - model_location = 'psp2/psp_resnet50_pascal_voc_best_model.pth' - model.load_state_dict(torch.load(f'src/trail_detection_node/trail_detection_node/model/{model_location}',map_location=torch.device('cuda:0'))) - model = model.to(device) - model.eval() - print('Finished loading model!') - - return model - -def find_route(model, device, cv_image): - PIL_image = ImagePIL.fromarray(cv2.cvtColor(cv_image, cv2.COLOR_BGR2RGB)) - transform = transforms.Compose([ - transforms.ToTensor(), - transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), - ]) - image = transform(PIL_image).unsqueeze(0).to(device) - with torch.no_grad(): - output = model(image) - # pred is prediction generated by the model, route is the variable for the center line - pred = torch.argmax(output[0], 1).squeeze(0).cpu().data.numpy() - pred[pred == 0] = 255 # only see the trail - pred[pred == 1] = 0 - pred = np.array(pred, dtype=np.uint8) - route = np.zeros_like(pred) - # calculate the center line by taking the average - row_num = 0 - for row in pred: - white_pixels = list(np.nonzero(row)[0]) - if white_pixels: - average = (white_pixels[0] + white_pixels[-1]) / 2 - route[row_num][round(average)] = 255 - row_num = row_num + 1 - return route - -''' - The traildetector node has two subscriptions(lidar and camera) and one publisher(trail position). After it receives msgs from both lidar and camera, - it detects the trail in the image and sends the corresponding lidar position as the trail location. - The msgs are synchornized before processing, using buffer and sync function. - To find the path, the node will process the image, find a line to follow (by taking the average of left and right of the path), estimate the lidar points depth, - and choose to go to the closest point. - V1_traildetection assumes that the path is pointing frontward and has only one path in front. 
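    (Editor's note: despite the v1 wording retained above, this v2 node replaces
    the hand-rolled buffer/sync scheme with message_filters.ApproximateTimeSynchronizer,
    as set up in __init__ below.)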
-''' -class trailDetector(Node): - def __init__(self, model, device): - super().__init__('trail_detector') - # define trail publisher - self.trail_publisher = self.create_publisher( - PoseStamped, - 'trail_location', - 10) - - # create subscribers - self.image_sub = message_filters.Subscriber(self, Image, 'camera') - self.lidar_sub = message_filters.Subscriber(self, PointCloud2, 'velodyne_points') - - self.bridge = CvBridge() - - # load model and device - self.model = model - self.device = device - - # create callback - queue_size = 30 - ts = message_filters.ApproximateTimeSynchronizer([self.image_sub, self.lidar_sub], queue_size, 0.5) - ts.registerCallback(self.trail_callback) - - def trail_callback(self, camera_msg, lidar_msg): - print("Message received!") - # process lidar msg - point_gen = pc2.read_points( - lidar_msg, field_names=( - "x", "y", "z"), skip_nans=True) - points = [[x, y, z] for x, y, z in point_gen] - points = np.array(points) - points2d = convert_to_camera_frame(points) - - # process camera msg - cv_image = self.bridge.imgmsg_to_cv2(camera_msg, desired_encoding='passthrough') - route = find_route(self.model, self.device, cv_image) - route_indices = list(zip(*np.nonzero(route))) - - if not route_indices: - print("No centerline found!") - return - - #filter points that have no lidar points near it - filtered_route_indices = [] - for index in route_indices: - point = [] - u = index[1] - v = image_height - index[0] - point.append(u) - point.append(v) - point.append(estimate_depth(u, v, points2d)) - if point[2] == -1: - continue - else: - filtered_route_indices.append(point) - - if not filtered_route_indices: - print("No usable centerline found!") - return - - # find the corresponding lidar points using the center line pixels - filtered_3dPoints = [] - for index in filtered_route_indices: - point = [] - point.append(index[0]) - point.append(index[1]) - point.append(index[2]) - point_3d = convert_to_lidar_frame(point) - filtered_3dPoints.append(point_3d) - - filtered_3dPoints = np.array(filtered_3dPoints) - # find the nearest 3d point and set that as goal - distances_sq = filtered_3dPoints[:,0]**2 + filtered_3dPoints[:,1]**2 + filtered_3dPoints[:,2]**2 - smallest_index = np.argmin(distances_sq) - - # publsih message - trail_location_msg = PoseStamped() - trail_location_msg.header.stamp = lidar_msg.header.stamp - trail_location_msg.header.frame_id = "velodyne" - # position - trail_location_msg.pose.position.x = filtered_3dPoints[smallest_index][0] - trail_location_msg.pose.position.y = filtered_3dPoints[smallest_index][1] - trail_location_msg.pose.position.z = filtered_3dPoints[smallest_index][2] - # orientation - yaw = math.atan2(filtered_3dPoints[smallest_index][1], filtered_3dPoints[smallest_index][0]) - trail_location_msg.pose.orientation.x = 0.0 - trail_location_msg.pose.orientation.y = 0.0 - trail_location_msg.pose.orientation.z = math.sin(yaw/2) - trail_location_msg.pose.orientation.w = math.cos(yaw / 2) - self.trail_publisher.publish(trail_location_msg) - - # logging - self.get_logger().info("location published!") - self.get_logger().info(f"Point: {filtered_3dPoints[smallest_index][0]}, {filtered_3dPoints[smallest_index][1]}, {filtered_3dPoints[smallest_index][2]}") - - - - -def main(args=None): - print(torch.cuda.is_available()) - device = torch.device("cuda" if torch.cuda.is_available() else "cpu") - model = load_model(device) - - rclpy.init(args=args) - trailDetectorNode = trailDetector(model, device) - rclpy.spin(trailDetectorNode) - - 
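    # NOTE (editor's sketch): wrapping spin in try/finally would guarantee
    # the cleanup below also runs on Ctrl-C, e.g.:
    #   try:
    #       rclpy.spin(trailDetectorNode)
    #   finally:
    #       trailDetectorNode.destroy_node()
    #       rclpy.shutdown()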
trailDetectorNode.destroy_node() - rclpy.shutdown() - -if __name__ == '__main__': - main() \ No newline at end of file From d52f6b4f5e12efb380535a27962d8a079ae2d20f Mon Sep 17 00:00:00 2001 From: Frank_ZhaodongJiang <69261064+FrankZhaodong@users.noreply.github.com> Date: Mon, 11 Sep 2023 14:42:47 -0400 Subject: [PATCH 16/22] Delete trail_detection_node/trail_detection_node/visualizer_v2_trailDetectionNode.py --- .../visualizer_v2_trailDetectionNode.py | 410 ------------------ 1 file changed, 410 deletions(-) delete mode 100644 trail_detection_node/trail_detection_node/visualizer_v2_trailDetectionNode.py diff --git a/trail_detection_node/trail_detection_node/visualizer_v2_trailDetectionNode.py b/trail_detection_node/trail_detection_node/visualizer_v2_trailDetectionNode.py deleted file mode 100644 index 7c2ceabc..00000000 --- a/trail_detection_node/trail_detection_node/visualizer_v2_trailDetectionNode.py +++ /dev/null @@ -1,410 +0,0 @@ -import torch -from .model_loader import PSPNet -from PIL import Image as ImagePIL -from torchvision import transforms -import cv2 -import os - -import rclpy -from rclpy.node import Node -from geometry_msgs.msg import PoseStamped -from sensor_msgs.msg import Image, PointCloud2 -import sensor_msgs_py.point_cloud2 as pc2 -import numpy as np -import math -from cv_bridge import CvBridge - -import message_filters -''' - The transformation matrix as well as the coordinate conversion and depth estimation functions are copied from human_detection_node -''' -camera_transformation_k = np.array([ - [628.5359544,0,676.9575694], - [0,627.7249542,532.7206716], - [0,0,1]]) - -rotation_matrix = np.array([ - [-0.007495781893,-0.0006277316155,0.9999717092], - [-0.9999516401,-0.006361853422,-0.007499625104], - [0.006366381192,-0.9999795662,-0.0005800141927]]) - -rotation_matrix = rotation_matrix.T - -translation_vector = np.array([-0.06024059837, -0.08180891509, -0.3117851288]) -image_width=1280 -image_height=1024 - -def convert_to_lidar_frame(uv_coordinate): - """ - convert 2d camera coordinate + depth into 3d lidar frame - """ - point_cloud = np.empty( (3,) , dtype=float) - point_cloud[2] = uv_coordinate[2] - point_cloud[1] = ( image_height - uv_coordinate[1] )*point_cloud[2] - point_cloud[0] = uv_coordinate[0]*point_cloud[2] - - inverse_camera_transformation_k = np.linalg.inv(camera_transformation_k) - inverse_rotation_matrix = np.linalg.inv(rotation_matrix) - point_cloud = inverse_camera_transformation_k @ point_cloud - point_cloud = inverse_rotation_matrix @ (point_cloud-translation_vector) - return point_cloud - -def convert_to_camera_frame(point_cloud): - """ - convert 3d lidar data into 2d coordinate of the camera frame + depth - """ - length = point_cloud.shape[0] - translation = np.tile(translation_vector, (length, 1)).T - - point_cloud = point_cloud.T - point_cloud = rotation_matrix@point_cloud + translation - point_cloud = camera_transformation_k @ point_cloud - - uv_coordinate = np.empty_like(point_cloud) - - """ - uv = [x/z, y/z, z], and y is opposite so the minus imageheight - """ - uv_coordinate[0] = point_cloud[0] / point_cloud[2] - uv_coordinate[1] = image_height - point_cloud[1] / point_cloud[2] - uv_coordinate[2] = point_cloud[2] - - uv_depth = uv_coordinate[2, :] - filtered_uv_coordinate = uv_coordinate[:, uv_depth >= 0] - return filtered_uv_coordinate - -def estimate_depth(x, y, np_2d_array): - """ - estimate the depth by finding points closest to x,y from thhe 2d array - """ - # Calculate the distance between each point and the target coordinates (x, y) - 
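    # NOTE (editor's sketch): np.argpartition(distances_sq, k) below raises
    # ValueError when the projected cloud holds k or fewer points; a guard
    # such as k = min(5, distances_sq.size - 1), plus an early return for an
    # empty cloud, would make sparse scans safe. pixel_distance_threshold is
    # in squared pixels, so 2000 corresponds to a ~45 px search radius.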
distances_sq = (np_2d_array[0,:] - x) ** 2 + (np_2d_array[1,:] - y) ** 2 - - # Find the indices of the k nearest points - k = 5 # Number of nearest neighbors we want - closest_indices = np.argpartition(distances_sq, k)[:k] - pixel_distance_threshold = 2000 - - valid_indices = [idx for idx in closest_indices if distances_sq[idx]<=pixel_distance_threshold] - if len(valid_indices) == 0: - # lidar points disappears usually around 0.4m - distance_where_lidar_stops_working = -1 - return distance_where_lidar_stops_working - - filtered_indices = np.array(valid_indices) - # Get the depth value of the closest point - closest_depths = np_2d_array[2,filtered_indices] - - return np.mean(closest_depths) - -def load_model(device): - # model = FCN32s(nclass=2, backbone='vgg16', pretrained_base=True, pretrained=True) - model = PSPNet(nclass=2, backbone='resnet50', pretrained_base=True) - - # model_location = '32s_batch8/fcn32s_vgg16_pascal_voc_best_model.pth' - # model_location = '32s_self_labelled/fcn32s_vgg16_pascal_voc_best_model.pth' - model_location = 'psp2/psp_resnet50_pascal_voc_best_model.pth' - - if os.path.isfile(f'src/trail_detection_node/trail_detection_node/model/{model_location}'): - model.load_state_dict(torch.load(f'src/trail_detection_node/trail_detection_node/model/{model_location}',map_location=torch.device('cuda:0'))) - else: - model.load_state_dict(torch.load(f'trail_detection_node/model/{model_location}',map_location=torch.device('cuda:0'))) - model = model.to(device) - - model.eval() - print('Finished loading model!') - - return model - -def find_route(model, device, cv_image): - PIL_image = ImagePIL.fromarray(cv2.cvtColor(cv_image, cv2.COLOR_BGR2RGB)) - transform = transforms.Compose([ - transforms.ToTensor(), - transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), - ]) - image = transform(PIL_image).unsqueeze(0).to(device) - with torch.no_grad(): - output = model(image) - # pred is prediction generated by the model, route is the variable for the center line - pred = torch.argmax(output[0], 1).squeeze(0).cpu().data.numpy() - pred[pred == 0] = 255 # only see the trail - pred[pred == 1] = 0 - pred = np.array(pred, dtype=np.uint8) - - # # prediction visualize - # cv2.imshow('prediction',pred) - # cv2.waitKey(25) - - route = np.zeros_like(pred) - # calculate the center line by taking the average - row_num = 0 - for row in pred: - white_pixels = list(np.nonzero(row)[0]) - if white_pixels: - average = (white_pixels[0] + white_pixels[-1]) / 2 - route[row_num][round(average)] = 255 - row_num = row_num + 1 - return route, pred - -def equalize_hist_rgb(img): - # Split the image into its color channels - r, g, b = cv2.split(img) - - # Equalize the histograms for each channel - r_eq = cv2.equalizeHist(r) - g_eq = cv2.equalizeHist(g) - b_eq = cv2.equalizeHist(b) - - # Merge the channels - img_eq = cv2.merge((r_eq, g_eq, b_eq)) - # img_eq = cv2.fastNlMeansDenoisingColored(img_eq,None,10,20,7,42) - # img_eq = cv2.GaussianBlur(img_eq, (25,25), 0) - return img_eq - -''' - The traildetector node has two subscriptions(lidar and camera) and one publisher(trail position). After it receives msgs from both lidar and camera, - it detects the trail in the image and sends the corresponding lidar position as the trail location. - The msgs are synchornized before processing, using buffer and sync function. 
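    (Editor's note: in this visualizer the lidar/camera synchronization is done
    by message_filters.ApproximateTimeSynchronizer unless only_camera_mode is
    set; the buffer wording above is carried over from v1.)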
- To find the path, the node will process the image, find a line to follow (by taking the average of left and right of the path), estimate the lidar points depth, - and choose to go to the closest point. - V1_traildetection assumes that the path is pointing frontward and has only one path in front. -''' -class trailDetector(Node): - def __init__(self, model, device): - super().__init__('trail_detector') - self.only_camera_mode = False - self.bridge = CvBridge() - - # load model and device - self.model = model - self.device = device - - # define trail publication - self.trail_publisher = self.create_publisher( - PoseStamped, - 'trail_location', - 10) - - if self.only_camera_mode: - # define camera subscription - print("Mode: Only Camera") - self.camera_subscription = self.create_subscription( - Image, - 'camera', - self.only_camera_callback, - 10) - self.camera_subscription - else: - # create subscribers and synchronizer - print("Mode: Lidar and Camera") - self.image_sub = message_filters.Subscriber(self, Image, 'camera') - self.lidar_sub = message_filters.Subscriber(self, PointCloud2, 'velodyne_points') - queue_size = 30 - ts = message_filters.ApproximateTimeSynchronizer([self.image_sub, self.lidar_sub], queue_size, 0.5) - ts.registerCallback(self.trail_callback) - - - - - def trail_callback(self, camera_msg, lidar_msg): - print("Message received!") - # process lidar msg - point_gen = pc2.read_points( - lidar_msg, field_names=( - "x", "y", "z"), skip_nans=True) - points = [[x, y, z] for x, y, z in point_gen] - points = np.array(points) - points2d = convert_to_camera_frame(points) - - # process camera msg - cv_image = self.bridge.imgmsg_to_cv2(camera_msg, desired_encoding='passthrough') - - # # increase brightness - # M = np.ones(cv_image.shape, dtype = 'uint8') * 50 # increase brightness by 50 - # cv_image = cv2.add(cv_image, M) - - # # image visualizer - # cv2.imshow('raw image',cv_image) - # cv2.waitKey(25) - - route, _ = find_route(self.model, self.device, cv_image) - - # # visualize route purely - # cv2.imshow("white_route",route) - # cv2.waitKey(25) - - route_indices = list(zip(*np.nonzero(route))) - if not route_indices: - print("No centerline found!") - return - - #filter points that have no lidar points near it - filtered_route_indices = [] - for index in route_indices: - point = [] - u = index[1] - v = image_height - index[0] - point.append(u) - point.append(v) - point.append(estimate_depth(u, v, points2d)) - if point[2] == -1: - continue - else: - filtered_route_indices.append(point) - - if not filtered_route_indices: - print("No usable centerline found!") - return - - # find the corresponding lidar points using the center line pixels - filtered_3dPoints = [] - for index in filtered_route_indices: - # for index in route_indices: - point = [] - point.append(index[0]) - point.append(index[1]) - point.append(index[2]) - - # point.append(index[0]) - # point.append(index[1]) - # point.append(estimate_depth(index[0], index[1], points2d)) - - point_3d = convert_to_lidar_frame(point) - filtered_3dPoints.append(point_3d) - - filtered_3dPoints = np.array(filtered_3dPoints) - # find the nearest 3d point and set that as goal - distances_sq = filtered_3dPoints[:,0]**2 + filtered_3dPoints[:,1]**2 + filtered_3dPoints[:,2]**2 - smallest_index = np.argmin(distances_sq) - # print(math.sqrt(distances_sq[smallest_index])) - # smallest_index = np.argmin(filtered_3dPoints[:,2]) - - # visualize after-pocessed image - visualize_cv_image = cv_image - print(f"{visualize_cv_image.shape[0]}") - circle_y = 
route_indices[smallest_index][0] - circle_x = route_indices[smallest_index][1] - - # visualize the lidar points in image - uv_x, uv_y, uv_z = points2d - for index_lidar in range(points2d.shape[1]): - # print(f"{int(uv_x[index_lidar])},{int(uv_y[index_lidar])}") - cv2.circle(visualize_cv_image, (int(uv_x[index_lidar]), int(image_height - uv_y[index_lidar])), radius=5, color=(255, 0, 0), thickness=-1) - - # visualize center line in image - for index_circle in range(len(route_indices)): - if index_circle == smallest_index: - continue - else: - green_circle_row = route_indices[index_circle][0] - green_circle_coloum = route_indices[index_circle][1] - cv2.circle(visualize_cv_image, (green_circle_coloum, green_circle_row), radius=5, color=(0, 125, 125), thickness=-1) - - # visualize filtered center line in image - for index_circle in range(len(filtered_route_indices)): - if index_circle == smallest_index: - continue - else: - red_circle_row = filtered_route_indices[index_circle][1] - red_circle_coloum = filtered_route_indices[index_circle][0] - cv2.circle(visualize_cv_image, (red_circle_coloum, image_height - red_circle_row), radius=5, color=(0, 0, 255), thickness=-1) - - # visualize the chosen point in image - cv2.circle(visualize_cv_image, (circle_x, circle_y), radius=7, color=(255, 255, 255), thickness=-1) - - cv2.imshow('circled image',visualize_cv_image) - cv2.waitKey(25) - - # publsih message - trail_location_msg = PoseStamped() - trail_location_msg.header.stamp = lidar_msg.header.stamp - trail_location_msg.header.frame_id = "velodyne" - #position - trail_location_msg.pose.position.x = filtered_3dPoints[smallest_index][0] - trail_location_msg.pose.position.y = filtered_3dPoints[smallest_index][1] - trail_location_msg.pose.position.z = filtered_3dPoints[smallest_index][2] - #orientation - yaw = math.atan2(filtered_3dPoints[smallest_index][1], filtered_3dPoints[smallest_index][0]) - trail_location_msg.pose.orientation.x = 0.0 - trail_location_msg.pose.orientation.y = 0.0 - trail_location_msg.pose.orientation.z = math.sin(yaw/2) - trail_location_msg.pose.orientation.w = math.cos(yaw / 2) - self.get_logger().info("location published!") - self.get_logger().info(f"Point: {filtered_3dPoints[smallest_index][0]}, {filtered_3dPoints[smallest_index][1]}, {filtered_3dPoints[smallest_index][2]}") - self.trail_publisher.publish(trail_location_msg) - - def only_camera_callback(self, camera_msg): - # process camera msg - cv_image = self.bridge.imgmsg_to_cv2(camera_msg, desired_encoding='passthrough') - # cv_image = cv2.resize(cv_image,(480,480)) - - # cv_image = cv2.fastNlMeansDenoisingColored(cv_image,None,5,5,5,11) - - # # histogram equalization method 1 - # cv_image = cv2.cvtColor(cv_image, cv2.COLOR_BGR2YCrCb) - # cv_image[:,:,0] = cv2.equalizeHist(cv_image[:,:,0]) - # cv_image = cv2.cvtColor(cv_image, cv2.COLOR_YCrCb2BGR) - - # histogram equalization method 2 - # cv_image = equalize_hist_rgb(cv_image) - - # # histogram equalization method 3 CLAHE - # image_bw = cv2.cvtColor(cv_image, cv2.COLOR_BGR2GRAY) - # clahe = cv2.createCLAHE(clipLimit=5) - # cv_image = clahe.apply(image_bw) + 30 - - # # histogram equalization method 4 CLAHE - # cla = cv2.createCLAHE(clipLimit=4.0) - # H, S, V = cv2.split(cv2.cvtColor(cv_image, cv2.COLOR_BGR2HSV)) - # eq_V = cla.apply(V) - # cv_image = cv2.cvtColor(cv2.merge([H, S, eq_V]), cv2.COLOR_HSV2BGR) - - # # increase brightness - # M = np.ones(cv_image.shape, dtype = 'uint8') * 50 # increase brightness by 50 - # cv_image = cv2.add(cv_image, M) - - # # image visualizer - # 
cv2.imshow('raw image',cv_image) - # cv2.waitKey(25) - - route, pred = find_route(self.model, self.device, cv_image) - - # # visualize route purely - # cv2.imshow("white_route",route) - # cv2.waitKey(25) - - # increase the brightness of image where is predicted as road - sign = cv2.cvtColor(pred, cv2.COLOR_GRAY2BGR) - cv_image = cv2.add(cv_image, sign) - - route_indices = list(zip(*np.nonzero(route))) - if not route_indices: - print("No centerline found!") - return - - # for index_circle in range(len(route_indices)): - # red_circle_x = route_indices[index_circle][0] - # red_circle_y = route_indices[index_circle][1] - # cv2.circle(cv_image, (red_circle_y, red_circle_x), radius=5, color=(0, 0, 255), thickness=-1) - - cv2.imshow('circled image',cv_image) - cv2.waitKey(25) - - - - -def main(args=None): - print(torch.cuda.is_available()) - device = torch.device("cuda" if torch.cuda.is_available() else "cpu") - model = load_model(device) - - rclpy.init(args=args) - trailDetectorNode = trailDetector(model, device) - rclpy.spin(trailDetectorNode) - - trailDetectorNode.destroy_node() - rclpy.shutdown() - cv2.destroyAllWindows() - -if __name__ == '__main__': - main() From 3849886f5e77acadc453df3a2a58770a5cca7946 Mon Sep 17 00:00:00 2001 From: Frank_ZhaodongJiang <69261064+FrankZhaodong@users.noreply.github.com> Date: Mon, 11 Sep 2023 14:45:04 -0400 Subject: [PATCH 17/22] merge visualization and msg publish node --- .../trailDetectionNode.py | 420 ++++++++++++++++++ 1 file changed, 420 insertions(+) create mode 100644 trail_detection_node/trail_detection_node/trailDetectionNode.py diff --git a/trail_detection_node/trail_detection_node/trailDetectionNode.py b/trail_detection_node/trail_detection_node/trailDetectionNode.py new file mode 100644 index 00000000..cb9d89ea --- /dev/null +++ b/trail_detection_node/trail_detection_node/trailDetectionNode.py @@ -0,0 +1,420 @@ +import torch +from .model_loader import PSPNet +from PIL import Image as ImagePIL +from torchvision import transforms +import cv2 +import os + +import rclpy +from rclpy.node import Node +from geometry_msgs.msg import PoseStamped +from sensor_msgs.msg import Image, PointCloud2 +import sensor_msgs_py.point_cloud2 as pc2 +import numpy as np +import math +from cv_bridge import CvBridge + +import message_filters +''' + The transformation matrix as well as the coordinate conversion and depth estimation functions are copied from human_detection_node +''' +camera_transformation_k = np.array([ + [628.5359544,0,676.9575694], + [0,627.7249542,532.7206716], + [0,0,1]]) + +rotation_matrix = np.array([ + [-0.007495781893,-0.0006277316155,0.9999717092], + [-0.9999516401,-0.006361853422,-0.007499625104], + [0.006366381192,-0.9999795662,-0.0005800141927]]) + +rotation_matrix = rotation_matrix.T + +translation_vector = np.array([-0.06024059837, -0.08180891509, -0.3117851288]) +image_width=1280 +image_height=1024 + +def convert_to_lidar_frame(uv_coordinate): + """ + convert 2d camera coordinate + depth into 3d lidar frame + """ + point_cloud = np.empty( (3,) , dtype=float) + point_cloud[2] = uv_coordinate[2] + point_cloud[1] = ( image_height - uv_coordinate[1] )*point_cloud[2] + point_cloud[0] = uv_coordinate[0]*point_cloud[2] + + inverse_camera_transformation_k = np.linalg.inv(camera_transformation_k) + inverse_rotation_matrix = np.linalg.inv(rotation_matrix) + point_cloud = inverse_camera_transformation_k @ point_cloud + point_cloud = inverse_rotation_matrix @ (point_cloud-translation_vector) + return point_cloud + +def 
convert_to_camera_frame(point_cloud): + """ + convert 3d lidar data into 2d coordinate of the camera frame + depth + """ + length = point_cloud.shape[0] + translation = np.tile(translation_vector, (length, 1)).T + + point_cloud = point_cloud.T + point_cloud = rotation_matrix@point_cloud + translation + point_cloud = camera_transformation_k @ point_cloud + + uv_coordinate = np.empty_like(point_cloud) + + """ + uv = [x/z, y/z, z]; the v axis points the opposite way, hence the subtraction from image_height + """ + uv_coordinate[0] = point_cloud[0] / point_cloud[2] + uv_coordinate[1] = image_height - point_cloud[1] / point_cloud[2] + uv_coordinate[2] = point_cloud[2] + + uv_depth = uv_coordinate[2, :] + filtered_uv_coordinate = uv_coordinate[:, uv_depth >= 0] + return filtered_uv_coordinate + +def estimate_depth(x, y, np_2d_array): + """ + estimate the depth by finding points closest to x,y from the 2d array + """ + # Calculate the squared distance between each point and the target coordinates (x, y) + distances_sq = (np_2d_array[0,:] - x) ** 2 + (np_2d_array[1,:] - y) ** 2 + + # Find the indices of the k nearest points + k = 5 # Number of nearest neighbors we want + closest_indices = np.argpartition(distances_sq, k)[:k] + pixel_distance_threshold = 2000 # squared pixel distance, roughly a 45-pixel radius + + valid_indices = [idx for idx in closest_indices if distances_sq[idx]<=pixel_distance_threshold] + if len(valid_indices) == 0: + # lidar points usually disappear below roughly 0.4 m + distance_where_lidar_stops_working = -1 + return distance_where_lidar_stops_working + + filtered_indices = np.array(valid_indices) + # Average the depth values of the valid nearest points + closest_depths = np_2d_array[2,filtered_indices] + + return np.mean(closest_depths)
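The k-nearest-neighbour selection in estimate_depth leans on np.argpartition, which returns the indices of the k smallest distances in O(n) without fully sorting the array (it does require more than k elements, which a full lidar scan easily satisfies). A minimal standalone sketch of that selection step, with made-up distances:

```python
import numpy as np

# Squared pixel distances from a target (u, v) to six hypothetical lidar points.
distances_sq = np.array([900.0, 1.0, 4900.0, 9.0, 25.0, 4.0])
k = 3
# Indices of the 3 smallest distances; ordering within those 3 is unspecified.
closest = np.argpartition(distances_sq, k)[:k]
print(sorted(distances_sq[closest]))  # [1.0, 4.0, 9.0]
```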
+def load_model(device): + # model = FCN32s(nclass=2, backbone='vgg16', pretrained_base=True, pretrained=True) + model = PSPNet(nclass=2, backbone='resnet50', pretrained_base=True) + + # model_location = '32s_batch8/fcn32s_vgg16_pascal_voc_best_model.pth' + # model_location = '32s_self_labelled/fcn32s_vgg16_pascal_voc_best_model.pth' + model_location = 'psp2/psp_resnet50_pascal_voc_best_model.pth' + + if os.path.isfile(f'src/trail_detection_node/trail_detection_node/model/{model_location}'): + model.load_state_dict(torch.load(f'src/trail_detection_node/trail_detection_node/model/{model_location}',map_location=torch.device('cuda:0'))) + else: + model.load_state_dict(torch.load(f'trail_detection_node/model/{model_location}',map_location=torch.device('cuda:0'))) + model = model.to(device) + + model.eval() + print('Finished loading model!') + + return model + +def find_route(model, device, cv_image): + PIL_image = ImagePIL.fromarray(cv2.cvtColor(cv_image, cv2.COLOR_BGR2RGB)) + transform = transforms.Compose([ + transforms.ToTensor(), + transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), + ]) + image = transform(PIL_image).unsqueeze(0).to(device) + with torch.no_grad(): + output = model(image) + # pred is the prediction generated by the model, route holds the center line + pred = torch.argmax(output[0], 1).squeeze(0).cpu().data.numpy() + pred[pred == 0] = 255 # keep only the trail class + pred[pred == 1] = 0 + pred = np.array(pred, dtype=np.uint8) + + # # prediction visualize + # cv2.imshow('prediction',pred) + # cv2.waitKey(25) + + route = np.zeros_like(pred) + # calculate the center line by averaging the leftmost and rightmost trail pixels in each row + row_num = 0 + for row in pred: + white_pixels = list(np.nonzero(row)[0]) + if white_pixels: + average = (white_pixels[0] + white_pixels[-1]) / 2 + route[row_num][round(average)] = 255 + row_num = row_num + 1 + return route, pred + +def equalize_hist_rgb(img): + # Split the image into its color channels + r, g, b = cv2.split(img) + + # Equalize the histograms for each channel + r_eq = cv2.equalizeHist(r) + g_eq = cv2.equalizeHist(g) + b_eq = cv2.equalizeHist(b) + + # Merge the channels + img_eq = cv2.merge((r_eq, g_eq, b_eq)) + # img_eq = cv2.fastNlMeansDenoisingColored(img_eq,None,10,20,7,42) + # img_eq = cv2.GaussianBlur(img_eq, (25,25), 0) + return img_eq + +''' + The trailDetector node has two subscriptions (lidar and camera) and one publisher (trail position). After it receives msgs from both lidar and camera, + it detects the trail in the image and sends the corresponding lidar position as the trail location. + The msgs are synchronized before processing, using a buffer and a sync function. + To find the path, the node processes the image, finds a line to follow (by taking the average of the left and right edges of the path), estimates the depth of the lidar points, + and chooses the closest point as the goal. + This version of the node assumes that the path points forward and that there is only one path in front. +'''
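Since everything above the node class is plain Python, the segmentation and centerline extraction can be sanity-checked offline before involving ROS. A minimal sketch, assuming a hypothetical test image `test_trail.jpg` and the load_model/find_route helpers above:

```python
import cv2
import torch

def offline_check(image_path='test_trail.jpg'):  # hypothetical test image
    # Note: load_model above maps weights to cuda:0, so this assumes a GPU.
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    model = load_model(device)
    cv_image = cv2.imread(image_path)   # BGR, same layout as the node receives
    route, pred = find_route(model, device, cv_image)
    cv2.imshow('prediction', pred)      # white = trail
    cv2.imshow('centerline', route)     # at most one white pixel per row
    cv2.waitKey(0)
    cv2.destroyAllWindows()
```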
+class trailDetector(Node): + def __init__(self, model, device): + super().__init__('trail_detector') + self.only_camera_mode = False + self.bridge = CvBridge() + + # load model and device + self.model = model + self.device = device + + # define trail publication + self.trail_publisher = self.create_publisher( + PoseStamped, + 'trail_location', + 10) + + self.visualize = False + + if self.only_camera_mode: + # define camera subscription + self.get_logger().info("Mode: Only Camera") + self.camera_subscription = self.create_subscription( + Image, + 'camera', + self.only_camera_callback, + 10) + self.camera_subscription + else: + # create subscribers and synchronizer + self.get_logger().info("Mode: Lidar and Camera") + self.image_sub = message_filters.Subscriber(self, Image, 'camera') + self.lidar_sub = message_filters.Subscriber(self, PointCloud2, 'velodyne_points') + queue_size = 30 + ts = message_filters.ApproximateTimeSynchronizer([self.image_sub, self.lidar_sub], queue_size, 0.5) + ts.registerCallback(self.trail_callback) + + def trail_callback(self, camera_msg, lidar_msg): + # print("Message received!") + # process lidar msg + point_gen = pc2.read_points( + lidar_msg, field_names=( + "x", "y", "z"), skip_nans=True) + points = [[x, y, z] for x, y, z in point_gen] + points = np.array(points) + points2d = convert_to_camera_frame(points) + + # process camera msg + cv_image = self.bridge.imgmsg_to_cv2(camera_msg, desired_encoding='passthrough') + + # # increase brightness + # M = np.ones(cv_image.shape, dtype = 'uint8') * 50 # increase brightness by 50 + # cv_image = cv2.add(cv_image, M) + + # # image visualizer + # cv2.imshow('raw image',cv_image) + # cv2.waitKey(25) + + route, _ = find_route(self.model, self.device, cv_image) + + # # visualize route purely + # cv2.imshow("white_route",route) + # cv2.waitKey(25) + + route_indices = list(zip(*np.nonzero(route))) + if not route_indices: + self.get_logger().info("No centerline found!") + return + + # filter out centerline pixels that have no lidar points near them + filtered_route_indices = [] + for index in route_indices: + point = [] + u = index[1] + v = image_height - index[0] + point.append(u) + point.append(v) + point.append(estimate_depth(u, v, points2d)) + if point[2] == -1: + continue + else: + filtered_route_indices.append(point) + + if not filtered_route_indices: + self.get_logger().info("No usable centerline found!") + return + + # find the corresponding lidar points using the center line pixels + filtered_3dPoints = [] + for index in filtered_route_indices: + # for index in route_indices: + point = [] + point.append(index[0]) + point.append(index[1]) + point.append(index[2]) + + # point.append(index[0]) + # point.append(index[1]) + # point.append(estimate_depth(index[0], index[1], points2d)) + + point_3d = convert_to_lidar_frame(point) + filtered_3dPoints.append(point_3d) + + filtered_3dPoints = np.array(filtered_3dPoints) + # find the nearest 3d point and set that as the goal + distances_sq = filtered_3dPoints[:,0]**2 + filtered_3dPoints[:,1]**2 + filtered_3dPoints[:,2]**2 + self.smallest_index = np.argmin(distances_sq) + # print(math.sqrt(distances_sq[smallest_index])) + # smallest_index = np.argmin(filtered_3dPoints[:,2]) + + if self.visualize: + self.visualization(route_indices, points2d, filtered_route_indices, cv_image) + else: + self.publish_msg(lidar_msg, filtered_3dPoints) + + def visualization(self, route_indices, points2d, filtered_route_indices, cv_image): + # visualize the post-processed image + visualize_cv_image = cv_image + # print(f"{visualize_cv_image.shape[0]}") + circle_y = route_indices[self.smallest_index][0] + circle_x = route_indices[self.smallest_index][1] + + # visualize the lidar points in the image + uv_x, uv_y, uv_z = points2d + for index_lidar in range(points2d.shape[1]): + # print(f"{int(uv_x[index_lidar])},{int(uv_y[index_lidar])}") + cv2.circle(visualize_cv_image, (int(uv_x[index_lidar]), int(image_height - uv_y[index_lidar])), radius=5, color=(255, 0, 0), thickness=-1) + + # visualize the center line in the image + for index_circle in range(len(route_indices)): + if index_circle == self.smallest_index: + continue + else: + green_circle_row = route_indices[index_circle][0] + green_circle_column = route_indices[index_circle][1] + cv2.circle(visualize_cv_image, (green_circle_column, green_circle_row), radius=5, color=(0, 125, 125), thickness=-1) + + # visualize the filtered center line in the image + for index_circle in range(len(filtered_route_indices)): + if index_circle == self.smallest_index: + continue + else: + red_circle_row = filtered_route_indices[index_circle][1] + red_circle_column = filtered_route_indices[index_circle][0] + cv2.circle(visualize_cv_image, (red_circle_column, image_height - red_circle_row), radius=5, color=(0, 0, 255), thickness=-1) + + # visualize the chosen point in the image + cv2.circle(visualize_cv_image, (circle_x, circle_y), radius=7, color=(255, 255, 255), thickness=-1) + + cv2.imshow('circled image',visualize_cv_image) + cv2.waitKey(25) + + def publish_msg(self, lidar_msg, filtered_3dPoints): + # publish message + trail_location_msg = PoseStamped() + trail_location_msg.header.stamp = lidar_msg.header.stamp + trail_location_msg.header.frame_id = "velodyne" + # position + trail_location_msg.pose.position.x = filtered_3dPoints[self.smallest_index][0] + trail_location_msg.pose.position.y = filtered_3dPoints[self.smallest_index][1] + trail_location_msg.pose.position.z = filtered_3dPoints[self.smallest_index][2] + # orientation + yaw = math.atan2(filtered_3dPoints[self.smallest_index][1], filtered_3dPoints[self.smallest_index][0]) + trail_location_msg.pose.orientation.x = 0.0 + trail_location_msg.pose.orientation.y = 0.0 + trail_location_msg.pose.orientation.z = math.sin(yaw / 2) + trail_location_msg.pose.orientation.w = math.cos(yaw / 2) + self.get_logger().info("location published!") + self.get_logger().info(f"Point: {filtered_3dPoints[self.smallest_index][0]}, {filtered_3dPoints[self.smallest_index][1]}, {filtered_3dPoints[self.smallest_index][2]}") + self.trail_publisher.publish(trail_location_msg)
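The orientation published by publish_msg is a pure yaw, i.e. a rotation about the lidar's +z axis, for which the quaternion reduces to (x, y, z, w) = (0, 0, sin(yaw/2), cos(yaw/2)). A standalone check of that identity:

```python
import math

def yaw_to_quaternion(yaw):
    # Quaternion (x, y, z, w) for a rotation of `yaw` radians about +z.
    return (0.0, 0.0, math.sin(yaw / 2), math.cos(yaw / 2))

# A goal at (x=1, y=1) gives yaw = atan2(1, 1) = 45 degrees:
print(yaw_to_quaternion(math.atan2(1.0, 1.0)))
# (0.0, 0.0, 0.3826834323650898, 0.9238795325112867)
```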
+ def only_camera_callback(self, camera_msg): + # process camera msg + cv_image = self.bridge.imgmsg_to_cv2(camera_msg, desired_encoding='passthrough') + # cv_image = cv2.resize(cv_image,(480,480)) + + # cv_image = cv2.fastNlMeansDenoisingColored(cv_image,None,5,5,5,11) + + # # histogram equalization method 1 + # cv_image = cv2.cvtColor(cv_image, cv2.COLOR_BGR2YCrCb) + # cv_image[:,:,0] = cv2.equalizeHist(cv_image[:,:,0]) + # cv_image = cv2.cvtColor(cv_image, cv2.COLOR_YCrCb2BGR) + + # histogram equalization method 2 + # cv_image = equalize_hist_rgb(cv_image) + + # # histogram equalization method 3 CLAHE + # image_bw = cv2.cvtColor(cv_image, cv2.COLOR_BGR2GRAY) + # clahe = cv2.createCLAHE(clipLimit=5) + # cv_image = clahe.apply(image_bw) + 30 + + # # histogram equalization method 4 CLAHE + # cla = cv2.createCLAHE(clipLimit=4.0) + # H, S, V = cv2.split(cv2.cvtColor(cv_image, cv2.COLOR_BGR2HSV)) + # eq_V = cla.apply(V) + # cv_image = cv2.cvtColor(cv2.merge([H, S, eq_V]), cv2.COLOR_HSV2BGR) + + # # increase brightness + # M = np.ones(cv_image.shape, dtype = 'uint8') * 50 # increase brightness by 50 + # cv_image = cv2.add(cv_image, M) + + # # image visualizer + # cv2.imshow('raw image',cv_image) + # cv2.waitKey(25) + + route, pred = find_route(self.model, self.device, cv_image) + + # # visualize route purely + # cv2.imshow("white_route",route) + # cv2.waitKey(25) + + # brighten the parts of the image that are predicted as road + sign = cv2.cvtColor(pred, cv2.COLOR_GRAY2BGR) + cv_image = cv2.add(cv_image, sign) + + route_indices = list(zip(*np.nonzero(route))) + if not route_indices: + self.get_logger().info("No centerline found!") + return + + # for index_circle in range(len(route_indices)): + # red_circle_x = route_indices[index_circle][0] + # red_circle_y = route_indices[index_circle][1] + # cv2.circle(cv_image, (red_circle_y, red_circle_x), radius=5, color=(0, 0, 255), thickness=-1) + + cv2.imshow('circled image',cv_image) + cv2.waitKey(25) + +def main(args=None): + print(torch.cuda.is_available()) + device = torch.device("cuda" if torch.cuda.is_available() else "cpu") + model = load_model(device) + + rclpy.init(args=args) + trailDetectorNode = trailDetector(model, device) + rclpy.spin(trailDetectorNode) + + trailDetectorNode.destroy_node() + rclpy.shutdown() + cv2.destroyAllWindows() + +if __name__ == '__main__': + main() From 975fc45385b60e6d0755aeb2f6980018c8706d5f Mon Sep 17 00:00:00 2001 From: Frank_ZhaodongJiang <69261064+FrankZhaodong@users.noreply.github.com> Date: Sat, 16 Sep 2023 20:36:49 -0400 Subject: [PATCH 18/22] Update setup.py Change entry point and fix email --- trail_detection_node/setup.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/trail_detection_node/setup.py b/trail_detection_node/setup.py index 511d6ad0..83dd74cc 100644 --- a/trail_detection_node/setup.py +++ b/trail_detection_node/setup.py @@ -14,15 +14,13 @@ install_requires=['setuptools'], zip_safe=True, maintainer='zhaodong', - maintainer_email='tom2352759619@hotmail.com', + maintainer_email='zhaodong.jiang@mail.utoronto.ca', description='TODO: Package description', license='TODO: License declaration', tests_require=['pytest'], entry_points={ 'console_scripts': [ - 'trail_detection = trail_detection_node.v2_trailDetectionNode:main', - 'visualizer = trail_detection_node.visualizer_v2_trailDetectionNode:main', - 'original_visualizer = trail_detection_node.visualizer_v1_trailDetectionNode:main', + 'trail_detection = trail_detection_node.trailDetectionNode:main', ], }, )
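With the package now exposing the single trail_detection entry point, downstream code only needs the trail_location topic. A minimal consumer sketch (the trail_follower node is hypothetical, not part of this package):

```python
import rclpy
from rclpy.node import Node
from geometry_msgs.msg import PoseStamped

class TrailFollower(Node):
    def __init__(self):
        super().__init__('trail_follower')
        # PoseStamped goals arrive in the "velodyne" frame.
        self.create_subscription(PoseStamped, 'trail_location', self.on_goal, 10)

    def on_goal(self, msg):
        p = msg.pose.position
        self.get_logger().info(f'goal ({msg.header.frame_id}): {p.x:.2f}, {p.y:.2f}, {p.z:.2f}')

def main(args=None):
    rclpy.init(args=args)
    node = TrailFollower()
    rclpy.spin(node)
    node.destroy_node()
    rclpy.shutdown()

if __name__ == '__main__':
    main()
```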
From 9927b75ced00d1772f1164b7401fe0e9da0ba90a Mon Sep 17 00:00:00 2001 From: Frank_ZhaodongJiang <69261064+FrankZhaodong@users.noreply.github.com> Date: Sat, 16 Sep 2023 20:37:24 -0400 Subject: [PATCH 19/22] Update test.py --- trail_detection_node/test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/trail_detection_node/test.py b/trail_detection_node/test.py index 5d715cab..c925cc4c 100644 --- a/trail_detection_node/test.py +++ b/trail_detection_node/test.py @@ -1,3 +1,3 @@ -from trail_detection_node.visualizer_v2_trailDetectionNode import * +from trail_detection_node.trailDetectionNode import * main() From d42767cc1bfbd8b370b3f329548d6b636e9ce0a0 Mon Sep 17 00:00:00 2001 From: Frank_ZhaodongJiang <69261064+FrankZhaodong@users.noreply.github.com> Date: Sat, 16 Sep 2023 20:39:18 -0400 Subject: [PATCH 20/22] Update package.xml fix email --- trail_detection_node/package.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/trail_detection_node/package.xml b/trail_detection_node/package.xml index 45159f4b..aa18aeb5 100644 --- a/trail_detection_node/package.xml +++ b/trail_detection_node/package.xml @@ -4,7 +4,7 @@ trail_detection_node 0.0.0 TODO: Package description - <maintainer email="tom2352759619@hotmail.com">zhaodong</maintainer> + <maintainer email="zhaodong.jiang@mail.utoronto.ca">zhaodong</maintainer> TODO: License declaration sensor_msgs From 5bc9924b21ecc8a4386344c2b63ddada5efa2d3f Mon Sep 17 00:00:00 2001 From: Frank_ZhaodongJiang <69261064+FrankZhaodong@users.noreply.github.com> Date: Sat, 16 Sep 2023 21:07:42 -0400 Subject: [PATCH 21/22] Update README.md --- trail_detection_node/README.md | 20 ++++++++------------ 1 file changed, 8 insertions(+), 12 deletions(-) diff --git a/trail_detection_node/README.md b/trail_detection_node/README.md index ceda4310..f6e7b881 100644 --- a/trail_detection_node/README.md +++ b/trail_detection_node/README.md @@ -7,15 +7,16 @@ SETUP GUIDE RUN the node ============= -The package contains two nodes: -1. The node that takes the camera and lidar data and sends the Posestamp message ``` ros2 run trail_detection_node trail_detection ``` -2. The node that takes the camera and lidar data and shows the visualization of the output -``` -ros2 run trail_detection_node visualizer -``` + + +Node Usage +============= +The node has three modes, which can be controlled by setting ```only_camera_mode``` (line 175) and ```visualize``` (line 188) to different values: +1. Trail centerline publisher (default, ```only_camera_mode=False```, ```visualize=False```): subscribes to the camera and lidar and publishes the centerline point (a PoseStamped msg) in the ```velodyne``` frame on the ```trail_location``` topic +2. Trail centerline visualizer (```only_camera_mode=False```, ```visualize=True```): subscribes to the camera and lidar and shows, in a pop-up window, the chosen centerline point in **white**, the lidar points in **blue**, centerline points with no lidar points nearby in **yellow**, and centerline points with lidar points nearby in **red** +3. Only Camera mode (```only_camera_mode=True```): subscribes only to the camera, runs the segmentation model, and shows the segmented trail in the image in a pop-up window Possible issues ============= @@ -24,10 +25,5 @@ Possible issues 2. **Subfolder package relative import error:** Temporary fix: Move the script to the same folder and change the import code -Scripts explanation -============= -1.
```v1_trailDetectionNode.py``` and ```v2_trailDetectionNode.py```: two versions of ros2 node that sends the Posestamp message -2. ```visualizer_v1_trailDetectionNode.py``` and ```visualizer_v2_trailDetectionNode.py```: two versions of ros2 visualization node -3. ```GANav_visualizer.py```: ros2 visualization node for GANav model -4. ```jpu.py```, ```model_loader.py```, ```model_store.py```, ```segbase.py```, ```vgg.py```, and ```base_models``` folder: model loaders for FCN32S. FCN8s, and PSPNet. Copied from: https://github.com/Tramac/awesome-semantic-segmentation-pytorch + From 6ca46402905c884c94db1f186e99fa6e660c6353 Mon Sep 17 00:00:00 2001 From: Frank_ZhaodongJiang <69261064+FrankZhaodong@users.noreply.github.com> Date: Sat, 16 Sep 2023 21:12:31 -0400 Subject: [PATCH 22/22] Update trailDetectionNode.py Add a comment that line 21 to 98 are from human_detection_node --- trail_detection_node/trail_detection_node/trailDetectionNode.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/trail_detection_node/trail_detection_node/trailDetectionNode.py b/trail_detection_node/trail_detection_node/trailDetectionNode.py index cb9d89ea..e4eef73e 100644 --- a/trail_detection_node/trail_detection_node/trailDetectionNode.py +++ b/trail_detection_node/trail_detection_node/trailDetectionNode.py @@ -16,7 +16,7 @@ import message_filters ''' - The transformation matrix as well as the coordinate conversion and depth estimation functions are copied from human_detection_node + The transformation matrix as well as the coordinate conversion and depth estimation functions (from line 21 to line 98) are copied from human_detection_node ''' camera_transformation_k = np.array([ [628.5359544,0,676.9575694],
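The comment fixed above points at the camera-lidar geometry all of these patches rely on. For points with positive depth, convert_to_camera_frame and convert_to_lidar_frame are exact inverses of one another; a standalone round-trip check using the calibration constants from the node (the sample lidar point is made up):

```python
import numpy as np

K = np.array([[628.5359544, 0, 676.9575694],
              [0, 627.7249542, 532.7206716],
              [0, 0, 1]])
R = np.array([[-0.007495781893, -0.0006277316155, 0.9999717092],
              [-0.9999516401, -0.006361853422, -0.007499625104],
              [0.006366381192, -0.9999795662, -0.0005800141927]]).T
t = np.array([-0.06024059837, -0.08180891509, -0.3117851288])
image_height = 1024

p_lidar = np.array([5.0, 0.5, -0.2])   # hypothetical point about 5 m ahead
cam = K @ (R @ p_lidar + t)            # project into pixel space
u, v, depth = cam[0] / cam[2], image_height - cam[1] / cam[2], cam[2]

# Back-project: undo the vertical flip, the intrinsics, then the extrinsics.
pix = np.array([u * depth, (image_height - v) * depth, depth])
p_back = np.linalg.inv(R) @ (np.linalg.inv(K) @ pix - t)
print(np.allclose(p_lidar, p_back))    # True
```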