diff --git a/map/autoware_tp_manager/CMakeLists.txt b/map/autoware_tp_manager/CMakeLists.txt
new file mode 100644
index 00000000..58ffcf92
--- /dev/null
+++ b/map/autoware_tp_manager/CMakeLists.txt
@@ -0,0 +1,19 @@
+cmake_minimum_required(VERSION 3.5)
+project(autoware_tp_manager)
+
+find_package(autoware_cmake REQUIRED)
+autoware_package()
+
+# Find packages
+find_package(yaml-cpp REQUIRED)
+find_package(PCL REQUIRED)
+
+install(PROGRAMS
+  scripts/tp_collector.py
+  scripts/tp_checker.py
+  scripts/tp_visualizer.py
+  DESTINATION lib/${PROJECT_NAME}
+)
+
+# ament_auto_package(INSTALL_TO_SHARE launch config)
+ament_auto_package()
diff --git a/map/autoware_tp_manager/LICENSE b/map/autoware_tp_manager/LICENSE
new file mode 100644
index 00000000..da934de9
--- /dev/null
+++ b/map/autoware_tp_manager/LICENSE
@@ -0,0 +1,28 @@
+BSD 3-Clause License
+
+Copyright (c) 2023, MAP IV
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+   list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice,
+   this list of conditions and the following disclaimer in the documentation
+   and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its
+   contributors may be used to endorse or promote products derived from
+   this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/map/autoware_tp_manager/README.md b/map/autoware_tp_manager/README.md
new file mode 100644
index 00000000..5aee33c1
--- /dev/null
+++ b/map/autoware_tp_manager/README.md
@@ -0,0 +1,83 @@
+# autoware_tp_manager
+
+This package contains tools for collecting the average transform probabilities (TPs) of NDT matching over a PCD map. Currently, we consider a decrease of TP as a sign of map decay. However, there is no universal threshold for an 'abnormal' TP: in some areas the TPs range around 2.0 ~ 3.0, while in others they float around 5.0. The following tools help to check the TPs per map area:
+
+- TP collector: collects the average TPs per segment of a PCD map
+- TP checker: compares a rosbag's TPs with a map's TPs and highlights the map areas where the rosbag's TPs differ significantly from the map's TPs.
+
+## Installation
+
+```bash
+cd <your_workspace> # OR
+cd <your_workspace>/src/
+git clone git@github.com:autowarefoundation/autoware_tools.git
+cd ..
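+# Source your ROS 2 environment before building if it is not sourced already,
+# e.g. `source /opt/ros/humble/setup.bash` (the exact path depends on your installation).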
+colcon build --cmake-args -DCMAKE_BUILD_TYPE=Release --catkin-skip-building-tests --symlink-install --packages-up-to autoware_tp_manager
+```
+
+## Usage
+
+- Collect the average TPs per segment of a map with the TP collector
+
+  ```bash
+  ros2 run autoware_tp_manager tp_collector.py <path_to_pcd_dir> <path_to_rosbag> <path_to_output_dir> [--resolution <resolution>] [--pose_topic <topic_of_poses>] [--tp_topic <topic_of_TPs>] [--scan_topic <topic_of_scans>]
+  ```
+
+  | Name               | Description                                                                   |
+  | ------------------ | ----------------------------------------------------------------------------- |
+  | path_to_pcd_dir    | Directory that contains the input PCD files                                   |
+  | path_to_rosbag     | Path to the input rosbag                                                       |
+  | path_to_output_dir | Path to the output directory                                                   |
+  | resolution         | Resolution to segment the input PCD. The TPs are collected on these segments  |
+  | topic_of_poses     | Topic of pose messages in the input rosbag                                     |
+  | topic_of_TPs       | Topic of TPs in the input rosbag                                               |
+  | topic_of_scans     | Topic of downsampled scans in the input rosbag                                 |
+
+  Paths to folders should be specified as **absolute paths**.
+
+  The rosbag should contain the following topics:
+
+  - /localization/pose_twist_fusion_filter/pose_with_covariance_without_yawbias
+  - /localization/pose_estimator/transform_probability
+  - /localization/util/downsample/pointcloud
+
+  The average TPs can be visualized on Rviz2 by running the following command:
+
+  ```bash
+  python3 install/autoware_tp_manager/lib/autoware_tp_manager/tp_visualizer.py <path_to_output_dir>
+  ```
+
+  | Name               | Description                                       |
+  | ------------------ | ------------------------------------------------- |
+  | path_to_output_dir | Path to the output directory of the TP collector  |
+
+  Then, open another terminal, launch Rviz2, and add the topic /autoware_tp_visualizer.
+
+- Compare a rosbag's TPs with a map's TPs using the TP checker
+
+  ```bash
+  ros2 run autoware_tp_manager tp_checker.py <path_to_score_dir> <path_to_rosbag> [--pose_topic <topic_of_poses>] [--tp_topic <topic_of_TPs>] [--scan_topic <topic_of_scans>]
+  ```
+
+  | Name              | Description                                                                                                                |
+  | ----------------- | -------------------------------------------------------------------------------------------------------------------------- |
+  | path_to_score_dir | Directory that contains the TP file (.csv) and the downsampled PCD map. This is the output directory of the TP collector. |
+  | path_to_rosbag    | Path to the input rosbag to be evaluated                                                                                   |
+  | topic_of_poses    | Topic of poses in the evaluation rosbag                                                                                    |
+  | topic_of_TPs      | Topic of TPs in the evaluation rosbag                                                                                      |
+  | topic_of_scans    | Topic of scans in the evaluation rosbag                                                                                    |
+
+  The results of checking are published to the topic /autoware_tp_checker, and can also be displayed on Rviz2. The red points mark the map segments whose TPs in the evaluation rosbag deviate from the averaged TPs recorded in the map by 20% or more (the threshold used by the current implementation).
+
+- The rosbags used by both the TP collector and the TP checker are created by running Autoware's logging simulator and recording the following three topics:
+  - /localization/pose_twist_fusion_filter/pose_with_covariance_without_yawbias
+  - /localization/pose_estimator/transform_probability
+  - /localization/util/downsample/pointcloud
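+A typical end-to-end run could look like the following sketch (all `/data/...` paths are placeholders; replace them with your own absolute paths):
+
+```bash
+# 1. Collect the average TPs per segment from a reference rosbag
+ros2 run autoware_tp_manager tp_collector.py /data/sample_map /data/reference_rosbag /data/tp_output --resolution 5.0
+
+# 2. Visualize the collected TPs (add /autoware_tp_visualizer in Rviz2)
+python3 install/autoware_tp_manager/lib/autoware_tp_manager/tp_visualizer.py /data/tp_output
+
+# 3. Check a newer rosbag against the collected TPs (add /autoware_tp_checker in Rviz2)
+ros2 run autoware_tp_manager tp_checker.py /data/tp_output /data/new_rosbag
+```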
+
+## Parameter
+
+{{ json_to_markdown("map/autoware_tp_manager/schema/tp_manager.schema.json") }}
+
+## LICENSE
+
+This package is under [Apache License 2.0](../../LICENSE)
diff --git a/map/autoware_tp_manager/package.xml b/map/autoware_tp_manager/package.xml
new file mode 100644
index 00000000..110c6d60
--- /dev/null
+++ b/map/autoware_tp_manager/package.xml
@@ -0,0 +1,28 @@
+
+
+  autoware_tp_manager
+  0.1.0
+  A package for checking TP scores of NDT matching
+  Yamato Ando
+  Taiki Yamada
+  Shintaro Sakoda
+  Anh Nguyen
+  Masahiro Sakamoto
+  Apache License 2.0
+
+  Anh Nguyen
+  Masahiro Sakamoto
+
+  ament_cmake_auto
+  autoware_cmake
+  tier4_debug_msgs
+
+  libpcl-all-dev
+  tier4_debug_msgs
+  yaml-cpp
+
+
+    ament_cmake
+
+
diff --git a/map/autoware_tp_manager/schema/tp_manager.schema.json b/map/autoware_tp_manager/schema/tp_manager.schema.json
new file mode 100644
index 00000000..0af1e40b
--- /dev/null
+++ b/map/autoware_tp_manager/schema/tp_manager.schema.json
@@ -0,0 +1,107 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Parameters for autoware tp manager node",
+  "type": "object",
+  "definitions": {
+    "tp_collector": {
+      "type": "object",
+      "properties": {
+        "path_to_pcd_dir": {
+          "type": "string",
+          "description": "The path to the directory containing the input PCD files",
+          "default": ""
+        },
+        "path_to_rosbag": {
+          "type": "string",
+          "description": "The path to the input rosbag",
+          "default": ""
+        },
+        "path_to_output_dir": {
+          "type": "string",
+          "description": "The path to the output directory",
+          "default": ""
+        },
+        "resolution": {
+          "type": "number",
+          "description": "Resolution to segment the input PCD. The TPs are collected on these segments.",
+          "default": 5.0
+        },
+        "pose_topic": {
+          "type": "string",
+          "description": "Topic of poses messages in the input rosbag",
+          "default": "/localization/pose_twist_fusion_filter/pose_with_covariance_without_yawbias"
+        },
+        "tp_topic": {
+          "type": "string",
+          "description": "Topic of TPs in the input rosbag",
+          "default": "/localization/pose_estimator/transform_probability"
+        },
+        "scan_topic": {
+          "type": "string",
+          "description": "Topic of downsampled scans in the input rosbag",
+          "default": "/localization/util/downsample/pointcloud"
+        }
+      },
+      "required": ["path_to_pcd_dir", "path_to_rosbag", "path_to_output_dir"],
+      "additionalProperties": false
+    },
+    "tp_checker": {
+      "type": "object",
+      "properties": {
+        "path_to_score_dir": {
+          "type": "string",
+          "description": "The path to the directory containing the average TP file (.csv) and the downsampled PCD map. 
This is also the output directory of the TP collector.", + "default": "" + }, + "path_to_rosbag": { + "type": "string", + "description": "Path to the input rosbag to be evaluated", + "default": "" + }, + "pose_topic": { + "type": "string", + "description": "Topic of poses messages in the input rosbag", + "default": "/localization/pose_twist_fusion_filter/pose_with_covariance_without_yawbias" + }, + "tp_topic": { + "type": "string", + "description": "Topic of TPs in the input rosbag", + "default": "/localization/pose_estimator/transform_probability" + }, + "scan_topic": { + "type": "string", + "description": "Topic of downsampled scans in the input rosbag", + "default": "/localization/util/downsample/pointcloud" + } + }, + "required": ["path_to_score_dir", "path_to_rosbag"], + "additionalProperties": false + }, + "tp_visualizer": { + "type": "object", + "properties": { + "path_to_output_dir": { + "type": "string", + "description": "Path to the output directory of TP_collector", + "default": "" + } + }, + "required": ["path_to_output_dir"], + "additionalProperties": false + } + }, + "properties": { + "/**": { + "type": "object", + "properties": { + "ros__parameters": { + "$ref": "#/definitions/autoware_pointcloud_merger" + } + }, + "required": ["ros__parameters"], + "additionalProperties": false + } + }, + "required": ["/**"], + "additionalProperties": false +} diff --git a/map/autoware_tp_manager/scripts/tp_checker.py b/map/autoware_tp_manager/scripts/tp_checker.py new file mode 100755 index 00000000..73a8a81f --- /dev/null +++ b/map/autoware_tp_manager/scripts/tp_checker.py @@ -0,0 +1,250 @@ +#!/usr/bin/env python3 + +# Copyright 2024 TIER IV, Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
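+
+# Overview: tp_checker replays an evaluation rosbag against the per-segment average TPs
+# produced by tp_collector (scores.csv plus the divided PCD map). For every downsampled
+# scan it looks up the map segments covered by the scan, computes the expected TP from the
+# stored averages, and counts, per segment, how often the TP observed in the rosbag deviates
+# from that expectation by 20% or more. Segments with a high deviation count are colored red
+# in the point cloud published on /autoware_tp_checker for inspection in Rviz2.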
+ +import argparse +import csv +import os +import struct +import time + +import numpy as np +import open3d as o3d +import rclpy +from rclpy.node import Node +from scipy import spatial as sp +import sensor_msgs.msg as sensor_msgs +import sensor_msgs_py.point_cloud2 as pc2 +import std_msgs.msg as std_msgs +import tp_utility as tpu +import tqdm +import yaml + + +def mark_changes(candidate_segments, rosbag_tp: float, segment_dict, segment_df): + tp_sum = 0.0 + valid_segment_num = 0 + + for key in candidate_segments: + if key in segment_dict: + tp_sum += segment_df[segment_dict[key], 1] + valid_segment_num += 1 + if valid_segment_num > 0: + expected_tp = tp_sum / float(valid_segment_num) + else: + expected_tp = 0 + + if (expected_tp > 0 and abs(expected_tp - rosbag_tp) / expected_tp >= 0.2) or ( + expected_tp == 0 and rosbag_tp > 0 + ): + for key in candidate_segments: + if key in segment_dict: + segment_df[segment_dict[key], 2] += 1 + + +class TPChecker(Node): + + def __init__(self): + super().__init__("tp_checker") + self.segment_df = None # Segment indices, TP + self.segment_dict = {} # key: segment name, value: index to the segment_df + self.pcd_path = None # Path to the directory containing PCD segments + self.changed_dir = None # A directory contains the segments that need to be examined + self.result_csv = None # Path to the result CSV file + self.tp_path = None # Path to the file that contains TPs of map segments + + def __initialize(self, score_dir: str): + if not os.path.exists(score_dir): + print("Error: {0} does not exist! Abort!".format(score_dir)) + exit() + + self.pcd_path = os.path.join(score_dir, "pointcloud_map.pcd") + + if not os.path.exists(self.pcd_path): + print("Error: {0} does not exist! Abort!".format(self.pcd_path)) + exit() + + self.yaml_path = os.path.join(score_dir, "pointcloud_map_metadata.yaml") + + if not os.path.exists(self.yaml_path): + print("Error: A map metadata file is not found at {0}! Abort!".format(self.yaml_path)) + exit() + + self.tp_path = os.path.join(score_dir, "scores.csv") + + if not os.path.exists(self.tp_path): + print( + "Error: A TP file, which contains the TPs of map segments, is not found at {0}! 
Abort!".format( + self.tp_path + ) + ) + exit() + + # Read the input map directory and setup the segment dictionary + def __get_pcd_segments_and_scores(self): + # Read the metadata file and get the list of segments + print("Loading the PCD maps...") + self.segment_df = [] + + with open(self.yaml_path, "r") as f: + for key, value in yaml.safe_load(f).items(): + if key != "x_resolution" and key != "y_resolution": + self.segment_df.append([key, 0, 0]) + seg_key = str(value[0]) + "_" + str(value[1]) + self.segment_dict[seg_key] = len(self.segment_df) - 1 + elif key == "x_resolution": + self.resolution = value + + self.segment_df = np.array(self.segment_df, dtype=object) + + # Load the TPs + with open(self.tp_path, "r") as f: + reader = csv.reader(f) + + # Skip the header + next(reader) + # Load the maps' TPs + for index, row in enumerate(reader): + self.segment_df[index, 1] = float(row[1]) + + def __show(self): + ros_float_dtype = sensor_msgs.PointField.FLOAT32 + ros_uint32_dtype = sensor_msgs.PointField.UINT32 + dtype = np.float32 + itemsize = np.dtype(dtype).itemsize + + fields = [ + sensor_msgs.PointField(name="x", offset=0, datatype=ros_float_dtype, count=1), + sensor_msgs.PointField(name="y", offset=itemsize, datatype=ros_float_dtype, count=1), + sensor_msgs.PointField( + name="z", offset=itemsize * 2, datatype=ros_float_dtype, count=1 + ), + sensor_msgs.PointField( + name="rgba", offset=itemsize * 3, datatype=ros_uint32_dtype, count=1 + ), + ] + + points = [] + pc2_width = 0 + + progress_bar = tqdm.tqdm(total=len(self.segment_df)) + origin = None + + for i in range(self.segment_df.shape[0]): + progress_bar.update(1) + # Load the current segment + pcd = o3d.io.read_point_cloud(os.path.join(self.pcd_path, self.segment_df[i, 0])) + np_pcd = np.asarray(pcd.points) + rgba = self.__set_color_based_on_mark(self.segment_df[i, 2]) + + for p in np_pcd: + if origin == None: + origin = [p[0], p[1], p[2]] + pt = [p[0] - origin[0], p[1] - origin[1], p[2] - origin[2], rgba] + points.append(pt) + pc2_width += 1 + + print("Publishing result...") + header = std_msgs.Header() + header.stamp = self.get_clock().now().to_msg() + header.frame_id = "map" + + pc2_msg = pc2.create_cloud(header, fields, points) + pcd_publisher = self.create_publisher(sensor_msgs.PointCloud2, "/autoware_tp_checker", 10) + + while True: + pcd_publisher.publish(pc2_msg) + time.sleep(5) + + def __set_color_based_on_mark(self, mark) -> int: + # The may-have-changed segments are colored red + # The may-not-have-changed segments are colored white + if mark >= 100: + r = 255 + g = 0 + b = 0 + else: + r = 255 + g = 255 + b = 255 + a = 255 + + tmp_rgb = struct.pack("BBBB", b, g, r, a) + rgba = struct.unpack("I", tmp_rgb)[0] + + return rgba + + def processing( + self, score_path: str, rosbag_path: str, pose_topic: str, tp_topic: str, scan_topic: str + ): + self.__initialize(score_path) + self.__get_pcd_segments_and_scores() + + tpu.collect_rosbag_tp( + rosbag_path, + pose_topic, + tp_topic, + scan_topic, + self.resolution, + mark_changes, + self.segment_dict, + self.segment_df, + ) + + self.__show() + + +if __name__ == "__main__": + rclpy.init() + parser = argparse.ArgumentParser() + parser.add_argument("score_path", help="The path to the folder containing the TP file") + parser.add_argument("bag_path", help="The path to the input rosbag") + parser.add_argument( + "--pose_topic", + help="Pose topic", + default="/localization/pose_twist_fusion_filter/pose_with_covariance_without_yawbias", + required=False, + type=str, + ) + 
parser.add_argument( + "--tp_topic", + help="TP topic", + default="/localization/pose_estimator/transform_probability", + required=False, + type=str, + ) + parser.add_argument( + "--scan_topic", + help="Point cloud topic", + default="/localization/util/downsample/pointcloud", + required=False, + type=str, + ) + + args = parser.parse_args() + + # Practice with string % a bit + print("Input PCD map at {0}".format(args.score_path)) + print("Input rosbag at {0}".format(args.bag_path)) + print("Topic of NDT poses {0}".format(args.pose_topic)) + print("Topic of Transformation Probability {0}".format(args.tp_topic)) + print("Topic of scan data {0}".format(args.scan_topic)) + + # Run + checker = TPChecker() + + checker.processing( + args.score_path, args.bag_path, args.pose_topic, args.tp_topic, args.scan_topic + ) diff --git a/map/autoware_tp_manager/scripts/tp_collector.py b/map/autoware_tp_manager/scripts/tp_collector.py new file mode 100755 index 00000000..5ae40b45 --- /dev/null +++ b/map/autoware_tp_manager/scripts/tp_collector.py @@ -0,0 +1,215 @@ +#!/usr/bin/env python3 + +# Copyright 2024 TIER IV, Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import csv +import os +import shutil +from subprocess import call + +import numpy as np +import rclpy +from rclpy.node import Node +import tp_utility as tpu +import yaml + + +# Update the TP of a segment at index @idx, given a newly TP value +def update_avg_tp(candidate_segments, new_tp, segment_dict, segment_df): + for key in candidate_segments: + if key in segment_dict: + i = segment_dict[key] + tp, counter = segment_df[i, [1, 2]] + segment_df[i, [1, 2]] = [tp + 1.0 / (counter + 1) * (new_tp - tp), counter + 1] + + +class TPCollector(Node): + def __init__(self): + super().__init__("tp_collector") + self.pcd_path = None + self.yaml_path = None + self.score_path = None + self.segment_df = None + self.segment_dict = {} # Pairs of 2D coordinate and index + self.resolution = None + + def __initialize(self, pcd_map_dir: str, output_path: str, resolution: float): + if not os.path.exists(output_path): + os.makedirs(output_path) + else: + print("The output directory {0} already existed. Re-create? 
[y/n]".format(output_path)) + if input() == "y": + shutil.rmtree(output_path) + os.makedirs(output_path) + + self.output_path = output_path + self.resolution = resolution + + print("Type resolution = {0}".format(type(self.resolution))) + + if not os.path.exists(pcd_map_dir): + print("Error: {0} does not exist!".format(pcd_map_dir)) + exit() + + self.pcd_path = pcd_map_dir + + if not os.path.exists(self.pcd_path): + print("Error: {0} does not exist!".format(self.pcd_path)) + exit() + + ##### Read the YAML file to get the list of PCD segments and scores ##### + def __get_pcd_segments_and_scores(self): + # Downsample the input PCDs to make it lighter so it can be published to rviz + # Create a dataframe to record the avg tp of 2D segments + self.segment_df = [] + + # Downsample the input clouds if necessary + self.yaml_path = os.path.join(self.output_path, "pointcloud_map_metadata.yaml") + + if not os.path.exists(self.yaml_path): + ds_cmd = ( + "ros2 launch autoware_pointcloud_divider pointcloud_divider.launch.xml " + + "input_pcd_or_dir:=" + + self.pcd_path + + " output_pcd_dir:=" + + self.output_path + + " prefix:=test leaf_size:=0.5" + + " grid_size_x:=" + + str(self.resolution) + + " grid_size_y:=" + + str(self.resolution) + ) + call(ds_cmd, shell=True) + self.pcd_path = os.path.join(self.output_path, "pointcloud_map.pcd") + + # Now scan the downsample directory and get the segment list + with open(self.yaml_path, "r") as f: + for key, value in yaml.safe_load(f).items(): + if key != "x_resolution" and key != "y_resolution": + self.segment_df.append([key, 0, 0]) + seg_key = str(value[0]) + "_" + str(value[1]) + self.segment_dict[seg_key] = len(self.segment_df) - 1 + + self.segment_df = np.array(self.segment_df, dtype=object) + + # Load the score map + self.score_path = os.path.join(self.output_path, "scores.csv") + + if os.path.exists(self.score_path): + with open(self.score_path, "r") as f: + reader = csv.reader(f) + + # Skip the header + next(reader) + # Load the current maps' TPs + for index, row in enumerate(reader): + self.segment_df[index, [1, 2]] = [float(row[1]), 1] + + ##### Save the segment TPs ##### + def __save_tps(self): + print("Saving TP to files") + with open(self.score_path, "w") as f: + f.write("segment,tp\n") + print("Number of segments = {0}".format(self.segment_df.shape[0])) + for i in np.arange(0, self.segment_df.shape[0], dtype=int): + f.write("{0},{1}\n".format(self.segment_df[i, 0], self.segment_df[i, 1])) + print("Done. 
Segments' TPs are saved at {0}.".format(self.score_path)) + + def processing( + self, + pcd_map_dir: str, + rosbag_path: str, + output_path: str, + resolution: float, + pose_topic: str, + tp_topic: str, + scan_topic: str, + ): + # Initialize paths to directories + self.__initialize(pcd_map_dir, output_path, resolution) + + # Get the segment lists and scores + self.__get_pcd_segments_and_scores() + + # Read the rosbag and get the ndt poses and corresponding tps + tpu.collect_rosbag_tp( + rosbag_path, + pose_topic, + tp_topic, + scan_topic, + self.resolution, + update_avg_tp, + self.segment_dict, + self.segment_df, + ) + + # Save the new TPs + self.__save_tps() + + +if __name__ == "__main__": + rclpy.init() + parser = argparse.ArgumentParser() + parser.add_argument("map_path", help="The path to the PCD folder", type=str) + parser.add_argument("bag_path", help="The path to the input rosbag", type=str) + parser.add_argument("output", help="The path to the output directory", type=str) + parser.add_argument( + "--resolution", help="Map segment resolution", default=20, required=False, type=float + ) + parser.add_argument( + "--pose_topic", + help="Pose topic", + default="/localization/pose_twist_fusion_filter/pose_with_covariance_without_yawbias", + required=False, + type=str, + ) + parser.add_argument( + "--tp_topic", + help="TP topic", + default="/localization/pose_estimator/transform_probability", + required=False, + type=str, + ) + parser.add_argument( + "--scan_topic", + help="Point cloud topic", + default="/localization/util/downsample/pointcloud", + required=False, + type=str, + ) + + args = parser.parse_args() + + # Practice with string % a bit + print("Input PCD map at {0}".format(args.map_path)) + print("Input rosbag at {0}".format(args.bag_path)) + print("Output results at {0}".format(args.output)) + print("Segmentation resolution {0}".format(args.resolution)) + print("Topic of NDT poses {0}".format(args.pose_topic)) + print("Topic of Transformation Probability {0}".format(args.tp_topic)) + print("Topic of scan data {0}".format(args.scan_topic)) + + # Run + tp_collector = TPCollector() + tp_collector.processing( + args.map_path, + args.bag_path, + args.output, + args.resolution, + args.pose_topic, + args.tp_topic, + args.scan_topic, + ) diff --git a/map/autoware_tp_manager/scripts/tp_utility.py b/map/autoware_tp_manager/scripts/tp_utility.py new file mode 100644 index 00000000..881ed899 --- /dev/null +++ b/map/autoware_tp_manager/scripts/tp_utility.py @@ -0,0 +1,263 @@ +#!/usr/bin/env python3 + +# Copyright 2024 Tier IV, Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
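+
+# Overview: shared helpers for tp_collector.py and tp_checker.py.
+# - FixQueue buffers incoming scans and processes them in batches through a callback.
+# - __pose_to_mat converts a geometry_msgs Pose into a 4x4 homogeneous transform.
+# - __stamp_search finds, by binary search, the latest pose/TP whose stamp precedes a scan.
+# - collect_rosbag_tp reads the rosbag (skipping roughly the first 2000 messages, see
+#   skip_count), pairs each downsampled scan with its closest pose and TP, transforms the
+#   scan into map coordinates, hashes the points into segment keys of the form "<x>_<y>"
+#   (truncated to the segment resolution), and invokes the given callback
+#   (update_avg_tp or mark_changes) on the covered segments.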
+ +from geometry_msgs.msg import Pose +from geometry_msgs.msg import PoseWithCovarianceStamped +import numpy as np +from rclpy.serialization import deserialize_message +from rosbag2_py import ConverterOptions +from rosbag2_py import Info +from rosbag2_py import SequentialReader +from rosbag2_py import StorageOptions +import sensor_msgs.msg as sensor_msgs +import sensor_msgs_py.point_cloud2 as pc2 +from tier4_debug_msgs.msg import Float32Stamped +import tqdm + + +class FixQueue: + def __init__(self): + self.queue_limit = 100 # Max number of items in the queue + self.data_queue = None # Queue of data + self.head = 0 # Index to append a new item (enqueue index) + self.tail = 0 # Index to start remove/process items (dequeue index) + self.current_size = 0 # Number of items in the queue + + def enqueue(self, item): + if self.is_full(): + print("Cannot add new item. Queue is full!") + return False + + if self.is_empty() or self.data_queue.shape[1] != len(item): + self.data_queue = np.ndarray((self.queue_limit, len(item)), dtype=object) + self.data_queue[self.tail, :] = item + self.tail += 1 + + if self.tail == self.queue_limit: + self.tail = 0 + + self.current_size += 1 + + return True + + def drop(self, limitless=False, callback_func=None, *args): + if limitless == True: + end_id = self.tail + else: + end_id = self.head + int(self.queue_limit / 2) + + if self.head < end_id: + drop_list = np.arange(self.head, end_id) + else: + drop_list = np.arange(self.head, end_id + self.queue_limit) % self.queue_limit + + for i in drop_list: + # Process the item at index i + callback_func(self.data_queue[i], *args) + + self.current_size -= len(drop_list) + self.head = end_id + + if self.head == self.queue_limit: + self.head = 0 + + def is_full(self) -> bool: + if self.current_size == self.queue_limit: + return True + return False + + def is_empty(self) -> bool: + if self.current_size == 0: + return True + return False + + +# Some utility functions that is shared by both the tp collector and tp checker +# Convert a pose to a 4x4 transformation matrix +def __pose_to_mat(pose: Pose) -> np.array: + t = pose.position # Translation + q = pose.orientation # Rotation + + qx = q.x + qy = q.y + qz = q.z + qw = q.w + + return np.asarray( + [ + [1 - 2 * (qy * qy + qz * qz), 2 * (qx * qy - qw * qz), 2 * (qw * qy + qx * qz), t.x], + [2 * (qx * qy + qw * qz), 1 - 2 * (qx * qx + qz * qz), 2 * (qy * qz - qw * qz), t.y], + [2 * (qx * qz - qw * qy), 2 * (qw * qx + qy * qz), 1 - 2 * (qx * qx + qy * qy), t.z], + [0.0, 0.0, 0.0, 1.0], + ] + ) + + +# Point-wise transform +def transform_p(p, trans_mat_t: np.ndarray) -> np.ndarray: + tp = np.asarray([p[0], p[1], p[2], 1.0]) + return np.matmul(tp, trans_mat_t) + + +##### Stamp search ##### +def __stamp_search(stamp: int, ref_df: np.ndarray, search_size: int) -> int: + left = 0 + right = search_size - 1 + res = -1 + + # Find the closest stamp that goes before the stamp key + while left <= right: + mid = (left + right) // 2 + cur_stamp = ref_df[mid, 0] + + if cur_stamp <= stamp: + left = mid + 1 + res = mid + elif cur_stamp > stamp: + right = mid - 1 + + return res + + +def __get_message_count(rosbag_path: str): + info = Info() + metadata = info.read_metadata(rosbag_path, "sqlite3") + + output = { + item.topic_metadata.name: item.message_count for item in metadata.topics_with_message_count + } + output["total"] = metadata.message_count + + return output + + +def __scan_callback( + scan_tuple, pose_df, pose_df_len, tp_df, tp_df_len, resolution, callback_func=None, *args +): + stamp, scan 
= scan_tuple + tmp_scan = pc2.read_points(scan, skip_nans=True) + + # Find the closest pose + pid = __stamp_search(stamp, pose_df, pose_df_len) + + if pid <= 0: + return + + # Get the transposed transformation matrix + closest_p_t = pose_df[pid, 1].T + + # Skip some 0 poses at the beginning of the rosbag + if closest_p_t[3, 0] == 0 and closest_p_t[3, 1] == 0 and closest_p_t[3, 2] == 0: + return + + # Find the closest tp + tid = __stamp_search(stamp, tp_df, tp_df_len) + + if tid <= 0: + return + + closest_tp = tp_df[tid, 1] + + # Transform the scan and find the segments that cover the transformed points + # Record the segments that cover the scan + segment_set = set() + + for p in tmp_scan: + tp = transform_p(p, closest_p_t) + + # Hash the point to find the segment containing it + sx = int(tp[0] / resolution) * int(resolution) + sy = int(tp[1] / resolution) * int(resolution) + seg_idx = str(sx) + "_" + str(sy) + + segment_set.add(seg_idx) + + # Update/examine the segments' avg TP + callback_func(segment_set, closest_tp, *args) + + +##### Read the input rosbag and update map's TP ##### +def collect_rosbag_tp(bag_path: str, pose_topic: str, tp_topic: str, scan_topic: str, *args): + reader = SequentialReader() + bag_storage_options = StorageOptions(uri=bag_path, storage_id="sqlite3") + bag_converter_options = ConverterOptions( + input_serialization_format="cdr", output_serialization_format="cdr" + ) + reader.open(bag_storage_options, bag_converter_options) + + print("Reading rosbag...") + + msg_count_by_topic = __get_message_count(bag_path) + progress_bar = tqdm.tqdm(total=msg_count_by_topic["total"]) + + if msg_count_by_topic[pose_topic] == 0: + print( + "Error: the input rosbag contains no message from the topic {0}. Exit!".format( + pose_topic + ) + ) + if msg_count_by_topic[tp_topic] == 0: + print( + "Error: the input rosbag contains no message from the topic {0}. Exit!".format(tp_topic) + ) + if msg_count_by_topic[scan_topic] == 0: + print( + "Error: the input rosbag contains no message from the topic {0}. 
Exit!".format( + scan_topic + ) + ) + + pose_df = np.ndarray((msg_count_by_topic[pose_topic], 2), dtype=object) + tp_df = np.ndarray((msg_count_by_topic[tp_topic], 2), dtype=object) + pose_df_idx = 0 + tp_df_idx = 0 + scan_df = FixQueue() + + skip_count = 0 + + while reader.has_next(): + progress_bar.update(1) + (topic, data, stamp) = reader.read_next() + + if skip_count <= 2000: + skip_count += 1 + continue + + if topic == pose_topic: + pose_msg = deserialize_message(data, PoseWithCovarianceStamped) + stamp = pose_msg.header.stamp.sec * 1e9 + pose_msg.header.stamp.nanosec + pose_df[pose_df_idx, :] = [stamp, __pose_to_mat(pose_msg.pose.pose)] + pose_df_idx += 1 + + elif topic == tp_topic: + tp_msg = deserialize_message(data, Float32Stamped) + stamp = tp_msg.stamp.sec * 1e9 + tp_msg.stamp.nanosec + tp_df[tp_df_idx, :] = [stamp, tp_msg.data] + tp_df_idx += 1 + + elif topic == scan_topic: + pc_msg = deserialize_message(data, sensor_msgs.PointCloud2) + stamp = pc_msg.header.stamp.sec * 1e9 + pc_msg.header.stamp.nanosec + + scan_df.enqueue([stamp, pc_msg]) + + if scan_df.is_full(): + scan_df.drop(False, __scan_callback, pose_df, pose_df_idx, tp_df, tp_df_idx, *args) + + # Process the rest of the queue + scan_df.drop(True, __scan_callback, pose_df, pose_df_idx, tp_df, tp_df_idx, *args) + + progress_bar.close() diff --git a/map/autoware_tp_manager/scripts/tp_visualizer.py b/map/autoware_tp_manager/scripts/tp_visualizer.py new file mode 100644 index 00000000..47ff33ff --- /dev/null +++ b/map/autoware_tp_manager/scripts/tp_visualizer.py @@ -0,0 +1,157 @@ +#!/usr/bin/env python3 + +# Copyright 2024 TIER IV, Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import os +import struct +import time + +import numpy as np +import open3d as o3d +import pandas as pd +import rclpy +from rclpy.node import Node +from scipy import spatial as sp +import sensor_msgs.msg as sensor_msgs +import sensor_msgs_py.point_cloud2 as pc2 +import std_msgs.msg as std_msgs +import tqdm + + +class TPVisualizer(Node): + def __init__(self): + super().__init__("tp_visualizer") + self.pcd_map_dir = None + self.pcd_path = None + self.yaml_path = None + self.score_path = None + # Color based on rounded TP + self.color = { + 0: [255, 255, 255], # White + 1: [255, 255, 51], # Yellow + 2: [255, 128, 0], # Orange + 3: [204, 0, 0], # Dark red + 4: [51, 153, 255], # Blue + 5: [51, 153, 255], + 6: [51, 153, 255], + 7: [51, 153, 255], + 8: [51, 153, 255], + } + + ##### Read the YAML file to get the list of PCD segments and scores ##### + def __get_pcd_segments_and_scores(self, pcd_map_dir: str): + if not os.path.exists(pcd_map_dir): + print("Error: the input PCD folder does not exist at {0}! Abort!".format(pcd_map_dir)) + exit() + + self.pcd_map_dir = pcd_map_dir + self.pcd_path = os.path.join(pcd_map_dir, "pointcloud_map.pcd/") + + if not os.path.exists(self.pcd_path): + print("Error: no PCD file was found at {0}! 
Abort!".format(self.pcd_path)) + exit() + + self.score_path = os.path.join(pcd_map_dir, "scores.csv") + + if not os.path.exists(self.score_path): + print("Error: a score file does not exist at {0}".format(self.score_path)) + exit() + + self.segment_df = pd.read_csv(self.score_path) + + def __show(self): + ros_float_dtype = sensor_msgs.PointField.FLOAT32 + ros_uint32_dtype = sensor_msgs.PointField.UINT32 + dtype = np.float32 + itemsize = np.dtype(dtype).itemsize + + fields = [ + sensor_msgs.PointField(name="x", offset=0, datatype=ros_float_dtype, count=1), + sensor_msgs.PointField(name="y", offset=itemsize, datatype=ros_float_dtype, count=1), + sensor_msgs.PointField( + name="z", offset=itemsize * 2, datatype=ros_float_dtype, count=1 + ), + sensor_msgs.PointField( + name="rgba", offset=itemsize * 3, datatype=ros_uint32_dtype, count=1 + ), + ] + + points = [] + pc2_width = 0 + + progress_bar = tqdm.tqdm(total=len(self.segment_df)) + origin = None + + for tuple in self.segment_df.itertuples(): + progress_bar.update(1) + # Load the current segment + seg_path = self.pcd_path + "/" + tuple.segment + pcd = o3d.io.read_point_cloud(seg_path) + np_pcd = np.asarray(pcd.points) + rgba = self.__set_color_based_on_score(tuple.tp) + + for p in np_pcd: + if origin == None: + origin = [p[0], p[1], p[2]] + pt = [p[0] - origin[0], p[1] - origin[1], p[2] - origin[2], rgba] + points.append(pt) + pc2_width += 1 + + progress_bar.close() + + print("Publishing result...") + header = std_msgs.Header() + header.stamp = self.get_clock().now().to_msg() + header.frame_id = "map" + + pc2_msg = pc2.create_cloud(header, fields, points) + pcd_publisher = self.create_publisher( + sensor_msgs.PointCloud2, "/autoware_tp_visualizer", 10 + ) + + while True: + pcd_publisher.publish(pc2_msg) + time.sleep(1) + + def __set_color_based_on_score(self, score) -> int: + r, g, b = self.color[int(score)] + a = 255 + + tmp_rgb = struct.pack("BBBB", b, g, r, a) + rgba = struct.unpack("I", tmp_rgb)[0] + + return rgba + + def processing(self, pcd_map_dir: str): + # Get the segment lists and scores + self.__get_pcd_segments_and_scores(pcd_map_dir) + # Publish to rviz + self.__show() + + +if __name__ == "__main__": + rclpy.init() + parser = argparse.ArgumentParser() + parser.add_argument("map_path", help="The path to the result folder") + + args = parser.parse_args() + + # Practice with string % a bit + print("Input PCD map at %s" % (args.map_path)) + + # Run + tp_collector = TPVisualizer() + tp_collector.processing(args.map_path)