From 441e737887ea90e6085f1a287550470b2e9b5ed6 Mon Sep 17 00:00:00 2001
From: Jarek Karwowski
Date: Sat, 20 Jan 2024 00:36:09 +0100
Subject: [PATCH] `PathCrossingDetector` extended with the analysis of the
 global path plan [#124]

---
 .../path_crossing_detector.h   |  84 +++++
 src/path_crossing_detector.cpp | 348 ++++++++++++++----
 2 files changed, 357 insertions(+), 75 deletions(-)

diff --git a/include/humap_local_planner/path_crossing_detector.h b/include/humap_local_planner/path_crossing_detector.h
index 1b73e1c9..e6f0e7ca 100644
--- a/include/humap_local_planner/path_crossing_detector.h
+++ b/include/humap_local_planner/path_crossing_detector.h
@@ -22,6 +22,11 @@ class PathCrossingDetector {
 	 */
 	static constexpr double SAFE_POINT_DISTANCE_MULTIPLIER_DEFAULT = 0.70;

+	/// Innovation factor for the motion direction filtering
+	static constexpr double ROBOT_INNOV_FACTOR_DEFAULT = 0.8;
+	/// Innovation factor for the motion direction filtering (person predictions have smaller confidence)
+	static constexpr double PERSON_INNOV_FACTOR_DEFAULT = 0.6;
+
 	PathCrossingDetector();

 	void setParameters(
@@ -37,11 +42,25 @@ class PathCrossingDetector {
 	 * @brief Performs crossing detection and tries to find the safe directions for further motion for the robot
 	 *
 	 * @param traj_robot evaluated robot trajectory (with velocities expressed in global coordinates)
+	 * @param global_plan global path plan of the robot
 	 * @param traj_people predicted trajectories of surrounding people
 	 * @return true when crossing was detected
 	 */
 	bool detect(
 		const Trajectory& traj_robot,
+		const std::vector<geometry_msgs::PoseStamped>& global_plan,
+		const std::vector<Trajectory>& traj_people
+	);
+
+	/// @brief Performs crossing detection and tries to find the safe directions for further motion for the robot
+	bool detect(
+		const Trajectory& traj_robot,
+		const std::vector<Trajectory>& traj_people
+	);
+
+	/// @brief Performs crossing detection and tries to find the safe directions for further motion for the robot
+	bool detect(
+		const std::vector<geometry_msgs::PoseStamped>& global_plan,
 		const std::vector<Trajectory>& traj_people
 	);

@@ -81,7 +100,72 @@ class PathCrossingDetector {
 		return gap_closest_person_;
 	}

+	/**
+	 * @brief Implements a complementary filter to process the motion direction (from poses)
+	 *
+	 * @param direction the newest motion direction
+	 * @param pose_prev previous pose
+	 * @param pose_current current pose
+	 * @param innovation_factor the bigger the value is, the more confident we are about the new observations
+	 * @return a new motion direction
+	 */
+	static double motionDirectionFilter(
+		double direction,
+		const geometry::Pose& pose_prev,
+		const geometry::Pose& pose_current,
+		double innovation_factor
+	);
+
 protected:
+	/**
+	 * @brief Resets class' members to clear any pending detections
+	 */
+	void prepareForDetecting();
+
+	/**
+	 * @return true if any crossing was detected
+	 */
+	bool detectCrossingTrajectory(
+		const Trajectory& traj_robot,
+		const std::vector<Trajectory>& traj_people
+	);
+
+	/**
+	 * @return true if any crossing was detected
+	 */
+	bool detectCrossingPath(
+		const std::vector<geometry_msgs::PoseStamped>& global_plan,
+		const std::vector<Trajectory>& traj_people
+	);
+
+	/**
+	 * @brief Performs detection of a human proximity that may indicate crossing of the robot's path
+	 *
+	 * Method that needs to be called in each step of the prediction horizon
+	 *
+	 * @param robot_pose pose of the robot with its motion direction embedded in the yaw
+	 * @param person_pose pose of the person with their motion direction embedded in the yaw
+	 * @param timestamp_prediction time of the evaluated prediction step ("how far from now")
+	 * @param save_gap_to_closest_person set to true to update the gap to the closest person (only for the current, not predicted, poses)
+	 *
+	 * @return true if crossing was detected and its confidence exceeds the threshold
+	 */
+	bool performCrossingDetection(
+		const geometry::Pose& robot_pose,
+		const geometry::Pose& person_pose,
+		double timestamp_prediction,
+		bool save_gap_to_closest_person,
+		bool compute_crossing_angle_confidence = true
+	);
+
+	/**
+	 * @brief Finds safe directions behind the crossing people and selects the one closest to the robot
+	 *
+	 * @param robot_pose the current pose of the robot (first of the predicted trajectory)
+	 * @return true if a safe direction has been found
+	 */
+	bool findSafeDirectionsAndClosestPair(const geometry::Pose& robot_pose);
+
 	double person_model_radius_;
 	double robot_model_radius_;
 	double separation_threshold_;
diff --git a/src/path_crossing_detector.cpp b/src/path_crossing_detector.cpp
index 487f0026..823d30d1 100644
--- a/src/path_crossing_detector.cpp
+++ b/src/path_crossing_detector.cpp
@@ -35,14 +35,60 @@ void PathCrossingDetector::setParameters(
 bool PathCrossingDetector::detect(
 	const Trajectory& traj_robot,
+	const std::vector<geometry_msgs::PoseStamped>& global_plan,
 	const std::vector<Trajectory>& traj_people
 ) {
+	prepareForDetecting();
+	detectCrossingTrajectory(traj_robot, traj_people);
+	detectCrossingPath(global_plan, traj_people);
+	// no clue whether the closest detection comes from the trajectory or from the path
+	return crossing_detected_;
+}
+
+bool PathCrossingDetector::detect(
+	const Trajectory& traj_robot,
+	const std::vector<Trajectory>& traj_people
+) {
+	prepareForDetecting();
+	return detectCrossingTrajectory(traj_robot, traj_people);
+}
+
+bool PathCrossingDetector::detect(
+	const std::vector<geometry_msgs::PoseStamped>& global_plan,
+	const std::vector<Trajectory>& traj_people
+) {
+	prepareForDetecting();
+	return detectCrossingPath(global_plan, traj_people);
+}
+
+double PathCrossingDetector::motionDirectionFilter(
+	double direction,
+	const geometry::Pose& pose_prev,
+	const geometry::Pose& pose_current,
+	double innovation_factor
+) {
+	auto pos_diff = pose_current.getPosition() - pose_prev.getPosition();
+	auto new_dir = pos_diff.calculateDirection().getRadian();
+
+	// complementary filter
+	double filtered_dir = geometry::Angle(
+		(1.0 - innovation_factor) * direction + innovation_factor * new_dir
+	).getRadian();
+	return filtered_dir;
+}
+
+void PathCrossingDetector::prepareForDetecting() {
 	crossing_detected_ = false;
 	crossing_people_data_.clear();
 	safe_directions_behind_crossing_people_.clear();
 	closest_safe_direction_ = {(NAN, NAN, NAN), NAN};
 	gap_closest_person_ = std::numeric_limits<double>::max();
+}
+bool PathCrossingDetector::detectCrossingTrajectory(
+	const Trajectory& traj_robot,
+	const std::vector<Trajectory>& traj_people
+) {
 	if (traj_robot.getPoses().empty()) {
 		return false;
 	}
@@ -92,103 +138,255 @@ bool PathCrossingDetector::detect(
 			robot_pose_it++, person_pose_it++
 		) {
 			if (!first_iteration) {
-				auto robot_pos_diff = robot_pose_it->getPosition() - std::prev(robot_pose_it)->getPosition();
-				auto robot_new_dir = robot_pos_diff.calculateDirection().getRadian();
-
-				auto person_pos_diff = person_pose_it->getPosition() - std::prev(person_pose_it)->getPosition();
-				auto person_new_dir = person_pos_diff.calculateDirection().getRadian();
-
-				// complementary filters
-				const double ROBOT_INNOV_FACTOR = 0.8;
-				const double PERSON_INNOV_FACTOR = 0.6;
-				direction_robot = geometry::Angle(
-					(1.0 - ROBOT_INNOV_FACTOR) * direction_robot + ROBOT_INNOV_FACTOR * robot_new_dir
-				).getRadian();
-				// person predictions have smaller confidence
-				direction_person = geometry::Angle(
-					(1.0 - PERSON_INNOV_FACTOR) * direction_person + PERSON_INNOV_FACTOR * person_new_dir
-				).getRadian();
+				direction_robot = PathCrossingDetector::motionDirectionFilter(
+					direction_robot,
+					*std::prev(robot_pose_it),
+					*robot_pose_it,
+					ROBOT_INNOV_FACTOR_DEFAULT
+				);
+				direction_person = PathCrossingDetector::motionDirectionFilter(
+					direction_person,
+					*std::prev(person_pose_it),
+					*person_pose_it,
+					PERSON_INNOV_FACTOR_DEFAULT
+				);
 			}

-			// estimated distance between the centers of the robot and human
-			auto dist_center = (robot_pose_it->getPosition() - person_pose_it->getPosition()).calculateLength();
-			// spatial gap between the models of the robot and human
-			auto dist_gap = dist_center - person_model_radius_ - robot_model_radius_;
-			// trim in case of collision detection
-			dist_gap = std::max(0.0, dist_gap);
-
-			// store only the actual (not predicted) closest gap - update this only during the first iteration
-			if (first_iteration) {
-				gap_closest_person_ = std::min(gap_closest_person_, dist_gap);
-			}
+			// assigning direction into yaw here
+			bool person_crosses = performCrossingDetection(
+				geometry::Pose(robot_pose_it->getX(), robot_pose_it->getY(), direction_robot),
+				geometry::Pose(person_pose_it->getX(), person_pose_it->getY(), direction_person),
+				timestamp,
+				first_iteration,
+				true
+			);

-			if (dist_gap > separation_threshold_) {
+			if (!person_crosses) {
 				timestamp += traj_person.getTimeDelta();
 				first_iteration = false;
 				continue;
 			}

-			// confidence of crossing based on the time of prediction ("how far from now"); approx. 50% at 2 sec pred.
-			const double TIMING_EXP_FACTOR = -0.34;
-			double cross_confidence = std::exp(TIMING_EXP_FACTOR * timestamp);
+			// do not clear the flag if previously set
+			crossing_detected_ = true;
+			crossing_people_data_.push_back({traj_person.getPoses().front(), direction_person});

-			// evaluate the angle of approaching crossing vectors
-			geometry::Angle cross_angle(direction_robot - direction_person);
+			timestamp += traj_person.getTimeDelta();
+			first_iteration = false;
+			// processing this trajectory further is not needed
+			break;
+		}
+	}

-			// knowing whether the human approaches from the left or right is also necessary
-			social_nav_utils::RelativeLocation rel_loc(
-				robot_pose_it->getX(),
-				robot_pose_it->getY(),
-				robot_pose_it->getYaw(),
-				person_pose_it->getX(),
-				person_pose_it->getY()
-			);
+	// second stage - find safe directions
+	//
+	// robot's initial pose and direction are known
+	findSafeDirectionsAndClosestPair(traj_robot.getPoses().front());
+	return crossing_detected_;
+}

-			/*
-			 * Detection of crossings between the paths of the robot and the human. Cases of interest are crossings
-			 * with the human approaching directly from the side (right or left).
-			 * It is modelled by the 1D Gaussian (for angle domain) with a mean at abs(M_PI_2)
-			 * and with a standard deviation of (M_PI_2 / 2.0) (2 sigma rule)
-			 */
-			// side-angle confidence; whether the crossing person approaches directly from the side
-			double angle_confidence = social_nav_utils::calculateGaussianAngle(
-				cross_angle.getRadian(),
-				rel_loc.isLeftSide() ? M_PI_2 : -M_PI_2,
-				std::pow(M_PI_4 / 2.0, 2.0),
-				true // normalize to 1.0 at mean
-			);
+
+bool PathCrossingDetector::detectCrossingPath(
+	const std::vector<geometry_msgs::PoseStamped>& global_plan,
+	const std::vector<Trajectory>& traj_people
+) {
+	std::vector<geometry::Pose> global_plan_poses;
+	for (const auto& pose_stamped: global_plan) {
+		global_plan_poses.push_back(geometry::Pose(pose_stamped));
+	}

-			/*
-			 * Consider only those detections that are in front of the robot (a 90 deg "cone" in front)
-			 * The relative location is expressed in the robot's local coordinate system, therefore mean is 0.0
-			 */
-			double front_confidence = social_nav_utils::calculateGaussianAngle(
-				rel_loc.getAngle(),
-				0.0,
-				std::pow(M_PI / 2.0, 2.0),
-				true // normalize to 1.0 at mean
-			);
+	if (global_plan_poses.empty()) {
+		return false;
+	}

-			double confidence = cross_confidence * angle_confidence * front_confidence;
-			bool person_crossing = confidence >= confidence_threshold_;
+	// The loop is constructed as follows:
+	// - for each trajectory of detected people
+	// - iterate over person's trajectory poses
+	// - and check it against each pose of the global path
+	for (const auto& traj_person: traj_people) {
+		// velocity data is required for further calculations
+		if (traj_person.getVelocities().empty()) {
+			continue;
+		}

-			// do not clear the flag if previously set
-			crossing_detected_ = crossing_detected_ || person_crossing;
+		auto vel_person = traj_person.getVelocities().front();
+		double speed_person = std::hypot(vel_person.getX(), vel_person.getY());

-			if (person_crossing) {
+		if (speed_person < speed_negligible_threshold_) {
+			continue;
+		}
+
+		// iterator must be referenced to an allocated container
+		auto traj_person_poses = traj_person.getPoses();
+		if (traj_person_poses.empty()) {
+			break;
+		}
+
+		// store filtered motion directions of a person
+		double direction_person = std::atan2(
+			traj_person.getVelocities().front().getY(),
+			traj_person.getVelocities().front().getX()
+		);
+
+		double timestamp = 0.0;
+		bool first_iteration_person = true;
+		bool collision_w_person_detected = false;
+		// iterate over human poses
+		for (
+			auto person_pose_it = traj_person_poses.cbegin();
+			person_pose_it != traj_person_poses.cend();
+			person_pose_it++
+		) {
+			if (!first_iteration_person) {
+				direction_person = PathCrossingDetector::motionDirectionFilter(
+					direction_person,
+					*std::prev(person_pose_it),
+					*person_pose_it,
+					PERSON_INNOV_FACTOR_DEFAULT
+				);
+			}
+
+			// stores filtered motion direction of a robot
+			double direction_robot = NAN;
+			// non empty container ensured before, seed the robot direction somehow
+			if (global_plan_poses.size() >= 2) {
+				direction_robot = (
+					global_plan_poses.at(1).getPosition() - global_plan_poses.at(0).getPosition()
+				).calculateDirection().getRadian();
+			} else if (!global_plan_poses.empty()) {
+				// global path might not have the orientation updated
+				direction_robot = global_plan_poses.front().getYaw();
+			}
+
+			bool first_iteration_path = true;
+			// iterate over robot poses
+			for (
+				auto robot_pose_it = global_plan_poses.cbegin();
+				robot_pose_it != global_plan_poses.cend();
+				robot_pose_it++
+			) {
+				if (!first_iteration_path) {
+					direction_robot = PathCrossingDetector::motionDirectionFilter(
+						direction_robot,
+						*std::prev(robot_pose_it),
+						*robot_pose_it,
+						ROBOT_INNOV_FACTOR_DEFAULT
+					);
+				}
+
+				// assigning direction into yaw here
+				bool person_crosses = performCrossingDetection(
+					geometry::Pose(robot_pose_it->getX(), robot_pose_it->getY(), direction_robot),
+					geometry::Pose(person_pose_it->getX(), person_pose_it->getY(), direction_person),
+					timestamp,
+					first_iteration_path,
+					true
+				);
+
+				if (!person_crosses) {
+					first_iteration_path = false;
+					continue;
+				}
+
+				crossing_detected_ = true;
 				crossing_people_data_.push_back({traj_person.getPoses().front(), direction_person});
+
+				first_iteration_path = false;
+				collision_w_person_detected = true;
+				// processing this trajectory further is not needed
+				break;
+			}
+
+			// exit to outer loop
+			if (collision_w_person_detected) {
+				break;
 			}

 			timestamp += traj_person.getTimeDelta();
-			first_iteration = false;
-			break;
+			first_iteration_person = false;
 		}
 	}

 	// second stage - find safe directions
 	//
 	// robot's initial pose and direction are known
-	auto robot_pose = traj_robot.getPoses().front();
+	findSafeDirectionsAndClosestPair(global_plan_poses.front());
+	return crossing_detected_;
+}
+
+bool PathCrossingDetector::performCrossingDetection(
+	const geometry::Pose& robot_pose,
+	const geometry::Pose& person_pose,
+	double timestamp_prediction,
+	bool save_gap_to_closest_person,
+	bool compute_crossing_angle_confidence
+) {
+	// estimated distance between the centers of the robot and human
+	auto dist_center = (robot_pose.getPosition() - person_pose.getPosition()).calculateLength();
+	// spatial gap between the models of the robot and human
+	auto dist_gap = dist_center - person_model_radius_ - robot_model_radius_;
+	// trim in case of collision detection
+	dist_gap = std::max(0.0, dist_gap);
+
+	// store only the actual (not predicted) closest gap - update this only during the first iteration
+	if (save_gap_to_closest_person) {
+		gap_closest_person_ = std::min(gap_closest_person_, dist_gap);
+	}
+
+	if (dist_gap > separation_threshold_) {
+		return false;
+	}
+
+	// confidence of crossing based on the time of prediction ("how far from now"); approx. 50% at 2 sec pred.
+	const double TIMING_EXP_FACTOR = -0.34;
+	double time_confidence = std::exp(TIMING_EXP_FACTOR * timestamp_prediction);
+
+	// evaluate the angle of approaching crossing vectors
+	geometry::Angle cross_angle(robot_pose.getYaw() - person_pose.getYaw());
+
+	// knowing whether the human approaches from the left or right is also necessary
+	social_nav_utils::RelativeLocation rel_loc(
+		robot_pose.getX(),
+		robot_pose.getY(),
+		robot_pose.getYaw(),
+		person_pose.getX(),
+		person_pose.getY()
+	);
+
+	double angle_confidence = 1.0;
+	if (compute_crossing_angle_confidence) {
+		/*
+		 * Detection of crossings between the paths of the robot and the human. Cases of interest are crossings
+		 * with the human approaching directly from the side (right or left).
+		 * It is modelled by the 1D Gaussian (for angle domain) with a mean at abs(M_PI_2)
+		 * and with a standard deviation of (M_PI_2 / 2.0) (2 sigma rule)
+		 */
+		// side-angle confidence; whether the crossing person approaches directly from the side
+		angle_confidence = social_nav_utils::calculateGaussianAngle(
+			cross_angle.getRadian(),
+			rel_loc.isLeftSide() ? M_PI_2 : -M_PI_2,
+			std::pow(M_PI_4 / 2.0, 2.0),
+			true // normalize to 1.0 at mean
+		);
+	}
+
+	/*
+	 * Consider only those detections that are in front of the robot (a 90 deg "cone" in front)
+	 * The relative location is expressed in the robot's local coordinate system, therefore mean is 0.0
+	 */
+	double front_confidence = social_nav_utils::calculateGaussianAngle(
+		rel_loc.getAngle(),
+		0.0,
+		std::pow(M_PI / 2.0, 2.0),
+		true // normalize to 1.0 at mean
+	);
+
+	double confidence = time_confidence * angle_confidence * front_confidence;
+	bool person_crossing = confidence >= confidence_threshold_;
+	return person_crossing;
+}
+
+bool PathCrossingDetector::findSafeDirectionsAndClosestPair(const geometry::Pose& robot_pose) {
+	// second stage - find safe directions
 	// find directions from the robot initial pose towards safe positions (behind a person)
 	for (const auto& person_data: crossing_people_data_) {
@@ -212,10 +410,10 @@ bool PathCrossingDetector::detect(
 	// Stage 3 - find the closest "pair"
 	if (safe_directions_behind_crossing_people_.empty()) {
 		// keep NANs
-		return crossing_detected_;
+		return false;
 	} else if (safe_directions_behind_crossing_people_.size() == 1) {
 		closest_safe_direction_ = safe_directions_behind_crossing_people_.front();
-		return crossing_detected_;
+		return true;
 	}
 	// multiple - must sort according to the distance (from the robot to the safe position)
 	std::vector>> sort_container;
@@ -235,7 +433,7 @@
 		}
 	);
 	closest_safe_direction_ = sort_container.front().second;
-	return crossing_detected_;
+	return true;
 }

 } // namespace humap_local_planner
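
Note on the complementary filter used above: the blending rule applied by motionDirectionFilter() is simply (1 - k) * direction + k * new_direction, wrapped back to a valid angle. The following is a minimal, self-contained sketch of that update, not code from the patch; SimplePose and normalizeAngle are illustrative stand-ins for the planner's geometry::Pose and geometry::Angle types.

// Standalone sketch (not part of the patch): emulates the complementary-filter
// update of PathCrossingDetector::motionDirectionFilter with plain types.
#include <cmath>
#include <cstdio>

struct SimplePose {
	double x;
	double y;
};

// wraps an angle to the (-pi, pi] range, similarly to what geometry::Angle does
double normalizeAngle(double angle) {
	return std::atan2(std::sin(angle), std::cos(angle));
}

// complementary filter: blends the previous direction estimate with the
// direction of the displacement between two consecutive poses
double motionDirectionFilter(
	double direction,
	const SimplePose& pose_prev,
	const SimplePose& pose_current,
	double innovation_factor
) {
	double new_dir = std::atan2(
		pose_current.y - pose_prev.y,
		pose_current.x - pose_prev.x
	);
	return normalizeAngle((1.0 - innovation_factor) * direction + innovation_factor * new_dir);
}

int main() {
	// a person initially heading along +X, then stepping diagonally (+X, +Y)
	double direction = 0.0;
	SimplePose prev{0.0, 0.0};
	SimplePose curr{0.1, 0.1};
	// with the person factor of 0.6, the estimate moves 60% of the way
	// towards the newly observed direction of ~0.785 rad
	direction = motionDirectionFilter(direction, prev, curr, 0.6);
	std::printf("filtered direction: %.3f rad\n", direction);  // ~0.471 rad
	return 0;
}

With these numbers the newly observed direction is atan2(0.1, 0.1) ≈ 0.785 rad, so the filtered estimate becomes 0.6 * 0.785 ≈ 0.471 rad; the robot factor of 0.8 trusts new observations more, while the person factor of 0.6 reflects the lower confidence in predicted human poses.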