diff --git a/launch/trackdlo.launch b/launch/trackdlo.launch
index 13546cc..afab431 100644
--- a/launch/trackdlo.launch
+++ b/launch/trackdlo.launch
@@ -1,25 +1,32 @@
+
+
-
-
-
-
+
+
+
+
+
+
+
+
+
@@ -44,16 +51,16 @@
-
+
-
+
-
+
@@ -71,6 +78,7 @@
+
@@ -78,6 +86,8 @@
+
+
diff --git a/rviz/tracking.rviz b/rviz/tracking.rviz
index 99ae673..669190c 100644
--- a/rviz/tracking.rviz
+++ b/rviz/tracking.rviz
@@ -78,85 +78,7 @@ Visualization Manager:
       Marker Topic: /trackdlo/results_marker
       Name: MarkerArray
       Namespaces:
-        line_results0: true
-        line_results1: true
-        line_results10: true
-        line_results11: true
-        line_results12: true
-        line_results13: true
-        line_results14: true
-        line_results15: true
-        line_results16: true
-        line_results17: true
-        line_results18: true
-        line_results19: true
-        line_results2: true
-        line_results20: true
-        line_results21: true
-        line_results22: true
-        line_results23: true
-        line_results24: true
-        line_results25: true
-        line_results26: true
-        line_results27: true
-        line_results28: true
-        line_results29: true
-        line_results3: true
-        line_results30: true
-        line_results31: true
-        line_results32: true
-        line_results33: true
-        line_results34: true
-        line_results35: true
-        line_results36: true
-        line_results37: true
-        line_results38: true
-        line_results4: true
-        line_results5: true
-        line_results6: true
-        line_results7: true
-        line_results8: true
-        line_results9: true
-        node_results0: true
-        node_results1: true
-        node_results10: true
-        node_results11: true
-        node_results12: true
-        node_results13: true
-        node_results14: true
-        node_results15: true
-        node_results16: true
-        node_results17: true
-        node_results18: true
-        node_results19: true
-        node_results2: true
-        node_results20: true
-        node_results21: true
-        node_results22: true
-        node_results23: true
-        node_results24: true
-        node_results25: true
-        node_results26: true
-        node_results27: true
-        node_results28: true
-        node_results29: true
-        node_results3: true
-        node_results30: true
-        node_results31: true
-        node_results32: true
-        node_results33: true
-        node_results34: true
-        node_results35: true
-        node_results36: true
-        node_results37: true
-        node_results38: true
-        node_results39: true
-        node_results4: true
-        node_results5: true
-        node_results6: true
-        node_results7: true
-        node_results8: true
-        node_results9: true
+        {}
       Queue Size: 100
       Value: true
     - Class: rviz/MarkerArray
@@ -179,7 +101,7 @@ Visualization Manager:
       Color: 255; 255; 255
       Color Transformer: RGB8
       Decay Time: 0
-      Enabled: true
+      Enabled: false
      Invert Rainbow: false
       Max Color: 255; 255; 255
       Min Color: 0; 0; 0
@@ -194,7 +116,7 @@ Visualization Manager:
       Unreliable: false
       Use Fixed Frame: true
       Use rainbow: true
-      Value: true
+      Value: false
     - Alpha: 1
       Autocompute Intensity Bounds: true
       Autocompute Value Bounds:
@@ -207,7 +129,7 @@ Visualization Manager:
       Color: 255; 255; 255
       Color Transformer: Intensity
       Decay Time: 0
-      Enabled: false
+      Enabled: true
       Invert Rainbow: false
       Max Color: 255; 255; 255
       Min Color: 0; 0; 0
@@ -222,7 +144,7 @@ Visualization Manager:
       Unreliable: false
       Use Fixed Frame: true
       Use rainbow: true
-      Value: false
+      Value: true
   Enabled: true
   Global Options:
     Background Color: 85; 87; 83
diff --git a/trackdlo/include/run_evaluation.h b/trackdlo/include/run_evaluation.h
deleted file mode 100644
index 7926865..0000000
--- a/trackdlo/include/run_evaluation.h
+++ /dev/null
@@ -1,6 +0,0 @@
-#pragma once
-
-#ifndef RUN_EVALUATION_H
-#define RUN_EVALUATION_H
-
-#endif
\ No newline at end of file
diff --git a/trackdlo/include/trackdlo_node.h b/trackdlo/include/trackdlo_node.h
deleted file mode 100644
index 1974d60..0000000
--- a/trackdlo/include/trackdlo_node.h
+++ /dev/null
@@ -1,6 +0,0 @@
-#pragma once
-
-#ifndef TRACKDLO_NODE_H
-#define TRACKDLO_NODE_H
-
-#endif
\ No newline at end of file
diff --git a/trackdlo/src/initialize.py b/trackdlo/src/initialize.py
index 4ef9ede..401b227 100755
--- a/trackdlo/src/initialize.py
+++ b/trackdlo/src/initialize.py
@@ -26,7 +26,7 @@ def camera_info_callback (info):
     print(proj_matrix)
     camera_info_sub.unregister()
 
-def color_thresholding (hsv_image):
+def color_thresholding (hsv_image, cur_depth):
     # --- rope blue ---
     lower = (90, 90, 60)
     upper = (130, 255, 255)
@@ -44,6 +44,9 @@ def color_thresholding (hsv_image):
     # combine masks
     mask = cv2.bitwise_or(mask_marker.copy(), mask_dlo.copy())
 
+    # filter mask base on depth values
+    mask[cur_depth < 0.58*1000] = 0
+
     return mask
 
 def callback (rgb, depth):
@@ -63,7 +66,7 @@ def callback (rgb, depth):
         mask = cv2.inRange(hsv_image, lower, upper)
     else:
         # color thresholding
-        mask = color_thresholding(hsv_image)
+        mask = color_thresholding(hsv_image, cur_depth)
 
     start_time = time.time()
     mask = cv2.cvtColor(mask.copy(), cv2.COLOR_GRAY2BGR)
@@ -96,8 +99,12 @@ def callback (rgb, depth):
         # do not include those without depth values
         extracted_chains_3d = extracted_chains_3d[((extracted_chains_3d[:, 0] != 0) | (extracted_chains_3d[:, 1] != 0) | (extracted_chains_3d[:, 2] != 0))]
 
+        if multi_color_dlo:
+            depth_threshold = 0.58 # m
+            extracted_chains_3d = extracted_chains_3d[extracted_chains_3d[:, 2] > depth_threshold]
+
         # tck, u = interpolate.splprep(extracted_chains_3d.T, s=0.001)
-        tck, u = interpolate.splprep(extracted_chains_3d.T, s=0.002)
+        tck, u = interpolate.splprep(extracted_chains_3d.T, s=0.0005)
         # 1st fit, less points
         u_fine = np.linspace(0, 1, 300) # <-- num fit points
         x_fine, y_fine, z_fine = interpolate.splev(u_fine, tck)
@@ -112,7 +119,7 @@ def callback (rgb, depth):
 
         init_nodes = spline_pts[np.linspace(0, num_true_pts-1, num_of_nodes).astype(int)]
 
-        results = ndarray2MarkerArray(init_nodes, "camera_color_optical_frame", [255, 150, 0, 0.75], [0, 255, 0, 0.75])
+        results = ndarray2MarkerArray(init_nodes, result_frame_id, [1, 150/255, 0, 0.75], [0, 1, 0, 0.75])
         results_pub.publish(results)
 
         # add color
@@ -135,6 +142,7 @@ def callback (rgb, depth):
     camera_info_topic = rospy.get_param('/init_tracker/camera_info_topic')
     rgb_topic = rospy.get_param('/init_tracker/rgb_topic')
     depth_topic = rospy.get_param('/init_tracker/depth_topic')
+    result_frame_id = rospy.get_param('/init_tracker/result_frame_id')
 
     visualize_initialization_process = rospy.get_param('/init_tracker/visualize_initialization_process')
     hsv_threshold_upper_limit = rospy.get_param('/init_tracker/hsv_threshold_upper_limit')
@@ -152,7 +160,7 @@ def callback (rgb, depth):
     # header
     header = std_msgs.msg.Header()
     header.stamp = rospy.Time.now()
-    header.frame_id = 'camera_color_optical_frame'
+    header.frame_id = result_frame_id
     fields = [PointField('x', 0, PointField.FLOAT32, 1),
               PointField('y', 4, PointField.FLOAT32, 1),
               PointField('z', 8, PointField.FLOAT32, 1),
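
The initialize.py changes above gate the extracted chain by depth (0.58 m, only for the multi-color DLO case) and re-sample it with a tighter smoothing spline before picking evenly spaced nodes. A minimal NumPy/SciPy sketch of that resampling step, assuming an ordered (N, 3) chain in camera coordinates; the helper name and the 35-node default are illustrative, while the 0.58 m gate and s=0.0005 come from the diff:

    import numpy as np
    from scipy import interpolate

    def resample_nodes(chain_3d, num_of_nodes=35, depth_threshold=0.58):
        # drop points with no depth, then points closer than the depth gate (meters)
        chain_3d = chain_3d[np.any(chain_3d != 0, axis=1)]
        chain_3d = chain_3d[chain_3d[:, 2] > depth_threshold]

        # smoothing spline through the ordered chain points (s matches the diff)
        tck, _ = interpolate.splprep(chain_3d.T, s=0.0005)
        u_fine = np.linspace(0, 1, 300)
        spline_pts = np.vstack(interpolate.splev(u_fine, tck)).T  # (300, 3)

        # evenly spaced node indices along the fitted spline
        idx = np.linspace(0, len(spline_pts) - 1, num_of_nodes).astype(int)
        return spline_pts[idx]
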
diff --git a/trackdlo/src/run_evaluation.cpp b/trackdlo/src/run_evaluation.cpp
index 16a42bd..df06db8 100644
--- a/trackdlo/src/run_evaluation.cpp
+++ b/trackdlo/src/run_evaluation.cpp
@@ -44,7 +44,7 @@ sensor_msgs::ImagePtr Callback(const sensor_msgs::ImageConstPtr& image_msg, cons
     double time_from_start;
     time_from_start = std::chrono::duration_cast(std::chrono::steady_clock::now() - tracking_evaluator.start_time()).count();
-    time_from_start = time_from_start / 1000.0 * tracking_evaluator.rate();
+    time_from_start = (time_from_start / 1000.0 + 3.0) * tracking_evaluator.rate();  // initialization usually takes 1.5 seconds
 
     std::cout << time_from_start << "; " << tracking_evaluator.exit_time() << std::endl;
     if (tracking_evaluator.exit_time() == -1) {
@@ -454,7 +454,7 @@ int main(int argc, char **argv) {
 
     message_filters::Subscriber image_sub(nh, "/camera/color/image_raw", 10);
     message_filters::Subscriber pc_sub(nh, "/camera/depth/color/points", 10);
-    message_filters::Subscriber result_sub(nh, "/" + alg + "_results_pc", 10);
+    message_filters::Subscriber result_sub(nh, "/" + alg + "/results_pc", 10);
     message_filters::TimeSynchronizer sync(image_sub, pc_sub, result_sub, 10);
     sync.registerCallback max_p_nodes(P.cols(), 0);
-
-    // // temp test
-    // for (int i = 0; i < N; i ++) {
-    //     P.col(i).maxCoeff(&max_p_nodes[i]);
-    // }
-
     MatrixXd pts_dis_sq_geodesic = MatrixXd::Zero(M, N);
 
     // loop through all points
@@ -405,49 +399,6 @@ bool trackdlo::cpd_lle (MatrixXd X,
     for (int i = 0; i < Y.rows(); i ++) {
         double shortest_node_pt_dist = shortest_node_pt_dists[i];
 
-        // double shortest_node_pt_dist;
-
-        // // if this node is visible, the shortest dist is always 0
-        // if (std::find(visible_nodes.begin(), visible_nodes.end(), i) != visible_nodes.end()){
-        //     shortest_node_pt_dist = 0;
-        // }
-        // else {
-        //     // find the closest visible node
-        //     for (int j = 0; j <= Y.rows()-1; j ++) {
-        //         int left_node = i - j;
-        //         int right_node = i + j;
-        //         double left_dist = 100000;
-        //         double right_dist = 100000;
-        //         bool found_visible_node = false;
-
-        //         // make sure node index doesn't go out of bound
-        //         if (left_node >= 0) {
-        //             // if i - j is visible, record the geodesic distance between the nodes
-        //             if (std::find(visible_nodes.begin(), visible_nodes.end(), left_node) != visible_nodes.end()){
-        //                 left_dist = abs(converted_node_coord[left_node] - converted_node_coord[i]);
-        //                 found_visible_node = true;
-        //             }
-        //         }
-        //         if (right_node <= Y.rows()-1) {
-        //             // if i + j is visible, record the geodesic distance between the nodes
-        //             if (std::find(visible_nodes.begin(), visible_nodes.end(), right_node) != visible_nodes.end()){
-        //                 right_dist = abs(converted_node_coord[right_dist] - converted_node_coord[i]);
-        //                 found_visible_node = true;
-        //             }
-        //         }
-
-        //         // take the shortest distance
-        //         if (found_visible_node) {
-        //             if (left_dist < right_dist) {
-        //                 shortest_node_pt_dist = left_dist;
-        //             }
-        //             else {
-        //                 shortest_node_pt_dist = right_dist;
-        //             }
-        //             break;
-        //         }
-        //     }
-        // }
 
         double P_vis_i = exp(-k_vis * shortest_node_pt_dist);
         total_P_vis += P_vis_i;
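
The hunk above keeps only the distance-based visibility prior: each node's membership weight decays exponentially with its shortest distance to the observed point cloud, and the removed commented-out block that walked along the chain to the nearest visible node is gone for good. A NumPy sketch of that prior under the same convention; the k_vis value is a placeholder, and folding the normalized weights into the CPD posterior P is only indicated here, not reproduced:

    import numpy as np

    def visibility_prior(Y, X, k_vis=500.0):
        # d[i] = shortest distance from node Y[i] to any observed point in X
        d = np.min(np.linalg.norm(Y[:, None, :] - X[None, :, :], axis=2), axis=1)
        p_vis = np.exp(-k_vis * d)      # P_vis_i = exp(-k_vis * shortest_node_pt_dist)
        return p_vis / p_vis.sum()      # normalize by total_P_vis before weighting P
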
" + std::to_string(it+1)); break; @@ -1018,7 +964,8 @@ void trackdlo::tracking_step (MatrixXd X, // determine DLO state: heading visible, tail visible, both visible, or both occluded // priors_vec should be the final output; priors_vec[i] = {index, x, y, z} double sigma2_pre_proc = sigma2_; - cpd_lle(X, guide_nodes_, sigma2_pre_proc, 3, 1, 1, mu_, 50, 0.00001, true, true, true, false, {}, 0, 1, visible_nodes_extended, 0, visibility_threshold_); + cpd_lle(X, guide_nodes_, sigma2_pre_proc, 3, 1, lle_weight_, mu_, 50, tol_, true, true, true, false, {}, 0, 1); + // cpd_lle(X, guide_nodes_, sigma2_pre_proc, 1, 1, 1, mu_, 50, tol_, true, true, true); if (visible_nodes_extended.size() == Y_.rows()) { if (visible_nodes.size() == visible_nodes_extended.size()) { diff --git a/trackdlo/src/trackdlo_node.cpp b/trackdlo/src/trackdlo_node.cpp index 9db5ab1..2b902b8 100644 --- a/trackdlo/src/trackdlo_node.cpp +++ b/trackdlo/src/trackdlo_node.cpp @@ -28,6 +28,7 @@ MatrixXd proj_matrix(3, 4); double total_len = 0; bool multi_color_dlo; +bool gltp; double visibility_threshold; int dlo_pixel_width; double beta; @@ -49,6 +50,7 @@ std::string rgb_topic; std::string depth_topic; std::string hsv_threshold_upper_limit; std::string hsv_threshold_lower_limit; +std::string result_frame_id; std::vector upper; std::vector lower; @@ -182,9 +184,6 @@ sensor_msgs::ImagePtr Callback(const sensor_msgs::ImageConstPtr& image_msg, cons cv::cvtColor(mask, mask_rgb, cv::COLOR_GRAY2BGR); - sensor_msgs::PointCloud2 output; - sensor_msgs::PointCloud2 result_pc; - bool simulated_occlusion = false; int occlusion_corner_i = -1; int occlusion_corner_j = -1; @@ -211,8 +210,7 @@ sensor_msgs::ImagePtr Callback(const sensor_msgs::ImageConstPtr& image_msg, cons occlusion_corner_j_2 = j; } - double depth_threshold = 0.4 * 1000; // millimeters - if (mask.at(i, j) != 0 && cur_depth.at(i, j) > depth_threshold) { + if (mask.at(i, j) != 0) { // point cloud from image pixel coordinates and depth value pcl::PointXYZRGB point; double pixel_x = static_cast(j); @@ -227,7 +225,7 @@ sensor_msgs::ImagePtr Callback(const sensor_msgs::ImageConstPtr& image_msg, cons point.y = (pixel_y - cy) * pc_z / fy; point.z = pc_z; - // currently missing point field so color doesn't show up in rviz + // currently something so color doesn't show up in rviz point.r = cur_image_orig.at(i, j)[0]; point.g = cur_image_orig.at(i, j)[1]; point.b = cur_image_orig.at(i, j)[2]; @@ -257,22 +255,46 @@ sensor_msgs::ImagePtr Callback(const sensor_msgs::ImageConstPtr& image_msg, cons cur_time = std::chrono::high_resolution_clock::now(); // calculate node visibility - // for each node in Y, determine a point in X closest to it + // for each node in Y, determine its shortest distance to X + // for each point in X, determine its shortest distance to Y std::map shortest_node_pt_dists; + std::vector shortest_pt_node_dists(X.rows(), 100000.0); + MatrixXd X_temp = MatrixXd::Zero(X.rows(), 3); + int valid_pt_counter = 0; for (int m = 0; m < Y.rows(); m ++) { int closest_pt_idx = 0; double shortest_dist = 100000; // loop through all points in X for (int n = 0; n < X.rows(); n ++) { double dist = (Y.row(m) - X.row(n)).norm(); + // update shortest dist for Y if (dist < shortest_dist) { closest_pt_idx = n; shortest_dist = dist; } + + // update shortest dist for X + if (dist < shortest_pt_node_dists[n]) { + shortest_pt_node_dists[n] = dist; + } + // count valid point in the last iteration + if (m == Y.rows()-1) { + // assumption: DLO movement between frames is small + // get rid of points too far 
diff --git a/trackdlo/src/trackdlo_node.cpp b/trackdlo/src/trackdlo_node.cpp
index 9db5ab1..2b902b8 100644
--- a/trackdlo/src/trackdlo_node.cpp
+++ b/trackdlo/src/trackdlo_node.cpp
@@ -28,6 +28,7 @@ MatrixXd proj_matrix(3, 4);
 double total_len = 0;
 bool multi_color_dlo;
+bool gltp;
 double visibility_threshold;
 int dlo_pixel_width;
 double beta;
@@ -49,6 +50,7 @@ std::string rgb_topic;
 std::string depth_topic;
 std::string hsv_threshold_upper_limit;
 std::string hsv_threshold_lower_limit;
+std::string result_frame_id;
 std::vector upper;
 std::vector lower;
 
@@ -182,9 +184,6 @@ sensor_msgs::ImagePtr Callback(const sensor_msgs::ImageConstPtr& image_msg, cons
 
         cv::cvtColor(mask, mask_rgb, cv::COLOR_GRAY2BGR);
 
-        sensor_msgs::PointCloud2 output;
-        sensor_msgs::PointCloud2 result_pc;
-
         bool simulated_occlusion = false;
         int occlusion_corner_i = -1;
         int occlusion_corner_j = -1;
@@ -211,8 +210,7 @@ sensor_msgs::ImagePtr Callback(const sensor_msgs::ImageConstPtr& image_msg, cons
                     occlusion_corner_j_2 = j;
                 }
 
-                double depth_threshold = 0.4 * 1000; // millimeters
-                if (mask.at(i, j) != 0 && cur_depth.at(i, j) > depth_threshold) {
+                if (mask.at(i, j) != 0) {
                     // point cloud from image pixel coordinates and depth value
                     pcl::PointXYZRGB point;
                     double pixel_x = static_cast(j);
@@ -227,7 +225,7 @@ sensor_msgs::ImagePtr Callback(const sensor_msgs::ImageConstPtr& image_msg, cons
                     point.y = (pixel_y - cy) * pc_z / fy;
                     point.z = pc_z;
 
-                    // currently missing point field so color doesn't show up in rviz
+                    // currently something so color doesn't show up in rviz
                    point.r = cur_image_orig.at(i, j)[0];
                     point.g = cur_image_orig.at(i, j)[1];
                     point.b = cur_image_orig.at(i, j)[2];
@@ -257,22 +255,46 @@ sensor_msgs::ImagePtr Callback(const sensor_msgs::ImageConstPtr& image_msg, cons
         cur_time = std::chrono::high_resolution_clock::now();
 
         // calculate node visibility
-        // for each node in Y, determine a point in X closest to it
+        // for each node in Y, determine its shortest distance to X
+        // for each point in X, determine its shortest distance to Y
         std::map shortest_node_pt_dists;
+        std::vector shortest_pt_node_dists(X.rows(), 100000.0);
+        MatrixXd X_temp = MatrixXd::Zero(X.rows(), 3);
+        int valid_pt_counter = 0;
         for (int m = 0; m < Y.rows(); m ++) {
             int closest_pt_idx = 0;
             double shortest_dist = 100000;
             // loop through all points in X
             for (int n = 0; n < X.rows(); n ++) {
                 double dist = (Y.row(m) - X.row(n)).norm();
+                // update shortest dist for Y
                 if (dist < shortest_dist) {
                     closest_pt_idx = n;
                     shortest_dist = dist;
                 }
+
+                // update shortest dist for X
+                if (dist < shortest_pt_node_dists[n]) {
+                    shortest_pt_node_dists[n] = dist;
+                }
+                // count valid point in the last iteration
+                if (m == Y.rows()-1) {
+                    // assumption: DLO movement between frames is small
+                    // get rid of points too far away from the node point set Y^{t-1}
+                    // this is necessary because exp(-dist/sigma2) can become too small to represent in MatrixXd
+                    // if a point x_n in X is too far away from all nodes, all entries in row n of P will be zero,
+                    // even if they technically aren't all zeros
+                    if (shortest_pt_node_dists[n] < 0.05) {
+                        X_temp.row(valid_pt_counter) = X.row(n);
+                        valid_pt_counter += 1;
+                    }
+                }
             }
             shortest_node_pt_dists.insert(std::pair(m, shortest_dist));
         }
 
+        MatrixXd X_pruned = X_temp.topRows(valid_pt_counter);
+
         // for current nodes and edges in Y, sort them based on how far away they are from the camera
         std::vector averaged_node_camera_dists = {};
         std::vector indices_vec = {};
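
The block added above prunes the input cloud against the previous estimate: any observed point farther than 5 cm from every node of Y^{t-1} is dropped before registration, so exp(-dist/sigma2) cannot underflow and leave an all-zero row in P. An equivalent NumPy sketch; the 0.05 m radius is the value used in the diff:

    import numpy as np

    def prune_points(X, Y_prev, radius=0.05):
        # shortest distance from every observed point x_n to the previous node set
        d = np.min(np.linalg.norm(X[:, None, :] - Y_prev[None, :, :], axis=2), axis=1)
        return X[d < radius]   # keep only points near Y^{t-1}, like X_pruned
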
@@ -321,7 +343,15 @@ sensor_msgs::ImagePtr Callback(const sensor_msgs::ImageConstPtr& image_msg, cons
             }
 
             // add edges for checking overlap with upcoming nodes
-            cv::line(projected_edges, cv::Point(col_1, row_1), cv::Point(col_2, row_2), cv::Scalar(255, 255, 255), dlo_pixel_width);
+            double x1 = col_1;
+            double y1 = row_1;
+            double x2 = col_2;
+            double y2 = row_2;
+            // x1 = x1 + (0.25*dlo_pixel_width) / sqrt(pow(x1-x2, 2) + pow(y1-y2, 2)) * (x2 - x1);
+            // y1 = y1 + (0.25*dlo_pixel_width) / sqrt(pow(x1-x2, 2) + pow(y1-y2, 2)) * (y2 - y1);
+            // x2 = x2 + (0.25*dlo_pixel_width) / sqrt(pow(x1-x2, 2) + pow(y1-y2, 2)) * (x1 - x2);
+            // y2 = y2 + (0.25*dlo_pixel_width) / sqrt(pow(x1-x2, 2) + pow(y1-y2, 2)) * (y1 - y2);
+            cv::line(projected_edges, cv::Point(x1, y1), cv::Point(x2, y2), cv::Scalar(255, 255, 255), dlo_pixel_width);
         }
 
         // sort visible nodes to preserve the original connectivity
@@ -340,18 +370,16 @@ sensor_msgs::ImagePtr Callback(const sensor_msgs::ImageConstPtr& image_msg, cons
             }
         }
         visible_nodes_extended.push_back(visible_nodes[visible_nodes.size()-1]);
-
-        std::cout << "visible nodes: ";
-        print_1d_vector(visible_nodes);
-        std::cout << "visible nodes extended: ";
-        print_1d_vector(visible_nodes_extended);
-        tracker.tracking_step(X, visible_nodes, visible_nodes_extended, proj_matrix, mask.rows, mask.cols);
-        // tracker.ecpd_lle(X, Y, sigma2, 3, 1, 1, 0.1, 50, 0.00001, true, true, true, false, {}, 0, 1);
-
-        Y = tracker.get_tracking_result();
-        guide_nodes = tracker.get_guide_nodes();
-        priors = tracker.get_correspondence_pairs();
+        if (!gltp) {
+            tracker.tracking_step(X_pruned, visible_nodes, visible_nodes_extended, proj_matrix, mask.rows, mask.cols);
+            Y = tracker.get_tracking_result();
+            guide_nodes = tracker.get_guide_nodes();
+            priors = tracker.get_correspondence_pairs();
+        }
+        else {
+            tracker.cpd_lle(X_pruned, Y, sigma2, 1, 1, 1, mu, 50, tol, true, false, true);
+        }
 
         // log time
         time_diff = std::chrono::duration_cast(std::chrono::high_resolution_clock::now() - cur_time).count() / 1000.0;
@@ -360,6 +388,20 @@
         cur_time = std::chrono::high_resolution_clock::now();
 
         // projection and pub image
+        averaged_node_camera_dists = {};
+        indices_vec = {};
+        for (int i = 0; i < Y.rows()-1; i ++) {
+            averaged_node_camera_dists.push_back(((Y.row(i) + Y.row(i+1)) / 2).norm());
+            indices_vec.push_back(i);
+        }
+        // sort
+        std::sort(indices_vec.begin(), indices_vec.end(),
+            [&](const int& a, const int& b) {
+                return (averaged_node_camera_dists[a] < averaged_node_camera_dists[b]);
+            }
+        );
+        std::reverse(indices_vec.begin(), indices_vec.end());
+
         MatrixXd nodes_h = Y.replicate(1, 1);
         nodes_h.conservativeResize(nodes_h.rows(), nodes_h.cols()+1);
         nodes_h.col(nodes_h.cols()-1) = MatrixXd::Ones(nodes_h.rows(), 1);
@@ -369,31 +411,44 @@ sensor_msgs::ImagePtr Callback(const sensor_msgs::ImageConstPtr& image_msg, cons
         tracking_img = 0.5*cur_image_orig + 0.5*cur_image;
 
         // draw points
-        for (int i = 0; i < image_coords.rows(); i ++) {
+        for (int idx : indices_vec) {
 
-            int x = static_cast(image_coords(i, 0)/image_coords(i, 2));
-            int y = static_cast(image_coords(i, 1)/image_coords(i, 2));
+            int x = static_cast(image_coords(idx, 0)/image_coords(idx, 2));
+            int y = static_cast(image_coords(idx, 1)/image_coords(idx, 2));
 
             cv::Scalar point_color;
             cv::Scalar line_color;
 
-            if (std::find(visible_nodes.begin(), visible_nodes.end(), i) != visible_nodes.end()) {
+            if (std::find(visible_nodes.begin(), visible_nodes.end(), idx) != visible_nodes.end()) {
                 point_color = cv::Scalar(0, 150, 255);
-                line_color = cv::Scalar(0, 255, 0);
+                if (std::find(visible_nodes.begin(), visible_nodes.end(), idx+1) != visible_nodes.end()) {
+                    line_color = cv::Scalar(0, 255, 0);
+                }
+                else {
+                    line_color = cv::Scalar(0, 0, 255);
+                }
             }
             else {
                 point_color = cv::Scalar(0, 0, 255);
                 line_color = cv::Scalar(0, 0, 255);
             }
 
-            if (i != image_coords.rows()-1) {
-                cv::line(tracking_img, cv::Point(x, y),
-                    cv::Point(static_cast(image_coords(i+1, 0)/image_coords(i+1, 2)),
-                              static_cast(image_coords(i+1, 1)/image_coords(i+1, 2))),
-                    line_color, 5);
-            }
+            cv::line(tracking_img, cv::Point(x, y),
+                cv::Point(static_cast(image_coords(idx+1, 0)/image_coords(idx+1, 2)),
+                          static_cast(image_coords(idx+1, 1)/image_coords(idx+1, 2))),
+                line_color, 5);
 
             cv::circle(tracking_img, cv::Point(x, y), 7, point_color, -1);
+
+            if (std::find(visible_nodes.begin(), visible_nodes.end(), idx+1) != visible_nodes.end()) {
+                point_color = cv::Scalar(0, 150, 255);
+            }
+            else {
+                point_color = cv::Scalar(0, 0, 255);
+            }
+            cv::circle(tracking_img, cv::Point(static_cast(image_coords(idx+1, 0)/image_coords(idx+1, 2)),
+                                               static_cast(image_coords(idx+1, 1)/image_coords(idx+1, 2))),
+                       7, point_color, -1);
         }
 
         // add text
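
The drawing hunk above switches to a painter's ordering: edges are sorted by the distance of their midpoints to the camera and drawn back to front, so nearer segments overwrite farther ones, and a segment is drawn green only when both of its endpoint nodes are visible. An OpenCV sketch of the same idea, assuming the nodes have already been projected to integer pixel coordinates; the colors and line widths mirror the diff, the function name is illustrative:

    import cv2
    import numpy as np

    def draw_tracking_result(img, pixels, nodes_3d, visible):
        # pixels: list of (x, y) ints, nodes_3d: (M, 3) camera-frame nodes,
        # visible: set of visible node indices
        order = sorted(range(len(nodes_3d) - 1),
                       key=lambda i: np.linalg.norm((nodes_3d[i] + nodes_3d[i + 1]) / 2),
                       reverse=True)  # farthest edge first
        for i in order:
            line_color = (0, 255, 0) if (i in visible and i + 1 in visible) else (0, 0, 255)
            cv2.line(img, pixels[i], pixels[i + 1], line_color, 5)
            for j in (i, i + 1):
                point_color = (0, 150, 255) if j in visible else (0, 0, 255)
                cv2.circle(img, pixels[j], 7, point_color, -1)
        return img
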
@@ -404,18 +459,11 @@ sensor_msgs::ImagePtr Callback(const sensor_msgs::ImageConstPtr& image_msg, cons
 
         // publish image
         tracking_img_msg = cv_bridge::CvImage(std_msgs::Header(), "bgr8", tracking_img).toImageMsg();
 
-        // publish filtered point cloud
-        pcl::PCLPointCloud2 cur_pc_pointcloud2;
-        pcl::toPCLPointCloud2(cur_pc_downsampled, cur_pc_pointcloud2);
-        cur_pc_pointcloud2.header.frame_id = "camera_color_optical_frame";
-
-        // Convert to ROS data type
-        pcl_conversions::moveFromPCL(cur_pc_pointcloud2, output);
-
         // publish the results as a marker array
-        visualization_msgs::MarkerArray results = MatrixXd2MarkerArray(Y, "camera_color_optical_frame", "node_results", {1.0, 150.0/255.0, 0.0, 1.0}, {0.0, 1.0, 0.0, 1.0}, 0.01, 0.005, visible_nodes, {1.0, 0.0, 0.0, 1.0}, {1.0, 0.0, 0.0, 1.0});
-        visualization_msgs::MarkerArray guide_nodes_results = MatrixXd2MarkerArray(guide_nodes, "camera_color_optical_frame", "guide_node_results", {0.0, 0.0, 0.0, 0.5}, {0.0, 0.0, 1.0, 0.5});
-        visualization_msgs::MarkerArray corr_priors_results = MatrixXd2MarkerArray(priors, "camera_color_optical_frame", "corr_prior_results", {0.0, 0.0, 0.0, 0.5}, {1.0, 0.0, 0.0, 0.5});
+        // visualization_msgs::MarkerArray results = MatrixXd2MarkerArray(Y, result_frame_id, "node_results", {1.0, 150.0/255.0, 0.0, 1.0}, {0.0, 1.0, 0.0, 1.0}, 0.01, 0.005, visible_nodes, {1.0, 0.0, 0.0, 1.0}, {1.0, 0.0, 0.0, 1.0});
+        visualization_msgs::MarkerArray results = MatrixXd2MarkerArray(Y, result_frame_id, "node_results", {1.0, 150.0/255.0, 0.0, 1.0}, {0.0, 1.0, 0.0, 1.0}, 0.01, 0.005);
+        visualization_msgs::MarkerArray guide_nodes_results = MatrixXd2MarkerArray(guide_nodes, result_frame_id, "guide_node_results", {0.0, 0.0, 0.0, 0.5}, {0.0, 0.0, 1.0, 0.5});
+        visualization_msgs::MarkerArray corr_priors_results = MatrixXd2MarkerArray(priors, result_frame_id, "corr_prior_results", {0.0, 0.0, 0.0, 0.5}, {1.0, 0.0, 0.0, 0.5});
 
         // convert to pointcloud2 for eval
         pcl::PointCloud trackdlo_pc;
@@ -427,19 +475,28 @@ sensor_msgs::ImagePtr Callback(const sensor_msgs::ImageConstPtr& image_msg, cons
             trackdlo_pc.points.push_back(temp);
         }
 
+        // publish filtered point cloud
+        pcl::PCLPointCloud2 cur_pc_pointcloud2;
+        pcl::toPCLPointCloud2(cur_pc_downsampled, cur_pc_pointcloud2);
         pcl::PCLPointCloud2 result_pc_pclpoincloud2;
-        pcl::toPCLPointCloud2(trackdlo_pc, result_pc_pclpoincloud2);
-        pcl_conversions::moveFromPCL(result_pc_pclpoincloud2, result_pc);
-        result_pc.header.frame_id = "camera_color_optical_frame";
-        result_pc.header.stamp = image_msg->header.stamp;
+        // Convert to ROS data type
+        sensor_msgs::PointCloud2 cur_pc_msg;
+        sensor_msgs::PointCloud2 result_pc_msg;
+        pcl_conversions::moveFromPCL(cur_pc_pointcloud2, cur_pc_msg);
+        pcl_conversions::moveFromPCL(result_pc_pclpoincloud2, result_pc_msg);
+
+        // for evaluation sync
+        cur_pc_msg.header.frame_id = result_frame_id;
+        result_pc_msg.header.frame_id = result_frame_id;
+        result_pc_msg.header.stamp = image_msg->header.stamp;
 
         results_pub.publish(results);
         guide_nodes_pub.publish(guide_nodes_results);
         corr_priors_pub.publish(corr_priors_results);
-        pc_pub.publish(output);
-        result_pc_pub.publish(result_pc);
+        pc_pub.publish(cur_pc_msg);
+        result_pc_pub.publish(result_pc_msg);
 
         // reset all guide nodes
         for (int i = 0; i < guide_nodes_results.markers.size(); i ++) {
@@ -460,8 +517,6 @@ sensor_msgs::ImagePtr Callback(const sensor_msgs::ImageConstPtr& image_msg, cons
         ROS_INFO_STREAM("Avg tracking step: " + std::to_string(algo_total / frames) + " ms");
         ROS_INFO_STREAM("Avg pub data: " + std::to_string(pub_data_total / frames) + " ms");
         ROS_INFO_STREAM("Avg total: " + std::to_string((pre_proc_total + algo_total + pub_data_total) / frames) + " ms");
-
-        // pc_pub.publish(output);
     }
 
     return tracking_img_msg;
@@ -486,6 +541,7 @@ int main(int argc, char **argv) {
     nh.getParam("/trackdlo/kernel", kernel);
 
     nh.getParam("/trackdlo/multi_color_dlo", multi_color_dlo);
+    nh.getParam("/trackdlo/gltp", gltp);
     nh.getParam("/trackdlo/visibility_threshold", visibility_threshold);
     nh.getParam("/trackdlo/dlo_pixel_width", dlo_pixel_width);
     nh.getParam("/trackdlo/downsample_leaf_size", downsample_leaf_size);
@@ -493,6 +549,7 @@ int main(int argc, char **argv) {
     nh.getParam("/trackdlo/camera_info_topic", camera_info_topic);
     nh.getParam("/trackdlo/rgb_topic", rgb_topic);
     nh.getParam("/trackdlo/depth_topic", depth_topic);
+    nh.getParam("/trackdlo/result_frame_id", result_frame_id);
     nh.getParam("/trackdlo/hsv_threshold_upper_limit", hsv_threshold_upper_limit);
     nh.getParam("/trackdlo/hsv_threshold_lower_limit", hsv_threshold_lower_limit);
 
@@ -532,7 +589,7 @@ int main(int argc, char **argv) {
     int pub_queue_size = 30;
 
     image_transport::ImageTransport it(nh);
-    image_transport::Subscriber opencv_mask_sub = it.subscribe("/trackdlo/mask_with_occlusion", 10, update_opencv_mask);
+    image_transport::Subscriber opencv_mask_sub = it.subscribe("/mask_with_occlusion", 10, update_opencv_mask);
 
     init_nodes_sub = nh.subscribe("/trackdlo/init_nodes", 1, update_init_nodes);
     camera_info_sub = nh.subscribe(camera_info_topic, 1, update_camera_info);
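
The point-cloud publishing above now stamps the result cloud with the incoming image's timestamp, which is what lets run_evaluation.cpp pair /camera/color/image_raw, /camera/depth/color/points and the tracker output with an exact-time synchronizer (with alg set to trackdlo, the renamed result topic is /trackdlo/results_pc). A rospy sketch of the matching subscriber side; the node name is illustrative and the callback body is a placeholder:

    import rospy
    import message_filters
    from sensor_msgs.msg import Image, PointCloud2

    def eval_callback(img_msg, pc_msg, result_msg):
        pass  # placeholder: compare the tracked nodes against ground truth here

    rospy.init_node('eval_sync_sketch')
    subs = [message_filters.Subscriber('/camera/color/image_raw', Image),
            message_filters.Subscriber('/camera/depth/color/points', PointCloud2),
            message_filters.Subscriber('/trackdlo/results_pc', PointCloud2)]
    message_filters.TimeSynchronizer(subs, 10).registerCallback(eval_callback)
    rospy.spin()
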
diff --git a/trackdlo/src/utils.cpp b/trackdlo/src/utils.cpp
index ae03ec6..aa8f22f 100644
--- a/trackdlo/src/utils.cpp
+++ b/trackdlo/src/utils.cpp
@@ -263,7 +263,7 @@ visualization_msgs::MarkerArray MatrixXd2MarkerArray (MatrixXd Y,
         // cur_node_result.header.stamp = ros::Time::now();
         cur_node_result.type = visualization_msgs::Marker::SPHERE;
         cur_node_result.action = visualization_msgs::Marker::ADD;
-        cur_node_result.ns = marker_ns + std::to_string(i);
+        cur_node_result.ns = marker_ns + "_node_" + std::to_string(i);
         cur_node_result.id = i;
 
         // add position
@@ -309,10 +309,10 @@ visualization_msgs::MarkerArray MatrixXd2MarkerArray (MatrixXd Y,
         visualization_msgs::Marker cur_line_result = visualization_msgs::Marker();
 
         // add header
-        cur_line_result.header.frame_id = "camera_color_optical_frame";
+        cur_line_result.header.frame_id = marker_frame;
         cur_line_result.type = visualization_msgs::Marker::CYLINDER;
         cur_line_result.action = visualization_msgs::Marker::ADD;
-        cur_line_result.ns = "line_results" + std::to_string(i);
+        cur_line_result.ns = marker_ns + "_line_" + std::to_string(i);
         cur_line_result.id = i;
 
         // add position
@@ -381,7 +381,7 @@ visualization_msgs::MarkerArray MatrixXd2MarkerArray (std::vector Y,
         // cur_node_result.header.stamp = ros::Time::now();
         cur_node_result.type = visualization_msgs::Marker::SPHERE;
         cur_node_result.action = visualization_msgs::Marker::ADD;
-        cur_node_result.ns = marker_ns + std::to_string(i);
+        cur_node_result.ns = marker_ns + "_node_" + std::to_string(i);
         cur_node_result.id = i;
 
         // add position
@@ -427,10 +427,10 @@ visualization_msgs::MarkerArray MatrixXd2MarkerArray (std::vector Y,
         visualization_msgs::Marker cur_line_result = visualization_msgs::Marker();
 
         // add header
-        cur_line_result.header.frame_id = "camera_color_optical_frame";
+        cur_line_result.header.frame_id = marker_frame;
         cur_line_result.type = visualization_msgs::Marker::CYLINDER;
         cur_line_result.action = visualization_msgs::Marker::ADD;
-        cur_line_result.ns = "line_results" + std::to_string(i);
+        cur_line_result.ns = marker_ns + "_line_" + std::to_string(i);
         cur_line_result.id = i;
 
         // add position
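
With the utils.cpp change above, every node sphere is published under "<marker_ns>_node_<i>" and every edge cylinder under "<marker_ns>_line_<i>", in whatever frame the caller passes in; this is why the tracking.rviz display at the top of this diff can drop its hard-coded namespace list in favor of {}. A rospy sketch of the same namespace scheme; the scale and color values are illustrative:

    from visualization_msgs.msg import Marker, MarkerArray

    def nodes_to_marker_array(nodes, frame_id, marker_ns):
        arr = MarkerArray()
        for i, (x, y, z) in enumerate(nodes):
            m = Marker()
            m.header.frame_id = frame_id           # caller-supplied frame, not hard-coded
            m.ns = marker_ns + "_node_" + str(i)   # mirrors utils.cpp
            m.id = i
            m.type = Marker.SPHERE
            m.action = Marker.ADD
            m.pose.position.x, m.pose.position.y, m.pose.position.z = x, y, z
            m.pose.orientation.w = 1.0
            m.scale.x = m.scale.y = m.scale.z = 0.01
            m.color.r, m.color.g, m.color.b, m.color.a = 1.0, 150 / 255.0, 0.0, 1.0
            arr.markers.append(m)
        return arr
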
diff --git a/trackdlo/src/utils.py b/trackdlo/src/utils.py
index 3575391..8e23ec5 100644
--- a/trackdlo/src/utils.py
+++ b/trackdlo/src/utils.py
@@ -161,7 +161,7 @@ def extract_connected_skeleton (visualize_process, mask, img_scale=10, seg_lengt
 
     # smooth image
     im = Image.fromarray(mask)
-    smoothed_im = im.filter(ImageFilter.ModeFilter(size=20))
+    smoothed_im = im.filter(ImageFilter.ModeFilter(size=15))
     mask = np.array(smoothed_im)
 
     # resize if necessary for better skeletonization performance
diff --git a/utils/pub_camera_tf.py b/utils/pub_camera_tf.py
index 5a6b800..b4c7c49 100755
--- a/utils/pub_camera_tf.py
+++ b/utils/pub_camera_tf.py
@@ -28,6 +28,7 @@
 
     # current camera pos
     trans_5 = [0.5308947503950723, 0.030109485611943067, 0.50]
+    # trans_5 = [0.5308947503950723, 0.030109485611943067, 0.6311476272663069]
     quat_5 = [-0.7065771296991245, 0.7075322875283535, 0.0004147019946593735, 0.012109909714664245]
 
     static_transformStamped_5.transform.translation.x = trans_5[0]
diff --git a/utils/simulate_occlusion.py b/utils/simulate_occlusion.py
index bb610e6..87cbece 100755
--- a/utils/simulate_occlusion.py
+++ b/utils/simulate_occlusion.py
@@ -21,7 +21,7 @@ def __init__(self):
         self.mouse_mask = None
 
         self.rgb_sub = rospy.Subscriber('/camera/color/image_raw', Image, self.callback)
-        self.occlusion_mask_img_pub = rospy.Publisher('/trackdlo/mask_with_occlusion', Image, queue_size=100)
+        self.occlusion_mask_img_pub = rospy.Publisher('/mask_with_occlusion', Image, queue_size=100)
 
     def callback(self,rgb):
         cur_image = ros_numpy.numpify(rgb)
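
The publisher renamed above must match the subscriber side in trackdlo_node.cpp, which now listens on /mask_with_occlusion instead of /trackdlo/mask_with_occlusion. A minimal publisher sketch using ros_numpy like the rest of this script; the resolution and rgb8 encoding are assumptions, and an all-white mask means nothing is occluded:

    import numpy as np
    import rospy
    import ros_numpy
    from sensor_msgs.msg import Image

    rospy.init_node('occlusion_mask_sketch')
    pub = rospy.Publisher('/mask_with_occlusion', Image, queue_size=100)
    rate = rospy.Rate(30)
    mask = np.full((720, 1280, 3), 255, dtype=np.uint8)  # white = keep every pixel
    while not rospy.is_shutdown():
        pub.publish(ros_numpy.msgify(Image, mask, encoding='rgb8'))
        rate.sleep()
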