我来为您详细介绍ROS/ROS2的核心概念,包括节点通信机制、TF坐标变换系统和可视化工具。
一、ROS vs ROS2 架构对比#
| 特性 | ROS1 (Noetic) | ROS2 (Humble/Iron) |
|---|---|---|
| 中间件 | 自定义TCPROS/UDPROS | DDS (Data Distribution Service) |
| 实时性 | 不支持硬实时 | 支持硬实时 |
| 嵌入式 | 需交叉编译 | 原生支持MCU |
| 多机通信 | 需配置master | 自动发现,无单点故障 |
| 安全性 | 无 | 加密、认证、访问控制 |
| 构建工具 | catkin | colcon |
| Python版本 | Python 2/3 | Python 3 |
| 推荐场景 | legacy系统维护 | 新项目首选 |
二、Node(节点)#
2.1 概念#
节点是ROS中最小的计算单元,一个进程对应一个节点,负责单一功能(如点云处理、路径规划)。
自动驾驶系统节点图:
┌─────────────┐ ┌─────────────┐ ┌─────────────┐
│ lidar_node │───→│ perception │─────→│ planning │
│ (驱动节点) │ │ (感知节点) │ │ (规划节点) │
└─────────────┘ └──────┬──────┘ └──────┬──────┘
│ │
↓ ↓
┌─────────────┐ ┌─────────────┐
│ tracking │ │ control │
│ (跟踪节点) │ │ (控制节点) │
└─────────────┘ └─────────────┘
2.2 ROS2节点实现(C++)#
// ROS2 C++节点示例:点云处理节点
#include "rclcpp/rclcpp.hpp"
#include "sensor_msgs/msg/point_cloud2.hpp"
#include "autoware_auto_perception_msgs/msg/detected_objects.hpp"
class LidarPerceptionNode : public rclcpp::Node {
public:
LidarPerceptionNode() : Node("lidar_perception_node") {
// 声明参数
this->declare_parameter("model_path", "/opt/model/pointpillars.onnx");
this->declare_parameter("score_threshold", 0.5);
this->declare_parameter("use_gpu", true);
// 获取参数
std::string model_path = this->get_parameter("model_path").as_string();
score_threshold_ = this->get_parameter("score_threshold").as_double();
// QoS配置(关键!ROS2使用DDS QoS)
rclcpp::QoS qos(10); // 历史深度10
qos.reliable(); // 可靠传输
qos.keep_last(10); // 保留最近10条
// 创建订阅者
sub_cloud_ = this->create_subscription<sensor_msgs::msg::PointCloud2>(
"/lidar/points_raw", // Topic名称
qos,
std::bind(&LidarPerceptionNode::pointCloudCallback, this, std::placeholders::_1)
);
// 创建发布者
pub_objects_ = this->create_publisher<autoware_auto_perception_msgs::msg::DetectedObjects>(
"/perception/objects",
qos
);
// 定时器 (100ms周期处理)
timer_ = this->create_wall_timer(
std::chrono::milliseconds(100),
std::bind(&LidarPerceptionNode::timerCallback, this)
);
// 初始化TensorRT引擎
initModel(model_path);
RCLCPP_INFO(this->get_logger(), "LidarPerceptionNode initialized");
}
private:
void pointCloudCallback(const sensor_msgs::msg::PointCloud2::SharedPtr msg) {
// 转换ROS消息到PCL格式
pcl::PointCloud<pcl::PointXYZI>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZI>);
pcl::fromROSMsg(*msg, *cloud);
// 缓存点云(线程安全)
std::lock_guard<std::mutex> lock(mutex_);
latest_cloud_ = cloud;
latest_stamp_ = msg->header.stamp;
}
void timerCallback() {
// 处理缓存的点云
pcl::PointCloud<pcl::PointXYZI>::Ptr cloud;
rclcpp::Time stamp;
{
std::lock_guard<std::mutex> lock(mutex_);
if (!latest_cloud_) return;
cloud = latest_cloud_;
stamp = latest_stamp_;
}
// 推理
auto detections = model_->infer(cloud);
// 构建输出消息
autoware_auto_perception_msgs::msg::DetectedObjects objects_msg;
objects_msg.header.stamp = stamp;
objects_msg.header.frame_id = "lidar_frame";
for (const auto& det : detections) {
autoware_auto_perception_msgs::msg::DetectedObject obj;
obj.kinematics.pose_with_covariance.pose.position.x = det.x;
obj.kinematics.pose_with_covariance.pose.position.y = det.y;
obj.kinematics.pose_with_covariance.pose.position.z = det.z;
obj.shape.dimensions.x = det.w;
obj.shape.dimensions.y = det.l;
obj.shape.dimensions.z = det.h;
obj.existence_probability = det.score;
objects_msg.objects.push_back(obj);
}
pub_objects_->publish(objects_msg);
}
void initModel(const std::string& path) {
// TensorRT初始化...
model_ = std::make_unique<PointPillarsTRT>(path);
}
// 成员变量
rclcpp::Subscription<sensor_msgs::msg::PointCloud2>::SharedPtr sub_cloud_;
rclcpp::Publisher<autoware_auto_perception_msgs::msg::DetectedObjects>::SharedPtr pub_objects_;
rclcpp::TimerBase::SharedPtr timer_;
std::mutex mutex_;
pcl::PointCloud<pcl::PointXYZI>::Ptr latest_cloud_;
rclcpp::Time latest_stamp_;
std::unique_ptr<PointPillarsTRT> model_;
double score_threshold_;
};
// Entry point: initialize ROS2, spin the perception node until shutdown.
int main(int argc, char** argv) {
  rclcpp::init(argc, argv);
  rclcpp::spin(std::make_shared<LidarPerceptionNode>());
  rclcpp::shutdown();
  return 0;
}
2.3 ROS2节点实现(Python)#
#!/usr/bin/env python3
# ROS2 Python节点示例
import rclpy
from rclpy.node import Node
from rclpy.qos import QoSProfile, ReliabilityPolicy, HistoryPolicy
from sensor_msgs.msg import PointCloud2
from sensor_msgs_py import point_cloud2 as pc2
from autoware_auto_perception_msgs.msg import DetectedObjects, DetectedObject
import numpy as np
import torch
class LidarPerceptionNode(Node):
    """ROS2 Python perception node.

    Caches the most recent point cloud in the subscription callback and
    runs PyTorch inference on a 100 ms timer, publishing DetectedObjects.
    """

    def __init__(self):
        super().__init__('lidar_perception_py')
        # Imported explicitly: `rclpy.callback_groups` is not reliably bound
        # as an attribute of the top-level `rclpy` package.
        from rclpy.callback_groups import MutuallyExclusiveCallbackGroup

        # Parameters
        self.declare_parameter('model_path', '/opt/model/model.pth')
        self.declare_parameter('device', 'cuda')
        # Resolve the inference device once; the original declared this
        # parameter but hard-coded .cuda() anyway.
        self.device = self.get_parameter('device').value

        # Initialize shared state BEFORE any callback can fire.
        self.latest_cloud = None

        # QoS profile (reliable, keep last 10)
        qos = QoSProfile(
            reliability=ReliabilityPolicy.RELIABLE,
            history=HistoryPolicy.KEEP_LAST,
            depth=10
        )

        # Subscriber / publisher
        self.sub_cloud = self.create_subscription(
            PointCloud2,
            '/lidar/points_raw',
            self.cloud_callback,
            qos
        )
        self.pub_objects = self.create_publisher(
            DetectedObjects,
            '/perception/objects',
            qos
        )

        # Dedicated callback group so the timer can run alongside the
        # subscription under a multi-threaded executor.
        self.callback_group = MutuallyExclusiveCallbackGroup()
        self.timer = self.create_timer(
            0.1,  # 100 ms
            self.timer_callback,
            callback_group=self.callback_group
        )

        # Load the TorchScript model
        self.model = self.load_model()
        self.get_logger().info('Python perception node started')

    def cloud_callback(self, msg: PointCloud2):
        """Point-cloud callback (high frequency): data conversion only."""
        points = pc2.read_points_numpy(msg, field_names=['x', 'y', 'z', 'intensity'])
        # Publish the whole snapshot as a single atomic dict assignment.
        self.latest_cloud = {
            'points': points,
            'stamp': msg.header.stamp,
            'frame_id': msg.header.frame_id
        }

    def timer_callback(self):
        """Timer callback (low frequency): run inference and publish."""
        # Take ONE snapshot — cloud_callback may replace self.latest_cloud
        # concurrently under the multi-threaded executor, so reading the
        # attribute repeatedly could mix fields from different frames.
        cached = self.latest_cloud
        if cached is None:
            return
        points = cached['points']
        # PyTorch inference (no autograd needed)
        with torch.no_grad():
            detections = self.model(torch.from_numpy(points).to(self.device))
        # Build the output message
        objects_msg = DetectedObjects()
        objects_msg.header.stamp = cached['stamp']
        objects_msg.header.frame_id = cached['frame_id']
        for det in detections:
            obj = DetectedObject()
            obj.kinematics.pose_with_covariance.pose.position.x = float(det['x'])
            obj.kinematics.pose_with_covariance.pose.position.y = float(det['y'])
            obj.existence_probability = float(det['score'])
            objects_msg.objects.append(obj)
        self.pub_objects.publish(objects_msg)

    def load_model(self):
        """Load the TorchScript model onto the configured device."""
        model = torch.jit.load(self.get_parameter('model_path').value,
                               map_location=self.device)
        model.eval()
        return model
def main(args=None):
    """Entry point: spin the node on a multi-threaded executor.

    A MultiThreadedExecutor lets the subscription and the inference timer
    run in parallel threads (the GIL is released during torch/numpy work).
    """
    # Imported explicitly: `rclpy.executors` is not reliably bound as an
    # attribute of the top-level `rclpy` package.
    from rclpy.executors import MultiThreadedExecutor

    rclpy.init(args=args)
    node = LidarPerceptionNode()
    executor = MultiThreadedExecutor(num_threads=4)
    executor.add_node(node)
    try:
        executor.spin()
    except KeyboardInterrupt:
        pass
    finally:
        # Always release node resources, even on Ctrl-C.
        node.destroy_node()
        rclpy.shutdown()
if __name__ == '__main__':
    main()
三、Topic(话题)#
3.1 发布-订阅模型#
┌─────────┐ /camera/image_raw ┌─────────┐
│ Camera │ ───────────────────────────→ │ Node B │
│ Node │ (sensor_msgs/Image) │ (处理) │
└─────────┘ └─────────┘
│
│ /camera/image_raw ┌─────────┐
└───────────────────────────────────→│ Node C │
│ (显示) │
└─────────┘
特点:
- 多对多通信
- 异步解耦
- 无持久化(除非配置QoS为transient_local)
3.2 常用Topic类型(自动驾驶)#
| Topic名称 | 消息类型 | 频率 | 说明 |
|---|---|---|---|
| /lidar/points_raw | sensor_msgs/PointCloud2 | 10-20Hz | 原始点云 |
| /lidar/ground | sensor_msgs/PointCloud2 | 10Hz | 地面点云 |
| /perception/objects | autoware_auto_perception_msgs/DetectedObjects | 10Hz | 检测结果 |
| /localization/pose | geometry_msgs/PoseWithCovarianceStamped | 50Hz | 定位位姿 |
| /planning/trajectory | autoware_auto_planning_msgs/Trajectory | 10Hz | 规划轨迹 |
| /vehicle/status | autoware_auto_vehicle_msgs/VehicleStatus | 100Hz | 车辆状态 |
3.3 QoS深度配置#
// ROS2 QoS configuration in detail.
// 1. Sensor data (best-effort: tolerate drops, minimize latency)
rclcpp::QoS sensor_qos(10);
sensor_qos.best_effort();   // UDP-like transport, low latency
sensor_qos.keep_last(10);
// 2. Critical commands (reliable: must arrive)
rclcpp::QoS command_qos(10);
command_qos.reliable();     // TCP-like retransmission
command_qos.durability_volatile();
// 3. Map data (latched: late-joining subscribers receive history)
rclcpp::QoS map_qos(1);
map_qos.reliable();
map_qos.transient_local();  // replay the last sample to new subscribers
map_qos.keep_last(1);
// 4. Real-time control (deadline constraint)
rclcpp::QoS control_qos(1);
control_qos.reliable();
// Use an explicit chrono unit: the raw-integer rclcpp::Duration constructor
// (nanoseconds) is deprecated and easy to misread.
control_qos.deadline(std::chrono::milliseconds(10));  // 10 ms deadline
四、Service(服务)#
4.1 请求-响应模型#
同步调用,适合低频、需确认的操作:
┌─────────────┐ Request ┌─────────────┐
│ Planning │ ─────────────────────────→ │ Map │
│ Node │ (获取特定区域地图) │ Server │
│ │ ←───────────────────────── │ │
└─────────────┘ Response └─────────────┘
(地图数据)
vs Topic:
- Service: 同步,一对一,有响应
- Topic: 异步,多对多,无响应
4.2 Service实现(C++)#
// 定义srv文件 (srv/GetMap.srv)
// float64 latitude
// float64 longitude
// float64 radius
// ---
// nav_msgs/OccupancyGrid map
// bool success
#include "rclcpp/rclcpp.hpp"
#include "my_msgs/srv/get_map.hpp"
// Map service node: answers "give me the local map around (lat, lon, radius)".
class MapServiceNode : public rclcpp::Node {
public:
  MapServiceNode() : Node("map_service") {
    // Load the map database up front so requests can be served immediately.
    loadMap("/opt/maps/city_map.pcd");

    // Register the service handler.
    service_ = this->create_service<my_msgs::srv::GetMap>(
        "/map/get_local_map",
        std::bind(&MapServiceNode::handleGetMap, this,
                  std::placeholders::_1, std::placeholders::_2));
    RCLCPP_INFO(this->get_logger(), "Map service ready");
  }

private:
  // Service handler: query the requested region and fill the response.
  void handleGetMap(
      const std::shared_ptr<my_msgs::srv::GetMap::Request> request,
      std::shared_ptr<my_msgs::srv::GetMap::Response> response) {
    RCLCPP_INFO(this->get_logger(),
                "GetMap request: lat=%.6f, lon=%.6f, radius=%.1f",
                request->latitude, request->longitude, request->radius);

    const auto local_map =
        queryMap(request->latitude, request->longitude, request->radius);
    if (!local_map) {
      response->success = false;
      return;  // no map available for this region
    }
    response->map = *local_map;
    response->success = true;
  }

  // Query the map database for a circular region; empty optional on miss.
  std::optional<nav_msgs::msg::OccupancyGrid> queryMap(
      double lat, double lon, double radius) {
    // Map lookup logic (stub).
    return nav_msgs::msg::OccupancyGrid();
  }

  // Load the map from disk (stub).
  void loadMap(const std::string& path) {
  }

  rclcpp::Service<my_msgs::srv::GetMap>::SharedPtr service_;
};
// 客户端调用
class PlanningNode : public rclcpp::Node {
public:
PlanningNode() : Node("planning") {
client_ = this->create_client<my_msgs::srv::GetMap>("/map/get_local_map");
}
void planRoute() {
// 等待服务可用
while (!client_->wait_for_service(std::chrono::seconds(1))) {
RCLCPP_INFO(this->get_logger(), "Waiting for map service...");
}
// 构建请求
auto request = std::make_shared<my_msgs::srv::GetMap::Request>();
request->latitude = 31.2304;
request->longitude = 121.4737;
request->radius = 500.0; // 500米
// 异步调用
auto future = client_->async_send_request(request);
// 等待响应(带超时)
auto status = future.wait_for(std::chrono::seconds(5));
if (status == std::future_status::ready) {
auto response = future.get();
if (response->success) {
processMap(response->map);
}
}
}
private:
rclcpp::Client<my_msgs::srv::GetMap>::SharedPtr client_;
};
五、TF坐标变换#
5.1 TF树的作用#
核心回答:TF树维护机器人各部件之间的坐标变换关系,解决"传感器数据在哪个坐标系下表示"的问题。
自动驾驶TF树示例:
map (全局地图坐标系)
└── odom (里程计坐标系,短时间准确)
└── base_link (车体中心)
├── lidar_link (激光雷达)
│ └── points数据在此坐标系
├── camera_front (前视相机)
│ └── image数据在此坐标系
├── camera_left (左视相机)
├── radar_front (前向毫米波雷达)
├── imu_link (IMU)
└── gnss_link (GPS天线)
关键特性:
1. 时间戳关联:每个变换带时间戳,支持历史查询
2. 自动插值:支持任意时刻的坐标变换
3. 树形结构:必须连通,无环
5.2 TF树详解#
| 坐标系 | 定义 | 用途 |
|---|---|---|
| map | 全局固定坐标系,ENU或UTM | 全局路径规划,地图匹配 |
| odom | 里程计起始点,短时间固定 | 局部规划,避免全局定位跳变影响 |
| base_link | 车体后轴中心,x前y左z上 | 车辆控制,运动学计算 |
| lidar_link | 激光雷达光学中心 | 点云数据原始坐标系 |
| camera_link | 相机光心 | 图像数据原始坐标系 |
5.3 TF发布与查询(C++)#
#include "tf2_ros/buffer.h"
#include "tf2_ros/static_transform_broadcaster.h"
#include "tf2_ros/transform_broadcaster.h"
#include "tf2_ros/transform_listener.h"
#include "geometry_msgs/msg/transform_stamped.hpp"
#include "tf2_geometry_msgs/tf2_geometry_msgs.hpp"
// Thin wrapper owning the TF2 buffer, listener, and broadcasters for a node.
class TFManager {
public:
  TFManager(rclcpp::Node::SharedPtr node)
      : node_(node),
        tf_buffer_(std::make_shared<tf2_ros::Buffer>(node->get_clock())),
        tf_listener_(std::make_shared<tf2_ros::TransformListener>(*tf_buffer_)),
        tf_broadcaster_(std::make_shared<tf2_ros::TransformBroadcaster>(node)),
        // BUGFIX: this member was never initialized, so publishStaticTF()
        // dereferenced a null shared_ptr.
        static_tf_broadcaster_(
            std::make_shared<tf2_ros::StaticTransformBroadcaster>(node)) {
  }

  // Publish a static TF (sensor extrinsics, fixed for the whole session).
  void publishStaticTF() {
    geometry_msgs::msg::TransformStamped static_transform;
    // base_link -> lidar_link (parent -> child; extrinsic calibration result)
    static_transform.header.stamp = node_->now();
    static_transform.header.frame_id = "base_link";
    static_transform.child_frame_id = "lidar_link";
    static_transform.transform.translation.x = 1.2;  // 1.2 m forward
    static_transform.transform.translation.y = 0.0;
    static_transform.transform.translation.z = 1.8;  // 1.8 m up
    tf2::Quaternion q;
    q.setRPY(0, 0, 0);  // no rotation
    static_transform.transform.rotation = tf2::toMsg(q);
    // Static transforms are latched for late-joining listeners.
    static_tf_broadcaster_->sendTransform(static_transform);
  }

  // Publish a dynamic TF (localization output, updated every frame).
  void publishOdometryTF(const nav_msgs::msg::Odometry::SharedPtr odom) {
    geometry_msgs::msg::TransformStamped transform;
    transform.header = odom->header;
    transform.child_frame_id = odom->child_frame_id;
    transform.transform.translation.x = odom->pose.pose.position.x;
    transform.transform.translation.y = odom->pose.pose.position.y;
    transform.transform.translation.z = odom->pose.pose.position.z;
    transform.transform.rotation = odom->pose.pose.orientation;
    tf_broadcaster_->sendTransform(transform);
  }

  // Transform a whole point cloud into `target_frame` (key for data fusion).
  // Returns false (and logs a warning) if the transform is unavailable.
  bool transformPointCloud(
      const sensor_msgs::msg::PointCloud2::SharedPtr input_cloud,
      const std::string& target_frame,
      sensor_msgs::msg::PointCloud2& output_cloud) {
    try {
      geometry_msgs::msg::TransformStamped transform =
          tf_buffer_->lookupTransform(
              target_frame,                     // target frame
              input_cloud->header.frame_id,     // source frame
              input_cloud->header.stamp,        // at this timestamp
              // Explicit unit instead of the deprecated raw-nanosecond ctor.
              std::chrono::milliseconds(100));  // 100 ms timeout
      tf2::doTransform(*input_cloud, output_cloud, transform);
      output_cloud.header.frame_id = target_frame;
      return true;
    } catch (const tf2::TransformException& ex) {
      RCLCPP_WARN(node_->get_logger(),
                  "TF transform failed: %s", ex.what());
      return false;
    }
  }

  // Transform a single point between frames (for custom algorithms).
  // Throws tf2::TransformException if the transform is unavailable.
  Eigen::Vector3d transformPoint(
      const Eigen::Vector3d& point,
      const std::string& from_frame,
      const std::string& to_frame,
      const rclcpp::Time& time) {
    geometry_msgs::msg::PointStamped point_in, point_out;
    point_in.header.frame_id = from_frame;
    point_in.header.stamp = time;
    point_in.point.x = point.x();
    point_in.point.y = point.y();
    point_in.point.z = point.z();
    tf_buffer_->transform(point_in, point_out, to_frame);
    return Eigen::Vector3d(
        point_out.point.x,
        point_out.point.y,
        point_out.point.z);
  }

private:
  rclcpp::Node::SharedPtr node_;
  std::shared_ptr<tf2_ros::Buffer> tf_buffer_;
  std::shared_ptr<tf2_ros::TransformListener> tf_listener_;
  std::shared_ptr<tf2_ros::TransformBroadcaster> tf_broadcaster_;
  std::shared_ptr<tf2_ros::StaticTransformBroadcaster> static_tf_broadcaster_;
};
5.4 时间同步与TF插值#
// 多传感器时间同步关键代码
// Multi-sensor time-synchronization example.
// NOTE(review): `tf_buffer_` and `transformCloud` are assumed to be members
// declared elsewhere (not shown in this snippet) — confirm before reuse.
class TimeSynchronizer {
public:
// Called with an approximately time-synchronized (point cloud, image) pair.
void syncCallback(
const sensor_msgs::msg::PointCloud2::ConstSharedPtr& cloud,
const sensor_msgs::msg::Image::ConstSharedPtr& image) {
// Timestamps of the two sensors (approximate-time sync policy).
rclcpp::Time cloud_time = cloud->header.stamp;
rclcpp::Time image_time = image->header.stamp;
// Look up the lidar -> camera transform at the cloud's timestamp.
// TF interpolates to arbitrary times between buffered transforms!
geometry_msgs::msg::TransformStamped T_lidar_to_cam =
tf_buffer_->lookupTransform(
"camera_link",
"lidar_link",
cloud_time, // use the point-cloud timestamp
rclcpp::Duration(50000000) // 50 ms tolerance (value is in nanoseconds)
);
// Project the point cloud into the camera frame.
pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_cam =
transformCloud(cloud, T_lidar_to_cam);
// Point-cloud / image fusion would go here...
}
};
六、RViz可视化#
6.1 RViz2配置(ROS2)#
# RViz configuration file (config/autoware.rviz)
Panels:
- Class: rviz_common/Displays
Name: Displays
- Class: rviz_common/Selection
Name: Selection
- Class: rviz_common/Tool Properties
Name: Tool Properties
- Class: rviz_common/Views
Name: Views
Visualization Manager:
Class: ""
Displays:
# 1. 3D ground grid
- Class: rviz_default_plugins/Grid
Name: Grid
Value: true
# 2. TF tree visualization
- Class: rviz_default_plugins/TF
Name: TF
Value: true
Frames:
All Enabled: true
# 3. Point-cloud display
- Class: rviz_default_plugins/PointCloud2
Name: LidarPoints
Topic: /lidar/points_raw
Style: Flat Squares
Size (Pixels): 3
Color Transformer: Intensity
Decay Time: 0.1
# 4. 3D detection boxes
- Class: rviz_default_plugins/MarkerArray
Name: DetectedObjects
Topic: /perception/objects/marker
Value: true
# 5. Planned trajectory
- Class: rviz_default_plugins/Path
Name: PlannedPath
Topic: /planning/trajectory
Color: 0; 255; 0
# 6. Cost map
- Class: rviz_default_plugins/OccupancyGrid
Name: CostMap
Topic: /planning/costmap
Color Scheme: costmap
# 7. Camera image (picture-in-picture)
- Class: rviz_default_plugins/Image
Name: CameraImage
Topic: /camera/image_raw
Tools:
- Class: rviz_default_plugins/Interact
- Class: rviz_default_plugins/MoveCamera
- Class: rviz_default_plugins/Select
- Class: rviz_default_plugins/FocusCamera
- Class: rviz_default_plugins/Measure
Value: true
Views:
Current:
Class: rviz_default_plugins/Orbit
Distance: 50
Focal Point: 0; 0; 0
Name: Current View
6.2 自定义Marker发布(C++)#
#include "visualization_msgs/msg/marker_array.hpp"
class Visualizer {
public:
Visualizer(rclcpp::Node::SharedPtr node)
: node_(node),
pub_markers_(node->create_publisher<visualization_msgs::msg::MarkerArray>(
"/visualization/detections", 10)) {
}
void publishDetections(
const std::vector<DetectedObject>& objects,
const std::string& frame_id) {
visualization_msgs::msg::MarkerArray markers;
for (size_t i = 0; i < objects.size(); ++i) {
const auto& obj = objects[i];
// 3D边界框
visualization_msgs::msg::Marker box;
box.header.frame_id = frame_id;
box.header.stamp = node_->now();
box.ns = "detections";
box.id = i;
box.type = visualization_msgs::msg::Marker::CUBE;
box.action = visualization_msgs::msg::Marker::ADD;
box.pose.position.x = obj.x;
box.pose.position.y = obj.y;
box.pose.position.z = obj.z;
tf2::Quaternion q;
q.setRPY(0, 0, obj.yaw);
box.pose.orientation = tf2::toMsg(q);
box.scale.x = obj.w;
box.scale.y = obj.l;
box.scale.z = obj.h;
// 颜色按类别区分
auto color = getClassColor(obj.class_id);
box.color.r = color[0];
box.color.g = color[1];
box.color.b = color[2];
box.color.a = 0.5; // 半透明
box.lifetime = rclcpp::Duration(200000000); // 200ms
markers.markers.push_back(box);
// 文字标签
visualization_msgs::msg::Marker text;
text.header = box.header;
text.ns = "labels";
text.id = i;
text.type = visualization_msgs::msg::Marker::TEXT_VIEW_FACING;
text.action = visualization_msgs::msg::Marker::ADD;
text.pose.position.x = obj.x;
text.pose.position.y = obj.y;
text.pose.position.z = obj.z + obj.h / 2 + 0.5;
text.scale.z = 0.5; // 字体大小
text.color.r = 1.0;
text.color.g = 1.0;
text.color.b = 1.0;
text.color.a = 1.0;
text.text = obj.class_name + ": " +
std::to_string(int(obj.score * 100)) + "%";
text.lifetime = box.lifetime;
markers.markers.push_back(text);
// 速度箭头
if (obj.velocity.norm() > 0.1) {
visualization_msgs::msg::Marker arrow;
arrow.header = box.header;
arrow.ns = "velocity";
arrow.id = i;
arrow.type = visualization_msgs::msg::Marker::ARROW;
arrow.action = visualization_msgs::msg::Marker::ADD;
geometry_msgs::msg::Point start, end;
start.x = obj.x;
start.y = obj.y;
start.z = obj.z;
end.x = obj.x + obj.velocity.x();
end.y = obj.y + obj.velocity.y();
end.z = obj.z;
arrow.points.push_back(start);
arrow.points.push_back(end);
arrow.scale.x = 0.1; // 杆直径
arrow.scale.y = 0.2; // 头直径
arrow.color.r = 1.0;
arrow.color.a = 0.8;
arrow.lifetime = box.lifetime;
markers.markers.push_back(arrow);
}
}
// 删除旧marker (action=DELETE)
for (size_t i = objects.size(); i < last_marker_count_; ++i) {
visualization_msgs::msg::Marker del;
del.ns = "detections";
del.id = i;
del.action = visualization_msgs::msg::Marker::DELETE;
markers.markers.push_back(del);
}
last_marker_count_ = objects.size();
pub_markers_->publish(markers);
}
private:
std::array<float, 3> getClassColor(int class_id) {
static std::vector<std::array<float, 3>> colors = {
{1.0, 0.0, 0.0}, // Car - Red
{0.0, 1.0, 0.0}, // Pedestrian - Green
{0.0, 0.0, 1.0}, // Cyclist - Blue
{1.0, 1.0, 0.0}, // Truck - Yellow
{1.0, 0.0, 1.0}, // Bus - Magenta
};
return colors[class_id % colors.size()];
}
rclcpp::Node::SharedPtr node_;
rclcpp::Publisher<visualization_msgs::msg::MarkerArray>::SharedPtr pub_markers_;
size_t last_marker_count_ = 0;
};
七、完整节点启动配置#
7.1 Launch文件(ROS2 Python)#
# launch/perception.launch.py
from launch import LaunchDescription
from launch_ros.actions import Node
from launch.actions import DeclareLaunchArgument
from launch.substitutions import LaunchConfiguration
def generate_launch_description():
    """Launch the lidar perception pipeline: driver, perception node,
    static TF, RViz2, and a composable-node container."""
    # These classes live in submodules not covered by the top-of-file
    # imports; the original referenced ComposableNode without importing it.
    from launch_ros.actions import ComposableNodeContainer
    from launch_ros.descriptions import ComposableNode

    return LaunchDescription([
        # --- Launch arguments ----------------------------------------------
        DeclareLaunchArgument(
            'model_path',
            default_value='/opt/model/pointpillars.onnx',
            description='Path to TensorRT engine'
        ),
        DeclareLaunchArgument(
            'use_gpu',
            default_value='true',
            description='Enable GPU acceleration'
        ),
        # --- Lidar driver node ---------------------------------------------
        Node(
            package='lidar_driver',
            executable='lidar_node',
            name='lidar_driver',
            parameters=[{
                'frame_id': 'lidar_link',
                'topic': '/lidar/points_raw',
                'frequency': 10.0
            }],
            output='screen'
        ),
        # --- Perception node (C++) -----------------------------------------
        Node(
            package='lidar_perception',
            executable='perception_node',
            name='lidar_perception',
            parameters=[{
                'model_path': LaunchConfiguration('model_path'),
                'score_threshold': 0.5,
                'use_gpu': LaunchConfiguration('use_gpu'),
                'num_threads': 4
            }],
            remappings=[
                ('/input/points', '/lidar/points_raw'),
                ('/output/objects', '/perception/objects')
            ],
            output='screen',
            arguments=['--ros-args', '--log-level', 'info']
        ),
        # --- Static TF publisher (lidar extrinsics) ------------------------
        Node(
            package='tf2_ros',
            executable='static_transform_publisher',
            arguments=['1.2', '0', '1.8', '0', '0', '0',
                       'base_link', 'lidar_link'],
            name='lidar_tf'
        ),
        # --- RViz2 ----------------------------------------------------------
        Node(
            package='rviz2',
            executable='rviz2',
            name='rviz2',
            arguments=['-d', '/opt/config/autoware.rviz'],
            output='screen'
        ),
        # --- Component container (same-process nodes, zero-copy IPC) -------
        # BUGFIX: composable nodes must be loaded via ComposableNodeContainer;
        # a plain Node action has no `composable_node_descriptions` argument.
        ComposableNodeContainer(
            package='rclcpp_components',
            executable='component_container_mt',  # multi-threaded container
            name='perception_container',
            namespace='',
            composable_node_descriptions=[
                ComposableNode(
                    package='lidar_perception',
                    plugin='perception::PreprocessComponent',
                    name='preprocess'
                ),
                ComposableNode(
                    package='lidar_perception',
                    plugin='perception::InferenceComponent',
                    name='inference'
                ),
                ComposableNode(
                    package='lidar_perception',
                    plugin='perception::PostprocessComponent',
                    name='postprocess'
                ),
            ]
        )
    ])
7.2 运行命令#
# Build the single package in Release mode
colcon build --packages-select lidar_perception --cmake-args -DCMAKE_BUILD_TYPE=Release
# Launch the pipeline with overridden launch arguments
ros2 launch lidar_perception perception.launch.py model_path:=/home/user/model.trt use_gpu:=true
# Debug / introspection tools
ros2 topic list # list all topics
ros2 topic echo /perception/objects # print message contents
ros2 topic hz /lidar/points_raw # measure publish rate
ros2 node info /lidar_perception # inspect node subscriptions/publishers
ros2 param list /lidar_perception # list node parameters
ros2 param set /lidar_perception score_threshold 0.3 # change a parameter at runtime
ros2 bag record /lidar/points_raw /perception/objects # record data
ros2 bag play recorded.bag # play back recorded data
八、总结:TF树的核心作用#
| 作用 | 说明 | 示例 |
|---|---|---|
| 数据对齐 | 多传感器数据统一到同一坐标系 | 点云+图像融合 |
| 时间插值 | 支持任意时刻的坐标查询 | 运动补偿 |
| 模块化 | 各模块独立开发,通过TF解耦 | 定位与感知分离 |
| 可视化 | RViz统一显示所有数据 | 多传感器叠加显示 |
| 可维护性 | 外参修改无需改代码 | 标定参数更新 |