[Vision System] --> [Perception Node]
| |
[Gazebo Sim] <--> [ROS2 Control] <--> [MoveIt! Planner]
| |
[Hardware Interface] --> [Real Arm]
# Install ROS2 Humble
sudo apt install ros-humble-desktop
# Install MoveIt2
sudo apt install ros-humble-moveit
# Create the workspace
mkdir -p ~/arm_ws/src
cd ~/arm_ws/
colcon build --symlink-install
# Industrial robot-arm driver packages
sudo apt install ros-humble-industrial-core
# Vision-processing packages
sudo apt install ros-humble-vision-opencv
# Depth-camera driver (the apt package is realsense2-camera, not realsense2)
sudo apt install ros-humble-realsense2-camera
# Launch the MoveIt! Setup Assistant (ROS2 launch files carry a .py suffix)
ros2 launch moveit_setup_assistant setup_assistant.launch.py
配置关键参数:
// arm_controller/src/ik_solver.cpp
#include
class IKSolver {
public:
bool computeIK(const geometry_msgs::msg::PoseStamped& target_pose,
std::vector& joint_values) {
moveit::core::RobotStatePtr current_state =
move_group->getCurrentState();
bool found_ik = current_state->setFromIK(
move_group->getRobotModel()->getJointModelGroup("manipulator"),
target_pose.pose, 10, 0.1);
if(found_ik) {
current_state->copyJointGroupPositions(
"manipulator", joint_values);
return true;
}
return false;
}
};
# task_planner/bt_nodes/assembly_task.py
from py_trees import Behaviour, Blackboard
from py_trees.common import Status


class PickPlaceTask(Behaviour):
    """Behaviour-tree leaf that runs one pick-and-place cycle.

    Reads the vision-supplied target pose from the blackboard, plans and
    executes a grasp, then plans the place motion.
    """

    def __init__(self, name):
        super().__init__(name)
        self.blackboard = Blackboard()

    def update(self):
        # 1. Fetch the vision target pose published by the perception node.
        target_pose = self.blackboard.get("target_pose")
        if target_pose is None:
            # No detection yet — fail fast rather than planning to a null pose.
            return Status.FAILURE
        # 2. Plan the grasp trajectory.
        if not self.plan_grasp(target_pose):
            return Status.FAILURE
        # 3. Execute the grasp motion.
        self.execute_grasp()
        # 4. Plan the place trajectory.
        if not self.plan_place():
            return Status.FAILURE
        return Status.SUCCESS

    def plan_grasp(self, target_pose):
        # Call the MoveIt! planning service (stubbed in the tutorial).
        return True

    def execute_grasp(self):
        # Execute the planned grasp trajectory (stub — the original snippet
        # called this method but never defined it).
        pass

    def plan_place(self):
        # Plan the place trajectory (stub — the original snippet called
        # this method but never defined it).
        return True
// task_planner/include/state_machine.h
// NOTE(review): this snippet was garbled by extraction — the #include target
// and the smacc2 template arguments (SmaccStateMachineBase<...>,
// SmaccState<...>, transit<...>()) were stripped. Restore them from the
// SMACC2 documentation before compiling.
#include
// Assembly-task state machine built on the SMACC2 framework.
class AssemblySM : public smacc2::SmaccStateMachineBase {
public:
// Inherit the base-class constructors.
using SmaccStateMachineBase::SmaccStateMachineBase;
// Orthogonal line (independent concurrent region) of the state machine.
struct Orthogonal : smacc2::Orthogonal {};
// Idle state: reacts to an event by transitioning onward.
struct StateIdle : smacc2::SmaccState {
smacc2::Transition onEvent() override {
// Transition target lost to extraction — presumably the first active state.
return transit();
}
};
// Remaining state definitions omitted.
};
# Camera intrinsic calibration (8x6 checkerboard, 24.5 mm squares)
ros2 run camera_calibration cameracalibrator \
--size 8x6 --square 0.0245 image:=/camera/color/image_raw
# vision_system/src/object_detector.py
import cv2
import numpy as np


class ObjectDetector:
    """Detect ArUco markers and estimate their pose.

    The original snippet referenced undefined globals ``camera_matrix`` and
    ``dist_coeffs``; they are now constructor parameters (backward-compatible
    defaults), so ``detect_pose`` no longer raises NameError.
    """

    def __init__(self, camera_matrix=None, dist_coeffs=None):
        # 6x6 dictionary, 250 markers. NOTE(review): this is the legacy
        # OpenCV aruco API; OpenCV >= 4.7 renamed these to
        # getPredefinedDictionary / DetectorParameters.
        self.aruco_dict = cv2.aruco.Dictionary_get(cv2.aruco.DICT_6X6_250)
        self.aruco_params = cv2.aruco.DetectorParameters_create()
        # Intrinsics from camera calibration; required for pose estimation.
        self.camera_matrix = camera_matrix
        # Assume an undistorted image when no coefficients are supplied.
        self.dist_coeffs = dist_coeffs if dist_coeffs is not None else np.zeros(5)

    def detect_pose(self, img):
        """Return (tvec, rvec) of the first detected marker, or None."""
        gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
        corners, ids, _ = cv2.aruco.detectMarkers(
            gray, self.aruco_dict, parameters=self.aruco_params)
        if ids is not None:
            # Marker side length: 0.05 m.
            rvec, tvec, _ = cv2.aruco.estimatePoseSingleMarkers(
                corners, 0.05, self.camera_matrix, self.dist_coeffs)
            return tvec[0][0], rvec[0][0]
        return None
<!-- World objects (reconstructed: the SDF tags were stripped by extraction;
     verify poses and grouping against the original assembly_world file) -->
<include>
  <uri>model://ground_plane</uri>
</include>
<include>
  <uri>model://table</uri>
  <pose>0 0 0.75 0 0 0</pose>
  <static>true</static>
</include>
<include>
  <uri>model://gear_part</uri>
  <pose>0.3 0 0.8 0 0 0</pose>
</include>
# Launch the Gazebo simulation world
ros2 launch arm_gazebo assembly_world.launch.py
# Launch the MoveIt! planning scene (RViz)
ros2 launch arm_moveit_config moveit_rviz.launch.py
# Launch the arm control node
ros2 run arm_controller arm_control_node
// Hardware-interface driver write hook: push commanded joint positions to
// the actuators. NOTE(review): reconstructed — the original loop was
// truncated mid-line by extraction ("for(size_t i=0; i"); the element type
// <double> was also stripped from the parameter. Restore the real mapping.
void write(const std::vector<double>& commands) override {
    // Convert each commanded joint angle (rad) into a PWM signal.
    for (size_t i = 0; i < commands.size(); ++i) {
        // TODO(review): angle -> PWM conversion and bus write lost in extraction.
    }
}
2.安全机制实现:
arm_ws/
├── src/
│ ├── arm_description/ # URDF模型
│ ├── arm_moveit_config/ # MoveIt!配置
│ ├── arm_controller/ # 控制算法(C++)
│ ├── task_planner/ # 任务规划(C++/Python)
│ ├── vision_system/ # 视觉处理
│ └── arm_gazebo/ # 仿真环境
└── colcon.meta
# Inspect the planning scene in RViz
ros2 run rviz2 rviz2 -d `ros2 pkg prefix arm_moveit_config`/share/arm_moveit_config/launch/moveit.rviz
# Record planning data to a bag
ros2 bag record /move_group/display_planned_path
# Performance analysis: joint-state publish rate
ros2 topic hz /joint_states
2.视觉定位偏差:
通过本教程的系统学习,开发者可以掌握:
建议按照以下顺序学习: