Cartographer Source Code Reading: 2D Front End - LocalTrajectoryBuilder2D

The main responsibilities of the front end:
(1) Point cloud processing: filtering and motion-distortion correction
(2) Pose prediction with the pose extrapolator
(3) Real-time correlative ("brute force") scan matching
(4) Ceres scan matching (referred to as CSM below)
(5) Building submaps
(6) Producing nodes that are passed to the back end

class LocalTrajectoryBuilder2D {
 public:
  struct InsertionResult {
    // Node data for the back end.
    std::shared_ptr<const TrajectoryNode::Data> constant_data;
    // The (up to two) submaps this node's data was inserted into.
    std::vector<std::shared_ptr<const Submap2D>> insertion_submaps;
  };
  struct MatchingResult {
    common::Time time;
    // Pose of the tracking frame in the local SLAM frame. Multiplying by the
    // local-to-global transform yields the pose in the global SLAM frame.
    transform::Rigid3d local_pose;
    // Point cloud transformed into the local SLAM frame.
    sensor::RangeData range_data_in_local;
    // 'nullptr' if dropped by the motion filter.
    std::unique_ptr<const InsertionResult> insertion_result;
  };

  explicit LocalTrajectoryBuilder2D(
      const proto::LocalTrajectoryBuilderOptions2D& options,
      const std::vector<std::string>& expected_range_sensor_ids);
  ~LocalTrajectoryBuilder2D();

  LocalTrajectoryBuilder2D(const LocalTrajectoryBuilder2D&) = delete;
  LocalTrajectoryBuilder2D& operator=(const LocalTrajectoryBuilder2D&) = delete;

  // Returns 'MatchingResult' when range data accumulation completed,
  // otherwise 'nullptr'. Range data must be approximately horizontal
  // for 2D SLAM. `TimedPointCloudData::time` is when the last point in
  // `range_data` was acquired, `TimedPointCloudData::ranges` contains the
  // relative time of point with respect to `TimedPointCloudData::time`.
  std::unique_ptr<MatchingResult> AddRangeData(
      const std::string& sensor_id,
      const sensor::TimedPointCloudData& range_data);
  void AddImuData(const sensor::ImuData& imu_data);
  void AddOdometryData(const sensor::OdometryData& odometry_data);

  static void RegisterMetrics(metrics::FamilyFactory* family_factory);

 private:
  std::unique_ptr<MatchingResult> AddAccumulatedRangeData(
      common::Time time, const sensor::RangeData& gravity_aligned_range_data,
      const transform::Rigid3d& gravity_alignment);
  sensor::RangeData TransformToGravityAlignedFrameAndFilter(
      const transform::Rigid3f& transform_to_gravity_aligned_frame,
      const sensor::RangeData& range_data) const;

  std::unique_ptr<InsertionResult> InsertIntoSubmap(
      common::Time time, const sensor::RangeData& range_data_in_local,
      const sensor::RangeData& gravity_aligned_range_data,
      const transform::Rigid3d& pose_estimate,
      const Eigen::Quaterniond& gravity_alignment);

  // Scan matches 'gravity_aligned_range_data' and returns the observed pose,
  // or nullptr on failure.
  std::unique_ptr<transform::Rigid2d> ScanMatch(
      common::Time time, const transform::Rigid2d& pose_prediction,
      const sensor::RangeData& gravity_aligned_range_data);

  // Lazily constructs a PoseExtrapolator.
  void InitializeExtrapolator(common::Time time);

  const proto::LocalTrajectoryBuilderOptions2D options_;
  // Holds the two most recent submaps; handles range data insertion, submap
  // creation, and provides the submap used for matching.
  ActiveSubmaps2D active_submaps_;
  // Motion filter: prevents near-duplicate scans from being inserted into
  // active_submaps_.
  MotionFilter motion_filter_;
  // Real-time correlative scan matcher ("brute force" matching).
  scan_matching::RealTimeCorrelativeScanMatcher2D
      real_time_correlative_scan_matcher_;
  // Ceres scan matcher (CSM).
  scan_matching::CeresScanMatcher2D ceres_scan_matcher_;
  // Pose extrapolator.
  std::unique_ptr<PoseExtrapolator> extrapolator_;
  // Number of scans accumulated so far; compared against
  // num_accumulated_range_data (usually 1 when a single lidar is used).
  int num_accumulated_ = 0;
  // Scratch buffer holding the range data accumulated from several scans.
  sensor::RangeData accumulated_range_data_;
  // Wall-clock time when the current accumulation started (used for the
  // latency metric).
  std::chrono::steady_clock::time_point accumulation_started_;
  // Synchronizes and merges range data arriving from multiple sensors.
  RangeDataCollator range_data_collator_;
};
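Before going through the member functions, here is a minimal driving sketch. It is not taken from the original post: the include path, the helper function and the "scan" sensor id are assumptions, and only the public interface declared above is used. IMU and odometry merely feed the pose extrapolator, while AddRangeData returns a MatchingResult only once a full accumulation has been scan-matched.

// Sketch only: the include path and the "scan" id are assumptions and may
// differ between cartographer versions.
#include "cartographer/mapping/internal/2d/local_trajectory_builder_2d.h"

namespace cartographer {
namespace mapping {

std::unique_ptr<LocalTrajectoryBuilder2D::MatchingResult> FeedOneScan(
    LocalTrajectoryBuilder2D* builder, const sensor::ImuData& imu,
    const sensor::OdometryData& odom,
    const sensor::TimedPointCloudData& scan) {
  builder->AddImuData(imu);        // only feeds the pose extrapolator
  builder->AddOdometryData(odom);  // only feeds the pose extrapolator
  // nullptr until 'num_accumulated_range_data' scans have been accumulated and
  // matched; the result's insertion_result may still be nullptr if the motion
  // filter dropped the scan.
  return builder->AddRangeData("scan", scan);
}

}  // namespace mapping
}  // namespace cartographer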

(1) Odometry data handling

void LocalTrajectoryBuilder2D::AddOdometryData(
    const sensor::OdometryData& odometry_data) {
  if (extrapolator_ == nullptr) {
    // Until we've initialized the extrapolator we cannot add odometry data.
    LOG(INFO) << "Extrapolator not yet initialized.";
    return;
  }
  // Forward the odometry data to the pose extrapolator.
  extrapolator_->AddOdometryData(odometry_data);
}

(2) IMU data handling

void LocalTrajectoryBuilder2D::AddImuData(const sensor::ImuData& imu_data) {
  CHECK(options_.use_imu_data()) << "An unexpected IMU packet was added.";
  // The first IMU measurement (lazily) initializes the pose extrapolator.
  InitializeExtrapolator(imu_data.time);
  // Forward the IMU data to the pose extrapolator.
  extrapolator_->AddImuData(imu_data);
}

(3) Laser point cloud handling
Drop temporally overlapping points based on their timestamps;
correct motion distortion using the pose extrapolator;
gravity-align the point cloud;
voxel-filter the point cloud.

std::unique_ptr<LocalTrajectoryBuilder2D::MatchingResult>
LocalTrajectoryBuilder2D::AddRangeData(
    const std::string& sensor_id,
    const sensor::TimedPointCloudData& unsynchronized_data) {
  // Using timestamps, drop points of the current scan that are older than the
  // last point of the previous scan, i.e. remove temporally overlapping points.
  auto synchronized_data =
      range_data_collator_.AddRangeData(sensor_id, unsynchronized_data);
  if (synchronized_data.ranges.empty()) {
    LOG(INFO) << "Range data collator filling buffer.";
    return nullptr;
  }

  const common::Time& time = synchronized_data.time;
  // Initialize extrapolator now if we do not ever use an IMU.
  // When an IMU is used, the extrapolator is instead initialized from the
  // first IMU message in AddImuData.
  if (!options_.use_imu_data()) {
    InitializeExtrapolator(time);
  }
  // Without an initialized pose extrapolator no poses can be computed, so bail
  // out.
  if (extrapolator_ == nullptr) {
    // Until we've initialized the extrapolator with our first IMU message, we
    // cannot compute the orientation of the rangefinder.
    LOG(INFO) << "Extrapolator not yet initialized.";
    return nullptr;
  }

  CHECK(!synchronized_data.ranges.empty());
  // TODO(gaschler): Check if this can strictly be 0.
  // The relative time of the last point must be <= 0.
  CHECK_LE(synchronized_data.ranges.back().point_time[3], 0.f);
  // Absolute time of the first point in this scan.
  const common::Time time_first_point =
      time +
      common::FromSeconds(synchronized_data.ranges.front().point_time[3]);
  // If the first point predates the extrapolator's last pose, the extrapolator
  // is still initializing.
  if (time_first_point < extrapolator_->GetLastPoseTime()) {
    LOG(INFO) << "Extrapolator is still initializing.";
    return nullptr;
  }

  if (num_accumulated_ == 0) {
    accumulation_started_ = std::chrono::steady_clock::now();
  }
  // The extrapolated sensor pose (in the local SLAM frame) at the time of each
  // point.
  std::vector<transform::Rigid3f> range_data_poses;
  range_data_poses.reserve(synchronized_data.ranges.size());
  bool warned = false;
  for (const auto& range : synchronized_data.ranges) {
    common::Time time_point = time + common::FromSeconds(range.point_time[3]);
    
    if (time_point < extrapolator_->GetLastExtrapolatedTime()) {
      if (!warned) {
        LOG(ERROR)
            << "Timestamp of individual range data point jumps backwards from "
            << extrapolator_->GetLastExtrapolatedTime() << " to " << time_point;
        warned = true;
      }
      time_point = extrapolator_->GetLastExtrapolatedTime();
    }
    // Extrapolate the sensor pose (in the local SLAM frame) at this point's
    // timestamp; this is what undistorts the scan.
    range_data_poses.push_back(
        extrapolator_->ExtrapolatePose(time_point).cast<float>());
  }

  if (num_accumulated_ == 0) {
    // 'accumulated_range_data_.origin' is uninitialized until the last
    // accumulation.
    accumulated_range_data_ = sensor::RangeData{{}, {}, {}};
  }

  // Drop any returns below the minimum range and convert returns beyond the
  // maximum range into misses.
  for (size_t i = 0; i < synchronized_data.ranges.size(); ++i) {
    const Eigen::Vector4f& hit = synchronized_data.ranges[i].point_time;
    // The sensor origin at this point's time, transformed into the local SLAM
    // frame.
    const Eigen::Vector3f origin_in_local =
        range_data_poses[i] *
        synchronized_data.origins.at(synchronized_data.ranges[i].origin_index);
    // The hit point transformed into the local SLAM frame.
    const Eigen::Vector3f hit_in_local = range_data_poses[i] * hit.head<3>();
    // Ray from the sensor origin to the hit, expressed in the local SLAM frame.
    const Eigen::Vector3f delta = hit_in_local - origin_in_local;
    const float range = delta.norm();
    if (range >= options_.min_range()) {
      if (range <= options_.max_range()) {
        // Valid return.
        accumulated_range_data_.returns.push_back(hit_in_local);
      } else {
        // Beyond the maximum range: treat as a miss, with the ray shortened to
        // missing_data_ray_length.
        accumulated_range_data_.misses.push_back(
            origin_in_local +
            options_.missing_data_ray_length() / range * delta);
      }
    }
  }
  ++num_accumulated_;

  if (num_accumulated_ >= options_.num_accumulated_range_data()) {
    num_accumulated_ = 0;
    const transform::Rigid3d gravity_alignment = transform::Rigid3d::Rotation(
        extrapolator_->EstimateGravityOrientation(time));
    // TODO(gaschler): This assumes that 'range_data_poses.back()' is at time
    // 'time'.
    // Use the pose of the last point as the origin of the accumulated data.
    accumulated_range_data_.origin = range_data_poses.back().translation();
    // Transform all points into the frame of the last point, gravity-align
    // them, and then run scan matching to estimate the robot pose in the local
    // SLAM frame at 'time'.
    return AddAccumulatedRangeData(
        time,
        // Gravity alignment plus z-cropping and voxel filtering.
        TransformToGravityAlignedFrameAndFilter(
            gravity_alignment.cast<float>() * range_data_poses.back().inverse(),
            accumulated_range_data_),
        gravity_alignment);
  }
  return nullptr;
}
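The hit/miss handling above is a small amount of geometry on the undistorted points: each point's absolute time is the scan end time plus its (non-positive) relative time, its sensor pose is extrapolated at that time, and the resulting ray is kept as a return, shortened into a miss, or dropped. Below is a standalone sketch of just the classification step; the min_range, max_range and missing_data_ray_length values are placeholders, not values from the original post.

#include <Eigen/Core>
#include <vector>

struct Classified {
  std::vector<Eigen::Vector3f> returns;  // hits kept for matching/insertion
  std::vector<Eigen::Vector3f> misses;   // shortened rays marking free space
};

// Same hit/miss rule as AddRangeData, with illustrative thresholds.
Classified ClassifyPoints(const std::vector<Eigen::Vector3f>& hits_in_local,
                          const Eigen::Vector3f& origin_in_local) {
  constexpr float kMinRange = 0.3f;             // assumption, not from the post
  constexpr float kMaxRange = 30.f;             // assumption
  constexpr float kMissingDataRayLength = 5.f;  // assumption
  Classified out;
  for (const Eigen::Vector3f& hit : hits_in_local) {
    const Eigen::Vector3f delta = hit - origin_in_local;
    const float range = delta.norm();
    if (range < kMinRange) continue;  // too close: dropped entirely
    if (range <= kMaxRange) {
      out.returns.push_back(hit);     // valid return
    } else {
      // Beyond max range: keep a shortened ray so free space is still updated.
      out.misses.push_back(origin_in_local +
                           kMissingDataRayLength / range * delta);
    }
  }
  return out;
}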

The voxel filter interface:

sensor::RangeData
LocalTrajectoryBuilder2D::TransformToGravityAlignedFrameAndFilter(
    const transform::Rigid3f& transform_to_gravity_aligned_frame,
    const sensor::RangeData& range_data) const {
  // After transforming into the gravity-aligned frame, crop points outside the
  // [min_z, max_z] band.
  const sensor::RangeData cropped =
      sensor::CropRangeData(sensor::TransformRangeData(
                                range_data, transform_to_gravity_aligned_frame),
                            options_.min_z(), options_.max_z());
  // Voxel-filter both returns and misses.
  return sensor::RangeData{
      cropped.origin,
      sensor::VoxelFilter(options_.voxel_filter_size()).Filter(cropped.returns),
      sensor::VoxelFilter(options_.voxel_filter_size()).Filter(cropped.misses)};
}
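sensor::VoxelFilter downsamples the cloud so that roughly one point per cubic voxel of edge length voxel_filter_size survives. The following is a simplified, self-contained illustration of that idea only: the real filter hashes voxel indices and keeps one point per occupied voxel, while this sketch simply keeps the first point seen in each voxel.

#include <Eigen/Core>
#include <cmath>
#include <cstdint>
#include <unordered_set>
#include <vector>

// Keep the first point falling into each cubic voxel of edge length
// 'resolution' (simplified voxel downsampling sketch).
std::vector<Eigen::Vector3f> VoxelDownsample(
    const std::vector<Eigen::Vector3f>& points, float resolution) {
  const auto key = [resolution](const Eigen::Vector3f& p) {
    // Pack the integer voxel indices into one 64-bit key (21 bits per axis).
    const auto idx = [&](float v) {
      return static_cast<std::int64_t>(std::floor(v / resolution)) & 0x1FFFFF;
    };
    return (idx(p.x()) << 42) | (idx(p.y()) << 21) | idx(p.z());
  };
  std::unordered_set<std::int64_t> occupied;
  std::vector<Eigen::Vector3f> filtered;
  for (const Eigen::Vector3f& p : points) {
    if (occupied.insert(key(p)).second) filtered.push_back(p);
  }
  return filtered;
}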

Pose extrapolator initialization (called directly from AddRangeData when no IMU is used):

void LocalTrajectoryBuilder2D::InitializeExtrapolator(const common::Time time) {
  if (extrapolator_ != nullptr) {
    return;
  }
  // We derive velocities from poses which are at least 1 ms apart for numerical
  // stability. Usually poses known to the extrapolator will be further apart
  // in time and thus the last two are used.
  constexpr double kExtrapolationEstimationTimeSec = 0.001;
  // TODO(gaschler): Consider using InitializeWithImu as 3D does.
  extrapolator_ = common::make_unique<PoseExtrapolator>(
      ::cartographer::common::FromSeconds(kExtrapolationEstimationTimeSec),
      options_.imu_gravity_time_constant());
  extrapolator_->AddPose(time, transform::Rigid3d::Identity());
}
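The 1 ms constant only guarantees that the two poses used for finite differences are far enough apart for numerical stability; from those poses the extrapolator derives linear and angular velocities (its UpdateVelocitiesFromPoses step), which are then combined with IMU/odometry data when available. A simplified sketch of that finite-difference idea in plain Eigen (this is not the extrapolator's actual code):

#include <Eigen/Geometry>

// Finite-difference velocities from two timed poses: the basic idea behind
// the pose extrapolator's velocity update (simplified sketch).
struct TimedPose {
  double time_seconds;
  Eigen::Isometry3d pose;
};

Eigen::Vector3d LinearVelocity(const TimedPose& older, const TimedPose& newer) {
  const double dt = newer.time_seconds - older.time_seconds;  // must be > ~1 ms
  return (newer.pose.translation() - older.pose.translation()) / dt;
}

Eigen::Vector3d AngularVelocity(const TimedPose& older, const TimedPose& newer) {
  const double dt = newer.time_seconds - older.time_seconds;
  // Rotation from the older pose to the newer one, expressed as an angle-axis
  // vector divided by the elapsed time.
  const Eigen::AngleAxisd delta(older.pose.rotation().inverse() *
                                newer.pose.rotation());
  return delta.axis() * delta.angle() / dt;
}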

AddAccumulatedRangeData then performs the remaining front-end steps:
extrapolate the robot pose at the current time with the pose extrapolator;
adaptively voxel-filter the point cloud and run real-time correlative matching plus CSM to estimate the scan pose in the local SLAM frame;
update the pose extrapolator with the matched pose;
insert the point cloud (in the local SLAM frame) into the active submaps;
produce a node that is passed to the back end.

std::unique_ptr<LocalTrajectoryBuilder2D::MatchingResult>
LocalTrajectoryBuilder2D::AddAccumulatedRangeData(
    const common::Time time,
    const sensor::RangeData& gravity_aligned_range_data,
    const transform::Rigid3d& gravity_alignment) {
  if (gravity_aligned_range_data.returns.empty()) {
    LOG(WARNING) << "Dropped empty horizontal range data.";
    return nullptr;
  }

  // Computes a gravity aligned pose prediction.
  // The pose extrapolator predicts the current pose in the local SLAM frame.
  const transform::Rigid3d non_gravity_aligned_pose_prediction =
      extrapolator_->ExtrapolatePose(time);
  // Scan matching optimizes T such that T * p fits the submap. Since the point
  // cloud p has already been gravity-aligned, the gravity alignment must be
  // removed from the predicted pose T before matching.
  const transform::Rigid2d pose_prediction = transform::Project2D(
      non_gravity_aligned_pose_prediction * gravity_alignment.inverse());

  // local map frame <- gravity-aligned frame: scan matching estimates the pose
  // in the local SLAM frame.
  std::unique_ptr<transform::Rigid2d> pose_estimate_2d =
      ScanMatch(time, pose_prediction, gravity_aligned_range_data);
  if (pose_estimate_2d == nullptr) {
    LOG(WARNING) << "Scan matching failed.";
    return nullptr;
  }
  // Re-apply the gravity alignment to obtain the full 3D pose in the local
  // SLAM frame.
  const transform::Rigid3d pose_estimate =
      transform::Embed3D(*pose_estimate_2d) * gravity_alignment;
  // Feed the scan-matched pose back into the pose extrapolator.
  extrapolator_->AddPose(time, pose_estimate);
  // Transform the point cloud into the local SLAM frame.
  sensor::RangeData range_data_in_local =
      TransformRangeData(gravity_aligned_range_data,
                         transform::Embed3D(pose_estimate_2d->cast<float>()));
  // Insert the points (now in the local SLAM frame) into the submaps.
  std::unique_ptr<InsertionResult> insertion_result =
      InsertIntoSubmap(time, range_data_in_local, gravity_aligned_range_data,
                       pose_estimate, gravity_alignment.rotation());
  auto duration = std::chrono::steady_clock::now() - accumulation_started_;
  kLocalSlamLatencyMetric->Set(
      std::chrono::duration_cast<std::chrono::seconds>(duration).count());
  // Returned to the caller, which forwards it to the back end.
  return common::make_unique<MatchingResult>(
      MatchingResult{time, pose_estimate, std::move(range_data_in_local),
                     std::move(insertion_result)});
}
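Why gravity_alignment is removed before matching and multiplied back afterwards: the matcher optimizes a pose T such that T * p_aligned fits the submap, where p_aligned = R_g * p_tracking. Since (T * R_g^-1) * (R_g * p_tracking) = T * p_tracking, stripping R_g from the predicted pose and re-applying it to the estimate leaves the overall tracking-to-local transform consistent. A small Eigen check of that identity, using arbitrary example values that are not from the post:

#include <Eigen/Geometry>
#include <iostream>

int main() {
  // Arbitrary example transforms.
  const Eigen::Isometry3d T_local =
      Eigen::Translation3d(1.0, 2.0, 0.0) *
      Eigen::AngleAxisd(0.3, Eigen::Vector3d::UnitZ());
  const Eigen::Isometry3d R_g(
      Eigen::AngleAxisd(0.1, Eigen::Vector3d::UnitX()));  // gravity alignment
  const Eigen::Vector3d p_tracking(4.0, -1.0, 0.5);       // a raw point

  // Matching works on gravity-aligned points with a gravity-stripped pose...
  const Eigen::Vector3d via_aligned =
      (T_local * R_g.inverse()) * (R_g * p_tracking);
  // ...which is the same as transforming the raw point directly.
  const Eigen::Vector3d direct = T_local * p_tracking;

  std::cout << (via_aligned - direct).norm() << "\n";  // ~0 up to rounding
  return 0;
}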

The scan match interface:

std::unique_ptr<transform::Rigid2d> LocalTrajectoryBuilder2D::ScanMatch(
    const common::Time time, const transform::Rigid2d& pose_prediction,
    const sensor::RangeData& gravity_aligned_range_data) {
  // Use the older of the two active submaps as the matching submap.
  std::shared_ptr<const Submap2D> matching_submap =
      active_submaps_.submaps().front();
  // The online correlative scan matcher will refine the initial estimate for
  // the Ceres scan matcher.
  transform::Rigid2d initial_ceres_pose = pose_prediction;
  // Adaptive voxel filtering of the gravity-aligned point cloud.
  sensor::AdaptiveVoxelFilter adaptive_voxel_filter(
      options_.adaptive_voxel_filter_options());
  const sensor::PointCloud filtered_gravity_aligned_point_cloud =
      adaptive_voxel_filter.Filter(gravity_aligned_range_data.returns);
  if (filtered_gravity_aligned_point_cloud.empty()) {
    return nullptr;
  }
  // Depending on the options, run the real-time correlative ("brute force")
  // matcher to obtain a better initial pose for the Ceres scan matcher.
  if (options_.use_online_correlative_scan_matching()) {
    CHECK_EQ(options_.submaps_options().grid_options_2d().grid_type(),
             proto::GridOptions2D_GridType_PROBABILITY_GRID);
    double score = real_time_correlative_scan_matcher_.Match(
        pose_prediction, filtered_gravity_aligned_point_cloud,
        *static_cast<const ProbabilityGrid*>(matching_submap->grid()),
        &initial_ceres_pose);
    kFastCorrelativeScanMatcherScoreMetric->Observe(score);
  }
  // Run Ceres scan matching (CSM) to compute the scan pose.
  auto pose_observation = common::make_unique<transform::Rigid2d>();
  ceres::Solver::Summary summary;
  ceres_scan_matcher_.Match(pose_prediction.translation(), initial_ceres_pose,
                            filtered_gravity_aligned_point_cloud,
                            *matching_submap->grid(), pose_observation.get(),
                            &summary);
  if (pose_observation) {
    kCeresScanMatcherCostMetric->Observe(summary.final_cost);
    double residual_distance =
        (pose_observation->translation() - pose_prediction.translation())
            .norm();
    kScanMatcherResidualDistanceMetric->Observe(residual_distance);
    double residual_angle = std::abs(pose_observation->rotation().angle() -
                                     pose_prediction.rotation().angle());
    kScanMatcherResidualAngleMetric->Observe(residual_angle);
  }
  return pose_observation;
}
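Conceptually, the real-time correlative ("brute force") matcher enumerates small translational and angular offsets around the predicted pose, scores every candidate by summing the submap's occupancy probability at each transformed scan point, and keeps the best-scoring candidate as the refined initial estimate for Ceres. The following is a much-simplified standalone sketch of that search; the window sizes, step sizes and the scoring callback are placeholders, and the real matcher discretizes to grid cells and precomputes rotated scans.

#include <Eigen/Geometry>
#include <functional>
#include <vector>

struct Pose2D {
  double x, y, theta;
};

// Exhaustive search over a small window around 'prediction'. 'score_at' stands
// in for looking up the occupancy probability of a point in the submap's grid.
Pose2D CorrelativeSearch(
    const Pose2D& prediction, const std::vector<Eigen::Vector2d>& scan,
    const std::function<double(const Eigen::Vector2d&)>& score_at) {
  constexpr double kLinearWindow = 0.1, kLinearStep = 0.01;      // placeholders
  constexpr double kAngularWindow = 0.35, kAngularStep = 0.035;  // placeholders
  Pose2D best = prediction;
  double best_score = -1.0;
  for (double dt = -kAngularWindow; dt <= kAngularWindow; dt += kAngularStep) {
    const Eigen::Rotation2Dd rot(prediction.theta + dt);
    for (double dx = -kLinearWindow; dx <= kLinearWindow; dx += kLinearStep) {
      for (double dy = -kLinearWindow; dy <= kLinearWindow; dy += kLinearStep) {
        const Eigen::Vector2d t(prediction.x + dx, prediction.y + dy);
        double score = 0.0;
        for (const Eigen::Vector2d& p : scan) score += score_at(rot * p + t);
        if (score > best_score) {
          best_score = score;
          best = {t.x(), t.y(), prediction.theta + dt};
        }
      }
    }
  }
  return best;
}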

The submap insertion interface:
motion filtering;
inserting the range data;
building the InsertionResult.

std::unique_ptr<LocalTrajectoryBuilder2D::InsertionResult>
LocalTrajectoryBuilder2D::InsertIntoSubmap(
    const common::Time time, const sensor::RangeData& range_data_in_local,
    const sensor::RangeData& gravity_aligned_range_data,
    const transform::Rigid3d& pose_estimate,
    const Eigen::Quaterniond& gravity_alignment) {
  // Motion filter: skip scans whose pose is too similar to the last inserted
  // one.
  if (motion_filter_.IsSimilar(time, pose_estimate)) {
    return nullptr;
  }

  // Querying the active submaps must be done here before calling
  // InsertRangeData() since the queried values are valid for next insertion.
  std::vector<std::shared_ptr<const Submap2D>> insertion_submaps;
  for (const std::shared_ptr<Submap2D>& submap : active_submaps_.submaps()) {
    insertion_submaps.push_back(submap);
  }
  // Insert the range data into the active submaps.
  active_submaps_.InsertRangeData(range_data_in_local);
  // Adaptive voxel filtering (with the loop-closure filter options) for the
  // point cloud stored in the node.
  sensor::AdaptiveVoxelFilter adaptive_voxel_filter(
      options_.loop_closure_adaptive_voxel_filter_options());
  const sensor::PointCloud filtered_gravity_aligned_point_cloud =
      adaptive_voxel_filter.Filter(gravity_aligned_range_data.returns);

  return common::make_unique<InsertionResult>(InsertionResult{
      std::make_shared<const TrajectoryNode::Data>(TrajectoryNode::Data{
          // Scan time.
          time,
          // Gravity alignment (rotation only).
          gravity_alignment,
          // Gravity-aligned, adaptively filtered point cloud in the tracking
          // frame.
          filtered_gravity_aligned_point_cloud,
          {},  // 'high_resolution_point_cloud' is only used in 3D.
          {},  // 'low_resolution_point_cloud' is only used in 3D.
          {},  // 'rotational_scan_matcher_histogram' is only used in 3D.
          // Pose in the local SLAM frame.
          pose_estimate}),
      // The submaps (one or two) this scan was inserted into.
      std::move(insertion_submaps)});
}
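Step (6), producing a node for the back end, happens one level above this class: in the cartographer source, GlobalTrajectoryBuilder takes the returned MatchingResult and, when its insertion_result is non-null, forwards constant_data and insertion_submaps to the pose graph. A rough sketch of that hand-off; the exact AddNode signature may differ between versions.

// Sketch of the hand-off to the back end, following GlobalTrajectoryBuilder's
// pattern; the signature is approximate.
void ForwardToBackEnd(
    int trajectory_id, PoseGraph2D* pose_graph,
    std::unique_ptr<LocalTrajectoryBuilder2D::MatchingResult> matching_result) {
  if (matching_result == nullptr) return;  // still accumulating scans
  if (matching_result->insertion_result == nullptr) return;  // motion-filtered
  pose_graph->AddNode(matching_result->insertion_result->constant_data,
                      trajectory_id,
                      matching_result->insertion_result->insertion_submaps);
}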
