Cartographer Source Code Analysis (18): sensor/compressed_point_cloud


The annotated source code can be downloaded from https://github.com/learnmoreonce/SLAM

File: sensor/compressed_point_cloud.h

#ifndef CARTOGRAPHER_SENSOR_COMPRESSED_POINT_CLOUD_H_
#define CARTOGRAPHER_SENSOR_COMPRESSED_POINT_CLOUD_H_

#include <iterator>
#include <vector>

#include "Eigen/Core"
#include "cartographer/common/port.h"
#include "cartographer/sensor/point_cloud.h"
#include "cartographer/sensor/proto/sensor.pb.h"

namespace cartographer {
namespace sensor {

/*
CompressedPointCloud is the point cloud compression class.
Purpose: compress the points to reduce storage; the compression is lossy.
Method: points are grouped into blocks. There is only one private constructor.
*/

// A compressed representation of a point cloud consisting of a collection of
// points (Vector3f).
// Internally, points are grouped by blocks. Each block encodes a bit of meta
// data (number of points in block, coordinates of the block) and encodes each
// point with a fixed bit rate in relation to the block.
class CompressedPointCloud {
 public:
  class ConstIterator;  // forward declaration

  CompressedPointCloud() : num_points_(0) {}
  explicit CompressedPointCloud(const PointCloud& point_cloud);

  // Returns decompressed point cloud.
  PointCloud Decompress() const;

  bool empty() const;   // num_points_ == 0
  size_t size() const;  // num_points_

  ConstIterator begin() const;
  ConstIterator end() const;

  proto::CompressedPointCloud ToProto() const;

 private:
  CompressedPointCloud(const std::vector<int32>& point_data,
                       size_t num_points);

  std::vector<int32> point_data_;
  size_t num_points_;
};

// Forward iterator for compressed point clouds.
class CompressedPointCloud::ConstIterator
    : public std::iterator<std::forward_iterator_tag, Eigen::Vector3f> {
 public:
  // Creates begin iterator.
  explicit ConstIterator(const CompressedPointCloud* compressed_point_cloud);

  // Creates end iterator.
  static ConstIterator EndIterator(
      const CompressedPointCloud* compressed_point_cloud);

  Eigen::Vector3f operator*() const;
  ConstIterator& operator++();
  bool operator!=(const ConstIterator& it) const;

 private:
  // Reads next point from buffer. Also handles reading the meta data of the
  // next block, if the current block is depleted.
  void ReadNextPoint();

  const CompressedPointCloud* compressed_point_cloud_;
  size_t remaining_points_;
  int32 remaining_points_in_current_block_;
  Eigen::Vector3f current_point_;
  Eigen::Vector3i current_block_coordinates_;
  std::vector<int32>::const_iterator input_;
};

}  // namespace sensor
}  // namespace cartographer

#endif  // CARTOGRAPHER_SENSOR_COMPRESSED_POINT_CLOUD_H_
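Before diving into the implementation, the following is a minimal, standalone sketch (my own illustration, not part of Cartographer) of the packing scheme described above. It hard-codes the constants kPrecision = 0.001 and kBitsPerCoordinate = 10 that are defined in compressed_point_cloud.cc below; the sample point is arbitrary.

#include <cmath>
#include <cstdint>
#include <iostream>

int main() {
  constexpr float kPrecision = 0.001f;    // grid spacing in meters
  constexpr int kBitsPerCoordinate = 10;  // bits per coordinate inside a block
  constexpr int kCoordinateMask = (1 << kBitsPerCoordinate) - 1;

  const float point[3] = {1.234f, 5.678f, -0.102f};
  int32_t block[3], offset[3];
  for (int i = 0; i < 3; ++i) {
    // Quantize to the 1 mm grid, then split into block origin + 10-bit offset.
    const int32_t raster =
        static_cast<int32_t>(std::lround(point[i] / kPrecision));
    block[i] = raster >> kBitsPerCoordinate;   // coarse block coordinate
    offset[i] = raster & kCoordinateMask;      // offset within the block
  }
  // Three 10-bit offsets packed into one int32, as the encoder does.
  const int32_t packed =
      (((offset[2] << kBitsPerCoordinate) + offset[1]) << kBitsPerCoordinate) +
      offset[0];
  std::cout << "block (" << block[0] << "," << block[1] << "," << block[2]
            << "), packed offsets " << packed << "\n";
}

Running this prints the block coordinates and the single int32 that stores the three 10-bit offsets; the constructor in the .cc file below performs essentially this arithmetic for every point.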


File: sensor/compressed_point_cloud.cc

#include "cartographer/sensor/compressed_point_cloud.h"

#include <limits>

#include "cartographer/common/math.h"
#include "cartographer/mapping_3d/hybrid_grid.h"

namespace cartographer {
namespace sensor {

namespace {

// Points are encoded on a fixed grid with a grid spacing of 'kPrecision' with
// integers. Points are organized in blocks, where each point is encoded
// relative to the block's origin in an int32 with 'kBitsPerCoordinate' bits per
// coordinate.
constexpr float kPrecision = 0.001f;  // in meters.
constexpr int kBitsPerCoordinate = 10;
constexpr int kCoordinateMask = (1 << kBitsPerCoordinate) - 1;
constexpr int kMaxBitsPerDirection = 23;

}  // namespace

CompressedPointCloud::ConstIterator::ConstIterator(
    const CompressedPointCloud* compressed_point_cloud)
    : compressed_point_cloud_(compressed_point_cloud),
      remaining_points_(compressed_point_cloud->num_points_),
      remaining_points_in_current_block_(0),
      input_(compressed_point_cloud->point_data_.begin()) {
  if (remaining_points_ > 0) {
    ReadNextPoint();
  }
}

CompressedPointCloud::ConstIterator
CompressedPointCloud::ConstIterator::EndIterator(
    const CompressedPointCloud* compressed_point_cloud) {
  ConstIterator end_iterator(compressed_point_cloud);
  end_iterator.remaining_points_ = 0;
  return end_iterator;
}

Eigen::Vector3f CompressedPointCloud::ConstIterator::operator*() const {
  CHECK_GT(remaining_points_, 0);
  return current_point_;
}

CompressedPointCloud::ConstIterator&
CompressedPointCloud::ConstIterator::operator++() {
  --remaining_points_;
  if (remaining_points_ > 0) {
    ReadNextPoint();
  }
  return *this;
}

bool CompressedPointCloud::ConstIterator::operator!=(
    const ConstIterator& it) const {
  CHECK(compressed_point_cloud_ == it.compressed_point_cloud_);
  return remaining_points_ != it.remaining_points_;
}

void CompressedPointCloud::ConstIterator::ReadNextPoint() {
  if (remaining_points_in_current_block_ == 0) {
    remaining_points_in_current_block_ = *input_++;
    for (int i = 0; i < 3; ++i) {
      current_block_coordinates_[i] = *input_++ << kBitsPerCoordinate;
    }
  }
  --remaining_points_in_current_block_;
  const int point = *input_++;
  constexpr int kMask = (1 << kBitsPerCoordinate) - 1;
  current_point_[0] =
      (current_block_coordinates_[0] + (point & kMask)) * kPrecision;
  current_point_[1] = (current_block_coordinates_[1] +
                       ((point >> kBitsPerCoordinate) & kMask)) *
                      kPrecision;
  current_point_[2] =
      (current_block_coordinates_[2] + (point >> (2 * kBitsPerCoordinate))) *
      kPrecision;
}

// The most important constructor: it compresses the point cloud.
// 'point_cloud' is a vector of Vector3f; the encoded result is stored in
// point_data_.
CompressedPointCloud::CompressedPointCloud(const PointCloud& point_cloud)
    : num_points_(point_cloud.size()) {
  // Distribute points into blocks.
  struct RasterPoint {
    Eigen::Array3i point;  // Array3i(int d1, int d2, int d3)
    int index;
  };
  using Blocks = mapping_3d::HybridGridBase<std::vector<RasterPoint>>;
  Blocks blocks(kPrecision);
  int num_blocks = 0;
  CHECK_LE(point_cloud.size(), std::numeric_limits<int>::max());
  for (int point_index = 0; point_index < static_cast<int>(point_cloud.size());
       ++point_index) {
    const Eigen::Vector3f& point = point_cloud[point_index];  // one point {x, y, z}
    CHECK_LT(point.cwiseAbs().maxCoeff() / kPrecision,
             1 << kMaxBitsPerDirection)
        << "Point out of bounds: " << point;
    Eigen::Array3i raster_point;
    Eigen::Array3i block_coordinate;
    for (int i = 0; i < 3; ++i) {
      raster_point[i] = common::RoundToInt(point[i] / kPrecision);
      block_coordinate[i] = raster_point[i] >> kBitsPerCoordinate;
      raster_point[i] &= kCoordinateMask;
    }
    auto* const block = blocks.mutable_value(block_coordinate);
    num_blocks += block->empty();
    block->push_back({raster_point, point_index});
  }  // end for

  // Encode blocks.
  point_data_.reserve(4 * num_blocks + point_cloud.size());
  for (Blocks::Iterator it(blocks); !it.Done(); it.Next(), --num_blocks) {
    const auto& raster_points = it.GetValue();
    CHECK_LE(raster_points.size(), std::numeric_limits<int32>::max());
    point_data_.push_back(raster_points.size());
    const Eigen::Array3i block_coordinate = it.GetCellIndex();
    point_data_.push_back(block_coordinate.x());
    point_data_.push_back(block_coordinate.y());
    point_data_.push_back(block_coordinate.z());
    for (const RasterPoint& raster_point : raster_points) {
      point_data_.push_back((((raster_point.point.z() << kBitsPerCoordinate) +
                              raster_point.point.y())
                             << kBitsPerCoordinate) +
                            raster_point.point.x());
    }
  }
  CHECK_EQ(num_blocks, 0);
}

// Private constructor; it cannot be called from outside the class.
CompressedPointCloud::CompressedPointCloud(const std::vector<int32>& point_data,
                                           size_t num_points)
    : point_data_(point_data), num_points_(num_points) {}

bool CompressedPointCloud::empty() const { return num_points_ == 0; }

size_t CompressedPointCloud::size() const { return num_points_; }

CompressedPointCloud::ConstIterator CompressedPointCloud::begin() const {
  // Begin iterator.
  return ConstIterator(this);
}

CompressedPointCloud::ConstIterator CompressedPointCloud::end() const {
  // End iterator.
  return ConstIterator::EndIterator(this);
}

PointCloud CompressedPointCloud::Decompress() const {
  PointCloud decompressed;  // a vector of Vector3f
  for (const Eigen::Vector3f& point : *this) {  // uses begin()/end() defined above
    decompressed.push_back(point);
  }
  return decompressed;
}

proto::CompressedPointCloud CompressedPointCloud::ToProto() const {
  proto::CompressedPointCloud result;
  result.set_num_points(num_points_);  // serialize the number of points
  for (const int32 data : point_data_) {
    result.add_point_data(data);       // append the encoded data in order
  }
  return result;
}

}  // namespace sensor
}  // namespace cartographer
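To see how the pieces fit together, here is a hedged usage sketch (my own example, not taken from the Cartographer tree) of the public API shown above: compress a PointCloud, iterate lazily, decompress, and serialize to proto. It assumes PointCloud is the std::vector<Eigen::Vector3f> alias from sensor/point_cloud.h, which is consistent with the test code below.

#include "cartographer/sensor/compressed_point_cloud.h"

#include "Eigen/Core"

using cartographer::sensor::CompressedPointCloud;
using cartographer::sensor::PointCloud;

void Example() {
  PointCloud cloud = {Eigen::Vector3f(0.838f, 0.f, 0.f),
                      Eigen::Vector3f(0.839f, 0.f, 0.f)};

  // Compress: points are quantized to the 1 mm grid and grouped into blocks.
  const CompressedPointCloud compressed(cloud);

  // Iterate without fully decompressing; each point is decoded on the fly and
  // lies within the grid precision of the original.
  for (const Eigen::Vector3f& p : compressed) {
    (void)p;
  }

  // Full decompression and protobuf serialization.
  const PointCloud decompressed = compressed.Decompress();
  const auto proto = compressed.ToProto();
  (void)decompressed;
  (void)proto;
}

The range-based for loop works because begin()/end() return the ConstIterator defined above, which decodes one point at a time instead of materializing the whole decompressed cloud.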

Test code: sensor/compressed_point_cloud_test.cc

#include "cartographer/sensor/compressed_point_cloud.h"

#include "gmock/gmock.h"

namespace Eigen {

// Prints Vector3f in a readable format in matcher ApproximatelyEquals when
// failing a test. Without this function, the output is formated as hexadecimal
// 8 bit numbers.
void PrintTo(const Vector3f& x, std::ostream* os) {
  *os << "(" << x[0] << ", " << x[1] << ", " << x[2] << ")";
}

}  // namespace Eigen

namespace cartographer {
namespace sensor {
namespace {

using ::testing::Contains;
using ::testing::FloatNear;
using ::testing::PrintToString;

constexpr float kPrecision = 0.001f;

// Matcher for 3-d vectors w.r.t. to the target precision.
MATCHER_P(ApproximatelyEquals, expected,
          string("is equal to ") + PrintToString(expected)) {
  return (arg - expected).isZero(kPrecision);  // compression is lossy; the tolerance is 0.001
}

// Helper function to test the mapping of a single point. Includes test for
// recompressing the same point again.
void TestPoint(const Eigen::Vector3f& p) {
  CompressedPointCloud compressed({p});
  EXPECT_EQ(1, compressed.size());
  EXPECT_THAT(*compressed.begin(), ApproximatelyEquals(p));
  CompressedPointCloud recompressed({*compressed.begin()});
  EXPECT_THAT(*recompressed.begin(), ApproximatelyEquals(p));
}

TEST(CompressPointCloudTest, CompressesPointsCorrectly) {
  TestPoint(Eigen::Vector3f(8000.f, 7500.f, 5000.f));
  TestPoint(Eigen::Vector3f(1000.f, 2000.f, 3000.f));
  TestPoint(Eigen::Vector3f(100.f, 200.f, 300.f));
  TestPoint(Eigen::Vector3f(10.f, 20.f, 30.f));
  TestPoint(Eigen::Vector3f(-0.00049f, -0.0005f, -0.0015f));
  TestPoint(Eigen::Vector3f(0.05119f, 0.0512f, 0.05121));
  TestPoint(Eigen::Vector3f(-0.05119f, -0.0512f, -0.05121));
  TestPoint(Eigen::Vector3f(0.8405f, 0.84f, 0.8396f));
  TestPoint(Eigen::Vector3f(0.8395f, 0.8394f, 0.8393f));
  TestPoint(Eigen::Vector3f(0.839f, 0.8391f, 0.8392f));
  TestPoint(Eigen::Vector3f(0.8389f, 0.8388f, 0.83985f));
}

TEST(CompressPointCloudTest, Compresses) {
  const CompressedPointCloud compressed({Eigen::Vector3f(0.838f, 0, 0),
                                         Eigen::Vector3f(0.839f, 0, 0),
                                         Eigen::Vector3f(0.840f, 0, 0)});
  EXPECT_FALSE(compressed.empty());
  EXPECT_EQ(3, compressed.size());
  const PointCloud decompressed = compressed.Decompress();
  EXPECT_EQ(3, decompressed.size());
  // After compressing and decompressing, values agree to the first 3 decimal places.
  EXPECT_THAT(decompressed,
              Contains(ApproximatelyEquals(Eigen::Vector3f(0.838f, 0, 0))));
  EXPECT_THAT(decompressed,
              Contains(ApproximatelyEquals(Eigen::Vector3f(0.839f, 0, 0))));
  EXPECT_THAT(decompressed,
              Contains(ApproximatelyEquals(Eigen::Vector3f(0.840f, 0, 0))));
}

TEST(CompressPointCloudTest, CompressesEmptyPointCloud) {
  CompressedPointCloud compressed;
  EXPECT_TRUE(compressed.empty());
  EXPECT_EQ(0, compressed.size());
}

// Test for gaps.
// Produces a series of points densly packed along the x axis, compresses these
// points (twice), and tests, whether there are gaps between two consecutive
// points.
TEST(CompressPointCloudTest, CompressesNoGaps) {
  PointCloud point_cloud;
  for (int i = 0; i < 3000; ++i) {
    point_cloud.push_back(Eigen::Vector3f(kPrecision * i - 1.5f, 0, 0));
  }
  const CompressedPointCloud compressed(point_cloud);        // compress
  const PointCloud decompressed = compressed.Decompress();   // decompress
  const CompressedPointCloud recompressed(decompressed);     // compress again
  EXPECT_EQ(decompressed.size(), recompressed.size());

  std::vector<float> x_coord;
  for (const auto& p : compressed) {
    x_coord.push_back(p[0]);
  }
  std::sort(x_coord.begin(), x_coord.end());
  for (size_t i = 1; i < x_coord.size(); ++i) {
    EXPECT_THAT(std::abs(x_coord[i] - x_coord[i - 1]),
                FloatNear(kPrecision, 1e-7f));  // consecutive values differ by about kPrecision
  }
}

}  // namespace
}  // namespace sensor
}  // namespace cartographer
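To put the constants into numbers: with kPrecision = 0.001 m and kBitsPerCoordinate = 10, each block spans 2^10 x 0.001 m = 1.024 m per axis, and the worst-case quantization error from rounding to the grid is half the grid spacing, i.e. 0.5 mm per coordinate. kMaxBitsPerDirection = 23 limits each coordinate to |x| < 2^23 x 0.001 m ≈ 8388.6 m, which is why the CompressesPointsCorrectly test can use points as far out as 8000 m. Storage-wise, each block costs 4 int32 entries of metadata plus one int32 per point, compared with three floats (12 bytes) per point in the uncompressed cloud.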

This article is also published at:
* http://www.jianshu.com/u/9e38d2febec1
* https://zhuanlan.zhihu.com/learnmoreonce
* http://blog.csdn.net/learnmoreonce
* SLAM source code analysis WeChat public account: slamcode