Point Cloud Library (PCL) 1.7.2
integral_image_normal.h
/*
 * Software License Agreement (BSD License)
 *
 * Point Cloud Library (PCL) - www.pointclouds.org
 * Copyright (c) 2010-2011, Willow Garage, Inc.
 * Copyright (c) 2012-, Open Perception, Inc.
 *
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 *  * Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *  * Redistributions in binary form must reproduce the above
 *    copyright notice, this list of conditions and the following
 *    disclaimer in the documentation and/or other materials provided
 *    with the distribution.
 *  * Neither the name of the copyright holder(s) nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 *
 */

#ifndef PCL_INTEGRALIMAGE_BASED_NORMAL_ESTIMATOR_H_
#define PCL_INTEGRALIMAGE_BASED_NORMAL_ESTIMATOR_H_

#include <pcl/point_cloud.h>
#include <pcl/point_types.h>
#include <pcl/features/feature.h>
#include <pcl/features/integral_image2D.h>

#if defined BUILD_Maintainer && defined __GNUC__ && __GNUC__ == 4 && __GNUC_MINOR__ > 3
#pragma GCC diagnostic ignored "-Weffc++"
#endif
namespace pcl
{
  /** \brief Surface normal estimation on organized data using integral images.
    *
    * For detailed information about this method see:
    *
    * S. Holzer, R. B. Rusu, M. Dixon, S. Gedikli, and N. Navab,
    * Adaptive Neighborhood Selection for Real-Time Surface Normal Estimation
    * from Organized Point Cloud Data Using Integral Images, IROS 2012.
    *
    * D. Holz, S. Holzer, R. B. Rusu, and S. Behnke (2011, July).
    * Real-Time Plane Segmentation using RGB-D Cameras. In Proceedings of
    * the 15th RoboCup International Symposium, Istanbul, Turkey.
    * http://www.ais.uni-bonn.de/~holz/papers/holz_2011_robocup.pdf
    *
    * \author Stefan Holzer
    */
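  // Minimal usage sketch (an illustration, with hypothetical variable names): it assumes an
  // organized `cloud` of type pcl::PointCloud<pcl::PointXYZ>::Ptr obtained elsewhere, e.g. from a
  // depth-camera grabber or an organized PCD file.
  //
  //   pcl::PointCloud<pcl::Normal>::Ptr normals (new pcl::PointCloud<pcl::Normal>);
  //   pcl::IntegralImageNormalEstimation<pcl::PointXYZ, pcl::Normal> ne;
  //   ne.setNormalEstimationMethod (ne.AVERAGE_3D_GRADIENT);
  //   ne.setMaxDepthChangeFactor (0.02f);     // treat depth jumps larger than 2 cm as object borders
  //   ne.setNormalSmoothingSize (10.0f);      // smoothing area of roughly 10 pixels
  //   ne.setInputCloud (cloud);
  //   ne.compute (*normals);                  // one normal per input point, NaN where invalid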
  template <typename PointInT, typename PointOutT>
  class IntegralImageNormalEstimation: public Feature<PointInT, PointOutT>
  {
    using Feature<PointInT, PointOutT>::input_;
    using Feature<PointInT, PointOutT>::feature_name_;
    using Feature<PointInT, PointOutT>::tree_;
    using Feature<PointInT, PointOutT>::k_;

    public:
      typedef boost::shared_ptr<IntegralImageNormalEstimation<PointInT, PointOutT> > Ptr;
      typedef boost::shared_ptr<const IntegralImageNormalEstimation<PointInT, PointOutT> > ConstPtr;

      /** \brief Different types of border handling. */
      enum BorderPolicy
      {
        BORDER_POLICY_IGNORE,
        BORDER_POLICY_MIRROR
      };

      /** \brief Different normal estimation methods.
        * <ul>
        *   <li><b>COVARIANCE_MATRIX</b> - creates 9 integral images to compute the normal for a specific point
        *   from the covariance matrix of its local neighborhood.</li>
        *   <li><b>AVERAGE_3D_GRADIENT</b> - creates 6 integral images to compute smoothed versions of
        *   horizontal and vertical 3D gradients and computes the normals using the cross-product between these
        *   two gradients.</li>
        *   <li><b>AVERAGE_DEPTH_CHANGE</b> - creates only a single integral image and computes the normals
        *   from the average depth changes.</li>
        *   <li><b>SIMPLE_3D_GRADIENT</b> - computes the normals from non-smoothed horizontal and vertical
        *   3D gradients obtained from a single integral image over the 3D data.</li>
        * </ul>
        */
      enum NormalEstimationMethod
      {
        COVARIANCE_MATRIX,
        AVERAGE_3D_GRADIENT,
        AVERAGE_DEPTH_CHANGE,
        SIMPLE_3D_GRADIENT
      };

      typedef typename Feature<PointInT, PointOutT>::PointCloudIn  PointCloudIn;
      typedef typename Feature<PointInT, PointOutT>::PointCloudOut PointCloudOut;

      /** \brief Constructor */
      IntegralImageNormalEstimation ()
        : normal_estimation_method_ (AVERAGE_3D_GRADIENT)
        , border_policy_ (BORDER_POLICY_IGNORE)
        , rect_width_ (0), rect_width_2_ (0), rect_width_4_ (0)
        , rect_height_ (0), rect_height_2_ (0), rect_height_4_ (0)
        , distance_threshold_ (0)
        , integral_image_DX_ (false)
        , integral_image_DY_ (false)
        , integral_image_depth_ (false)
        , integral_image_XYZ_ (true)
        , diff_x_ (NULL)
        , diff_y_ (NULL)
        , depth_data_ (NULL)
        , distance_map_ (NULL)
        , use_depth_dependent_smoothing_ (false)
        , max_depth_change_factor_ (20.0f*0.001f)
        , normal_smoothing_size_ (10.0f)
        , init_covariance_matrix_ (false)
        , init_average_3d_gradient_ (false)
        , init_simple_3d_gradient_ (false)
        , init_depth_change_ (false)
        , vpx_ (0.0f)
        , vpy_ (0.0f)
        , vpz_ (0.0f)
        , use_sensor_origin_ (true)
      {
        feature_name_ = "IntegralImagesNormalEstimation";
        tree_.reset ();
        k_ = 1;
      }

      /** \brief Destructor **/
      virtual ~IntegralImageNormalEstimation ();
      /** \brief Set the size of the region which is considered for normal estimation.
        * \param[in] width the width of the search rectangle
        * \param[in] height the height of the search rectangle
        */
      void
      setRectSize (const int width, const int height);

      /** \brief Sets the policy for handling borders.
        * \param[in] border_policy the border policy.
        */
      void
      setBorderPolicy (const BorderPolicy border_policy)
      {
        border_policy_ = border_policy;
      }

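      // A brief sketch of the border policies (using the `ne` estimator from the example above):
      // with BORDER_POLICY_MIRROR the neighborhood is mirrored at the image borders (see
      // computePointNormalMirror), so normals can also be estimated close to the borders; with the
      // default BORDER_POLICY_IGNORE, normals near the borders are left invalid (NaN).
      //
      //   ne.setBorderPolicy (ne.BORDER_POLICY_MIRROR);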
      /** \brief Computes the normal at the specified position.
        * \param[in] pos_x x position (pixel)
        * \param[in] pos_y y position (pixel)
        * \param[in] point_index the position index of the point
        * \param[out] normal the output estimated normal
        */
      void
      computePointNormal (const int pos_x, const int pos_y, const unsigned point_index, PointOutT &normal);

      /** \brief Computes the normal at the specified position with mirroring for border handling.
        * \param[in] pos_x x position (pixel)
        * \param[in] pos_y y position (pixel)
        * \param[in] point_index the position index of the point
        * \param[out] normal the output estimated normal
        */
      void
      computePointNormalMirror (const int pos_x, const int pos_y, const unsigned point_index, PointOutT &normal);

      /** \brief Set the depth change threshold for computing object borders.
        * \param[in] max_depth_change_factor the depth change threshold for computing object borders based on
        * depth changes
        */
      void
      setMaxDepthChangeFactor (float max_depth_change_factor)
      {
        max_depth_change_factor_ = max_depth_change_factor;
      }

      /** \brief Set the normal smoothing size
        * \param[in] normal_smoothing_size factor which influences the size of the area used to smooth normals
        * (depth dependent if useDepthDependentSmoothing is true)
        */
      void
      setNormalSmoothingSize (float normal_smoothing_size)
      {
        if (normal_smoothing_size <= 0)
        {
          PCL_ERROR ("[pcl::%s::setNormalSmoothingSize] Invalid normal smoothing size given! (%f). Allowed ranges are: 0 < N. Defaulting to %f.\n",
                     feature_name_.c_str (), normal_smoothing_size, normal_smoothing_size_);
          return;
        }
        normal_smoothing_size_ = normal_smoothing_size;
      }

      /** \brief Set the normal estimation method. The currently implemented algorithms are:
        * <ul>
        *   <li><b>COVARIANCE_MATRIX</b> - creates 9 integral images to compute the normal for a specific point
        *   from the covariance matrix of its local neighborhood.</li>
        *   <li><b>AVERAGE_3D_GRADIENT</b> - creates 6 integral images to compute smoothed versions of
        *   horizontal and vertical 3D gradients and computes the normals using the cross-product between these
        *   two gradients.</li>
        *   <li><b>AVERAGE_DEPTH_CHANGE</b> - creates only a single integral image and computes the normals
        *   from the average depth changes.</li>
        *   <li><b>SIMPLE_3D_GRADIENT</b> - computes the normals from non-smoothed horizontal and vertical
        *   3D gradients obtained from a single integral image over the 3D data.</li>
        * </ul>
        * \param[in] normal_estimation_method the method used for normal estimation
        */
      void
      setNormalEstimationMethod (NormalEstimationMethod normal_estimation_method)
      {
        normal_estimation_method_ = normal_estimation_method;
      }
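      // A brief sketch (again using the `ne` estimator from the example above): COVARIANCE_MATRIX
      // builds the most integral images of the methods listed above and is therefore typically the
      // most expensive option, so it is usually chosen when higher-quality normals are worth the
      // extra cost.
      //
      //   ne.setNormalEstimationMethod (ne.COVARIANCE_MATRIX);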

      /** \brief Set whether to use depth dependent smoothing or not
        * \param[in] use_depth_dependent_smoothing decides whether the smoothing is depth dependent
        */
      void
      setDepthDependentSmoothing (bool use_depth_dependent_smoothing)
      {
        use_depth_dependent_smoothing_ = use_depth_dependent_smoothing;
      }

      /** \brief Provide a pointer to the input dataset (overwrites the PCLBase::setInputCloud method)
        * \param[in] cloud the const boost shared pointer to a PointCloud message
        */
      virtual inline void
      setInputCloud (const typename PointCloudIn::ConstPtr &cloud)
      {
        input_ = cloud;
        if (!cloud->isOrganized ())
        {
          PCL_ERROR ("[pcl::IntegralImageNormalEstimation::setInputCloud] Input dataset is not organized (height = 1).\n");
          return;
        }

        init_covariance_matrix_ = init_average_3d_gradient_ = init_depth_change_ = false;

        if (use_sensor_origin_)
        {
          vpx_ = input_->sensor_origin_.coeff (0);
          vpy_ = input_->sensor_origin_.coeff (1);
          vpz_ = input_->sensor_origin_.coeff (2);
        }

        // Initialize the correct data structure based on the normal estimation method chosen
        initData ();
      }
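      // A brief sketch (hypothetical `cloud` variable): only organized clouds (height > 1), such as
      // those produced by depth cameras, are accepted here; for unorganized clouds a different
      // estimator such as pcl::NormalEstimation can be used instead.
      //
      //   if (cloud->isOrganized ())
      //     ne.setInputCloud (cloud);
      //   else
      //     PCL_WARN ("Unorganized cloud; integral-image normal estimation is not applicable.\n");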

      /** \brief Returns a pointer to the distance map which was computed internally. */
      inline float*
      getDistanceMap ()
      {
        return (distance_map_);
      }

      /** \brief Set the viewpoint.
        * \param vpx the X coordinate of the viewpoint
        * \param vpy the Y coordinate of the viewpoint
        * \param vpz the Z coordinate of the viewpoint
        */
      inline void
      setViewPoint (float vpx, float vpy, float vpz)
      {
        vpx_ = vpx;
        vpy_ = vpy;
        vpz_ = vpz;
        use_sensor_origin_ = false;
      }

      /** \brief Get the viewpoint.
        * \param[out] vpx x-coordinate of the view point
        * \param[out] vpy y-coordinate of the view point
        * \param[out] vpz z-coordinate of the view point
        * \note this method returns the currently used viewpoint for normal flipping.
        * If the viewpoint is set manually using the setViewPoint method, this method will return the set view point coordinates.
        * If an input cloud is set, it will return the sensor origin; otherwise it will return the origin (0, 0, 0).
        */
      inline void
      getViewPoint (float &vpx, float &vpy, float &vpz)
      {
        vpx = vpx_;
        vpy = vpy_;
        vpz = vpz_;
      }

      /** \brief Use the sensor origin of the input cloud as the viewpoint. After calling this method,
        * normal flipping uses the sensor origin of the input cloud.
        * To use a user-defined viewpoint instead, use the method setViewPoint.
        */
      inline void
      useSensorOriginAsViewPoint ()
      {
        use_sensor_origin_ = true;
        if (input_)
        {
          vpx_ = input_->sensor_origin_.coeff (0);
          vpy_ = input_->sensor_origin_.coeff (1);
          vpz_ = input_->sensor_origin_.coeff (2);
        }
        else
        {
          vpx_ = 0;
          vpy_ = 0;
          vpz_ = 0;
        }
      }
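      // A brief sketch of the viewpoint handling (coordinates are hypothetical): setting a viewpoint
      // manually disables the use of the sensor origin until it is explicitly re-enabled.
      //
      //   ne.setViewPoint (0.0f, 0.0f, 1.5f);   // flip normals towards a custom viewpoint
      //   ne.useSensorOriginAsViewPoint ();     // revert to the input cloud's sensor origin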

    protected:

      /** \brief Computes the normal for the complete cloud or only \a indices_ if provided.
        * \param[out] output the resultant normals
        */
      void
      computeFeature (PointCloudOut &output);

      /** \brief Computes the normal for the complete cloud.
        * \param[in] distance_map distance map
        * \param[in] bad_point constant given to invalid normal components
        * \param[out] output the resultant normals
        */
      void
      computeFeatureFull (const float* distance_map, const float& bad_point, PointCloudOut& output);

      /** \brief Computes the normal for part of the cloud specified by \a indices_
        * \param[in] distance_map distance map
        * \param[in] bad_point constant given to invalid normal components
        * \param[out] output the resultant normals
        */
      void
      computeFeaturePart (const float* distance_map, const float& bad_point, PointCloudOut& output);

      /** \brief Initialize the data structures, based on the normal estimation method chosen. */
      void
      initData ();

    private:

      /** \brief Flip (in place) the estimated normal of a point towards a given viewpoint
        * \param point a given point
        * \param vp_x the X coordinate of the viewpoint
        * \param vp_y the Y coordinate of the viewpoint
        * \param vp_z the Z coordinate of the viewpoint
        * \param nx the resultant X component of the plane normal
        * \param ny the resultant Y component of the plane normal
        * \param nz the resultant Z component of the plane normal
        * \ingroup features
        */
      inline void
      flipNormalTowardsViewpoint (const PointInT &point,
                                  float vp_x, float vp_y, float vp_z,
                                  float &nx, float &ny, float &nz)
      {
        // See if we need to flip any plane normals
        vp_x -= point.x;
        vp_y -= point.y;
        vp_z -= point.z;

        // Dot product between the (viewpoint - point) vector and the plane normal
        float cos_theta = (vp_x * nx + vp_y * ny + vp_z * nz);

        // Flip the plane normal if it points away from the viewpoint
        if (cos_theta < 0)
        {
          nx *= -1;
          ny *= -1;
          nz *= -1;
        }
      }

      /** \brief The normal estimation method to use. Currently, four implementations are provided:
        *
        * - COVARIANCE_MATRIX
        * - AVERAGE_3D_GRADIENT
        * - AVERAGE_DEPTH_CHANGE
        * - SIMPLE_3D_GRADIENT
        */
      NormalEstimationMethod normal_estimation_method_;

      /** \brief The policy for handling borders. */
      BorderPolicy border_policy_;

      /** The width of the neighborhood region used for computing the normal. */
      int rect_width_;
      int rect_width_2_;
      int rect_width_4_;
      /** The height of the neighborhood region used for computing the normal. */
      int rect_height_;
      int rect_height_2_;
      int rect_height_4_;

      /** the threshold used to detect depth discontinuities */
      float distance_threshold_;

      /** integral image in x-direction */
      IntegralImage2D<float, 3> integral_image_DX_;
      /** integral image in y-direction */
      IntegralImage2D<float, 3> integral_image_DY_;
      /** integral image */
      IntegralImage2D<float, 1> integral_image_depth_;
      /** integral image xyz */
      IntegralImage2D<float, 3> integral_image_XYZ_;

      /** derivatives in x-direction */
      float *diff_x_;
      /** derivatives in y-direction */
      float *diff_y_;

      /** depth data */
      float *depth_data_;

      /** distance map */
      float *distance_map_;

      /** \brief Smooth data based on depth (true/false). */
      bool use_depth_dependent_smoothing_;

      /** \brief Threshold for detecting depth discontinuities */
      float max_depth_change_factor_;

      /** \brief Factor which influences the size of the area used to smooth normals. */
      float normal_smoothing_size_;

      /** \brief True when a dataset has been received and the covariance_matrix data has been initialized. */
      bool init_covariance_matrix_;

      /** \brief True when a dataset has been received and the average 3d gradient data has been initialized. */
      bool init_average_3d_gradient_;

      /** \brief True when a dataset has been received and the simple 3d gradient data has been initialized. */
      bool init_simple_3d_gradient_;

      /** \brief True when a dataset has been received and the depth change data has been initialized. */
      bool init_depth_change_;

      /** \brief Values describing the viewpoint ("pinhole" camera model assumed). For per point viewpoints, inherit
        * from NormalEstimation and provide your own computeFeature (). By default, the viewpoint is set to 0,0,0. */
      float vpx_, vpy_, vpz_;

      /** whether the sensor origin of the input cloud or a user given viewpoint should be used */
      bool use_sensor_origin_;

      /** \brief This method should get called before starting the actual computation. */
      bool
      initCompute ();

      /** \brief Internal initialization method for COVARIANCE_MATRIX estimation. */
      void
      initCovarianceMatrixMethod ();

      /** \brief Internal initialization method for AVERAGE_3D_GRADIENT estimation. */
      void
      initAverage3DGradientMethod ();

      /** \brief Internal initialization method for AVERAGE_DEPTH_CHANGE estimation. */
      void
      initAverageDepthChangeMethod ();

      /** \brief Internal initialization method for SIMPLE_3D_GRADIENT estimation. */
      void
      initSimple3DGradientMethod ();

    public:
      EIGEN_MAKE_ALIGNED_OPERATOR_NEW
  };
}
#if defined BUILD_Maintainer && defined __GNUC__ && __GNUC__ == 4 && __GNUC_MINOR__ > 3
#pragma GCC diagnostic warning "-Weffc++"
#endif

#ifdef PCL_NO_PRECOMPILE
#include <pcl/features/impl/integral_image_normal.hpp>
#endif

#endif