Visual Servoing Platform version 3.6.1 under development (2024-11-15)
vpPoseRGBD.cpp
/*
 * ViSP, open source Visual Servoing Platform software.
 * Copyright (C) 2005 - 2024 by Inria. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact Inria about acquiring a ViSP Professional
 * Edition License.
 *
 * See https://visp.inria.fr for more information.
 *
 * This software was developed at:
 * Inria Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 *
 * If you have questions regarding the use of this file, please contact
 * Inria at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Description:
 * Pose computation from RGBD.
 */

#include <visp3/core/vpPixelMeterConversion.h>
#include <visp3/core/vpPlane.h>
#include <visp3/core/vpPolygon.h>
#include <visp3/core/vpRobust.h>
#include <visp3/vision/vpPose.h>

BEGIN_VISP_NAMESPACE

// See also vpPlaneEstimation.cpp that implements the same functionality in C++17
void estimatePlaneEquationSVD(const std::vector<double> &point_cloud_face, vpPlane &plane_equation_estimated,
                              vpColVector &centroid, double &normalized_weights)
{
  const unsigned int max_iter = 10;
  double prev_error = 1e3;
  double error = 1e3 - 1;
  const unsigned int size3DPt = 3;
  unsigned int nPoints = static_cast<unsigned int>(point_cloud_face.size() / size3DPt);
  const unsigned int idX = 0;
  const unsigned int idY = 1;
  const unsigned int idZ = 2;

  vpColVector weights(nPoints, 1.0);
  vpColVector residues(nPoints);
  vpMatrix M(nPoints, size3DPt);
  vpRobust tukey;
  tukey.setMinMedianAbsoluteDeviation(1e-4);
  vpColVector normal;

  double fabs_error_m_prev_error = std::fabs(error - prev_error);
  unsigned int iter = 0;
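  // Robust plane fitting by iteratively reweighted least squares: at each iteration the Tukey
  // M-estimator converts the previous point-to-plane residuals into weights, a weighted centroid
  // is computed, and the plane normal is obtained from the SVD of the weighted, centered points.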
  while ((iter < max_iter) && (fabs_error_m_prev_error > 1e-6)) {
    if (iter != 0) {
      tukey.MEstimator(vpRobust::TUKEY, residues, weights);
    }

    // Compute centroid
    double centroid_x = 0.0, centroid_y = 0.0, centroid_z = 0.0;
    double total_w = 0.0;

    for (unsigned int i = 0; i < nPoints; ++i) {
      centroid_x += weights[i] * point_cloud_face[(size3DPt * i) + idX];
      centroid_y += weights[i] * point_cloud_face[(size3DPt * i) + idY];
      centroid_z += weights[i] * point_cloud_face[(size3DPt * i) + idZ];
      total_w += weights[i];
    }

    centroid_x /= total_w;
    centroid_y /= total_w;
    centroid_z /= total_w;

    // Minimization
    for (unsigned int i = 0; i < nPoints; ++i) {
      M[i][idX] = weights[i] * (point_cloud_face[(size3DPt * i) + idX] - centroid_x);
      M[i][idY] = weights[i] * (point_cloud_face[(size3DPt * i) + idY] - centroid_y);
      M[i][idZ] = weights[i] * (point_cloud_face[(size3DPt * i) + idZ] - centroid_z);
    }

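    // The plane normal is the right singular vector of M associated with the smallest singular
    // value, i.e. the eigenvector of J = M^T * M corresponding to its smallest eigenvalue.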
    vpColVector W;
    vpMatrix V;
    vpMatrix J = M.t() * M;
    J.svd(W, V);

    double smallestSv = W[0];
    unsigned int indexSmallestSv = 0;
    unsigned int w_size = W.size();
    for (unsigned int i = 1; i < w_size; ++i) {
      if (W[i] < smallestSv) {
        smallestSv = W[i];
        indexSmallestSv = i;
      }
    }

    normal = V.getCol(indexSmallestSv);

    // Compute the plane equation
    double A = normal[idX];
    double B = normal[idY];
    double C = normal[idZ];
    double D = -((A * centroid_x) + (B * centroid_y) + (C * centroid_z));

    // Compute the error of the points to the estimated plane
    prev_error = error;
    error = 0.0;
    for (unsigned int i = 0; i < nPoints; ++i) {
      residues[i] = std::fabs((A * point_cloud_face[(size3DPt * i) + idX]) + (B * point_cloud_face[(size3DPt * i) + idY]) +
                              (C * point_cloud_face[(size3DPt * i) + idZ]) + D) /
                    sqrt((A * A) + (B * B) + (C * C));
      error += weights[i] * residues[i];
    }
    error /= total_w;
    // Evaluate one of the end conditions of the while loop
    fabs_error_m_prev_error = std::fabs(error - prev_error);

    ++iter;
  }

  // Update final weights
  tukey.MEstimator(vpRobust::TUKEY, residues, weights);

  // Update final centroid (resize() with its default flag zero-initializes the vector,
  // so that the weighted accumulation below starts from zero)
  centroid.resize(size3DPt);
  double total_w = 0.0;

  for (unsigned int i = 0; i < nPoints; ++i) {
    centroid[idX] += weights[i] * point_cloud_face[(size3DPt * i) + idX];
    centroid[idY] += weights[i] * point_cloud_face[(size3DPt * i) + idY];
    centroid[idZ] += weights[i] * point_cloud_face[(size3DPt * i) + idZ];
    total_w += weights[i];
  }

  centroid[idX] /= total_w;
  centroid[idY] /= total_w;
  centroid[idZ] /= total_w;

  // Compute final plane equation
  double A = normal[idX], B = normal[idY], C = normal[idZ];
  double D = -((A * centroid[idX]) + (B * centroid[idY]) + (C * centroid[idZ]));

  // Update final plane equation
  plane_equation_estimated.setABCD(A, B, C, D);

  normalized_weights = total_w / nPoints;
}

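/*
 * Usage sketch for estimatePlaneEquationSVD() (illustrative only; the variable names below, such as
 * point_cloud, are placeholders and not names used elsewhere in ViSP). The point cloud is a flat
 * vector laid out as [X0, Y0, Z0, X1, Y1, Z1, ...], with coordinates expressed in the camera frame:
 *
 *   std::vector<double> point_cloud = { 0.0, 0.0, 1.0,  0.1, 0.0, 1.0,  0.0, 0.1, 1.0,  0.1, 0.1, 1.0 };
 *   vpPlane plane;
 *   vpColVector centroid;
 *   double normalized_weights = 0;
 *   estimatePlaneEquationSVD(point_cloud, plane, centroid, normalized_weights);
 *   double Z = plane.computeZ(0.05, 0.05); // depth of the estimated plane at normalized coordinates (x, y)
 */

// Compute the pose cMo of a planar object from a depth map and the 2D corners of the object in the
// color image. The depth samples lying inside the corner polygon are used to robustly fit a plane,
// the corners are back-projected onto that plane to build 3D-3D correspondences with point3d, an
// initial cMo is obtained from compute3d3dTransformation() and the pose is then refined by virtual
// visual servoing. The optional confidence_index in [0, 1] reflects how densely valid depth samples
// covered the polygon.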
bool vpPose::computePlanarObjectPoseFromRGBD(const vpImage<float> &depthMap, const std::vector<vpImagePoint> &corners,
                                             const vpCameraParameters &colorIntrinsics,
                                             const std::vector<vpPoint> &point3d, vpHomogeneousMatrix &cMo,
                                             double *confidence_index)
{
  if (corners.size() != point3d.size()) {
    throw(vpException(vpException::fatalError,
                      "Cannot compute pose from RGBD, 3D (%d) and 2D (%d) data don't have the same size",
                      point3d.size(), corners.size()));
  }
  std::vector<vpPoint> pose_points;
  if (confidence_index != nullptr) {
    *confidence_index = 0.0;
  }

  size_t point3d_size = point3d.size();
  for (size_t i = 0; i < point3d_size; ++i) {
    pose_points.push_back(point3d[i]);
  }

  vpPolygon polygon(corners);
  vpRect bb = polygon.getBoundingBox();
  unsigned int top = static_cast<unsigned int>(std::max<int>(0, static_cast<int>(bb.getTop())));
  unsigned int bottom =
    static_cast<unsigned int>(std::min<int>(static_cast<int>(depthMap.getHeight()) - 1, static_cast<int>(bb.getBottom())));
  unsigned int left = static_cast<unsigned int>(std::max<int>(0, static_cast<int>(bb.getLeft())));
  unsigned int right =
    static_cast<unsigned int>(std::min<int>(static_cast<int>(depthMap.getWidth()) - 1, static_cast<int>(bb.getRight())));

  std::vector<double> points_3d;
  points_3d.reserve((bottom - top) * (right - left));
  for (unsigned int idx_i = top; idx_i < bottom; ++idx_i) {
    for (unsigned int idx_j = left; idx_j < right; ++idx_j) {
      vpImagePoint imPt(idx_i, idx_j);
      if ((depthMap[idx_i][idx_j] > 0) && (polygon.isInside(imPt))) {
        double x = 0, y = 0;
        vpPixelMeterConversion::convertPoint(colorIntrinsics, imPt.get_u(), imPt.get_v(), x, y);
        double Z = depthMap[idx_i][idx_j];
        points_3d.push_back(x * Z);
        points_3d.push_back(y * Z);
        points_3d.push_back(Z);
      }
    }
  }

  unsigned int nb_points_3d = static_cast<unsigned int>(points_3d.size() / 3);

  const unsigned int minNbPoints = 4;
  if (nb_points_3d > minNbPoints) {
    std::vector<vpPoint> p, q;

    // Plane equation
    vpPlane plane_equation;
    vpColVector centroid;
    double normalized_weights = 0;
    estimatePlaneEquationSVD(points_3d, plane_equation, centroid, normalized_weights);

    size_t corners_size = corners.size();
    for (size_t j = 0; j < corners_size; ++j) {
      const vpImagePoint &imPt = corners[j];
      double x = 0, y = 0;
      vpPixelMeterConversion::convertPoint(colorIntrinsics, imPt.get_u(), imPt.get_v(), x, y);
      double Z = plane_equation.computeZ(x, y);
      if (Z < 0) {
        Z = -Z;
      }
      p.push_back(vpPoint(x * Z, y * Z, Z));

      pose_points[j].set_x(x);
      pose_points[j].set_y(y);
    }

    size_t point3d_size = point3d.size();
    for (size_t i = 0; i < point3d_size; ++i) {
      q.push_back(point3d[i]);
    }

    cMo = vpHomogeneousMatrix::compute3d3dTransformation(p, q);

    if (cMo.isValid()) {
      vpPose pose;
      pose.addPoints(pose_points);
      if (pose.computePose(vpPose::VIRTUAL_VS, cMo)) {
        if (confidence_index != nullptr) {
          *confidence_index = std::min<double>(1.0, (normalized_weights * static_cast<double>(nb_points_3d)) / polygon.getArea());
        }
        return true;
      }
    }
  }

  return false;
}

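/*
 * Usage sketch for the single-object overload (illustrative only; depthMap, corners and colorIntrinsics
 * are assumed to be filled by the caller, e.g. from an RGB-D grabber and a tag detector, and the model
 * points are expressed in the object frame, in the same order as the detected corners):
 *
 *   vpImage<float> depthMap;              // depth expressed in meters, aligned with the color image
 *   std::vector<vpImagePoint> corners;    // 4 detected corners of the planar object
 *   vpCameraParameters colorIntrinsics;   // intrinsics of the color camera
 *   std::vector<vpPoint> model(4);
 *   model[0].setWorldCoordinates(-0.05, -0.05, 0);
 *   model[1].setWorldCoordinates( 0.05, -0.05, 0);
 *   model[2].setWorldCoordinates( 0.05,  0.05, 0);
 *   model[3].setWorldCoordinates(-0.05,  0.05, 0);
 *   vpHomogeneousMatrix cMo;
 *   double confidence = 0;
 *   bool success = vpPose::computePlanarObjectPoseFromRGBD(depthMap, corners, colorIntrinsics,
 *                                                          model, cMo, &confidence);
 */

// Overload for several planar objects (e.g. several tags) seen in the same image. Each entry of
// corners / point3d describes one object. When coplanar_points is true, a single plane is fitted to
// the depth samples of all the objects; otherwise one plane is estimated per object and objects
// without enough valid depth samples are skipped.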
bool vpPose::computePlanarObjectPoseFromRGBD(const vpImage<float> &depthMap,
                                             const std::vector<std::vector<vpImagePoint> > &corners,
                                             const vpCameraParameters &colorIntrinsics,
                                             const std::vector<std::vector<vpPoint> > &point3d,
                                             vpHomogeneousMatrix &cMo, double *confidence_index, bool coplanar_points)
{
  const size_t nb3dPoints = point3d.size();
  const size_t nbCorners = corners.size();
  if (nbCorners != nb3dPoints) {
    throw(vpException(vpException::fatalError,
                      "Cannot compute pose from RGBD, 3D (%d) and 2D (%d) data don't have the same size",
                      nb3dPoints, nbCorners));
  }
  std::vector<vpPoint> pose_points;
  if (confidence_index != nullptr) {
    *confidence_index = 0.0;
  }

  for (size_t i = 0; i < nb3dPoints; ++i) {
    std::vector<vpPoint> tagPoint3d = point3d[i];
    size_t tagpoint3d_size = tagPoint3d.size();
    for (size_t j = 0; j < tagpoint3d_size; ++j) {
      pose_points.push_back(tagPoint3d[j]);
    }
  }

  // Total area of the polygons, used to compute the confidence index
  double totalArea = 0.0;

  // If coplanar_points is true, tag_points_3d is used to estimate a single plane common to all the objects
  std::vector<double> tag_points_3d;

  // Otherwise, each inner vector of tag_points_3d_nonplanar is used to estimate the plane of its own object
  std::vector<std::vector<double> > tag_points_3d_nonplanar;
  size_t nb_points_3d_non_planar = 0;

  // Loop over each object and compute its 3D point cloud
  size_t corners_size = corners.size();
  for (size_t i = 0; i < corners_size; ++i) {
    std::vector<double> points_3d;
    vpPolygon polygon(corners[i]);
    vpRect bb = polygon.getBoundingBox();

    // The final confidence index is computed over the total area of the tags
    totalArea += polygon.getArea();

    unsigned int top = static_cast<unsigned int>(std::max<int>(0, static_cast<int>(bb.getTop())));
    unsigned int bottom = static_cast<unsigned int>(
      std::min<int>(static_cast<int>(depthMap.getHeight()) - 1, static_cast<int>(bb.getBottom())));
    unsigned int left = static_cast<unsigned int>(std::max<int>(0, static_cast<int>(bb.getLeft())));
    unsigned int right =
      static_cast<unsigned int>(std::min<int>(static_cast<int>(depthMap.getWidth()) - 1, static_cast<int>(bb.getRight())));

    points_3d.reserve((bottom - top) * (right - left));
    for (unsigned int idx_i = top; idx_i < bottom; ++idx_i) {
      for (unsigned int idx_j = left; idx_j < right; ++idx_j) {
        vpImagePoint imPt(idx_i, idx_j);
        if ((depthMap[idx_i][idx_j] > 0) && (polygon.isInside(imPt))) {
          double x = 0, y = 0;
          vpPixelMeterConversion::convertPoint(colorIntrinsics, imPt.get_u(), imPt.get_v(), x, y);
          double Z = depthMap[idx_i][idx_j];
          points_3d.push_back(x * Z);
          points_3d.push_back(y * Z);
          points_3d.push_back(Z);
        }
      }
    }

    // If coplanar_points is true, feed all the 3D points into a single vector;
    // otherwise each vector holds the 3D points of a separate plane
    if (coplanar_points) {
      tag_points_3d.insert(tag_points_3d.end(), points_3d.begin(), points_3d.end());
    }
    else {
      tag_points_3d_nonplanar.push_back(points_3d);
      nb_points_3d_non_planar += points_3d.size();
    }
  }

  size_t nb_points_3d = 0;
  const size_t sizePt3D = 3;

  if (coplanar_points) {
    nb_points_3d = tag_points_3d.size() / sizePt3D;
  }
  else {
    nb_points_3d = nb_points_3d_non_planar / sizePt3D;
  }

  const size_t minNbPts = 4;
  if (nb_points_3d > minNbPts) {
    std::vector<vpPoint> p, q;

    // Plane equation
    vpPlane plane_equation;
    vpColVector centroid;
    double normalized_weights = 0;

    if (coplanar_points) {
      // If all the objects are coplanar, use the points inside tag_points_3d to estimate a single plane
      estimatePlaneEquationSVD(tag_points_3d, plane_equation, centroid, normalized_weights);
      int count = 0;
      for (size_t j = 0; j < nbCorners; ++j) {
        std::vector<vpImagePoint> tag_corner = corners[j];
        size_t tag_corner_size = tag_corner.size();
        for (size_t i = 0; i < tag_corner_size; ++i) {
          const vpImagePoint &imPt = tag_corner[i];
          double x = 0, y = 0;
          vpPixelMeterConversion::convertPoint(colorIntrinsics, imPt.get_u(), imPt.get_v(), x, y);
          double Z = plane_equation.computeZ(x, y);
          if (Z < 0) {
            Z = -Z;
          }
          p.push_back(vpPoint(x * Z, y * Z, Z));
          pose_points[count].set_x(x);
          pose_points[count].set_y(y);
          ++count;
        }
      }
    }
    else {
      // If the objects are not coplanar, estimate a plane for each of them
      size_t count = 0;

      size_t tag_points_3d_nonplanar_size = tag_points_3d_nonplanar.size();
      for (size_t k = 0; k < tag_points_3d_nonplanar_size; ++k) {
        std::vector<double> rec_points_3d = tag_points_3d_nonplanar[k];
        double tag_normalized_weights = 0;
        const size_t minNbPtsForPlaneSVD = 3;
        const size_t minSizeForPlaneSVD = minNbPtsForPlaneSVD * sizePt3D;
        if (rec_points_3d.size() >= minSizeForPlaneSVD) {
          // The array must contain at least 3 points for estimatePlaneEquationSVD() not to crash
          estimatePlaneEquationSVD(rec_points_3d, plane_equation, centroid, tag_normalized_weights);
          normalized_weights += tag_normalized_weights;

          // Get the 2D corner points of the object whose plane was just estimated
          std::vector<vpImagePoint> tag_corner = corners[k];

          size_t tag_corner_size = tag_corner.size();
          for (size_t i = 0; i < tag_corner_size; ++i) {
            const vpImagePoint &imPt = tag_corner[i];
            double x = 0, y = 0;
            vpPixelMeterConversion::convertPoint(colorIntrinsics, imPt.get_u(), imPt.get_v(), x, y);
            double Z = plane_equation.computeZ(x, y);

            if (Z < 0) {
              Z = -Z;
            }
            p.push_back(vpPoint(x * Z, y * Z, Z));
            pose_points[count].set_x(x);
            pose_points[count].set_y(y);
            ++count;
          }
        }
        else {
          // Sometimes an object does not have enough registered points, because it is small or because of a
          // bad alignment between the depth and RGB streams. This was observed with an Orbbec camera, while
          // RealSense cameras were fine. To avoid an exception in computePose(), skip this object and simply
          // advance count past its corners.
          count += corners[k].size();
        }
      }
      normalized_weights = normalized_weights / tag_points_3d_nonplanar.size();
    }

    for (size_t i = 0; i < nb3dPoints; ++i) {
      std::vector<vpPoint> tagPoint3d = point3d[i];
      // Sometimes an object does not have enough registered points because of its small size.
      // The issue was observed with an Orbbec camera, while RealSense cameras were fine.
      // To prevent a wrong estimation or an exception (p and q having different sizes),
      // skip the object when its recomputed vector (tag_points_3d_nonplanar[i]) is empty.
      if (coplanar_points) {
        size_t tag_point3d_size = tagPoint3d.size();
        for (size_t j = 0; j < tag_point3d_size; ++j) {
          q.push_back(tagPoint3d[j]);
        }
      }
      else {
        if (tag_points_3d_nonplanar[i].size() > 0) {
          size_t tag_point3d_size = tagPoint3d.size();
          for (size_t j = 0; j < tag_point3d_size; ++j) {
            q.push_back(tagPoint3d[j]);
          }
        }
      }
    }

    // Since q may contain fewer points than p, make sure both have the same size before
    // computing the 3D-3D transformation
    if (p.size() == q.size()) {
      cMo = vpHomogeneousMatrix::compute3d3dTransformation(p, q);

      if (cMo.isValid()) {
        vpPose pose;
        pose.addPoints(pose_points);
        if (pose.computePose(vpPose::VIRTUAL_VS, cMo)) {
          if (confidence_index != nullptr) {
            *confidence_index = std::min<double>(1.0, (normalized_weights * static_cast<double>(nb_points_3d)) / totalArea);
          }
          return true;
        }
      }
    }
  }
  return false;
}
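
/*
 * Usage sketch for the multi-object overload (illustrative only; the inputs are assumed to come from a
 * detector that provides the corners of each tag together with the matching 3D model points):
 *
 *   vpImage<float> depthMap;                               // depth aligned with the color image
 *   vpCameraParameters colorIntrinsics;                    // intrinsics of the color camera
 *   std::vector<std::vector<vpImagePoint> > tags_corners;  // one vector of corners per tag
 *   std::vector<std::vector<vpPoint> > tags_points3d;      // matching 3D model points per tag
 *   vpHomogeneousMatrix cMo;
 *   double confidence = 0;
 *   bool success = vpPose::computePlanarObjectPoseFromRGBD(depthMap, tags_corners, colorIntrinsics,
 *                                                          tags_points3d, cMo, &confidence,
 *                                                          true); // true: all the tags lie on the same plane
 */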

END_VISP_NAMESPACE