Visual Servoing Platform  version 3.5.0 under development (2022-02-15)
vpMbtFaceDepthNormal.cpp
1 /****************************************************************************
2  *
3  * ViSP, open source Visual Servoing Platform software.
4  * Copyright (C) 2005 - 2019 by Inria. All rights reserved.
5  *
6  * This software is free software; you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License as published by
8  * the Free Software Foundation; either version 2 of the License, or
9  * (at your option) any later version.
10  * See the file LICENSE.txt at the root directory of this source
11  * distribution for additional information about the GNU GPL.
12  *
13  * For using ViSP with software that can not be combined with the GNU
14  * GPL, please contact Inria about acquiring a ViSP Professional
15  * Edition License.
16  *
17  * See http://visp.inria.fr for more information.
18  *
19  * This software was developed at:
20  * Inria Rennes - Bretagne Atlantique
21  * Campus Universitaire de Beaulieu
22  * 35042 Rennes Cedex
23  * France
24  *
25  * If you have questions regarding the use of this file, please contact
26  * Inria at visp@inria.fr
27  *
28  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
29  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
30  *
31  * Description:
32  * Manage depth normal features for a particular face.
33  *
34  *****************************************************************************/
35 
36 #include <visp3/core/vpCPUFeatures.h>
37 #include <visp3/mbt/vpMbtFaceDepthNormal.h>
38 #include <visp3/mbt/vpMbtTukeyEstimator.h>
39 
40 #ifdef VISP_HAVE_PCL
41 #include <pcl/common/centroid.h>
42 #include <pcl/filters/extract_indices.h>
43 #include <pcl/segmentation/sac_segmentation.h>
44 #endif
45 
46 #if defined __SSE2__ || defined _M_X64 || (defined _M_IX86_FP && _M_IX86_FP >= 2)
47 #include <emmintrin.h>
48 #define VISP_HAVE_SSE2 1
49 #endif
50 
51 #define USE_SSE_CODE 1
52 #if VISP_HAVE_SSE2 && USE_SSE_CODE
53 #define USE_SSE 1
54 #else
55 #define USE_SSE 0
56 #endif
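// USE_SSE enables the hand-written SSE2 code paths below only when the compiler targets SSE2
// (VISP_HAVE_SSE2) and USE_SSE_CODE is left at 1. Each SSE2 path is additionally guarded at
// run time by vpCPUFeatures::checkSSE2() before it is taken.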
57 
58 vpMbtFaceDepthNormal::vpMbtFaceDepthNormal()
59  : m_cam(), m_clippingFlag(vpPolygon3D::NO_CLIPPING), m_distFarClip(100), m_distNearClip(0.001), m_hiddenFace(NULL),
60  m_planeObject(), m_polygon(NULL), m_useScanLine(false), m_faceActivated(false),
61  m_faceCentroidMethod(GEOMETRIC_CENTROID), m_faceDesiredCentroid(), m_faceDesiredNormal(),
62  m_featureEstimationMethod(ROBUST_FEATURE_ESTIMATION), m_isTrackedDepthNormalFace(true), m_isVisible(false),
63  m_listOfFaceLines(), m_planeCamera(),
64  m_pclPlaneEstimationMethod(2), // SAC_MSAC, see pcl/sample_consensus/method_types.h
65  m_pclPlaneEstimationRansacMaxIter(200), m_pclPlaneEstimationRansacThreshold(0.001), m_polygonLines()
66 {
67 }
68 
69 vpMbtFaceDepthNormal::~vpMbtFaceDepthNormal()
70 {
71  for (size_t i = 0; i < m_listOfFaceLines.size(); i++) {
72  delete m_listOfFaceLines[i];
73  }
74 }
75 
90 void vpMbtFaceDepthNormal::addLine(vpPoint &P1, vpPoint &P2, vpMbHiddenFaces<vpMbtPolygon> *const faces, vpUniRand& rand_gen, int polygon,
91  std::string name)
92 {
93  // Build a PolygonLine to be able to easily display the lines model
94  PolygonLine polygon_line;
95 
96  // Add polygon
97  polygon_line.m_poly.setNbPoint(2);
98  polygon_line.m_poly.addPoint(0, P1);
99  polygon_line.m_poly.addPoint(1, P2);
100 
101  polygon_line.m_poly.setClipping(m_clippingFlag);
102  polygon_line.m_poly.setNearClippingDistance(m_distNearClip);
103  polygon_line.m_poly.setFarClippingDistance(m_distFarClip);
104 
105  polygon_line.m_p1 = &polygon_line.m_poly.p[0];
106  polygon_line.m_p2 = &polygon_line.m_poly.p[1];
107 
108  m_polygonLines.push_back(polygon_line);
109 
110  // Do not add the line if it is already in the model
111  bool already_here = false;
112  vpMbtDistanceLine *l;
113 
114  for (std::vector<vpMbtDistanceLine *>::const_iterator it = m_listOfFaceLines.begin(); it != m_listOfFaceLines.end();
115  ++it) {
116  l = *it;
117  if ((samePoint(*(l->p1), P1) && samePoint(*(l->p2), P2)) || (samePoint(*(l->p1), P2) && samePoint(*(l->p2), P1))) {
118  already_here = true;
119  l->addPolygon(polygon);
120  l->hiddenface = faces;
121  l->useScanLine = m_useScanLine;
122  }
123  }
124 
125  if (!already_here) {
126  l = new vpMbtDistanceLine;
127 
128  l->setCameraParameters(m_cam);
129  l->buildFrom(P1, P2, rand_gen);
130  l->addPolygon(polygon);
131  l->hiddenface = faces;
132  l->useScanLine = m_useScanLine;
133 
134  l->setIndex((unsigned int)m_listOfFaceLines.size());
135  l->setName(name);
136 
137  if (m_clippingFlag != vpPolygon3D::NO_CLIPPING)
138  l->getPolygon().setClipping(m_clippingFlag);
139 
140  if ((m_clippingFlag & vpPolygon3D::NEAR_CLIPPING) == vpPolygon3D::NEAR_CLIPPING)
141  l->getPolygon().setNearClippingDistance(m_distNearClip);
142 
143  if ((m_clippingFlag & vpPolygon3D::FAR_CLIPPING) == vpPolygon3D::FAR_CLIPPING)
144  l->getPolygon().setFarClippingDistance(m_distFarClip);
145 
146  m_listOfFaceLines.push_back(l);
147  }
148 }
149 
150 #ifdef VISP_HAVE_PCL
151 bool vpMbtFaceDepthNormal::computeDesiredFeatures(const vpHomogeneousMatrix &cMo, unsigned int width,
152  unsigned int height,
153  const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud,
154  vpColVector &desired_features, unsigned int stepX,
155  unsigned int stepY
156 #if DEBUG_DISPLAY_DEPTH_NORMAL
157  ,
158  vpImage<unsigned char> &debugImage,
159  std::vector<std::vector<vpImagePoint> > &roiPts_vec
160 #endif
161  , const vpImage<bool> *mask
162 )
163 {
164  m_faceActivated = false;
165 
166  if (width == 0 || height == 0)
167  return false;
168 
169  std::vector<vpImagePoint> roiPts;
170  vpColVector desired_normal(3);
171 
172  computeROI(cMo, width, height, roiPts
173 #if DEBUG_DISPLAY_DEPTH_NORMAL
174  ,
175  roiPts_vec
176 #endif
177  );
178 
179  if (roiPts.size() <= 2) {
180 #ifndef NDEBUG
181  std::cerr << "Error: roiPts.size() <= 2 in computeDesiredFeatures" << std::endl;
182 #endif
183  return false;
184  }
185 
186  vpPolygon polygon_2d(roiPts);
187  vpRect bb = polygon_2d.getBoundingBox();
188 
189  unsigned int top = (unsigned int)std::max(0.0, bb.getTop());
190  unsigned int bottom = (unsigned int)std::min((double)height, std::max(0.0, bb.getBottom()));
191  unsigned int left = (unsigned int)std::max(0.0, bb.getLeft());
192  unsigned int right = (unsigned int)std::min((double)width, std::max(0.0, bb.getRight()));
193 
194  bb.setTop(top);
195  bb.setBottom(bottom);
196  bb.setLeft(left);
197  bb.setRight(right);
198 
199  // Keep only 3D points inside the projected polygon face
200  pcl::PointCloud<pcl::PointXYZ>::Ptr point_cloud_face(new pcl::PointCloud<pcl::PointXYZ>);
201  std::vector<double> point_cloud_face_vec, point_cloud_face_custom;
202 
203  if (m_featureEstimationMethod == ROBUST_FEATURE_ESTIMATION) {
204  point_cloud_face_custom.reserve((size_t)(3 * bb.getWidth() * bb.getHeight()));
205  point_cloud_face_vec.reserve((size_t)(3 * bb.getWidth() * bb.getHeight()));
206  } else if (m_featureEstimationMethod == ROBUST_SVD_PLANE_ESTIMATION) {
207  point_cloud_face_vec.reserve((size_t)(3 * bb.getWidth() * bb.getHeight()));
208  } else if (m_featureEstimationMethod == PCL_PLANE_ESTIMATION) {
209  point_cloud_face->reserve((size_t)(bb.getWidth() * bb.getHeight()));
210  }
211 
212  bool checkSSE2 = vpCPUFeatures::checkSSE2();
213 #if !USE_SSE
214  checkSSE2 = false;
215 #else
216  bool push = false;
217  double prev_x, prev_y, prev_z;
218 #endif
219 
220  double x = 0.0, y = 0.0;
221  for (unsigned int i = top; i < bottom; i += stepY) {
222  for (unsigned int j = left; j < right; j += stepX) {
223  if (vpMeTracker::inMask(mask, i, j) && pcl::isFinite((*point_cloud)(j, i)) && (*point_cloud)(j, i).z > 0 &&
224  (m_useScanLine ? (i < m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs().getHeight() &&
225  j < m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs().getWidth() &&
226  m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs()[i][j] == m_polygon->getIndex())
227  : polygon_2d.isInside(vpImagePoint(i, j)))) {
228 
229  if (m_featureEstimationMethod == PCL_PLANE_ESTIMATION) {
230  point_cloud_face->push_back((*point_cloud)(j, i));
231  } else if (m_featureEstimationMethod == ROBUST_SVD_PLANE_ESTIMATION ||
232  m_featureEstimationMethod == ROBUST_FEATURE_ESTIMATION) {
233  point_cloud_face_vec.push_back((*point_cloud)(j, i).x);
234  point_cloud_face_vec.push_back((*point_cloud)(j, i).y);
235  point_cloud_face_vec.push_back((*point_cloud)(j, i).z);
236 
237  if (m_featureEstimationMethod == ROBUST_FEATURE_ESTIMATION) {
238  // Add point for custom method for plane equation estimation
239  vpPixelMeterConversion::convertPoint(m_cam, j, i, x, y);
240 
241  if (checkSSE2) {
242 #if USE_SSE
243  if (!push) {
244  push = true;
245  prev_x = x;
246  prev_y = y;
247  prev_z = (*point_cloud)(j, i).z;
248  } else {
249  push = false;
250  point_cloud_face_custom.push_back(prev_x);
251  point_cloud_face_custom.push_back(x);
252 
253  point_cloud_face_custom.push_back(prev_y);
254  point_cloud_face_custom.push_back(y);
255 
256  point_cloud_face_custom.push_back(prev_z);
257  point_cloud_face_custom.push_back((*point_cloud)(j, i).z);
258  }
259 #endif
260  } else {
261  point_cloud_face_custom.push_back(x);
262  point_cloud_face_custom.push_back(y);
263  point_cloud_face_custom.push_back((*point_cloud)(j, i).z);
264  }
265  }
266  }
267 
268 #if DEBUG_DISPLAY_DEPTH_NORMAL
269  debugImage[i][j] = 255;
270 #endif
271  }
272  }
273  }
274 
275 #if USE_SSE
276  if (checkSSE2 && push) {
277  point_cloud_face_custom.push_back(prev_x);
278  point_cloud_face_custom.push_back(prev_y);
279  point_cloud_face_custom.push_back(prev_z);
280  }
281 #endif
282 
283  if (point_cloud_face->empty() && point_cloud_face_custom.empty() && point_cloud_face_vec.empty()) {
284  return false;
285  }
286 
287  // Face centroid computed by the different methods
288  vpColVector centroid_point(3);
289 
290  if (m_featureEstimationMethod == PCL_PLANE_ESTIMATION) {
291  if (!computeDesiredFeaturesPCL(point_cloud_face, desired_features, desired_normal, centroid_point)) {
292  return false;
293  }
294  } else if (m_featureEstimationMethod == ROBUST_SVD_PLANE_ESTIMATION) {
295  computeDesiredFeaturesSVD(point_cloud_face_vec, cMo, desired_features, desired_normal, centroid_point);
296  } else if (m_featureEstimationMethod == ROBUST_FEATURE_ESTIMATION) {
297  computeDesiredFeaturesRobustFeatures(point_cloud_face_custom, point_cloud_face_vec, cMo, desired_features,
298  desired_normal, centroid_point);
299  } else {
300  throw vpException(vpException::badValue, "Unknown feature estimation method!");
301  }
302 
303  computeDesiredNormalAndCentroid(cMo, desired_normal, centroid_point);
304 
305  m_faceActivated = true;
306 
307  return true;
308 }
309 #endif
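// Both computeDesiredFeatures() overloads (PCL point cloud or plain vector of 3D points) follow
// the same scheme: project the face polygon into the image to obtain a 2D ROI, keep the 3D
// points that fall inside it (optionally filtered by the scan-line visibility buffer and the
// tracking mask), estimate the plane of the face with the method selected in
// m_featureEstimationMethod (PCL segmentation, robust SVD fit, or the custom robust estimator),
// and finally store the desired normal and centroid expressed in the object frame.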
310 
311 bool vpMbtFaceDepthNormal::computeDesiredFeatures(const vpHomogeneousMatrix &cMo, unsigned int width,
312  unsigned int height,
313  const std::vector<vpColVector> &point_cloud,
314  vpColVector &desired_features, unsigned int stepX,
315  unsigned int stepY
316 #if DEBUG_DISPLAY_DEPTH_NORMAL
317  ,
318  vpImage<unsigned char> &debugImage,
319  std::vector<std::vector<vpImagePoint> > &roiPts_vec
320 #endif
321  , const vpImage<bool> *mask
322 )
323 {
324  m_faceActivated = false;
325 
326  if (width == 0 || height == 0)
327  return false;
328 
329  std::vector<vpImagePoint> roiPts;
330  vpColVector desired_normal(3);
331 
332  computeROI(cMo, width, height, roiPts
333 #if DEBUG_DISPLAY_DEPTH_NORMAL
334  ,
335  roiPts_vec
336 #endif
337  );
338 
339  if (roiPts.size() <= 2) {
340 #ifndef NDEBUG
341  std::cerr << "Error: roiPts.size() <= 2 in computeDesiredFeatures" << std::endl;
342 #endif
343  return false;
344  }
345 
346  vpPolygon polygon_2d(roiPts);
347  vpRect bb = polygon_2d.getBoundingBox();
348 
349  unsigned int top = (unsigned int)std::max(0.0, bb.getTop());
350  unsigned int bottom = (unsigned int)std::min((double)height, std::max(0.0, bb.getBottom()));
351  unsigned int left = (unsigned int)std::max(0.0, bb.getLeft());
352  unsigned int right = (unsigned int)std::min((double)width, std::max(0.0, bb.getRight()));
353 
354  bb.setTop(top);
355  bb.setBottom(bottom);
356  bb.setLeft(left);
357  bb.setRight(right);
358 
359  // Keep only 3D points inside the projected polygon face
360  std::vector<double> point_cloud_face, point_cloud_face_custom;
361 
362  point_cloud_face.reserve((size_t)(3 * bb.getWidth() * bb.getHeight()));
363  if (m_featureEstimationMethod == ROBUST_FEATURE_ESTIMATION) {
364  point_cloud_face_custom.reserve((size_t)(3 * bb.getWidth() * bb.getHeight()));
365  }
366 
367  bool checkSSE2 = vpCPUFeatures::checkSSE2();
368 #if !USE_SSE
369  checkSSE2 = false;
370 #else
371  bool push = false;
372  double prev_x, prev_y, prev_z;
373 #endif
374 
375  double x = 0.0, y = 0.0;
376  for (unsigned int i = top; i < bottom; i += stepY) {
377  for (unsigned int j = left; j < right; j += stepX) {
378  if (vpMeTracker::inMask(mask, i, j) && point_cloud[i * width + j][2] > 0 &&
379  (m_useScanLine ? (i < m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs().getHeight() &&
380  j < m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs().getWidth() &&
381  m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs()[i][j] == m_polygon->getIndex())
382  : polygon_2d.isInside(vpImagePoint(i, j)))) {
383  // Add point
384  point_cloud_face.push_back(point_cloud[i * width + j][0]);
385  point_cloud_face.push_back(point_cloud[i * width + j][1]);
386  point_cloud_face.push_back(point_cloud[i * width + j][2]);
387 
388  if (m_featureEstimationMethod == ROBUST_FEATURE_ESTIMATION) {
389  // Add point for custom method for plane equation estimation
390  vpPixelMeterConversion::convertPoint(m_cam, j, i, x, y);
391 
392  if (checkSSE2) {
393 #if USE_SSE
394  if (!push) {
395  push = true;
396  prev_x = x;
397  prev_y = y;
398  prev_z = point_cloud[i * width + j][2];
399  } else {
400  push = false;
401  point_cloud_face_custom.push_back(prev_x);
402  point_cloud_face_custom.push_back(x);
403 
404  point_cloud_face_custom.push_back(prev_y);
405  point_cloud_face_custom.push_back(y);
406 
407  point_cloud_face_custom.push_back(prev_z);
408  point_cloud_face_custom.push_back(point_cloud[i * width + j][2]);
409  }
410 #endif
411  } else {
412  point_cloud_face_custom.push_back(x);
413  point_cloud_face_custom.push_back(y);
414  point_cloud_face_custom.push_back(point_cloud[i * width + j][2]);
415  }
416  }
417 
418 #if DEBUG_DISPLAY_DEPTH_NORMAL
419  debugImage[i][j] = 255;
420 #endif
421  }
422  }
423  }
424 
425 #if USE_SSE
426  if (checkSSE2 && push) {
427  point_cloud_face_custom.push_back(prev_x);
428  point_cloud_face_custom.push_back(prev_y);
429  point_cloud_face_custom.push_back(prev_z);
430  }
431 #endif
432 
433  if (point_cloud_face.empty() && point_cloud_face_custom.empty()) {
434  return false;
435  }
436 
437  // Face centroid computed by the different methods
438  vpColVector centroid_point(3);
439 
440 #ifdef VISP_HAVE_PCL
441  if (m_featureEstimationMethod == PCL_PLANE_ESTIMATION) {
442  pcl::PointCloud<pcl::PointXYZ>::Ptr point_cloud_face_pcl(new pcl::PointCloud<pcl::PointXYZ>);
443  point_cloud_face_pcl->reserve(point_cloud_face.size() / 3);
444 
445  for (size_t i = 0; i < point_cloud_face.size() / 3; i++) {
446  point_cloud_face_pcl->push_back(
447  pcl::PointXYZ(point_cloud_face[3 * i], point_cloud_face[3 * i + 1], point_cloud_face[3 * i + 2]));
448  }
449 
450  computeDesiredFeaturesPCL(point_cloud_face_pcl, desired_features, desired_normal, centroid_point);
451  } else
452 #endif
453  if (m_featureEstimationMethod == ROBUST_SVD_PLANE_ESTIMATION) {
454  computeDesiredFeaturesSVD(point_cloud_face, cMo, desired_features, desired_normal, centroid_point);
455  } else if (m_featureEstimationMethod == ROBUST_FEATURE_ESTIMATION) {
456  computeDesiredFeaturesRobustFeatures(point_cloud_face_custom, point_cloud_face, cMo, desired_features,
457  desired_normal, centroid_point);
458  } else {
459  throw vpException(vpException::badValue, "Unknown feature estimation method!");
460  }
461 
462  computeDesiredNormalAndCentroid(cMo, desired_normal, centroid_point);
463 
464  m_faceActivated = true;
465 
466  return true;
467 }
468 
469 #ifdef VISP_HAVE_PCL
470 bool vpMbtFaceDepthNormal::computeDesiredFeaturesPCL(const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud_face,
471  vpColVector &desired_features, vpColVector &desired_normal,
472  vpColVector &centroid_point)
473 {
474  try {
475  // Compute plane equation for this subset of point cloud
476  pcl::ModelCoefficients::Ptr coefficients(new pcl::ModelCoefficients);
477  pcl::PointIndices::Ptr inliers(new pcl::PointIndices);
478  // Create the segmentation object
479  pcl::SACSegmentation<pcl::PointXYZ> seg;
480  // Optional
481  seg.setOptimizeCoefficients(true);
482  // Mandatory
483  seg.setModelType(pcl::SACMODEL_PLANE);
484  seg.setMethodType(m_pclPlaneEstimationMethod);
485  seg.setDistanceThreshold(m_pclPlaneEstimationRansacThreshold);
486  seg.setMaxIterations(m_pclPlaneEstimationRansacMaxIter);
487 
488  seg.setInputCloud(point_cloud_face);
489  seg.segment(*inliers, *coefficients);
490 
491  pcl::PointCloud<pcl::PointXYZ>::Ptr point_cloud_face_extracted(new pcl::PointCloud<pcl::PointXYZ>);
492  // Create the filtering object
493  pcl::ExtractIndices<pcl::PointXYZ> extract;
494 
495  // Extract the inliers
496  extract.setInputCloud(point_cloud_face);
497  extract.setIndices(inliers);
498  extract.setNegative(false);
499  extract.filter(*point_cloud_face_extracted);
500 
501 #if PCL_VERSION_COMPARE(>=, 1, 7, 2)
502  pcl::PointXYZ centroid_point_pcl;
503  if (pcl::computeCentroid(*point_cloud_face_extracted, centroid_point_pcl)) {
504  pcl::PointXYZ face_normal;
505  computeNormalVisibility(coefficients->values[0], coefficients->values[1], coefficients->values[2],
506  centroid_point_pcl, face_normal);
507 
508  desired_features.resize(3, false);
509  desired_features[0] = -coefficients->values[0] / coefficients->values[3];
510  desired_features[1] = -coefficients->values[1] / coefficients->values[3];
511  desired_features[2] = -coefficients->values[2] / coefficients->values[3];
512 
513  desired_normal[0] = face_normal.x;
514  desired_normal[1] = face_normal.y;
515  desired_normal[2] = face_normal.z;
516 
517  centroid_point[0] = centroid_point_pcl.x;
518  centroid_point[1] = centroid_point_pcl.y;
519  centroid_point[2] = centroid_point_pcl.z;
520  } else {
521  std::cerr << "Cannot compute centroid!" << std::endl;
522  return false;
523  }
524 #else
525  std::cerr << "Cannot compute centroid using PCL " << PCL_VERSION_PRETTY << "!" << std::endl;
526  return false;
527 #endif
528  } catch (const pcl::PCLException &e) {
529  std::cerr << "Catch a PCL exception: " << e.what() << std::endl;
530  throw;
531  }
532 
533  return true;
534 }
535 #endif
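// computeDesiredFeaturesPCL() fits the plane with PCL's SAC segmentation (the method and RANSAC
// parameters come from m_pclPlaneEstimationMethod, m_pclPlaneEstimationRansacThreshold and
// m_pclPlaneEstimationRansacMaxIter), extracts the inlier points, and uses their centroid both
// as the face centroid and to orient the normal towards the camera. For the estimated plane
// A x + B y + C z + D = 0, the desired features are (-A/D, -B/D, -C/D).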
536 
537 void vpMbtFaceDepthNormal::computeDesiredFeaturesRobustFeatures(const std::vector<double> &point_cloud_face_custom,
538  const std::vector<double> &point_cloud_face,
539  const vpHomogeneousMatrix &cMo,
540  vpColVector &desired_features,
541  vpColVector &desired_normal,
542  vpColVector &centroid_point)
543 {
544  std::vector<double> weights;
545  double den = 0.0;
546  estimateFeatures(point_cloud_face_custom, cMo, desired_features, weights);
547 
548  // Compute face centroid
549  for (size_t i = 0; i < point_cloud_face.size() / 3; i++) {
550  centroid_point[0] += weights[i] * point_cloud_face[3 * i];
551  centroid_point[1] += weights[i] * point_cloud_face[3 * i + 1];
552  centroid_point[2] += weights[i] * point_cloud_face[3 * i + 2];
553 
554  den += weights[i];
555  }
556 
557  centroid_point[0] /= den;
558  centroid_point[1] /= den;
559  centroid_point[2] /= den;
560 
561  computeNormalVisibility(-desired_features[0], -desired_features[1], -desired_features[2], centroid_point,
562  desired_normal);
563 }
564 
565 void vpMbtFaceDepthNormal::computeDesiredFeaturesSVD(const std::vector<double> &point_cloud_face,
566  const vpHomogeneousMatrix &cMo, vpColVector &desired_features,
567  vpColVector &desired_normal, vpColVector &centroid_point)
568 {
569  vpColVector plane_equation_SVD;
570  estimatePlaneEquationSVD(point_cloud_face, cMo, plane_equation_SVD, centroid_point);
571 
572  desired_features.resize(3, false);
573  desired_features[0] = -plane_equation_SVD[0] / plane_equation_SVD[3];
574  desired_features[1] = -plane_equation_SVD[1] / plane_equation_SVD[3];
575  desired_features[2] = -plane_equation_SVD[2] / plane_equation_SVD[3];
576 
577  computeNormalVisibility(-desired_features[0], -desired_features[1], -desired_features[2], centroid_point,
578  desired_normal);
579 }
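// In the SVD and robust-estimation variants the desired feature vector is theta = (A, B, C)
// such that, for every 3D point (X, Y, Z) of the planar face observed at normalized image
// coordinates (x, y) = (X/Z, Y/Z), the plane equation can be rewritten 1/Z = A*x + B*y + C.
// For a plane n . X + D = 0 with unit normal n, this gives theta = -n / D, which is exactly
// what is extracted above from the SVD plane fit.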
580 
581 void vpMbtFaceDepthNormal::computeDesiredNormalAndCentroid(const vpHomogeneousMatrix &cMo,
582  const vpColVector &desired_normal,
583  const vpColVector &centroid_point)
584 {
585  // Compute desired centroid in the object frame
586  vpColVector centroid_cam(4);
587  centroid_cam[0] = centroid_point[0];
588  centroid_cam[1] = centroid_point[1];
589  centroid_cam[2] = centroid_point[2];
590  centroid_cam[3] = 1;
591 
592  vpColVector centroid_obj = cMo.inverse() * centroid_cam;
593  m_faceDesiredCentroid.setWorldCoordinates(centroid_obj[0], centroid_obj[1], centroid_obj[2]);
594 
595  // Compute desired face normal in the object frame
596  vpColVector face_normal_cam(4);
597  face_normal_cam[0] = desired_normal[0];
598  face_normal_cam[1] = desired_normal[1];
599  face_normal_cam[2] = desired_normal[2];
600  face_normal_cam[3] = 1;
601 
602  vpColVector face_normal_obj = cMo.inverse() * face_normal_cam;
603  m_faceDesiredNormal.setWorldCoordinates(face_normal_obj[0], face_normal_obj[1], face_normal_obj[2]);
604 }
605 
606 bool vpMbtFaceDepthNormal::computePolygonCentroid(const std::vector<vpPoint> &points_, vpPoint &centroid)
607 {
608  if (points_.empty()) {
609  return false;
610  }
611 
612  if (points_.size() < 2) {
613  centroid = points_[0];
614  return true;
615  }
616 
617  std::vector<vpPoint> points = points_;
618  points.push_back(points_.front());
619 
620  double A1 = 0.0, A2 = 0.0, c_x1 = 0.0, c_x2 = 0.0, c_y = 0.0, c_z = 0.0;
621 
622  for (size_t i = 0; i < points.size() - 1; i++) {
623  // projection onto xy plane
624  c_x1 += (points[i].get_X() + points[i + 1].get_X()) *
625  (points[i].get_X() * points[i + 1].get_Y() - points[i + 1].get_X() * points[i].get_Y());
626  c_y += (points[i].get_Y() + points[i + 1].get_Y()) *
627  (points[i].get_X() * points[i + 1].get_Y() - points[i + 1].get_X() * points[i].get_Y());
628  A1 += points[i].get_X() * points[i + 1].get_Y() - points[i + 1].get_X() * points[i].get_Y();
629 
630  // projection onto xz plane
631  c_x2 += (points[i].get_X() + points[i + 1].get_X()) *
632  (points[i].get_X() * points[i + 1].get_Z() - points[i + 1].get_X() * points[i].get_Z());
633  c_z += (points[i].get_Z() + points[i + 1].get_Z()) *
634  (points[i].get_X() * points[i + 1].get_Z() - points[i + 1].get_X() * points[i].get_Z());
635  A2 += points[i].get_X() * points[i + 1].get_Z() - points[i + 1].get_X() * points[i].get_Z();
636  }
637 
638  c_x1 /= 3.0 * A1;
639  c_y /= 3.0 * A1;
640  c_x2 /= 3.0 * A2;
641  c_z /= 3.0 * A2;
642 
643  if (A1 > A2) {
644  centroid.set_X(c_x1);
645  } else {
646  centroid.set_X(c_x2);
647  }
648 
649  centroid.set_Y(c_y);
650  centroid.set_Z(c_z);
651 
652  return true;
653 }
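// computePolygonCentroid() applies the shoelace formula to two planar projections of the polygon
// (onto the X-Y and X-Z planes). Each projection yields a signed area (A1, A2) and a centroid;
// the X coordinate is taken from the projection with the larger signed area to avoid the
// degenerate case where the polygon is (almost) parallel to one of the projection planes.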
654 
655 void vpMbtFaceDepthNormal::computeROI(const vpHomogeneousMatrix &cMo, unsigned int width,
656  unsigned int height, std::vector<vpImagePoint> &roiPts
657 #if DEBUG_DISPLAY_DEPTH_NORMAL
658  ,
659  std::vector<std::vector<vpImagePoint> > &roiPts_vec
660 #endif
661 )
662 {
663  if (m_useScanLine || m_clippingFlag > 2)
664  m_cam.computeFov(width, height);
665 
666  if (m_useScanLine) {
667  for (std::vector<PolygonLine>::iterator it = m_polygonLines.begin(); it != m_polygonLines.end(); ++it) {
668  it->m_p1->changeFrame(cMo);
669  it->m_p2->changeFrame(cMo);
670 
671  vpImagePoint ip1, ip2;
672 
673  it->m_poly.changeFrame(cMo);
674  it->m_poly.computePolygonClipped(m_cam);
675 
676  if (it->m_poly.polyClipped.size() == 2 &&
677  ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::NEAR_CLIPPING) == 0) &&
678  ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::FAR_CLIPPING) == 0) &&
679  ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::DOWN_CLIPPING) == 0) &&
680  ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::UP_CLIPPING) == 0) &&
681  ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::LEFT_CLIPPING) == 0) &&
682  ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::RIGHT_CLIPPING) == 0)) {
683 
684  std::vector<std::pair<vpPoint, vpPoint> > linesLst;
685  m_hiddenFace->computeScanLineQuery(it->m_poly.polyClipped[0].first, it->m_poly.polyClipped[1].first, linesLst,
686  true);
687 
688  for (unsigned int i = 0; i < linesLst.size(); i++) {
689  linesLst[i].first.project();
690  linesLst[i].second.project();
691 
692  vpMeterPixelConversion::convertPoint(m_cam, linesLst[i].first.get_x(), linesLst[i].first.get_y(), ip1);
693  vpMeterPixelConversion::convertPoint(m_cam, linesLst[i].second.get_x(), linesLst[i].second.get_y(), ip2);
694 
695  it->m_imPt1 = ip1;
696  it->m_imPt2 = ip2;
697 
698  roiPts.push_back(ip1);
699  roiPts.push_back(ip2);
700 
701 #if DEBUG_DISPLAY_DEPTH_NORMAL
702  std::vector<vpImagePoint> roiPts_;
703  roiPts_.push_back(ip1);
704  roiPts_.push_back(ip2);
705  roiPts_vec.push_back(roiPts_);
706 #endif
707  }
708  }
709  }
710  } else {
711  // Get polygon clipped
712  m_polygon->getRoiClipped(m_cam, roiPts, cMo);
713 
714 #if DEBUG_DISPLAY_DEPTH_NORMAL
715  roiPts_vec.push_back(roiPts);
716 #endif
717  }
718 }
719 
720 void vpMbtFaceDepthNormal::computeVisibility() { m_isVisible = m_polygon->isVisible(); }
721 
722 void vpMbtFaceDepthNormal::computeVisibilityDisplay()
723 {
724  // Compute lines visibility, only for display
725  vpMbtDistanceLine *line;
726  for (std::vector<vpMbtDistanceLine *>::const_iterator it = m_listOfFaceLines.begin(); it != m_listOfFaceLines.end();
727  ++it) {
728  line = *it;
729  bool isvisible = false;
730 
731  for (std::list<int>::const_iterator itindex = line->Lindex_polygon.begin(); itindex != line->Lindex_polygon.end();
732  ++itindex) {
733  int index = *itindex;
734  if (index == -1) {
735  isvisible = true;
736  } else {
737  if (line->hiddenface->isVisible((unsigned int)index)) {
738  isvisible = true;
739  }
740  }
741  }
742 
743  // If the line does not belong to any face, it is always visible
744  if (line->Lindex_polygon.empty())
745  isvisible = true; // Not sure that this can occur
746 
747  if (isvisible) {
748  line->setVisible(true);
749  } else {
750  line->setVisible(false);
751  }
752  }
753 }
754 
755 void vpMbtFaceDepthNormal::computeNormalVisibility(double nx, double ny, double nz,
756  const vpHomogeneousMatrix &cMo, const vpCameraParameters &camera,
757  vpColVector &correct_normal, vpPoint &centroid)
758 {
759  vpColVector faceNormal(3);
760  faceNormal[0] = nx;
761  faceNormal[1] = ny;
762  faceNormal[2] = nz;
763  faceNormal.normalize();
764 
765  // Get polygon clipped
766  std::vector<vpImagePoint> roiPts;
767  m_polygon->getRoiClipped(camera, roiPts, cMo);
768 
769  std::vector<vpPoint> polyPts;
770  m_polygon->getPolygonClipped(polyPts);
771 
772  vpColVector e4(3);
773  if (m_faceCentroidMethod == GEOMETRIC_CENTROID) {
774  computePolygonCentroid(polyPts, centroid);
775  centroid.project();
776 
777  e4[0] = -centroid.get_X();
778  e4[1] = -centroid.get_Y();
779  e4[2] = -centroid.get_Z();
780  e4.normalize();
781  } else {
782  double centroid_x = 0.0;
783  double centroid_y = 0.0;
784  double centroid_z = 0.0;
785 
786  for (size_t i = 0; i < polyPts.size(); i++) {
787  centroid_x += polyPts[i].get_X();
788  centroid_y += polyPts[i].get_Y();
789  centroid_z += polyPts[i].get_Z();
790  }
791 
792  centroid_x /= polyPts.size();
793  centroid_y /= polyPts.size();
794  centroid_z /= polyPts.size();
795 
796  e4[0] = -centroid_x;
797  e4[1] = -centroid_y;
798  e4[2] = -centroid_z;
799  e4.normalize();
800 
801  centroid.set_X(centroid_x);
802  centroid.set_Y(centroid_y);
803  centroid.set_Z(centroid_z);
804  }
805 
806  correct_normal.resize(3, false);
807  double angle = acos(vpColVector::dotProd(e4, faceNormal));
808  if (angle < M_PI_2) {
809  correct_normal = faceNormal;
810  } else {
811  correct_normal[0] = -faceNormal[0];
812  correct_normal[1] = -faceNormal[1];
813  correct_normal[2] = -faceNormal[2];
814  }
815 }
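// The computeNormalVisibility() overloads disambiguate the sign of the plane normal: the normal
// is kept as-is if it makes an angle smaller than pi/2 with the vector going from the face
// centroid to the camera origin (-centroid in the camera frame), and flipped otherwise, so that
// the returned normal always points towards the camera.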
816 
817 #ifdef VISP_HAVE_PCL
818 void vpMbtFaceDepthNormal::computeNormalVisibility(float nx, float ny, float nz,
819  const pcl::PointXYZ &centroid_point, pcl::PointXYZ &face_normal)
820 {
821  vpColVector faceNormal(3);
822  faceNormal[0] = nx;
823  faceNormal[1] = ny;
824  faceNormal[2] = nz;
825  faceNormal.normalize();
826 
827  vpColVector e4(3);
828  e4[0] = -centroid_point.x;
829  e4[1] = -centroid_point.y;
830  e4[2] = -centroid_point.z;
831  e4.normalize();
832 
833  double angle = acos(vpColVector::dotProd(e4, faceNormal));
834  if (angle < M_PI_2) {
835  face_normal = pcl::PointXYZ(faceNormal[0], faceNormal[1], faceNormal[2]);
836  } else {
837  face_normal = pcl::PointXYZ(-faceNormal[0], -faceNormal[1], -faceNormal[2]);
838  }
839 }
840 #endif
841 
842 void vpMbtFaceDepthNormal::computeNormalVisibility(double nx, double ny, double nz,
843  const vpColVector &centroid_point, vpColVector &face_normal)
844 {
845  face_normal.resize(3, false);
846  face_normal[0] = nx;
847  face_normal[1] = ny;
848  face_normal[2] = nz;
849  face_normal.normalize();
850 
851  vpColVector e4 = -centroid_point;
852  e4.normalize();
853 
854  double angle = acos(vpColVector::dotProd(e4, face_normal));
855  if (angle >= M_PI_2) {
856  face_normal[0] = -face_normal[0];
857  face_normal[1] = -face_normal[1];
858  face_normal[2] = -face_normal[2];
859  }
860 }
861 
862 void vpMbtFaceDepthNormal::computeInteractionMatrix(const vpHomogeneousMatrix &cMo, vpMatrix &L, vpColVector &features)
863 {
864  L.resize(3, 6, false, false);
865 
866  // Transform the plane equation for the current pose
867  m_planeCamera = m_planeObject;
868  m_planeCamera.changeFrame(cMo);
869 
870  double ux = m_planeCamera.getA();
871  double uy = m_planeCamera.getB();
872  double uz = m_planeCamera.getC();
873  double D = m_planeCamera.getD();
874  double D2 = D * D;
875 
876  // Features
877  features.resize(3, false);
878  features[0] = -ux / D;
879  features[1] = -uy / D;
880  features[2] = -uz / D;
881 
882  // L_A
883  L[0][0] = ux * ux / D2;
884  L[0][1] = ux * uy / D2;
885  L[0][2] = ux * uz / D2;
886  L[0][3] = 0.0;
887  L[0][4] = uz / D;
888  L[0][5] = -uy / D;
889 
890  // L_B
891  L[1][0] = ux * uy / D2;
892  L[1][1] = uy * uy / D2;
893  L[1][2] = uy * uz / D2;
894  L[1][3] = -uz / D;
895  L[1][4] = 0.0;
896  L[1][5] = ux / D;
897 
898  // L_C
899  L[2][0] = ux * uz / D2;
900  L[2][1] = uy * uz / D2;
901  L[2][2] = uz * uz / D2;
902  L[2][3] = uy / D;
903  L[2][4] = -ux / D;
904  L[2][5] = 0.0;
905 }
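// With u = (ux, uy, uz) the unit normal and D the plane offset expressed in the camera frame,
// the feature is f = -u / D and the 3x6 interaction matrix filled above can be written compactly
// as L = [ f f^T  |  [f]_x ]: the translational part is u u^T / D^2 = f f^T and the rotational
// part is the skew-symmetric (cross-product) matrix of the feature itself.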
906 
907 void vpMbtFaceDepthNormal::display(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cMo,
908  const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
909  bool displayFullModel)
910 {
911  std::vector<std::vector<double> > models = getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);
912 
913  for (size_t i = 0; i < models.size(); i++) {
914  vpImagePoint ip1(models[i][1], models[i][2]);
915  vpImagePoint ip2(models[i][3], models[i][4]);
916  vpDisplay::displayLine(I, ip1, ip2, col, thickness);
917  }
918 }
919 
920 void vpMbtFaceDepthNormal::display(const vpImage<vpRGBa> &I, const vpHomogeneousMatrix &cMo,
921  const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
922  bool displayFullModel)
923 {
924  std::vector<std::vector<double> > models = getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);
925 
926  for (size_t i = 0; i < models.size(); i++) {
927  vpImagePoint ip1(models[i][1], models[i][2]);
928  vpImagePoint ip2(models[i][3], models[i][4]);
929  vpDisplay::displayLine(I, ip1, ip2, col, thickness);
930  }
931 }
932 
933 void vpMbtFaceDepthNormal::displayFeature(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cMo,
934  const vpCameraParameters &cam, double scale,
935  unsigned int thickness)
936 {
937  if (m_faceActivated && m_isTrackedDepthNormalFace && m_isVisible) {
938  // Desired feature
939  vpPoint pt_centroid = m_faceDesiredCentroid;
940  pt_centroid.changeFrame(cMo);
941  pt_centroid.project();
942 
943  vpImagePoint im_centroid;
944  vpMeterPixelConversion::convertPoint(cam, pt_centroid.get_x(), pt_centroid.get_y(), im_centroid);
945 
946  vpPoint pt_normal = m_faceDesiredNormal;
947  pt_normal.changeFrame(cMo);
948  pt_normal.project();
949 
950  vpPoint pt_extremity;
951  pt_extremity.set_X(pt_centroid.get_X() + pt_normal.get_X() * scale);
952  pt_extremity.set_Y(pt_centroid.get_Y() + pt_normal.get_Y() * scale);
953  pt_extremity.set_Z(pt_centroid.get_Z() + pt_normal.get_Z() * scale);
954  pt_extremity.project();
955 
956  vpImagePoint im_extremity;
957  vpMeterPixelConversion::convertPoint(cam, pt_extremity.get_x(), pt_extremity.get_y(), im_extremity);
958 
959  vpDisplay::displayArrow(I, im_centroid, im_extremity, vpColor::blue, 4, 2, thickness);
960 
961  // Current feature
962  // Transform the plane equation for the current pose
963  m_planeCamera = m_planeObject;
964  m_planeCamera.changeFrame(cMo);
965 
966  double ux = m_planeCamera.getA();
967  double uy = m_planeCamera.getB();
968  double uz = m_planeCamera.getC();
969 
970  vpColVector correct_normal;
971  vpCameraParameters cam_copy = cam;
972  cam_copy.computeFov(I.getWidth(), I.getHeight());
973  computeNormalVisibility(ux, uy, uz, cMo, cam_copy, correct_normal, pt_centroid);
974 
975  pt_centroid.project();
976  vpMeterPixelConversion::convertPoint(cam_copy, pt_centroid.get_x(), pt_centroid.get_y(), im_centroid);
977 
978  pt_extremity.set_X(pt_centroid.get_X() + correct_normal[0] * scale);
979  pt_extremity.set_Y(pt_centroid.get_Y() + correct_normal[1] * scale);
980  pt_extremity.set_Z(pt_centroid.get_Z() + correct_normal[2] * scale);
981  pt_extremity.project();
982 
983  vpMeterPixelConversion::convertPoint(cam_copy, pt_extremity.get_x(), pt_extremity.get_y(), im_extremity);
984 
985  vpDisplay::displayArrow(I, im_centroid, im_extremity, vpColor::red, 4, 2, thickness);
986  }
987 }
988 
989 void vpMbtFaceDepthNormal::displayFeature(const vpImage<vpRGBa> &I, const vpHomogeneousMatrix &cMo,
990  const vpCameraParameters &cam, double scale,
991  unsigned int thickness)
992 {
993  if (m_faceActivated && m_isTrackedDepthNormalFace && m_isVisible) {
994  // Desired feature
995  vpPoint pt_centroid = m_faceDesiredCentroid;
996  pt_centroid.changeFrame(cMo);
997  pt_centroid.project();
998 
999  vpImagePoint im_centroid;
1000  vpMeterPixelConversion::convertPoint(cam, pt_centroid.get_x(), pt_centroid.get_y(), im_centroid);
1001 
1002  vpPoint pt_normal = m_faceDesiredNormal;
1003  pt_normal.changeFrame(cMo);
1004  pt_normal.project();
1005 
1006  vpPoint pt_extremity;
1007  pt_extremity.set_X(pt_centroid.get_X() + pt_normal.get_X() * scale);
1008  pt_extremity.set_Y(pt_centroid.get_Y() + pt_normal.get_Y() * scale);
1009  pt_extremity.set_Z(pt_centroid.get_Z() + pt_normal.get_Z() * scale);
1010  pt_extremity.project();
1011 
1012  vpImagePoint im_extremity;
1013  vpMeterPixelConversion::convertPoint(cam, pt_extremity.get_x(), pt_extremity.get_y(), im_extremity);
1014 
1015  vpDisplay::displayArrow(I, im_centroid, im_extremity, vpColor::blue, 4, 2, thickness);
1016 
1017  // Current feature
1018  // Transform the plane equation for the current pose
1019  m_planeCamera = m_planeObject;
1020  m_planeCamera.changeFrame(cMo);
1021 
1022  double ux = m_planeCamera.getA();
1023  double uy = m_planeCamera.getB();
1024  double uz = m_planeCamera.getC();
1025 
1026  vpColVector correct_normal;
1027  vpCameraParameters cam_copy = cam;
1028  cam_copy.computeFov(I.getWidth(), I.getHeight());
1029  computeNormalVisibility(ux, uy, uz, cMo, cam_copy, correct_normal, pt_centroid);
1030 
1031  pt_centroid.project();
1032  vpMeterPixelConversion::convertPoint(cam_copy, pt_centroid.get_x(), pt_centroid.get_y(), im_centroid);
1033 
1034  pt_extremity.set_X(pt_centroid.get_X() + correct_normal[0] * scale);
1035  pt_extremity.set_Y(pt_centroid.get_Y() + correct_normal[1] * scale);
1036  pt_extremity.set_Z(pt_centroid.get_Z() + correct_normal[2] * scale);
1037  pt_extremity.project();
1038 
1039  vpMeterPixelConversion::convertPoint(cam_copy, pt_extremity.get_x(), pt_extremity.get_y(), im_extremity);
1040 
1041  vpDisplay::displayArrow(I, im_centroid, im_extremity, vpColor::red, 4, 2, thickness);
1042  }
1043 }
1044 
1045 void vpMbtFaceDepthNormal::estimateFeatures(const std::vector<double> &point_cloud_face, const vpHomogeneousMatrix &cMo,
1046  vpColVector &x_estimated, std::vector<double> &w)
1047 {
1048  vpMbtTukeyEstimator<double> tukey_robust;
1049  std::vector<double> residues(point_cloud_face.size() / 3);
1050 
1051  w.resize(point_cloud_face.size() / 3, 1.0);
1052 
1053  unsigned int max_iter = 30, iter = 0;
1054  double error = 0.0, prev_error = -1.0;
1055  double A = 0.0, B = 0.0, C = 0.0;
1056 
1057  Mat33<double> ATA_3x3;
1058 
1059  bool checkSSE2 = vpCPUFeatures::checkSSE2();
1060 #if !USE_SSE
1061  checkSSE2 = false;
1062 #endif
1063 
1064  if (checkSSE2) {
1065 #if USE_SSE
1066  while (std::fabs(error - prev_error) > 1e-6 && (iter < max_iter)) {
1067  if (iter == 0) {
1068  // Transform the plane equation for the current pose
1069  m_planeCamera = m_planeObject;
1070  m_planeCamera.changeFrame(cMo);
1071 
1072  double ux = m_planeCamera.getA();
1073  double uy = m_planeCamera.getB();
1074  double uz = m_planeCamera.getC();
1075  double D = m_planeCamera.getD();
1076 
1077  // Features
1078  A = -ux / D;
1079  B = -uy / D;
1080  C = -uz / D;
1081 
1082  size_t cpt = 0;
1083  if (point_cloud_face.size() / 3 >= 2) {
1084  const double *ptr_point_cloud = &point_cloud_face[0];
1085  const __m128d vA = _mm_set1_pd(A);
1086  const __m128d vB = _mm_set1_pd(B);
1087  const __m128d vC = _mm_set1_pd(C);
1088  const __m128d vones = _mm_set1_pd(1.0);
1089 
1090  double *ptr_residues = &residues[0];
1091 
1092  for (; cpt <= point_cloud_face.size() - 6; cpt += 6, ptr_point_cloud += 6, ptr_residues += 2) {
1093  const __m128d vxi = _mm_loadu_pd(ptr_point_cloud);
1094  const __m128d vyi = _mm_loadu_pd(ptr_point_cloud + 2);
1095  const __m128d vZi = _mm_loadu_pd(ptr_point_cloud + 4);
1096  const __m128d vinvZi = _mm_div_pd(vones, vZi);
1097 
1098  const __m128d tmp =
1099  _mm_add_pd(_mm_add_pd(_mm_mul_pd(vA, vxi), _mm_mul_pd(vB, vyi)), _mm_sub_pd(vC, vinvZi));
1100  _mm_storeu_pd(ptr_residues, tmp);
1101  }
1102  }
1103 
1104  for (; cpt < point_cloud_face.size(); cpt += 3) {
1105  double xi = point_cloud_face[cpt];
1106  double yi = point_cloud_face[cpt + 1];
1107  double Zi = point_cloud_face[cpt + 2];
1108 
1109  residues[cpt / 3] = (A * xi + B * yi + C - 1 / Zi);
1110  }
1111  }
1112 
1113  tukey_robust.MEstimator(residues, w, 1e-2);
1114 
1115  __m128d vsum_wi2_xi2 = _mm_setzero_pd();
1116  __m128d vsum_wi2_yi2 = _mm_setzero_pd();
1117  __m128d vsum_wi2 = _mm_setzero_pd();
1118  __m128d vsum_wi2_xi_yi = _mm_setzero_pd();
1119  __m128d vsum_wi2_xi = _mm_setzero_pd();
1120  __m128d vsum_wi2_yi = _mm_setzero_pd();
1121 
1122  __m128d vsum_wi2_xi_Zi = _mm_setzero_pd();
1123  __m128d vsum_wi2_yi_Zi = _mm_setzero_pd();
1124  __m128d vsum_wi2_Zi = _mm_setzero_pd();
1125 
1126  // Estimate A, B, C
1127  size_t cpt = 0;
1128  if (point_cloud_face.size() / 3 >= 2) {
1129  const double *ptr_point_cloud = &point_cloud_face[0];
1130  double *ptr_w = &w[0];
1131 
1132  const __m128d vones = _mm_set1_pd(1.0);
1133 
1134  for (; cpt <= point_cloud_face.size() - 6; cpt += 6, ptr_point_cloud += 6, ptr_w += 2) {
1135  const __m128d vwi2 = _mm_mul_pd(_mm_loadu_pd(ptr_w), _mm_loadu_pd(ptr_w));
1136 
1137  const __m128d vxi = _mm_loadu_pd(ptr_point_cloud);
1138  const __m128d vyi = _mm_loadu_pd(ptr_point_cloud + 2);
1139  const __m128d vZi = _mm_loadu_pd(ptr_point_cloud + 4);
1140  const __m128d vinvZi = _mm_div_pd(vones, vZi);
1141 
1142  vsum_wi2_xi2 = _mm_add_pd(vsum_wi2_xi2, _mm_mul_pd(vwi2, _mm_mul_pd(vxi, vxi)));
1143  vsum_wi2_yi2 = _mm_add_pd(vsum_wi2_yi2, _mm_mul_pd(vwi2, _mm_mul_pd(vyi, vyi)));
1144  vsum_wi2 = _mm_add_pd(vsum_wi2, vwi2);
1145  vsum_wi2_xi_yi = _mm_add_pd(vsum_wi2_xi_yi, _mm_mul_pd(vwi2, _mm_mul_pd(vxi, vyi)));
1146  vsum_wi2_xi = _mm_add_pd(vsum_wi2_xi, _mm_mul_pd(vwi2, vxi));
1147  vsum_wi2_yi = _mm_add_pd(vsum_wi2_yi, _mm_mul_pd(vwi2, vyi));
1148 
1149  const __m128d vwi2_invZi = _mm_mul_pd(vwi2, vinvZi);
1150  vsum_wi2_xi_Zi = _mm_add_pd(vsum_wi2_xi_Zi, _mm_mul_pd(vxi, vwi2_invZi));
1151  vsum_wi2_yi_Zi = _mm_add_pd(vsum_wi2_yi_Zi, _mm_mul_pd(vyi, vwi2_invZi));
1152  vsum_wi2_Zi = _mm_add_pd(vsum_wi2_Zi, vwi2_invZi);
1153  }
1154  }
1155 
1156  double vtmp[2];
1157  _mm_storeu_pd(vtmp, vsum_wi2_xi2);
1158  double sum_wi2_xi2 = vtmp[0] + vtmp[1];
1159 
1160  _mm_storeu_pd(vtmp, vsum_wi2_yi2);
1161  double sum_wi2_yi2 = vtmp[0] + vtmp[1];
1162 
1163  _mm_storeu_pd(vtmp, vsum_wi2);
1164  double sum_wi2 = vtmp[0] + vtmp[1];
1165 
1166  _mm_storeu_pd(vtmp, vsum_wi2_xi_yi);
1167  double sum_wi2_xi_yi = vtmp[0] + vtmp[1];
1168 
1169  _mm_storeu_pd(vtmp, vsum_wi2_xi);
1170  double sum_wi2_xi = vtmp[0] + vtmp[1];
1171 
1172  _mm_storeu_pd(vtmp, vsum_wi2_yi);
1173  double sum_wi2_yi = vtmp[0] + vtmp[1];
1174 
1175  _mm_storeu_pd(vtmp, vsum_wi2_xi_Zi);
1176  double sum_wi2_xi_Zi = vtmp[0] + vtmp[1];
1177 
1178  _mm_storeu_pd(vtmp, vsum_wi2_yi_Zi);
1179  double sum_wi2_yi_Zi = vtmp[0] + vtmp[1];
1180 
1181  _mm_storeu_pd(vtmp, vsum_wi2_Zi);
1182  double sum_wi2_Zi = vtmp[0] + vtmp[1];
1183 
1184  for (; cpt < point_cloud_face.size(); cpt += 3) {
1185  double wi2 = w[cpt / 3] * w[cpt / 3];
1186 
1187  double xi = point_cloud_face[cpt];
1188  double yi = point_cloud_face[cpt + 1];
1189  double Zi = point_cloud_face[cpt + 2];
1190  double invZi = 1.0 / Zi;
1191 
1192  sum_wi2_xi2 += wi2 * xi * xi;
1193  sum_wi2_yi2 += wi2 * yi * yi;
1194  sum_wi2 += wi2;
1195  sum_wi2_xi_yi += wi2 * xi * yi;
1196  sum_wi2_xi += wi2 * xi;
1197  sum_wi2_yi += wi2 * yi;
1198 
1199  sum_wi2_xi_Zi += wi2 * xi * invZi;
1200  sum_wi2_yi_Zi += wi2 * yi * invZi;
1201  sum_wi2_Zi += wi2 * invZi;
1202  }
1203 
1204  ATA_3x3[0] = sum_wi2_xi2;
1205  ATA_3x3[1] = sum_wi2_xi_yi;
1206  ATA_3x3[2] = sum_wi2_xi;
1207  ATA_3x3[3] = sum_wi2_xi_yi;
1208  ATA_3x3[4] = sum_wi2_yi2;
1209  ATA_3x3[5] = sum_wi2_yi;
1210  ATA_3x3[6] = sum_wi2_xi;
1211  ATA_3x3[7] = sum_wi2_yi;
1212  ATA_3x3[8] = sum_wi2;
1213 
1214  Mat33<double> minv = ATA_3x3.inverse();
1215 
1216  A = minv[0] * sum_wi2_xi_Zi + minv[1] * sum_wi2_yi_Zi + minv[2] * sum_wi2_Zi;
1217  B = minv[3] * sum_wi2_xi_Zi + minv[4] * sum_wi2_yi_Zi + minv[5] * sum_wi2_Zi;
1218  C = minv[6] * sum_wi2_xi_Zi + minv[7] * sum_wi2_yi_Zi + minv[8] * sum_wi2_Zi;
1219 
1220  cpt = 0;
1221 
1222  // Compute error
1223  prev_error = error;
1224  error = 0.0;
1225 
1226  __m128d verror = _mm_set1_pd(0.0);
1227  if (point_cloud_face.size() / 3 >= 2) {
1228  const double *ptr_point_cloud = &point_cloud_face[0];
1229  const __m128d vA = _mm_set1_pd(A);
1230  const __m128d vB = _mm_set1_pd(B);
1231  const __m128d vC = _mm_set1_pd(C);
1232  const __m128d vones = _mm_set1_pd(1.0);
1233 
1234  double *ptr_residues = &residues[0];
1235 
1236  for (; cpt <= point_cloud_face.size() - 6; cpt += 6, ptr_point_cloud += 6, ptr_residues += 2) {
1237  const __m128d vxi = _mm_loadu_pd(ptr_point_cloud);
1238  const __m128d vyi = _mm_loadu_pd(ptr_point_cloud + 2);
1239  const __m128d vZi = _mm_loadu_pd(ptr_point_cloud + 4);
1240  const __m128d vinvZi = _mm_div_pd(vones, vZi);
1241 
1242  const __m128d tmp = _mm_add_pd(_mm_add_pd(_mm_mul_pd(vA, vxi), _mm_mul_pd(vB, vyi)), _mm_sub_pd(vC, vinvZi));
1243  verror = _mm_add_pd(verror, _mm_mul_pd(tmp, tmp));
1244 
1245  _mm_storeu_pd(ptr_residues, tmp);
1246  }
1247  }
1248 
1249  _mm_storeu_pd(vtmp, verror);
1250  error = vtmp[0] + vtmp[1];
1251 
1252  for (size_t idx = cpt; idx < point_cloud_face.size(); idx += 3) {
1253  double xi = point_cloud_face[idx];
1254  double yi = point_cloud_face[idx + 1];
1255  double Zi = point_cloud_face[idx + 2];
1256 
1257  error += vpMath::sqr(A * xi + B * yi + C - 1 / Zi);
1258  residues[idx / 3] = (A * xi + B * yi + C - 1 / Zi);
1259  }
1260 
1261  error /= point_cloud_face.size() / 3;
1262 
1263  iter++;
1264  } // while ( std::fabs(error - prev_error) > 1e-6 && (iter < max_iter) )
1265 #endif
1266  } else {
1267  while (std::fabs(error - prev_error) > 1e-6 && (iter < max_iter)) {
1268  if (iter == 0) {
1269  // Transform the plane equation for the current pose
1270  m_planeCamera = m_planeObject;
1271  m_planeCamera.changeFrame(cMo);
1272 
1273  double ux = m_planeCamera.getA();
1274  double uy = m_planeCamera.getB();
1275  double uz = m_planeCamera.getC();
1276  double D = m_planeCamera.getD();
1277 
1278  // Features
1279  A = -ux / D;
1280  B = -uy / D;
1281  C = -uz / D;
1282 
1283  for (size_t i = 0; i < point_cloud_face.size() / 3; i++) {
1284  double xi = point_cloud_face[3 * i];
1285  double yi = point_cloud_face[3 * i + 1];
1286  double Zi = point_cloud_face[3 * i + 2];
1287 
1288  residues[i] = (A * xi + B * yi + C - 1 / Zi);
1289  }
1290  }
1291 
1292  tukey_robust.MEstimator(residues, w, 1e-2);
1293 
1294  // Estimate A, B, C
1295  double sum_wi2_xi2 = 0.0, sum_wi2_yi2 = 0.0, sum_wi2 = 0.0;
1296  double sum_wi2_xi_yi = 0.0, sum_wi2_xi = 0.0, sum_wi2_yi = 0.0;
1297 
1298  double sum_wi2_xi_Zi = 0.0, sum_wi2_yi_Zi = 0.0, sum_wi2_Zi = 0.0;
1299 
1300  for (size_t i = 0; i < point_cloud_face.size() / 3; i++) {
1301  double wi2 = w[i] * w[i];
1302 
1303  double xi = point_cloud_face[3 * i];
1304  double yi = point_cloud_face[3 * i + 1];
1305  double Zi = point_cloud_face[3 * i + 2];
1306  double invZi = 1 / Zi;
1307 
1308  sum_wi2_xi2 += wi2 * xi * xi;
1309  sum_wi2_yi2 += wi2 * yi * yi;
1310  sum_wi2 += wi2;
1311  sum_wi2_xi_yi += wi2 * xi * yi;
1312  sum_wi2_xi += wi2 * xi;
1313  sum_wi2_yi += wi2 * yi;
1314 
1315  sum_wi2_xi_Zi += wi2 * xi * invZi;
1316  sum_wi2_yi_Zi += wi2 * yi * invZi;
1317  sum_wi2_Zi += wi2 * invZi;
1318  }
1319 
1320  ATA_3x3[0] = sum_wi2_xi2;
1321  ATA_3x3[1] = sum_wi2_xi_yi;
1322  ATA_3x3[2] = sum_wi2_xi;
1323  ATA_3x3[3] = sum_wi2_xi_yi;
1324  ATA_3x3[4] = sum_wi2_yi2;
1325  ATA_3x3[5] = sum_wi2_yi;
1326  ATA_3x3[6] = sum_wi2_xi;
1327  ATA_3x3[7] = sum_wi2_yi;
1328  ATA_3x3[8] = sum_wi2;
1329 
1330  Mat33<double> minv = ATA_3x3.inverse();
1331 
1332  A = minv[0] * sum_wi2_xi_Zi + minv[1] * sum_wi2_yi_Zi + minv[2] * sum_wi2_Zi;
1333  B = minv[3] * sum_wi2_xi_Zi + minv[4] * sum_wi2_yi_Zi + minv[5] * sum_wi2_Zi;
1334  C = minv[6] * sum_wi2_xi_Zi + minv[7] * sum_wi2_yi_Zi + minv[8] * sum_wi2_Zi;
1335 
1336  prev_error = error;
1337  error = 0.0;
1338 
1339  // Compute error
1340  for (size_t i = 0; i < point_cloud_face.size() / 3; i++) {
1341  double xi = point_cloud_face[3 * i];
1342  double yi = point_cloud_face[3 * i + 1];
1343  double Zi = point_cloud_face[3 * i + 2];
1344 
1345  error += vpMath::sqr(A * xi + B * yi + C - 1 / Zi);
1346  residues[i] = (A * xi + B * yi + C - 1 / Zi);
1347  }
1348 
1349  error /= point_cloud_face.size() / 3;
1350 
1351  iter++;
1352  } // while ( std::fabs(error - prev_error) > 1e-6 && (iter < max_iter) )
1353  }
1354 
1355  x_estimated.resize(3, false);
1356  x_estimated[0] = A;
1357  x_estimated[1] = B;
1358  x_estimated[2] = C;
1359 }
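// estimateFeatures() solves for (A, B, C) in 1/Z = A*x + B*y + C by iteratively reweighted least
// squares: the residuals r_i = A*x_i + B*y_i + C - 1/Z_i are re-weighted with the Tukey
// M-estimator, the 3x3 weighted normal equations are inverted in closed form (Mat33::inverse),
// and the loop stops when the mean squared error varies by less than 1e-6 or after 30 iterations.
// The SSE2 branch processes two points per instruction using the interleaved point layout built
// in computeDesiredFeatures(); the scalar branch performs the same computation point by point.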
1360 
1361 void vpMbtFaceDepthNormal::estimatePlaneEquationSVD(const std::vector<double> &point_cloud_face,
1362  const vpHomogeneousMatrix &cMo,
1363  vpColVector &plane_equation_estimated, vpColVector &centroid)
1364 {
1365  unsigned int max_iter = 10;
1366  double prev_error = 1e3;
1367  double error = 1e3 - 1;
1368 
1369  std::vector<double> weights(point_cloud_face.size() / 3, 1.0);
1370  std::vector<double> residues(point_cloud_face.size() / 3);
1371  vpMatrix M((unsigned int)(point_cloud_face.size() / 3), 3);
1372  vpMbtTukeyEstimator<double> tukey;
1373  vpColVector normal;
1374 
1375  for (unsigned int iter = 0; iter < max_iter && std::fabs(error - prev_error) > 1e-6; iter++) {
1376  if (iter != 0) {
1377  tukey.MEstimator(residues, weights, 1e-4);
1378  } else {
1379  // Transform the plane equation for the current pose
1380  m_planeCamera = m_planeObject;
1381  m_planeCamera.changeFrame(cMo);
1382 
1383  double A = m_planeCamera.getA();
1384  double B = m_planeCamera.getB();
1385  double C = m_planeCamera.getC();
1386  double D = m_planeCamera.getD();
1387 
1388  // Compute distance point to estimated plane
1389  for (size_t i = 0; i < point_cloud_face.size() / 3; i++) {
1390  residues[i] = std::fabs(A * point_cloud_face[3 * i] + B * point_cloud_face[3 * i + 1] +
1391  C * point_cloud_face[3 * i + 2] + D) /
1392  sqrt(A * A + B * B + C * C);
1393  }
1394 
1395  tukey.MEstimator(residues, weights, 1e-4);
1396  plane_equation_estimated.resize(4, false);
1397  }
1398 
1399  // Compute centroid
1400  double centroid_x = 0.0, centroid_y = 0.0, centroid_z = 0.0;
1401  double total_w = 0.0;
1402 
1403  for (size_t i = 0; i < point_cloud_face.size() / 3; i++) {
1404  centroid_x += weights[i] * point_cloud_face[3 * i];
1405  centroid_y += weights[i] * point_cloud_face[3 * i + 1];
1406  centroid_z += weights[i] * point_cloud_face[3 * i + 2];
1407  total_w += weights[i];
1408  }
1409 
1410  centroid_x /= total_w;
1411  centroid_y /= total_w;
1412  centroid_z /= total_w;
1413 
1414  // Minimization
1415  for (size_t i = 0; i < point_cloud_face.size() / 3; i++) {
1416  M[(unsigned int)i][0] = weights[i] * (point_cloud_face[3 * i] - centroid_x);
1417  M[(unsigned int)i][1] = weights[i] * (point_cloud_face[3 * i + 1] - centroid_y);
1418  M[(unsigned int)i][2] = weights[i] * (point_cloud_face[3 * i + 2] - centroid_z);
1419  }
1420 
1421  vpMatrix J = M.t() * M;
1422 
1423  vpColVector W;
1424  vpMatrix V;
1425  J.svd(W, V);
1426 
1427  double smallestSv = W[0];
1428  unsigned int indexSmallestSv = 0;
1429  for (unsigned int i = 1; i < W.size(); i++) {
1430  if (W[i] < smallestSv) {
1431  smallestSv = W[i];
1432  indexSmallestSv = i;
1433  }
1434  }
1435 
1436  normal = V.getCol(indexSmallestSv);
1437 
1438  // Compute plane equation
1439  double A = normal[0], B = normal[1], C = normal[2];
1440  double D = -(A * centroid_x + B * centroid_y + C * centroid_z);
1441 
1442  // Update plane equation
1443  plane_equation_estimated[0] = A;
1444  plane_equation_estimated[1] = B;
1445  plane_equation_estimated[2] = C;
1446  plane_equation_estimated[3] = D;
1447 
1448  // Compute error points to estimated plane
1449  prev_error = error;
1450  error = 0.0;
1451  for (size_t i = 0; i < point_cloud_face.size() / 3; i++) {
1452  residues[i] = std::fabs(A * point_cloud_face[3 * i] + B * point_cloud_face[3 * i + 1] +
1453  C * point_cloud_face[3 * i + 2] + D) /
1454  sqrt(A * A + B * B + C * C);
1455  error += weights[i] * residues[i];
1456  }
1457  error /= total_w;
1458  }
1459 
1460  // Update final weights
1461  tukey.MEstimator(residues, weights, 1e-4);
1462 
1463  // Update final centroid
1464  centroid.resize(3, false);
1465  double total_w = 0.0;
1466 
1467  for (size_t i = 0; i < point_cloud_face.size() / 3; i++) {
1468  centroid[0] += weights[i] * point_cloud_face[3 * i];
1469  centroid[1] += weights[i] * point_cloud_face[3 * i + 1];
1470  centroid[2] += weights[i] * point_cloud_face[3 * i + 2];
1471  total_w += weights[i];
1472  }
1473 
1474  centroid[0] /= total_w;
1475  centroid[1] /= total_w;
1476  centroid[2] /= total_w;
1477 
1478  // Compute final plane equation
1479  double A = normal[0], B = normal[1], C = normal[2];
1480  double D = -(A * centroid[0] + B * centroid[1] + C * centroid[2]);
1481 
1482  // Update final plane equation
1483  plane_equation_estimated[0] = A;
1484  plane_equation_estimated[1] = B;
1485  plane_equation_estimated[2] = C;
1486  plane_equation_estimated[3] = D;
1487 }
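// estimatePlaneEquationSVD() performs a robust total least-squares plane fit: at each iteration
// the points are re-weighted with the Tukey M-estimator applied to their point-to-plane
// distances, the weighted centroid is subtracted, and the plane normal is taken as the singular
// vector of M^T M associated with the smallest singular value; D is then chosen so that the
// plane passes through the weighted centroid. The loop stops when the weighted mean residual
// changes by less than 1e-6 or after 10 iterations.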
1488 
1494 std::vector<std::vector<double> > vpMbtFaceDepthNormal::getFeaturesForDisplay(const vpHomogeneousMatrix &cMo,
1495  const vpCameraParameters &cam,
1496  double scale)
1497 {
1498  std::vector<std::vector<double> > features;
1499 
1500  if (m_faceActivated && m_isTrackedDepthNormalFace && m_isVisible) {
1501  // Desired feature
1502  vpPoint pt_centroid = m_faceDesiredCentroid;
1503  pt_centroid.changeFrame(cMo);
1504  pt_centroid.project();
1505 
1506  vpImagePoint im_centroid;
1507  vpMeterPixelConversion::convertPoint(cam, pt_centroid.get_x(), pt_centroid.get_y(), im_centroid);
1508 
1509  vpPoint pt_normal = m_faceDesiredNormal;
1510  pt_normal.changeFrame(cMo);
1511  pt_normal.project();
1512 
1513  vpPoint pt_extremity;
1514  pt_extremity.set_X(pt_centroid.get_X() + pt_normal.get_X() * scale);
1515  pt_extremity.set_Y(pt_centroid.get_Y() + pt_normal.get_Y() * scale);
1516  pt_extremity.set_Z(pt_centroid.get_Z() + pt_normal.get_Z() * scale);
1517  pt_extremity.project();
1518 
1519  vpImagePoint im_extremity;
1520  vpMeterPixelConversion::convertPoint(cam, pt_extremity.get_x(), pt_extremity.get_y(), im_extremity);
1521 
1522  {
1523 #if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
1524  std::vector<double> params = {2, //desired normal
1525  im_centroid.get_i(),
1526  im_centroid.get_j(),
1527  im_extremity.get_i(),
1528  im_extremity.get_j()};
1529 #else
1530  std::vector<double> params;
1531  params.push_back(2); //desired normal
1532  params.push_back(im_centroid.get_i());
1533  params.push_back(im_centroid.get_j());
1534  params.push_back(im_extremity.get_i());
1535  params.push_back(im_extremity.get_j());
1536 #endif
1537  features.push_back(params);
1538  }
1539 
1540  // Current feature
1541  // Transform the plane equation for the current pose
1542  m_planeCamera = m_planeObject;
1543  m_planeCamera.changeFrame(cMo);
1544 
1545  double ux = m_planeCamera.getA();
1546  double uy = m_planeCamera.getB();
1547  double uz = m_planeCamera.getC();
1548 
1549  vpColVector correct_normal;
1550  computeNormalVisibility(ux, uy, uz, cMo, cam, correct_normal, pt_centroid);
1551 
1552  pt_centroid.project();
1553  vpMeterPixelConversion::convertPoint(cam, pt_centroid.get_x(), pt_centroid.get_y(), im_centroid);
1554 
1555  pt_extremity.set_X(pt_centroid.get_X() + correct_normal[0] * scale);
1556  pt_extremity.set_Y(pt_centroid.get_Y() + correct_normal[1] * scale);
1557  pt_extremity.set_Z(pt_centroid.get_Z() + correct_normal[2] * scale);
1558  pt_extremity.project();
1559 
1560  vpMeterPixelConversion::convertPoint(cam, pt_extremity.get_x(), pt_extremity.get_y(), im_extremity);
1561 
1562  {
1563 #if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
1564  std::vector<double> params = {3, //normal at current pose
1565  im_centroid.get_i(),
1566  im_centroid.get_j(),
1567  im_extremity.get_i(),
1568  im_extremity.get_j()};
1569 #else
1570  std::vector<double> params;
1571  params.push_back(3); //normal at current pose
1572  params.push_back(im_centroid.get_i());
1573  params.push_back(im_centroid.get_j());
1574  params.push_back(im_extremity.get_i());
1575  params.push_back(im_extremity.get_j());
1576 #endif
1577  features.push_back(params);
1578  }
1579  }
1580 
1581  return features;
1582 }
1583 
1595 std::vector<std::vector<double> > vpMbtFaceDepthNormal::getModelForDisplay(unsigned int width, unsigned int height,
1596  const vpHomogeneousMatrix &cMo,
1597  const vpCameraParameters &cam,
1598  bool displayFullModel)
1599 {
1600  std::vector<std::vector<double> > models;
1601 
1602  if ((m_polygon->isVisible() && m_isTrackedDepthNormalFace) || displayFullModel) {
1603  computeVisibilityDisplay();
1604 
1605  for (std::vector<vpMbtDistanceLine *>::const_iterator it = m_listOfFaceLines.begin(); it != m_listOfFaceLines.end();
1606  ++it) {
1607  vpMbtDistanceLine *line = *it;
1608  std::vector<std::vector<double> > lineModels = line->getModelForDisplay(width, height, cMo, cam, displayFullModel);
1609  models.insert(models.end(), lineModels.begin(), lineModels.end());
1610  }
1611  }
1612 
1613  return models;
1614 }
1615 
1625 bool vpMbtFaceDepthNormal::samePoint(const vpPoint &P1, const vpPoint &P2) const
1626 {
1627  double dx = fabs(P1.get_oX() - P2.get_oX());
1628  double dy = fabs(P1.get_oY() - P2.get_oY());
1629  double dz = fabs(P1.get_oZ() - P2.get_oZ());
1630 
1631  if (dx <= std::numeric_limits<double>::epsilon() && dy <= std::numeric_limits<double>::epsilon() &&
1632  dz <= std::numeric_limits<double>::epsilon())
1633  return true;
1634  else
1635  return false;
1636 }
1637 
1638 void vpMbtFaceDepthNormal::setCameraParameters(const vpCameraParameters &camera)
1639 {
1640  m_cam = camera;
1641 
1642  for (std::vector<vpMbtDistanceLine *>::const_iterator it = m_listOfFaceLines.begin(); it != m_listOfFaceLines.end();
1643  ++it) {
1644  (*it)->setCameraParameters(camera);
1645  }
1646 }
1647 
1648 void vpMbtFaceDepthNormal::setScanLineVisibilityTest(bool v)
1649 {
1650  m_useScanLine = v;
1651 
1652  for (std::vector<vpMbtDistanceLine *>::const_iterator it = m_listOfFaceLines.begin(); it != m_listOfFaceLines.end();
1653  ++it) {
1654  (*it)->useScanLine = v;
1655  }
1656 }
std::vector< vpMbtDistanceLine * > m_listOfFaceLines
void setClipping(const unsigned int &flags)
Definition: vpPolygon3D.h:187
std::vector< PolygonLine > m_polygonLines
double getA() const
Definition: vpPlane.h:102
vpFaceCentroidType m_faceCentroidMethod
Method to compute the face centroid for the current features.
void setName(const std::string &line_name)
void setCameraParameters(const vpCameraParameters &camera)
void resize(unsigned int i, bool flagNullify=true)
Definition: vpColVector.h:310
double get_X() const
Get the point cX coordinate in the camera frame.
Definition: vpPoint.cpp:452
void computeScanLineQuery(const vpPoint &a, const vpPoint &b, std::vector< std::pair< vpPoint, vpPoint > > &lines, const bool &displayResults=false)
void estimatePlaneEquationSVD(const std::vector< double > &point_cloud_face, const vpHomogeneousMatrix &cMo, vpColVector &plane_equation_estimated, vpColVector &centroid)
unsigned int getHeight() const
Definition: vpImage.h:188
void computeDesiredFeaturesSVD(const std::vector< double > &point_cloud_face, const vpHomogeneousMatrix &cMo, vpColVector &desired_features, vpColVector &desired_normal, vpColVector &centroid_point)
void computeDesiredNormalAndCentroid(const vpHomogeneousMatrix &cMo, const vpColVector &desired_normal, const vpColVector &centroid_point)
void set_Y(double cY)
Set the point cY coordinate in the camera frame.
Definition: vpPoint.cpp:495
Implementation of column vector and the associated operations.
Definition: vpColVector.h:130
static bool inMask(const vpImage< bool > *mask, unsigned int i, unsigned int j)
static double dotProd(const vpColVector &a, const vpColVector &b)
double get_x() const
Get the point x coordinate in the image plane.
Definition: vpPoint.cpp:470
void setRight(double pos)
Definition: vpRect.h:349
vpMbHiddenFaces< vpMbtPolygon > * hiddenface
Pointer to the list of faces.
double getHeight() const
Definition: vpRect.h:167
double get_y() const
Get the point y coordinate in the image plane.
Definition: vpPoint.cpp:472
void displayFeature(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, double scale=0.05, unsigned int thickness=1)
double getC() const
Definition: vpPlane.h:106
void addPolygon(const int &index)
void setNearClippingDistance(const double &dist)
Definition: vpPolygon3D.h:207
vpPoint m_faceDesiredCentroid
Desired centroid (computed from the sensor)
Defines a rectangle in the plane.
Definition: vpRect.h:79
Class that defines a 2D point in an image. This class is useful for image processing and stores only ...
Definition: vpImagePoint.h:87
Class for generating random numbers with uniform probability density.
Definition: vpUniRand.h:100
Compute the geometric centroid.
double get_Z() const
Get the point cZ coordinate in the camera frame.
Definition: vpPoint.cpp:456
vpPlane m_planeObject
Plane equation described in the object frame.
double get_Y() const
Get the point cY coordinate in the camera frame.
Definition: vpPoint.cpp:454
bool computeDesiredFeatures(const vpHomogeneousMatrix &cMo, unsigned int width, unsigned int height, const pcl::PointCloud< pcl::PointXYZ >::ConstPtr &point_cloud, vpColVector &desired_features, unsigned int stepX, unsigned int stepY, const vpImage< bool > *mask=NULL)
void estimateFeatures(const std::vector< double > &point_cloud_face, const vpHomogeneousMatrix &cMo, vpColVector &x_estimated, std::vector< double > &weights)
unsigned int getWidth() const
Definition: vpImage.h:246
double getBottom() const
Definition: vpRect.h:98
static void displayLine(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color, unsigned int thickness=1, bool segment=true)
void setBottom(double pos)
Definition: vpRect.h:289
void display(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, const vpColor &col, unsigned int thickness=1, bool displayFullModel=false)
bool useScanLine
Use scanline rendering.
void setIndex(unsigned int i)
static const vpColor blue
Definition: vpColor.h:223
double m_pclPlaneEstimationRansacThreshold
PCL plane estimation RANSAC threshold.
void computeFov(const unsigned int &w, const unsigned int &h)