Visual Servoing Platform  version 3.3.0 under development (2020-02-17)
vpMbtFaceDepthNormal.cpp
1 /****************************************************************************
2  *
3  * ViSP, open source Visual Servoing Platform software.
4  * Copyright (C) 2005 - 2019 by Inria. All rights reserved.
5  *
6  * This software is free software; you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License as published by
8  * the Free Software Foundation; either version 2 of the License, or
9  * (at your option) any later version.
10  * See the file LICENSE.txt at the root directory of this source
11  * distribution for additional information about the GNU GPL.
12  *
13  * For using ViSP with software that can not be combined with the GNU
14  * GPL, please contact Inria about acquiring a ViSP Professional
15  * Edition License.
16  *
17  * See http://visp.inria.fr for more information.
18  *
19  * This software was developed at:
20  * Inria Rennes - Bretagne Atlantique
21  * Campus Universitaire de Beaulieu
22  * 35042 Rennes Cedex
23  * France
24  *
25  * If you have questions regarding the use of this file, please contact
26  * Inria at visp@inria.fr
27  *
28  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
29  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
30  *
31  * Description:
32  * Manage depth normal features for a particular face.
33  *
34  *****************************************************************************/
35 
36 #include <visp3/core/vpCPUFeatures.h>
37 #include <visp3/mbt/vpMbtFaceDepthNormal.h>
38 #include <visp3/mbt/vpMbtTukeyEstimator.h>
39 
40 #ifdef VISP_HAVE_PCL
41 #include <pcl/common/centroid.h>
42 #include <pcl/filters/extract_indices.h>
43 #include <pcl/segmentation/sac_segmentation.h>
44 #endif
45 
46 #if defined __SSE2__ || defined _M_X64 || (defined _M_IX86_FP && _M_IX86_FP >= 2)
47 #include <emmintrin.h>
48 #define VISP_HAVE_SSE2 1
49 #endif
50 
51 #define USE_SSE_CODE 1
52 #if VISP_HAVE_SSE2 && USE_SSE_CODE
53 #define USE_SSE 1
54 #else
55 #define USE_SSE 0
56 #endif
57 
 58 vpMbtFaceDepthNormal::vpMbtFaceDepthNormal()
 59  : m_cam(), m_clippingFlag(vpPolygon3D::NO_CLIPPING), m_distFarClip(100), m_distNearClip(0.001), m_hiddenFace(NULL),
60  m_planeObject(), m_polygon(NULL), m_useScanLine(false), m_faceActivated(false),
61  m_faceCentroidMethod(GEOMETRIC_CENTROID), m_faceDesiredCentroid(), m_faceDesiredNormal(),
62  m_featureEstimationMethod(ROBUST_FEATURE_ESTIMATION), m_isTrackedDepthNormalFace(true), m_isVisible(false),
63  m_listOfFaceLines(), m_planeCamera(),
64  m_pclPlaneEstimationMethod(2), // SAC_MSAC, see pcl/sample_consensus/method_types.h
65  m_pclPlaneEstimationRansacMaxIter(200), m_pclPlaneEstimationRansacThreshold(0.001), m_polygonLines()
66 {
67 }
68 
 69 vpMbtFaceDepthNormal::~vpMbtFaceDepthNormal()
 70 {
71  for (size_t i = 0; i < m_listOfFaceLines.size(); i++) {
72  delete m_listOfFaceLines[i];
73  }
74 }
75 
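// Add one edge of the face to the line model: the segment is kept both as a
// PolygonLine (used for clipping and display) and as a vpMbtDistanceLine that
// is shared between faces; if a line with the same two endpoints already
// exists, it is reused and only gets the extra polygon index.
// Usage sketch (hypothetical names), e.g. when registering a rectangular face:
//   face.addLine(P0, P1, &hidden_faces, polygon_index, "edge_0");
//   face.addLine(P1, P2, &hidden_faces, polygon_index, "edge_1");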
 89 void vpMbtFaceDepthNormal::addLine(vpPoint &P1, vpPoint &P2, vpMbHiddenFaces<vpMbtPolygon> *const faces, int polygon,
 90  std::string name)
91 {
92  // Build a PolygonLine to be able to easily display the lines model
93  PolygonLine polygon_line;
94 
95  // Add polygon
96  polygon_line.m_poly.setNbPoint(2);
97  polygon_line.m_poly.addPoint(0, P1);
98  polygon_line.m_poly.addPoint(1, P2);
99 
100  polygon_line.m_poly.setClipping(m_clippingFlag);
101  polygon_line.m_poly.setNearClippingDistance(m_distNearClip);
102  polygon_line.m_poly.setFarClippingDistance(m_distFarClip);
103 
104  polygon_line.m_p1 = &polygon_line.m_poly.p[0];
105  polygon_line.m_p2 = &polygon_line.m_poly.p[1];
106 
107  m_polygonLines.push_back(polygon_line);
108 
 109  // Check whether the line is already in the model (avoid duplicates)
 110  bool already_here = false;
 111  vpMbtDistanceLine *l;
112 
113  for (std::vector<vpMbtDistanceLine *>::const_iterator it = m_listOfFaceLines.begin(); it != m_listOfFaceLines.end();
114  ++it) {
115  l = *it;
116  if ((samePoint(*(l->p1), P1) && samePoint(*(l->p2), P2)) || (samePoint(*(l->p1), P2) && samePoint(*(l->p2), P1))) {
117  already_here = true;
118  l->addPolygon(polygon);
 119  l->hiddenface = faces;
 120  l->useScanLine = m_useScanLine;
121  }
122  }
123 
124  if (!already_here) {
125  l = new vpMbtDistanceLine;
126 
 127  l->setCameraParameters(m_cam);
 128  l->buildFrom(P1, P2);
129  l->addPolygon(polygon);
 130  l->hiddenface = faces;
 131  l->useScanLine = m_useScanLine;
132 
133  l->setIndex((unsigned int)m_listOfFaceLines.size());
 134  l->setName(name);
 136  if (m_clippingFlag != vpPolygon3D::NO_CLIPPING)
 137  l->getPolygon().setClipping(m_clippingFlag);
135 
138 
 139  if ((m_clippingFlag & vpPolygon3D::NEAR_CLIPPING) == vpPolygon3D::NEAR_CLIPPING)
 140  l->getPolygon().setNearClippingDistance(m_distNearClip);
141 
 142  if ((m_clippingFlag & vpPolygon3D::FAR_CLIPPING) == vpPolygon3D::FAR_CLIPPING)
 143  l->getPolygon().setFarClippingDistance(m_distFarClip);
144 
145  m_listOfFaceLines.push_back(l);
146  }
147 }
148 
149 #ifdef VISP_HAVE_PCL
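// Compute the desired plane feature from an organized PCL point cloud. The
// face is projected with the current pose, its bounding box is clamped to the
// image, and every valid pixel lying inside the projected polygon (or assigned
// to this face by the scan-line renderer) contributes its 3D point. Depending
// on m_featureEstimationMethod the points are stored in a pcl::PointCloud
// (PCL sample-consensus plane fit), in a flat x/y/z vector (robust SVD fit),
// or in a custom layout for the robust 1/Z fit. With SSE2 the custom layout
// interleaves two points at a time (x x, y y, Z Z) so that the estimator can
// load coordinate pairs directly with _mm_loadu_pd.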
 150 bool vpMbtFaceDepthNormal::computeDesiredFeatures(const vpHomogeneousMatrix &cMo, unsigned int width,
 151  unsigned int height,
152  const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud,
153  vpColVector &desired_features, unsigned int stepX,
154  unsigned int stepY
155 #if DEBUG_DISPLAY_DEPTH_NORMAL
156  ,
157  vpImage<unsigned char> &debugImage,
158  std::vector<std::vector<vpImagePoint> > &roiPts_vec
159 #endif
160  , const vpImage<bool> *mask
161 )
162 {
163  m_faceActivated = false;
164 
165  if (width == 0 || height == 0)
166  return false;
167 
168  std::vector<vpImagePoint> roiPts;
169  vpColVector desired_normal(3);
170 
171  computeROI(cMo, width, height, roiPts
172 #if DEBUG_DISPLAY_DEPTH_NORMAL
173  ,
174  roiPts_vec
175 #endif
176  );
177 
178  if (roiPts.size() <= 2) {
179 #ifndef NDEBUG
180  std::cerr << "Error: roiPts.size() <= 2 in computeDesiredFeatures" << std::endl;
181 #endif
182  return false;
183  }
184 
185  vpPolygon polygon_2d(roiPts);
186  vpRect bb = polygon_2d.getBoundingBox();
187 
188  unsigned int top = (unsigned int)std::max(0.0, bb.getTop());
189  unsigned int bottom = (unsigned int)std::min((double)height, std::max(0.0, bb.getBottom()));
190  unsigned int left = (unsigned int)std::max(0.0, bb.getLeft());
191  unsigned int right = (unsigned int)std::min((double)width, std::max(0.0, bb.getRight()));
192 
193  bb.setTop(top);
194  bb.setBottom(bottom);
195  bb.setLeft(left);
196  bb.setRight(right);
197 
198  // Keep only 3D points inside the projected polygon face
199  pcl::PointCloud<pcl::PointXYZ>::Ptr point_cloud_face(new pcl::PointCloud<pcl::PointXYZ>);
200  std::vector<double> point_cloud_face_vec, point_cloud_face_custom;
201 
 202  if (m_featureEstimationMethod == ROBUST_SVD_PLANE_ESTIMATION) {
 203  point_cloud_face_custom.reserve((size_t)(3 * bb.getWidth() * bb.getHeight()));
204  point_cloud_face_vec.reserve((size_t)(3 * bb.getWidth() * bb.getHeight()));
 205  } else if (m_featureEstimationMethod == ROBUST_FEATURE_ESTIMATION) {
 206  point_cloud_face_vec.reserve((size_t)(3 * bb.getWidth() * bb.getHeight()));
 207  } else if (m_featureEstimationMethod == PCL_PLANE_ESTIMATION) {
 208  point_cloud_face->reserve((size_t)(bb.getWidth() * bb.getHeight()));
209  }
210 
211  bool checkSSE2 = vpCPUFeatures::checkSSE2();
212 #if !USE_SSE
213  checkSSE2 = false;
214 #else
215  bool push = false;
216  double prev_x, prev_y, prev_z;
217 #endif
218 
219  double x = 0.0, y = 0.0;
220  for (unsigned int i = top; i < bottom; i += stepY) {
221  for (unsigned int j = left; j < right; j += stepX) {
222  if (vpMeTracker::inMask(mask, i, j) && pcl::isFinite((*point_cloud)(j, i)) && (*point_cloud)(j, i).z > 0 &&
223  (m_useScanLine ? (i < m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs().getHeight() &&
224  j < m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs().getWidth() &&
225  m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs()[i][j] == m_polygon->getIndex())
226  : polygon_2d.isInside(vpImagePoint(i, j)))) {
227 
 228  if (m_featureEstimationMethod == PCL_PLANE_ESTIMATION) {
 229  point_cloud_face->push_back((*point_cloud)(j, i));
 230  } else if (m_featureEstimationMethod == ROBUST_SVD_PLANE_ESTIMATION ||
 231  m_featureEstimationMethod == ROBUST_FEATURE_ESTIMATION) {
 232  point_cloud_face_vec.push_back((*point_cloud)(j, i).x);
233  point_cloud_face_vec.push_back((*point_cloud)(j, i).y);
234  point_cloud_face_vec.push_back((*point_cloud)(j, i).z);
235 
 236  if (m_featureEstimationMethod == ROBUST_FEATURE_ESTIMATION) {
 237  // Add point for custom method for plane equation estimation
 238  vpPixelMeterConversion::convertPoint(m_cam, j, i, x, y);
239 
240  if (checkSSE2) {
241 #if USE_SSE
242  if (!push) {
243  push = true;
244  prev_x = x;
245  prev_y = y;
246  prev_z = (*point_cloud)(j, i).z;
247  } else {
248  push = false;
249  point_cloud_face_custom.push_back(prev_x);
250  point_cloud_face_custom.push_back(x);
251 
252  point_cloud_face_custom.push_back(prev_y);
253  point_cloud_face_custom.push_back(y);
254 
255  point_cloud_face_custom.push_back(prev_z);
256  point_cloud_face_custom.push_back((*point_cloud)(j, i).z);
257  }
258 #endif
259  } else {
260  point_cloud_face_custom.push_back(x);
261  point_cloud_face_custom.push_back(y);
262  point_cloud_face_custom.push_back((*point_cloud)(j, i).z);
263  }
264  }
265  }
266 
267 #if DEBUG_DISPLAY_DEPTH_NORMAL
268  debugImage[i][j] = 255;
269 #endif
270  }
271  }
272  }
273 
274 #if USE_SSE
275  if (checkSSE2 && push) {
276  point_cloud_face_custom.push_back(prev_x);
277  point_cloud_face_custom.push_back(prev_y);
278  point_cloud_face_custom.push_back(prev_z);
279  }
280 #endif
281 
282  if (point_cloud_face->empty() && point_cloud_face_custom.empty() && point_cloud_face_vec.empty()) {
283  return false;
284  }
285 
286  // Face centroid computed by the different methods
287  vpColVector centroid_point(3);
288 
 289  if (m_featureEstimationMethod == PCL_PLANE_ESTIMATION) {
 290  if (!computeDesiredFeaturesPCL(point_cloud_face, desired_features, desired_normal, centroid_point)) {
291  return false;
292  }
 293  } else if (m_featureEstimationMethod == ROBUST_SVD_PLANE_ESTIMATION) {
 294  computeDesiredFeaturesSVD(point_cloud_face_vec, cMo, desired_features, desired_normal, centroid_point);
 295  } else if (m_featureEstimationMethod == ROBUST_FEATURE_ESTIMATION) {
 296  computeDesiredFeaturesRobustFeatures(point_cloud_face_custom, point_cloud_face_vec, cMo, desired_features,
297  desired_normal, centroid_point);
298  } else {
299  throw vpException(vpException::badValue, "Unknown feature estimation method!");
300  }
301 
302  computeDesiredNormalAndCentroid(cMo, desired_normal, centroid_point);
303 
304  m_faceActivated = true;
305 
306  return true;
307 }
308 #endif
309 
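// Same processing as the PCL overload above, but for a point cloud given as a
// vector of (X, Y, Z) column vectors; the points are converted to a
// pcl::PointCloud only when the PCL plane-fit method is selected and ViSP was
// built with PCL support.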
 310 bool vpMbtFaceDepthNormal::computeDesiredFeatures(const vpHomogeneousMatrix &cMo, unsigned int width,
 311  unsigned int height,
312  const std::vector<vpColVector> &point_cloud,
313  vpColVector &desired_features, unsigned int stepX,
314  unsigned int stepY
315 #if DEBUG_DISPLAY_DEPTH_NORMAL
316  ,
317  vpImage<unsigned char> &debugImage,
318  std::vector<std::vector<vpImagePoint> > &roiPts_vec
319 #endif
320  , const vpImage<bool> *mask
321 )
322 {
323  m_faceActivated = false;
324 
325  if (width == 0 || height == 0)
326  return false;
327 
328  std::vector<vpImagePoint> roiPts;
329  vpColVector desired_normal(3);
330 
331  computeROI(cMo, width, height, roiPts
332 #if DEBUG_DISPLAY_DEPTH_NORMAL
333  ,
334  roiPts_vec
335 #endif
336  );
337 
338  if (roiPts.size() <= 2) {
339 #ifndef NDEBUG
340  std::cerr << "Error: roiPts.size() <= 2 in computeDesiredFeatures" << std::endl;
341 #endif
342  return false;
343  }
344 
345  vpPolygon polygon_2d(roiPts);
346  vpRect bb = polygon_2d.getBoundingBox();
347 
348  unsigned int top = (unsigned int)std::max(0.0, bb.getTop());
349  unsigned int bottom = (unsigned int)std::min((double)height, std::max(0.0, bb.getBottom()));
350  unsigned int left = (unsigned int)std::max(0.0, bb.getLeft());
351  unsigned int right = (unsigned int)std::min((double)width, std::max(0.0, bb.getRight()));
352 
353  bb.setTop(top);
354  bb.setBottom(bottom);
355  bb.setLeft(left);
356  bb.setRight(right);
357 
358  // Keep only 3D points inside the projected polygon face
359  std::vector<double> point_cloud_face, point_cloud_face_custom;
360 
361  point_cloud_face.reserve((size_t)(3 * bb.getWidth() * bb.getHeight()));
 362  if (m_featureEstimationMethod == ROBUST_FEATURE_ESTIMATION) {
 363  point_cloud_face_custom.reserve((size_t)(3 * bb.getWidth() * bb.getHeight()));
364  }
365 
366  bool checkSSE2 = vpCPUFeatures::checkSSE2();
367 #if !USE_SSE
368  checkSSE2 = false;
369 #else
370  bool push = false;
371  double prev_x, prev_y, prev_z;
372 #endif
373 
374  double x = 0.0, y = 0.0;
375  for (unsigned int i = top; i < bottom; i += stepY) {
376  for (unsigned int j = left; j < right; j += stepX) {
377  if (vpMeTracker::inMask(mask, i, j) && point_cloud[i * width + j][2] > 0 &&
378  (m_useScanLine ? (i < m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs().getHeight() &&
379  j < m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs().getWidth() &&
380  m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs()[i][j] == m_polygon->getIndex())
381  : polygon_2d.isInside(vpImagePoint(i, j)))) {
382  // Add point
383  point_cloud_face.push_back(point_cloud[i * width + j][0]);
384  point_cloud_face.push_back(point_cloud[i * width + j][1]);
385  point_cloud_face.push_back(point_cloud[i * width + j][2]);
386 
 387  if (m_featureEstimationMethod == ROBUST_FEATURE_ESTIMATION) {
 388  // Add point for custom method for plane equation estimation
 389  vpPixelMeterConversion::convertPoint(m_cam, j, i, x, y);
390 
391  if (checkSSE2) {
392 #if USE_SSE
393  if (!push) {
394  push = true;
395  prev_x = x;
396  prev_y = y;
397  prev_z = point_cloud[i * width + j][2];
398  } else {
399  push = false;
400  point_cloud_face_custom.push_back(prev_x);
401  point_cloud_face_custom.push_back(x);
402 
403  point_cloud_face_custom.push_back(prev_y);
404  point_cloud_face_custom.push_back(y);
405 
406  point_cloud_face_custom.push_back(prev_z);
407  point_cloud_face_custom.push_back(point_cloud[i * width + j][2]);
408  }
409 #endif
410  } else {
411  point_cloud_face_custom.push_back(x);
412  point_cloud_face_custom.push_back(y);
413  point_cloud_face_custom.push_back(point_cloud[i * width + j][2]);
414  }
415  }
416 
417 #if DEBUG_DISPLAY_DEPTH_NORMAL
418  debugImage[i][j] = 255;
419 #endif
420  }
421  }
422  }
423 
424 #if USE_SSE
425  if (checkSSE2 && push) {
426  point_cloud_face_custom.push_back(prev_x);
427  point_cloud_face_custom.push_back(prev_y);
428  point_cloud_face_custom.push_back(prev_z);
429  }
430 #endif
431 
432  if (point_cloud_face.empty() && point_cloud_face_custom.empty()) {
433  return false;
434  }
435 
436  // Face centroid computed by the different methods
437  vpColVector centroid_point(3);
438 
439 #ifdef VISP_HAVE_PCL
 440  if (m_featureEstimationMethod == PCL_PLANE_ESTIMATION) {
 441  pcl::PointCloud<pcl::PointXYZ>::Ptr point_cloud_face_pcl(new pcl::PointCloud<pcl::PointXYZ>);
442  point_cloud_face_pcl->reserve(point_cloud_face.size() / 3);
443 
444  for (size_t i = 0; i < point_cloud_face.size() / 3; i++) {
445  point_cloud_face_pcl->push_back(
446  pcl::PointXYZ(point_cloud_face[3 * i], point_cloud_face[3 * i + 1], point_cloud_face[3 * i + 2]));
447  }
448 
449  computeDesiredFeaturesPCL(point_cloud_face_pcl, desired_features, desired_normal, centroid_point);
450  } else
451 #endif
 452  if (m_featureEstimationMethod == ROBUST_SVD_PLANE_ESTIMATION) {
 453  computeDesiredFeaturesSVD(point_cloud_face, cMo, desired_features, desired_normal, centroid_point);
 454  } else if (m_featureEstimationMethod == ROBUST_FEATURE_ESTIMATION) {
 455  computeDesiredFeaturesRobustFeatures(point_cloud_face_custom, point_cloud_face, cMo, desired_features,
456  desired_normal, centroid_point);
457  } else {
458  throw vpException(vpException::badValue, "Unknown feature estimation method!");
459  }
460 
461  computeDesiredNormalAndCentroid(cMo, desired_normal, centroid_point);
462 
463  m_faceActivated = true;
464 
465  return true;
466 }
467 
468 #ifdef VISP_HAVE_PCL
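// Fit a plane to the face points with PCL sample-consensus segmentation
// (method m_pclPlaneEstimationMethod, e.g. SAC_MSAC), keep the inliers and
// compute their centroid. For the plane n.X + D = 0 the desired feature is
// (A, B, C) = -n / D, and the normal is re-oriented so that it points towards
// the camera.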
469 bool vpMbtFaceDepthNormal::computeDesiredFeaturesPCL(const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud_face,
470  vpColVector &desired_features, vpColVector &desired_normal,
471  vpColVector &centroid_point)
472 {
473  try {
474  // Compute plane equation for this subset of point cloud
475  pcl::ModelCoefficients::Ptr coefficients(new pcl::ModelCoefficients);
476  pcl::PointIndices::Ptr inliers(new pcl::PointIndices);
477  // Create the segmentation object
478  pcl::SACSegmentation<pcl::PointXYZ> seg;
479  // Optional
480  seg.setOptimizeCoefficients(true);
481  // Mandatory
482  seg.setModelType(pcl::SACMODEL_PLANE);
483  seg.setMethodType(m_pclPlaneEstimationMethod);
484  seg.setDistanceThreshold(m_pclPlaneEstimationRansacThreshold);
485  seg.setMaxIterations(m_pclPlaneEstimationRansacMaxIter);
486 
487  seg.setInputCloud(point_cloud_face);
488  seg.segment(*inliers, *coefficients);
489 
490  pcl::PointCloud<pcl::PointXYZ>::Ptr point_cloud_face_extracted(new pcl::PointCloud<pcl::PointXYZ>);
491  // Create the filtering object
492  pcl::ExtractIndices<pcl::PointXYZ> extract;
493 
494  // Extract the inliers
495  extract.setInputCloud(point_cloud_face);
496  extract.setIndices(inliers);
497  extract.setNegative(false);
498  extract.filter(*point_cloud_face_extracted);
499 
500 #if PCL_VERSION_COMPARE(>=, 1, 8, 0)
501  pcl::PointXYZ centroid_point_pcl;
502  if (pcl::computeCentroid(*point_cloud_face_extracted, centroid_point_pcl)) {
503  pcl::PointXYZ face_normal;
504  computeNormalVisibility(coefficients->values[0], coefficients->values[1], coefficients->values[2],
505  centroid_point_pcl, face_normal);
506 
507  desired_features.resize(3, false);
508  desired_features[0] = -coefficients->values[0] / coefficients->values[3];
509  desired_features[1] = -coefficients->values[1] / coefficients->values[3];
510  desired_features[2] = -coefficients->values[2] / coefficients->values[3];
511 
512  desired_normal[0] = face_normal.x;
513  desired_normal[1] = face_normal.y;
514  desired_normal[2] = face_normal.z;
515 
516  centroid_point[0] = centroid_point_pcl.x;
517  centroid_point[1] = centroid_point_pcl.y;
518  centroid_point[2] = centroid_point_pcl.z;
519  } else {
520  std::cerr << "Cannot compute centroid!" << std::endl;
521  return false;
522  }
523 #else
524  std::cerr << "Cannot compute centroid using PCL " << PCL_VERSION_PRETTY << "!" << std::endl;
525  return false;
526 #endif
527  } catch (const pcl::PCLException &e) {
528  std::cerr << "Catch a PCL exception: " << e.what() << std::endl;
529  throw;
530  }
531 
532  return true;
533 }
534 #endif
535 
536 void vpMbtFaceDepthNormal::computeDesiredFeaturesRobustFeatures(const std::vector<double> &point_cloud_face_custom,
537  const std::vector<double> &point_cloud_face,
538  const vpHomogeneousMatrix &cMo,
539  vpColVector &desired_features,
540  vpColVector &desired_normal,
541  vpColVector &centroid_point)
542 {
543  std::vector<double> weights;
544  double den = 0.0;
545  estimateFeatures(point_cloud_face_custom, cMo, desired_features, weights);
546 
547  // Compute face centroid
548  for (size_t i = 0; i < point_cloud_face.size() / 3; i++) {
549  centroid_point[0] += weights[i] * point_cloud_face[3 * i];
550  centroid_point[1] += weights[i] * point_cloud_face[3 * i + 1];
551  centroid_point[2] += weights[i] * point_cloud_face[3 * i + 2];
552 
553  den += weights[i];
554  }
555 
556  centroid_point[0] /= den;
557  centroid_point[1] /= den;
558  centroid_point[2] /= den;
559 
560  computeNormalVisibility(-desired_features[0], -desired_features[1], -desired_features[2], centroid_point,
561  desired_normal);
562 }
563 
564 void vpMbtFaceDepthNormal::computeDesiredFeaturesSVD(const std::vector<double> &point_cloud_face,
565  const vpHomogeneousMatrix &cMo, vpColVector &desired_features,
566  vpColVector &desired_normal, vpColVector &centroid_point)
567 {
568  vpColVector plane_equation_SVD;
569  estimatePlaneEquationSVD(point_cloud_face, cMo, plane_equation_SVD, centroid_point);
570 
571  desired_features.resize(3, false);
572  desired_features[0] = -plane_equation_SVD[0] / plane_equation_SVD[3];
573  desired_features[1] = -plane_equation_SVD[1] / plane_equation_SVD[3];
574  desired_features[2] = -plane_equation_SVD[2] / plane_equation_SVD[3];
575 
576  computeNormalVisibility(-desired_features[0], -desired_features[1], -desired_features[2], centroid_point,
577  desired_normal);
578 }
579 
 580 void vpMbtFaceDepthNormal::computeDesiredNormalAndCentroid(const vpHomogeneousMatrix &cMo,
 581  const vpColVector &desired_normal,
582  const vpColVector &centroid_point)
583 {
584  // Compute desired centroid in the object frame
585  vpColVector centroid_cam(4);
586  centroid_cam[0] = centroid_point[0];
587  centroid_cam[1] = centroid_point[1];
588  centroid_cam[2] = centroid_point[2];
589  centroid_cam[3] = 1;
590 
591  vpColVector centroid_obj = cMo.inverse() * centroid_cam;
592  m_faceDesiredCentroid.setWorldCoordinates(centroid_obj[0], centroid_obj[1], centroid_obj[2]);
593 
594  // Compute desired face normal in the object frame
595  vpColVector face_normal_cam(4);
596  face_normal_cam[0] = desired_normal[0];
597  face_normal_cam[1] = desired_normal[1];
598  face_normal_cam[2] = desired_normal[2];
599  face_normal_cam[3] = 1;
600 
601  vpColVector face_normal_obj = cMo.inverse() * face_normal_cam;
602  m_faceDesiredNormal.setWorldCoordinates(face_normal_obj[0], face_normal_obj[1], face_normal_obj[2]);
603 }
604 
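// Centroid of a planar 3D polygon using the shoelace formula applied to the
// projections of the polygon onto the X-Y and X-Z planes. The X coordinate is
// taken from whichever projection has the larger signed area, presumably to
// avoid dividing by a (near-)zero area when the polygon is almost parallel to
// one of the two planes.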
605 bool vpMbtFaceDepthNormal::computePolygonCentroid(const std::vector<vpPoint> &points_, vpPoint &centroid)
606 {
607  if (points_.empty()) {
608  return false;
609  }
610 
611  if (points_.size() < 2) {
612  centroid = points_[0];
613  return true;
614  }
615 
616  std::vector<vpPoint> points = points_;
617  points.push_back(points_.front());
618 
619  double A1 = 0.0, A2 = 0.0, c_x1 = 0.0, c_x2 = 0.0, c_y = 0.0, c_z = 0.0;
620 
621  for (size_t i = 0; i < points.size() - 1; i++) {
622  // projection onto xy plane
623  c_x1 += (points[i].get_X() + points[i + 1].get_X()) *
624  (points[i].get_X() * points[i + 1].get_Y() - points[i + 1].get_X() * points[i].get_Y());
625  c_y += (points[i].get_Y() + points[i + 1].get_Y()) *
626  (points[i].get_X() * points[i + 1].get_Y() - points[i + 1].get_X() * points[i].get_Y());
627  A1 += points[i].get_X() * points[i + 1].get_Y() - points[i + 1].get_X() * points[i].get_Y();
628 
629  // projection onto xz plane
630  c_x2 += (points[i].get_X() + points[i + 1].get_X()) *
631  (points[i].get_X() * points[i + 1].get_Z() - points[i + 1].get_X() * points[i].get_Z());
632  c_z += (points[i].get_Z() + points[i + 1].get_Z()) *
633  (points[i].get_X() * points[i + 1].get_Z() - points[i + 1].get_X() * points[i].get_Z());
634  A2 += points[i].get_X() * points[i + 1].get_Z() - points[i + 1].get_X() * points[i].get_Z();
635  }
636 
637  c_x1 /= 3.0 * A1;
638  c_y /= 3.0 * A1;
639  c_x2 /= 3.0 * A2;
640  c_z /= 3.0 * A2;
641 
642  if (A1 > A2) {
643  centroid.set_X(c_x1);
644  } else {
645  centroid.set_X(c_x2);
646  }
647 
648  centroid.set_Y(c_y);
649  centroid.set_Z(c_z);
650 
651  return true;
652 }
653 
654 void vpMbtFaceDepthNormal::computeROI(const vpHomogeneousMatrix &cMo, unsigned int width,
655  unsigned int height, std::vector<vpImagePoint> &roiPts
656 #if DEBUG_DISPLAY_DEPTH_NORMAL
657  ,
658  std::vector<std::vector<vpImagePoint> > &roiPts_vec
659 #endif
660 )
661 {
662  if (m_useScanLine || m_clippingFlag > 2)
663  m_cam.computeFov(width, height);
664 
665  if (m_useScanLine) {
666  for (std::vector<PolygonLine>::iterator it = m_polygonLines.begin(); it != m_polygonLines.end(); ++it) {
667  it->m_p1->changeFrame(cMo);
668  it->m_p2->changeFrame(cMo);
669 
670  vpImagePoint ip1, ip2;
671 
672  it->m_poly.changeFrame(cMo);
673  it->m_poly.computePolygonClipped(m_cam);
674 
675  if (it->m_poly.polyClipped.size() == 2 &&
676  ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::NEAR_CLIPPING) == 0) &&
677  ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::FAR_CLIPPING) == 0) &&
678  ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::DOWN_CLIPPING) == 0) &&
679  ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::UP_CLIPPING) == 0) &&
680  ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::LEFT_CLIPPING) == 0) &&
681  ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::RIGHT_CLIPPING) == 0)) {
682 
683  std::vector<std::pair<vpPoint, vpPoint> > linesLst;
684  m_hiddenFace->computeScanLineQuery(it->m_poly.polyClipped[0].first, it->m_poly.polyClipped[1].first, linesLst,
685  true);
686 
687  for (unsigned int i = 0; i < linesLst.size(); i++) {
688  linesLst[i].first.project();
689  linesLst[i].second.project();
690 
691  vpMeterPixelConversion::convertPoint(m_cam, linesLst[i].first.get_x(), linesLst[i].first.get_y(), ip1);
692  vpMeterPixelConversion::convertPoint(m_cam, linesLst[i].second.get_x(), linesLst[i].second.get_y(), ip2);
693 
694  it->m_imPt1 = ip1;
695  it->m_imPt2 = ip2;
696 
697  roiPts.push_back(ip1);
698  roiPts.push_back(ip2);
699 
700 #if DEBUG_DISPLAY_DEPTH_NORMAL
701  std::vector<vpImagePoint> roiPts_;
702  roiPts_.push_back(ip1);
703  roiPts_.push_back(ip2);
704  roiPts_vec.push_back(roiPts_);
705 #endif
706  }
707  }
708  }
709  } else {
710  // Get polygon clipped
711  m_polygon->getRoiClipped(m_cam, roiPts, cMo);
712 
713 #if DEBUG_DISPLAY_DEPTH_NORMAL
714  roiPts_vec.push_back(roiPts);
715 #endif
716  }
717 }
718 
 719 void vpMbtFaceDepthNormal::computeVisibility() { m_isVisible = m_polygon->isVisible(); }
 720 
 721 void vpMbtFaceDepthNormal::computeVisibilityDisplay()
 722 {
723  // Compute lines visibility, only for display
724  vpMbtDistanceLine *line;
725  for (std::vector<vpMbtDistanceLine *>::const_iterator it = m_listOfFaceLines.begin(); it != m_listOfFaceLines.end();
726  ++it) {
727  line = *it;
728  bool isvisible = false;
729 
730  for (std::list<int>::const_iterator itindex = line->Lindex_polygon.begin(); itindex != line->Lindex_polygon.end();
731  ++itindex) {
732  int index = *itindex;
733  if (index == -1) {
734  isvisible = true;
735  } else {
736  if (line->hiddenface->isVisible((unsigned int)index)) {
737  isvisible = true;
738  }
739  }
740  }
741 
 742  // If the line does not belong to any face, it is always visible
743  if (line->Lindex_polygon.empty())
744  isvisible = true; // Not sure that this can occur
745 
746  if (isvisible) {
747  line->setVisible(true);
748  } else {
749  line->setVisible(false);
750  }
751  }
752 }
753 
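// Orient a face normal so that it points towards the camera: e4 is the unit
// vector going from the face centroid to the camera origin, the input normal
// is kept when the angle between the two vectors is below 90 degrees and
// flipped otherwise. The centroid is either the geometric (shoelace) centroid
// of the clipped polygon or the mean of its vertices, depending on
// m_faceCentroidMethod.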
754 void vpMbtFaceDepthNormal::computeNormalVisibility(double nx, double ny, double nz,
755  const vpHomogeneousMatrix &cMo, const vpCameraParameters &camera,
756  vpColVector &correct_normal, vpPoint &centroid)
757 {
758  vpColVector faceNormal(3);
759  faceNormal[0] = nx;
760  faceNormal[1] = ny;
761  faceNormal[2] = nz;
762  faceNormal.normalize();
763 
764  // Get polygon clipped
765  std::vector<vpImagePoint> roiPts;
766  m_polygon->getRoiClipped(camera, roiPts, cMo);
767 
768  std::vector<vpPoint> polyPts;
769  m_polygon->getPolygonClipped(polyPts);
770 
771  vpColVector e4(3);
 772  if (m_faceCentroidMethod == GEOMETRIC_CENTROID) {
 773  computePolygonCentroid(polyPts, centroid);
774  centroid.project();
775 
776  e4[0] = -centroid.get_X();
777  e4[1] = -centroid.get_Y();
778  e4[2] = -centroid.get_Z();
779  e4.normalize();
780  } else {
781  double centroid_x = 0.0;
782  double centroid_y = 0.0;
783  double centroid_z = 0.0;
784 
785  for (size_t i = 0; i < polyPts.size(); i++) {
786  centroid_x += polyPts[i].get_X();
787  centroid_y += polyPts[i].get_Y();
788  centroid_z += polyPts[i].get_Z();
789  }
790 
791  centroid_x /= polyPts.size();
792  centroid_y /= polyPts.size();
793  centroid_z /= polyPts.size();
794 
795  e4[0] = -centroid_x;
796  e4[1] = -centroid_y;
797  e4[2] = -centroid_z;
798  e4.normalize();
799 
800  centroid.set_X(centroid_x);
801  centroid.set_Y(centroid_y);
802  centroid.set_Z(centroid_z);
803  }
804 
805  correct_normal.resize(3, false);
806  double angle = acos(vpColVector::dotProd(e4, faceNormal));
807  if (angle < M_PI_2) {
808  correct_normal = faceNormal;
809  } else {
810  correct_normal[0] = -faceNormal[0];
811  correct_normal[1] = -faceNormal[1];
812  correct_normal[2] = -faceNormal[2];
813  }
814 }
815 
816 #ifdef VISP_HAVE_PCL
817 void vpMbtFaceDepthNormal::computeNormalVisibility(float nx, float ny, float nz,
818  const pcl::PointXYZ &centroid_point, pcl::PointXYZ &face_normal)
819 {
820  vpColVector faceNormal(3);
821  faceNormal[0] = nx;
822  faceNormal[1] = ny;
823  faceNormal[2] = nz;
824  faceNormal.normalize();
825 
826  vpColVector e4(3);
827  e4[0] = -centroid_point.x;
828  e4[1] = -centroid_point.y;
829  e4[2] = -centroid_point.z;
830  e4.normalize();
831 
832  double angle = acos(vpColVector::dotProd(e4, faceNormal));
833  if (angle < M_PI_2) {
834  face_normal = pcl::PointXYZ(faceNormal[0], faceNormal[1], faceNormal[2]);
835  } else {
836  face_normal = pcl::PointXYZ(-faceNormal[0], -faceNormal[1], -faceNormal[2]);
837  }
838 }
839 #endif
840 
841 void vpMbtFaceDepthNormal::computeNormalVisibility(double nx, double ny, double nz,
842  const vpColVector &centroid_point, vpColVector &face_normal)
843 {
844  face_normal.resize(3, false);
845  face_normal[0] = nx;
846  face_normal[1] = ny;
847  face_normal[2] = nz;
848  face_normal.normalize();
849 
850  vpColVector e4 = -centroid_point;
851  e4.normalize();
852 
853  double angle = acos(vpColVector::dotProd(e4, face_normal));
854  if (angle >= M_PI_2) {
855  face_normal[0] = -face_normal[0];
856  face_normal[1] = -face_normal[1];
857  face_normal[2] = -face_normal[2];
858  }
859 }
860 
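// Interaction matrix of the plane feature theta = (A, B, C) = -n / D, where
// n = (ux, uy, uz) is the unit normal and D the offset of the face plane
// expressed in the camera frame. Each row relates the variation of one
// component to the camera velocity; for instance the first row is
//   L_A = [ ux*ux/D^2  ux*uy/D^2  ux*uz/D^2  0  uz/D  -uy/D ]
// exactly as filled below.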
 861 void vpMbtFaceDepthNormal::computeInteractionMatrix(const vpHomogeneousMatrix &cMo, vpMatrix &L, vpColVector &features)
 862 {
863  L.resize(3, 6, false, false);
864 
 865  // Transform the plane equation for the current pose
 866  m_planeCamera = m_planeObject;
 867  m_planeCamera.changeFrame(cMo);
868 
869  double ux = m_planeCamera.getA();
870  double uy = m_planeCamera.getB();
871  double uz = m_planeCamera.getC();
872  double D = m_planeCamera.getD();
873  double D2 = D * D;
874 
875  // Features
876  features.resize(3, false);
877  features[0] = -ux / D;
878  features[1] = -uy / D;
879  features[2] = -uz / D;
880 
881  // L_A
882  L[0][0] = ux * ux / D2;
883  L[0][1] = ux * uy / D2;
884  L[0][2] = ux * uz / D2;
885  L[0][3] = 0.0;
886  L[0][4] = uz / D;
887  L[0][5] = -uy / D;
888 
889  // L_B
890  L[1][0] = ux * uy / D2;
891  L[1][1] = uy * uy / D2;
892  L[1][2] = uy * uz / D2;
893  L[1][3] = -uz / D;
894  L[1][4] = 0.0;
895  L[1][5] = ux / D;
896 
897  // L_C
898  L[2][0] = ux * uz / D2;
899  L[2][1] = uy * uz / D2;
900  L[2][2] = uz * uz / D2;
901  L[2][3] = uy / D;
902  L[2][4] = -ux / D;
903  L[2][5] = 0.0;
904 }
905 
 906 void vpMbtFaceDepthNormal::display(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cMo,
 907  const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
908  bool displayFullModel)
909 {
910  std::vector<std::vector<double> > models = getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);
911 
912  for (size_t i = 0; i < models.size(); i++) {
913  vpImagePoint ip1(models[i][1], models[i][2]);
914  vpImagePoint ip2(models[i][3], models[i][4]);
915  vpDisplay::displayLine(I, ip1, ip2, col, thickness);
916  }
917 }
918 
 919 void vpMbtFaceDepthNormal::display(const vpImage<vpRGBa> &I, const vpHomogeneousMatrix &cMo,
 920  const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
921  bool displayFullModel)
922 {
923  std::vector<std::vector<double> > models = getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);
924 
925  for (size_t i = 0; i < models.size(); i++) {
926  vpImagePoint ip1(models[i][1], models[i][2]);
927  vpImagePoint ip2(models[i][3], models[i][4]);
928  vpDisplay::displayLine(I, ip1, ip2, col, thickness);
929  }
930 }
931 
 932 void vpMbtFaceDepthNormal::displayFeature(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cMo,
 933  const vpCameraParameters &cam, double scale,
934  unsigned int thickness)
935 {
 936  if (m_faceActivated && m_isTrackedDepthNormalFace && m_isVisible) {
 937  // Desired feature
938  vpPoint pt_centroid = m_faceDesiredCentroid;
939  pt_centroid.changeFrame(cMo);
940  pt_centroid.project();
941 
942  vpImagePoint im_centroid;
943  vpMeterPixelConversion::convertPoint(cam, pt_centroid.get_x(), pt_centroid.get_y(), im_centroid);
944 
945  vpPoint pt_normal = m_faceDesiredNormal;
946  pt_normal.changeFrame(cMo);
947  pt_normal.project();
948 
949  vpPoint pt_extremity;
950  pt_extremity.set_X(pt_centroid.get_X() + pt_normal.get_X() * scale);
951  pt_extremity.set_Y(pt_centroid.get_Y() + pt_normal.get_Y() * scale);
952  pt_extremity.set_Z(pt_centroid.get_Z() + pt_normal.get_Z() * scale);
953  pt_extremity.project();
954 
955  vpImagePoint im_extremity;
956  vpMeterPixelConversion::convertPoint(cam, pt_extremity.get_x(), pt_extremity.get_y(), im_extremity);
957 
958  vpDisplay::displayArrow(I, im_centroid, im_extremity, vpColor::blue, 4, 2, thickness);
959 
960  // Current feature
 961  // Transform the plane equation for the current pose
 962  m_planeCamera = m_planeObject;
 963  m_planeCamera.changeFrame(cMo);
964 
965  double ux = m_planeCamera.getA();
966  double uy = m_planeCamera.getB();
967  double uz = m_planeCamera.getC();
968 
969  vpColVector correct_normal;
970  vpCameraParameters cam_copy = cam;
971  cam_copy.computeFov(I.getWidth(), I.getHeight());
972  computeNormalVisibility(ux, uy, uz, cMo, cam_copy, correct_normal, pt_centroid);
973 
974  pt_centroid.project();
975  vpMeterPixelConversion::convertPoint(cam_copy, pt_centroid.get_x(), pt_centroid.get_y(), im_centroid);
976 
977  pt_extremity.set_X(pt_centroid.get_X() + correct_normal[0] * scale);
978  pt_extremity.set_Y(pt_centroid.get_Y() + correct_normal[1] * scale);
979  pt_extremity.set_Z(pt_centroid.get_Z() + correct_normal[2] * scale);
980  pt_extremity.project();
981 
982  vpMeterPixelConversion::convertPoint(cam_copy, pt_extremity.get_x(), pt_extremity.get_y(), im_extremity);
983 
984  vpDisplay::displayArrow(I, im_centroid, im_extremity, vpColor::red, 4, 2, thickness);
985  }
986 }
987 
 988 void vpMbtFaceDepthNormal::displayFeature(const vpImage<vpRGBa> &I, const vpHomogeneousMatrix &cMo,
 989  const vpCameraParameters &cam, double scale,
990  unsigned int thickness)
991 {
 992  if (m_faceActivated && m_isTrackedDepthNormalFace && m_isVisible) {
 993  // Desired feature
994  vpPoint pt_centroid = m_faceDesiredCentroid;
995  pt_centroid.changeFrame(cMo);
996  pt_centroid.project();
997 
998  vpImagePoint im_centroid;
999  vpMeterPixelConversion::convertPoint(cam, pt_centroid.get_x(), pt_centroid.get_y(), im_centroid);
1000 
1001  vpPoint pt_normal = m_faceDesiredNormal;
1002  pt_normal.changeFrame(cMo);
1003  pt_normal.project();
1004 
1005  vpPoint pt_extremity;
1006  pt_extremity.set_X(pt_centroid.get_X() + pt_normal.get_X() * scale);
1007  pt_extremity.set_Y(pt_centroid.get_Y() + pt_normal.get_Y() * scale);
1008  pt_extremity.set_Z(pt_centroid.get_Z() + pt_normal.get_Z() * scale);
1009  pt_extremity.project();
1010 
1011  vpImagePoint im_extremity;
1012  vpMeterPixelConversion::convertPoint(cam, pt_extremity.get_x(), pt_extremity.get_y(), im_extremity);
1013 
1014  vpDisplay::displayArrow(I, im_centroid, im_extremity, vpColor::blue, 4, 2, thickness);
1015 
1016  // Current feature
 1017  // Transform the plane equation for the current pose
 1018  m_planeCamera = m_planeObject;
 1019  m_planeCamera.changeFrame(cMo);
1020 
1021  double ux = m_planeCamera.getA();
1022  double uy = m_planeCamera.getB();
1023  double uz = m_planeCamera.getC();
1024 
1025  vpColVector correct_normal;
1026  vpCameraParameters cam_copy = cam;
1027  cam_copy.computeFov(I.getWidth(), I.getHeight());
1028  computeNormalVisibility(ux, uy, uz, cMo, cam_copy, correct_normal, pt_centroid);
1029 
1030  pt_centroid.project();
1031  vpMeterPixelConversion::convertPoint(cam_copy, pt_centroid.get_x(), pt_centroid.get_y(), im_centroid);
1032 
1033  pt_extremity.set_X(pt_centroid.get_X() + correct_normal[0] * scale);
1034  pt_extremity.set_Y(pt_centroid.get_Y() + correct_normal[1] * scale);
1035  pt_extremity.set_Z(pt_centroid.get_Z() + correct_normal[2] * scale);
1036  pt_extremity.project();
1037 
1038  vpMeterPixelConversion::convertPoint(cam_copy, pt_extremity.get_x(), pt_extremity.get_y(), im_extremity);
1039 
1040  vpDisplay::displayArrow(I, im_centroid, im_extremity, vpColor::red, 4, 2, thickness);
1041  }
1042 }
1043 
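// Robust estimation of the plane feature (A, B, C) such that
//   1/Z = A*x + B*y + C
// holds for every face point with normalized image coordinates (x, y) and
// depth Z. This is an iteratively re-weighted least-squares loop: residuals
// r_i = A*x_i + B*y_i + C - 1/Z_i feed a Tukey M-estimator, the weighted 3x3
// normal equations are solved with a closed-form inverse, and the loop stops
// when the mean squared error no longer changes (or after max_iter
// iterations). The SSE2 branch processes two points per iteration, which is
// why the custom point layout stores coordinates in interleaved pairs.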
1044 void vpMbtFaceDepthNormal::estimateFeatures(const std::vector<double> &point_cloud_face, const vpHomogeneousMatrix &cMo,
1045  vpColVector &x_estimated, std::vector<double> &w)
1046 {
1047  vpMbtTukeyEstimator<double> tukey_robust;
1048  std::vector<double> residues(point_cloud_face.size() / 3);
1049 
1050  w.resize(point_cloud_face.size() / 3, 1.0);
1051 
1052  unsigned int max_iter = 30, iter = 0;
1053  double error = 0.0, prev_error = -1.0;
1054  double A = 0.0, B = 0.0, C = 0.0;
1055 
1056  Mat33<double> ATA_3x3;
1057 
1058  bool checkSSE2 = vpCPUFeatures::checkSSE2();
1059 #if !USE_SSE
1060  checkSSE2 = false;
1061 #endif
1062 
1063  if (checkSSE2) {
1064 #if USE_SSE
1065  while (std::fabs(error - prev_error) > 1e-6 && (iter < max_iter)) {
1066  if (iter == 0) {
 1067  // Transform the plane equation for the current pose
 1068  m_planeCamera = m_planeObject;
 1069  m_planeCamera.changeFrame(cMo);
1070 
1071  double ux = m_planeCamera.getA();
1072  double uy = m_planeCamera.getB();
1073  double uz = m_planeCamera.getC();
1074  double D = m_planeCamera.getD();
1075 
1076  // Features
1077  A = -ux / D;
1078  B = -uy / D;
1079  C = -uz / D;
1080 
1081  size_t cpt = 0;
1082  if (point_cloud_face.size() / 3 >= 2) {
1083  const double *ptr_point_cloud = &point_cloud_face[0];
1084  const __m128d vA = _mm_set1_pd(A);
1085  const __m128d vB = _mm_set1_pd(B);
1086  const __m128d vC = _mm_set1_pd(C);
1087  const __m128d vones = _mm_set1_pd(1.0);
1088 
1089  double *ptr_residues = &residues[0];
1090 
1091  for (; cpt <= point_cloud_face.size() - 6; cpt += 6, ptr_point_cloud += 6, ptr_residues += 2) {
1092  const __m128d vxi = _mm_loadu_pd(ptr_point_cloud);
1093  const __m128d vyi = _mm_loadu_pd(ptr_point_cloud + 2);
1094  const __m128d vZi = _mm_loadu_pd(ptr_point_cloud + 4);
1095  const __m128d vinvZi = _mm_div_pd(vones, vZi);
1096 
1097  const __m128d tmp =
1098  _mm_add_pd(_mm_add_pd(_mm_mul_pd(vA, vxi), _mm_mul_pd(vB, vyi)), _mm_sub_pd(vC, vinvZi));
1099  _mm_storeu_pd(ptr_residues, tmp);
1100  }
1101  }
1102 
1103  for (; cpt < point_cloud_face.size(); cpt += 3) {
1104  double xi = point_cloud_face[cpt];
1105  double yi = point_cloud_face[cpt + 1];
1106  double Zi = point_cloud_face[cpt + 2];
1107 
1108  residues[cpt / 3] = (A * xi + B * yi + C - 1 / Zi);
1109  }
1110  }
1111 
1112  tukey_robust.MEstimator(residues, w, 1e-2);
1113 
1114  __m128d vsum_wi2_xi2 = _mm_setzero_pd();
1115  __m128d vsum_wi2_yi2 = _mm_setzero_pd();
1116  __m128d vsum_wi2 = _mm_setzero_pd();
1117  __m128d vsum_wi2_xi_yi = _mm_setzero_pd();
1118  __m128d vsum_wi2_xi = _mm_setzero_pd();
1119  __m128d vsum_wi2_yi = _mm_setzero_pd();
1120 
1121  __m128d vsum_wi2_xi_Zi = _mm_setzero_pd();
1122  __m128d vsum_wi2_yi_Zi = _mm_setzero_pd();
1123  __m128d vsum_wi2_Zi = _mm_setzero_pd();
1124 
1125  // Estimate A, B, C
1126  size_t cpt = 0;
1127  if (point_cloud_face.size() / 3 >= 2) {
1128  const double *ptr_point_cloud = &point_cloud_face[0];
1129  double *ptr_w = &w[0];
1130 
1131  const __m128d vones = _mm_set1_pd(1.0);
1132 
1133  for (; cpt <= point_cloud_face.size() - 6; cpt += 6, ptr_point_cloud += 6, ptr_w += 2) {
1134  const __m128d vwi2 = _mm_mul_pd(_mm_loadu_pd(ptr_w), _mm_loadu_pd(ptr_w));
1135 
1136  const __m128d vxi = _mm_loadu_pd(ptr_point_cloud);
1137  const __m128d vyi = _mm_loadu_pd(ptr_point_cloud + 2);
1138  const __m128d vZi = _mm_loadu_pd(ptr_point_cloud + 4);
1139  const __m128d vinvZi = _mm_div_pd(vones, vZi);
1140 
1141  vsum_wi2_xi2 = _mm_add_pd(vsum_wi2_xi2, _mm_mul_pd(vwi2, _mm_mul_pd(vxi, vxi)));
1142  vsum_wi2_yi2 = _mm_add_pd(vsum_wi2_yi2, _mm_mul_pd(vwi2, _mm_mul_pd(vyi, vyi)));
1143  vsum_wi2 = _mm_add_pd(vsum_wi2, vwi2);
1144  vsum_wi2_xi_yi = _mm_add_pd(vsum_wi2_xi_yi, _mm_mul_pd(vwi2, _mm_mul_pd(vxi, vyi)));
1145  vsum_wi2_xi = _mm_add_pd(vsum_wi2_xi, _mm_mul_pd(vwi2, vxi));
1146  vsum_wi2_yi = _mm_add_pd(vsum_wi2_yi, _mm_mul_pd(vwi2, vyi));
1147 
1148  const __m128d vwi2_invZi = _mm_mul_pd(vwi2, vinvZi);
1149  vsum_wi2_xi_Zi = _mm_add_pd(vsum_wi2_xi_Zi, _mm_mul_pd(vxi, vwi2_invZi));
1150  vsum_wi2_yi_Zi = _mm_add_pd(vsum_wi2_yi_Zi, _mm_mul_pd(vyi, vwi2_invZi));
1151  vsum_wi2_Zi = _mm_add_pd(vsum_wi2_Zi, vwi2_invZi);
1152  }
1153  }
1154 
1155  double vtmp[2];
1156  _mm_storeu_pd(vtmp, vsum_wi2_xi2);
1157  double sum_wi2_xi2 = vtmp[0] + vtmp[1];
1158 
1159  _mm_storeu_pd(vtmp, vsum_wi2_yi2);
1160  double sum_wi2_yi2 = vtmp[0] + vtmp[1];
1161 
1162  _mm_storeu_pd(vtmp, vsum_wi2);
1163  double sum_wi2 = vtmp[0] + vtmp[1];
1164 
1165  _mm_storeu_pd(vtmp, vsum_wi2_xi_yi);
1166  double sum_wi2_xi_yi = vtmp[0] + vtmp[1];
1167 
1168  _mm_storeu_pd(vtmp, vsum_wi2_xi);
1169  double sum_wi2_xi = vtmp[0] + vtmp[1];
1170 
1171  _mm_storeu_pd(vtmp, vsum_wi2_yi);
1172  double sum_wi2_yi = vtmp[0] + vtmp[1];
1173 
1174  _mm_storeu_pd(vtmp, vsum_wi2_xi_Zi);
1175  double sum_wi2_xi_Zi = vtmp[0] + vtmp[1];
1176 
1177  _mm_storeu_pd(vtmp, vsum_wi2_yi_Zi);
1178  double sum_wi2_yi_Zi = vtmp[0] + vtmp[1];
1179 
1180  _mm_storeu_pd(vtmp, vsum_wi2_Zi);
1181  double sum_wi2_Zi = vtmp[0] + vtmp[1];
1182 
1183  for (; cpt < point_cloud_face.size(); cpt += 3) {
1184  double wi2 = w[cpt / 3] * w[cpt / 3];
1185 
1186  double xi = point_cloud_face[cpt];
1187  double yi = point_cloud_face[cpt + 1];
1188  double Zi = point_cloud_face[cpt + 2];
1189  double invZi = 1.0 / Zi;
1190 
1191  sum_wi2_xi2 += wi2 * xi * xi;
1192  sum_wi2_yi2 += wi2 * yi * yi;
1193  sum_wi2 += wi2;
1194  sum_wi2_xi_yi += wi2 * xi * yi;
1195  sum_wi2_xi += wi2 * xi;
1196  sum_wi2_yi += wi2 * yi;
1197 
1198  sum_wi2_xi_Zi += wi2 * xi * invZi;
1199  sum_wi2_yi_Zi += wi2 * yi * invZi;
1200  sum_wi2_Zi += wi2 * invZi;
1201  }
1202 
1203  ATA_3x3[0] = sum_wi2_xi2;
1204  ATA_3x3[1] = sum_wi2_xi_yi;
1205  ATA_3x3[2] = sum_wi2_xi;
1206  ATA_3x3[3] = sum_wi2_xi_yi;
1207  ATA_3x3[4] = sum_wi2_yi2;
1208  ATA_3x3[5] = sum_wi2_yi;
1209  ATA_3x3[6] = sum_wi2_xi;
1210  ATA_3x3[7] = sum_wi2_yi;
1211  ATA_3x3[8] = sum_wi2;
1212 
1213  Mat33<double> minv = ATA_3x3.inverse();
1214 
1215  A = minv[0] * sum_wi2_xi_Zi + minv[1] * sum_wi2_yi_Zi + minv[2] * sum_wi2_Zi;
1216  B = minv[3] * sum_wi2_xi_Zi + minv[4] * sum_wi2_yi_Zi + minv[5] * sum_wi2_Zi;
1217  C = minv[6] * sum_wi2_xi_Zi + minv[7] * sum_wi2_yi_Zi + minv[8] * sum_wi2_Zi;
1218 
1219  cpt = 0;
1220 
1221  // Compute error
1222  prev_error = error;
1223  error = 0.0;
1224 
1225  __m128d verror = _mm_set1_pd(0.0);
1226  if (point_cloud_face.size() / 3 >= 2) {
1227  const double *ptr_point_cloud = &point_cloud_face[0];
1228  const __m128d vA = _mm_set1_pd(A);
1229  const __m128d vB = _mm_set1_pd(B);
1230  const __m128d vC = _mm_set1_pd(C);
1231  const __m128d vones = _mm_set1_pd(1.0);
1232 
1233  double *ptr_residues = &residues[0];
1234 
1235  for (; cpt <= point_cloud_face.size() - 6; cpt += 6, ptr_point_cloud += 6, ptr_residues += 2) {
1236  const __m128d vxi = _mm_loadu_pd(ptr_point_cloud);
1237  const __m128d vyi = _mm_loadu_pd(ptr_point_cloud + 2);
1238  const __m128d vZi = _mm_loadu_pd(ptr_point_cloud + 4);
1239  const __m128d vinvZi = _mm_div_pd(vones, vZi);
1240 
1241  const __m128d tmp = _mm_add_pd(_mm_add_pd(_mm_mul_pd(vA, vxi), _mm_mul_pd(vB, vyi)), _mm_sub_pd(vC, vinvZi));
1242  verror = _mm_add_pd(verror, _mm_mul_pd(tmp, tmp));
1243 
1244  _mm_storeu_pd(ptr_residues, tmp);
1245  }
1246  }
1247 
1248  _mm_storeu_pd(vtmp, verror);
1249  error = vtmp[0] + vtmp[1];
1250 
1251  for (size_t idx = cpt; idx < point_cloud_face.size(); idx += 3) {
1252  double xi = point_cloud_face[idx];
1253  double yi = point_cloud_face[idx + 1];
1254  double Zi = point_cloud_face[idx + 2];
1255 
1256  error += vpMath::sqr(A * xi + B * yi + C - 1 / Zi);
1257  residues[idx / 3] = (A * xi + B * yi + C - 1 / Zi);
1258  }
1259 
1260  error /= point_cloud_face.size() / 3;
1261 
1262  iter++;
1263  } // while ( std::fabs(error - prev_error) > 1e-6 && (iter < max_iter) )
1264 #endif
1265  } else {
1266  while (std::fabs(error - prev_error) > 1e-6 && (iter < max_iter)) {
1267  if (iter == 0) {
 1268  // Transform the plane equation for the current pose
 1269  m_planeCamera = m_planeObject;
 1270  m_planeCamera.changeFrame(cMo);
1271 
1272  double ux = m_planeCamera.getA();
1273  double uy = m_planeCamera.getB();
1274  double uz = m_planeCamera.getC();
1275  double D = m_planeCamera.getD();
1276 
1277  // Features
1278  A = -ux / D;
1279  B = -uy / D;
1280  C = -uz / D;
1281 
1282  for (size_t i = 0; i < point_cloud_face.size() / 3; i++) {
1283  double xi = point_cloud_face[3 * i];
1284  double yi = point_cloud_face[3 * i + 1];
1285  double Zi = point_cloud_face[3 * i + 2];
1286 
1287  residues[i] = (A * xi + B * yi + C - 1 / Zi);
1288  }
1289  }
1290 
1291  tukey_robust.MEstimator(residues, w, 1e-2);
1292 
1293  // Estimate A, B, C
1294  double sum_wi2_xi2 = 0.0, sum_wi2_yi2 = 0.0, sum_wi2 = 0.0;
1295  double sum_wi2_xi_yi = 0.0, sum_wi2_xi = 0.0, sum_wi2_yi = 0.0;
1296 
1297  double sum_wi2_xi_Zi = 0.0, sum_wi2_yi_Zi = 0.0, sum_wi2_Zi = 0.0;
1298 
1299  for (size_t i = 0; i < point_cloud_face.size() / 3; i++) {
1300  double wi2 = w[i] * w[i];
1301 
1302  double xi = point_cloud_face[3 * i];
1303  double yi = point_cloud_face[3 * i + 1];
1304  double Zi = point_cloud_face[3 * i + 2];
1305  double invZi = 1 / Zi;
1306 
1307  sum_wi2_xi2 += wi2 * xi * xi;
1308  sum_wi2_yi2 += wi2 * yi * yi;
1309  sum_wi2 += wi2;
1310  sum_wi2_xi_yi += wi2 * xi * yi;
1311  sum_wi2_xi += wi2 * xi;
1312  sum_wi2_yi += wi2 * yi;
1313 
1314  sum_wi2_xi_Zi += wi2 * xi * invZi;
1315  sum_wi2_yi_Zi += wi2 * yi * invZi;
1316  sum_wi2_Zi += wi2 * invZi;
1317  }
1318 
1319  ATA_3x3[0] = sum_wi2_xi2;
1320  ATA_3x3[1] = sum_wi2_xi_yi;
1321  ATA_3x3[2] = sum_wi2_xi;
1322  ATA_3x3[3] = sum_wi2_xi_yi;
1323  ATA_3x3[4] = sum_wi2_yi2;
1324  ATA_3x3[5] = sum_wi2_yi;
1325  ATA_3x3[6] = sum_wi2_xi;
1326  ATA_3x3[7] = sum_wi2_yi;
1327  ATA_3x3[8] = sum_wi2;
1328 
1329  Mat33<double> minv = ATA_3x3.inverse();
1330 
1331  A = minv[0] * sum_wi2_xi_Zi + minv[1] * sum_wi2_yi_Zi + minv[2] * sum_wi2_Zi;
1332  B = minv[3] * sum_wi2_xi_Zi + minv[4] * sum_wi2_yi_Zi + minv[5] * sum_wi2_Zi;
1333  C = minv[6] * sum_wi2_xi_Zi + minv[7] * sum_wi2_yi_Zi + minv[8] * sum_wi2_Zi;
1334 
1335  prev_error = error;
1336  error = 0.0;
1337 
1338  // Compute error
1339  for (size_t i = 0; i < point_cloud_face.size() / 3; i++) {
1340  double xi = point_cloud_face[3 * i];
1341  double yi = point_cloud_face[3 * i + 1];
1342  double Zi = point_cloud_face[3 * i + 2];
1343 
1344  error += vpMath::sqr(A * xi + B * yi + C - 1 / Zi);
1345  residues[i] = (A * xi + B * yi + C - 1 / Zi);
1346  }
1347 
1348  error /= point_cloud_face.size() / 3;
1349 
1350  iter++;
1351  } // while ( std::fabs(error - prev_error) > 1e-6 && (iter < max_iter) )
1352  }
1353 
1354  x_estimated.resize(3, false);
1355  x_estimated[0] = A;
1356  x_estimated[1] = B;
1357  x_estimated[2] = C;
1358 }
1359 
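// Robust plane fit by weighted SVD: at each iteration Tukey weights are
// derived from the point-to-plane distances, the weighted centroid is
// subtracted, and the plane normal is taken as the singular vector associated
// with the smallest singular value of M^T * M. The offset is then
// D = -(n . centroid). The initial residuals are computed from the model
// plane transformed into the camera frame.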
1360 void vpMbtFaceDepthNormal::estimatePlaneEquationSVD(const std::vector<double> &point_cloud_face,
1361  const vpHomogeneousMatrix &cMo,
1362  vpColVector &plane_equation_estimated, vpColVector &centroid)
1363 {
1364  unsigned int max_iter = 10;
1365  double prev_error = 1e3;
1366  double error = 1e3 - 1;
1367 
1368  std::vector<double> weights(point_cloud_face.size() / 3, 1.0);
1369  std::vector<double> residues(point_cloud_face.size() / 3);
1370  vpMatrix M((unsigned int)(point_cloud_face.size() / 3), 3);
1371  vpMbtTukeyEstimator<double> tukey;
1372  vpColVector normal;
1373 
1374  for (unsigned int iter = 0; iter < max_iter && std::fabs(error - prev_error) > 1e-6; iter++) {
1375  if (iter != 0) {
1376  tukey.MEstimator(residues, weights, 1e-4);
1377  } else {
 1378  // Transform the plane equation for the current pose
 1379  m_planeCamera = m_planeObject;
 1380  m_planeCamera.changeFrame(cMo);
1381 
1382  double A = m_planeCamera.getA();
1383  double B = m_planeCamera.getB();
1384  double C = m_planeCamera.getC();
1385  double D = m_planeCamera.getD();
1386 
1387  // Compute distance point to estimated plane
1388  for (size_t i = 0; i < point_cloud_face.size() / 3; i++) {
1389  residues[i] = std::fabs(A * point_cloud_face[3 * i] + B * point_cloud_face[3 * i + 1] +
1390  C * point_cloud_face[3 * i + 2] + D) /
1391  sqrt(A * A + B * B + C * C);
1392  }
1393 
1394  tukey.MEstimator(residues, weights, 1e-4);
1395  plane_equation_estimated.resize(4, false);
1396  }
1397 
1398  // Compute centroid
1399  double centroid_x = 0.0, centroid_y = 0.0, centroid_z = 0.0;
1400  double total_w = 0.0;
1401 
1402  for (size_t i = 0; i < point_cloud_face.size() / 3; i++) {
1403  centroid_x += weights[i] * point_cloud_face[3 * i];
1404  centroid_y += weights[i] * point_cloud_face[3 * i + 1];
1405  centroid_z += weights[i] * point_cloud_face[3 * i + 2];
1406  total_w += weights[i];
1407  }
1408 
1409  centroid_x /= total_w;
1410  centroid_y /= total_w;
1411  centroid_z /= total_w;
1412 
1413  // Minimization
1414  for (size_t i = 0; i < point_cloud_face.size() / 3; i++) {
1415  M[(unsigned int)i][0] = weights[i] * (point_cloud_face[3 * i] - centroid_x);
1416  M[(unsigned int)i][1] = weights[i] * (point_cloud_face[3 * i + 1] - centroid_y);
1417  M[(unsigned int)i][2] = weights[i] * (point_cloud_face[3 * i + 2] - centroid_z);
1418  }
1419 
1420  vpMatrix J = M.t() * M;
1421 
1422  vpColVector W;
1423  vpMatrix V;
1424  J.svd(W, V);
1425 
1426  double smallestSv = W[0];
1427  unsigned int indexSmallestSv = 0;
1428  for (unsigned int i = 1; i < W.size(); i++) {
1429  if (W[i] < smallestSv) {
1430  smallestSv = W[i];
1431  indexSmallestSv = i;
1432  }
1433  }
1434 
1435  normal = V.getCol(indexSmallestSv);
1436 
1437  // Compute plane equation
1438  double A = normal[0], B = normal[1], C = normal[2];
1439  double D = -(A * centroid_x + B * centroid_y + C * centroid_z);
1440 
1441  // Update plane equation
1442  plane_equation_estimated[0] = A;
1443  plane_equation_estimated[1] = B;
1444  plane_equation_estimated[2] = C;
1445  plane_equation_estimated[3] = D;
1446 
1447  // Compute error points to estimated plane
1448  prev_error = error;
1449  error = 0.0;
1450  for (size_t i = 0; i < point_cloud_face.size() / 3; i++) {
1451  residues[i] = std::fabs(A * point_cloud_face[3 * i] + B * point_cloud_face[3 * i + 1] +
1452  C * point_cloud_face[3 * i + 2] + D) /
1453  sqrt(A * A + B * B + C * C);
1454  error += weights[i] * residues[i];
1455  }
1456  error /= total_w;
1457  }
1458 
1459  // Update final weights
1460  tukey.MEstimator(residues, weights, 1e-4);
1461 
1462  // Update final centroid
1463  centroid.resize(3, false);
1464  double total_w = 0.0;
1465 
1466  for (size_t i = 0; i < point_cloud_face.size() / 3; i++) {
1467  centroid[0] += weights[i] * point_cloud_face[3 * i];
1468  centroid[1] += weights[i] * point_cloud_face[3 * i + 1];
1469  centroid[2] += weights[i] * point_cloud_face[3 * i + 2];
1470  total_w += weights[i];
1471  }
1472 
1473  centroid[0] /= total_w;
1474  centroid[1] /= total_w;
1475  centroid[2] /= total_w;
1476 
1477  // Compute final plane equation
1478  double A = normal[0], B = normal[1], C = normal[2];
1479  double D = -(A * centroid[0] + B * centroid[1] + C * centroid[2]);
1480 
1481  // Update final plane equation
1482  plane_equation_estimated[0] = A;
1483  plane_equation_estimated[1] = B;
1484  plane_equation_estimated[2] = C;
1485  plane_equation_estimated[3] = D;
1486 }
1487 
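// Build the list of feature primitives used for display. Each entry is a
// 5-vector: an identifier (2 for the desired normal, 3 for the normal at the
// current pose) followed by the image coordinates (i, j) of the face centroid
// and of the arrow extremity (centroid + scale * normal).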
1493 std::vector<std::vector<double> > vpMbtFaceDepthNormal::getFeaturesForDisplay(const vpHomogeneousMatrix &cMo,
1494  const vpCameraParameters &cam,
1495  double scale)
1496 {
1497  std::vector<std::vector<double> > features;
1498 
 1499  if (m_faceActivated && m_isTrackedDepthNormalFace && m_isVisible) {
 1500  // Desired feature
1501  vpPoint pt_centroid = m_faceDesiredCentroid;
1502  pt_centroid.changeFrame(cMo);
1503  pt_centroid.project();
1504 
1505  vpImagePoint im_centroid;
1506  vpMeterPixelConversion::convertPoint(cam, pt_centroid.get_x(), pt_centroid.get_y(), im_centroid);
1507 
1508  vpPoint pt_normal = m_faceDesiredNormal;
1509  pt_normal.changeFrame(cMo);
1510  pt_normal.project();
1511 
1512  vpPoint pt_extremity;
1513  pt_extremity.set_X(pt_centroid.get_X() + pt_normal.get_X() * scale);
1514  pt_extremity.set_Y(pt_centroid.get_Y() + pt_normal.get_Y() * scale);
1515  pt_extremity.set_Z(pt_centroid.get_Z() + pt_normal.get_Z() * scale);
1516  pt_extremity.project();
1517 
1518  vpImagePoint im_extremity;
1519  vpMeterPixelConversion::convertPoint(cam, pt_extremity.get_x(), pt_extremity.get_y(), im_extremity);
1520 
1521  {
1522 #if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
1523  std::vector<double> params = {2, //desired normal
1524  im_centroid.get_i(),
1525  im_centroid.get_j(),
1526  im_extremity.get_i(),
1527  im_extremity.get_j()};
1528 #else
1529  std::vector<double> params;
1530  params.push_back(2); //desired normal
1531  params.push_back(im_centroid.get_i());
1532  params.push_back(im_centroid.get_j());
1533  params.push_back(im_extremity.get_i());
1534  params.push_back(im_extremity.get_j());
1535 #endif
1536  features.push_back(params);
1537  }
1538 
1539  // Current feature
 1540  // Transform the plane equation for the current pose
 1541  m_planeCamera = m_planeObject;
 1542  m_planeCamera.changeFrame(cMo);
1543 
1544  double ux = m_planeCamera.getA();
1545  double uy = m_planeCamera.getB();
1546  double uz = m_planeCamera.getC();
1547 
1548  vpColVector correct_normal;
1549  computeNormalVisibility(ux, uy, uz, cMo, cam, correct_normal, pt_centroid);
1550 
1551  pt_centroid.project();
1552  vpMeterPixelConversion::convertPoint(cam, pt_centroid.get_x(), pt_centroid.get_y(), im_centroid);
1553 
1554  pt_extremity.set_X(pt_centroid.get_X() + correct_normal[0] * scale);
1555  pt_extremity.set_Y(pt_centroid.get_Y() + correct_normal[1] * scale);
1556  pt_extremity.set_Z(pt_centroid.get_Z() + correct_normal[2] * scale);
1557  pt_extremity.project();
1558 
1559  vpMeterPixelConversion::convertPoint(cam, pt_extremity.get_x(), pt_extremity.get_y(), im_extremity);
1560 
1561  {
1562 #if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
1563  std::vector<double> params = {3, //normal at current pose
1564  im_centroid.get_i(),
1565  im_centroid.get_j(),
1566  im_extremity.get_i(),
1567  im_extremity.get_j()};
1568 #else
1569  std::vector<double> params;
1570  params.push_back(3); //normal at current pose
1571  params.push_back(im_centroid.get_i());
1572  params.push_back(im_centroid.get_j());
1573  params.push_back(im_extremity.get_i());
1574  params.push_back(im_extremity.get_j());
1575 #endif
1576  features.push_back(params);
1577  }
1578  }
1579 
1580  return features;
1581 }
1582 
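// Collect the line segments of the face model for display: when the face is
// visible (or displayFullModel is requested), each vpMbtDistanceLine of the
// face contributes its own display primitives.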
1594 std::vector<std::vector<double> > vpMbtFaceDepthNormal::getModelForDisplay(unsigned int width, unsigned int height,
1595  const vpHomogeneousMatrix &cMo,
1596  const vpCameraParameters &cam,
1597  bool displayFullModel)
1598 {
1599  std::vector<std::vector<double> > models;
1600 
 1601  if ((m_polygon->isVisible() && m_isTrackedDepthNormalFace) || displayFullModel) {
 1602  computeVisibilityDisplay();
1603 
1604  for (std::vector<vpMbtDistanceLine *>::const_iterator it = m_listOfFaceLines.begin(); it != m_listOfFaceLines.end();
1605  ++it) {
1606  vpMbtDistanceLine *line = *it;
1607  std::vector<std::vector<double> > lineModels = line->getModelForDisplay(width, height, cMo, cam, displayFullModel);
1608  models.insert(models.end(), lineModels.begin(), lineModels.end());
1609  }
1610  }
1611 
1612  return models;
1613 }
1614 
1624 bool vpMbtFaceDepthNormal::samePoint(const vpPoint &P1, const vpPoint &P2) const
1625 {
1626  double dx = fabs(P1.get_oX() - P2.get_oX());
1627  double dy = fabs(P1.get_oY() - P2.get_oY());
1628  double dz = fabs(P1.get_oZ() - P2.get_oZ());
1629 
1630  if (dx <= std::numeric_limits<double>::epsilon() && dy <= std::numeric_limits<double>::epsilon() &&
1631  dz <= std::numeric_limits<double>::epsilon())
1632  return true;
1633  else
1634  return false;
1635 }
1636 
 1637 void vpMbtFaceDepthNormal::setCameraParameters(const vpCameraParameters &camera)
 1638 {
1639  m_cam = camera;
1640 
1641  for (std::vector<vpMbtDistanceLine *>::const_iterator it = m_listOfFaceLines.begin(); it != m_listOfFaceLines.end();
1642  ++it) {
1643  (*it)->setCameraParameters(camera);
1644  }
1645 }
1646 
 1647 void vpMbtFaceDepthNormal::setScanLineVisibilityTest(const bool v)
 1648 {
1649  m_useScanLine = v;
1650 
1651  for (std::vector<vpMbtDistanceLine *>::const_iterator it = m_listOfFaceLines.begin(); it != m_listOfFaceLines.end();
1652  ++it) {
1653  (*it)->useScanLine = v;
1654  }
1655 }
double get_X() const
Get the point X coordinate in the camera frame.
Definition: vpPoint.cpp:413
void computeScanLineQuery(const vpPoint &a, const vpPoint &b, std::vector< std::pair< vpPoint, vpPoint > > &lines, const bool &displayResults=false)
void estimatePlaneEquationSVD(const std::vector< double > &point_cloud_face, const vpHomogeneousMatrix &cMo, vpColVector &plane_equation_estimated, vpColVector &centroid)
unsigned int getHeight() const
Definition: vpImage.h:186
void computeDesiredFeaturesSVD(const std::vector< double > &point_cloud_face, const vpHomogeneousMatrix &cMo, vpColVector &desired_features, vpColVector &desired_normal, vpColVector &centroid_point)
void set_Z(double Z)
Set the point Z coordinate in the camera frame.
Definition: vpPoint.cpp:458
void computeDesiredNormalAndCentroid(const vpHomogeneousMatrix &cMo, const vpColVector &desired_normal, const vpColVector &centroid_point)
Implementation of column vector and the associated operations.
Definition: vpColVector.h:130
static bool inMask(const vpImage< bool > *mask, unsigned int i, unsigned int j)
static double dotProd(const vpColVector &a, const vpColVector &b)
double get_x() const
Get the point x coordinate in the image plane.
Definition: vpPoint.cpp:431
void setRight(double pos)
Definition: vpRect.h:316
vpMbHiddenFaces< vpMbtPolygon > * hiddenface
Pointer to the list of faces.
double getHeight() const
Definition: vpRect.h:166
double get_y() const
Get the point y coordinate in the image plane.
Definition: vpPoint.cpp:433
void displayFeature(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, double scale=0.05, unsigned int thickness=1)
double getC() const
Definition: vpPlane.h:106
void addPolygon(const int &index)
void setNearClippingDistance(const double &dist)
Definition: vpPolygon3D.h:207
vpPoint m_faceDesiredCentroid
Desired centroid (computed from the sensor)
Defines a rectangle in the plane.
Definition: vpRect.h:78
Class that defines a 2D point in an image. This class is useful for image processing and stores only ...
Definition: vpImagePoint.h:88
Compute the geometric centroid.
double get_Z() const
Get the point Z coordinate in the camera frame.
Definition: vpPoint.cpp:417
vpPlane m_planeObject
Plane equation described in the object frame.
void changeFrame(const vpHomogeneousMatrix &cMo, vpColVector &_cP)
Definition: vpPoint.cpp:233
double get_Y() const
Get the point Y coordinate in the camera frame.
Definition: vpPoint.cpp:415
bool computeDesiredFeatures(const vpHomogeneousMatrix &cMo, unsigned int width, unsigned int height, const pcl::PointCloud< pcl::PointXYZ >::ConstPtr &point_cloud, vpColVector &desired_features, unsigned int stepX, unsigned int stepY, const vpImage< bool > *mask=NULL)
void estimateFeatures(const std::vector< double > &point_cloud_face, const vpHomogeneousMatrix &cMo, vpColVector &x_estimated, std::vector< double > &weights)
unsigned int getWidth() const
Definition: vpImage.h:244
double getBottom() const
Definition: vpRect.h:97
static void displayLine(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color, unsigned int thickness=1, bool segment=true)
void setBottom(double pos)
Definition: vpRect.h:256
void display(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, const vpColor &col, unsigned int thickness=1, bool displayFullModel=false)
bool useScanLine
Use scanline rendering.
void set_X(double X)
Set the point X coordinate in the camera frame.
Definition: vpPoint.cpp:454
void buildFrom(vpPoint &_p1, vpPoint &_p2)
void setIndex(unsigned int i)
static const vpColor blue
Definition: vpColor.h:185
double m_pclPlaneEstimationRansacThreshold
PCL plane estimation RANSAC threshold.
void computeFov(const unsigned int &w, const unsigned int &h)