Visual Servoing Platform version 3.6.1 under development (2024-11-15)
vpMbDepthNormalTracker.cpp
/****************************************************************************
 *
 * ViSP, open source Visual Servoing Platform software.
 * Copyright (C) 2005 - 2023 by Inria. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact Inria about acquiring a ViSP Professional
 * Edition License.
 *
 * See https://visp.inria.fr for more information.
 *
 * This software was developed at:
 * Inria Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 *
 * If you have questions regarding the use of this file, please contact
 * Inria at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Description:
 * Model-based tracker using depth normal features.
 *
*****************************************************************************/

#include <iostream>

#include <visp3/core/vpConfig.h>

#if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
#include <pcl/point_cloud.h>
#endif

#include <visp3/core/vpDisplay.h>
#include <visp3/core/vpExponentialMap.h>
#include <visp3/core/vpTrackingException.h>
#include <visp3/mbt/vpMbDepthNormalTracker.h>
#include <visp3/mbt/vpMbtXmlGenericParser.h>

#if DEBUG_DISPLAY_DEPTH_NORMAL
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayX.h>
#endif

BEGIN_VISP_NAMESPACE
vpMbDepthNormalTracker::vpMbDepthNormalTracker()
  : m_depthNormalFeatureEstimationMethod(vpMbtFaceDepthNormal::ROBUST_FEATURE_ESTIMATION),
  m_depthNormalHiddenFacesDisplay(), m_depthNormalListOfActiveFaces(), m_depthNormalListOfDesiredFeatures(),
  m_depthNormalFaces(), m_depthNormalPclPlaneEstimationMethod(2), m_depthNormalPclPlaneEstimationRansacMaxIter(200),
  m_depthNormalPclPlaneEstimationRansacThreshold(0.001), m_depthNormalSamplingStepX(2), m_depthNormalSamplingStepY(2),
  m_depthNormalUseRobust(false), m_error_depthNormal(), m_featuresToBeDisplayedDepthNormal(), m_L_depthNormal(),
  m_robust_depthNormal(), m_w_depthNormal(), m_weightedError_depthNormal()
#if DEBUG_DISPLAY_DEPTH_NORMAL
  ,
  m_debugDisp_depthNormal(nullptr), m_debugImage_depthNormal()
#endif
{
#ifdef VISP_HAVE_OGRE
  faces.getOgreContext()->setWindowName("MBT Depth");
#endif

#if defined(VISP_HAVE_X11) && DEBUG_DISPLAY_DEPTH_NORMAL
  m_debugDisp_depthNormal = new vpDisplayX;
#elif defined(VISP_HAVE_GDI) && DEBUG_DISPLAY_DEPTH_NORMAL
  m_debugDisp_depthNormal = new vpDisplayGDI;
#endif
}

vpMbDepthNormalTracker::~vpMbDepthNormalTracker()
{
  for (size_t i = 0; i < m_depthNormalFaces.size(); i++) {
    delete m_depthNormalFaces[i];
  }
}

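// Build the vpMbtFaceDepthNormal associated with a polygon of the model: the camera,
// scan-line and clipping settings are copied into the face, its boundary lines are
// added, and the plane equation of the face is stored in the object frame.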
void vpMbDepthNormalTracker::addFace(vpMbtPolygon &polygon, bool alreadyClose)
{
  if (polygon.nbpt < 3) {
    return;
  }

  // Copy hidden faces
  m_depthNormalHiddenFacesDisplay = faces;

  vpMbtFaceDepthNormal *normal_face = new vpMbtFaceDepthNormal;
  normal_face->m_hiddenFace = &faces;
  normal_face->m_polygon = &polygon;
  normal_face->m_cam = m_cam;
  normal_face->m_useScanLine = useScanLine;
  normal_face->m_clippingFlag = clippingFlag;
  normal_face->m_distNearClip = distNearClip;
  normal_face->m_distFarClip = distFarClip;
  normal_face->setFeatureEstimationMethod(m_depthNormalFeatureEstimationMethod);
  normal_face->setPclPlaneEstimationMethod(m_depthNormalPclPlaneEstimationMethod);
  normal_face->setPclPlaneEstimationRansacMaxIter(m_depthNormalPclPlaneEstimationRansacMaxIter);
  normal_face->setPclPlaneEstimationRansacThreshold(m_depthNormalPclPlaneEstimationRansacThreshold);

  // Add lines that compose the face
  unsigned int nbpt = polygon.getNbPoint();
  if (nbpt > 0) {
    for (unsigned int i = 0; i < nbpt - 1; i++) {
      normal_face->addLine(polygon.p[i], polygon.p[i + 1], &m_depthNormalHiddenFacesDisplay, m_rand, polygon.getIndex(),
                           polygon.getName());
    }

    if (!alreadyClose) {
      // Add last line that closes the face
      normal_face->addLine(polygon.p[nbpt - 1], polygon.p[0], &m_depthNormalHiddenFacesDisplay, m_rand,
                           polygon.getIndex(), polygon.getName());
    }
  }

  // Construct a vpPlane in object frame
  vpPoint pts[3];
  pts[0] = polygon.p[0];
  pts[1] = polygon.p[1];
  pts[2] = polygon.p[2];
  normal_face->m_planeObject = vpPlane(pts[0], pts[1], pts[2], vpPlane::object_frame);

  m_depthNormalFaces.push_back(normal_face);
}

void vpMbDepthNormalTracker::computeVisibility(unsigned int width, unsigned int height)
{
  bool changed = false;
  faces.setVisible(width, height, m_cam, m_cMo, angleAppears, angleDisappears, changed);

  if (useScanLine) {
    // if (clippingFlag <= 2) {
    //   cam.computeFov(width, height);
    // }

    faces.computeClippedPolygons(m_cMo, m_cam);
    faces.computeScanLineRender(m_cam, width, height);
  }

  for (std::vector<vpMbtFaceDepthNormal *>::const_iterator it = m_depthNormalFaces.begin();
       it != m_depthNormalFaces.end(); ++it) {
    vpMbtFaceDepthNormal *face_normal = *it;
    face_normal->computeVisibility();
  }
}

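// Virtual visual servoing loop: at each iteration the interaction matrix and residual
// of the active depth-normal features are recomputed, weighted, and used to update the
// pose m_cMo through the exponential map, until the normalized residual stabilizes or
// m_maxIter is reached. A possible rank deficiency of the interaction matrix is handled
// once at the first iteration by reducing the estimated degrees of freedom.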
void vpMbDepthNormalTracker::computeVVS()
{
  double normRes = 0;
  double normRes_1 = -1;
  unsigned int iter = 0;

  computeVVSInit();
  unsigned int nb_features = (unsigned int)(3 * m_depthNormalListOfDesiredFeatures.size());

  vpColVector error_prev(nb_features);
  vpMatrix LTL;
  vpColVector LTR, v;

  double mu = m_initialMu;
  vpHomogeneousMatrix cMo_prev;

  bool isoJoIdentity = m_isoJoIdentity; // Backup since it can be modified if L is not full rank
  if (isoJoIdentity)
    oJo.eye();

  vpVelocityTwistMatrix cVo;
  vpMatrix L_true, LVJ_true;

  while (std::fabs(normRes_1 - normRes) > m_stopCriteriaEpsilon && (iter < m_maxIter)) {
    computeVVSInteractionMatrixAndResidu();

    bool reStartFromLastIncrement = false;
    computeVVSCheckLevenbergMarquardt(iter, m_error_depthNormal, error_prev, cMo_prev, mu, reStartFromLastIncrement);

    if (!reStartFromLastIncrement) {
      if (m_depthNormalUseRobust)
        computeVVSWeights(m_robust_depthNormal, m_error_depthNormal, m_w_depthNormal);

      if (computeCovariance) {
        L_true = m_L_depthNormal;
        if (!isoJoIdentity) {
          cVo.buildFrom(m_cMo);
          LVJ_true = (m_L_depthNormal * (cVo * oJo));
        }
      }

      // Compute DoF only once
      if (iter == 0) {
        // If all the 6 dof should be estimated, we check if the interaction
        // matrix is full rank. If not we remove automatically the dof that
        // cannot be estimated. This is particularly useful when considering
        // circles (rank 5) and cylinders (rank 4)
        if (isoJoIdentity) {
          cVo.buildFrom(m_cMo);

          vpMatrix K; // kernel
          unsigned int rank = (m_L_depthNormal * cVo).kernel(K);
          if (rank == 0) {
            throw vpException(vpException::fatalError, "Rank=0, cannot estimate the pose !");
          }

          if (rank != 6) {
            vpMatrix I; // Identity
            I.eye(6);
            oJo = I - K.AtA();

            isoJoIdentity = false;
          }
        }
      }

      double num = 0.0, den = 0.0;
      for (unsigned int i = 0; i < m_L_depthNormal.getRows(); i++) {
        // Compute weighted errors and stop criteria
        m_weightedError_depthNormal[i] = m_w_depthNormal[i] * m_error_depthNormal[i];
        num += m_w_depthNormal[i] * vpMath::sqr(m_error_depthNormal[i]);
        den += m_w_depthNormal[i];

        // weight interaction matrix
        for (unsigned int j = 0; j < 6; j++) {
          m_L_depthNormal[i][j] *= m_w_depthNormal[i];
        }
      }

      computeVVSPoseEstimation(isoJoIdentity, iter, m_L_depthNormal, LTL, m_weightedError_depthNormal,
                               m_error_depthNormal, error_prev, LTR, mu, v);

      cMo_prev = m_cMo;
      m_cMo = vpExponentialMap::direct(v).inverse() * cMo_prev;

      normRes_1 = normRes;
      normRes = sqrt(num / den);
    }

    iter++;
  }

  computeCovarianceMatrixVVS(isoJoIdentity, m_w_depthNormal, cMo_prev, L_true, LVJ_true, m_error_depthNormal);
}

void vpMbDepthNormalTracker::computeVVSInit()
{
  unsigned int nb_features = (unsigned int)(3 * m_depthNormalListOfDesiredFeatures.size());

  m_L_depthNormal.resize(nb_features, 6, false, false);
  m_error_depthNormal.resize(nb_features, false);
  m_weightedError_depthNormal.resize(nb_features, false);

  m_w_depthNormal.resize(nb_features, false);
  m_w_depthNormal = 1;

  m_robust_depthNormal.setMinMedianAbsoluteDeviation(1e-3);
}

void vpMbDepthNormalTracker::computeVVSInteractionMatrixAndResidu()
{
  unsigned int cpt = 0;
  for (std::vector<vpMbtFaceDepthNormal *>::const_iterator it = m_depthNormalListOfActiveFaces.begin();
       it != m_depthNormalListOfActiveFaces.end(); ++it) {
    vpMatrix L_face;
    vpColVector features_face;

    (*it)->computeInteractionMatrix(m_cMo, L_face, features_face);

    vpColVector face_error = features_face - m_depthNormalListOfDesiredFeatures[(size_t)cpt];

    if (!(*it)->planeIsInvalid(m_cMo, angleDisappears)) {
      m_error_depthNormal.insert(cpt * 3, face_error);
      m_L_depthNormal.insert(L_face, cpt * 3, 0);
    }
    else {
      face_error = 0;
      L_face = 0;
      m_error_depthNormal.insert(cpt * 3, face_error);
      m_L_depthNormal.insert(L_face, cpt * 3, 0);
    }

    cpt++;
  }
}

void vpMbDepthNormalTracker::display(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cMo,
                                     const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
                                     bool displayFullModel)
{
  std::vector<std::vector<double> > models =
    vpMbDepthNormalTracker::getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);

  for (size_t i = 0; i < models.size(); i++) {
    if (vpMath::equal(models[i][0], 0)) {
      vpImagePoint ip1(models[i][1], models[i][2]);
      vpImagePoint ip2(models[i][3], models[i][4]);
      vpDisplay::displayLine(I, ip1, ip2, col, thickness);
    }
  }

  if (displayFeatures) {
    std::vector<std::vector<double> > features = getFeaturesForDisplayDepthNormal();
    for (size_t i = 0; i < features.size(); i++) {
      vpImagePoint im_centroid(features[i][1], features[i][2]);
      vpImagePoint im_extremity(features[i][3], features[i][4]);
      bool desired = vpMath::equal(features[i][0], 2);
      vpDisplay::displayArrow(I, im_centroid, im_extremity, desired ? vpColor::blue : vpColor::red, 4, 2, thickness);
    }
  }
}

void vpMbDepthNormalTracker::display(const vpImage<vpRGBa> &I, const vpHomogeneousMatrix &cMo,
                                     const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
                                     bool displayFullModel)
{
  std::vector<std::vector<double> > models =
    vpMbDepthNormalTracker::getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);

  for (size_t i = 0; i < models.size(); i++) {
    if (vpMath::equal(models[i][0], 0)) {
      vpImagePoint ip1(models[i][1], models[i][2]);
      vpImagePoint ip2(models[i][3], models[i][4]);
      vpDisplay::displayLine(I, ip1, ip2, col, thickness);
    }
  }

  if (displayFeatures) {
    std::vector<std::vector<double> > features = getFeaturesForDisplayDepthNormal();
    for (size_t i = 0; i < features.size(); i++) {
      vpImagePoint im_centroid(features[i][1], features[i][2]);
      vpImagePoint im_extremity(features[i][3], features[i][4]);
      bool desired = vpMath::equal(features[i][0], 2);
      vpDisplay::displayArrow(I, im_centroid, im_extremity, desired ? vpColor::blue : vpColor::red, 4, 2, thickness);
    }
  }
}

std::vector<std::vector<double> > vpMbDepthNormalTracker::getFeaturesForDisplayDepthNormal()
{
  std::vector<std::vector<double> > features;

  for (std::vector<vpMbtFaceDepthNormal *>::const_iterator it = m_depthNormalFaces.begin();
       it != m_depthNormalFaces.end(); ++it) {
    vpMbtFaceDepthNormal *face_normal = *it;
    std::vector<std::vector<double> > currentFeatures = face_normal->getFeaturesForDisplay(m_cMo, m_cam);
    features.insert(features.end(), currentFeatures.begin(), currentFeatures.end());
  }

  return features;
}

std::vector<std::vector<double> > vpMbDepthNormalTracker::getModelForDisplay(unsigned int width, unsigned int height,
                                                                             const vpHomogeneousMatrix &cMo,
                                                                             const vpCameraParameters &cam,
                                                                             bool displayFullModel)
{
  std::vector<std::vector<double> > models;

  vpCameraParameters c = cam;

  bool changed = false;
  faces.setVisible(width, height, c, cMo, angleAppears, angleDisappears, changed);

  if (useScanLine) {
    c.computeFov(width, height);

    faces.computeClippedPolygons(cMo, c);
    faces.computeScanLineRender(c, width, height);
  }

  for (std::vector<vpMbtFaceDepthNormal *>::const_iterator it = m_depthNormalFaces.begin();
       it != m_depthNormalFaces.end(); ++it) {
    vpMbtFaceDepthNormal *face_normal = *it;
    std::vector<std::vector<double> > modelLines =
      face_normal->getModelForDisplay(width, height, cMo, cam, displayFullModel);
    models.insert(models.end(), modelLines.begin(), modelLines.end());
  }

  return models;
}

void vpMbDepthNormalTracker::init(const vpImage<unsigned char> &I)
{
  if (!modelInitialised) {
    throw vpException(vpException::fatalError, "model not initialized");
  }

  bool reInitialisation = false;
  if (!useOgre) {
    faces.setVisible(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
  }
  else {
#ifdef VISP_HAVE_OGRE
    if (!faces.isOgreInitialised()) {
      faces.setBackgroundSizeOgre(I.getHeight(), I.getWidth());
      faces.setOgreShowConfigDialog(ogreShowConfigDialog);
      faces.initOgre(m_cam);
      // Turn off Ogre config dialog display for the next call to this
      // function since settings are saved in the ogre.cfg file and used
      // during the next call
      ogreShowConfigDialog = false;
    }

    faces.setVisibleOgre(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
#else
    faces.setVisible(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
#endif
  }

  if (useScanLine || clippingFlag > 3)
    m_cam.computeFov(I.getWidth(), I.getHeight());

  computeVisibility(I.getWidth(), I.getHeight());
}

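// Load the tracker settings (camera parameters, visibility angles, clipping distances,
// depth-normal feature estimation method, PCL plane estimation options and sampling
// steps) from an XML configuration file parsed with vpMbtXmlGenericParser.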
void vpMbDepthNormalTracker::loadConfigFile(const std::string &configFile, bool verbose)
{
#if defined(VISP_HAVE_PUGIXML)
  vpMbtXmlGenericParser xmlp(vpMbtXmlGenericParser::DEPTH_NORMAL_PARSER);
  xmlp.setVerbose(verbose);
  xmlp.setCameraParameters(m_cam);
  xmlp.setAngleAppear(vpMath::deg(angleAppears));
  xmlp.setAngleDisappear(vpMath::deg(angleDisappears));

  xmlp.setDepthNormalFeatureEstimationMethod(m_depthNormalFeatureEstimationMethod);
  xmlp.setDepthNormalPclPlaneEstimationMethod(m_depthNormalPclPlaneEstimationMethod);
  xmlp.setDepthNormalPclPlaneEstimationRansacMaxIter(m_depthNormalPclPlaneEstimationRansacMaxIter);
  xmlp.setDepthNormalPclPlaneEstimationRansacThreshold(m_depthNormalPclPlaneEstimationRansacThreshold);
  xmlp.setDepthNormalSamplingStepX(m_depthNormalSamplingStepX);
  xmlp.setDepthNormalSamplingStepY(m_depthNormalSamplingStepY);

  try {
    if (verbose) {
      std::cout << " *********** Parsing XML for Mb Depth Tracker ************ " << std::endl;
    }
    xmlp.parse(configFile);
  }
  catch (const vpException &e) {
    std::cerr << "Exception: " << e.what() << std::endl;
    throw vpException(vpException::ioError, "Cannot open XML file \"%s\"", configFile.c_str());
  }

  vpCameraParameters camera;
  xmlp.getCameraParameters(camera);
  setCameraParameters(camera);

  angleAppears = vpMath::rad(xmlp.getAngleAppear());
  angleDisappears = vpMath::rad(xmlp.getAngleDisappear());

  if (xmlp.hasNearClippingDistance())
    setNearClippingDistance(xmlp.getNearClippingDistance());

  if (xmlp.hasFarClippingDistance())
    setFarClippingDistance(xmlp.getFarClippingDistance());

  if (xmlp.getFovClipping())
    setClipping(clippingFlag | vpPolygon3D::FOV_CLIPPING);

  setDepthNormalFeatureEstimationMethod(xmlp.getDepthNormalFeatureEstimationMethod());
  setDepthNormalPclPlaneEstimationMethod(xmlp.getDepthNormalPclPlaneEstimationMethod());
  setDepthNormalPclPlaneEstimationRansacMaxIter(xmlp.getDepthNormalPclPlaneEstimationRansacMaxIter());
  setDepthNormalPclPlaneEstimationRansacThreshold(xmlp.getDepthNormalPclPlaneEstimationRansacThreshold());
  setDepthNormalSamplingStep(xmlp.getDepthNormalSamplingStepX(), xmlp.getDepthNormalSamplingStepY());
#else
  (void)configFile;
  (void)verbose;
  throw(vpException(vpException::ioError, "vpMbDepthNormalTracker::loadConfigFile() needs pugixml built-in 3rdparty"));
#endif
}

void vpMbDepthNormalTracker::reInitModel(const vpImage<unsigned char> &I, const std::string &cad_name,
                                         const vpHomogeneousMatrix &cMo, bool verbose)
{
  m_cMo.eye();

  for (size_t i = 0; i < m_depthNormalFaces.size(); i++) {
    delete m_depthNormalFaces[i];
    m_depthNormalFaces[i] = nullptr;
  }

  m_depthNormalFaces.clear();

  loadModel(cad_name, verbose);
  initFromPose(I, cMo);
}

#if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
void vpMbDepthNormalTracker::reInitModel(const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud,
                                         const std::string &cad_name, const vpHomogeneousMatrix &cMo, bool verbose)
{
  vpImage<unsigned char> I_dummy(point_cloud->height, point_cloud->width);
  reInitModel(I_dummy, cad_name, cMo, verbose);
}

#endif

void vpMbDepthNormalTracker::resetTracker()
{
  m_cMo.eye();

  for (std::vector<vpMbtFaceDepthNormal *>::iterator it = m_depthNormalFaces.begin(); it != m_depthNormalFaces.end();
       ++it) {
    vpMbtFaceDepthNormal *normal_face = *it;
    delete normal_face;
    normal_face = nullptr;
  }

  m_depthNormalFaces.clear();

  m_computeInteraction = true;
  computeCovariance = false;

  angleAppears = vpMath::rad(89);
  angleDisappears = vpMath::rad(89);

  clippingFlag = vpPolygon3D::NO_CLIPPING;

  m_lambda = 1.0;

  faces.reset();

  m_optimizationMethod = vpMbTracker::GAUSS_NEWTON_OPT;

  useScanLine = false;

#ifdef VISP_HAVE_OGRE
  useOgre = false;
#endif

  m_depthNormalListOfActiveFaces.clear();
  m_depthNormalListOfDesiredFeatures.clear();
}

void vpMbDepthNormalTracker::setOgreVisibilityTest(const bool &v)
{
  vpMbTracker::setOgreVisibilityTest(v);
#ifdef VISP_HAVE_OGRE
  faces.getOgreContext()->setWindowName("MBT Depth");
#endif
}

void vpMbDepthNormalTracker::setPose(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cdMo)
{
  m_cMo = cdMo;
  init(I);
}

void vpMbDepthNormalTracker::setPose(const vpImage<vpRGBa> &I_color, const vpHomogeneousMatrix &cdMo)
{
  m_cMo = cdMo;
  vpImageConvert::convert(I_color, m_I);
  init(m_I);
}

#if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
void vpMbDepthNormalTracker::setPose(const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud,
                                     const vpHomogeneousMatrix &cdMo)
{
  vpImage<unsigned char> I_dummy(point_cloud->height, point_cloud->width);
  m_cMo = cdMo;
  init(I_dummy);
}
#endif

void vpMbDepthNormalTracker::setScanLineVisibilityTest(const bool &v)
{
  vpMbTracker::setScanLineVisibilityTest(v);

  for (std::vector<vpMbtFaceDepthNormal *>::const_iterator it = m_depthNormalFaces.begin();
       it != m_depthNormalFaces.end(); ++it) {
    (*it)->setScanLineVisibilityTest(v);
  }
}

void vpMbDepthNormalTracker::setUseDepthNormalTracking(const std::string &name, const bool &useDepthNormalTracking)
{
  for (std::vector<vpMbtFaceDepthNormal *>::const_iterator it = m_depthNormalFaces.begin();
       it != m_depthNormalFaces.end(); ++it) {
    vpMbtFaceDepthNormal *face = *it;
    if (face->m_polygon->getName() == name) {
      face->setTracked(useDepthNormalTracking);
    }
  }
}

void vpMbDepthNormalTracker::testTracking() { }

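// The segmentPointCloud() overloads below (PCL point cloud, vector of vpColVector,
// vpMatrix) rebuild m_depthNormalListOfActiveFaces and m_depthNormalListOfDesiredFeatures:
// for each visible and tracked face, the desired plane-normal feature is estimated from
// the 3D points that fall inside the face, using the configured sampling steps and mask.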
#if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
void vpMbDepthNormalTracker::segmentPointCloud(const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
{
  m_depthNormalListOfActiveFaces.clear();
  m_depthNormalListOfDesiredFeatures.clear();

#if DEBUG_DISPLAY_DEPTH_NORMAL
  if (!m_debugDisp_depthNormal->isInitialised()) {
    m_debugImage_depthNormal.resize(point_cloud->height, point_cloud->width);
    m_debugDisp_depthNormal->init(m_debugImage_depthNormal, 50, 0, "Debug display normal depth tracker");
  }

  m_debugImage_depthNormal = 0;
  std::vector<std::vector<vpImagePoint> > roiPts_vec;
#endif

  for (std::vector<vpMbtFaceDepthNormal *>::iterator it = m_depthNormalFaces.begin(); it != m_depthNormalFaces.end();
       ++it) {
    vpMbtFaceDepthNormal *face = *it;

    if (face->isVisible() && face->isTracked()) {
      vpColVector desired_features;

#if DEBUG_DISPLAY_DEPTH_NORMAL
      std::vector<std::vector<vpImagePoint> > roiPts_vec_;
#endif
      if (face->computeDesiredFeatures(m_cMo, point_cloud->width, point_cloud->height, point_cloud, desired_features,
                                       m_depthNormalSamplingStepX, m_depthNormalSamplingStepY
#if DEBUG_DISPLAY_DEPTH_NORMAL
                                       ,
                                       m_debugImage_depthNormal, roiPts_vec_
#endif
                                       ,
                                       m_mask)) {
        m_depthNormalListOfDesiredFeatures.push_back(desired_features);
        m_depthNormalListOfActiveFaces.push_back(face);

#if DEBUG_DISPLAY_DEPTH_NORMAL
        roiPts_vec.insert(roiPts_vec.end(), roiPts_vec_.begin(), roiPts_vec_.end());
#endif
      }
    }
  }

#if DEBUG_DISPLAY_DEPTH_NORMAL
  vpDisplay::display(m_debugImage_depthNormal);

  for (size_t i = 0; i < roiPts_vec.size(); i++) {
    if (roiPts_vec[i].empty())
      continue;

    for (size_t j = 0; j < roiPts_vec[i].size() - 1; j++) {
      vpDisplay::displayLine(m_debugImage_depthNormal, roiPts_vec[i][j], roiPts_vec[i][j + 1], vpColor::red, 2);
    }
    vpDisplay::displayLine(m_debugImage_depthNormal, roiPts_vec[i][0], roiPts_vec[i][roiPts_vec[i].size() - 1],
                           vpColor::red, 2);
  }

  vpDisplay::flush(m_debugImage_depthNormal);
#endif
}
#endif

void vpMbDepthNormalTracker::segmentPointCloud(const std::vector<vpColVector> &point_cloud, unsigned int width,
                                               unsigned int height)
{
  m_depthNormalListOfActiveFaces.clear();
  m_depthNormalListOfDesiredFeatures.clear();

#if DEBUG_DISPLAY_DEPTH_NORMAL
  if (!m_debugDisp_depthNormal->isInitialised()) {
    m_debugImage_depthNormal.resize(height, width);
    m_debugDisp_depthNormal->init(m_debugImage_depthNormal, 50, 0, "Debug display normal depth tracker");
  }

  m_debugImage_depthNormal = 0;
  std::vector<std::vector<vpImagePoint> > roiPts_vec;
#endif

  for (std::vector<vpMbtFaceDepthNormal *>::iterator it = m_depthNormalFaces.begin(); it != m_depthNormalFaces.end();
       ++it) {
    vpMbtFaceDepthNormal *face = *it;

    if (face->isVisible() && face->isTracked()) {
      vpColVector desired_features;

#if DEBUG_DISPLAY_DEPTH_NORMAL
      std::vector<std::vector<vpImagePoint> > roiPts_vec_;
#endif

      if (face->computeDesiredFeatures(m_cMo, width, height, point_cloud, desired_features, m_depthNormalSamplingStepX,
                                       m_depthNormalSamplingStepY
#if DEBUG_DISPLAY_DEPTH_NORMAL
                                       ,
                                       m_debugImage_depthNormal, roiPts_vec_
#endif
                                       ,
                                       m_mask)) {
        m_depthNormalListOfDesiredFeatures.push_back(desired_features);
        m_depthNormalListOfActiveFaces.push_back(face);

#if DEBUG_DISPLAY_DEPTH_NORMAL
        roiPts_vec.insert(roiPts_vec.end(), roiPts_vec_.begin(), roiPts_vec_.end());
#endif
      }
    }
  }

#if DEBUG_DISPLAY_DEPTH_NORMAL
  vpDisplay::display(m_debugImage_depthNormal);

  for (size_t i = 0; i < roiPts_vec.size(); i++) {
    if (roiPts_vec[i].empty())
      continue;

    for (size_t j = 0; j < roiPts_vec[i].size() - 1; j++) {
      vpDisplay::displayLine(m_debugImage_depthNormal, roiPts_vec[i][j], roiPts_vec[i][j + 1], vpColor::red, 2);
    }
    vpDisplay::displayLine(m_debugImage_depthNormal, roiPts_vec[i][0], roiPts_vec[i][roiPts_vec[i].size() - 1],
                           vpColor::red, 2);
  }

  vpDisplay::flush(m_debugImage_depthNormal);
#endif
}

void vpMbDepthNormalTracker::segmentPointCloud(const vpMatrix &point_cloud, unsigned int width,
                                               unsigned int height)
{
  m_depthNormalListOfActiveFaces.clear();
  m_depthNormalListOfDesiredFeatures.clear();

#if DEBUG_DISPLAY_DEPTH_NORMAL
  if (!m_debugDisp_depthNormal->isInitialised()) {
    m_debugImage_depthNormal.resize(height, width);
    m_debugDisp_depthNormal->init(m_debugImage_depthNormal, 50, 0, "Debug display normal depth tracker");
  }

  m_debugImage_depthNormal = 0;
  std::vector<std::vector<vpImagePoint> > roiPts_vec;
#endif

  for (std::vector<vpMbtFaceDepthNormal *>::iterator it = m_depthNormalFaces.begin(); it != m_depthNormalFaces.end();
       ++it) {
    vpMbtFaceDepthNormal *face = *it;

    if (face->isVisible() && face->isTracked()) {
      vpColVector desired_features;

#if DEBUG_DISPLAY_DEPTH_NORMAL
      std::vector<std::vector<vpImagePoint> > roiPts_vec_;
#endif

      if (face->computeDesiredFeatures(m_cMo, width, height, point_cloud, desired_features, m_depthNormalSamplingStepX,
                                       m_depthNormalSamplingStepY
#if DEBUG_DISPLAY_DEPTH_NORMAL
                                       ,
                                       m_debugImage_depthNormal, roiPts_vec_
#endif
                                       ,
                                       m_mask)) {
        m_depthNormalListOfDesiredFeatures.push_back(desired_features);
        m_depthNormalListOfActiveFaces.push_back(face);

#if DEBUG_DISPLAY_DEPTH_NORMAL
        roiPts_vec.insert(roiPts_vec.end(), roiPts_vec_.begin(), roiPts_vec_.end());
#endif
      }
    }
  }

#if DEBUG_DISPLAY_DEPTH_NORMAL
  vpDisplay::display(m_debugImage_depthNormal);

  for (size_t i = 0; i < roiPts_vec.size(); i++) {
    if (roiPts_vec[i].empty())
      continue;

    for (size_t j = 0; j < roiPts_vec[i].size() - 1; j++) {
      vpDisplay::displayLine(m_debugImage_depthNormal, roiPts_vec[i][j], roiPts_vec[i][j + 1], vpColor::red, 2);
    }
    vpDisplay::displayLine(m_debugImage_depthNormal, roiPts_vec[i][0], roiPts_vec[i][roiPts_vec[i].size() - 1],
                           vpColor::red, 2);
  }

  vpDisplay::flush(m_debugImage_depthNormal);
#endif
}

void vpMbDepthNormalTracker::setCameraParameters(const vpCameraParameters &cam)
{
  m_cam = cam;

  for (std::vector<vpMbtFaceDepthNormal *>::const_iterator it = m_depthNormalFaces.begin();
       it != m_depthNormalFaces.end(); ++it) {
    (*it)->setCameraParameters(cam);
  }
}

void vpMbDepthNormalTracker::setDepthNormalFaceCentroidMethod(const vpMbtFaceDepthNormal::vpFaceCentroidType &method)
{
  for (std::vector<vpMbtFaceDepthNormal *>::const_iterator it = m_depthNormalFaces.begin();
       it != m_depthNormalFaces.end(); ++it) {
    (*it)->setFaceCentroidMethod(method);
  }
}

void vpMbDepthNormalTracker::setDepthNormalFeatureEstimationMethod(
    const vpMbtFaceDepthNormal::vpFeatureEstimationType &method)
{
  m_depthNormalFeatureEstimationMethod = method;

  for (std::vector<vpMbtFaceDepthNormal *>::const_iterator it = m_depthNormalFaces.begin();
       it != m_depthNormalFaces.end(); ++it) {
    (*it)->setFeatureEstimationMethod(method);
  }
}

void vpMbDepthNormalTracker::setDepthNormalPclPlaneEstimationMethod(int method)
{
  m_depthNormalPclPlaneEstimationMethod = method;

  for (std::vector<vpMbtFaceDepthNormal *>::const_iterator it = m_depthNormalFaces.begin();
       it != m_depthNormalFaces.end(); ++it) {
    (*it)->setPclPlaneEstimationMethod(method);
  }
}

void vpMbDepthNormalTracker::setDepthNormalPclPlaneEstimationRansacMaxIter(int maxIter)
{
  m_depthNormalPclPlaneEstimationRansacMaxIter = maxIter;

  for (std::vector<vpMbtFaceDepthNormal *>::const_iterator it = m_depthNormalFaces.begin();
       it != m_depthNormalFaces.end(); ++it) {
    (*it)->setPclPlaneEstimationRansacMaxIter(maxIter);
  }
}

void vpMbDepthNormalTracker::setDepthNormalPclPlaneEstimationRansacThreshold(double threshold)
{
  m_depthNormalPclPlaneEstimationRansacThreshold = threshold;

  for (std::vector<vpMbtFaceDepthNormal *>::const_iterator it = m_depthNormalFaces.begin();
       it != m_depthNormalFaces.end(); ++it) {
    (*it)->setPclPlaneEstimationRansacThreshold(threshold);
  }
}

void vpMbDepthNormalTracker::setDepthNormalSamplingStep(unsigned int stepX, unsigned int stepY)
{
  if (stepX == 0 || stepY == 0) {
    std::cerr << "stepX and stepY must be greater than zero!" << std::endl;
    return;
  }

  m_depthNormalSamplingStepX = stepX;
  m_depthNormalSamplingStepY = stepY;
}

// void vpMbDepthNormalTracker::setDepthNormalUseRobust(bool use) {
//  m_depthNormalUseRobust = use;
//}

void vpMbDepthNormalTracker::track(const vpImage<unsigned char> &)
{
  throw vpException(vpException::fatalError, "Cannot track with a grayscale image!");
}

void vpMbDepthNormalTracker::track(const vpImage<vpRGBa> &)
{
  throw vpException(vpException::fatalError, "Cannot track with a color image!");
}

#if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
void vpMbDepthNormalTracker::track(const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
{
  segmentPointCloud(point_cloud);

  computeVVS();

  computeVisibility(point_cloud->width, point_cloud->height);
}
#endif

void vpMbDepthNormalTracker::track(const std::vector<vpColVector> &point_cloud, unsigned int width, unsigned int height)
{
  segmentPointCloud(point_cloud, width, height);

  computeVVS();

  computeVisibility(width, height);
}

void vpMbDepthNormalTracker::initCircle(const vpPoint & /*p1*/, const vpPoint & /*p2*/, const vpPoint & /*p3*/,
                                        double /*radius*/, int /*idFace*/, const std::string & /*name*/)
{
  throw vpException(vpException::fatalError, "vpMbDepthNormalTracker::initCircle() should not be called!");
}

void vpMbDepthNormalTracker::initCylinder(const vpPoint & /*p1*/, const vpPoint & /*p2*/, double /*radius*/,
                                          int /*idFace*/, const std::string & /*name*/)
{
  throw vpException(vpException::fatalError, "vpMbDepthNormalTracker::initCylinder() should not be called!");
}

void vpMbDepthNormalTracker::initFaceFromCorners(vpMbtPolygon &polygon) { addFace(polygon, false); }

void vpMbDepthNormalTracker::initFaceFromLines(vpMbtPolygon &polygon) { addFace(polygon, true); }
END_VISP_NAMESPACE
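
// Typical use of this tracker, as a sketch only (assumes an organized point cloud from a
// depth sensor, a CAO model file and an XML configuration file; grab() and the file names
// are illustrative and not part of ViSP):
//
//   vpMbDepthNormalTracker tracker;
//   tracker.loadConfigFile("object.xml");  // camera parameters, clipping, sampling steps
//   tracker.loadModel("object.cao");
//   tracker.initFromPose(I, cMo_init);     // I: grayscale image, cMo_init: initial pose
//
//   pcl::PointCloud<pcl::PointXYZ>::ConstPtr point_cloud;
//   while (grab(point_cloud)) {
//     tracker.track(point_cloud);          // segmentPointCloud() + computeVVS() + visibility
//     vpHomogeneousMatrix cMo;
//     tracker.getPose(cMo);
//     vpCameraParameters cam;
//     tracker.getCameraParameters(cam);
//     tracker.display(I, cMo, cam, vpColor::red);
//   }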