vpMbKltTracker.cpp
/****************************************************************************
 *
 * $Id: vpMbKltTracker.cpp 4337 2013-07-23 13:57:53Z ayol $
 *
 * This file is part of the ViSP software.
 * Copyright (C) 2005 - 2013 by INRIA. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * ("GPL") version 2 as published by the Free Software Foundation.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact INRIA about acquiring a ViSP Professional
 * Edition License.
 *
 * See http://www.irisa.fr/lagadic/visp/visp.html for more information.
 *
 * This software was developed at:
 * INRIA Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 * http://www.irisa.fr/lagadic
 *
 * If you have questions regarding the use of this file, please contact
 * INRIA at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Description:
 * Model based tracker using only KLT
 *
 * Authors:
 * Romain Tallonneau
 * Aurelien Yol
 *
 *****************************************************************************/

#include <visp/vpMbKltTracker.h>

#ifdef VISP_HAVE_OPENCV

vpMbKltTracker::vpMbKltTracker()
{

  cur = NULL;
  compute_interaction = true;
  firstInitialisation = true;
  computeCovariance = false;
  firstTrack = false;

  tracker.setTrackerId(1);
  tracker.setUseHarris(1);

  tracker.setMaxFeatures(10000);
  tracker.setWindowSize(5);
  tracker.setQuality(0.01);
  tracker.setMinDistance(5);
  tracker.setHarrisFreeParameter(0.01);
  tracker.setBlockSize(3);
  tracker.setPyramidLevels(3);

  angleAppears = vpMath::rad(65);
  angleDisappears = vpMath::rad(75);

  clippingFlag = vpMbtPolygon::NO_CLIPPING;

  maskBorder = 5;
  threshold_outlier = 0.5;
  percentGood = 0.6;

  lambda = 0.8;
  maxIter = 200;

#ifdef VISP_HAVE_OGRE
  faces.getOgreContext()->setWindowName("MBT KLT");
#endif

  useOgre = false;
}

/*!
  Basic destructor.
*/
vpMbKltTracker::~vpMbKltTracker()
{
  if(cur != NULL){
    cvReleaseImage(&cur);
    cur = NULL;
  }
}

void
vpMbKltTracker::init(const vpImage<unsigned char>& I)
{
  if(!modelInitialised){
    throw vpException(vpException::fatalError, "model not initialized");
  }

  bool reInitialisation = false;
  if(!useOgre)
    faces.setVisible(I, cam, cMo, angleAppears, angleDisappears, reInitialisation);
  else{
#ifdef VISP_HAVE_OGRE
    if(!faces.isOgreInitialised()){
      faces.getOgreContext()->setBackgroundSizeOgre(I.getHeight(), I.getWidth());
      faces.initOgre(cam);
    }

    faces.setVisibleOgre(I, cam, cMo, angleAppears, angleDisappears, reInitialisation);

#else
    faces.setVisible(I, cam, cMo, angleAppears, angleDisappears, reInitialisation);
#endif
  }

  reinit(I);
}

void
vpMbKltTracker::reinit(const vpImage<unsigned char>& I)
{
  c0Mo = cMo;
  ctTc0.setIdentity();
  firstTrack = false;

  vpImageConvert::convert(I, cur);

  // mask
  IplImage* mask = cvCreateImage(cvSize((int)I.getWidth(), (int)I.getHeight()), IPL_DEPTH_8U, 1);
  cvZero(mask);

  for (unsigned int i = 0; i < faces.size(); i += 1){
    if(faces[i]->isVisible())
      faces[i]->updateMask(mask, 255/* - i*15*/, maskBorder);
  }

  tracker.initTracking(cur, mask);

  for (unsigned int i = 0; i < faces.size(); i += 1){
    if(faces[i]->isVisible()){
      faces[i]->init(tracker);
    }
  }

  cvReleaseImage(&mask);
}

/*!
  Reset the tracker. The pose is set to identity, the default tracking
  parameters are restored and the set of faces is cleared. The tracker has
  to be initialized again with a new model and a new pose.
*/
void
vpMbKltTracker::resetTracker()
{
  cMo.setIdentity();

  if(cur != NULL){
    cvReleaseImage(&cur);
    cur = NULL;
  }

  compute_interaction = true;
  firstInitialisation = true;
  computeCovariance = false;
  firstTrack = false;

  tracker.setTrackerId(1);
  tracker.setUseHarris(1);

  tracker.setMaxFeatures(10000);
  tracker.setWindowSize(5);
  tracker.setQuality(0.01);
  tracker.setMinDistance(5);
  tracker.setHarrisFreeParameter(0.01);
  tracker.setBlockSize(3);
  tracker.setPyramidLevels(3);

  angleAppears = vpMath::rad(65);
  angleDisappears = vpMath::rad(75);

  clippingFlag = vpMbtPolygon::NO_CLIPPING;

  maskBorder = 5;
  threshold_outlier = 0.5;
  percentGood = 0.7;

  lambda = 0.8;
  maxIter = 200;

  faces.reset();

#ifdef VISP_HAVE_OGRE
  useOgre = false;
#endif
}

/*!
  Get the current list of KLT points.

  \warning The returned points are the ones tracked by the KLT tracker,
  expressed in pixel coordinates.

  \return the list of KLT points.
*/
std::vector<vpImagePoint>
vpMbKltTracker::getKltImagePoints() const
{
  std::vector<vpImagePoint> kltPoints;
  for (unsigned int i = 0; i < static_cast<unsigned int>(tracker.getNbFeatures()); i ++){
    int id;
    float x_tmp, y_tmp;
    tracker.getFeature((int)i, id, x_tmp, y_tmp);
    kltPoints.push_back(vpImagePoint(y_tmp, x_tmp));
  }

  return kltPoints;
}

/*!
  Get the current list of KLT points and their identifiers.

  \warning The returned points are the ones tracked by the KLT tracker,
  expressed in pixel coordinates.

  \return the map of KLT points, indexed by their id.
*/
std::map<int, vpImagePoint>
vpMbKltTracker::getKltImagePointsWithId() const
{
  std::map<int, vpImagePoint> kltPoints;
  for (unsigned int i = 0; i < static_cast<unsigned int>(tracker.getNbFeatures()); i ++){
    int id;
    float x_tmp, y_tmp;
    tracker.getFeature((int)i, id, x_tmp, y_tmp);
    kltPoints[id] = vpImagePoint(y_tmp, x_tmp);
  }

  return kltPoints;
}

/*!
  Set the new value of the KLT tracker.

  \param t : KLT tracker containing the new values.
*/
void
vpMbKltTracker::setKltOpencv(const vpKltOpencv& t){
  tracker.setMaxFeatures(t.getMaxFeatures());
  tracker.setWindowSize(t.getWindowSize());
  tracker.setQuality(t.getQuality());
  tracker.setMinDistance(t.getMinDistance());
  tracker.setHarrisFreeParameter(t.getHarrisFreeParameter());
  tracker.setBlockSize(t.getBlockSize());
  tracker.setPyramidLevels(t.getPyramidLevels());
}
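
/*
  A minimal configuration sketch for the function above, assuming typical
  client code (the vpKltOpencv setters used here are the ones wrapped by
  setKltOpencv(); the numeric values are illustrative only):

    #include <visp/vpKltOpencv.h>
    #include <visp/vpMbKltTracker.h>

    void configureKlt(vpMbKltTracker &mbt)
    {
      vpKltOpencv klt;
      klt.setMaxFeatures(300);          // maximum number of KLT points
      klt.setWindowSize(5);             // size of the tracking window
      klt.setQuality(0.01);             // minimal accepted corner quality
      klt.setMinDistance(10);           // minimal distance between two points
      klt.setHarrisFreeParameter(0.01); // Harris detector free parameter
      klt.setBlockSize(3);              // neighbourhood block size
      klt.setPyramidLevels(3);          // number of pyramid levels
      mbt.setKltOpencv(klt);            // copy these settings into the tracker
    }
*/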

/*!
  Set the camera parameters.

  \param cam : The new camera parameters.
*/
void
vpMbKltTracker::setCameraParameters(const vpCameraParameters& cam)
{
  for (unsigned int i = 0; i < faces.size(); i += 1){
    faces[i]->setCameraParameters(cam);
  }
  this->cam = cam;
}

/*!
  Use Ogre3D for the visibility tests.

  \warning This function has to be called before the initialization of the tracker.

  \param v : True to use Ogre3D, false otherwise.
*/
void
vpMbKltTracker::setOgreVisibilityTest(const bool &v)
{
  useOgre = v;
  if(useOgre){
#ifndef VISP_HAVE_OGRE
    useOgre = false;
    std::cout << "WARNING: ViSP was built without Ogre3D, so the basic visibility test will be used. setOgreVisibilityTest() is set to false." << std::endl;
#endif
  }
}

/*!
  Set the pose to be used in entry of the next call to the track() function.
  This pose will be just used once.

  \param I : Grayscale image used as input.
  \param cdMo : The new pose to use.
*/
void
vpMbKltTracker::setPose(const vpImage<unsigned char> &I, const vpHomogeneousMatrix& cdMo)
{
  if(firstTrack){
    bool reInitialisation = false;
    if(!useOgre)
      faces.setVisible(I, cam, cdMo, angleAppears, angleDisappears, reInitialisation);
    else{
  #ifdef VISP_HAVE_OGRE
      faces.setVisibleOgre(I, cam, cdMo, angleAppears, angleDisappears, reInitialisation);
  #else
      faces.setVisible(I, cam, cdMo, angleAppears, angleDisappears, reInitialisation);
  #endif
    }

    if(reInitialisation){
      std::cout << "WARNING: Visibility changed, must reinitialize to update pose" << std::endl;
      cMo = cdMo;
      reinit(I);
    }
    else{
      vpHomogeneousMatrix cdMc = cdMo * cMo.inverse();
      vpHomogeneousMatrix cMcd = cdMc.inverse();

      vpRotationMatrix cdRc;
      vpTranslationVector cdtc;

      cdMc.extract(cdRc);
      cdMc.extract(cdtc);

      CvPoint2D32f* initial_guess = NULL;
      initial_guess = (CvPoint2D32f*)cvAlloc((unsigned int)tracker.getMaxFeatures()*sizeof(initial_guess[0]));

      for (unsigned int i = 0; i < faces.size(); i += 1){
        if(faces[i]->isVisible() && faces[i]->hasEnoughPoints()){
          //Get the normal to the face at the current state cMo
          vpPlane plan(faces[i]->p[0], faces[i]->p[1], faces[i]->p[2]);
          plan.changeFrame(cMcd);

          vpColVector Nc = plan.getNormal();
          Nc.normalize();

          double invDc = 1.0 / plan.getD();

          //Create the homography
          vpHomography cdHc;
          vpGEMM(cdtc, Nc, -invDc, cdRc, 1.0, cdHc, VP_GEMM_B_T);
          cdHc /= cdHc[2][2];

          //Create the 2D homography
          vpMatrix cdGc = cam.get_K() * cdHc * cam.get_K_inverse();

          //Points displacement
          std::map<int, vpImagePoint>::const_iterator iter = faces[i]->getCurrentPoints().begin();
          for( ; iter != faces[i]->getCurrentPoints().end(); iter++){
            vpColVector cdp(3);
            cdp[0] = iter->second.get_j(); cdp[1] = iter->second.get_i(); cdp[2] = 1.0;

            double p_mu_t_2 = cdp[0] * cdGc[2][0] + cdp[1] * cdGc[2][1] + cdGc[2][2];

            if( fabs(p_mu_t_2) < std::numeric_limits<double>::epsilon()){
              cdp[0] = 0.0;
              cdp[1] = 0.0;
              throw vpException(vpException::divideByZeroError, "the depth of the point is zero");
            }

            // Both coordinates must be computed from the original (u, v, 1) vector,
            // so store the results in temporaries before overwriting cdp.
            double cdp_u = (cdp[0] * cdGc[0][0] + cdp[1] * cdGc[0][1] + cdGc[0][2]) / p_mu_t_2;
            double cdp_v = (cdp[0] * cdGc[1][0] + cdp[1] * cdGc[1][1] + cdGc[1][2]) / p_mu_t_2;
            cdp[0] = cdp_u;
            cdp[1] = cdp_v;

            //Set value to the KLT tracker
            initial_guess[(faces[i]->getCurrentPointsInd())[iter->first]].x = (float)cdp[0];
            initial_guess[(faces[i]->getCurrentPointsInd())[iter->first]].y = (float)cdp[1];
          }
        }
      }

      tracker.setInitialGuess(&initial_guess);

      if(initial_guess) cvFree(&initial_guess);
      initial_guess = NULL;

      cMo = cdMo;
    }
  }
}
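
/*
  For reference, the point transfer performed above is the classical
  plane-induced homography: with (cdRc, cdtc) the displacement between the
  current and the desired camera frames and (Nc, Dc) the normal and distance
  of the plane supporting the face, the Euclidean homography is
      cdHc = cdRc - (cdtc * Nc^T) / Dc
  and the pixel-level homography applied to every KLT point is
      cdGc = K * cdHc * K^-1.
  A minimal sketch of the same transfer for a single pixel, using a
  hypothetical standalone helper (name and signature are illustrative only):

    vpImagePoint transferPoint(const vpImagePoint &p, const vpMatrix &cdGc)
    {
      vpColVector x(3);
      x[0] = p.get_u(); x[1] = p.get_v(); x[2] = 1.0; // homogeneous pixel coordinates
      vpColVector xd = cdGc * x;                      // apply the 3x3 homography
      return vpImagePoint(xd[1]/xd[2], xd[0]/xd[2]);  // back to (i, j) = (v, u)
    }
*/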

/*!
  Set the far distance for clipping.

  \param dist : Far clipping value.
*/
void
vpMbKltTracker::setFarClippingDistance(const double &dist)
{
  if( (clippingFlag & vpMbtPolygon::NEAR_CLIPPING) == vpMbtPolygon::NEAR_CLIPPING && dist <= distNearClip)
    vpTRACE("Far clipping value cannot be lower than the near clipping value. Far clipping won't be considered.");
  else if ( dist < 0 )
    vpTRACE("Far clipping value cannot be negative. Far clipping won't be considered.");
  else{
    clippingFlag = (clippingFlag | vpMbtPolygon::FAR_CLIPPING);
    distFarClip = dist;
    for (unsigned int i = 0; i < faces.size(); i ++){
      faces[i]->setFarClippingDistance(distFarClip);
    }
  }
}
408 
414 void
416 {
417  if( (clippingFlag & vpMbtPolygon::FAR_CLIPPING) == vpMbtPolygon::FAR_CLIPPING && dist >= distFarClip)
418  vpTRACE("Near clipping value cannot be superior than far clipping value. Near clipping won't be considered.");
419  else if ( dist < 0 )
420  vpTRACE("Near clipping value cannot be inferior than 0. Near clipping won't be considered.");
421  else{
423  distNearClip = dist;
424  for (unsigned int i = 0; i < faces.size(); i ++){
425  faces[i]->setNearClippingDistance(distNearClip);
426  }
427  }
428 }

/*!
  Specify which clipping to use.

  \param flags : New clipping flags.
*/
void
vpMbKltTracker::setClipping(const unsigned int &flags)
{
  clippingFlag = flags;
  for (unsigned int i = 0; i < faces.size(); i ++)
    faces[i]->setClipping(clippingFlag);
}

/*!
  Add a polygonal face of the model from its corners (expressed in the object
  frame). The face is added only if it has more than two corners.

  \param corners : The vector of corners representing the face.
  \param indexFace : The index of the face.
*/
void
vpMbKltTracker::initFaceFromCorners(const std::vector<vpPoint>& corners, const unsigned int indexFace)
{
  if( corners.size() > 2){ // This tracker can't handle lines
    vpMbtKltPolygon *polygon = new vpMbtKltPolygon;
  //   polygon->setCameraParameters(cam);
    polygon->setNbPoint((unsigned int)corners.size());
    polygon->setIndex((int)indexFace);
    for(unsigned int j = 0; j < corners.size(); j++) {
      polygon->addPoint(j, corners[j]);
    }
    faces.addPolygon(polygon);
    faces.getPolygon().back()->setCameraParameters(cam);

    if(clippingFlag != vpMbtPolygon::NO_CLIPPING)
      faces.getPolygon().back()->setClipping(clippingFlag);

    if((clippingFlag & vpMbtPolygon::NEAR_CLIPPING) == vpMbtPolygon::NEAR_CLIPPING)
      faces.getPolygon().back()->setNearClippingDistance(distNearClip);

    if((clippingFlag & vpMbtPolygon::FAR_CLIPPING) == vpMbtPolygon::FAR_CLIPPING)
      faces.getPolygon().back()->setFarClippingDistance(distFarClip);

    delete polygon;
    polygon = NULL;
  }
}

/*!
  Pre-tracking: track the KLT points between the previous and the current
  image, and count the points and faces that can be used by the pose
  estimation stage.

  \param I : The input image.
  \param nbInfos : Sum of the tracked points on the visible faces that have enough points.
  \param nbFaceUsed : Number of faces with enough tracked points.
*/
void
vpMbKltTracker::preTracking(const vpImage<unsigned char>& I, unsigned int &nbInfos, unsigned int &nbFaceUsed)
{
  vpImageConvert::convert(I, cur);
  tracker.track(cur);

  if(!firstTrack)
    firstTrack = true;

  nbInfos = 0;
  nbFaceUsed = 0;
  for (unsigned int i = 0; i < faces.size(); i += 1){
    if(faces[i]->isVisible()){
      faces[i]->computeNbDetectedCurrent(tracker);
//       faces[i]->ransac();
      if(faces[i]->hasEnoughPoints()){
        nbInfos += faces[i]->getNbPointsCur();
        nbFaceUsed++;
      }
    }
  }
}

/*!
  Post-tracking: remove the outliers from the faces and check whether the
  tracker has to be reinitialized (too many points lost or visibility changed).

  \return true if the tracker needs to be reinitialized.
*/
bool
vpMbKltTracker::postTracking(const vpImage<unsigned char>& I, vpColVector &w)
{
  // For a better post-tracking, the tracker should be reinitialized when some visible
  // faces no longer have enough points. This is not done here to keep the tracking fast.
  bool reInitialisation = false;

  unsigned int initialNumber = 0;
  unsigned int currentNumber = 0;
  unsigned int shift = 0;
  for (unsigned int i = 0; i < faces.size(); i += 1){
    if(faces[i]->isVisible()){
      initialNumber += faces[i]->getInitialNumberPoint();
      if(faces[i]->hasEnoughPoints()){
        vpSubColVector sub_w(w, shift, 2*faces[i]->getNbPointsCur());
        faces[i]->removeOutliers(sub_w, threshold_outlier);
        shift += 2*faces[i]->getNbPointsCur();

        currentNumber += faces[i]->getNbPointsCur();
      }
//       else{
//         reInitialisation = true;
//         break;
//       }
    }
  }

//   if(!reInitialisation){
    double value = percentGood * (double)initialNumber;
    if((double)currentNumber < value){
//     std::cout << "Too many points disappeared: " << initialNumber << "/" << currentNumber << std::endl;
      reInitialisation = true;
    }
    else{
      if(!useOgre)
        faces.setVisible(I, cam, cMo, angleAppears, angleDisappears, reInitialisation);
      else{
#ifdef VISP_HAVE_OGRE
        faces.setVisibleOgre(I, cam, cMo, angleAppears, angleDisappears, reInitialisation);
#else
        faces.setVisible(I, cam, cMo, angleAppears, angleDisappears, reInitialisation);
#endif
      }
    }
//   }

  if(reInitialisation)
    return true;

  return false;
}

/*!
  Realize the Virtual Visual Servoing loop: robustly minimize the reprojection
  error of the tracked KLT points to estimate the displacement ctTc0 and
  update the current pose cMo.

  \param nbInfos : Total number of tracked points used in the minimization.
  \param w : The robust weights computed for each residual.
*/
void
vpMbKltTracker::computeVVS(const unsigned int &nbInfos, vpColVector &w)
{
  vpMatrix J;          // interaction matrix
  vpColVector R;       // residual
  vpMatrix J_true;     // unweighted interaction matrix (for the covariance)
  vpColVector R_true;  // unweighted residual (for the covariance)
  vpColVector v;       // "speed" for VVS
  vpHomography H;
  vpColVector w_true;
  vpRobust robust(2*nbInfos);

  vpMatrix JTJ, JTR;

  double normRes = 0;
  double normRes_1 = -1;
  unsigned int iter = 0;

  R.resize(2*nbInfos);
  J.resize(2*nbInfos, 6, 0);

  while( ((int)((normRes - normRes_1)*1e8) != 0 ) && (iter<maxIter) ){

    unsigned int shift = 0;
    for (unsigned int i = 0; i < faces.size(); i += 1){
      if(faces[i]->isVisible() && faces[i]->hasEnoughPoints()){
        vpSubColVector subR(R, shift, 2*faces[i]->getNbPointsCur());
        vpSubMatrix subJ(J, shift, 0, 2*faces[i]->getNbPointsCur(), 6);
        try{
          faces[i]->computeHomography(ctTc0, H);
          faces[i]->computeInteractionMatrixAndResidu(subR, subJ);
        }catch(...){
          std::cerr << "exception while tracking face " << i << std::endl;
          throw ;
        }

        shift += 2*faces[i]->getNbPointsCur();
      }
    }

    /* robust */
    if(iter == 0){
      w_true.resize(2*nbInfos);
      w.resize(2*nbInfos);
      w = 1;
    }
    robust.setIteration(iter);
    robust.setThreshold(2/cam.get_px());
    robust.MEstimator( vpRobust::TUKEY, R, w);

    if(computeCovariance){
      R_true = R;
      J_true = J;
    }

    normRes_1 = normRes;
    normRes = 0;
    for (unsigned int i = 0; i < static_cast<unsigned int>(R.getRows()); i += 1){
      w_true[i] = w[i] * w[i]; // keep the weights for the covariance computation
      R[i] = R[i] * w[i];
      normRes += R[i];
    }

    if((iter == 0) || compute_interaction){
      for(unsigned int i=0; i<static_cast<unsigned int>(R.getRows()); i++){
        for(unsigned int j=0; j<6; j++){
          J[i][j] *= w[i];
        }
      }
    }

    JTJ = J.AtA();
    computeJTR(J, R, JTR);
    v = -lambda * JTJ.pseudoInverse(1e-16) * JTR;
    ctTc0 = vpExponentialMap::direct(v).inverse() * ctTc0;

    iter++;
  }

  if(computeCovariance){
    vpMatrix D;
    D.diag(w_true);
    covarianceMatrix = vpMatrix::computeCovarianceMatrix(D*J_true, v, -lambda*(D*R_true));
  }

  cMo = ctTc0 * c0Mo;
}
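
/*
  For reference, each iteration of computeVVS() above is one iteratively
  reweighted Gauss-Newton step of a virtual visual servoing control law:
  with J the stacked interaction matrices of the KLT points, R the stacked
  residuals and W = diag(w) the Tukey M-estimator weights, the velocity twist
  is
      v = -lambda * (W J)^+ * (W R)
  and the displacement is integrated through the exponential map
      ctTc0 <- exp(v)^-1 * ctTc0,    cMo = ctTc0 * c0Mo.
  Because the weights are refreshed at every iteration, outlying KLT points
  progressively lose their influence on the estimated pose.
*/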

/*!
  Track the object in the given image.

  \throw vpTrackingException::notEnoughPointError if there is not enough data
  to perform the tracking.

  \param I : The input image.
*/
void
vpMbKltTracker::track(const vpImage<unsigned char>& I)
{
  unsigned int nbInfos;
  unsigned int nbFaceUsed;
  preTracking(I, nbInfos, nbFaceUsed);

  if(nbInfos < 4 || nbFaceUsed == 0){
    vpERROR_TRACE("\n\t\t Error-> not enough data") ;
    throw vpTrackingException(vpTrackingException::notEnoughPointError, "\n\t\t Error-> not enough data");
  }

  vpColVector w;
  computeVVS(nbInfos, w);

  if(postTracking(I, w))
    reinit(I);
}

/*!
  Load the XML configuration file.

  \throw vpException::ioError if the file has not been properly parsed (file
  not found or wrong format for the data).

  \param configFile : Full name of the XML file.
*/
void
vpMbKltTracker::loadConfigFile(const std::string& configFile)
{
  vpMbKltTracker::loadConfigFile(configFile.c_str());
}

/*!
  Load the XML configuration file. From the file, the camera parameters, the
  KLT detector settings (mask border, maximum number of features, window
  size, quality, minimum distance, Harris parameter, block size and number of
  pyramid levels), the visibility angles and the clipping distances are
  initialized.

  \warning To use this method, the project has to be built with libXML2.

  \throw vpException::ioError if the file has not been properly parsed.

  \param configFile : Full name of the XML file.
*/
void
vpMbKltTracker::loadConfigFile(const char* configFile)
{
#ifdef VISP_HAVE_XML2
  vpMbtKltXmlParser xmlp;

  xmlp.setMaxFeatures(10000);
  xmlp.setWindowSize(5);
  xmlp.setQuality(0.01);
  xmlp.setMinDistance(5);
  xmlp.setHarrisParam(0.01);
  xmlp.setBlockSize(3);
  xmlp.setPyramidLevels(3);
  xmlp.setMaskBorder(maskBorder);
  xmlp.setAngleAppear(vpMath::deg(angleAppears));
  xmlp.setAngleDisappear(vpMath::deg(angleDisappears));

  try{
    std::cout << " *********** Parsing XML for MBT KLT Tracker ************ " << std::endl;

    xmlp.parse(configFile);
  }
  catch(...){
    vpERROR_TRACE("Can't open XML file \"%s\"\n ", configFile);
    throw vpException(vpException::ioError, "problem parsing the configuration file.");
  }

  vpCameraParameters camera;
  xmlp.getCameraParameters(camera);
  setCameraParameters(camera);

  tracker.setMaxFeatures((int)xmlp.getMaxFeatures());
  tracker.setWindowSize((int)xmlp.getWindowSize());
  tracker.setQuality(xmlp.getQuality());
  tracker.setMinDistance(xmlp.getMinDistance());
  tracker.setHarrisFreeParameter(xmlp.getHarrisParam());
  tracker.setBlockSize((int)xmlp.getBlockSize());
  tracker.setPyramidLevels((int)xmlp.getPyramidLevels());
  maskBorder = xmlp.getMaskBorder();
  angleAppears = vpMath::rad(xmlp.getAngleAppear());
  angleDisappears = vpMath::rad(xmlp.getAngleDisappear());

  if(xmlp.hasNearClippingDistance())
    setNearClippingDistance(xmlp.getNearClippingDistance());

  if(xmlp.hasFarClippingDistance())
    setFarClippingDistance(xmlp.getFarClippingDistance());

  if(xmlp.getFovClipping())
    setClipping(clippingFlag | vpMbtPolygon::FOV_CLIPPING);
#else
  vpTRACE("You need libXML2 to read the config file %s", configFile);
#endif
}

/*!
  Display the 3D model at a given pose.

  \param I : The grayscale image.
  \param cMo : Pose used to project the 3D model into the image.
  \param cam : The camera parameters.
  \param col : The desired color.
  \param thickness : The thickness of the lines.
  \param displayFullModel : If true, even the non visible faces are displayed.
*/
void
vpMbKltTracker::display(const vpImage<unsigned char>& I, const vpHomogeneousMatrix &cMo, const vpCameraParameters & cam,
                        const vpColor& col , const unsigned int thickness, const bool displayFullModel)
{
  vpCameraParameters c = cam;

  if(clippingFlag > 3) // Contains at least one FOV constraint
    c.computeFov(I.getWidth(), I.getHeight());

  for (unsigned int i = 0; i < faces.size(); i += 1){
    if(displayFullModel || faces[i]->isVisible())
    {
      faces[i]->changeFrame(cMo);
      faces[i]->computeRoiClipped(c);
      std::vector<std::pair<vpImagePoint,unsigned int> > roi;
      faces[i]->getRoiClipped(c, roi);

      for (unsigned int j = 0; j < roi.size(); j += 1){
        if(((roi[(j+1)%roi.size()].second & roi[j].second & vpMbtPolygon::NEAR_CLIPPING) == 0) &&
           ((roi[(j+1)%roi.size()].second & roi[j].second & vpMbtPolygon::FAR_CLIPPING) == 0) &&
           ((roi[(j+1)%roi.size()].second & roi[j].second & vpMbtPolygon::DOWN_CLIPPING) == 0) &&
           ((roi[(j+1)%roi.size()].second & roi[j].second & vpMbtPolygon::UP_CLIPPING) == 0) &&
           ((roi[(j+1)%roi.size()].second & roi[j].second & vpMbtPolygon::LEFT_CLIPPING) == 0) &&
           ((roi[(j+1)%roi.size()].second & roi[j].second & vpMbtPolygon::RIGHT_CLIPPING) == 0)){
          vpImagePoint ip1, ip2;
          ip1 = roi[j].first;
          ip2 = roi[(j+1)%roi.size()].first;

          vpDisplay::displayLine (I, ip1, ip2, col, thickness);
        }
      }
    }
    if(displayFeatures && faces[i]->hasEnoughPoints() && faces[i]->isVisible()) {
      faces[i]->displayPrimitive(I);
//       faces[i]->displayNormal(I);
    }
  }

#ifdef VISP_HAVE_OGRE
  if(useOgre)
    faces.displayOgre(cMo);
#endif
}

/*!
  Display the 3D model at a given pose.

  \param I : The color image.
  \param cMo : Pose used to project the 3D model into the image.
  \param cam : The camera parameters.
  \param col : The desired color.
  \param thickness : The thickness of the lines.
  \param displayFullModel : If true, even the non visible faces are displayed.
*/
void
vpMbKltTracker::display(const vpImage<vpRGBa>& I, const vpHomogeneousMatrix &cMo, const vpCameraParameters & cam,
                        const vpColor& col , const unsigned int thickness, const bool displayFullModel)
{
  vpCameraParameters c = cam;

  if(clippingFlag > 3) // Contains at least one FOV constraint
    c.computeFov(I.getWidth(), I.getHeight());

  for (unsigned int i = 0; i < faces.size(); i += 1){
    if(displayFullModel || faces[i]->isVisible())
    {
      faces[i]->changeFrame(cMo);
      faces[i]->computeRoiClipped(c);
      std::vector<std::pair<vpImagePoint,unsigned int> > roi;
      faces[i]->getRoiClipped(c, roi);

      for (unsigned int j = 0; j < roi.size(); j += 1){
        if(((roi[(j+1)%roi.size()].second & roi[j].second & vpMbtPolygon::NEAR_CLIPPING) == 0) &&
           ((roi[(j+1)%roi.size()].second & roi[j].second & vpMbtPolygon::FAR_CLIPPING) == 0) &&
           ((roi[(j+1)%roi.size()].second & roi[j].second & vpMbtPolygon::DOWN_CLIPPING) == 0) &&
           ((roi[(j+1)%roi.size()].second & roi[j].second & vpMbtPolygon::UP_CLIPPING) == 0) &&
           ((roi[(j+1)%roi.size()].second & roi[j].second & vpMbtPolygon::LEFT_CLIPPING) == 0) &&
           ((roi[(j+1)%roi.size()].second & roi[j].second & vpMbtPolygon::RIGHT_CLIPPING) == 0)){
          vpImagePoint ip1, ip2;
          ip1 = roi[j].first;
          ip2 = roi[(j+1)%roi.size()].first;

          vpDisplay::displayLine (I, ip1, ip2, col, thickness);
        }
      }
    }
    if(displayFeatures && faces[i]->hasEnoughPoints() && faces[i]->isVisible()) {
      faces[i]->displayPrimitive(I);
//       faces[i]->displayNormal(I);
    }
  }

#ifdef VISP_HAVE_OGRE
  if(useOgre)
    faces.displayOgre(cMo);
#endif
}

/*!
  Test the quality of the tracking.

  \throw vpTrackingException if the test fails (too few points to consider
  the tracking reliable).
*/
void
vpMbKltTracker::testTracking()
{
  unsigned int nbTotalPoints = 0;
  for (unsigned int i = 0; i < faces.size(); i += 1){
    if(faces[i]->isVisible()){
      nbTotalPoints += faces[i]->getNbPointsCur();
    }
  }

  if(nbTotalPoints < 10){
    std::cerr << "test tracking failed (too few points to realize a good tracking)." << std::endl;
    throw vpTrackingException(vpTrackingException::notEnoughPointError,
          "test tracking failed (too few points to realize a good tracking).");
  }
}

#endif //VISP_HAVE_OPENCV
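
/*
  A minimal usage sketch of this tracker, assuming the vpMbTracker base-class
  interface (loadModel(), initClick(), getPose()) and an image acquisition
  loop already in place; the file names are illustrative only:

    #include <visp/vpMbKltTracker.h>

    int main()
    {
      vpImage<unsigned char> I;       // current grayscale image
      vpCameraParameters cam;
      vpHomogeneousMatrix cMo;

      vpMbKltTracker tracker;
      tracker.loadConfigFile("object.xml");  // camera + KLT settings (see loadConfigFile() above)
      tracker.getCameraParameters(cam);
      tracker.loadModel("object.wrl");       // CAD model of the object
      tracker.initClick(I, "object.init");   // initial pose from user clicks

      while(true) {
        // ... acquire a new image in I ...
        tracker.track(I);                    // KLT pre-tracking + VVS pose estimation
        tracker.getPose(cMo);                // pose of the object in the camera frame
        tracker.display(I, cMo, cam, vpColor::red, 2);
      }
      return 0;
    }
*/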