#include <visp3/mbt/vpMbGenericTracker.h>

#include <visp3/core/vpDisplay.h>
#include <visp3/core/vpExponentialMap.h>
#include <visp3/core/vpTrackingException.h>
#include <visp3/core/vpIoTools.h>
#include <visp3/mbt/vpMbtXmlGenericParser.h>

#ifdef VISP_HAVE_NLOHMANN_JSON
#include VISP_NLOHMANN_JSON(json.hpp)
using json = nlohmann::json;
  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
    m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
    m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
    m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
    m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
  else if (nbCameras == 1) {
  for (unsigned int i = 1; i <= nbCameras; i++) {
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
    m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
    m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
  if (trackerTypes.empty()) {
  if (trackerTypes.size() == 1) {
  for (size_t i = 1; i <= trackerTypes.size(); i++) {
    std::stringstream ss;
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
                                         const std::vector<int> &trackerTypes)
  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
    m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
    m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
  if (cameraNames.size() != trackerTypes.size() || cameraNames.empty()) {
                              "cameraNames.size() != trackerTypes.size() || cameraNames.empty()");
  for (size_t i = 0; i < cameraNames.size(); i++) {
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
    it->second = nullptr;
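  // Sum each camera's raw projection error (in radian) together with the number of features it used; the
  // value returned below is the feature-weighted mean converted to degree.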
  double rawTotalProjectionError = 0.0;
  unsigned int nbTotalFeaturesUsed = 0;
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    unsigned int nbFeaturesUsed = 0;
    double curProjError = tracker->computeProjectionErrorImpl(I, cMo, cam, nbFeaturesUsed);
    if (nbFeaturesUsed > 0) {
      nbTotalFeaturesUsed += nbFeaturesUsed;
      rawTotalProjectionError += curProjError;
  if (nbTotalFeaturesUsed > 0) {
    return vpMath::deg(rawTotalProjectionError / (double)nbTotalFeaturesUsed);
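  // Variant based on the already computed errors: each sub-tracker stores its mean projection error in degree,
  // so it is converted back to radian and weighted by its feature count before averaging.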
  double rawTotalProjectionError = 0.0;
  unsigned int nbTotalFeaturesUsed = 0;
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    double curProjError = tracker->getProjectionError();
    unsigned int nbFeaturesUsed = tracker->nbFeaturesForProjErrorComputation;
    if (nbFeaturesUsed > 0) {
      nbTotalFeaturesUsed += nbFeaturesUsed;
      rawTotalProjectionError += (vpMath::rad(curProjError) * nbFeaturesUsed);
  if (nbTotalFeaturesUsed > 0) {
  double normRes_1 = -1;
  unsigned int iter = 0;
  std::map<std::string, vpVelocityTwistMatrix> mapOfVelocityTwist;
    mapOfVelocityTwist[it->first] = cVo;
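  // Each velocity twist matrix maps the reference-camera velocity into the corresponding camera frame, so that
  // all interaction matrices can be stacked and solved for a single pose increment.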
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
    bool reStartFromLastIncrement = false;
    if (reStartFromLastIncrement) {
      for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
        TrackerWrapper *tracker = it->second;
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
        tracker->ctTc0 = c_curr_tTc_curr0;
    if (!reStartFromLastIncrement) {
      if (!isoJoIdentity) {
        LVJ_true = (m_L * (cVo * oJo));
          unsigned int rank = (m_L * cVo).kernel(K);
            isoJoIdentity = false;
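      // Apply the robust weights of every feature type (edge, KLT, depth normal, dense depth), scaled by the
      // per-type feature factor, to the corresponding rows of the stacked interaction matrix m_L.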
      unsigned int start_index = 0;
      for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
        TrackerWrapper *tracker = it->second;
          for (unsigned int i = 0; i < tracker->m_error_edge.getRows(); i++) {
            double wi = tracker->m_w_edge[i] * tracker->m_factor[i] * factorEdge;
            W_true[start_index + i] = wi;
            for (unsigned int j = 0; j < m_L.getCols(); j++) {
              m_L[start_index + i][j] *= wi;
          start_index += tracker->m_error_edge.getRows();
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
        if (tracker->m_trackerType & KLT_TRACKER) {
          for (unsigned int i = 0; i < tracker->m_error_klt.getRows(); i++) {
            double wi = tracker->m_w_klt[i] * factorKlt;
            W_true[start_index + i] = wi;
            for (unsigned int j = 0; j < m_L.getCols(); j++) {
              m_L[start_index + i][j] *= wi;
          start_index += tracker->m_error_klt.getRows();
          for (unsigned int i = 0; i < tracker->m_error_depthNormal.getRows(); i++) {
            double wi = tracker->m_w_depthNormal[i] * factorDepth;
            W_true[start_index + i] = wi;
            for (unsigned int j = 0; j < m_L.getCols(); j++) {
              m_L[start_index + i][j] *= wi;
          start_index += tracker->m_error_depthNormal.getRows();
          for (unsigned int i = 0; i < tracker->m_error_depthDense.getRows(); i++) {
            double wi = tracker->m_w_depthDense[i] * factorDepthDense;
            W_true[start_index + i] = wi;
            for (unsigned int j = 0; j < m_L.getCols(); j++) {
              m_L[start_index + i][j] *= wi;
          start_index += tracker->m_error_depthDense.getRows();
      normRes = sqrt(num / den);
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->ctTc0 = c_curr_tTc_curr0;
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
    if (tracker->m_trackerType & KLT_TRACKER) {
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->updateMovingEdgeWeights();
  unsigned int nbFeatures = 0;
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->computeVVSInit(mapOfImages[it->first]);
    nbFeatures += tracker->m_error.getRows();
                            "computeVVSInteractionMatrixAndResidu() should not be called");
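// Each sub-tracker computes its own interaction matrix and residual; the interaction matrix is expressed in the
// reference camera frame through the velocity twist matrix before being stacked into m_L.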
                                                              std::map<std::string, vpVelocityTwistMatrix> &mapOfVelocityTwist)
  unsigned int start_index = 0;
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
    tracker->ctTc0 = c_curr_tTc_curr0;
    tracker->computeVVSInteractionMatrixAndResidu(mapOfImages[it->first]);
    m_L.insert(tracker->m_L * mapOfVelocityTwist[it->first], start_index, 0);
    start_index += tracker->m_error.getRows();
  unsigned int start_index = 0;
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->computeVVSWeights();
    start_index += tracker->m_w.getRows();
                                bool displayFullModel)
    TrackerWrapper *tracker = it->second;
    tracker->display(I, cMo, cam, col, thickness, displayFullModel);
                                bool displayFullModel)
    TrackerWrapper *tracker = it->second;
    tracker->display(I, cMo, cam, col, thickness, displayFullModel);
                                unsigned int thickness, bool displayFullModel)
    std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    it->second->display(I1, c1Mo, cam1, color, thickness, displayFullModel);
    it->second->display(I2, c2Mo, cam2, color, thickness, displayFullModel);
    std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size()
              << " cameras!"
                                bool displayFullModel)
    std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    it->second->display(I1, c1Mo, cam1, color, thickness, displayFullModel);
    it->second->display(I2, c2Mo, cam2, color, thickness, displayFullModel);
    std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size()
              << " cameras!"
                                const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
                                const std::map<std::string, vpCameraParameters> &mapOfCameraParameters,
                                const vpColor &col, unsigned int thickness, bool displayFullModel)
       it_img != mapOfImages.end(); ++it_img) {
    std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it_img->first);
    std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(it_img->first);
    std::map<std::string, vpCameraParameters>::const_iterator it_cam = mapOfCameraParameters.find(it_img->first);
    if (it_tracker != m_mapOfTrackers.end() && it_camPose != mapOfCameraPoses.end() &&
        it_cam != mapOfCameraParameters.end()) {
      TrackerWrapper *tracker = it_tracker->second;
      tracker->display(*it_img->second, it_camPose->second, it_cam->second, col, thickness, displayFullModel);
      std::cerr << "Missing elements for image:" << it_img->first << "!" << std::endl;
                                const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
                                const std::map<std::string, vpCameraParameters> &mapOfCameraParameters,
                                const vpColor &col, unsigned int thickness, bool displayFullModel)
  for (std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfImages.begin();
       it_img != mapOfImages.end(); ++it_img) {
    std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it_img->first);
    std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(it_img->first);
    std::map<std::string, vpCameraParameters>::const_iterator it_cam = mapOfCameraParameters.find(it_img->first);
    if (it_tracker != m_mapOfTrackers.end() && it_camPose != mapOfCameraPoses.end() &&
        it_cam != mapOfCameraParameters.end()) {
      TrackerWrapper *tracker = it_tracker->second;
      tracker->display(*it_img->second, it_camPose->second, it_cam->second, col, thickness, displayFullModel);
      std::cerr << "Missing elements for image:" << it_img->first << "!" << std::endl;
  std::vector<std::string> cameraNames;
  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
    cameraNames.push_back(it_tracker->first);
    std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    it->second->getCameraParameters(cam1);
    it->second->getCameraParameters(cam2);
    std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size()
              << " cameras!"
  mapOfCameraParameters.clear();
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    it->second->getCameraParameters(cam_);
    mapOfCameraParameters[it->first] = cam_;
  std::map<std::string, int> trackingTypes;
  TrackerWrapper *tracker;
  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
    tracker = it_tracker->second;
    trackingTypes[it_tracker->first] = tracker->getTrackerType();
  return trackingTypes;
    std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    clippingFlag1 = it->second->getClipping();
    clippingFlag2 = it->second->getClipping();
    std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size()
              << " cameras!"
  mapOfClippingFlags.clear();
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    mapOfClippingFlags[it->first] = tracker->getClipping();
  return it->second->getFaces();
  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
    return it->second->getFaces();
  std::cerr << "The camera: " << cameraName << " cannot be found!" << std::endl;
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
std::list<vpMbtDistanceCircle *> &vpMbGenericTracker::getFeaturesCircle()
    TrackerWrapper *tracker = it->second;
    return tracker->getFeaturesCircle();
std::list<vpMbtDistanceKltCylinder *> &vpMbGenericTracker::getFeaturesKltCylinder()
    TrackerWrapper *tracker = it->second;
    return tracker->getFeaturesKltCylinder();
std::list<vpMbtDistanceKltPoints *> &vpMbGenericTracker::getFeaturesKlt()
    TrackerWrapper *tracker = it->second;
    return tracker->getFeaturesKlt();
    return it->second->getFeaturesForDisplay();
  return std::vector<std::vector<double> >();
  mapOfFeatures.clear();
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    mapOfFeatures[it->first] = it->second->getFeaturesForDisplay();
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
std::vector<vpImagePoint> vpMbGenericTracker::getKltImagePoints() const
    TrackerWrapper *tracker = it->second;
    return tracker->getKltImagePoints();
  return std::vector<vpImagePoint>();
std::map<int, vpImagePoint> vpMbGenericTracker::getKltImagePointsWithId() const
    TrackerWrapper *tracker = it->second;
    return tracker->getKltImagePointsWithId();
  return std::map<int, vpImagePoint>();
unsigned int vpMbGenericTracker::getKltMaskBorder() const
    TrackerWrapper *tracker = it->second;
    return tracker->getKltMaskBorder();
int vpMbGenericTracker::getKltNbPoints() const
    TrackerWrapper *tracker = it->second;
    return tracker->getKltNbPoints();
vpKltOpencv vpMbGenericTracker::getKltOpencv() const
    TrackerWrapper *tracker;
    tracker = it_tracker->second;
    return tracker->getKltOpencv();
    std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    klt1 = it->second->getKltOpencv();
    klt2 = it->second->getKltOpencv();
    std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size()
              << " cameras!"
void vpMbGenericTracker::getKltOpencv(std::map<std::string, vpKltOpencv> &mapOfKlts) const
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    mapOfKlts[it->first] = tracker->getKltOpencv();
std::vector<cv::Point2f> vpMbGenericTracker::getKltPoints() const
    TrackerWrapper *tracker = it->second;
    return tracker->getKltPoints();
  return std::vector<cv::Point2f>();
double vpMbGenericTracker::getKltThresholdAcceptation() const { return m_thresholdOutlier; }
  it->second->getLcircle(circlesList, level);
                                   unsigned int level) const
  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
    it->second->getLcircle(circlesList, level);
    std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
  it->second->getLcylinder(cylindersList, level);
                                     unsigned int level) const
  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
    it->second->getLcylinder(cylindersList, level);
    std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
  it->second->getLline(linesList, level);
                                 unsigned int level) const
  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
    it->second->getLline(linesList, level);
    std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
                                   bool displayFullModel)
    return it->second->getModelForDisplay(width, height, cMo, cam, displayFullModel);
  return std::vector<std::vector<double> >();
                                          const std::map<std::string, unsigned int> &mapOfwidths,
                                          const std::map<std::string, unsigned int> &mapOfheights,
                                          const std::map<std::string, vpHomogeneousMatrix> &mapOfcMos,
                                          const std::map<std::string, vpCameraParameters> &mapOfCams,
                                          bool displayFullModel)
  mapOfModels.clear();
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    std::map<std::string, unsigned int>::const_iterator findWidth = mapOfwidths.find(it->first);
    std::map<std::string, unsigned int>::const_iterator findHeight = mapOfheights.find(it->first);
    std::map<std::string, vpHomogeneousMatrix>::const_iterator findcMo = mapOfcMos.find(it->first);
    std::map<std::string, vpCameraParameters>::const_iterator findCam = mapOfCams.find(it->first);
    if (findWidth != mapOfwidths.end() && findHeight != mapOfheights.end() && findcMo != mapOfcMos.end() &&
        findCam != mapOfCams.end()) {
      mapOfModels[it->first] = it->second->getModelForDisplay(findWidth->second, findHeight->second, findcMo->second,
                                                              findCam->second, displayFullModel);
  return it->second->getMovingEdge();
    std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    it->second->getMovingEdge(me1);
    it->second->getMovingEdge(me2);
    std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size()
              << " cameras!"
  mapOfMovingEdges.clear();
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    mapOfMovingEdges[it->first] = tracker->getMovingEdge();
  unsigned int nbGoodPoints = 0;
    nbGoodPoints = it->second->getNbPoints(level);
  return nbGoodPoints;
  mapOfNbPoints.clear();
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    mapOfNbPoints[it->first] = tracker->getNbPoints(level);
  return it->second->getNbPolygon();
  mapOfNbPolygons.clear();
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    mapOfNbPolygons[it->first] = tracker->getNbPolygon();
  return it->second->getPolygon(index);
  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
    return it->second->getPolygon(index);
    std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > >
  std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > polygonFaces;
    TrackerWrapper *tracker = it->second;
    polygonFaces = tracker->getPolygonFaces(orderPolygons, useVisibility, clipPolygon);
  return polygonFaces;
                                         std::map<std::string, std::vector<std::vector<vpPoint> > > &mapOfPoints,
                                         bool orderPolygons, bool useVisibility, bool clipPolygon)
  mapOfPolygons.clear();
  mapOfPoints.clear();
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > polygonFaces =
      tracker->getPolygonFaces(orderPolygons, useVisibility, clipPolygon);
    mapOfPolygons[it->first] = polygonFaces.first;
    mapOfPoints[it->first] = polygonFaces.second;
    std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    it->second->getPose(c1Mo);
    it->second->getPose(c2Mo);
    std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size()
              << " cameras!"
  mapOfCameraPoses.clear();
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->getPose(mapOfCameraPoses[it->first]);
  TrackerWrapper *tracker = it->second;
  return tracker->getTrackerType();
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
                            double, int, const std::string &)
#ifdef VISP_HAVE_MODULE_GUI
                                   const std::string &initFile1, const std::string &initFile2, bool displayHelp,
    std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->initClick(I1, initFile1, displayHelp, T1);
    tracker = it->second;
    tracker->initClick(I2, initFile2, displayHelp, T2);
    tracker = it->second;
    tracker->getPose(m_cMo);
                      "Cannot initClick()! Require two cameras but there are %d cameras!", m_mapOfTrackers.size());
                                   const std::string &initFile1, const std::string &initFile2, bool displayHelp,
    std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->initClick(I_color1, initFile1, displayHelp, T1);
    tracker = it->second;
    tracker->initClick(I_color2, initFile2, displayHelp, T2);
    tracker = it->second;
    tracker->getPose(m_cMo);
                      "Cannot initClick()! Require two cameras but there are %d cameras!", m_mapOfTrackers.size());
void vpMbGenericTracker::initClick(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
                                   const std::map<std::string, std::string> &mapOfInitFiles, bool displayHelp,
                                   const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
  std::map<std::string, std::string>::const_iterator it_initFile = mapOfInitFiles.find(m_referenceCameraName);
  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initFile != mapOfInitFiles.end()) {
    TrackerWrapper *tracker = it_tracker->second;
    std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
    if (it_T != mapOfT.end())
      tracker->initClick(*it_img->second, it_initFile->second, displayHelp, it_T->second);
      tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
    tracker->getPose(m_cMo);
    std::vector<std::string> vectorOfMissingCameraPoses;
      it_img = mapOfImages.find(it_tracker->first);
      it_initFile = mapOfInitFiles.find(it_tracker->first);
      if (it_img != mapOfImages.end() && it_initFile != mapOfInitFiles.end()) {
        TrackerWrapper *tracker = it_tracker->second;
        tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
        vectorOfMissingCameraPoses.push_back(it_tracker->first);
    for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
         it != vectorOfMissingCameraPoses.end(); ++it) {
      it_img = mapOfImages.find(*it);
      std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
                                  "Missing image or missing camera transformation matrix! Cannot set the pose for camera: %s!",
void vpMbGenericTracker::initClick(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
                                   const std::map<std::string, std::string> &mapOfInitFiles, bool displayHelp,
                                   const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
  std::map<std::string, std::string>::const_iterator it_initFile = mapOfInitFiles.find(m_referenceCameraName);
  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_initFile != mapOfInitFiles.end()) {
    TrackerWrapper *tracker = it_tracker->second;
    std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
    if (it_T != mapOfT.end())
      tracker->initClick(*it_img->second, it_initFile->second, displayHelp, it_T->second);
      tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
    tracker->getPose(m_cMo);
    std::vector<std::string> vectorOfMissingCameraPoses;
      it_img = mapOfColorImages.find(it_tracker->first);
      it_initFile = mapOfInitFiles.find(it_tracker->first);
      if (it_img != mapOfColorImages.end() && it_initFile != mapOfInitFiles.end()) {
        TrackerWrapper *tracker = it_tracker->second;
        tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
        vectorOfMissingCameraPoses.push_back(it_tracker->first);
    for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
         it != vectorOfMissingCameraPoses.end(); ++it) {
      it_img = mapOfColorImages.find(*it);
      std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
                                  "Missing image or missing camera transformation matrix! Cannot set the pose for camera: %s!",
                                       const int, const std::string &)
                                        const std::string &initFile1, const std::string &initFile2)
    std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->initFromPoints(I1, initFile1);
    tracker = it->second;
    tracker->initFromPoints(I2, initFile2);
    tracker = it->second;
    tracker->getPose(m_cMo);
    tracker->getCameraParameters(m_cam);
                      "Cannot initFromPoints()! Require two cameras but there are %d cameras!",
                                        const std::string &initFile1, const std::string &initFile2)
    std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->initFromPoints(I_color1, initFile1);
    tracker = it->second;
    tracker->initFromPoints(I_color2, initFile2);
    tracker = it->second;
    tracker->getPose(m_cMo);
    tracker->getCameraParameters(m_cam);
                      "Cannot initFromPoints()! Require two cameras but there are %d cameras!",
                                        const std::map<std::string, std::string> &mapOfInitPoints)
  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
  std::map<std::string, std::string>::const_iterator it_initPoints = mapOfInitPoints.find(m_referenceCameraName);
  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initPoints != mapOfInitPoints.end()) {
    TrackerWrapper *tracker = it_tracker->second;
    tracker->initFromPoints(*it_img->second, it_initPoints->second);
    tracker->getPose(m_cMo);
    std::vector<std::string> vectorOfMissingCameraPoints;
      it_img = mapOfImages.find(it_tracker->first);
      it_initPoints = mapOfInitPoints.find(it_tracker->first);
      if (it_img != mapOfImages.end() && it_initPoints != mapOfInitPoints.end()) {
        it_tracker->second->initFromPoints(*it_img->second, it_initPoints->second);
        vectorOfMissingCameraPoints.push_back(it_tracker->first);
    for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoints.begin();
         it != vectorOfMissingCameraPoints.end(); ++it) {
      it_img = mapOfImages.find(*it);
      std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
                                  "Missing image or missing camera transformation matrix! Cannot init the pose for camera: %s!",
                                        const std::map<std::string, std::string> &mapOfInitPoints)
  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
  std::map<std::string, std::string>::const_iterator it_initPoints = mapOfInitPoints.find(m_referenceCameraName);
  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() &&
      it_initPoints != mapOfInitPoints.end()) {
    TrackerWrapper *tracker = it_tracker->second;
    tracker->initFromPoints(*it_img->second, it_initPoints->second);
    tracker->getPose(m_cMo);
    std::vector<std::string> vectorOfMissingCameraPoints;
      it_img = mapOfColorImages.find(it_tracker->first);
      it_initPoints = mapOfInitPoints.find(it_tracker->first);
      if (it_img != mapOfColorImages.end() && it_initPoints != mapOfInitPoints.end()) {
        it_tracker->second->initFromPoints(*it_img->second, it_initPoints->second);
        vectorOfMissingCameraPoints.push_back(it_tracker->first);
    for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoints.begin();
         it != vectorOfMissingCameraPoints.end(); ++it) {
      it_img = mapOfColorImages.find(*it);
      std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
                                  "Missing image or missing camera transformation matrix! Cannot init the pose for camera: %s!",
                                      const std::string &initFile1, const std::string &initFile2)
    std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->initFromPose(I1, initFile1);
    tracker = it->second;
    tracker->initFromPose(I2, initFile2);
    tracker = it->second;
    tracker->getPose(m_cMo);
    tracker->getCameraParameters(m_cam);
                      "Cannot initFromPose()! Require two cameras but there "
                                      const std::string &initFile1, const std::string &initFile2)
    std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->initFromPose(I_color1, initFile1);
    tracker = it->second;
    tracker->initFromPose(I_color2, initFile2);
    tracker = it->second;
    tracker->getPose(m_cMo);
    tracker->getCameraParameters(m_cam);
                      "Cannot initFromPose()! Require two cameras but there "
                                      const std::map<std::string, std::string> &mapOfInitPoses)
  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
  std::map<std::string, std::string>::const_iterator it_initPose = mapOfInitPoses.find(m_referenceCameraName);
  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initPose != mapOfInitPoses.end()) {
    TrackerWrapper *tracker = it_tracker->second;
    tracker->initFromPose(*it_img->second, it_initPose->second);
    tracker->getPose(m_cMo);
    std::vector<std::string> vectorOfMissingCameraPoses;
      it_img = mapOfImages.find(it_tracker->first);
      it_initPose = mapOfInitPoses.find(it_tracker->first);
      if (it_img != mapOfImages.end() && it_initPose != mapOfInitPoses.end()) {
        it_tracker->second->initFromPose(*it_img->second, it_initPose->second);
        vectorOfMissingCameraPoses.push_back(it_tracker->first);
    for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
         it != vectorOfMissingCameraPoses.end(); ++it) {
      it_img = mapOfImages.find(*it);
      std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
                                  "Missing image or missing camera transformation matrix! Cannot init the pose for camera: %s!",
                                      const std::map<std::string, std::string> &mapOfInitPoses)
  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
  std::map<std::string, std::string>::const_iterator it_initPose = mapOfInitPoses.find(m_referenceCameraName);
  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_initPose != mapOfInitPoses.end()) {
    TrackerWrapper *tracker = it_tracker->second;
    tracker->initFromPose(*it_img->second, it_initPose->second);
    tracker->getPose(m_cMo);
    std::vector<std::string> vectorOfMissingCameraPoses;
      it_img = mapOfColorImages.find(it_tracker->first);
      it_initPose = mapOfInitPoses.find(it_tracker->first);
      if (it_img != mapOfColorImages.end() && it_initPose != mapOfInitPoses.end()) {
        it_tracker->second->initFromPose(*it_img->second, it_initPose->second);
        vectorOfMissingCameraPoses.push_back(it_tracker->first);
    for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
         it != vectorOfMissingCameraPoses.end(); ++it) {
      it_img = mapOfColorImages.find(*it);
      std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
                                  "Missing image or missing camera transformation matrix! Cannot init the pose for camera: %s!",
    std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    it->second->initFromPose(I1, c1Mo);
    it->second->initFromPose(I2, c2Mo);
                      "This method requires 2 cameras but there are %d cameras!", m_mapOfTrackers.size());
    std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    it->second->initFromPose(I_color1, c1Mo);
    it->second->initFromPose(I_color2, c2Mo);
                      "This method requires 2 cameras but there are %d cameras!", m_mapOfTrackers.size());
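// For the map-based overloads below, the pose is first set on the reference camera; cameras without an explicit
// entry are then initialized from the reference pose combined with their camera-to-reference transformation.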
                                      const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
    TrackerWrapper *tracker = it_tracker->second;
    tracker->initFromPose(*it_img->second, it_camPose->second);
    tracker->getPose(m_cMo);
    std::vector<std::string> vectorOfMissingCameraPoses;
      it_img = mapOfImages.find(it_tracker->first);
      it_camPose = mapOfCameraPoses.find(it_tracker->first);
      if (it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
        it_tracker->second->initFromPose(*it_img->second, it_camPose->second);
        vectorOfMissingCameraPoses.push_back(it_tracker->first);
    for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
         it != vectorOfMissingCameraPoses.end(); ++it) {
      it_img = mapOfImages.find(*it);
      std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
                                  "Missing image or missing camera transformation matrix! Cannot set the pose for camera: %s!",
                                      const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
    TrackerWrapper *tracker = it_tracker->second;
    tracker->initFromPose(*it_img->second, it_camPose->second);
    tracker->getPose(m_cMo);
    std::vector<std::string> vectorOfMissingCameraPoses;
      it_img = mapOfColorImages.find(it_tracker->first);
      it_camPose = mapOfCameraPoses.find(it_tracker->first);
      if (it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
        it_tracker->second->initFromPose(*it_img->second, it_camPose->second);
        vectorOfMissingCameraPoses.push_back(it_tracker->first);
    for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
         it != vectorOfMissingCameraPoses.end(); ++it) {
      it_img = mapOfColorImages.find(*it);
      std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
                                  "Missing image or missing camera transformation matrix! Cannot set the pose for camera: %s!",
  if (extension == ".xml") {
#ifdef VISP_HAVE_NLOHMANN_JSON
  else if (extension == ".json") {
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->loadConfigFile(configFile, verbose);
#ifdef VISP_HAVE_NLOHMANN_JSON
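// JSON configuration: parse the settings file, check the "version" field, then create or update one
// TrackerWrapper per entry of "trackers" and restore the global display, visibility-test, VVS and model settings.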
  std::ifstream jsonFile(settingsFile);
  if (!jsonFile.good()) {
    settings = json::parse(jsonFile);
  catch (json::parse_error &e) {
    std::stringstream msg;
    msg << "Could not parse JSON file : \n";
    msg << e.what() << std::endl;
    msg << "Byte position of error: " << e.byte;
  if (!settings.contains("version")) {
  else if (settings["version"].get<std::string>() != MBT_JSON_SETTINGS_VERSION) {
  trackersJson = settings.at("trackers");
  bool refCameraFound = false;
  for (const auto &it : trackersJson.items()) {
    const std::string cameraName = it.key();
    const json trackerJson = it.value();
    if (trackerJson.contains("camTref")) {
      std::cout << "Loading tracker " << cameraName << std::endl << " with settings: " << std::endl << trackerJson.dump(2);
      std::cout << "Updating an already existing tracker with JSON configuration." << std::endl;
      std::cout << "Creating a new tracker from JSON configuration." << std::endl;
      TrackerWrapper *tw = new TrackerWrapper();
    const auto unusedCamIt = std::remove(unusedCameraNames.begin(), unusedCameraNames.end(), cameraName);
    unusedCameraNames.erase(unusedCamIt, unusedCameraNames.end());
  if (!refCameraFound) {
  for (const std::string &oldCameraName : unusedCameraNames) {
  refTracker->getCameraParameters(m_cam);
  this->distNearClip = refTracker->getNearClippingDistance();
  this->distFarClip = refTracker->getFarClippingDistance();
  if (settings.contains("display")) {
    const json displayJson = settings["display"];
  if (settings.contains("visibilityTest")) {
    const json visJson = settings["visibilityTest"];
  if (settings.contains("vvs")) {
    const json vvsJson = settings["vvs"];
    setLambda(vvsJson.value("lambda", this->m_lambda));
    setMaxIter(vvsJson.value("maxIter", this->m_maxIter));
    setInitialMu(vvsJson.value("initialMu", this->m_initialMu));
  if (settings.contains("model")) {
    loadModel(settings.at("model").get<std::string>(), verbose);
  j["version"] = MBT_JSON_SETTINGS_VERSION;
    trackers[kv.first] = *(kv.second);
      trackers[kv.first]["camTref"] = itTransformation->second;
  j["trackers"] = trackers;
  std::ofstream f(settingsFile);
  const unsigned indentLevel = 4;
  f << j.dump(indentLevel);
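// The loadConfigFile() overloads below dispatch one configuration file per camera to its TrackerWrapper and then
// refresh the shared camera parameters from the reference camera.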
  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
  TrackerWrapper *tracker = it_tracker->second;
  tracker->loadConfigFile(configFile1, verbose);
  tracker = it_tracker->second;
  tracker->loadConfigFile(configFile2, verbose);
  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it_tracker->second;
    std::map<std::string, std::string>::const_iterator it_config = mapOfConfigFiles.find(it_tracker->first);
    if (it_config != mapOfConfigFiles.end()) {
      tracker->loadConfigFile(it_config->second, verbose);
                        it_tracker->first.c_str());
  TrackerWrapper *tracker = it->second;
  tracker->getCameraParameters(m_cam);
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->loadModel(modelFile, verbose, T);
  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
  TrackerWrapper *tracker = it_tracker->second;
  tracker->loadModel(modelFile1, verbose, T1);
  tracker = it_tracker->second;
  tracker->loadModel(modelFile2, verbose, T2);
                                   const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
  for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
    std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(it_tracker->first);
    if (it_model != mapOfModelFiles.end()) {
      TrackerWrapper *tracker = it_tracker->second;
      std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
      if (it_T != mapOfT.end())
        tracker->loadModel(it_model->second, verbose, it_T->second);
        tracker->loadModel(it_model->second, verbose);
                        it_tracker->first.c_str());
#if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
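// preTracking(): forward each camera's image and point cloud (PCL cloud or raw buffer with its dimensions) to the
// corresponding sub-tracker so that every enabled feature type extracts its measurements before pose optimization.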
                                  std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->preTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
                                  std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
                                  std::map<std::string, unsigned int> &mapOfPointCloudWidths,
                                  std::map<std::string, unsigned int> &mapOfPointCloudHeights)
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->preTracking(mapOfImages[it->first], mapOfPointClouds[it->first], mapOfPointCloudWidths[it->first],
                         mapOfPointCloudHeights[it->first]);
                                  std::map<std::string, const vpMatrix *> &mapOfPointClouds,
                                  std::map<std::string, unsigned int> &mapOfPointCloudWidths,
                                  std::map<std::string, unsigned int> &mapOfPointCloudHeights)
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->preTracking(mapOfImages[it->first], mapOfPointClouds[it->first], mapOfPointCloudWidths[it->first],
                         mapOfPointCloudHeights[it->first]);
  TrackerWrapper *tracker = it_tracker->second;
  tracker->reInitModel(I, cad_name, cMo, verbose, T);
  tracker->getPose(m_cMo);
  TrackerWrapper *tracker = it_tracker->second;
  tracker->reInitModel(I_color, cad_name, cMo, verbose, T);
  tracker->getPose(m_cMo);
                                     const std::string &cad_name1, const std::string &cad_name2,
    std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
    it_tracker->second->reInitModel(I1, cad_name1, c1Mo, verbose, T1);
    it_tracker->second->reInitModel(I2, cad_name2, c2Mo, verbose, T2);
    it_tracker->second->getPose(m_cMo);
                                     const std::string &cad_name1, const std::string &cad_name2,
    std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
    it_tracker->second->reInitModel(I_color1, cad_name1, c1Mo, verbose, T1);
    it_tracker->second->reInitModel(I_color2, cad_name2, c2Mo, verbose, T2);
    it_tracker->second->getPose(m_cMo);
                                     const std::map<std::string, std::string> &mapOfModelFiles,
                                     const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses, bool verbose,
                                     const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
  std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(m_referenceCameraName);
  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() &&
      it_camPose != mapOfCameraPoses.end()) {
    TrackerWrapper *tracker = it_tracker->second;
    std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
    if (it_T != mapOfT.end())
      tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose, it_T->second);
      tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
    tracker->getPose(m_cMo);
    std::vector<std::string> vectorOfMissingCameras;
      it_img = mapOfImages.find(it_tracker->first);
      it_model = mapOfModelFiles.find(it_tracker->first);
      it_camPose = mapOfCameraPoses.find(it_tracker->first);
      if (it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() && it_camPose != mapOfCameraPoses.end()) {
        TrackerWrapper *tracker = it_tracker->second;
        tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
        vectorOfMissingCameras.push_back(it_tracker->first);
    for (std::vector<std::string>::const_iterator it = vectorOfMissingCameras.begin(); it != vectorOfMissingCameras.end();
      it_img = mapOfImages.find(*it);
      it_model = mapOfModelFiles.find(*it);
      std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
      if (it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() &&
        m_mapOfTrackers[*it]->reInitModel(*it_img->second, it_model->second, cCurrentMo, verbose);
                                     const std::map<std::string, std::string> &mapOfModelFiles,
                                     const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses, bool verbose,
                                     const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
  std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(m_referenceCameraName);
  std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
      it_camPose != mapOfCameraPoses.end()) {
    TrackerWrapper *tracker = it_tracker->second;
    std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
    if (it_T != mapOfT.end())
      tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose, it_T->second);
      tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
    tracker->getPose(m_cMo);
    std::vector<std::string> vectorOfMissingCameras;
      it_img = mapOfColorImages.find(it_tracker->first);
      it_model = mapOfModelFiles.find(it_tracker->first);
      it_camPose = mapOfCameraPoses.find(it_tracker->first);
      if (it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
          it_camPose != mapOfCameraPoses.end()) {
        TrackerWrapper *tracker = it_tracker->second;
        tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
        vectorOfMissingCameras.push_back(it_tracker->first);
    for (std::vector<std::string>::const_iterator it = vectorOfMissingCameras.begin(); it != vectorOfMissingCameras.end();
      it_img = mapOfColorImages.find(*it);
      it_model = mapOfModelFiles.find(*it);
      std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
      if (it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
        m_mapOfTrackers[*it]->reInitModel(*it_img->second, it_model->second, cCurrentMo, verbose);
#ifdef VISP_HAVE_OGRE
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->resetTracker();
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->setAngleAppear(a);
    std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    it->second->setAngleAppear(a1);
    it->second->setAngleAppear(a2);
  for (std::map<std::string, double>::const_iterator it = mapOfAngles.begin(); it != mapOfAngles.end(); ++it) {
    std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
      TrackerWrapper *tracker = it_tracker->second;
      tracker->setAngleAppear(it->second);
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->setAngleDisappear(a);
    std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    it->second->setAngleDisappear(a1);
    it->second->setAngleDisappear(a2);
  for (std::map<std::string, double>::const_iterator it = mapOfAngles.begin(); it != mapOfAngles.end(); ++it) {
    std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
      TrackerWrapper *tracker = it_tracker->second;
      tracker->setAngleDisappear(it->second);
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->setCameraParameters(camera);
    std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    it->second->setCameraParameters(camera1);
    it->second->setCameraParameters(camera2);
    it->second->getCameraParameters(m_cam);
  for (std::map<std::string, vpCameraParameters>::const_iterator it = mapOfCameraParameters.begin();
       it != mapOfCameraParameters.end(); ++it) {
    std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
      TrackerWrapper *tracker = it_tracker->second;
      tracker->setCameraParameters(it->second);
    it->second = cameraTransformationMatrix;
                                                            const std::map<std::string, vpHomogeneousMatrix> &mapOfTransformationMatrix)
  for (std::map<std::string, vpHomogeneousMatrix>::const_iterator it = mapOfTransformationMatrix.begin();
       it != mapOfTransformationMatrix.end(); ++it) {
    std::map<std::string, vpHomogeneousMatrix>::iterator it_camTrans =
      it_camTrans->second = it->second;
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->setClipping(flags);
    std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    it->second->setClipping(flags1);
    it->second->setClipping(flags2);
    std::stringstream ss;
    ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
  for (std::map<std::string, unsigned int>::const_iterator it = mapOfClippingFlags.begin();
       it != mapOfClippingFlags.end(); ++it) {
    std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
      TrackerWrapper *tracker = it_tracker->second;
      tracker->setClipping(it->second);
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->setDepthDenseFilteringMaxDistance(maxDistance);
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->setDepthDenseFilteringMethod(method);
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->setDepthDenseFilteringMinDistance(minDistance);
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->setDepthDenseFilteringOccupancyRatio(occupancyRatio);
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->setDepthDenseSamplingStep(stepX, stepY);
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->setDepthNormalFaceCentroidMethod(method);
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->setDepthNormalFeatureEstimationMethod(method);
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->setDepthNormalPclPlaneEstimationMethod(method);
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->setDepthNormalPclPlaneEstimationRansacMaxIter(maxIter);
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->setDepthNormalPclPlaneEstimationRansacThreshold(thresold);
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->setDepthNormalSamplingStep(stepX, stepY);
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->setDisplayFeatures(displayF);
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->setFarClippingDistance(dist);
    std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    it->second->setFarClippingDistance(dist1);
    it->second->setFarClippingDistance(dist2);
    distFarClip = it->second->getFarClippingDistance();
  for (std::map<std::string, double>::const_iterator it = mapOfClippingDists.begin(); it != mapOfClippingDists.end();
    std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
      TrackerWrapper *tracker = it_tracker->second;
      tracker->setFarClippingDistance(it->second);
    std::map<vpTrackerType, double>::const_iterator it_factor = mapOfFeatureFactors.find(it->first);
    if (it_factor != mapOfFeatureFactors.end()) {
      it->second = it_factor->second;
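    // Only the feature types present in the supplied map are updated; the other entries of m_mapOfFeatureFactors
    // keep their current value.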
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->setGoodMovingEdgesRatioThreshold(threshold);
#ifdef VISP_HAVE_OGRE
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->setGoodNbRayCastingAttemptsRatio(ratio);
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->setNbRayCastingAttemptsForVisibility(attempts);
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
void vpMbGenericTracker::setKltOpencv(const vpKltOpencv &t)
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->setKltOpencv(t);
    std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    it->second->setKltOpencv(t1);
    it->second->setKltOpencv(t2);
void vpMbGenericTracker::setKltOpencv(const std::map<std::string, vpKltOpencv> &mapOfKlts)
  for (std::map<std::string, vpKltOpencv>::const_iterator it = mapOfKlts.begin(); it != mapOfKlts.end(); ++it) {
    std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
      TrackerWrapper *tracker = it_tracker->second;
      tracker->setKltOpencv(it->second);
void vpMbGenericTracker::setKltThresholdAcceptation(double th)
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->setKltThresholdAcceptation(th);
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->setLod(useLod, name);
4491 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
4499 void vpMbGenericTracker::setKltMaskBorder(const unsigned int &e)
4501 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4503 TrackerWrapper *tracker = it->second;
4504 tracker->setKltMaskBorder(e);
4516 void vpMbGenericTracker::setKltMaskBorder(const unsigned int &e1, const unsigned int &e2)
4519 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4520 it->second->setKltMaskBorder(e1);
4524 it->second->setKltMaskBorder(e2);
4537 void vpMbGenericTracker::setKltMaskBorder(const std::map<std::string, unsigned int> &mapOfErosions)
4539 for (std::map<std::string, unsigned int>::const_iterator it = mapOfErosions.begin(); it != mapOfErosions.end();
4541 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4544 TrackerWrapper *tracker = it_tracker->second;
4545 tracker->setKltMaskBorder(it->second);
4560 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4562 TrackerWrapper *tracker = it->second;
4563 tracker->setMask(mask);
4580 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4582 TrackerWrapper *tracker = it->second;
4583 tracker->setMinLineLengthThresh(minLineLengthThresh, name);
4599 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4601 TrackerWrapper *tracker = it->second;
4602 tracker->setMinPolygonAreaThresh(minPolygonAreaThresh, name);
4615 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4617 TrackerWrapper *tracker = it->second;
4618 tracker->setMovingEdge(me);
4634 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4635 it->second->setMovingEdge(me1);
4639 it->second->setMovingEdge(me2);
4654 for (std::map<std::string, vpMe>::const_iterator it = mapOfMe.begin(); it != mapOfMe.end(); ++it) {
4655 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4658 TrackerWrapper *tracker = it_tracker->second;
4659 tracker->setMovingEdge(it->second);
4675 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4677 TrackerWrapper *tracker = it->second;
4678 tracker->setNearClippingDistance(dist);
4693 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4694 it->second->setNearClippingDistance(dist1);
4698 it->second->setNearClippingDistance(dist2);
4721 for (std::map<std::string, double>::const_iterator it = mapOfDists.begin(); it != mapOfDists.end(); ++it) {
4722 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4725 TrackerWrapper *tracker = it_tracker->second;
4726 tracker->setNearClippingDistance(it->second);
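// --------------------------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original source): setting per-camera near and far
// clipping planes through the std::map overloads above. Camera names and distances (in meters)
// are assumptions of this example.
static void example_setClippingPerCamera(vpMbGenericTracker &tracker)
{
  std::map<std::string, double> mapOfNearDists, mapOfFarDists;
  mapOfNearDists["Camera1"] = 0.01;
  mapOfNearDists["Camera2"] = 0.05;
  mapOfFarDists["Camera1"] = 2.0;
  mapOfFarDists["Camera2"] = 3.0;
  tracker.setNearClippingDistance(mapOfNearDists);
  tracker.setFarClippingDistance(mapOfFarDists);
}
// --------------------------------------------------------------------------------------------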
4751 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4753 TrackerWrapper *tracker = it->second;
4754 tracker->setOgreShowConfigDialog(showConfigDialog);
4772 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4774 TrackerWrapper *tracker = it->second;
4775 tracker->setOgreVisibilityTest(v);
4778 #ifdef VISP_HAVE_OGRE
4779 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4781 TrackerWrapper *tracker = it->second;
4782 tracker->faces.getOgreContext()->setWindowName("Multi Generic MBT (" + it->first + ")");
4798 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4800 TrackerWrapper *tracker = it->second;
4801 tracker->setOptimizationMethod(opt);
4821 "to be configured with only one camera!");
4828 TrackerWrapper *tracker = it->second;
4829 tracker->setPose(I, cdMo);
4853 "to be configured with only one camera!");
4860 TrackerWrapper *tracker = it->second;
4862 tracker->setPose(m_I, cdMo);
4885 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4886 it->second->setPose(I1, c1Mo);
4890 it->second->setPose(I2, c2Mo);
4895 it->second->getPose(m_cMo);
4923 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4924 it->second->setPose(I_color1, c1Mo);
4928 it->second->setPose(I_color2, c2Mo);
4933 it->second->getPose(m_cMo);
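// --------------------------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original source): re-seeding the pose of a stereo
// setup with the two-image overload above. c2Mc1, the extrinsic transformation from camera 1 to
// camera 2, is assumed to be known from calibration, so only the reference pose c1Mo is needed.
static void example_setPoseStereo(vpMbGenericTracker &tracker,
                                  const vpImage<unsigned char> &I1, const vpImage<unsigned char> &I2,
                                  const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mc1)
{
  vpHomogeneousMatrix c2Mo = c2Mc1 * c1Mo; // propagate the reference pose to the second camera
  tracker.setPose(I1, I2, c1Mo, c2Mo);
}
// --------------------------------------------------------------------------------------------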
4962 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
4966 std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img = mapOfImages.find(m_referenceCameraName);
4968 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
4970 if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
4971 TrackerWrapper *tracker = it_tracker->second;
4972 tracker->setPose(*it_img->second, it_camPose->second);
4973 tracker->getPose(m_cMo);
4980 std::vector<std::string> vectorOfMissingCameraPoses;
4985 it_img = mapOfImages.find(it_tracker->first);
4986 it_camPose = mapOfCameraPoses.find(it_tracker->first);
4988 if (it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
4990 TrackerWrapper *tracker = it_tracker->second;
4991 tracker->setPose(*it_img->second, it_camPose->second);
4994 vectorOfMissingCameraPoses.push_back(it_tracker->first);
4999 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
5000 it != vectorOfMissingCameraPoses.end(); ++it) {
5001 it_img = mapOfImages.find(*it);
5002 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
5011 "Missing image or missing camera transformation "
5012 "matrix! Cannot set pose for camera: %s!",
5034 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
5038 std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
5039 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
5041 if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
5042 TrackerWrapper *tracker = it_tracker->second;
5043 tracker->setPose(*it_img->second, it_camPose->second);
5044 tracker->getPose(m_cMo);
5051 std::vector<std::string> vectorOfMissingCameraPoses;
5056 it_img = mapOfColorImages.find(it_tracker->first);
5057 it_camPose = mapOfCameraPoses.find(it_tracker->first);
5059 if (it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
5061 TrackerWrapper *tracker = it_tracker->second;
5062 tracker->setPose(*it_img->second, it_camPose->second);
5065 vectorOfMissingCameraPoses.push_back(it_tracker->first);
5070 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
5071 it != vectorOfMissingCameraPoses.end(); ++it) {
5072 it_img = mapOfColorImages.find(*it);
5073 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
5082 "Missing image or missing camera transformation "
5083 "matrix! Cannot set pose for camera: %s!",
5107 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5109 TrackerWrapper *tracker = it->second;
5110 tracker->setProjectionErrorComputation(flag);
5121 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5123 TrackerWrapper *tracker = it->second;
5124 tracker->setProjectionErrorDisplay(display);
5135 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5137 TrackerWrapper *tracker = it->second;
5138 tracker->setProjectionErrorDisplayArrowLength(length);
5146 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5148 TrackerWrapper *tracker = it->second;
5149 tracker->setProjectionErrorDisplayArrowThickness(thickness);
5160 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(referenceCameraName);
5165 std::cerr << "The reference camera: " << referenceCameraName << " does not exist!";
5173 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5175 TrackerWrapper *tracker = it->second;
5176 tracker->setScanLineVisibilityTest(v);
5193 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5195 TrackerWrapper *tracker = it->second;
5196 tracker->setTrackerType(type);
5211 for (std::map<std::string, int>::const_iterator it = mapOfTrackerTypes.begin(); it != mapOfTrackerTypes.end(); ++it) {
5212 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
5214 TrackerWrapper *tracker = it_tracker->second;
5215 tracker->setTrackerType(it->second);
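// --------------------------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original source): mixing feature types per camera
// through the std::map overload above, e.g. moving edges on a grayscale camera and dense depth
// on a depth sensor (KLT_TRACKER could also be OR-ed in when the KLT module is available).
// Camera names are assumptions of this example.
static void example_setTrackerTypePerCamera(vpMbGenericTracker &tracker)
{
  std::map<std::string, int> mapOfTrackerTypes;
  mapOfTrackerTypes["Camera1"] = vpMbGenericTracker::EDGE_TRACKER;
  mapOfTrackerTypes["Camera2"] = vpMbGenericTracker::DEPTH_DENSE_TRACKER;
  tracker.setTrackerType(mapOfTrackerTypes);
}
// --------------------------------------------------------------------------------------------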
5231 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5233 TrackerWrapper *tracker = it->second;
5234 tracker->setUseDepthDenseTracking(name, useDepthDenseTracking);
5249 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5251 TrackerWrapper *tracker = it->second;
5252 tracker->setUseDepthNormalTracking(name, useDepthNormalTracking);
5267 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5269 TrackerWrapper *tracker = it->second;
5270 tracker->setUseEdgeTracking(name, useEdgeTracking);
5274 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5284 void vpMbGenericTracker::setUseKltTracking(const std::string &name, const bool &useKltTracking)
5286 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5288 TrackerWrapper *tracker = it->second;
5289 tracker->setUseKltTracking(name, useKltTracking);
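// --------------------------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original source): enabling or disabling feature
// types for a given face. The face name "cube" is an assumption of this example; it must match
// a name declared in the CAO/VRML model file.
static void example_toggleFeaturesPerFace(vpMbGenericTracker &tracker)
{
  tracker.setUseEdgeTracking("cube", true);        // keep moving edges on that face
  tracker.setUseDepthDenseTracking("cube", false); // but ignore its depth-dense points
}
// --------------------------------------------------------------------------------------------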
5297 bool isOneTestTrackingOk = false;
5298 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5300 TrackerWrapper *tracker = it->second;
5302 tracker->testTracking();
5303 isOneTestTrackingOk = true;
5309 if (!isOneTestTrackingOk) {
5310 std::ostringstream oss;
5311 oss << "Not enough moving edges to track the object. Try to reduce the "
5329 std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5332 std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5333 std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5335 track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5349 std::map<std::string, const vpImage<vpRGBa> *> mapOfColorImages;
5352 std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5353 std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5355 track(mapOfColorImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5371 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5372 std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5373 mapOfImages[it->first] = &I1;
5376 mapOfImages[it->first] = &I2;
5378 std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5379 std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5381 track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5384 std::stringstream ss;
5385 ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
5403 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5404 std::map<std::string, const vpImage<vpRGBa> *> mapOfImages;
5405 mapOfImages[it->first] = &I_color1;
5408 mapOfImages[it->first] = &_colorI2;
5410 std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5411 std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5413 track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5416 std::stringstream ss;
5417 ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
5431 std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5432 std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5434 track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5446 std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5447 std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5449 track(mapOfColorImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
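// --------------------------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original source): tracking with one grayscale
// camera and one depth sensor whose point cloud is supplied as a std::vector<vpColVector>
// (the PCL-free path used by the overloads above). Camera names are assumptions of this example.
static void example_trackImagePlusDepth(vpMbGenericTracker &tracker,
                                        const vpImage<unsigned char> &I,
                                        const std::vector<vpColVector> &pointcloud,
                                        unsigned int pointcloud_width, unsigned int pointcloud_height)
{
  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
  mapOfImages["Camera1"] = &I;

  std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
  mapOfPointClouds["Camera2"] = &pointcloud;

  std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
  mapOfWidths["Camera2"] = pointcloud_width;
  mapOfHeights["Camera2"] = pointcloud_height;

  tracker.track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
}
// --------------------------------------------------------------------------------------------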
5452 #if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
5462 std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
5464 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5466 TrackerWrapper *tracker = it->second;
5469 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5477 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5481 mapOfImages[it->first] == nullptr) {
5486 !mapOfPointClouds[it->first]) {
5503 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5505 TrackerWrapper *tracker = it->second;
5508 tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5511 tracker->postTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
5514 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5515 if (tracker->m_trackerType & KLT_TRACKER) {
5516 tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5521 tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
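// --------------------------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original source): the PCL counterpart of the call
// above, passing one organized pcl::PointCloud per depth camera. Camera names are assumptions
// of this example.
#if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
static void example_trackWithPcl(vpMbGenericTracker &tracker,
                                 const vpImage<unsigned char> &I,
                                 const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &pointcloud)
{
  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
  mapOfImages["Camera1"] = &I;

  std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> mapOfPointClouds;
  mapOfPointClouds["Camera2"] = pointcloud;

  tracker.track(mapOfImages, mapOfPointClouds);
}
#endif
// --------------------------------------------------------------------------------------------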
5538 std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
5540 std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5541 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5543 TrackerWrapper *tracker = it->second;
5546 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5554 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5558 mapOfImages[it->first] == nullptr) {
5562 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5566 mapOfImages[it->first] != nullptr) {
5568 mapOfImages[it->first] = &tracker->m_I;
5572 !mapOfPointClouds[it->first]) {
5589 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5591 TrackerWrapper *tracker = it->second;
5594 tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5597 tracker->postTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
5600 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5601 if (tracker->m_trackerType & KLT_TRACKER) {
5602 tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5607 tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5627 std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
5628 std::map<std::string, unsigned int> &mapOfPointCloudWidths,
5629 std::map<std::string, unsigned int> &mapOfPointCloudHeights)
5631 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5633 TrackerWrapper *tracker = it->second;
5636 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5644 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5648 mapOfImages[it->first] == nullptr) {
5653 (mapOfPointClouds[it->first] == nullptr)) {
5658 preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);
5670 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5672 TrackerWrapper *tracker = it->second;
5675 tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5678 tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);
5681 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5682 if (tracker->m_trackerType & KLT_TRACKER) {
5683 tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5688 tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5707 std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
5708 std::map<std::string, unsigned int> &mapOfPointCloudWidths,
5709 std::map<std::string, unsigned int> &mapOfPointCloudHeights)
5711 std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5712 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5714 TrackerWrapper *tracker = it->second;
5717 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5725 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5729 mapOfColorImages[it->first] == nullptr) {
5733 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5737 mapOfColorImages[it->first] != nullptr) {
5739 mapOfImages[it->first] = &tracker->m_I;
5743 (mapOfPointClouds[it->first] == nullptr)) {
5748 preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);
5760 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5762 TrackerWrapper *tracker = it->second;
5765 tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5768 tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);
5771 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5772 if (tracker->m_trackerType & KLT_TRACKER) {
5773 tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5778 tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5787 std::map<std::string, const vpMatrix *> &mapOfPointClouds,
5788 std::map<std::string, unsigned int> &mapOfPointCloudWidths,
5789 std::map<std::string, unsigned int> &mapOfPointCloudHeights)
5791 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5793 TrackerWrapper *tracker = it->second;
5796 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5804 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5808 mapOfImages[it->first] == nullptr) {
5813 (mapOfPointClouds[it->first] == nullptr)) {
5818 preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);
5830 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5832 TrackerWrapper *tracker = it->second;
5835 tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5838 tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);
5841 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5842 if (tracker->m_trackerType & KLT_TRACKER) {
5843 tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5848 tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5867 std::map<std::string, const vpMatrix *> &mapOfPointClouds,
5868 std::map<std::string, unsigned int> &mapOfPointCloudWidths,
5869 std::map<std::string, unsigned int> &mapOfPointCloudHeights)
5871 std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5872 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5874 TrackerWrapper *tracker = it->second;
5877 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5885 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5889 mapOfColorImages[it->first] == nullptr) {
5893 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5897 mapOfColorImages[it->first] != nullptr) {
5899 mapOfImages[it->first] = &tracker->m_I;
5903 (mapOfPointClouds[it->first] == nullptr)) {
5908 preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);
5920 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5922 TrackerWrapper *tracker = it->second;
5925 tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5928 tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);
5931 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5932 if (tracker->m_trackerType & KLT_TRACKER) {
5933 tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5938 tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5947 vpMbGenericTracker::TrackerWrapper::TrackerWrapper()
5948 : m_error(), m_L(), m_trackerType(EDGE_TRACKER), m_w(), m_weightedError()
5953 #ifdef VISP_HAVE_OGRE
5960 vpMbGenericTracker::TrackerWrapper::TrackerWrapper(int trackerType)
5961 : m_error(), m_L(), m_trackerType(trackerType), m_w(), m_weightedError()
5964 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5974 #ifdef VISP_HAVE_OGRE
5984 computeVVSInit(ptr_I);
5986 if (m_error.getRows() < 4) {
5991 double normRes_1 = -1;
5992 unsigned int iter = 0;
5994 double factorEdge = 1.0;
5995 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5996 double factorKlt = 1.0;
5998 double factorDepth = 1.0;
5999 double factorDepthDense = 1.0;
6005 double mu = m_initialMu;
6007 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6010 bool isoJoIdentity = m_isoJoIdentity;
6018 unsigned int nb_edge_features = m_error_edge.getRows();
6019 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6020 unsigned int nb_klt_features = m_error_klt.getRows();
6022 unsigned int nb_depth_features = m_error_depthNormal.getRows();
6023 unsigned int nb_depth_dense_features = m_error_depthDense.getRows();
6025 while (std::fabs(normRes_1 - normRes) > m_stopCriteriaEpsilon && (iter < m_maxIter)) {
6026 computeVVSInteractionMatrixAndResidu(ptr_I);
6028 bool reStartFromLastIncrement = false;
6029 computeVVSCheckLevenbergMarquardt(iter, m_error, error_prev, cMo_prev, mu, reStartFromLastIncrement);
6031 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6032 if (reStartFromLastIncrement) {
6033 if (m_trackerType & KLT_TRACKER) {
6039 if (!reStartFromLastIncrement) {
6040 computeVVSWeights();
6042 if (computeCovariance) {
6044 if (!isoJoIdentity) {
6047 LVJ_true = (m_L * cVo * oJo);
6057 if (isoJoIdentity) {
6061 unsigned int rank = (m_L * cVo).kernel(K);
6071 isoJoIdentity = false;
6080 unsigned int start_index = 0;
6081 if (m_trackerType & EDGE_TRACKER) {
6082 for (unsigned int i = 0; i < nb_edge_features; i++) {
6083 double wi = m_w_edge[i] * m_factor[i] * factorEdge;
6085 m_weightedError[i] = wi * m_error[i];
6090 for (unsigned int j = 0; j < m_L.getCols(); j++) {
6095 start_index += nb_edge_features;
6098 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6099 if (m_trackerType & KLT_TRACKER) {
6100 for (unsigned int i = 0; i < nb_klt_features; i++) {
6101 double wi = m_w_klt[i] * factorKlt;
6102 W_true[start_index + i] = wi;
6103 m_weightedError[start_index + i] = wi * m_error_klt[i];
6105 num += wi * vpMath::sqr(m_error[start_index + i]);
6108 for (unsigned int j = 0; j < m_L.getCols(); j++) {
6109 m_L[start_index + i][j] *= wi;
6113 start_index += nb_klt_features;
6117 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6118 for (unsigned int i = 0; i < nb_depth_features; i++) {
6119 double wi = m_w_depthNormal[i] * factorDepth;
6120 m_w[start_index + i] = m_w_depthNormal[i];
6121 m_weightedError[start_index + i] = wi * m_error[start_index + i];
6123 num += wi * vpMath::sqr(m_error[start_index + i]);
6126 for (unsigned int j = 0; j < m_L.getCols(); j++) {
6127 m_L[start_index + i][j] *= wi;
6131 start_index += nb_depth_features;
6134 if (m_trackerType & DEPTH_DENSE_TRACKER) {
6135 for (unsigned int i = 0; i < nb_depth_dense_features; i++) {
6136 double wi = m_w_depthDense[i] * factorDepthDense;
6137 m_w[start_index + i] = m_w_depthDense[i];
6138 m_weightedError[start_index + i] = wi * m_error[start_index + i];
6140 num += wi * vpMath::sqr(m_error[start_index + i]);
6143 for (unsigned int j = 0; j < m_L.getCols(); j++) {
6144 m_L[start_index + i][j] *= wi;
6151 computeVVSPoseEstimation(isoJoIdentity, iter, m_L, LTL, m_weightedError, m_error, error_prev, LTR, mu, v);
6154 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6155 if (m_trackerType & KLT_TRACKER) {
6162 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6163 if (m_trackerType & KLT_TRACKER) {
6167 normRes_1 = normRes;
6169 normRes = sqrt(num / den);
6175 computeCovarianceMatrixVVS(isoJoIdentity, W_true, cMo_prev, L_true, LVJ_true, m_error);
6177 if (m_trackerType & EDGE_TRACKER) {
6182 void vpMbGenericTracker::TrackerWrapper::computeVVSInit()
6185 "TrackerWrapper::computeVVSInit("
6186 ") should not be called!");
6191 initMbtTracking(ptr_I);
6193 unsigned int nbFeatures = 0;
6195 if (m_trackerType & EDGE_TRACKER) {
6196 nbFeatures += m_error_edge.getRows();
6199 m_error_edge.clear();
6200 m_weightedError_edge.clear();
6205 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6206 if (m_trackerType & KLT_TRACKER) {
6207 vpMbKltTracker::computeVVSInit();
6208 nbFeatures += m_error_klt.getRows();
6211 m_error_klt.clear();
6212 m_weightedError_klt.clear();
6218 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6220 nbFeatures += m_error_depthNormal.getRows();
6223 m_error_depthNormal.clear();
6224 m_weightedError_depthNormal.clear();
6225 m_L_depthNormal.clear();
6226 m_w_depthNormal.clear();
6229 if (m_trackerType & DEPTH_DENSE_TRACKER) {
6231 nbFeatures += m_error_depthDense.getRows();
6234 m_error_depthDense.clear();
6235 m_weightedError_depthDense.clear();
6236 m_L_depthDense.clear();
6237 m_w_depthDense.clear();
6240 m_L.resize(nbFeatures, 6, false, false);
6241 m_error.resize(nbFeatures, false);
6243 m_weightedError.resize(nbFeatures, false);
6244 m_w.resize(nbFeatures, false);
6252 "computeVVSInteractionMatrixAndR"
6253 "esidu() should not be called!");
6258 if (m_trackerType & EDGE_TRACKER) {
6262 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6263 if (m_trackerType & KLT_TRACKER) {
6264 vpMbKltTracker::computeVVSInteractionMatrixAndResidu();
6268 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6272 if (m_trackerType & DEPTH_DENSE_TRACKER) {
6276 unsigned int start_index = 0;
6277 if (m_trackerType & EDGE_TRACKER) {
6278 m_L.insert(m_L_edge, start_index, 0);
6279 m_error.insert(start_index, m_error_edge);
6281 start_index += m_error_edge.getRows();
6284 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6285 if (m_trackerType & KLT_TRACKER) {
6286 m_L.insert(m_L_klt, start_index, 0);
6287 m_error.insert(start_index, m_error_klt);
6289 start_index += m_error_klt.getRows();
6293 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6294 m_L.insert(m_L_depthNormal, start_index, 0);
6295 m_error.insert(start_index, m_error_depthNormal);
6297 start_index += m_error_depthNormal.getRows();
6300 if (m_trackerType & DEPTH_DENSE_TRACKER) {
6301 m_L.insert(m_L_depthDense, start_index, 0);
6302 m_error.insert(start_index, m_error_depthDense);
6310 unsigned int start_index = 0;
6312 if (m_trackerType & EDGE_TRACKER) {
6314 m_w.insert(start_index, m_w_edge);
6316 start_index += m_w_edge.getRows();
6319 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6320 if (m_trackerType & KLT_TRACKER) {
6322 m_w.insert(start_index, m_w_klt);
6324 start_index += m_w_klt.getRows();
6328 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6329 if (m_depthNormalUseRobust) {
6331 m_w.insert(start_index, m_w_depthNormal);
6334 start_index += m_w_depthNormal.getRows();
6337 if (m_trackerType & DEPTH_DENSE_TRACKER) {
6339 m_w.insert(start_index, m_w_depthDense);
6347 unsigned int thickness, bool displayFullModel)
6349 if (displayFeatures) {
6350 std::vector<std::vector<double> > features = getFeaturesForDisplay();
6351 for (size_t i = 0; i < features.size(); i++) {
6354 int state = static_cast<int>(features[i][3]);
6386 double id = features[i][5];
6387 std::stringstream ss;
6392 vpImagePoint im_centroid(features[i][1], features[i][2]);
6393 vpImagePoint im_extremity(features[i][3], features[i][4]);
6400 std::vector<std::vector<double> > models =
6402 for (size_t i = 0; i < models.size(); i++) {
6410 double n20 = models[i][3];
6411 double n11 = models[i][4];
6412 double n02 = models[i][5];
6417 #ifdef VISP_HAVE_OGRE
6418 if ((m_trackerType & EDGE_TRACKER)
6419 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6420 || (m_trackerType & KLT_TRACKER)
6424 faces.displayOgre(cMo);
6431 unsigned int thickness, bool displayFullModel)
6433 if (displayFeatures) {
6434 std::vector<std::vector<double> > features = getFeaturesForDisplay();
6435 for (size_t i = 0; i < features.size(); i++) {
6438 int state = static_cast<int>(features[i][3]);
6470 double id = features[i][5];
6471 std::stringstream ss;
6476 vpImagePoint im_centroid(features[i][1], features[i][2]);
6477 vpImagePoint im_extremity(features[i][3], features[i][4]);
6484 std::vector<std::vector<double> > models =
6486 for (size_t i = 0; i < models.size(); i++) {
6494 double n20 = models[i][3];
6495 double n11 = models[i][4];
6496 double n02 = models[i][5];
6501 #ifdef VISP_HAVE_OGRE
6502 if ((m_trackerType & EDGE_TRACKER)
6503 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6504 || (m_trackerType & KLT_TRACKER)
6508 faces.displayOgre(cMo);
6513 std::vector<std::vector<double> > vpMbGenericTracker::TrackerWrapper::getFeaturesForDisplay()
6515 std::vector<std::vector<double> > features;
6517 if (m_trackerType & EDGE_TRACKER) {
6519 features.insert(features.end(), m_featuresToBeDisplayedEdge.begin(), m_featuresToBeDisplayedEdge.end());
6522 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6523 if (m_trackerType & KLT_TRACKER) {
6525 features.insert(features.end(), m_featuresToBeDisplayedKlt.begin(), m_featuresToBeDisplayedKlt.end());
6529 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6531 features.insert(features.end(), m_featuresToBeDisplayedDepthNormal.begin(),
6532 m_featuresToBeDisplayedDepthNormal.end());
6538 std::vector<std::vector<double> > vpMbGenericTracker::TrackerWrapper::getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel)
6544 std::vector<std::vector<double> > models;
6547 if (m_trackerType == EDGE_TRACKER) {
6550 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6551 else if (m_trackerType == KLT_TRACKER) {
6552 models = vpMbKltTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6555 else if (m_trackerType == DEPTH_NORMAL_TRACKER) {
6558 else if (m_trackerType == DEPTH_DENSE_TRACKER) {
6563 if (m_trackerType & EDGE_TRACKER) {
6564 std::vector<std::vector<double> > edgeModels =
6566 models.insert(models.end(), edgeModels.begin(), edgeModels.end());
6570 if (m_trackerType & DEPTH_DENSE_TRACKER) {
6571 std::vector<std::vector<double> > depthDenseModels =
6573 models.insert(models.end(), depthDenseModels.begin(), depthDenseModels.end());
6582 if (!modelInitialised) {
6586 if (useScanLine || clippingFlag > 3)
6589 bool reInitialisation = false;
6591 faces.setVisible(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
6594 #ifdef VISP_HAVE_OGRE
6595 if (!faces.isOgreInitialised()) {
6598 faces.setOgreShowConfigDialog(ogreShowConfigDialog);
6599 faces.initOgre(m_cam);
6603 ogreShowConfigDialog = false;
6606 faces.setVisibleOgre(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
6608 faces.setVisible(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
6613 faces.computeClippedPolygons(m_cMo, m_cam);
6617 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6618 if (m_trackerType & KLT_TRACKER)
6619 vpMbKltTracker::reinit(I);
6622 if (m_trackerType & EDGE_TRACKER) {
6631 if (m_trackerType & DEPTH_NORMAL_TRACKER)
6634 if (m_trackerType & DEPTH_DENSE_TRACKER)
6638 void vpMbGenericTracker::TrackerWrapper::initCircle(const vpPoint &p1, const vpPoint &p2, const vpPoint &p3,
6639 double radius, int idFace, const std::string &name)
6641 if (m_trackerType & EDGE_TRACKER)
6644 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6645 if (m_trackerType & KLT_TRACKER)
6646 vpMbKltTracker::initCircle(p1, p2, p3, radius, idFace, name);
6650 void vpMbGenericTracker::TrackerWrapper::initCylinder(const vpPoint &p1, const vpPoint &p2, double radius, int idFace,
6651 const std::string &name)
6653 if (m_trackerType & EDGE_TRACKER)
6656 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6657 if (m_trackerType & KLT_TRACKER)
6658 vpMbKltTracker::initCylinder(p1, p2, radius, idFace, name);
6662 void vpMbGenericTracker::TrackerWrapper::initFaceFromCorners(vpMbtPolygon &polygon)
6664 if (m_trackerType & EDGE_TRACKER)
6667 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6668 if (m_trackerType & KLT_TRACKER)
6669 vpMbKltTracker::initFaceFromCorners(polygon);
6672 if (m_trackerType & DEPTH_NORMAL_TRACKER)
6675 if (m_trackerType & DEPTH_DENSE_TRACKER)
6679 void vpMbGenericTracker::TrackerWrapper::initFaceFromLines(vpMbtPolygon &polygon)
6681 if (m_trackerType & EDGE_TRACKER)
6684 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6685 if (m_trackerType & KLT_TRACKER)
6686 vpMbKltTracker::initFaceFromLines(polygon);
6689 if (m_trackerType & DEPTH_NORMAL_TRACKER)
6692 if (m_trackerType & DEPTH_DENSE_TRACKER)
6698 if (m_trackerType & EDGE_TRACKER) {
6704 void vpMbGenericTracker::TrackerWrapper::loadConfigFile(const std::string &configFile, bool verbose)
6706 #if defined(VISP_HAVE_PUGIXML)
6711 xmlp.setVerbose(verbose);
6712 xmlp.setCameraParameters(m_cam);
6714 xmlp.setAngleDisappear(vpMath::deg(angleDisappears));
6720 xmlp.setKltMaxFeatures(10000);
6721 xmlp.setKltWindowSize(5);
6722 xmlp.setKltQuality(0.01);
6723 xmlp.setKltMinDistance(5);
6724 xmlp.setKltHarrisParam(0.01);
6725 xmlp.setKltBlockSize(3);
6726 xmlp.setKltPyramidLevels(3);
6727 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6728 xmlp.setKltMaskBorder(maskBorder);
6732 xmlp.setDepthNormalFeatureEstimationMethod(m_depthNormalFeatureEstimationMethod);
6733 xmlp.setDepthNormalPclPlaneEstimationMethod(m_depthNormalPclPlaneEstimationMethod);
6734 xmlp.setDepthNormalPclPlaneEstimationRansacMaxIter(m_depthNormalPclPlaneEstimationRansacMaxIter);
6735 xmlp.setDepthNormalPclPlaneEstimationRansacThreshold(m_depthNormalPclPlaneEstimationRansacThreshold);
6736 xmlp.setDepthNormalSamplingStepX(m_depthNormalSamplingStepX);
6737 xmlp.setDepthNormalSamplingStepY(m_depthNormalSamplingStepY);
6740 xmlp.setDepthDenseSamplingStepX(m_depthDenseSamplingStepX);
6741 xmlp.setDepthDenseSamplingStepY(m_depthDenseSamplingStepY);
6745 std::cout << " *********** Parsing XML for";
6748 std::vector<std::string> tracker_names;
6749 if (m_trackerType & EDGE_TRACKER)
6750 tracker_names.push_back("Edge");
6751 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6752 if (m_trackerType & KLT_TRACKER)
6753 tracker_names.push_back("Klt");
6755 if (m_trackerType & DEPTH_NORMAL_TRACKER)
6756 tracker_names.push_back("Depth Normal");
6757 if (m_trackerType & DEPTH_DENSE_TRACKER)
6758 tracker_names.push_back("Depth Dense");
6761 for (size_t i = 0; i < tracker_names.size(); i++) {
6762 std::cout << " " << tracker_names[i];
6763 if (i == tracker_names.size() - 1) {
6768 std::cout << "Model-Based Tracker ************ " << std::endl;
6771 xmlp.parse(configFile);
6778 xmlp.getCameraParameters(camera);
6779 setCameraParameters(camera);
6781 angleAppears = vpMath::rad(xmlp.getAngleAppear());
6782 angleDisappears = vpMath::rad(xmlp.getAngleDisappear());
6784 if (xmlp.hasNearClippingDistance())
6785 setNearClippingDistance(xmlp.getNearClippingDistance());
6787 if (xmlp.hasFarClippingDistance())
6788 setFarClippingDistance(xmlp.getFarClippingDistance());
6790 if (xmlp.getFovClipping()) {
6794 useLodGeneral = xmlp.getLodState();
6795 minLineLengthThresholdGeneral = xmlp.getLodMinLineLengthThreshold();
6796 minPolygonAreaThresholdGeneral = xmlp.getLodMinPolygonAreaThreshold();
6798 applyLodSettingInConfig = false;
6799 if (this->getNbPolygon() > 0) {
6800 applyLodSettingInConfig = true;
6801 setLod(useLodGeneral);
6802 setMinLineLengthThresh(minLineLengthThresholdGeneral);
6803 setMinPolygonAreaThresh(minPolygonAreaThresholdGeneral);
6808 xmlp.getEdgeMe(meParser);
6812 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6813 tracker.setMaxFeatures((int)xmlp.getKltMaxFeatures());
6814 tracker.setWindowSize((int)xmlp.getKltWindowSize());
6815 tracker.setQuality(xmlp.getKltQuality());
6816 tracker.setMinDistance(xmlp.getKltMinDistance());
6817 tracker.setHarrisFreeParameter(xmlp.getKltHarrisParam());
6818 tracker.setBlockSize((int)xmlp.getKltBlockSize());
6819 tracker.setPyramidLevels((int)xmlp.getKltPyramidLevels());
6820 maskBorder = xmlp.getKltMaskBorder();
6823 faces.getMbScanLineRenderer().setMaskBorder(maskBorder);
6827 setDepthNormalFeatureEstimationMethod(xmlp.getDepthNormalFeatureEstimationMethod());
6828 setDepthNormalPclPlaneEstimationMethod(xmlp.getDepthNormalPclPlaneEstimationMethod());
6829 setDepthNormalPclPlaneEstimationRansacMaxIter(xmlp.getDepthNormalPclPlaneEstimationRansacMaxIter());
6830 setDepthNormalPclPlaneEstimationRansacThreshold(xmlp.getDepthNormalPclPlaneEstimationRansacThreshold());
6831 setDepthNormalSamplingStep(xmlp.getDepthNormalSamplingStepX(), xmlp.getDepthNormalSamplingStepY());
6834 setDepthDenseSamplingStep(xmlp.getDepthDenseSamplingStepX(), xmlp.getDepthDenseSamplingStepY());
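// --------------------------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original source): loading the XML settings parsed
// by the method above through the public interface, either the same file for every camera or
// one file per camera. The file names are assumptions of this example.
static void example_loadConfig(vpMbGenericTracker &tracker)
{
  // Same settings applied to every camera
  tracker.loadConfigFile("cube.xml");

  // Per-camera settings
  std::map<std::string, std::string> mapOfConfigFiles;
  mapOfConfigFiles["Camera1"] = "cube_left.xml";
  mapOfConfigFiles["Camera2"] = "cube_right.xml";
  tracker.loadConfigFile(mapOfConfigFiles);
}
// --------------------------------------------------------------------------------------------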
6842 #if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
6844 const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
6846 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6848 if (m_trackerType & KLT_TRACKER) {
6849 if (vpMbKltTracker::postTracking(*ptr_I, m_w_klt)) {
6850 vpMbKltTracker::reinit(*ptr_I);
6856 if (m_trackerType & EDGE_TRACKER) {
6857 bool newvisibleface = false;
6861 faces.computeClippedPolygons(m_cMo, m_cam);
6867 if (m_trackerType & DEPTH_NORMAL_TRACKER)
6871 if (m_trackerType & DEPTH_DENSE_TRACKER)
6875 if (m_trackerType & EDGE_TRACKER) {
6882 if (computeProjError) {
6889 const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
6891 if (m_trackerType & EDGE_TRACKER) {
6896 std::cerr << "Error in moving edge tracking" << std::endl;
6901 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6902 if (m_trackerType & KLT_TRACKER) {
6904 vpMbKltTracker::preTracking(*ptr_I);
6907 std::cerr << "Error in KLT tracking: " << e.what() << std::endl;
6913 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6918 std::cerr << "Error in Depth normal tracking" << std::endl;
6923 if (m_trackerType & DEPTH_DENSE_TRACKER) {
6928 std::cerr << "Error in Depth dense tracking" << std::endl;
6936 const unsigned int pointcloud_width,
6937 const unsigned int pointcloud_height)
6939 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6941 if (m_trackerType & KLT_TRACKER) {
6942 if (vpMbKltTracker::postTracking(*ptr_I, m_w_klt)) {
6943 vpMbKltTracker::reinit(*ptr_I);
6949 if (m_trackerType & EDGE_TRACKER) {
6950 bool newvisibleface = false;
6954 faces.computeClippedPolygons(m_cMo, m_cam);
6960 if (m_trackerType & DEPTH_NORMAL_TRACKER)
6964 if (m_trackerType & DEPTH_DENSE_TRACKER)
6968 if (m_trackerType & EDGE_TRACKER) {
6975 if (computeProjError) {
6982 const std::vector<vpColVector> *const point_cloud,
6983 const unsigned int pointcloud_width,
6984 const unsigned int pointcloud_height)
6986 if (m_trackerType & EDGE_TRACKER) {
6991 std::cerr << "Error in moving edge tracking" << std::endl;
6996 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6997 if (m_trackerType & KLT_TRACKER) {
6999 vpMbKltTracker::preTracking(*ptr_I);
7002 std::cerr << "Error in KLT tracking: " << e.what() << std::endl;
7008 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
7013 std::cerr << "Error in Depth tracking" << std::endl;
7018 if (m_trackerType & DEPTH_DENSE_TRACKER) {
7023 std::cerr << "Error in Depth dense tracking" << std::endl;
7031 const unsigned int pointcloud_width,
7032 const unsigned int pointcloud_height)
7034 if (m_trackerType & EDGE_TRACKER) {
7039 std::cerr << "Error in moving edge tracking" << std::endl;
7044 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7045 if (m_trackerType & KLT_TRACKER) {
7047 vpMbKltTracker::preTracking(*ptr_I);
7050 std::cerr << "Error in KLT tracking: " << e.what() << std::endl;
7056 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
7061 std::cerr << "Error in Depth tracking" << std::endl;
7066 if (m_trackerType & DEPTH_DENSE_TRACKER) {
7071 std::cerr << "Error in Depth dense tracking" << std::endl;
7089 for (unsigned int i = 0; i < scales.size(); i++) {
7091 for (std::list<vpMbtDistanceLine *>::const_iterator it = lines[i].begin(); it != lines[i].end(); ++it) {
7098 for (std::list<vpMbtDistanceCylinder *>::const_iterator it = cylinders[i].begin(); it != cylinders[i].end();
7106 for (std::list<vpMbtDistanceCircle *>::const_iterator it = circles[i].begin(); it != circles[i].end(); ++it) {
7114 cylinders[i].clear();
7122 nbvisiblepolygone = 0;
7125 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7127 for (std::list<vpMbtDistanceKltPoints *>::const_iterator it = kltPolygons.begin(); it != kltPolygons.end(); ++it) {
7128 vpMbtDistanceKltPoints *kltpoly = *it;
7129 if (kltpoly != nullptr) {
7134 kltPolygons.clear();
7136 for (std::list<vpMbtDistanceKltCylinder *>::const_iterator it = kltCylinders.begin(); it != kltCylinders.end();
7138 vpMbtDistanceKltCylinder *kltPolyCylinder = *it;
7139 if (kltPolyCylinder != nullptr) {
7140 delete kltPolyCylinder;
7142 kltPolyCylinder = nullptr;
7144 kltCylinders.clear();
7147 for (std::list<vpMbtDistanceCircle *>::const_iterator it = circles_disp.begin(); it != circles_disp.end(); ++it) {
7149 if (ci != nullptr) {
7154 circles_disp.clear();
7156 firstInitialisation = true;
7161 for (size_t i = 0; i < m_depthNormalFaces.size(); i++) {
7162 delete m_depthNormalFaces[i];
7163 m_depthNormalFaces[i] = nullptr;
7165 m_depthNormalFaces.clear();
7168 for (size_t i = 0; i < m_depthDenseFaces.size(); i++) {
7169 delete m_depthDenseFaces[i];
7170 m_depthDenseFaces[i] = nullptr;
7172 m_depthDenseFaces.clear();
7176 loadModel(cad_name, verbose, T);
7178 initFromPose(*I, cMo);
7181 initFromPose(*I_color, cMo);
7189 reInitModel(&I, nullptr, cad_name, cMo, verbose, T);
7196 reInitModel(nullptr, &I_color, cad_name, cMo, verbose, T);
7199 void vpMbGenericTracker::TrackerWrapper::resetTracker()
7202 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7203 vpMbKltTracker::resetTracker();
7209 void vpMbGenericTracker::TrackerWrapper::setCameraParameters(const vpCameraParameters &cam)
7214 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7215 vpMbKltTracker::setCameraParameters(m_cam);
7223 void vpMbGenericTracker::TrackerWrapper::setFarClippingDistance(const double &dist)
7228 void vpMbGenericTracker::TrackerWrapper::setNearClippingDistance(const double &dist)
7233 void vpMbGenericTracker::TrackerWrapper::setOgreVisibilityTest(const bool &v)
7236 #ifdef VISP_HAVE_OGRE
7237 faces.getOgreContext()->setWindowName("TrackerWrapper");
7244 bool performKltSetPose = false;
7249 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7250 if (m_trackerType & KLT_TRACKER) {
7251 performKltSetPose = true;
7253 if (useScanLine || clippingFlag > 3) {
7254 m_cam.computeFov(I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
7257 vpMbKltTracker::setPose(I ? *I : m_I, cdMo);
7261 if (!performKltSetPose) {
7267 if (m_trackerType & EDGE_TRACKER) {
7272 faces.computeClippedPolygons(m_cMo, m_cam);
7273 faces.computeScanLineRender(m_cam, I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
7277 if (m_trackerType & EDGE_TRACKER) {
7278 initPyramid(I, Ipyramid);
7280 unsigned int i = (unsigned int)scales.size();
7290 cleanPyramid(Ipyramid);
7293 if (m_trackerType & EDGE_TRACKER) {
7307 setPose(&I, nullptr, cdMo);
7312 setPose(nullptr, &I_color, cdMo);
7315 void vpMbGenericTracker::TrackerWrapper::setProjectionErrorComputation(const bool &flag)
7320 void vpMbGenericTracker::TrackerWrapper::setScanLineVisibilityTest(const bool &v)
7323 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7324 vpMbKltTracker::setScanLineVisibilityTest(v);
7330 void vpMbGenericTracker::TrackerWrapper::setTrackerType(int type)
7332 if ((type & (EDGE_TRACKER |
7333 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7336 DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
7340 m_trackerType = type;
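// --------------------------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original source): the type checked above is a
// bitmask, so several feature types can be combined and applied to every camera at once.
static void example_setTrackerTypes(vpMbGenericTracker &tracker)
{
  tracker.setTrackerType(vpMbGenericTracker::EDGE_TRACKER | vpMbGenericTracker::DEPTH_DENSE_TRACKER);
}
// --------------------------------------------------------------------------------------------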
7343 void vpMbGenericTracker::TrackerWrapper::testTracking()
7345 if (m_trackerType & EDGE_TRACKER) {
7351 #if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
7356 if ((m_trackerType & (EDGE_TRACKER
7357 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7361 std::cerr << "Bad tracker type: " << m_trackerType << std::endl;
7365 #if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
7375 #if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
7377 const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
7379 if ((m_trackerType & (EDGE_TRACKER |
7380 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7383 DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
7384 std::cerr << "Bad tracker type: " << m_trackerType << std::endl;
7388 if (m_trackerType & (EDGE_TRACKER
7389 #if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7397 if (m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) && !point_cloud) {
7404 preTracking(ptr_I, point_cloud);
7410 covarianceMatrix = -1;
7414 if (m_trackerType == EDGE_TRACKER)
7417 postTracking(ptr_I, point_cloud);
7421 std::cerr << "Exception: " << e.what() << std::endl;
void setWindowName(const Ogre::String &n)
unsigned int getCols() const
void resize(unsigned int nrows, unsigned int ncols, bool flagNullify=true, bool recopy_=true)
unsigned int getRows() const
Generic class defining intrinsic camera parameters.
Implementation of column vector and the associated operations.
void insert(unsigned int i, const vpColVector &v)
void resize(unsigned int i, bool flagNullify=true)
Class to define RGB colors available for display functionalities.
static const vpColor cyan
static const vpColor blue
static const vpColor purple
static const vpColor yellow
static const vpColor green
static void displayLine(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color, unsigned int thickness=1, bool segment=true)
static void displayEllipse(const vpImage< unsigned char > &I, const vpImagePoint ¢er, const double &coef1, const double &coef2, const double &coef3, bool use_normalized_centered_moments, const vpColor &color, unsigned int thickness=1, bool display_center=false, bool display_arc=false)
static void displayCross(const vpImage< unsigned char > &I, const vpImagePoint &ip, unsigned int size, const vpColor &color, unsigned int thickness=1)
static void displayArrow(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color=vpColor::white, unsigned int w=4, unsigned int h=2, unsigned int thickness=1)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
error that can be emitted by ViSP classes.
@ badValue
Used to indicate that a value is not in the allowed range.
@ notInitialized
Used to indicate that a parameter is not initialized.
const char * what() const
static vpHomogeneousMatrix direct(const vpColVector &v)
Implementation of an homogeneous matrix and operations on such kind of matrices.
vpHomogeneousMatrix inverse() const
static void convert(const vpImage< unsigned char > &src, vpImage< vpRGBa > &dest)
Class that defines a 2D point in an image. This class is useful for image processing and stores only ...
unsigned int getWidth() const
unsigned int getHeight() const
void init(unsigned int h, unsigned int w, Type value)
Wrapper for the KLT (Kanade-Lucas-Tomasi) feature tracker implemented in OpenCV. Thus to enable this ...
static double rad(double deg)
static double sqr(double x)
static bool equal(double x, double y, double threshold=0.001)
static double deg(double rad)
Implementation of a matrix and operations on matrices.
void insert(const vpMatrix &A, unsigned int r, unsigned int c)
virtual void initFaceFromLines(vpMbtPolygon &polygon) VP_OVERRIDE
virtual void initFaceFromCorners(vpMbtPolygon &polygon) VP_OVERRIDE
void segmentPointCloud(const pcl::PointCloud< pcl::PointXYZ >::ConstPtr &point_cloud)
virtual std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false) VP_OVERRIDE
virtual void setScanLineVisibilityTest(const bool &v) VP_OVERRIDE
void computeVisibility(unsigned int width, unsigned int height)
virtual void setCameraParameters(const vpCameraParameters &camera) VP_OVERRIDE
virtual void resetTracker() VP_OVERRIDE
virtual void computeVVSWeights()
virtual void computeVVSInteractionMatrixAndResidu() VP_OVERRIDE
virtual void computeVVSInit() VP_OVERRIDE
virtual void computeVVSInteractionMatrixAndResidu() VP_OVERRIDE
void segmentPointCloud(const pcl::PointCloud< pcl::PointXYZ >::ConstPtr &point_cloud)
virtual std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false) VP_OVERRIDE
virtual void setScanLineVisibilityTest(const bool &v) VP_OVERRIDE
virtual void resetTracker() VP_OVERRIDE
virtual void track(const pcl::PointCloud< pcl::PointXYZ >::ConstPtr &point_cloud)
void reInitModel(const vpImage< unsigned char > &I, const std::string &cad_name, const vpHomogeneousMatrix &cMo, bool verbose=false)
virtual void initFaceFromCorners(vpMbtPolygon &polygon) VP_OVERRIDE
virtual void setCameraParameters(const vpCameraParameters &camera) VP_OVERRIDE
virtual void initFaceFromLines(vpMbtPolygon &polygon) VP_OVERRIDE
virtual void setPose(const pcl::PointCloud< pcl::PointXYZ >::ConstPtr &point_cloud, const vpHomogeneousMatrix &cdMo)
virtual void computeVVSInit() VP_OVERRIDE
void computeVisibility(unsigned int width, unsigned int height)
void computeVVS(const vpImage< unsigned char > &_I, unsigned int lvl)
void updateMovingEdgeWeights()
virtual void computeVVSInteractionMatrixAndResidu() VP_OVERRIDE
virtual void setScanLineVisibilityTest(const bool &v) VP_OVERRIDE
void computeProjectionError(const vpImage< unsigned char > &_I)
virtual void testTracking() VP_OVERRIDE
virtual void setNearClippingDistance(const double &dist) VP_OVERRIDE
virtual void computeVVSWeights()
virtual void computeVVSInit() VP_OVERRIDE
void initMovingEdge(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &_cMo)
virtual void initFaceFromLines(vpMbtPolygon &polygon) VP_OVERRIDE
void trackMovingEdge(const vpImage< unsigned char > &I)
virtual void setFarClippingDistance(const double &dist) VP_OVERRIDE
virtual void initFaceFromCorners(vpMbtPolygon &polygon) VP_OVERRIDE
void computeVVSFirstPhaseFactor(const vpImage< unsigned char > &I, unsigned int lvl=0)
void updateMovingEdge(const vpImage< unsigned char > &I)
unsigned int initMbtTracking(unsigned int &nberrors_lines, unsigned int &nberrors_cylinders, unsigned int &nberrors_circles)
virtual void setClipping(const unsigned int &flags) VP_OVERRIDE
void reinitMovingEdge(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &_cMo)
virtual std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false) VP_OVERRIDE
void resetTracker() VP_OVERRIDE
virtual void initCylinder(const vpPoint &p1, const vpPoint &p2, double radius, int idFace=0, const std::string &name="") VP_OVERRIDE
void setMovingEdge(const vpMe &me)
virtual void computeVVSInteractionMatrixAndResidu(const vpImage< unsigned char > &I)
virtual void setCameraParameters(const vpCameraParameters &cam) VP_OVERRIDE
virtual void initCircle(const vpPoint &p1, const vpPoint &p2, const vpPoint &p3, double radius, int idFace=0, const std::string &name="") VP_OVERRIDE
void visibleFace(const vpImage< unsigned char > &_I, const vpHomogeneousMatrix &_cMo, bool &newvisibleline)
virtual std::vector< std::vector< double > > getFeaturesForDisplay()
virtual void setCameraParameters(const vpCameraParameters &camera) VP_OVERRIDE
virtual double getGoodMovingEdgesRatioThreshold() const
virtual int getTrackerType() const
virtual void setOgreVisibilityTest(const bool &v) VP_OVERRIDE
std::map< std::string, TrackerWrapper * > m_mapOfTrackers
virtual std::pair< std::vector< vpPolygon >, std::vector< std::vector< vpPoint > > > getPolygonFaces(bool orderPolygons=true, bool useVisibility=true, bool clipPolygon=false) VP_OVERRIDE
virtual void setProjectionErrorComputation(const bool &flag) VP_OVERRIDE
vpMbGenericTracker()
json namespace shortcut
virtual void setDisplayFeatures(bool displayF) VP_OVERRIDE
unsigned int m_nb_feat_edge
Number of moving-edges features.
std::map< std::string, vpHomogeneousMatrix > m_mapOfCameraTransformationMatrix
virtual double computeCurrentProjectionError(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &_cMo, const vpCameraParameters &_cam) VP_OVERRIDE
virtual void getLcircle(std::list< vpMbtDistanceCircle * > &circlesList, unsigned int level=0) const
virtual void getCameraParameters(vpCameraParameters &cam) const
virtual void setDepthDenseFilteringMaxDistance(double maxDistance)
virtual void setProjectionErrorDisplayArrowLength(unsigned int length) VP_OVERRIDE
virtual void initFromPoints(const vpImage< unsigned char > &I, const std::string &initFile)
virtual void initCircle(const vpPoint &p1, const vpPoint &p2, const vpPoint &p3, double radius, int idFace=0, const std::string &name="") VP_OVERRIDE
virtual void setGoodNbRayCastingAttemptsRatio(const double &ratio)
virtual void getPose(vpHomogeneousMatrix &cMo) const
virtual void setGoodMovingEdgesRatioThreshold(double threshold)
virtual void initFaceFromCorners(vpMbtPolygon &polygon) VP_OVERRIDE
virtual vpMbtPolygon * getPolygon(unsigned int index) VP_OVERRIDE
virtual void setDepthDenseFilteringMinDistance(double minDistance)
virtual void setUseEdgeTracking(const std::string &name, const bool &useEdgeTracking)
virtual void setMinPolygonAreaThresh(double minPolygonAreaThresh, const std::string &name="") VP_OVERRIDE
virtual void setLod(bool useLod, const std::string &name="") VP_OVERRIDE
virtual void setDepthNormalSamplingStep(unsigned int stepX, unsigned int stepY)
vpColVector m_w
Robust weights.
virtual void saveConfigFile(const std::string &settingsFile) const
virtual std::string getReferenceCameraName() const
virtual std::map< std::string, int > getCameraTrackerTypes() const
virtual void loadConfigFileJSON(const std::string &configFile, bool verbose=true)
virtual void testTracking() VP_OVERRIDE
unsigned int m_nb_feat_depthDense
Number of depth dense features.
virtual void setDepthNormalPclPlaneEstimationMethod(int method)
virtual void computeProjectionError()
virtual void reInitModel(const vpImage< unsigned char > &I, const std::string &cad_name, const vpHomogeneousMatrix &cMo, bool verbose=false, const vpHomogeneousMatrix &T=vpHomogeneousMatrix())
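A minimal sketch of how reInitModel() might be used to swap the CAD model at run time while keeping the current tracker settings; the helper name, the "cube.cao" path and the pose cMo are placeholders for this illustration, not values taken from the source above.

#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/core/vpImage.h>
#include <visp3/mbt/vpMbGenericTracker.h>

// Hypothetical helper: reload a model for an already configured tracker.
void swapModel(vpMbGenericTracker &tracker, const vpImage<unsigned char> &I,
               const vpHomogeneousMatrix &cMo)
{
  // "cube.cao" is an illustrative model file; cMo is a pose estimate obtained elsewhere.
  tracker.reInitModel(I, "cube.cao", cMo, /*verbose*/ false);
}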
virtual void setUseDepthDenseTracking(const std::string &name, const bool &useDepthDenseTracking)
virtual void setMovingEdge(const vpMe &me)
virtual void setDepthNormalPclPlaneEstimationRansacMaxIter(int maxIter)
virtual void setDepthNormalPclPlaneEstimationRansacThreshold(double threshold)
virtual void setAngleDisappear(const double &a) VP_OVERRIDE
virtual void setFeatureFactors(const std::map< vpTrackerType, double > &mapOfFeatureFactors)
virtual void computeVVSInit() VP_OVERRIDE
virtual void init(const vpImage< unsigned char > &I) VP_OVERRIDE
virtual void getLcylinder(std::list< vpMbtDistanceCylinder * > &cylindersList, unsigned int level=0) const
virtual void setDepthDenseSamplingStep(unsigned int stepX, unsigned int stepY)
virtual std::vector< std::string > getCameraNames() const
virtual void setDepthDenseFilteringMethod(int method)
virtual void track(const vpImage< unsigned char > &I) VP_OVERRIDE
vpColVector m_weightedError
Weighted error.
virtual void loadModel(const std::string &modelFile, bool verbose=false, const vpHomogeneousMatrix &T=vpHomogeneousMatrix()) VP_OVERRIDE
vpMatrix m_L
Interaction matrix.
virtual void setOptimizationMethod(const vpMbtOptimizationMethod &opt) VP_OVERRIDE
virtual void setDepthNormalFaceCentroidMethod(const vpMbtFaceDepthNormal::vpFaceCentroidType &method)
virtual void computeVVSWeights()
virtual void loadConfigFileXML(const std::string &configFile, bool verbose=true)
virtual void setNbRayCastingAttemptsForVisibility(const unsigned int &attempts)
virtual void setMask(const vpImage< bool > &mask) VP_OVERRIDE
virtual void setDepthDenseFilteringOccupancyRatio(double occupancyRatio)
vpColVector m_error
Error vector (s - s*).
virtual void setReferenceCameraName(const std::string &referenceCameraName)
virtual void setMinLineLengthThresh(double minLineLengthThresh, const std::string &name="") VP_OVERRIDE
virtual void setClipping(const unsigned int &flags) VP_OVERRIDE
std::map< vpTrackerType, double > m_mapOfFeatureFactors
Weighting applied to each feature type in the VVS stage.
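These weights are exposed through setFeatureFactors(). A minimal sketch, assuming an edge + dense-depth configuration; the helper name and the factor values are illustrative, not recommended settings.

#include <map>
#include <visp3/mbt/vpMbGenericTracker.h>

// Hypothetical helper: bias the VVS optimization towards moving-edge features
// and reduce the influence of the dense depth features.
void configureFeatureFactors(vpMbGenericTracker &tracker)
{
  std::map<vpMbGenericTracker::vpTrackerType, double> factors;
  factors[vpMbGenericTracker::EDGE_TRACKER] = 1.0;        // full weight for moving edges
  factors[vpMbGenericTracker::DEPTH_DENSE_TRACKER] = 0.5; // halve the dense depth influence
  tracker.setFeatureFactors(factors);
}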
double m_thresholdOutlier
virtual void computeVVS(std::map< std::string, const vpImage< unsigned char > * > &mapOfImages)
virtual void getLline(std::list< vpMbtDistanceLine * > &linesList, unsigned int level=0) const
virtual void setTrackerType(int type)
virtual void computeVVSInteractionMatrixAndResidu() VP_OVERRIDE
unsigned int m_nb_feat_klt
Number of KLT features.
virtual unsigned int getNbPoints(unsigned int level=0) const
virtual void initCylinder(const vpPoint &p1, const vpPoint &p2, double radius, int idFace=0, const std::string &name="") VP_OVERRIDE
unsigned int m_nb_feat_depthNormal
Number of depth normal features.
virtual void setOgreShowConfigDialog(bool showConfigDialog) VP_OVERRIDE
virtual vpMe getMovingEdge() const
virtual unsigned int getNbPolygon() const VP_OVERRIDE
virtual void setScanLineVisibilityTest(const bool &v) VP_OVERRIDE
virtual void resetTracker() VP_OVERRIDE
virtual void setPose(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cdMo) VP_OVERRIDE
virtual void initFaceFromLines(vpMbtPolygon &polygon) VP_OVERRIDE
virtual void preTracking(std::map< std::string, const vpImage< unsigned char > * > &mapOfImages, std::map< std::string, pcl::PointCloud< pcl::PointXYZ >::ConstPtr > &mapOfPointClouds)
virtual void setNearClippingDistance(const double &dist) VP_OVERRIDE
virtual ~vpMbGenericTracker() VP_OVERRIDE
virtual void initFromPose(const vpImage< unsigned char > &I, const std::string &initFile)
virtual void setCameraTransformationMatrix(const std::string &cameraName, const vpHomogeneousMatrix &cameraTransformationMatrix)
virtual void setProjectionErrorDisplayArrowThickness(unsigned int thickness) VP_OVERRIDE
virtual void setUseDepthNormalTracking(const std::string &name, const bool &useDepthNormalTracking)
virtual std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false) VP_OVERRIDE
std::string m_referenceCameraName
Name of the reference camera.
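For multi-camera configurations, the reference camera and the extrinsic transformations of the other cameras are set explicitly. A minimal sketch for a two-camera tracker, assuming the default camera names "Camera1"/"Camera2" and a transformation c2Mc1 known from extrinsic calibration; the helper name is illustrative.

#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/mbt/vpMbGenericTracker.h>

// Hypothetical helper: declare Camera1 as the reference frame and register the
// rigid transformation from the reference camera to Camera2.
void configureStereo(vpMbGenericTracker &tracker, const vpHomogeneousMatrix &c2Mc1)
{
  tracker.setReferenceCameraName("Camera1");
  tracker.setCameraTransformationMatrix("Camera2", c2Mc1);
}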
virtual vpMbHiddenFaces< vpMbtPolygon > & getFaces() VP_OVERRIDE
virtual void setDepthNormalFeatureEstimationMethod(const vpMbtFaceDepthNormal::vpFeatureEstimationType &method)
virtual void display(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, const vpColor &col, unsigned int thickness=1, bool displayFullModel=false) VP_OVERRIDE
friend void from_json(const nlohmann::json &j, TrackerWrapper &t)
Load configuration settings from a JSON object for a tracker wrapper.
virtual void loadConfigFile(const std::string &configFile, bool verbose=true) VP_OVERRIDE
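The listing above exposes both loadConfigFileXML() and loadConfigFileJSON() behind loadConfigFile(). A minimal sketch with placeholder file names, assuming a ViSP build with nlohmann_json for the JSON path.

#include <visp3/mbt/vpMbGenericTracker.h>

// Hypothetical helper: load tracker settings from a configuration file.
void loadSettings(vpMbGenericTracker &tracker)
{
#ifdef VISP_HAVE_NLOHMANN_JSON
  tracker.loadConfigFile("tracker-settings.json");
  tracker.saveConfigFile("tracker-settings-resolved.json"); // dump the resolved settings back out
#else
  tracker.loadConfigFile("tracker-settings.xml"); // parsed with vpMbtXmlGenericParser
#endif
}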
virtual void setProjectionErrorDisplay(bool display) VP_OVERRIDE
virtual unsigned int getClipping() const
virtual void setAngleAppear(const double &a) VP_OVERRIDE
virtual void setFarClippingDistance(const double &dist) VP_OVERRIDE
vpAROgre * getOgreContext()
virtual void setProjectionErrorDisplayArrowLength(unsigned int length)
double m_lambda
Gain of the virtual visual servoing stage.
virtual void setMaxIter(unsigned int max)
bool m_projectionErrorDisplay
Display gradient and model orientation for projection error computation.
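A minimal sketch of how the projection error machinery above might be used, assuming an already initialized tracker and the current frame I; the helper name is illustrative.

#include <iostream>
#include <visp3/core/vpImage.h>
#include <visp3/mbt/vpMbGenericTracker.h>

// Hypothetical helper: enable projection error computation/display and report
// the error (in degrees) after a tracking step.
void reportProjectionError(vpMbGenericTracker &tracker, const vpImage<unsigned char> &I)
{
  tracker.setProjectionErrorComputation(true); // compute the error at each track() call
  tracker.setProjectionErrorDisplay(true);     // draw gradient / model orientation arrows
  tracker.track(I);
  std::cout << "Projection error (deg): " << tracker.getProjectionError() << std::endl;
}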
virtual void setOgreShowConfigDialog(bool showConfigDialog)
virtual void setMask(const vpImage< bool > &mask)
virtual void getCameraParameters(vpCameraParameters &cam) const
vpImage< unsigned char > m_I
Grayscale image buffer, used when passing color images.
virtual void setDisplayFeatures(bool displayF)
virtual vpHomogeneousMatrix getPose() const
bool m_computeInteraction
vpMatrix oJo
The degrees of freedom to estimate.
vpMatrix covarianceMatrix
Covariance matrix.
double m_initialMu
Initial Mu for the Levenberg-Marquardt optimization loop.
vpHomogeneousMatrix m_cMo
The current pose.
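m_cMo is updated at every tracking step. A minimal sketch of one iteration of a monocular tracking loop, assuming the tracker has already been initialized (loadModel plus initFromPose or initClick) and a display is attached to I; the helper name is illustrative.

#include <visp3/core/vpCameraParameters.h>
#include <visp3/core/vpColor.h>
#include <visp3/core/vpDisplay.h>
#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/mbt/vpMbGenericTracker.h>

// Hypothetical helper: run one tracking iteration and overlay the model.
void trackOneFrame(vpMbGenericTracker &tracker, const vpImage<unsigned char> &I)
{
  vpHomogeneousMatrix cMo; // pose of the object frame in the camera frame
  vpCameraParameters cam;

  tracker.track(I);        // virtual visual servoing on the new image
  tracker.getPose(cMo);
  tracker.getCameraParameters(cam);
  tracker.display(I, cMo, cam, vpColor::red, 2);
}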
virtual void computeVVSCheckLevenbergMarquardt(unsigned int iter, vpColVector &error, const vpColVector &m_error_prev, const vpHomogeneousMatrix &cMoPrev, double &mu, bool &reStartFromLastIncrement, vpColVector *const w=nullptr, const vpColVector *const m_w_prev=nullptr)
vpCameraParameters m_cam
The camera parameters.
double m_stopCriteriaEpsilon
Epsilon threshold to stop the VVS optimization loop.
bool useOgre
Use Ogre3D for visibility tests.
virtual void computeVVSWeights(vpRobust &robust, const vpColVector &error, vpColVector &w)
vpMbHiddenFaces< vpMbtPolygon > faces
Set of faces describing the object.
virtual void setCameraParameters(const vpCameraParameters &cam)
virtual void setAngleDisappear(const double &a)
virtual void setInitialMu(double mu)
virtual void setScanLineVisibilityTest(const bool &v)
virtual void setOgreVisibilityTest(const bool &v)
virtual void computeCovarianceMatrixVVS(const bool isoJoIdentity, const vpColVector &w_true, const vpHomogeneousMatrix &cMoPrev, const vpMatrix &L_true, const vpMatrix &LVJ_true, const vpColVector &error)
vpMbtOptimizationMethod m_optimizationMethod
Optimization method used.
virtual void computeVVSPoseEstimation(const bool isoJoIdentity, unsigned int iter, vpMatrix &L, vpMatrix &LTL, vpColVector &R, const vpColVector &error, vpColVector &error_prev, vpColVector &LTR, double &mu, vpColVector &v, const vpColVector *const w=nullptr, vpColVector *const m_w_prev=nullptr)
bool displayFeatures
If true, the features are displayed.
virtual void setProjectionErrorDisplay(bool display)
double angleDisappears
Angle used to detect a face disappearance.
virtual void setLambda(double gain)
virtual void setNearClippingDistance(const double &dist)
virtual void setFarClippingDistance(const double &dist)
double distFarClip
Distance for far clipping.
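A minimal sketch of how the clipping distances and flags above might be configured; the distance values are illustrative (in meters) and the helper name is hypothetical.

#include <visp3/core/vpPolygon3D.h>
#include <visp3/mbt/vpMbGenericTracker.h>

// Hypothetical helper: clip the model against near/far planes and the field of view.
void configureClipping(vpMbGenericTracker &tracker)
{
  tracker.setNearClippingDistance(0.01);
  tracker.setFarClippingDistance(2.0);
  tracker.setClipping(tracker.getClipping() | vpPolygon3D::FOV_CLIPPING);
}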
bool m_isoJoIdentity
Boolean indicating whether oJo is the identity (for fast computation).
bool useScanLine
Use Scanline for visibility tests.
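A minimal sketch of selecting a visibility test, assuming an optional Ogre3D-enabled build (VISP_HAVE_OGRE); the helper name is illustrative.

#include <visp3/mbt/vpMbGenericTracker.h>

// Hypothetical helper: enable scanline visibility; optionally add Ogre3D-based tests.
void configureVisibility(vpMbGenericTracker &tracker)
{
  tracker.setScanLineVisibilityTest(true);
#ifdef VISP_HAVE_OGRE
  tracker.setOgreVisibilityTest(true);
  tracker.setOgreShowConfigDialog(false); // skip the Ogre setup window
#endif
}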
virtual void setProjectionErrorComputation(const bool &flag)
virtual void setClipping(const unsigned int &flags)
double angleAppears
Angle used to detect a face appearance.
virtual void setOptimizationMethod(const vpMbtOptimizationMethod &opt)
virtual void initFromPose(const vpImage< unsigned char > &I, const std::string &initFile)
virtual void setProjectionErrorDisplayArrowThickness(unsigned int thickness)
virtual void setAngleAppear(const double &a)
bool computeCovariance
Flag used to specify if the covariance matrix has to be computed or not.
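A minimal sketch of requesting the covariance of the estimated pose; the helper name is illustrative, and the matrix is only meaningful after a tracking step.

#include <iostream>
#include <visp3/core/vpImage.h>
#include <visp3/mbt/vpMbGenericTracker.h>

// Hypothetical helper: the covariance matrix is filled during track(), so it
// must be enabled beforehand and read afterwards.
void enableAndReportCovariance(vpMbGenericTracker &tracker, const vpImage<unsigned char> &I)
{
  tracker.setCovarianceComputation(true); // must be enabled before track()
  tracker.track(I);
  std::cout << "Pose covariance:\n" << tracker.getCovarianceMatrix() << std::endl;
}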
double distNearClip
Distance for near clipping.
unsigned int m_maxIter
Maximum number of iterations of the virtual visual servoing stage.
unsigned int clippingFlag
Flags specifying which clipping to use.
vpMbHiddenFaces< vpMbtPolygon > m_projectionErrorFaces
Set of faces describing the object, used for projection error.
virtual void loadConfigFile(const std::string &configFile, bool verbose=true)
Manage a circle used in the model-based tracker.
Manage a cylinder used in the model-based tracker.
Manage the line of a polygon used in the model-based tracker.
Implementation of a polygon of the model used by the model-based tracker.
Parse an XML file to extract the configuration parameters of an mbtConfig object.
@ TOO_NEAR
Point not tracked anymore because it is too close to its neighbor.
@ THRESHOLD
Point not tracked because its likelihood is below the threshold, but retained in the ME list.
@ CONTRAST
Point not tracked due to a contrast problem, but retained in the ME list.
@ M_ESTIMATOR
Point detected as an outlier during virtual visual-servoing.
@ NO_SUPPRESSION
Point successfully tracked.
Class that defines a 3D point in the object frame and allows forward projection of a 3D point in the ...
Error that can be emitted by the vpTracker class and its derivatives.
@ notEnoughPointError
Not enough points to track.
@ initializationError
Tracker initialization error.
@ fatalError
Tracker fatal error.
vpVelocityTwistMatrix & buildFrom(const vpTranslationVector &t, const vpRotationMatrix &R)