#include <iostream>
#include <sstream>

#include <visp3/core/vpConfig.h>
#include <visp3/core/vpIoTools.h>
#include <visp3/core/vpXmlParser.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayGTK.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/io/vpImageIo.h>
#include <visp3/io/vpVideoReader.h>
#include <visp3/mbt/vpMbEdgeTracker.h>
#include <visp3/vision/vpKeyPoint.h>
#if defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020400)
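// Learn the keypoints detected on one training image of the cube: keypoints lying on the
// visible faces of the model are associated with their 3D coordinates and added to the
// reference database of keypoint_learning under the class id of the image.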
void learnCube(const vpImage<unsigned char> &I, vpMbEdgeTracker &tracker, vpKeyPoint &keypoint_learning, int id)
{
  // Detect keypoints on the whole training image
  std::vector<cv::KeyPoint> trainKeyPoints;
  double elapsedTime;
  keypoint_learning.detect(I, trainKeyPoints, elapsedTime);

  // Get the faces of the model that are currently visible from the tracker
  std::vector<vpPolygon> polygons;
  std::vector<std::vector<vpPoint> > roisPt;
  std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > pair = tracker.getPolygonFaces();
  polygons = pair.first;
  roisPt = pair.second;

  // Keep only the keypoints that lie on a visible face and compute their 3D coordinates
  // from the current pose and camera parameters
  std::vector<cv::Point3f> points3f;
  vpHomogeneousMatrix cMo;
  tracker.getPose(cMo);
  vpCameraParameters cam;
  tracker.getCameraParameters(cam);
  vpKeyPoint::compute3DForPointsInPolygons(cMo, cam, trainKeyPoints, polygons, roisPt, points3f);

  // Append (true) the 2D/3D correspondences to the reference database under the given class id
  keypoint_learning.buildReference(I, trainKeyPoints, points3f, true, id);

  // Show the keypoints that have been learned on this image
  for (std::vector<cv::KeyPoint>::const_iterator it = trainKeyPoints.begin(); it != trainKeyPoints.end(); ++it) {
    vpDisplay::displayCross(I, (int)it->pt.y, (int)it->pt.x, 4, vpColor::red);
  }
}
#endif
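// The program first learns the cube on three still images with known camera poses (the
// model-based tracker provides the visible faces and the camera parameters), then detects
// and localizes the cube in every frame of the input video using the learned keypoints.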
int main(int argc, char ** argv) {
#if defined(VISP_HAVE_OPENCV) && ((VISP_HAVE_OPENCV_VERSION >= 0x020400) || defined(VISP_HAVE_FFMPEG))
try {
std::string videoname = "cube.mpeg";
for (int i=0; i<argc; i++) {
if (std::string(argv[i]) == "--name")
videoname = std::string(argv[i+1]);
else if (std::string(argv[i]) == "--help") {
std::cout << "\nUsage: " << argv[0] << " [--name <video name>] [--help]\n" << std::endl;
return 0;
}
}
std::string parentname = vpIoTools::getParent(videoname);
std::string objectname = vpIoTools::getNameWE(videoname);
if (!parentname.empty())
  objectname = parentname + "/" + objectname;
std::cout << "Video name: " << videoname << std::endl;
std::cout << "Tracker requested config files: " << objectname
<< ".[init,"
#ifdef VISP_HAVE_XML2
<< "xml,"
#endif
<< "cao or wrl]" << std::endl;
std::cout << "Tracker optional config files: " << objectname << ".[ppm]" << std::endl;
vpMbEdgeTracker tracker;
vpCameraParameters cam;
bool usexml = false;
#ifdef VISP_HAVE_XML2
// Load the tracker settings from <objectname>.xml when the file is available
if (vpIoTools::checkFilename(objectname + ".xml")) {
  tracker.loadConfigFile(objectname + ".xml");
  tracker.getCameraParameters(cam);
  usexml = true;
}
#endif
if (!usexml) {
  // Without an xml file, the moving-edge and camera parameters would be set by hand here
}
tracker.loadModel(objectname + ".cao");
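// Keypoint detector, descriptor extractor and matcher used for the learning stage: ORB
// keypoints described with ORB and matched with a brute-force Hamming distance; the detector
// is restricted below to a single pyramid level (the API differs between OpenCV 2.x and 3.x)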
vpKeyPoint keypoint_learning("ORB", "ORB", "BruteForce-Hamming");
#if (VISP_HAVE_OPENCV_VERSION < 0x030000)
keypoint_learning.setDetectorParameter("ORB", "nLevels", 1);
#else
cv::Ptr<cv::ORB> orb_learning = keypoint_learning.getDetector("ORB").dynamicCast<cv::ORB>();
if(orb_learning != NULL) {
orb_learning->setNLevels(1);
}
#endif
vpImage<unsigned char> I;
#if defined(VISP_HAVE_X11)
vpDisplayX display;
#elif defined(VISP_HAVE_GDI)
vpDisplayGDI display;
#elif defined(VISP_HAVE_OPENCV)
vpDisplayOpenCV display;
#else
std::cout << "No image viewer is available..." << std::endl;
return 0;
#endif
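// Training images of the cube and the known camera poses associated with each of them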
std::string imageName[] = {"cube0001.png", "cube0150.png", "cube0200.png"};
vpHomogeneousMatrix initPoseTab[] = {
  vpHomogeneousMatrix(0.02143385294, 0.1098083886, 0.5127439561, 2.087159614, 1.141775176, -0.4701291124),
  vpHomogeneousMatrix(0.02651282185, -0.03713587374, 0.6873765919, 2.314744454, 0.3492296488, -0.1226054828),
  vpHomogeneousMatrix(0.02965448956, -0.07283091786, 0.7253526051, 2.300529617, -0.4286674806, 0.1788761025)};
for (int i = 0; i < 3; i++) {
  // Read the training image and initialize its display window on the first iteration
  vpImageIo::read(I, imageName[i]);
  if (i == 0) {
    display.init(I, 10, 10);
  }
  std::stringstream title;
  title << "Learning cube on image: " << imageName[i];
  vpDisplay::setTitle(I, title.str());
  vpDisplay::display(I);

  // Set the known pose so that the tracker reports the visible faces, then learn the keypoints
  tracker.setPose(I, initPoseTab[i]);
  learnCube(I, tracker, keypoint_learning, i);

  if (i < 2) {
    vpDisplay::displayText(I, 30, 10, "Click to continue the learning...", vpColor::red);
  } else {
    vpDisplay::displayText(I, 30, 10, "Click to start the detection...", vpColor::red);
  }
  vpDisplay::flush(I);
  vpDisplay::getClick(I, true);
}
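// Save the learned 2D/3D correspondences to a binary file so that the detection stage (or a
// later run) can reload them without learning again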
keypoint_learning.saveLearningData("cube_learning_data.bin", true);
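// Keypoint detector, descriptor extractor and matcher for the detection stage, configured
// exactly like the ones used for learning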
vpKeyPoint keypoint_detection("ORB", "ORB", "BruteForce-Hamming");
#if (VISP_HAVE_OPENCV_VERSION < 0x030000)
keypoint_detection.setDetectorParameter("ORB", "nLevels", 1);
#else
cv::Ptr<cv::ORB> orb_detector = keypoint_detection.getDetector("ORB").dynamicCast<cv::ORB>();
if(orb_detector != NULL) {
orb_detector->setNLevels(1);
}
#endif
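// Reload the learned data and build the image used to display the matches between the
// reference keypoints and the current frame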
keypoint_detection.loadLearningData("cube_learning_data.bin", true);
vpImage<unsigned char> IMatching;
keypoint_detection.createImageMatching(I, IMatching);
#if defined VISP_HAVE_X11
vpDisplayX display2;
#elif defined VISP_HAVE_GTK
vpDisplayGTK display2;
#elif defined VISP_HAVE_GDI
vpDisplayGDI display2;
#else
vpDisplayOpenCV display2;
#endif
display2.init(IMatching, 50, 50, "Display matching between learned and current images");
vpHomogeneousMatrix cMo;
double error;
bool click_done = false;

// Detection loop: match each video frame against the learned keypoints and, when the matching
// succeeds, display the localized cube model
vpVideoReader g;
g.setFileName(videoname);
g.open(I);
while (!g.end()) {
g.acquire(I);
vpDisplay::display(I);

keypoint_detection.insertImageMatching(I, IMatching);
vpDisplay::display(IMatching);
double elapsedTime;
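// Match the current frame against the reference keypoints and estimate the pose cMo of the
// cube; error and elapsedTime return the pose estimation error and the computation time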
if(keypoint_detection.matchPoint(I, cam, cMo, error, elapsedTime)) {
// Display the model at the estimated pose together with the keypoint matches
tracker.setPose(I, cMo);
tracker.display(I, cMo, cam, vpColor::red, 2);
keypoint_detection.displayMatching(I, IMatching);
std::vector<vpImagePoint> ransacInliers = keypoint_detection.getRansacInliers();
std::vector<vpImagePoint> ransacOutliers = keypoint_detection.getRansacOutliers();
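// Draw the RANSAC inliers (green) and outliers (red) on the current image and, shifted by
// the image width, on the matching image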
for (std::vector<vpImagePoint>::const_iterator it = ransacInliers.begin(); it != ransacInliers.end(); ++it) {
  vpDisplay::displayCircle(I, *it, 4, vpColor::green);
  vpImagePoint imPt(*it);
  imPt.set_u(imPt.get_u() + I.getWidth());
  vpDisplay::displayCircle(IMatching, imPt, 4, vpColor::green);
}
for (std::vector<vpImagePoint>::const_iterator it = ransacOutliers.begin(); it != ransacOutliers.end(); ++it) {
  vpDisplay::displayCircle(I, *it, 4, vpColor::red);
  vpImagePoint imPt(*it);
  imPt.set_u(imPt.get_u() + I.getWidth());
  vpDisplay::displayCircle(IMatching, imPt, 4, vpColor::red);
}
keypoint_detection.displayMatching(I, IMatching);
}

vpDisplay::flush(I);
vpDisplay::flush(IMatching);
// A click in either window stops the processing
if (vpDisplay::getClick(I, false)) {
  click_done = true;
  break;
}
if (vpDisplay::getClick(IMatching, false)) {
  click_done = true;
  break;
}
}
// Wait for a final click if the user did not already stop the processing
if (!click_done)
  vpDisplay::getClick(IMatching);
#ifdef VISP_HAVE_XML2
// Free the memory allocated by the xml parser
vpXmlParser::cleanup();
#endif
#if defined(VISP_HAVE_COIN3D) && (COIN_MAJOR_VERSION == 3)
SoDB::finish();
#endif
} catch (const vpException &e) {
std::cout << "Catch an exception: " << e << std::endl;
}
#else
(void)argc;
(void)argv;
std::cout << "Install OpenCV or ffmpeg and rebuild ViSP to use this example." << std::endl;
#endif
return 0;
}