testGenericTrackerDeterminist.cpp
/*
* ViSP, open source Visual Servoing Platform software.
* Copyright (C) 2005 - 2024 by Inria. All rights reserved.
*
* This software is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
* See the file LICENSE.txt at the root directory of this source
* distribution for additional information about the GNU GPL.
*
* For using ViSP with software that can not be combined with the GNU
* GPL, please contact Inria about acquiring a ViSP Professional
* Edition License.
*
* See https://visp.inria.fr for more information.
*
* This software was developed at:
* Inria Rennes - Bretagne Atlantique
* Campus Universitaire de Beaulieu
* 35042 Rennes Cedex
* France
*
* If you have questions regarding the use of this file, please contact
* Inria at visp@inria.fr
*
* This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
* WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
*
* Description:
* Check that MBT is deterministic.
*/
#include <visp3/core/vpConfig.h>
#if defined(VISP_HAVE_CATCH2) && defined(VISP_HAVE_THREADS)
#define CATCH_CONFIG_ENABLE_BENCHMARKING
#define CATCH_CONFIG_RUNNER
#include <catch.hpp>
#include <cstdio>
#include <future>
#include <limits>
#include <thread>
#include <visp3/core/vpIoTools.h>
#include <visp3/io/vpImageIo.h>
#include <visp3/mbt/vpMbGenericTracker.h>
// #define DEBUG_DISPLAY // uncomment to check that the tracking is correct
#ifdef DEBUG_DISPLAY
#include <visp3/gui/vpDisplayX.h>
#endif
#ifdef ENABLE_VISP_NAMESPACE
using namespace VISP_NAMESPACE_NAME;
#endif
namespace
{
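// Read image number 'cpt' of the mbt/cube sequence from the ViSP dataset; returns false when the image does not exist.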
bool read_data(int cpt, vpImage<unsigned char> &I)
{
#if VISP_HAVE_DATASET_VERSION >= 0x030600
std::string ext("png");
#else
std::string ext("pgm");
#endif
const std::string env_ipath = vpIoTools::getViSPImagesDataPath();
const std::string ipath = vpIoTools::createFilePath(env_ipath, "mbt/cube/image%04d." + ext);
char buffer[FILENAME_MAX];
snprintf(buffer, FILENAME_MAX, ipath.c_str(), cpt);
std::string image_filename = buffer;
if (!vpIoTools::checkFilename(image_filename)) {
return false;
}
vpImageIo::read(I, image_filename);
return true;
}
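// Check that two poses are identical, element by element, up to machine epsilon.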
void checkPoses(const vpHomogeneousMatrix &cMo1, const vpHomogeneousMatrix &cMo2)
{
for (unsigned int i = 0; i < 3; i++) {
for (unsigned int j = 0; j < 4; j++) {
CHECK(cMo1[i][j] == Approx(cMo2[i][j]).epsilon(std::numeric_limits<double>::epsilon()));
}
}
}
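// Configure the tracker (camera parameters, moving-edge and KLT settings, CAD model) and initialize it from a known pose on the first image of the sequence.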
void configureTracker(vpMbGenericTracker &tracker, vpCameraParameters &cam)
{
const std::string env_ipath = vpIoTools::getViSPImagesDataPath();
const std::string configFile = vpIoTools::createFilePath(env_ipath, "mbt/cube.xml");
const std::string modelFile = vpIoTools::createFilePath(env_ipath, "mbt/cube_and_cylinder.cao");
#if defined(VISP_HAVE_PUGIXML)
const bool verbose = false;
tracker.loadConfigFile(configFile, verbose);
#else
// pugixml is not available: set the corresponding parameters manually
// so that this example code remains self-contained
cam.initPersProjWithoutDistortion(547, 542, 338, 234);
vpMe me;
me.setMaskSize(5);
me.setMaskNumber(180);
me.setRange(7);
me.setLikelihoodThresholdType(vpMe::NORMALIZED_THRESHOLD);
me.setThreshold(5);
me.setMu1(0.5);
me.setMu2(0.5);
vpKltOpencv klt;
klt.setMaxFeatures(300);
klt.setWindowSize(5);
klt.setQuality(0.01);
klt.setBlockSize(3);
tracker.setCameraParameters(cam);
tracker.setMovingEdge(me);
tracker.setKltOpencv(klt);
tracker.setKltMaskBorder(5);
// Specify the clipping distances
tracker.setNearClippingDistance(0.01);
tracker.setFarClippingDistance(0.90);
// tracker.setClipping(tracker.getClipping() | vpMbtPolygon::LEFT_CLIPPING |
// vpMbtPolygon::RIGHT_CLIPPING | vpMbtPolygon::UP_CLIPPING |
// vpMbtPolygon::DOWN_CLIPPING); // Equivalent to FOV_CLIPPING
#endif
tracker.getCameraParameters(cam);
tracker.loadModel(modelFile);
tracker.setDisplayFeatures(true);
const vpPoseVector initPose(0.02231950571, 0.1071368004, 0.5071128378, 2.100485509, 1.146812236, -0.4560126437);
vpImage<unsigned char> I;
read_data(0, I);
tracker.initFromPose(I, vpHomogeneousMatrix(initPose));
}
} // anonymous namespace
TEST_CASE("Check MBT determinism sequential", "[MBT_determinism]")
{
vpImage<unsigned char> I;
vpCameraParameters cam;
vpHomogeneousMatrix cMo1, cMo2;
// First tracker
vpMbGenericTracker tracker1;
configureTracker(tracker1, cam);
read_data(0, I);
#ifdef DEBUG_DISPLAY
vpDisplayX d(I);
#endif
for (int cpt = 0; read_data(cpt, I); cpt++) {
tracker1.track(I);
tracker1.getPose(cMo1);
#ifdef DEBUG_DISPLAY
vpDisplay::display(I);
tracker1.display(I, cMo1, cam, vpColor::red, 3);
vpDisplay::displayFrame(I, cMo1, cam, 0.05, vpColor::none, 3);
vpDisplay::flush(I);
#endif
}
std::cout << "First tracker, final cMo:\n" << cMo1 << std::endl;
// Second tracker
vpMbGenericTracker tracker2;
configureTracker(tracker2, cam);
for (int cpt = 0; read_data(cpt, I); cpt++) {
tracker2.track(I);
tracker2.getPose(cMo2);
#ifdef DEBUG_DISPLAY
vpDisplay::display(I);
tracker2.display(I, cMo2, cam, vpColor::red, 3);
vpDisplay::displayFrame(I, cMo2, cam, 0.05, vpColor::none, 3);
vpDisplay::flush(I);
#endif
}
std::cout << "Second tracker, final cMo:\n" << cMo2 << std::endl;
// Check that both poses are identical
checkPoses(cMo1, cMo2);
}
TEST_CASE("Check MBT determinism parallel", "[MBT_determinism]")
{
// First tracker
std::future<vpHomogeneousMatrix> res_cMo1 = std::async(std::launch::async, []() {
vpMbGenericTracker tracker1;
vpCameraParameters cam;
vpImage<unsigned char> I;
vpHomogeneousMatrix cMo1;
configureTracker(tracker1, cam);
for (int cpt = 0; read_data(cpt, I); cpt++) {
tracker1.track(I);
tracker1.getPose(cMo1);
}
return cMo1;
});
// Second tracker
std::future<vpHomogeneousMatrix> res_cMo2 = std::async(std::launch::async, []() {
vpMbGenericTracker tracker2;
vpCameraParameters cam;
vpImage<unsigned char> I;
vpHomogeneousMatrix cMo2;
configureTracker(tracker2, cam);
for (int cpt = 0; read_data(cpt, I); cpt++) {
tracker2.track(I);
tracker2.getPose(cMo2);
}
return cMo2;
});
vpHomogeneousMatrix cMo1 = res_cMo1.get();
vpHomogeneousMatrix cMo2 = res_cMo2.get();
std::cout << "Run both trackers in separate threads" << std::endl;
std::cout << "First tracker, final cMo:\n" << cMo1 << std::endl;
std::cout << "Second tracker, final cMo:\n" << cMo2 << std::endl;
// Check that both poses are identical
checkPoses(cMo1, cMo2);
}
TEST_CASE("Check Stereo MBT determinism parallel", "[MBT_determinism]")
{
// First tracker
std::future<vpHomogeneousMatrix> res_cMo1 = std::async(std::launch::async, []() {
vpMbGenericTracker tracker1(2);
vpCameraParameters cam;
vpImage<unsigned char> I;
vpHomogeneousMatrix cMo1;
configureTracker(tracker1, cam);
for (int cpt = 0; read_data(cpt, I); cpt++) {
tracker1.track(I, I);
tracker1.getPose(cMo1);
}
return cMo1;
});
// Second tracker
std::future<vpHomogeneousMatrix> res_cMo2 = std::async(std::launch::async, []() {
vpMbGenericTracker tracker2(2);
vpCameraParameters cam;
vpImage<unsigned char> I;
vpHomogeneousMatrix cMo2;
configureTracker(tracker2, cam);
for (int cpt = 0; read_data(cpt, I); cpt++) {
tracker2.track(I, I);
tracker2.getPose(cMo2);
}
return cMo2;
});
vpHomogeneousMatrix cMo1 = res_cMo1.get();
vpHomogeneousMatrix cMo2 = res_cMo2.get();
std::cout << "Run both stereo trackers in separate threads" << std::endl;
std::cout << "First tracker, final cMo:\n" << cMo1 << std::endl;
std::cout << "Second tracker, final cMo:\n" << cMo2 << std::endl;
// Check that both poses are identical
checkPoses(cMo1, cMo2);
}
int main(int argc, char *argv[])
{
Catch::Session session; // There must be exactly one instance
// Let Catch (using Clara) parse the command line
session.applyCommandLine(argc, argv);
int numFailed = session.run();
// numFailed is clamped to 255 as some unices only use the lower 8 bits.
// This clamping has already been applied, so just return it here
// You can also do any post run clean-up here
return numFailed;
}
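// Example invocation (the binary name below is an assumption taken from the source
// file name; vpIoTools::getViSPImagesDataPath() locates the dataset through the
// VISP_INPUT_IMAGE_PATH environment variable):
//   $ export VISP_INPUT_IMAGE_PATH=/path/to/visp-images
//   $ ./testGenericTrackerDeterminist "[MBT_determinism]"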
#else
#include <iostream>
int main() { return EXIT_SUCCESS; }
#endif