Visual Servoing Platform  version 3.3.0 under development (2020-02-17)
tutorial-apriltag-detector.cpp

//! [Include]
#include <visp3/detection/vpDetectorAprilTag.h>
//! [Include]
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/io/vpImageIo.h>
#ifdef VISP_HAVE_PUGIXML
#include <visp3/core/vpXmlParserCamera.h>
#endif
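// This tutorial reads an image, detects the AprilTags it contains, estimates the
// pose of each tag from the camera parameters and the tag size, and overlays the
// results (corner indices, tag id and a 3D frame) in a display window.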
int main(int argc, const char **argv)
{
#if defined(VISP_HAVE_APRILTAG) && (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI) || defined(VISP_HAVE_OPENCV))
std::string input_filename = "AprilTag.pgm";
vpDetectorAprilTag::vpAprilTagFamily tagFamily = vpDetectorAprilTag::TAG_36h11;
vpDetectorAprilTag::vpPoseEstimationMethod poseEstimationMethod = vpDetectorAprilTag::HOMOGRAPHY_VIRTUAL_VS;
double tagSize = 0.053;
float quad_decimate = 1.0;
int nThreads = 1;
std::string intrinsic_file = "";
std::string camera_name = "";
bool display_tag = false;
int color_id = -1;
unsigned int thickness = 2;
bool z_aligned = false;
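// Override the defaults above from the command line.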
for (int i = 1; i < argc; i++) {
if (std::string(argv[i]) == "--pose_method" && i + 1 < argc) {
poseEstimationMethod = (vpDetectorAprilTag::vpPoseEstimationMethod)atoi(argv[i + 1]);
} else if (std::string(argv[i]) == "--tag_size" && i + 1 < argc) {
tagSize = atof(argv[i + 1]);
} else if (std::string(argv[i]) == "--input" && i + 1 < argc) {
input_filename = std::string(argv[i + 1]);
} else if (std::string(argv[i]) == "--quad_decimate" && i + 1 < argc) {
quad_decimate = (float)atof(argv[i + 1]);
} else if (std::string(argv[i]) == "--nthreads" && i + 1 < argc) {
nThreads = atoi(argv[i + 1]);
} else if (std::string(argv[i]) == "--intrinsic" && i + 1 < argc) {
intrinsic_file = std::string(argv[i + 1]);
} else if (std::string(argv[i]) == "--camera_name" && i + 1 < argc) {
camera_name = std::string(argv[i + 1]);
} else if (std::string(argv[i]) == "--display_tag") {
display_tag = true;
} else if (std::string(argv[i]) == "--color" && i + 1 < argc) {
color_id = atoi(argv[i + 1]);
} else if (std::string(argv[i]) == "--thickness" && i + 1 < argc) {
thickness = (unsigned int)atoi(argv[i + 1]);
} else if (std::string(argv[i]) == "--tag_family" && i + 1 < argc) {
tagFamily = (vpDetectorAprilTag::vpAprilTagFamily)atoi(argv[i + 1]);
} else if (std::string(argv[i]) == "--z_aligned") {
z_aligned = true;
} else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
std::cout << "Usage: " << argv[0]
<< " [--input <input file>] [--tag_size <tag_size in m>]"
" [--quad_decimate <quad_decimate>] [--nthreads <nb>]"
" [--intrinsic <intrinsic file>] [--camera_name <camera name>]"
" [--pose_method <method> (0: HOMOGRAPHY, 1: HOMOGRAPHY_VIRTUAL_VS, "
" 2: DEMENTHON_VIRTUAL_VS, 3: LAGRANGE_VIRTUAL_VS, "
" 4: BEST_RESIDUAL_VIRTUAL_VS, 5: HOMOGRAPHY_ORTHOGONAL_ITERATION) (default: 0)]"
" [--tag_family <family> (0: TAG_36h11, 1: TAG_36h10 (DEPRECATED), 2: TAG_36ARTOOLKIT (DEPRECATED),"
" 3: TAG_25h9, 4: TAG_25h7 (DEPRECATED), 5: TAG_16h5, 6: TAG_CIRCLE21h7, 7: TAG_CIRCLE49h12,"
" 8: TAG_CUSTOM48h12, 9: TAG_STANDARD41h12, 10: TAG_STANDARD52h13) (default: 0)]"
" [--display_tag] [--color <color_id (0, 1, ...)>]"
" [--thickness <thickness>] [--z_aligned]"
" [--help]"
<< std::endl;
return EXIT_SUCCESS;
}
}
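// A possible invocation, assuming the binary is named tutorial-apriltag-detector
// (the options are the ones parsed above):
//   ./tutorial-apriltag-detector --input AprilTag.pgm --tag_size 0.053 --display_tag
// Default camera parameters, overridden below when --intrinsic and --camera_name
// point to a ViSP camera calibration XML file.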
vpCameraParameters cam;
cam.initPersProjWithoutDistortion(615.1674805, 615.1675415, 312.1889954, 243.4373779);
#ifdef VISP_HAVE_PUGIXML
vpXmlParserCamera parser;
if (!intrinsic_file.empty() && !camera_name.empty())
parser.parse(cam, intrinsic_file, camera_name, vpCameraParameters::perspectiveProjWithoutDistortion);
#endif
std::cout << cam << std::endl;
std::cout << "poseEstimationMethod: " << poseEstimationMethod << std::endl;
std::cout << "tagFamily: " << tagFamily << std::endl;
std::cout << "nThreads : " << nThreads << std::endl;
std::cout << "Z aligned: " << z_aligned << std::endl;
try {
vpImage<unsigned char> I;
vpImageIo::read(I, input_filename);
#ifdef VISP_HAVE_X11
vpDisplayX d(I);
#elif defined(VISP_HAVE_GDI)
vpDisplayGDI d(I);
#elif defined(VISP_HAVE_OPENCV)
vpDisplayOpenCV d(I);
#endif
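// The display (X11, GDI or OpenCV, depending on how ViSP was built) is only used
// to overlay the detection results on the image.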
vpDetectorAprilTag detector(tagFamily);
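// Detector settings: quad_decimate > 1 runs the quad detection on a decimated
// image (faster, slightly less accurate), nThreads sets the number of worker
// threads, and setDisplayTag() asks the detector to draw the detected tag
// contours itself during detect().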
detector.setAprilTagQuadDecimate(quad_decimate);
detector.setAprilTagPoseEstimationMethod(poseEstimationMethod);
detector.setAprilTagNbThreads(nThreads);
detector.setDisplayTag(display_tag, color_id < 0 ? vpColor::none : vpColor::getColor(color_id), thickness);
detector.setZAlignedWithCameraAxis(z_aligned);
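// The detect() call below both detects the tags and, from the camera parameters
// and the tag size, estimates the pose cMo of each tag frame expressed in the
// camera frame.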
vpDisplay::display(I);
double t = vpTime::measureTimeMs();
std::vector<vpHomogeneousMatrix> cMo_vec;
detector.detect(I, tagSize, cam, cMo_vec);
t = vpTime::measureTimeMs() - t;
std::stringstream ss;
ss << "Detection time: " << t << " ms for " << detector.getNbObjects() << " tags";
vpDisplay::displayText(I, 40, 20, ss.str(), vpColor::red);
for (size_t i = 0; i < detector.getNbObjects(); i++) {
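// For each detected tag: getPolygon() returns its four corner points, getBBox()
// its bounding box, and getMessage() a string containing the tag family and
// "id: <n>", from which the numeric id is parsed below.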
std::vector<vpImagePoint> p = detector.getPolygon(i);
vpRect bbox = detector.getBBox(i);
std::string message = detector.getMessage(i);
std::size_t tag_id_pos = message.find("id: ");
if (tag_id_pos != std::string::npos) {
int tag_id = atoi(message.substr(tag_id_pos + 4).c_str());
ss.str("");
ss << "Tag id: " << tag_id;
vpDisplay::displayText(I, (int)(bbox.getTop() - 10), (int)bbox.getLeft(), ss.str(), vpColor::red);
}
for (size_t j = 0; j < p.size(); j++) {
std::ostringstream number;
number << j;
vpDisplay::displayText(I, p[j] + vpImagePoint(15, 5), number.str(), vpColor::blue);
}
}
vpDisplay::displayText(I, 20, 20, "Click to display tag poses", vpColor::red);
for (size_t i = 0; i < cMo_vec.size(); i++) {
vpDisplay::displayFrame(I, cMo_vec[i], cam, tagSize / 2, vpColor::none, 3);
}
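// If a pose is needed rather than only drawn, each cMo can be decomposed,
// e.g. for the first detected tag (a sketch, variable names are illustrative):
//   vpTranslationVector tag_t = cMo_vec[0].getTranslationVector();
//   vpRotationMatrix tag_R = cMo_vec[0].getRotationMatrix();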
vpDisplay::displayText(I, 20, 20, "Click to quit.", vpColor::red);
} catch (const vpException &e) {
std::cerr << "Catch an exception: " << e.getMessage() << std::endl;
}
return EXIT_SUCCESS;
#else
(void)argc;
(void)argv;
return 0;
#endif
}