Visual Servoing Platform version 3.6.1 under development (2024-03-28)
tutorial-apriltag-detector-live.cpp
#include <visp3/core/vpConfig.h>

#ifdef VISP_HAVE_MODULE_SENSOR
#include <visp3/sensor/vp1394CMUGrabber.h>
#include <visp3/sensor/vp1394TwoGrabber.h>
#include <visp3/sensor/vpFlyCaptureGrabber.h>
#include <visp3/sensor/vpRealSense2.h>
#include <visp3/sensor/vpV4l2Grabber.h>
#endif

#include <visp3/detection/vpDetectorAprilTag.h>

#include <visp3/core/vpXmlParserCamera.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>

#if defined(HAVE_OPENCV_VIDEOIO)
#include <opencv2/videoio.hpp>
#endif

// #undef VISP_HAVE_V4L2
// #undef VISP_HAVE_DC1394
// #undef VISP_HAVE_CMU1394
// #undef VISP_HAVE_FLYCAPTURE
// #undef VISP_HAVE_REALSENSE2
// #undef VISP_HAVE_OPENCV

int main(int argc, const char **argv)
{
#if defined(VISP_HAVE_APRILTAG) && \
  (defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_CMU1394) || \
   defined(HAVE_OPENCV_VIDEOIO) || defined(VISP_HAVE_FLYCAPTURE) || defined(VISP_HAVE_REALSENSE2))

  int opt_device = 0; // For OpenCV and V4l2 grabber to set the camera device
  vpDetectorAprilTag::vpAprilTagFamily tagFamily = vpDetectorAprilTag::TAG_36h11;
  vpDetectorAprilTag::vpPoseEstimationMethod poseEstimationMethod = vpDetectorAprilTag::HOMOGRAPHY_VIRTUAL_VS;
  double tagSize = 0.053;
  float quad_decimate = 1.0;
  int nThreads = 1;
  std::string intrinsic_file = "";
  std::string camera_name = "";
  bool display_tag = false;
  int color_id = -1;
  unsigned int thickness = 2;
  bool align_frame = false;

#if !(defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI) || defined(HAVE_OPENCV_HIGHGUI))
  bool display_off = true;
  std::cout << "Warning: There is no 3rd party (X11, GDI or OpenCV) to display images..." << std::endl;
#else
  bool display_off = false;
#endif

  vpImage<unsigned char> I;

  for (int i = 1; i < argc; i++) {
    if (std::string(argv[i]) == "--pose_method" && i + 1 < argc) {
      poseEstimationMethod = (vpDetectorAprilTag::vpPoseEstimationMethod)atoi(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--tag_size" && i + 1 < argc) {
      tagSize = atof(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--camera_device" && i + 1 < argc) {
      opt_device = atoi(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--quad_decimate" && i + 1 < argc) {
      quad_decimate = (float)atof(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--nthreads" && i + 1 < argc) {
      nThreads = atoi(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--intrinsic" && i + 1 < argc) {
      intrinsic_file = std::string(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--camera_name" && i + 1 < argc) {
      camera_name = std::string(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--display_tag") {
      display_tag = true;
    }
    else if (std::string(argv[i]) == "--display_off") {
      display_off = true;
    }
    else if (std::string(argv[i]) == "--color" && i + 1 < argc) {
      color_id = atoi(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--thickness" && i + 1 < argc) {
      thickness = (unsigned int)atoi(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--tag_family" && i + 1 < argc) {
      tagFamily = (vpDetectorAprilTag::vpAprilTagFamily)atoi(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--z_aligned") {
      align_frame = true;
    }
    else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
      std::cout << "Usage: " << argv[0] << " [--camera_device <camera device> (default: 0)]"
                << " [--tag_size <tag_size in m> (default: 0.053)]"
                   " [--quad_decimate <quad_decimate> (default: 1)]"
                   " [--nthreads <nb> (default: 1)]"
                   " [--intrinsic <intrinsic file> (default: empty)]"
                   " [--camera_name <camera name> (default: empty)]"
                   " [--pose_method <method> (0: HOMOGRAPHY, 1: HOMOGRAPHY_VIRTUAL_VS, "
                   " 2: DEMENTHON_VIRTUAL_VS, 3: LAGRANGE_VIRTUAL_VS, "
                   " 4: BEST_RESIDUAL_VIRTUAL_VS, 5: HOMOGRAPHY_ORTHOGONAL_ITERATION) (default: 0)]"
                   " [--tag_family <family> (0: TAG_36h11, 1: TAG_36h10 (DEPRECATED), 2: TAG_36ARTOOLKIT (DEPRECATED),"
                   " 3: TAG_25h9, 4: TAG_25h7 (DEPRECATED), 5: TAG_16h5, 6: TAG_CIRCLE21h7, 7: TAG_CIRCLE49h12,"
                   " 8: TAG_CUSTOM48h12, 9: TAG_STANDARD41h12, 10: TAG_STANDARD52h13) (default: 0)]"
                   " [--display_tag] [--z_aligned]";
#if (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI) || defined(VISP_HAVE_OPENCV))
      std::cout << " [--display_off] [--color <color id>] [--thickness <line thickness>]";
#endif
      std::cout << " [--help]" << std::endl;
      return EXIT_SUCCESS;
    }
  }

  try {
    vpCameraParameters cam;
    cam.initPersProjWithoutDistortion(615.1674805, 615.1675415, 312.1889954, 243.4373779);
#if defined(VISP_HAVE_PUGIXML)
    vpXmlParserCamera parser;
    if (!intrinsic_file.empty() && !camera_name.empty())
      parser.parse(cam, intrinsic_file, camera_name, vpCameraParameters::perspectiveProjWithoutDistortion);
#endif

#if defined(VISP_HAVE_V4L2)
    vpV4l2Grabber g;
    std::ostringstream device;
    device << "/dev/video" << opt_device;
    std::cout << "Use Video 4 Linux grabber on device " << device.str() << std::endl;
    g.setDevice(device.str());
    g.setScale(1);
    g.open(I);
#elif defined(VISP_HAVE_DC1394)
    (void)opt_device; // To avoid non used warning
    std::cout << "Use DC1394 grabber" << std::endl;
    vp1394TwoGrabber g;
    g.open(I);
#elif defined(VISP_HAVE_CMU1394)
    (void)opt_device; // To avoid non used warning
    std::cout << "Use CMU1394 grabber" << std::endl;
    vp1394CMUGrabber g;
    g.open(I);
#elif defined(VISP_HAVE_FLYCAPTURE)
    (void)opt_device; // To avoid non used warning
    std::cout << "Use FlyCapture grabber" << std::endl;
    vpFlyCaptureGrabber g;
    g.open(I);
#elif defined(VISP_HAVE_REALSENSE2)
    (void)opt_device; // To avoid non used warning
    std::cout << "Use Realsense 2 grabber" << std::endl;
    vpRealSense2 g;
    rs2::config config;
    config.disable_stream(RS2_STREAM_DEPTH);
    config.disable_stream(RS2_STREAM_INFRARED);
    config.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30);
    g.open(config);
    g.acquire(I);

    std::cout << "Read camera parameters from Realsense device" << std::endl;
    cam = g.getCameraParameters(RS2_STREAM_COLOR, vpCameraParameters::perspectiveProjWithoutDistortion);
#elif defined(HAVE_OPENCV_VIDEOIO)
    std::cout << "Use OpenCV grabber on device " << opt_device << std::endl;
    cv::VideoCapture g(opt_device); // Open the default camera
    if (!g.isOpened()) {            // Check if we succeeded
      std::cout << "Failed to open the camera" << std::endl;
      return EXIT_FAILURE;
    }
    cv::Mat frame;
    g >> frame; // get a new frame from camera
    vpImageConvert::convert(frame, I);
#endif

    std::cout << cam << std::endl;
    std::cout << "poseEstimationMethod: " << poseEstimationMethod << std::endl;
    std::cout << "tagFamily: " << tagFamily << std::endl;
    std::cout << "nThreads : " << nThreads << std::endl;
    std::cout << "Z aligned: " << align_frame << std::endl;

    vpDisplay *d = nullptr;
    if (!display_off) {
#ifdef VISP_HAVE_X11
      d = new vpDisplayX(I);
#elif defined(VISP_HAVE_GDI)
      d = new vpDisplayGDI(I);
#elif defined(HAVE_OPENCV_HIGHGUI)
      d = new vpDisplayOpenCV(I);
#endif
    }

    vpDetectorAprilTag detector(tagFamily);

    detector.setAprilTagQuadDecimate(quad_decimate);
    detector.setAprilTagPoseEstimationMethod(poseEstimationMethod);
    detector.setAprilTagNbThreads(nThreads);
    detector.setDisplayTag(display_tag, color_id < 0 ? vpColor::none : vpColor::getColor(color_id), thickness);
    detector.setZAlignedWithCameraAxis(align_frame);

    std::vector<double> time_vec;
    for (;;) {
#if defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_CMU1394) || \
    defined(VISP_HAVE_FLYCAPTURE) || defined(VISP_HAVE_REALSENSE2)
      g.acquire(I);
#elif defined(HAVE_OPENCV_VIDEOIO)
      g >> frame;
      vpImageConvert::convert(frame, I);
#endif

      vpDisplay::display(I);

      double t = vpTime::measureTimeMs();
      std::vector<vpHomogeneousMatrix> cMo_vec;
      detector.detect(I, tagSize, cam, cMo_vec);
      t = vpTime::measureTimeMs() - t;
      time_vec.push_back(t);

      std::stringstream ss;
      ss << "Detection time: " << t << " ms for " << detector.getNbObjects() << " tags";
      vpDisplay::displayText(I, 40, 20, ss.str(), vpColor::red);

      for (size_t i = 0; i < cMo_vec.size(); i++) {
        vpDisplay::displayFrame(I, cMo_vec[i], cam, tagSize / 2, vpColor::none, 3);
      }

      vpDisplay::displayText(I, 20, 20, "Click to quit.", vpColor::red);
      vpDisplay::flush(I);
      if (vpDisplay::getClick(I, false))
        break;
    }

    std::cout << "Benchmark computation time" << std::endl;
    std::cout << "Mean / Median / Std: " << vpMath::getMean(time_vec) << " ms"
              << " ; " << vpMath::getMedian(time_vec) << " ms"
              << " ; " << vpMath::getStdev(time_vec) << " ms" << std::endl;

    if (!display_off)
      delete d;

  }
  catch (const vpException &e) {
    std::cerr << "Catch an exception: " << e.getMessage() << std::endl;
  }

  return EXIT_SUCCESS;
#else
  (void)argc;
  (void)argv;
#ifndef VISP_HAVE_APRILTAG
  std::cout << "Enable Apriltag support, configure and build ViSP to run this tutorial" << std::endl;
#else
  std::cout << "Install a 3rd party dedicated to frame grabbing (dc1394, cmu1394, v4l2, OpenCV, FlyCapture, "
               "Realsense2), configure and build ViSP again to use this example"
            << std::endl;
#endif
#endif
  return EXIT_SUCCESS;
}
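
Example of use: assuming the tutorial is built into an executable named tutorial-apriltag-detector-live (name taken from the source file; adapt it to your build), the invocation below would grab images from camera device 0 and detect 36h11 tags of 0.053 m, drawing the detected tag edges:

  ./tutorial-apriltag-detector-live --camera_device 0 --tag_size 0.053 --tag_family 0 --display_tag

All available options are printed by running the program with --help.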