/*
 * Visual Servoing Platform (ViSP) version 3.6.1 under development (2024-12-17)
 * tutorial-apriltag-detector-live.cpp
 */
#include <visp3/core/vpConfig.h>

#include <cstdlib>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

#ifdef VISP_HAVE_MODULE_SENSOR
#include <visp3/sensor/vp1394CMUGrabber.h>
#include <visp3/sensor/vp1394TwoGrabber.h>
#include <visp3/sensor/vpFlyCaptureGrabber.h>
#include <visp3/sensor/vpRealSense2.h>
#include <visp3/sensor/vpV4l2Grabber.h>
#endif

#include <visp3/core/vpImage.h>
#include <visp3/core/vpImageConvert.h>
#include <visp3/core/vpXmlParserCamera.h>
#include <visp3/detection/vpDetectorAprilTag.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>

#if defined(HAVE_OPENCV_VIDEOIO)
#include <opencv2/videoio.hpp>
#endif

// Uncomment one of the following lines to force-disable a given 3rd party:
// #undef VISP_HAVE_V4L2
// #undef VISP_HAVE_DC1394
// #undef VISP_HAVE_CMU1394
// #undef VISP_HAVE_FLYCAPTURE
// #undef VISP_HAVE_REALSENSE2
// #undef VISP_HAVE_OPENCV

/*
 * Grab images from the first available camera framework (V4L2, DC1394,
 * CMU1394, FlyCapture, RealSense2 or OpenCV), detect AprilTags in every
 * frame, estimate each tag pose from the camera intrinsics, and display the
 * result until the user clicks in the image.
 *
 * Returns EXIT_SUCCESS, or EXIT_FAILURE when the OpenCV camera cannot be
 * opened.
 */
int main(int argc, const char **argv)
{
#if defined(VISP_HAVE_APRILTAG) && \
  (defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_CMU1394) || \
   defined(HAVE_OPENCV_VIDEOIO) || defined(VISP_HAVE_FLYCAPTURE) || defined(VISP_HAVE_REALSENSE2))

#ifdef ENABLE_VISP_NAMESPACE
  using namespace VISP_NAMESPACE_NAME;
#endif

  int opt_device = 0; // For OpenCV and V4l2 grabber to set the camera device
  // NOTE(review): the two declarations below were missing from the extracted
  // listing although the variables are parsed from the CLI and used by the
  // detector; restored with the tutorial defaults (36h11 family, homography +
  // virtual visual servoing pose estimation).
  vpDetectorAprilTag::vpAprilTagFamily tagFamily = vpDetectorAprilTag::TAG_36h11;
  vpDetectorAprilTag::vpPoseEstimationMethod poseEstimationMethod = vpDetectorAprilTag::HOMOGRAPHY_VIRTUAL_VS;
  double tagSize = 0.053;          // Tag side length, in meters
  float quad_decimate = 1.0;       // Detector input decimation (>1: faster, less accurate)
  int nThreads = 1;                // Number of threads used by the AprilTag detector
  std::string intrinsic_file = ""; // Optional XML file holding camera intrinsics
  std::string camera_name = "";    // Camera name to look up in the XML file
  bool display_tag = false;        // Draw the detected tag contours
  int color_id = -1;               // -1: default multi-color display, otherwise a vpColor index
  unsigned int thickness = 2;      // Line thickness (pixels) of the overlays
  bool align_frame = false;        // Align the tag frame z-axis with the camera axis

#if !(defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI) || defined(HAVE_OPENCV_HIGHGUI))
  bool display_off = true;
  std::cout << "Warning: There is no 3rd party (X11, GDI or openCV) to display images..." << std::endl;
#else
  bool display_off = false;
#endif

  vpImage<unsigned char> I; // Grey-level image filled by the grabber (declaration restored)

  // Command-line parsing; every option that expects a value checks i + 1 < argc.
  for (int i = 1; i < argc; i++) {
    if (std::string(argv[i]) == "--pose_method" && i + 1 < argc) {
      poseEstimationMethod = (vpDetectorAprilTag::vpPoseEstimationMethod)atoi(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--tag_size" && i + 1 < argc) {
      tagSize = atof(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--camera_device" && i + 1 < argc) {
      opt_device = atoi(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--quad_decimate" && i + 1 < argc) {
      quad_decimate = (float)atof(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--nthreads" && i + 1 < argc) {
      nThreads = atoi(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--intrinsic" && i + 1 < argc) {
      intrinsic_file = std::string(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--camera_name" && i + 1 < argc) {
      camera_name = std::string(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--display_tag") {
      display_tag = true;
    }
    else if (std::string(argv[i]) == "--display_off") {
      display_off = true;
    }
    else if (std::string(argv[i]) == "--color" && i + 1 < argc) {
      color_id = atoi(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--thickness" && i + 1 < argc) {
      thickness = (unsigned int)atoi(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--tag_family" && i + 1 < argc) {
      tagFamily = (vpDetectorAprilTag::vpAprilTagFamily)atoi(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--z_aligned") {
      align_frame = true;
    }
    else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
      // Default for --pose_method fixed to 1 to match the restored
      // HOMOGRAPHY_VIRTUAL_VS default above.
      std::cout << "Usage: " << argv[0] << " [--camera_device <camera device> (default: 0)]"
        << " [--tag_size <tag_size in m> (default: 0.053)]"
        " [--quad_decimate <quad_decimate> (default: 1)]"
        " [--nthreads <nb> (default: 1)]"
        " [--intrinsic <intrinsic file> (default: empty)]"
        " [--camera_name <camera name> (default: empty)]"
        " [--pose_method <method> (0: HOMOGRAPHY, 1: HOMOGRAPHY_VIRTUAL_VS, "
        " 2: DEMENTHON_VIRTUAL_VS, 3: LAGRANGE_VIRTUAL_VS, "
        " 4: BEST_RESIDUAL_VIRTUAL_VS, 5: HOMOGRAPHY_ORTHOGONAL_ITERATION) (default: 1)]"
        " [--tag_family <family> (0: TAG_36h11, 1: TAG_36h10 (DEPRECATED), 2: TAG_36ARTOOLKIT (DEPRECATED),"
        " 3: TAG_25h9, 4: TAG_25h7 (DEPRECATED), 5: TAG_16h5, 6: TAG_CIRCLE21h7, 7: TAG_CIRCLE49h12,"
        " 8: TAG_CUSTOM48h12, 9: TAG_STANDARD41h12, 10: TAG_STANDARD52h13) (default: 0)]"
        " [--display_tag] [--z_aligned]";
// Consistency fix: test the same macro the display creation below uses.
#if (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI) || defined(HAVE_OPENCV_HIGHGUI))
      std::cout << " [--display_off] [--color <color id>] [--thickness <line thickness>]";
#endif
      std::cout << " [--help]" << std::endl;
      return EXIT_SUCCESS;
    }
  }

  try {
    // Default intrinsics; overridden by the XML file when provided, or read
    // from the device when the RealSense grabber is used.
    vpCameraParameters cam;
    cam.initPersProjWithoutDistortion(615.1674805, 615.1675415, 312.1889954, 243.4373779);
#if defined(VISP_HAVE_PUGIXML)
    vpXmlParserCamera parser;
    if (!intrinsic_file.empty() && !camera_name.empty())
      parser.parse(cam, intrinsic_file, camera_name, vpCameraParameters::perspectiveProjWithoutDistortion);
#endif

    // Open the first available grabber and acquire a first image into I.
#if defined(VISP_HAVE_V4L2)
    vpV4l2Grabber g;
    std::ostringstream device;
    device << "/dev/video" << opt_device;
    std::cout << "Use Video 4 Linux grabber on device " << device.str() << std::endl;
    g.setDevice(device.str());
    g.setScale(1);
    g.open(I);
#elif defined(VISP_HAVE_DC1394)
    (void)opt_device; // To avoid non used warning
    std::cout << "Use DC1394 grabber" << std::endl;
    vp1394TwoGrabber g; // Declaration restored (dropped by the extraction)
    g.open(I);
#elif defined(VISP_HAVE_CMU1394)
    (void)opt_device; // To avoid non used warning
    std::cout << "Use CMU1394 grabber" << std::endl;
    vp1394CMUGrabber g; // Declaration restored (dropped by the extraction)
    g.open(I);
#elif defined(VISP_HAVE_FLYCAPTURE)
    (void)opt_device; // To avoid non used warning
    std::cout << "Use FlyCapture grabber" << std::endl;
    vpFlyCaptureGrabber g; // Declaration restored (dropped by the extraction)
    g.open(I);
#elif defined(VISP_HAVE_REALSENSE2)
    (void)opt_device; // To avoid non used warning
    std::cout << "Use Realsense 2 grabber" << std::endl;
    vpRealSense2 g;
    rs2::config config;
    config.disable_stream(RS2_STREAM_DEPTH);
    config.disable_stream(RS2_STREAM_INFRARED);
    config.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30);
    g.open(config);
    g.acquire(I);

    std::cout << "Read camera parameters from Realsense device" << std::endl;
    // Restored: actually fetch the device intrinsics instead of keeping the
    // hard-coded defaults.
    cam = g.getCameraParameters(RS2_STREAM_COLOR, vpCameraParameters::perspectiveProjWithoutDistortion);
#elif defined(HAVE_OPENCV_VIDEOIO)
    std::cout << "Use OpenCV grabber on device " << opt_device << std::endl;
    cv::VideoCapture g(opt_device); // Open the default camera
    if (!g.isOpened()) {            // Check if we succeeded
      std::cout << "Failed to open the camera" << std::endl;
      return EXIT_FAILURE;
    }
    cv::Mat frame;
    g >> frame; // get a new frame from camera
    vpImageConvert::convert(frame, I);
#endif

    std::cout << cam << std::endl;
    std::cout << "poseEstimationMethod: " << poseEstimationMethod << std::endl;
    std::cout << "tagFamily: " << tagFamily << std::endl;
    std::cout << "nThreads : " << nThreads << std::endl;
    std::cout << "Z aligned: " << align_frame << std::endl;

    // Optional display; d stays nullptr when display is off or no GUI 3rd
    // party is available.
    vpDisplay *d = nullptr;
    if (!display_off) {
#ifdef VISP_HAVE_X11
      d = new vpDisplayX(I);
#elif defined(VISP_HAVE_GDI)
      d = new vpDisplayGDI(I);
#elif defined(HAVE_OPENCV_HIGHGUI)
      d = new vpDisplayOpenCV(I);
#endif
    }

    vpDetectorAprilTag detector(tagFamily);
    detector.setAprilTagQuadDecimate(quad_decimate);
    detector.setAprilTagPoseEstimationMethod(poseEstimationMethod);
    detector.setAprilTagNbThreads(nThreads);
    detector.setDisplayTag(display_tag, color_id < 0 ? vpColor::none : vpColor::getColor(color_id), thickness);
    detector.setZAlignedWithCameraAxis(align_frame);

    std::vector<double> time_vec;
    for (;;) {
      // Acquire the next frame with whichever grabber was selected above.
#if defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_CMU1394) || \
    defined(VISP_HAVE_FLYCAPTURE) || defined(VISP_HAVE_REALSENSE2)
      g.acquire(I);
#elif defined(HAVE_OPENCV_VIDEOIO)
      g >> frame;
      vpImageConvert::convert(frame, I);
#endif

      // Restored: redraw the background image each iteration so overlays do
      // not accumulate over a stale frame.
      vpDisplay::display(I);

      // Time the detection + pose estimation for the benchmark below.
      double t = vpTime::measureTimeMs();
      std::vector<vpHomogeneousMatrix> cMo_vec;
      detector.detect(I, tagSize, cam, cMo_vec);
      t = vpTime::measureTimeMs() - t;
      time_vec.push_back(t);

      std::stringstream ss;
      ss << "Detection time: " << t << " ms for " << detector.getNbObjects() << " tags";
      vpDisplay::displayText(I, 40, 20, ss.str(), vpColor::red);

      // Overlay the estimated pose frame of each detected tag.
      for (size_t i = 0; i < cMo_vec.size(); i++) {
        vpDisplay::displayFrame(I, cMo_vec[i], cam, tagSize / 2, vpColor::none, 3);
      }

      vpDisplay::displayText(I, 20, 20, "Click to quit.", vpColor::red);
      vpDisplay::flush(I);
      if (vpDisplay::getClick(I, false))
        break;
    }

    std::cout << "Benchmark computation time" << std::endl;
    std::cout << "Mean / Median / Std: " << vpMath::getMean(time_vec) << " ms"
              << " ; " << vpMath::getMedian(time_vec) << " ms"
              << " ; " << vpMath::getStdev(time_vec) << " ms" << std::endl;

    if (!display_off)
      delete d;
  }
  catch (const vpException &e) {
    std::cerr << "Catch an exception: " << e.getMessage() << std::endl;
  }

  return EXIT_SUCCESS;
#else
  (void)argc;
  (void)argv;
#ifndef VISP_HAVE_APRILTAG
  std::cout << "Enable Apriltag support, configure and build ViSP to run this tutorial" << std::endl;
#else
  std::cout << "Install a 3rd party dedicated to frame grabbing (dc1394, cmu1394, v4l2, OpenCV, FlyCapture, "
               "Realsense2), configure and build ViSP again to use this example"
            << std::endl;
#endif
#endif
  return EXIT_SUCCESS;
}
/*
 * Doxygen-generated cross-reference listing for the APIs used above (kept
 * verbatim from the extracted documentation page):
 *
 * Firewire cameras video capture based on CMU 1394 Digital Camera SDK.
 * void open(vpImage< unsigned char > &I)
 * Class for firewire ieee1394 video devices using libdc1394-2.x api.
 * void open(vpImage< unsigned char > &I)
 * Generic class defining intrinsic camera parameters.
 * void initPersProjWithoutDistortion(double px, double py, double u0, double v0)
 * @ perspectiveProjWithoutDistortion
 * Perspective projection without distortion model.
 * static vpColor getColor(const unsigned int &i)
 * Definition: vpColor.h:311
 * static const vpColor red
 * Definition: vpColor.h:217
 * static const vpColor none
 * Definition: vpColor.h:229
 * @ TAG_36h11
 * AprilTag 36h11 pattern (recommended)
 * Display for windows using GDI (available on any windows 32 platform).
 * Definition: vpDisplayGDI.h:130
 * The vpDisplayOpenCV allows to display image using the OpenCV library. Thus to enable this class OpenC...
 * Class that defines generic functionalities for display.
 * Definition: vpDisplay.h:178
 * static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
 * static void display(const vpImage< unsigned char > &I)
 * static void displayFrame(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, double size, const vpColor &color=vpColor::none, unsigned int thickness=1, const vpImagePoint &offset=vpImagePoint(0, 0), const std::string &frameName="", const vpColor &textColor=vpColor::black, const vpImagePoint &textOffset=vpImagePoint(15, 15))
 * static void flush(const vpImage< unsigned char > &I)
 * static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
 * error that can be emitted by ViSP classes.
 * Definition: vpException.h:60
 * const char * getMessage() const
 * Definition: vpException.cpp:65
 * void open(vpImage< unsigned char > &I)
 * static void convert(const vpImage< unsigned char > &src, vpImage< vpRGBa > &dest)
 * static double getMedian(const std::vector< double > &v)
 * Definition: vpMath.cpp:322
 * static double getStdev(const std::vector< double > &v, bool useBesselCorrection=false)
 * Definition: vpMath.cpp:353
 * static double getMean(const std::vector< double > &v)
 * Definition: vpMath.cpp:302
 * vpCameraParameters getCameraParameters(const rs2_stream &stream, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithDistortion, int index=-1) const
 * void acquire(vpImage< unsigned char > &grey, double *ts=nullptr)
 * bool open(const rs2::config &cfg=rs2::config())
 * Class that is a wrapper over the Video4Linux2 (V4L2) driver.
 * void open(vpImage< unsigned char > &I)
 * void setScale(unsigned scale=vpV4l2Grabber::DEFAULT_SCALE)
 * void setDevice(const std::string &devname)
 * XML parser to load and save intrinsic camera parameters.
 * int parse(vpCameraParameters &cam, const std::string &filename, const std::string &camera_name, const vpCameraParameters::vpCameraParametersProjType &projModel, unsigned int image_width=0, unsigned int image_height=0, bool verbose=true)
 * VISP_EXPORT double measureTimeMs()
 */