Visual Servoing Platform  version 3.6.1 under development (2024-11-15)
tutorial-apriltag-detector-live-T265-realsense.cpp
#include <visp3/core/vpConfig.h>

#ifdef VISP_HAVE_MODULE_SENSOR
#include <visp3/sensor/vpRealSense2.h>
#endif

#include <visp3/detection/vpDetectorAprilTag.h>

#include <visp3/core/vpImageConvert.h>
#include <visp3/core/vpImageTools.h>
#include <visp3/core/vpMeterPixelConversion.h>
#include <visp3/core/vpPixelMeterConversion.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/vision/vpPose.h>
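// This tutorial grabs the left fisheye image of an Intel RealSense T265, undistorts it, and runs
// AprilTag detection with pose estimation on the undistorted image, displaying the results live.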
int main(int argc, const char **argv)
{
  // Realsense T265 is only supported if realsense API > 2.31.0
#if defined(VISP_HAVE_APRILTAG) && defined(VISP_HAVE_REALSENSE2) && (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))

#ifdef ENABLE_VISP_NAMESPACE
  using namespace VISP_NAMESPACE_NAME;
#endif

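  // Default settings for the tag detector; each value can be overridden from the command line (see --help).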
  vpDetectorAprilTag::vpAprilTagFamily tagFamily = vpDetectorAprilTag::TAG_36h11;
  vpDetectorAprilTag::vpPoseEstimationMethod poseEstimationMethod = vpDetectorAprilTag::HOMOGRAPHY_VIRTUAL_VS;
  double tagSize = 0.053;
  float quad_decimate = 1.0;
  int nThreads = 1;
  bool display_tag = false;
  int color_id = -1;
  unsigned int thickness = 2;
  bool align_frame = false;

#if !(defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI) || defined(VISP_HAVE_OPENCV))
  bool display_off = true;
  std::cout << "Warning: There is no 3rd party (X11, GDI or OpenCV) to display images..." << std::endl;
#else
  bool display_off = false;
#endif

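  // Command-line parsing: unknown options are silently ignored; --help prints the usage and exits.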
  for (int i = 1; i < argc; i++) {
    if (std::string(argv[i]) == "--pose_method" && i + 1 < argc) {
      poseEstimationMethod = (vpDetectorAprilTag::vpPoseEstimationMethod)atoi(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--tag_size" && i + 1 < argc) {
      tagSize = atof(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--quad_decimate" && i + 1 < argc) {
      quad_decimate = (float)atof(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--nthreads" && i + 1 < argc) {
      nThreads = atoi(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--display_tag") {
      display_tag = true;
    }
    else if (std::string(argv[i]) == "--display_off") {
      display_off = true;
    }
    else if (std::string(argv[i]) == "--color" && i + 1 < argc) {
      color_id = atoi(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--thickness" && i + 1 < argc) {
      thickness = (unsigned int)atoi(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--tag_family" && i + 1 < argc) {
      tagFamily = (vpDetectorAprilTag::vpAprilTagFamily)atoi(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--z_aligned") {
      align_frame = true;
    }
    else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
      std::cout << "Usage: " << argv[0]
        << " [--tag_size <tag_size in m> (default: 0.053)]"
           " [--quad_decimate <quad_decimate> (default: 1)]"
           " [--nthreads <nb> (default: 1)]"
           " [--pose_method <method> (0: HOMOGRAPHY, 1: HOMOGRAPHY_VIRTUAL_VS, "
           " 2: DEMENTHON_VIRTUAL_VS, 3: LAGRANGE_VIRTUAL_VS, "
           " 4: BEST_RESIDUAL_VIRTUAL_VS, 5: HOMOGRAPHY_ORTHOGONAL_ITERATION) (default: 0)]"
           " [--tag_family <family> (0: TAG_36h11, 1: TAG_36h10 (DEPRECATED), 2: TAG_36ARTOOLKIT (DEPRECATED),"
           " 3: TAG_25h9, 4: TAG_25h7 (DEPRECATED), 5: TAG_16h5, 6: TAG_CIRCLE21h7, 7: TAG_CIRCLE49h12,"
           " 8: TAG_CUSTOM48h12, 9: TAG_STANDARD41h12, 10: TAG_STANDARD52h13) (default: 0)]"
           " [--display_tag] [--z_aligned]";
#if (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI) || defined(VISP_HAVE_OPENCV))
      std::cout << " [--display_off] [--color <color id>] [--thickness <line thickness>]";
#endif
      std::cout << " [--help]" << std::endl;
      return EXIT_SUCCESS;
    }
  }

  try {
    std::cout << "Use Realsense 2 grabber" << std::endl;
    vpRealSense2 g;
    rs2::config config;
    unsigned int width = 848, height = 800;
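    // Request both T265 fisheye streams (848x800) in 8-bit grayscale: the streams are first disabled,
    // then re-enabled with the desired Y8 format.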
    config.disable_stream(RS2_STREAM_FISHEYE, 1);
    config.disable_stream(RS2_STREAM_FISHEYE, 2);
    config.enable_stream(RS2_STREAM_FISHEYE, 1, RS2_FORMAT_Y8);
    config.enable_stream(RS2_STREAM_FISHEYE, 2, RS2_FORMAT_Y8);

    vpImage<unsigned char> I_left(height, width);
    vpImage<unsigned char> I_undist(height, width);

    g.open(config);
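    // Grab a first frame; only the left fisheye image is retrieved, the other outputs are skipped with nullptr.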
    g.acquire(&I_left, nullptr, nullptr);

    std::cout << "Read camera parameters from Realsense device" << std::endl;
    vpCameraParameters cam_left, cam_undistort;
    // Left fisheye intrinsics use the Kannala-Brandt distortion model (fisheye sensor index 1 is the left
    // camera); the undistorted image uses a plain pinhole model built from the same px, py, u0, v0.
    cam_left = g.getCameraParameters(RS2_STREAM_FISHEYE, vpCameraParameters::ProjWithKannalaBrandtDistortion, 1);
    cam_undistort.initPersProjWithoutDistortion(cam_left.get_px(), cam_left.get_py(), cam_left.get_u0(),
                                                cam_left.get_v0());

    std::cout << cam_left << std::endl;
    std::cout << "poseEstimationMethod: " << poseEstimationMethod << std::endl;
    std::cout << "tagFamily: " << tagFamily << std::endl;
    std::cout << "nThreads : " << nThreads << std::endl;
    std::cout << "Z aligned: " << align_frame << std::endl;

    vpDisplay *display_left = nullptr;
    vpDisplay *display_undistort = nullptr;
    if (!display_off) {
#ifdef VISP_HAVE_X11
      display_left = new vpDisplayX(I_left, 100, 30, "Left image");
      display_undistort = new vpDisplayX(I_undist, I_left.getWidth(), 30, "Undistorted image");
#elif defined(VISP_HAVE_GDI)
      display_left = new vpDisplayGDI(I_left, 100, 30, "Left image");
      display_undistort = new vpDisplayGDI(I_undist, I_left.getWidth(), 30, "Undistorted image");
#elif defined(HAVE_OPENCV_HIGHGUI)
      display_left = new vpDisplayOpenCV(I_left, 100, 30, "Left image");
      display_undistort = new vpDisplayOpenCV(I_undist, I_left.getWidth(), 30, "Undistorted image");
#endif
    }

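    // Precompute the undistortion maps once; detection and pose estimation are then done on the
    // undistorted image I_undist with the pinhole model cam_undistort.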
    vpArray2D<int> mapU, mapV;
    vpArray2D<float> mapDu, mapDv;
    vpImageTools::initUndistortMap(cam_left, I_left.getWidth(), I_left.getHeight(), mapU, mapV, mapDu, mapDv);

    vpDetectorAprilTag detector(tagFamily);

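    // Apply the user-selected detector settings: quad decimation, pose estimation method, number of threads,
    // optional tag drawing, and whether the tag frame z-axis is aligned with the camera axis.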
    detector.setAprilTagQuadDecimate(quad_decimate);
    detector.setAprilTagPoseEstimationMethod(poseEstimationMethod);
    detector.setAprilTagNbThreads(nThreads);
    detector.setDisplayTag(display_tag, color_id < 0 ? vpColor::none : vpColor::getColor(color_id), thickness);
    detector.setZAlignedWithCameraAxis(align_frame);

    std::vector<double> time_vec;

    std::vector<std::vector<vpImagePoint> > tag_corners;

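    // Acquisition / detection / display loop; a click in either window stops it.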
    for (;;) {
      double t = vpTime::measureTimeMs();

      g.acquire(&I_left, nullptr, nullptr);

      // Rectify the raw fisheye image using the precomputed maps
      vpImageTools::undistort(I_left, mapU, mapV, mapDu, mapDv, I_undist);

      vpDisplay::display(I_left);
      vpDisplay::display(I_undist);

      // Detect the tags and estimate their pose in the undistorted image
      std::vector<vpHomogeneousMatrix> cMo_vec;
      detector.detect(I_undist, tagSize, cam_undistort, cMo_vec);

      // Display tag corners, bounding box and pose
      for (size_t i = 0; i < cMo_vec.size(); i++) {
        tag_corners = detector.getTagsCorners();
        for (size_t j = 0; j < 4; j++) {
          vpDisplay::displayCross(I_undist, tag_corners[i][j], 20, vpColor::green, 2);
        }

        vpDisplay::displayRectangle(I_undist, detector.getBBox(i), vpColor::yellow, false, 3);
        vpDisplay::displayFrame(I_undist, cMo_vec[i], cam_undistort, tagSize / 2, vpColor::red, 3);
      }

      t = vpTime::measureTimeMs() - t;
      time_vec.push_back(t);

      std::stringstream ss;
      ss << "Detection time: " << t << " ms for " << detector.getNbObjects() << " tags";
      vpDisplay::displayText(I_left, 50, 20, ss.str(), vpColor::red);
      vpDisplay::displayText(I_undist, 50, 20, ss.str(), vpColor::red);

      if (vpDisplay::getClick(I_left, false) || vpDisplay::getClick(I_undist, false))
        break;

      vpDisplay::flush(I_left);
      vpDisplay::flush(I_undist);
    }

    std::cout << "Benchmark loop processing time" << std::endl;
    std::cout << "Mean / Median / Std: " << vpMath::getMean(time_vec) << " ms"
              << " ; " << vpMath::getMedian(time_vec) << " ms"
              << " ; " << vpMath::getStdev(time_vec) << " ms" << std::endl;

    if (!display_off) {
      delete display_left;
      delete display_undistort;
    }

  }
  catch (const vpException &e) {
    std::cerr << "Caught an exception: " << e.getMessage() << std::endl;
  }

  return EXIT_SUCCESS;
#else
  (void)argc;
  (void)argv;
#ifndef VISP_HAVE_APRILTAG
  std::cout << "Enable AprilTag support, then configure and build ViSP again to run this tutorial." << std::endl;
#elif defined(VISP_HAVE_REALSENSE2) && !(RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
  std::cout << "Realsense T265 device needs librealsense API > 2.31.0. ViSP is linked with librealsense API "
            << RS2_API_VERSION_STR << ". You need to upgrade librealsense to use this example." << std::endl;
#else
  std::cout << "Install the librealsense 3rd party library, then configure and build ViSP again to use this example." << std::endl;
#endif
#endif
  return EXIT_SUCCESS;
}