#include <visp3/core/vpConfig.h>
#ifdef VISP_HAVE_MODULE_SENSOR
#include <visp3/sensor/vpV4l2Grabber.h>
#include <visp3/sensor/vp1394CMUGrabber.h>
#include <visp3/sensor/vp1394TwoGrabber.h>
#include <visp3/sensor/vpFlyCaptureGrabber.h>
#include <visp3/sensor/vpRealSense2.h>
#endif
#include <visp3/detection/vpDetectorAprilTag.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/core/vpImageConvert.h>
#include <visp3/core/vpMath.h>
#include <visp3/core/vpXmlParserCamera.h>

#if defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100)
#include <opencv2/highgui/highgui.hpp> // cv::VideoCapture fallback grabber
#endif

int main(int argc, const char **argv)
{
#if defined(VISP_HAVE_APRILTAG) && \
    (defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_CMU1394) || \
     (VISP_HAVE_OPENCV_VERSION >= 0x020100) || defined(VISP_HAVE_FLYCAPTURE) || defined(VISP_HAVE_REALSENSE2))

  int opt_device = 0; // camera device index (V4L2 and OpenCV grabbers)
  vpDetectorAprilTag::vpAprilTagFamily tagFamily = vpDetectorAprilTag::TAG_36h11;             // 0 (see --tag_family)
  vpDetectorAprilTag::vpPoseEstimationMethod poseEstimationMethod = vpDetectorAprilTag::HOMOGRAPHY; // 0 (see --pose_method)
  double tagSize = 0.053; // tag side length in meters
  float quad_decimate = 1.0;
  int nThreads = 1;
  std::string intrinsic_file = "";
  std::string camera_name = "";
  bool display_tag = false;
  int color_id = -1;
  unsigned int thickness = 2;
  bool align_frame = false;

#if !(defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI) || defined(VISP_HAVE_OPENCV))
  bool display_off = true;
  std::cout << "Warning: There is no 3rd party (X11, GDI or OpenCV) to display images..." << std::endl;
#else
  bool display_off = false;
#endif

  vpImage<unsigned char> I;
  for (int i = 1; i < argc; i++) {
    if (std::string(argv[i]) == "--pose_method" && i + 1 < argc) {
      poseEstimationMethod = (vpDetectorAprilTag::vpPoseEstimationMethod)atoi(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--tag_size" && i + 1 < argc) {
      tagSize = atof(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--camera_device" && i + 1 < argc) {
      opt_device = atoi(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--quad_decimate" && i + 1 < argc) {
      quad_decimate = (float)atof(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--nthreads" && i + 1 < argc) {
      nThreads = atoi(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--intrinsic" && i + 1 < argc) {
      intrinsic_file = std::string(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--camera_name" && i + 1 < argc) {
      camera_name = std::string(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--display_tag") {
      display_tag = true;
    }
    else if (std::string(argv[i]) == "--display_off") {
      display_off = true;
    }
    else if (std::string(argv[i]) == "--color" && i + 1 < argc) {
      color_id = atoi(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--thickness" && i + 1 < argc) {
      thickness = (unsigned int)atoi(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--tag_family" && i + 1 < argc) {
      tagFamily = (vpDetectorAprilTag::vpAprilTagFamily)atoi(argv[i + 1]);
    }
    else if (std::string(argv[i]) == "--z_aligned") {
      align_frame = true;
    }
    else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
      std::cout << "Usage: " << argv[0]
                << " [--camera_device <camera device> (default: 0)]"
                << " [--tag_size <tag_size in m> (default: 0.053)]"
                   " [--quad_decimate <quad_decimate> (default: 1)]"
                   " [--nthreads <nb> (default: 1)]"
                   " [--intrinsic <intrinsic file> (default: empty)]"
                   " [--camera_name <camera name> (default: empty)]"
                   " [--pose_method <method> (0: HOMOGRAPHY, 1: HOMOGRAPHY_VIRTUAL_VS, "
                   " 2: DEMENTHON_VIRTUAL_VS, 3: LAGRANGE_VIRTUAL_VS, "
                   " 4: BEST_RESIDUAL_VIRTUAL_VS, 5: HOMOGRAPHY_ORTHOGONAL_ITERATION) (default: 0)]"
                   " [--tag_family <family> (0: TAG_36h11, 1: TAG_36h10 (DEPRECATED), 2: TAG_36ARTOOLKIT (DEPRECATED),"
                   " 3: TAG_25h9, 4: TAG_25h7 (DEPRECATED), 5: TAG_16h5, 6: TAG_CIRCLE21h7, 7: TAG_CIRCLE49h12,"
                   " 8: TAG_CUSTOM48h12, 9: TAG_STANDARD41h12, 10: TAG_STANDARD52h13) (default: 0)]"
                   " [--display_tag] [--z_aligned]";
#if (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI) || defined(VISP_HAVE_OPENCV))
      std::cout << " [--display_off] [--color <color id>] [--thickness <line thickness>]";
#endif
      std::cout << " [--help]" << std::endl;
      return EXIT_SUCCESS;
    }
  }

  try {
    // Default intrinsics, used when no XML file is given and the grabber cannot provide them.
    // The numerical values below are illustrative placeholders, not calibration results.
    vpCameraParameters cam;
    cam.initPersProjWithoutDistortion(600, 600, 320, 240);
#ifdef VISP_HAVE_PUGIXML
    vpXmlParserCamera parser;
    if (!intrinsic_file.empty() && !camera_name.empty())
      parser.parse(cam, intrinsic_file, camera_name, vpCameraParameters::perspectiveProjWithoutDistortion);
#endif

#if defined(VISP_HAVE_V4L2)
    vpV4l2Grabber g;
    std::ostringstream device;
    device << "/dev/video" << opt_device;
    std::cout << "Use Video 4 Linux grabber on device " << device.str() << std::endl;
    g.setDevice(device.str());
    g.setScale(1);
    g.open(I);
#elif defined(VISP_HAVE_DC1394)
    (void)opt_device; // unused with this grabber
    std::cout << "Use DC1394 grabber" << std::endl;
    vp1394TwoGrabber g;
    g.open(I);
#elif defined(VISP_HAVE_CMU1394)
    (void)opt_device; // unused with this grabber
    std::cout << "Use CMU1394 grabber" << std::endl;
    vp1394CMUGrabber g;
    g.open(I);
#elif defined(VISP_HAVE_FLYCAPTURE)
    (void)opt_device; // unused with this grabber
    std::cout << "Use FlyCapture grabber" << std::endl;
    vpFlyCaptureGrabber g;
    g.open(I);
#elif defined(VISP_HAVE_REALSENSE2)
    (void)opt_device; // unused with this grabber
    std::cout << "Use Realsense 2 grabber" << std::endl;
    vpRealSense2 g;
    rs2::config config;
    config.disable_stream(RS2_STREAM_DEPTH);
    config.disable_stream(RS2_STREAM_INFRARED);
    config.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30);
    g.open(config);
    g.acquire(I);

    std::cout << "Read camera parameters from Realsense device" << std::endl;
    cam = g.getCameraParameters(RS2_STREAM_COLOR, vpCameraParameters::perspectiveProjWithoutDistortion);
#elif defined(VISP_HAVE_OPENCV)
    std::cout << "Use OpenCV grabber on device " << opt_device << std::endl;
    cv::VideoCapture g(opt_device); // open the camera device
    if (!g.isOpened()) {            // check if we succeeded
      std::cout << "Failed to open the camera" << std::endl;
      return EXIT_FAILURE;
    }
    cv::Mat frame;
    g >> frame; // get a first frame from the camera
    vpImageConvert::convert(frame, I);
#endif

    std::cout << cam << std::endl;
    std::cout << "poseEstimationMethod: " << poseEstimationMethod << std::endl;
    std::cout << "tagFamily: " << tagFamily << std::endl;
    std::cout << "nThreads : " << nThreads << std::endl;
    std::cout << "Z aligned: " << align_frame << std::endl;

    vpDisplay *d = NULL;
    if (!display_off) {
#ifdef VISP_HAVE_X11
      d = new vpDisplayX(I);
#elif defined(VISP_HAVE_GDI)
      d = new vpDisplayGDI(I);
#elif defined(VISP_HAVE_OPENCV)
      d = new vpDisplayOpenCV(I);
#endif
    }

    vpDetectorAprilTag detector(tagFamily);

    detector.setAprilTagQuadDecimate(quad_decimate);
    detector.setAprilTagPoseEstimationMethod(poseEstimationMethod);
    detector.setAprilTagNbThreads(nThreads);
    detector.setDisplayTag(display_tag, color_id < 0 ? vpColor::none : vpColor::getColor(color_id), thickness);
    detector.setZAlignedWithCameraAxis(align_frame);

    std::vector<double> time_vec;
    for (;;) {
#if defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_CMU1394) || \
    defined(VISP_HAVE_FLYCAPTURE) || defined(VISP_HAVE_REALSENSE2)
      g.acquire(I);
#elif defined(VISP_HAVE_OPENCV)
      g >> frame;
      vpImageConvert::convert(frame, I);
#endif

      vpDisplay::display(I);

      double t = vpTime::measureTimeMs();
      std::vector<vpHomogeneousMatrix> cMo_vec;
      detector.detect(I, tagSize, cam, cMo_vec); // detect the tags and compute one pose per tag
      t = vpTime::measureTimeMs() - t;
      time_vec.push_back(t);

      std::stringstream ss;
      ss << "Detection time: " << t << " ms for " << detector.getNbObjects() << " tags";
      vpDisplay::displayText(I, 40, 20, ss.str(), vpColor::red); // overlay position is illustrative

      // Draw the estimated pose of each detected tag
      for (size_t i = 0; i < cMo_vec.size(); i++) {
        vpDisplay::displayFrame(I, cMo_vec[i], cam, tagSize / 2, vpColor::none, 3); // frame size and thickness are illustrative
      }

      vpDisplay::displayText(I, 20, 20, "Click to quit.", vpColor::red);
      vpDisplay::flush(I);
      if (vpDisplay::getClick(I, false))
        break;
    }

    std::cout << "Benchmark computation time" << std::endl;
    std::cout << "Mean / Median / Std: " << vpMath::getMean(time_vec) << " ms"
              << " ; " << vpMath::getMedian(time_vec) << " ms"
              << " ; " << vpMath::getStdev(time_vec) << " ms" << std::endl;

    if (!display_off)
      delete d;
  }
  catch (const vpException &e) {
    std::cerr << "Caught an exception: " << e.getMessage() << std::endl;
  }

  return EXIT_SUCCESS;
#else
  (void)argc;
  (void)argv;
#ifndef VISP_HAVE_APRILTAG
  std::cout << "Enable AprilTag support, configure and build ViSP to run this tutorial" << std::endl;
#else
  std::cout << "Install a 3rd party dedicated to frame grabbing (dc1394, cmu1394, v4l2, OpenCV, FlyCapture, "
               "Realsense2), configure and build ViSP again to use this example" << std::endl;
#endif
  return EXIT_SUCCESS;
#endif
}