Visual Servoing Platform  version 3.5.1 under development (2022-08-10)
tutorial-pose-from-points-live.cpp
#include <visp3/core/vpConfig.h>
#ifdef VISP_HAVE_MODULE_SENSOR
#include <visp3/sensor/vp1394CMUGrabber.h>
#include <visp3/sensor/vp1394TwoGrabber.h>
#include <visp3/sensor/vpFlyCaptureGrabber.h>
#include <visp3/sensor/vpRealSense2.h>
#include <visp3/sensor/vpV4l2Grabber.h>
#endif
#include <visp3/core/vpXmlParserCamera.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>

#include "pose_helper.h"

// Comment / uncomment the following lines to select the 3rd party library compatible with your camera
//#undef VISP_HAVE_V4L2
//#undef VISP_HAVE_DC1394
//#undef VISP_HAVE_CMU1394
//#undef VISP_HAVE_FLYCAPTURE
//#undef VISP_HAVE_REALSENSE2
//#undef VISP_HAVE_OPENCV

int main(int argc, char **argv)
{
#if (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI) || defined(VISP_HAVE_OPENCV)) && \
    (defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_CMU1394) || \
     (VISP_HAVE_OPENCV_VERSION >= 0x020100) || defined(VISP_HAVE_FLYCAPTURE) || defined(VISP_HAVE_REALSENSE2))
  try {
    std::string opt_intrinsic_file; // xml file obtained from camera calibration
    std::string opt_camera_name;    // corresponding camera name in the xml calibration file
    double opt_square_width = 0.12;
    int opt_device = 0; // For OpenCV and V4l2 grabber to set the camera device

    for (int i = 0; i < argc; i++) {
      if (std::string(argv[i]) == "--intrinsic" && i + 1 < argc) {
        opt_intrinsic_file = std::string(argv[i + 1]);
      } else if (std::string(argv[i]) == "--camera_name" && i + 1 < argc) {
        opt_camera_name = std::string(argv[i + 1]);
      } else if (std::string(argv[i]) == "--camera_device" && i + 1 < argc) {
        opt_device = atoi(argv[i + 1]);
      } else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
        std::cout << "\nUsage: " << argv[0] << " [--camera_device <camera device> (default: 0)]"
                  << " [--intrinsic <xml calibration file> (default: empty)]"
                     " [--camera_name <camera name in xml calibration file> (default: empty)]"
                     " [--square_width <square width in meter> (default: 0.12)] [--help] [-h]\n"
                  << "\nExample using default camera parameters and square size:\n"
                  << "  " << argv[0] << "\n"
                  << "\nExample fully tuned for a 0.1m x 0.1m square:\n"
                  << "  " << argv[0] << " --intrinsic camera.xml --camera_name Camera --square_width 0.1\n"
                  << std::endl;
        return 0;
      }
    }

    vpImage<unsigned char> I; // Grey level image used for acquisition and display

    // Parameters of our camera
    vpCameraParameters cam(840, 840, I.getWidth() / 2, I.getHeight() / 2); // Default parameters
    vpXmlParserCamera parser;
    if (!opt_intrinsic_file.empty() && !opt_camera_name.empty()) {
      std::cout << "Intrinsic file: " << opt_intrinsic_file << std::endl;
      std::cout << "Camera name   : " << opt_camera_name << std::endl;
      if (parser.parse(cam, opt_intrinsic_file, opt_camera_name, vpCameraParameters::perspectiveProjWithDistortion) ==
          vpXmlParserCamera::SEQUENCE_OK) {
        std::cout << "Successfully read camera parameters from the xml file" << std::endl;
      } else {
        std::cout << "Unable to read camera parameters from the xml file" << std::endl;
      }
    }

#if defined(VISP_HAVE_V4L2)
    vpV4l2Grabber g;
    std::ostringstream device;
    device << "/dev/video" << opt_device;
    std::cout << "Use Video 4 Linux grabber on device " << device.str() << std::endl;
    g.setDevice(device.str());
    g.setScale(1);
    g.open(I);
#elif defined(VISP_HAVE_DC1394)
    (void)opt_device; // To avoid unused variable warning
    std::cout << "Use DC1394 grabber" << std::endl;
    vp1394TwoGrabber g;
    g.open(I);
#elif defined(VISP_HAVE_CMU1394)
    (void)opt_device; // To avoid unused variable warning
    std::cout << "Use CMU1394 grabber" << std::endl;
    vp1394CMUGrabber g;
    g.open(I);
#elif defined(VISP_HAVE_FLYCAPTURE)
    (void)opt_device; // To avoid unused variable warning
    std::cout << "Use FlyCapture grabber" << std::endl;
    vpFlyCaptureGrabber g;
    g.open(I);
#elif defined(VISP_HAVE_REALSENSE2)
    (void)opt_device; // To avoid unused variable warning
    std::cout << "Use Realsense 2 grabber" << std::endl;
    vpRealSense2 g;
    rs2::config config;
    config.disable_stream(RS2_STREAM_DEPTH);
    config.disable_stream(RS2_STREAM_INFRARED);
    config.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30);
    g.open(config);
    g.acquire(I);

    std::cout << "Read camera parameters from Realsense device" << std::endl;
    cam = g.getCameraParameters(RS2_STREAM_COLOR, vpCameraParameters::perspectiveProjWithoutDistortion);
#elif defined(VISP_HAVE_OPENCV)
    std::cout << "Use OpenCV grabber on device " << opt_device << std::endl;
    cv::VideoCapture g(opt_device); // Open the default camera
    if (!g.isOpened()) {            // Check if we succeeded
      std::cout << "Failed to open the camera" << std::endl;
      return -1;
    }
    cv::Mat frame;
    g >> frame; // get a new frame from camera
    vpImageConvert::convert(frame, I);
#endif

    std::cout << "Square width  : " << opt_square_width << std::endl;
    std::cout << cam << std::endl;

    // The pose container
    vpHomogeneousMatrix cMo;

    std::vector<vpDot2> dot(4);
    std::vector<vpPoint> point;   // 3D coordinates of the points
    std::vector<vpImagePoint> ip; // 2D coordinates of the points in pixels
    double L = opt_square_width / 2.;
    point.push_back(vpPoint(-L, -L, 0));
    point.push_back(vpPoint(L, -L, 0));
    point.push_back(vpPoint(L, L, 0));
    point.push_back(vpPoint(-L, L, 0));

#if defined(VISP_HAVE_X11)
    vpDisplayX d(I);
#elif defined(VISP_HAVE_GDI)
    vpDisplayGDI d(I);
#elif defined(VISP_HAVE_OPENCV)
    vpDisplayOpenCV d(I);
#endif

    bool quit = false;
    bool apply_cv = false; // apply computer vision
    bool init_cv = true;   // initialize tracking and pose computation

    while (!quit) {
      double t_begin = vpTime::measureTimeMs();
      // Image acquisition
#if defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_CMU1394) || \
    defined(VISP_HAVE_FLYCAPTURE) || defined(VISP_HAVE_REALSENSE2)
      g.acquire(I);
#elif defined(VISP_HAVE_OPENCV)
      g >> frame;
      vpImageConvert::convert(frame, I);
#endif
      vpDisplay::display(I);
      if (apply_cv) {
        try {
          ip = track(I, dot, init_cv);
          computePose(point, ip, cam, init_cv, cMo);
          vpDisplay::displayFrame(I, cMo, cam, opt_square_width, vpColor::none, 3);
          if (init_cv)
            init_cv = false; // turn off the tracking and pose initialization step

          { // Display estimated pose in [m] and [deg]
            vpPoseVector pose(cMo);
            std::stringstream ss;
            ss << "Translation: " << std::setprecision(5) << pose[0] << " " << pose[1] << " " << pose[2] << " [m]";
            vpDisplay::displayText(I, 60, 20, ss.str(), vpColor::red);
            ss.str(""); // erase ss
            ss << "Rotation tu: " << std::setprecision(4) << vpMath::deg(pose[3]) << " " << vpMath::deg(pose[4]) << " "
               << vpMath::deg(pose[5]) << " [deg]";
            vpDisplay::displayText(I, 80, 20, ss.str(), vpColor::red);
          }
        } catch (...) {
          std::cout << "Computer vision failure." << std::endl;
          apply_cv = false;
          init_cv = true;
        }
      }
      vpDisplay::displayText(I, 20, 20, "Right click: quit", vpColor::red);
      if (apply_cv) {
        vpDisplay::displayText(I, 40, 20, "Computer vision in progress...", vpColor::red);
      } else {
        vpDisplay::displayText(I, 40, 20, "Left click : start", vpColor::red);
      }
      vpMouseButton::vpMouseButtonType button;
      if (vpDisplay::getClick(I, button, false)) {
        if (button == vpMouseButton::button3) {
          quit = true;
        } else if (button == vpMouseButton::button1) {
          apply_cv = true;
        }
      }
      {
        std::stringstream ss;
        ss << "Time: " << vpTime::measureTimeMs() - t_begin << " ms";
        vpDisplay::displayText(I, 20, I.getWidth() - 100, ss.str(), vpColor::red);
      }
      vpDisplay::flush(I);
    }
  } catch (const vpException &e) {
    std::cout << "Caught an exception: " << e.getMessage() << std::endl;
  }
#elif (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI) || defined(VISP_HAVE_OPENCV))
  (void)argc;
  (void)argv;
  std::cout << "Install a 3rd party dedicated to frame grabbing (dc1394, cmu1394, v4l2, OpenCV, FlyCapture, "
               "Realsense2), configure and build ViSP again to use this example"
            << std::endl;
#else
  (void)argc;
  (void)argv;
  std::cout << "Install a 3rd party dedicated to image display (X11, GDI, OpenCV), configure and build ViSP again to "
               "use this example"
            << std::endl;
#endif
}
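
Note: the listing above relies on two helper functions, track() and computePose(), declared in "pose_helper.h", which belongs to the tutorial sources but is not reproduced on this page. As a rough sketch (the exact signatures in your ViSP tree may differ), the declarations presumably look like this:

#include <vector>

#include <visp3/blob/vpDot2.h>
#include <visp3/core/vpCameraParameters.h>
#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/core/vpImage.h>
#include <visp3/core/vpImagePoint.h>
#include <visp3/core/vpPoint.h>

// Track the four blobs and return their centers of gravity in pixels.
// When init is true, the user is asked to click on each blob to initialize the trackers.
std::vector<vpImagePoint> track(vpImage<unsigned char> &I, std::vector<vpDot2> &dot, bool init);

// Estimate the pose cMo of the square from its 3D model points and their 2D projections.
// When init is true, the pose is first initialized, then refined by a non-linear method.
void computePose(std::vector<vpPoint> &point, const std::vector<vpImagePoint> &ip,
                 const vpCameraParameters &cam, bool init, vpHomogeneousMatrix &cMo);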
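
For reference, a minimal computePose() can be sketched as follows, assuming the usual ViSP pose-estimation workflow: convert the pixel coordinates of the tracked blobs to normalized image coordinates, initialize the pose with a linear method (Dementhon or Lagrange, keeping the solution with the lowest residual), then refine it by virtual visual servoing. This is an illustrative sketch, not necessarily the exact code shipped in pose_helper.cpp:

#include <vector>

#include <visp3/core/vpPixelMeterConversion.h>
#include <visp3/vision/vpPose.h>

void computePose(std::vector<vpPoint> &point, const std::vector<vpImagePoint> &ip,
                 const vpCameraParameters &cam, bool init, vpHomogeneousMatrix &cMo)
{
  vpPose pose;
  double x = 0, y = 0;
  for (unsigned int i = 0; i < point.size(); i++) {
    // Convert the pixel coordinates of each blob center into normalized coordinates
    vpPixelMeterConversion::convertPoint(cam, ip[i], x, y);
    point[i].set_x(x);
    point[i].set_y(y);
    pose.addPoint(point[i]);
  }

  if (init) {
    // Linear initialization: keep the estimate with the smallest residual
    vpHomogeneousMatrix cMo_dem, cMo_lag;
    pose.computePose(vpPose::DEMENTHON, cMo_dem);
    pose.computePose(vpPose::LAGRANGE, cMo_lag);
    cMo = (pose.computeResidual(cMo_dem) < pose.computeResidual(cMo_lag)) ? cMo_dem : cMo_lag;
  }
  // Non-linear refinement by virtual visual servoing
  pose.computePose(vpPose::VIRTUAL_VS, cMo);
}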