/*
 * Visual Servoing Platform, version 3.2.0 under development (2019-01-22)
 * tutorial-pose-from-points-live.cpp
 */
#include <visp3/core/vpConfig.h>
#ifdef VISP_HAVE_MODULE_SENSOR
#include <visp3/sensor/vp1394CMUGrabber.h>
#include <visp3/sensor/vp1394TwoGrabber.h>
#include <visp3/sensor/vpFlyCaptureGrabber.h>
#include <visp3/sensor/vpRealSense2.h>
#include <visp3/sensor/vpV4l2Grabber.h>
#endif
#include <cstdlib>
#include <iomanip>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>
#include <visp3/core/vpXmlParserCamera.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>

#include "pose_helper.h"

// Comment / uncomment following lines to use the specific 3rd party compatible with your camera
//#undef VISP_HAVE_V4L2
//#undef VISP_HAVE_DC1394
//#undef VISP_HAVE_CMU1394
//#undef VISP_HAVE_FLYCAPTURE
//#undef VISP_HAVE_REALSENSE2
//#undef VISP_HAVE_OPENCV

/*!
  Tutorial entry point: estimate the pose of a square object from the four
  blob centers tracked in live images.

  Opens the first available frame grabber (V4L2, DC1394, CMU1394, FlyCapture,
  RealSense2 or OpenCV), tracks four dots with the helpers from pose_helper.h
  (track(), computePose()) and overlays the estimated pose on the display.

  \param argc, argv : command line options --camera_device, --intrinsic,
  --camera_name, --square_width, --help / -h (see usage below).
  \return 0 on success or after --help, -1 when the OpenCV camera cannot be
  opened.
*/
int main(int argc, char **argv)
{
#if (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI) || defined(VISP_HAVE_OPENCV)) && \
    (defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_CMU1394) || (VISP_HAVE_OPENCV_VERSION >= 0x020100) || defined(VISP_HAVE_FLYCAPTURE) || defined(VISP_HAVE_REALSENSE2) )
  try {
    std::string opt_intrinsic_file; // xml file obtained from camera calibration
    std::string opt_camera_name;    // corresponding camera name in the xml calibration file
    double opt_square_width = 0.12; // side length of the tracked square, in [m]
    int opt_device = 0;             // For OpenCV and V4l2 grabber to set the camera device

    for (int i = 0; i < argc; i++) {
      if (std::string(argv[i]) == "--intrinsic" && i + 1 < argc) {
        opt_intrinsic_file = std::string(argv[i + 1]);
      } else if (std::string(argv[i]) == "--camera_name" && i + 1 < argc) {
        opt_camera_name = std::string(argv[i + 1]);
      } else if (std::string(argv[i]) == "--camera_device" && i + 1 < argc) {
        opt_device = std::atoi(argv[i + 1]);
      } else if (std::string(argv[i]) == "--square_width" && i + 1 < argc) {
        // Bug fix: --square_width was documented in the usage but never parsed
        opt_square_width = std::atof(argv[i + 1]);
      } else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
        std::cout << "\nUsage: " << argv[0]
                  << " [--camera_device <camera device> (default: 0)]"
                  << " [--intrinsic <xml calibration file> (default: empty)]"
                     " [--camera_name <camera name in xml calibration file> (default: empty)]"
                     " [--square_width <square width in meter (default: 0.12)] [--help] [-h]\n"
                  << "\nExample using default camera parameters and square size:\n"
                  << " " << argv[0] << "\n"
                  << "\nExample fully tuned for a 0.1m x 0.1m square:\n"
                  << " " << argv[0] << " --intrinsic camera.xml --camera_name Camera --square_width 0.1\n"
                  << std::endl;
        return 0;
      }
    }

    // Grey-level image filled by the grabber and used for tracking and display
    vpImage<unsigned char> I;

    // Open the first available frame grabber. Every branch acquires a first
    // frame into I so that the image has its final dimensions before the
    // camera parameters are initialized below (the original code built `cam`
    // before the grabber was opened, while I was still 0 x 0).
#if defined(VISP_HAVE_V4L2)
    vpV4l2Grabber g;
    std::ostringstream device;
    device << "/dev/video" << opt_device;
    std::cout << "Use Video 4 Linux grabber on device " << device.str() << std::endl;
    g.setDevice(device.str());
    g.setScale(1);
    g.open(I);
#elif defined(VISP_HAVE_DC1394)
    (void)opt_device; // To avoid non used warning
    std::cout << "Use DC1394 grabber" << std::endl;
    vp1394TwoGrabber g;
    g.open(I);
#elif defined(VISP_HAVE_CMU1394)
    (void)opt_device; // To avoid non used warning
    std::cout << "Use CMU1394 grabber" << std::endl;
    vp1394CMUGrabber g;
    g.open(I);
#elif defined(VISP_HAVE_FLYCAPTURE)
    (void)opt_device; // To avoid non used warning
    std::cout << "Use FlyCapture grabber" << std::endl;
    vpFlyCaptureGrabber g;
    g.open(I);
#elif defined(VISP_HAVE_REALSENSE2)
    (void)opt_device; // To avoid non used warning
    std::cout << "Use Realsense 2 grabber" << std::endl;
    vpRealSense2 g;
    rs2::config config;
    config.disable_stream(RS2_STREAM_DEPTH);
    config.disable_stream(RS2_STREAM_INFRARED);
    config.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30);
    g.open(config);
    g.acquire(I);
#elif defined(VISP_HAVE_OPENCV)
    std::cout << "Use OpenCV grabber on device " << opt_device << std::endl;
    cv::VideoCapture g(opt_device); // Open the default camera
    if (!g.isOpened()) {            // Check if we succeeded
      std::cout << "Failed to open the camera" << std::endl;
      return -1;
    }
    cv::Mat frame;
    g >> frame; // get a new frame from camera
    vpImageConvert::convert(frame, I);
#endif

    // Parameters of our camera: sensible defaults with the principal point at
    // the image center, possibly overridden below.
    vpCameraParameters cam(840, 840, I.getWidth() / 2, I.getHeight() / 2); // Default parameters
#if defined(VISP_HAVE_REALSENSE2)
    std::cout << "Read camera parameters from Realsense device" << std::endl;
    cam = g.getCameraParameters(RS2_STREAM_COLOR, vpCameraParameters::perspectiveProjWithoutDistortion);
#endif
#ifdef VISP_HAVE_XML2
    // Intrinsics given on the command line take precedence over the defaults
    vpXmlParserCamera parser;
    if (!opt_intrinsic_file.empty() && !opt_camera_name.empty()) {
      std::cout << "Intrinsic file: " << opt_intrinsic_file << std::endl;
      std::cout << "Camera name : " << opt_camera_name << std::endl;
      if (parser.parse(cam, opt_intrinsic_file, opt_camera_name, vpCameraParameters::perspectiveProjWithDistortion) == vpXmlParserCamera::SEQUENCE_OK) {
        std::cout << "Succeed to read camera parameters from xml file" << std::endl;
      } else {
        std::cout << "Unable to read camera parameters from xml file" << std::endl;
      }
    }
#endif

    std::cout << "Square width : " << opt_square_width << std::endl;
    std::cout << cam << std::endl;

    // The pose container, updated each time the computer vision succeeds
    vpHomogeneousMatrix cMo;

    std::vector<vpDot2> dot(4);   // one blob tracker per square corner
    std::vector<vpPoint> point;   // 3D coordinates of the points
    std::vector<vpImagePoint> ip; // 2D coordinates of the points in pixels
    double L = opt_square_width / 2.;
    // Square corners in the object frame, z = 0 plane
    point.push_back(vpPoint(-L, -L, 0));
    point.push_back(vpPoint( L, -L, 0));
    point.push_back(vpPoint( L,  L, 0));
    point.push_back(vpPoint(-L,  L, 0));

#if defined(VISP_HAVE_X11)
    vpDisplayX d(I);
#elif defined(VISP_HAVE_GDI)
    vpDisplayGDI d(I);
#elif defined(VISP_HAVE_OPENCV)
    vpDisplayOpenCV d(I);
#endif

    bool quit = false;
    bool apply_cv = false; // apply computer vision
    bool init_cv = true;   // initialize tracking and pose computation

    while (!quit) {
      double t_begin = vpTime::measureTimeMs();
      // Image Acquisition
#if defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_CMU1394) || defined(VISP_HAVE_FLYCAPTURE) || defined(VISP_HAVE_REALSENSE2)
      g.acquire(I);
#elif defined(VISP_HAVE_OPENCV)
      g >> frame;
      vpImageConvert::convert(frame, I);
#endif
      vpDisplay::display(I); // erase previous overlays before drawing new ones
      if (apply_cv) {
        try {
          ip = track(I, dot, init_cv);
          computePose(point, ip, cam, init_cv, cMo);
          vpDisplay::displayFrame(I, cMo, cam, opt_square_width, vpColor::none, 3);
          if (init_cv)
            init_cv = false; // turn off the computer vision initialisation specific stuff

          { // Display estimated pose in [m] and [deg]
            vpPoseVector pose(cMo);
            std::stringstream ss;
            ss << "Translation: " << std::setprecision(5) << pose[0] << " " << pose[1] << " " << pose[2] << " [m]";
            vpDisplay::displayText(I, 60, 20, ss.str(), vpColor::red);
            ss.str(""); // erase ss
            ss << "Rotation tu: " << std::setprecision(4) << vpMath::deg(pose[3]) << " " << vpMath::deg(pose[4]) << " " << vpMath::deg(pose[5]) << " [deg]";
            vpDisplay::displayText(I, 80, 20, ss.str(), vpColor::red);
          }
        } catch (...) { // tracking lost: restart from a fresh initialisation
          std::cout << "Computer vision failure." << std::endl;
          apply_cv = false;
          init_cv = true;
        }
      }
      vpDisplay::displayText(I, 20, 20, "Right click: quit", vpColor::red);
      if (apply_cv) {
        vpDisplay::displayText(I, 40, 20, "Computer vision in progress...", vpColor::red);
      } else {
        vpDisplay::displayText(I, 40, 20, "Left click : start", vpColor::red);
      }
      vpMouseButton::vpMouseButtonType button;
      if (vpDisplay::getClick(I, button, false)) { // non-blocking click check
        if (button == vpMouseButton::button3) {
          quit = true;
        } else if (button == vpMouseButton::button1) {
          apply_cv = true;
        }
      }
      { // Show the loop duration in the top-right corner
        std::stringstream ss;
        ss << "Time: " << vpTime::measureTimeMs() - t_begin << " ms";
        vpDisplay::displayText(I, 20, I.getWidth() - 100, ss.str(), vpColor::red);
      }
      vpDisplay::flush(I);
    }
  } catch (const vpException &e) {
    std::cout << "Catch an exception: " << e.getMessage() << std::endl;
  }
#elif (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI) || defined(VISP_HAVE_OPENCV))
  (void)argc;
  (void)argv;
  std::cout << "Install a 3rd party dedicated to frame grabbing (dc1394, cmu1394, v4l2, OpenCV, FlyCapture, Realsense2), configure and build ViSP again to use this example" << std::endl;
#else
  (void)argc;
  (void)argv;
  std::cout << "Install a 3rd party dedicated to image display (X11, GDI, OpenCV), configure and build ViSP again to use this example" << std::endl;
#endif
  return 0;
}
/* ---------------------------------------------------------------------------
   Doxygen cross-reference residue (signatures and brief descriptions of the
   ViSP entities used by this tutorial). Not part of the compilable source;
   kept below for reference only.

   static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
   void open(vpImage< unsigned char > &I)
   unsigned int getWidth() const
   Definition: vpImage.h:239
   static void convert(const vpImage< unsigned char > &src, vpImage< vpRGBa > &dest)
   Implementation of an homogeneous matrix and operations on such kind of matrices.
   void open(vpImage< unsigned char > &I)
   Display for windows using GDI (available on any windows 32 platform).
   Definition: vpDisplayGDI.h:129
   static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
   Use the X11 console to display images on unix-like OS. Thus to enable this class X11 should be installed.
   Definition: vpDisplayX.h:151
   void setDevice(const std::string &devname)
   static const vpColor none
   Definition: vpColor.h:192
   error that can be emited by ViSP classes.
   Definition: vpException.h:71
   void open(const rs2::config &cfg=rs2::config())
   XML parser to load and save intrinsic camera parameters.
   static void flush(const vpImage< unsigned char > &I)
   vpCameraParameters getCameraParameters(const rs2_stream &stream, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithDistortion) const
   VISP_EXPORT double measureTimeMs()
   Definition: vpTime.cpp:88
   void open(vpImage< unsigned char > &I)
   static const vpColor red
   Definition: vpColor.h:180
   Class that defines what is a point.
   Definition: vpPoint.h:58
   void open(vpImage< unsigned char > &I)
   Firewire cameras video capture based on CMU 1394 Digital Camera SDK.
   static void display(const vpImage< unsigned char > &I)
   The vpDisplayOpenCV allows to display image using the OpenCV library. Thus to enable this class OpenCV should be installed.
   Generic class defining intrinsic camera parameters.
   const char * getMessage(void) const
   Definition: vpException.cpp:90
   void acquire(vpImage< unsigned char > &grey)
   void setScale(unsigned scale=vpV4l2Grabber::DEFAULT_SCALE)
   Class that is a wrapper over the Video4Linux2 (V4L2) driver.
   static double deg(double rad)
   Definition: vpMath.h:95
   static void displayFrame(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, double size, const vpColor &color=vpColor::none, unsigned int thickness=1, const vpImagePoint &offset=vpImagePoint(0, 0))
   Implementation of a pose vector and operations on poses.
   Definition: vpPoseVector.h:92
   unsigned int getHeight() const
   Definition: vpImage.h:178
   Class for firewire ieee1394 video devices using libdc1394-2.x api.
--------------------------------------------------------------------------- */