Visual Servoing Platform version 3.2.0 under development (2019-01-22)
servoPioneerPoint2DDepthWithoutVpServo.cpp
/****************************************************************************
 *
 * ViSP, open source Visual Servoing Platform software.
 * Copyright (C) 2005 - 2019 by Inria. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact Inria about acquiring a ViSP Professional
 * Edition License.
 *
 * See http://visp.inria.fr for more information.
 *
 * This software was developed at:
 * Inria Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 *
 * If you have questions regarding the use of this file, please contact
 * Inria at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Description:
 * IBVS on Pioneer P3DX mobile platform
 *
 * Authors:
 * Fabien Spindler
 *
 *****************************************************************************/
#include <iostream>

#include <visp3/robot/vpRobotPioneer.h> // Include first to avoid build issues with Status, None, isfinite
#include <visp3/blob/vpDot2.h>
#include <visp3/core/vpCameraParameters.h>
#include <visp3/core/vpConfig.h>
#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/core/vpImage.h>
#include <visp3/core/vpImageConvert.h>
#include <visp3/core/vpVelocityTwistMatrix.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/sensor/vp1394CMUGrabber.h>
#include <visp3/sensor/vp1394TwoGrabber.h>
#include <visp3/sensor/vpOpenCVGrabber.h>
#include <visp3/sensor/vpV4l2Grabber.h>
#include <visp3/visual_features/vpFeatureBuilder.h>
#include <visp3/visual_features/vpFeatureDepth.h>
#include <visp3/visual_features/vpFeaturePoint.h>

#if defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_CMU1394) || \
    (VISP_HAVE_OPENCV_VERSION >= 0x020100)
#if defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)
#if defined(VISP_HAVE_PIONEER)
#define TEST_COULD_BE_ACHIEVED
#endif
#endif
#endif

#undef VISP_HAVE_OPENCV // To use a firewire camera
#undef VISP_HAVE_V4L2   // To use a firewire camera

#ifdef TEST_COULD_BE_ACHIEVED
int main(int argc, char **argv)
{
  try {
    vpImage<unsigned char> I; // Create a gray level image container
    double depth = 1.;
    double lambda = 0.6;
    double coef = 1. / 6.77; // Scale parameter used to estimate the depth Z
                             // of the blob from its surface

    vpRobotPioneer robot;
    ArArgumentParser parser(&argc, argv);
    parser.loadDefaultArguments();

    // ArRobotConnector connects to the robot, gets some initial data from it
    // such as type and name, and then loads parameter files for this robot.
    ArRobotConnector robotConnector(&parser, &robot);
    if (!robotConnector.connectRobot()) {
      ArLog::log(ArLog::Terse, "Could not connect to the robot.");
      if (parser.checkHelpAndWarnUnparsed()) {
        Aria::logOptions();
        Aria::exit(1);
      }
    }
    if (!Aria::parseArgs()) {
      Aria::logOptions();
      Aria::shutdown();
      return EXIT_FAILURE;
    }

    // Wait 3 sec to be sure that the low level Aria thread used to control
    // the robot is started. Without this delay we experienced a latency
    // (around 2.2 sec) between the velocity sent to the robot and the
    // velocity that is really applied to the wheels.
    vpTime::sleepMs(3000);

    std::cout << "Robot connected" << std::endl;

    // Camera parameters. In this experiment we don't need a precise
    // calibration of the camera
    vpCameraParameters cam;

// Create the camera framegrabber
#if defined(VISP_HAVE_OPENCV)
  int device = 1;
  std::cout << "Use device: " << device << std::endl;
  cv::VideoCapture g(device); // open the camera device
  g.set(CV_CAP_PROP_FRAME_WIDTH, 640);
  g.set(CV_CAP_PROP_FRAME_HEIGHT, 480);
  if (!g.isOpened()) // check if we succeeded
    return -1;
  cv::Mat frame;
  g >> frame; // get a new frame from camera
  vpImageConvert::convert(frame, I);

  // Logitech Sphere parameters
  cam.initPersProjWithoutDistortion(558, 555, 312, 210);
#elif defined(VISP_HAVE_V4L2)
  // Create a grabber based on v4l2 third party lib (for usb cameras under
  // Linux)
  vpV4l2Grabber g;
  g.setScale(1);
  g.setInput(0);
  g.setDevice("/dev/video1");
  g.open(I);
  // Logitech Sphere parameters
  cam.initPersProjWithoutDistortion(558, 555, 312, 210);
#elif defined(VISP_HAVE_DC1394)
  // Create a grabber based on libdc1394-2.x third party lib (for firewire
  // cameras under Linux)
  vp1394TwoGrabber g(false);
  g.setVideoMode(vp1394TwoGrabber::vpVIDEO_MODE_640x480_MONO8); // 640x480 MONO8
  g.setFramerate(vp1394TwoGrabber::vpFRAMERATE_30);             // 30 Hz, as in the CMU branch below
  // AVT Pike 032C parameters
  cam.initPersProjWithoutDistortion(800, 795, 320, 216);
#elif defined(VISP_HAVE_CMU1394)
  // Create a grabber based on CMU 1394 third party lib (for firewire
  // cameras under windows)
  vp1394CMUGrabber g;
  g.setVideoMode(0, 5); // 640x480 MONO8
  g.setFramerate(4);    // 30 Hz
  g.open(I);
  // AVT Pike 032C parameters
  cam.initPersProjWithoutDistortion(800, 795, 320, 216);
#endif
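
  // Note: only one of the grabber branches above is compiled, depending on
  // which third-party libraries ViSP was built with. Every branch declares
  // the grabber as g, so the acquisition code below is shared by all of
  // them, except for OpenCV where the image is first grabbed into a cv::Mat
  // and then converted with vpImageConvert.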

// Acquire an image from the grabber
#if defined(VISP_HAVE_OPENCV)
  g >> frame; // get a new frame from camera
  vpImageConvert::convert(frame, I);
#else
  g.acquire(I);
#endif

// Create an image viewer
#if defined(VISP_HAVE_X11)
  vpDisplayX d(I, 10, 10, "Current frame");
#elif defined(VISP_HAVE_GDI)
  vpDisplayGDI d(I, 10, 10, "Current frame");
#endif
  vpDisplay::display(I);
  vpDisplay::flush(I);

  // Create a blob tracker
  vpDot2 dot;
  dot.setGraphics(true);
  dot.setComputeMoments(true);
  dot.setEllipsoidShapePrecision(0.); // to track a blob without any constraint on the shape
  dot.setGrayLevelPrecision(0.9);     // to set the blob gray level bounds for binarisation
  dot.setEllipsoidBadPointsPercentage(0.5); // to accept 50% of inner and
                                            // outer points with a bad
                                            // gray level
  dot.initTracking(I);
  vpDisplay::flush(I);
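
  // Note: vpDot2::initTracking(I) called just above is blocking; it waits
  // for a mouse click on the blob in the viewer to initialize the tracker
  // before the visual servo loop starts.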

  // Current and desired visual feature associated to the x coordinate of
  // the point
  vpFeaturePoint s_x, s_xd;

  // Create the current x visual feature
  vpFeatureBuilder::create(s_x, cam, dot);

  // Create the desired x* visual feature
  s_xd.buildFrom(0, 0, depth);
  // Interaction matrix of the x feature, computed at the desired feature so
  // that it remains constant
  vpMatrix L_x = s_xd.interaction(vpFeaturePoint::selectX());

  // Create the current log(Z/Z*) visual feature
  vpFeatureDepth s_Z;
  // Surface of the blob estimated from the image moment m00 and converted
  // to meters
  double surface = 1. / sqrt(dot.m00 / (cam.get_px() * cam.get_py()));
  double Z, Zd;
  // Initial depth of the blob in front of the camera
  Z = coef * surface;
  // Desired depth Z* of the blob. This depth is learned and equal to the
  // initial depth
  Zd = Z;
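
  // Note on this depth estimate: assuming the target keeps a fixed physical
  // size and stays roughly fronto-parallel, its area m00 (in pixel^2)
  // decreases as 1/Z^2. Dividing by px*py expresses that area in normalized
  // coordinates, so surface = 1/sqrt(m00/(px*py)) grows linearly with Z and
  // the depth can be recovered as Z = coef * surface, where coef (here
  // 1/6.77) depends on the target size and is typically identified
  // beforehand from a measurement at a known depth.
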
  s_Z.buildFrom(s_x.get_x(), s_x.get_y(), Z,
                0); // log(Z/Z*) = 0, that's why the last parameter is 0
  vpMatrix L_Z = s_Z.interaction();

  vpVelocityTwistMatrix cVe = robot.get_cVe();
  vpMatrix eJe; // Pioneer Jacobian
  robot.get_eJe(eJe);

  vpMatrix L;   // Interaction matrix
  L.stack(L_x); // constant since built from the desired feature
  L.stack(L_Z); // not constant since it corresponds to log(Z/Z*) that
                // evolves at each iteration

  vpColVector v; // vz, wx

  vpFeatureDepth s_Zd;
  s_Zd.buildFrom(0, 0, 1, 0); // The value of s* is 0 with Z=1 meter.
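
  // In effect the task stacks two features: s = (x, log(Z/Z*)) with desired
  // value s* = (0, 0). Driving the error to zero centers the blob along the
  // image x axis and brings its depth back to the learned value Z*. The
  // desired depth feature s_Zd is built with Z = 1 m only to provide a valid
  // depth; only its value s* = 0 is used in the error computed below.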

  while (1) {
// Acquire a new image
#if defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100)
    g >> frame; // get a new frame from camera
    vpImageConvert::convert(frame, I);
#else
    g.acquire(I);
#endif
    // Set the image as background of the viewer
    vpDisplay::display(I);

    // Track the blob
    dot.track(I);
    // Update the current x feature
    vpFeatureBuilder::create(s_x, cam, dot);

    // Update the log(Z/Z*) feature. Since the depth Z changes, we need to
    // update the interaction matrix
    surface = 1. / sqrt(dot.m00 / (cam.get_px() * cam.get_py()));
    Z = coef * surface;
    s_Z.buildFrom(s_x.get_x(), s_x.get_y(), Z, log(Z / Zd));
    L_Z = s_Z.interaction();

    // Update the global interaction matrix
    vpMatrix L;
    L.stack(L_x); // constant since built from the desired feature
    L.stack(L_Z); // not constant since it corresponds to log(Z/Z*) that
                  // evolves at each iteration

    // Update the global error s-s*
    vpColVector error;
    error.stack(s_x.error(s_xd, vpFeaturePoint::selectX()));
    error.stack(s_Z.error(s_Zd));

    // Compute the control law. Velocities are computed in the mobile robot
    // reference frame
    v = -lambda * (L * cVe * eJe).pseudoInverse() * error;
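    // What this line computes, written out: the classical IBVS control law
    //   v = -lambda * (L * cVe * eJe)^{+} * (s - s*)
    // where L (2x6) stacks the interaction matrices of x and log(Z/Z*),
    // cVe (6x6) is the constant velocity twist transformation from the
    // mobile-base frame to the camera frame, and eJe (6x2) is the Pioneer
    // (unicycle) robot Jacobian. The product is therefore 2x2 and v contains
    // the translational and rotational velocities sent to the robot below.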

    std::cout << "Send velocity to the Pioneer: " << v[0] << " m/s " << vpMath::deg(v[1]) << " deg/s" << std::endl;

    // Send the velocity to the robot
    robot.setVelocity(vpRobot::REFERENCE_FRAME, v);

    // Draw a vertical line which corresponds to the desired x coordinate of
    // the dot cog
    vpDisplay::displayLine(I, 0, 320, 479, 320, vpColor::red);
    vpDisplay::flush(I);

    // A click in the viewer to exit
    if (vpDisplay::getClick(I, false))
      break;
  }

    std::cout << "Ending robot thread..." << std::endl;
    robot.stopRunning();

    // wait for the thread to stop
    robot.waitForRunExit();
    return EXIT_SUCCESS;
  } catch (const vpException &e) {
    std::cout << "Caught an exception: " << e << std::endl;
    return EXIT_FAILURE;
  }
}
#else
int main()
{
  std::cout << "You don't have the right 3rd party libraries to run this example..." << std::endl;
  return EXIT_SUCCESS;
}
#endif