Visual Servoing Platform  version 3.6.1 under development (2024-05-09)
servoPioneerPoint2DDepth.cpp
1 /****************************************************************************
2  *
3  * ViSP, open source Visual Servoing Platform software.
4  * Copyright (C) 2005 - 2023 by Inria. All rights reserved.
5  *
6  * This software is free software; you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License as published by
8  * the Free Software Foundation; either version 2 of the License, or
9  * (at your option) any later version.
10  * See the file LICENSE.txt at the root directory of this source
11  * distribution for additional information about the GNU GPL.
12  *
13  * For using ViSP with software that can not be combined with the GNU
14  * GPL, please contact Inria about acquiring a ViSP Professional
15  * Edition License.
16  *
17  * See https://visp.inria.fr for more information.
18  *
19  * This software was developed at:
20  * Inria Rennes - Bretagne Atlantique
21  * Campus Universitaire de Beaulieu
22  * 35042 Rennes Cedex
23  * France
24  *
25  * If you have questions regarding the use of this file, please contact
26  * Inria at visp@inria.fr
27  *
28  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
29  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
30  *
31  * Description:
32  * IBVS on Pioneer P3DX mobile platform
33  *
34 *****************************************************************************/
35 #include <iostream>
36 
37 #include <visp3/core/vpConfig.h>
38 
39 #include <visp3/blob/vpDot2.h>
40 #include <visp3/core/vpCameraParameters.h>
41 #include <visp3/core/vpHomogeneousMatrix.h>
42 #include <visp3/core/vpImage.h>
43 #include <visp3/core/vpImageConvert.h>
44 #include <visp3/core/vpVelocityTwistMatrix.h>
45 #include <visp3/gui/vpDisplayGDI.h>
46 #include <visp3/gui/vpDisplayX.h>
47 #include <visp3/robot/vpRobotPioneer.h> // Include first to avoid build issues with Status, None, isfinite
48 #include <visp3/sensor/vp1394CMUGrabber.h>
49 #include <visp3/sensor/vp1394TwoGrabber.h>
50 #include <visp3/sensor/vpV4l2Grabber.h>
51 #include <visp3/visual_features/vpFeatureBuilder.h>
52 #include <visp3/visual_features/vpFeatureDepth.h>
53 #include <visp3/visual_features/vpFeaturePoint.h>
54 #include <visp3/vs/vpServo.h>
55 
56 #if defined(HAVE_OPENCV_VIDEOIO)
57 #include <opencv2/videoio.hpp>
58 #endif
59 
60 #if defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_CMU1394) || defined(HAVE_OPENCV_VIDEOIO)
61 #if defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)
62 #if defined(VISP_HAVE_PIONEER)
63 #define TEST_COULD_BE_ACHIEVED
64 #endif
65 #endif
66 #endif
67 
68 #undef HAVE_OPENCV_VIDEOIO // To use a firewire camera
69 #undef VISP_HAVE_V4L2 // To use a firewire camera
70 
93 #ifdef TEST_COULD_BE_ACHIEVED
94 int main(int argc, char **argv)
95 {
96  try {
97  vpImage<unsigned char> I; // Create a gray level image container
98  double depth = 1.;
99  double lambda = 0.6;
100  double coef = 1. / 6.77; // Scale parameter used to estimate the depth Z
101  // of the blob from its surface
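// Note: the depth estimation below relies on the blob apparent area scaling
// as 1/Z^2. The area given by the moment m00 (in pixel^2) is converted to
// square meters by dividing by px*py, so 1/sqrt(m00/(px*py)) is proportional
// to the depth Z and the estimate used here is Z = coef / sqrt(m00/(px*py)),
// where coef (here 1/6.77) is identified experimentally for the tracked target.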
102 
103  vpRobotPioneer robot;
104  ArArgumentParser parser(&argc, argv);
105  parser.loadDefaultArguments();
106 
107  // ArRobotConnector connects to the robot, get some initial data from it
108  // such as type and name, and then loads parameter files for this robot.
109  ArRobotConnector robotConnector(&parser, &robot);
110  if (!robotConnector.connectRobot()) {
111  ArLog::log(ArLog::Terse, "Could not connect to the robot.");
112  if (parser.checkHelpAndWarnUnparsed()) {
113  Aria::logOptions();
114  Aria::exit(1);
115  }
116  }
117  if (!Aria::parseArgs()) {
118  Aria::logOptions();
119  Aria::shutdown();
120  return EXIT_FAILURE;
121  }
122 
123  // Wait 3 sec to be sure that the low-level Aria thread used to control
124  // the robot is started. Without this delay we experienced a lag
125  // (around 2.2 sec) between the velocity sent to the robot and the
126  // velocity actually applied to the wheels.
127  vpTime::sleepMs(3000);
128 
129  std::cout << "Robot connected" << std::endl;
130 
131  // Camera parameters. In this experiment we don't need a precise
132  // calibration of the camera
133  vpCameraParameters cam;
134 
135  // Create the camera framegrabber
136 #if defined(HAVE_OPENCV_VIDEOIO)
137  int device = 1;
138  std::cout << "Use device: " << device << std::endl;
139  cv::VideoCapture g(device); // open the camera identified by device
140  g.set(cv::CAP_PROP_FRAME_WIDTH, 640);
141  g.set(cv::CAP_PROP_FRAME_HEIGHT, 480);
142  if (!g.isOpened()) // check if we succeeded
143  return EXIT_FAILURE;
144  cv::Mat frame;
145  g >> frame; // get a new frame from camera
146  vpImageConvert::convert(frame, I);
147 
148  // Logitech Sphere parameters
149  cam.initPersProjWithoutDistortion(558, 555, 312, 210);
150 #elif defined(VISP_HAVE_V4L2)
151  // Create a grabber based on v4l2 third party lib (for usb cameras under
152  // Linux)
153  vpV4l2Grabber g;
154  g.setScale(1);
155  g.setInput(0);
156  g.setDevice("/dev/video1");
157  g.open(I);
158  // Logitech Sphere parameters
159  cam.initPersProjWithoutDistortion(558, 555, 312, 210);
160 #elif defined(VISP_HAVE_DC1394)
161  // Create a grabber based on libdc1394-2.x third party lib (for firewire
162  // cameras under Linux)
163  vp1394TwoGrabber g(false);
164  g.setVideoMode(vp1394TwoGrabber::vpVIDEO_MODE_640x480_MONO8);
165  g.setFramerate(vp1394TwoGrabber::vpFRAMERATE_30);
166  // AVT Pike 032C parameters
167  cam.initPersProjWithoutDistortion(800, 795, 320, 216);
168 #elif defined(VISP_HAVE_CMU1394)
169  // Create a grabber based on CMU 1394 third party lib (for firewire
170  // cameras under windows)
171  vp1394CMUGrabber g;
172  g.setVideoMode(0, 5); // 640x480 MONO8
173  g.setFramerate(4); // 30 Hz
174  g.open(I);
175  // AVT Pike 032C parameters
176  cam.initPersProjWithoutDistortion(800, 795, 320, 216);
177 #endif
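// Note: initPersProjWithoutDistortion(px, py, u0, v0) defines the ViSP pinhole
// model u = u0 + px * x, v = v0 + py * y, so pixel coordinates are converted
// to the normalized coordinates used by the visual features with
//   x = (u - u0) / px,  y = (v - v0) / py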
178 
179  // Acquire an image from the grabber
180 #if defined(HAVE_OPENCV_VIDEOIO)
181  g >> frame; // get a new frame from camera
182  vpImageConvert::convert(frame, I);
183 #else
184  g.acquire(I);
185 #endif
186 
187  // Create an image viewer
188 #if defined(VISP_HAVE_X11)
189  vpDisplayX d(I, 10, 10, "Current frame");
190 #elif defined(VISP_HAVE_GDI)
191  vpDisplayGDI d(I, 10, 10, "Current frame");
192 #endif
193  vpDisplay::display(I);
194  vpDisplay::flush(I);
195 
196  // Create a blob tracker
197  vpDot2 dot;
198  dot.setGraphics(true);
199  dot.setComputeMoments(true);
200  dot.setEllipsoidShapePrecision(0.); // to track a blob without any constraint on the shape
201  dot.setGrayLevelPrecision(0.9); // to set the blob gray level bounds for binarisation
202  dot.setEllipsoidBadPointsPercentage(0.5); // to accept up to 50% of inner
203  // and outer points with a bad
204  // gray level
205  dot.initTracking(I);
206  vpDisplay::flush(I);
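// Note: calling initTracking(I) without giving coordinates makes vpDot2 wait
// for a user click in the image; the clicked pixel is taken as the starting
// point of the blob tracking.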
207 
208  vpServo task;
209  task.setServo(vpServo::EYEINHAND_L_cVe_eJe);
210  task.setInteractionMatrixType(vpServo::DESIRED, vpServo::PSEUDO_INVERSE);
211  task.setLambda(lambda);
212  vpVelocityTwistMatrix cVe;
213  cVe = robot.get_cVe();
214  task.set_cVe(cVe);
215 
216  std::cout << "cVe: \n" << cVe << std::endl;
217 
218  vpMatrix eJe;
219  robot.get_eJe(eJe);
220  task.set_eJe(eJe);
221  std::cout << "eJe: \n" << eJe << std::endl;
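// Note: with the EYEINHAND_L_cVe_eJe servo type and the DESIRED interaction
// matrix option set above, computeControlLaw() later returns
//   v = -lambda * (L * cVe * eJe)^+ * (s - s*)
// where L is the interaction matrix evaluated at the desired features, cVe
// maps velocities from the end-effector (mobile robot) frame to the camera
// frame, and eJe is the robot Jacobian. The resulting velocities are thus
// expressed in the mobile robot frame: v[0] in m/s and v[1] in rad/s below.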
222 
223  // Current and desired visual feature associated to the x coordinate of
224  // the point
225  vpFeaturePoint s_x, s_xd;
226 
227  // Create the current x visual feature
228  vpFeatureBuilder::create(s_x, cam, dot);
229 
230  // Create the desired x* visual feature
231  s_xd.buildFrom(0, 0, depth);
232 
233  // Add the feature
234  task.addFeature(s_x, s_xd);
235 
236  // Create the current log(Z/Z*) visual feature
237  vpFeatureDepth s_Z, s_Zd;
238  // Surface of the blob estimated from the image moment m00 and converted
239  // to meters
240  double surface = 1. / sqrt(dot.m00 / (cam.get_px() * cam.get_py()));
241  double Z, Zd;
242  // Initial depth of the blob in front of the camera
243  Z = coef * surface;
244  // Desired depth Z* of the blob. This depth is learned and equal to the
245  // initial depth
246  Zd = Z;
247 
248  std::cout << "Z " << Z << std::endl;
249  s_Z.buildFrom(s_x.get_x(), s_x.get_y(), Z,
250  0); // log(Z/Z*) = 0 that's why the last parameter is 0
251  s_Zd.buildFrom(s_x.get_x(), s_x.get_y(), Zd,
252  0); // log(Z/Z*) = 0 that's why the last parameter is 0
253 
254  // Add the feature
255  task.addFeature(s_Z, s_Zd);
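// Note: the task error now stacks the point feature error (s_x - s_xd) and
// the depth feature error log(Z/Zd); the control law tries to bring the blob
// back to the principal point while keeping the estimated depth equal to the
// learned value Zd.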
256 
257  vpColVector v; // vz, wx
258 
259  while (1) {
260  // Acquire a new image
261 #if defined(HAVE_OPENCV_VIDEOIO)
262  g >> frame; // get a new frame from camera
263  vpImageConvert::convert(frame, I);
264 #else
265  g.acquire(I);
266 #endif
267  // Set the image as background of the viewer
268  vpDisplay::display(I);
269 
270  // Does the blob tracking
271  dot.track(I);
272  // Update the current x feature
273  vpFeatureBuilder::create(s_x, cam, dot);
274 
275  // Update log(Z/Z*) feature. Since the depth Z changes, we need to update
276  // the interaction matrix
277  surface = 1. / sqrt(dot.m00 / (cam.get_px() * cam.get_py()));
278  Z = coef * surface;
279  s_Z.buildFrom(s_x.get_x(), s_x.get_y(), Z, log(Z / Zd));
280 
281  robot.get_cVe(cVe);
282  task.set_cVe(cVe);
283 
284  robot.get_eJe(eJe);
285  task.set_eJe(eJe);
286 
287  // Compute the control law. Velocities are computed in the mobile robot
288  // reference frame
289  v = task.computeControlLaw();
290 
291  std::cout << "Send velocity to the pioneer: " << v[0] << " m/s " << vpMath::deg(v[1]) << " deg/s" << std::endl;
292 
293  // Send the velocity to the robot
294  robot.setVelocity(vpRobot::REFERENCE_FRAME, v);
295 
296  // Draw a vertical line which corresponds to the desired x coordinate of
297  // the dot cog
298  vpDisplay::displayLine(I, 0, 320, 479, 320, vpColor::red);
299  vpDisplay::flush(I);
300 
301  // A click in the viewer to exit
302  if (vpDisplay::getClick(I, false))
303  break;
304  }
305 
306  std::cout << "Ending robot thread..." << std::endl;
307  robot.stopRunning();
308 
309  // wait for the thread to stop
310  robot.waitForRunExit();
311 
312  // Display the servo task information
313  task.print();
314  return EXIT_SUCCESS;
315  }
316  catch (const vpException &e) {
317  std::cout << "Catch an exception: " << e << std::endl;
318  return EXIT_FAILURE;
319  }
320 }
321 #else
322 int main()
323 {
324  std::cout << "You don't have the right 3rd party libraries to run this example..." << std::endl;
325  return EXIT_SUCCESS;
326 }
327 #endif