ViSP  2.8.0
servoPioneerPanSegment3D.cpp
/****************************************************************************
 *
 * $Id: servoPioneerPanSegment3D.cpp 4258 2013-05-15 15:54:05Z fspindle $
 *
 * This file is part of the ViSP software.
 * Copyright (C) 2005 - 2013 by INRIA. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * ("GPL") version 2 as published by the Free Software Foundation.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact INRIA about acquiring a ViSP Professional
 * Edition License.
 *
 * See http://www.irisa.fr/lagadic/visp/visp.html for more information.
 *
 * This software was developed at:
 * INRIA Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 * http://www.irisa.fr/lagadic
 *
 * If you have questions regarding the use of this file, please contact
 * INRIA at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 *
 * Description:
 * IBVS on Pioneer P3DX mobile platform
 *
 * Authors:
 * Fabien Spindler
 *
 *****************************************************************************/
#include <iostream>

#include <visp/vpConfig.h>

#include <visp/vpRobotPioneer.h> // Include before vpDisplayX to avoid build issues
#include <visp/vpRobotBiclops.h>
#include <visp/vpCameraParameters.h>
#include <visp/vpDisplayGDI.h>
#include <visp/vpDisplayX.h>
#include <visp/vpDot2.h>
#include <visp/vpFeatureBuilder.h>
#include <visp/vpFeatureSegment.h>
#include <visp/vpHomogeneousMatrix.h>
#include <visp/vpImage.h>
#include <visp/vp1394TwoGrabber.h>
#include <visp/vp1394CMUGrabber.h>
#include <visp/vpV4l2Grabber.h>
#include <visp/vpPioneerPan.h>
#include <visp/vpPlot.h>
#include <visp/vpServo.h>
#include <visp/vpVelocityTwistMatrix.h>
#define USE_REAL_ROBOT
#define USE_PLOTTER
#undef VISP_HAVE_V4L2 // To use a firewire camera

#if defined(VISP_HAVE_PIONEER) && defined(VISP_HAVE_BICLOPS)
int main(int argc, char **argv)
{
#if defined(VISP_HAVE_DC1394_2) || defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_CMU1394)
#if defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)
  vpImage<unsigned char> I; // Create a gray level image container
  double lambda = 0.1;
  // Scale parameter used to estimate the depth Z of the blob from its surface
  //double coef = 0.9/14.85; // At 0.9m, the blob has a surface of 14.85 (Logitech sphere)
  double coef = 1.2/13.0; // At 1m, the blob has a surface of 11.3 (AVT Pike 032C)
  double L = 0.21; // 3D horizontal segment length
  double Z_d = 0.8; // Desired distance along Z between camera and segment
  bool normalized = true; // segment normalized features are used

  // Warning: To have a non-singular task of rank 3, Y_d should be different from 0 so that
  // the optical axis doesn't intersect the horizontal segment
  double Y_d = -.11; // Desired distance along Y between camera and segment.
  vpColVector qm(2); // Measured head position
  qm = 0;
  double qm_pan = 0; // Measured pan position (tilt is not handled in this example)

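  // Overview of the servo scheme implemented below: three segment features measured on
  // two tracked blobs are regulated to their desired values by acting on three
  // velocities: the Pioneer translational velocity vx, its rotational velocity wz, and
  // the pan velocity of the Biclops head that carries the camera. This is why a rank-3
  // task is expected further below.
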
#ifdef USE_REAL_ROBOT
  // Initialize the biclops head

  vpRobotBiclops biclops("/usr/share/BiclopsDefault.cfg");
  biclops.setDenavitHartenbergModel(vpBiclops::DH1);

  // Move to the initial position
  vpColVector q(2);

  q = 0;
//  q[0] = vpMath::rad(63);
//  q[1] = vpMath::rad(12); // introduce a tilt angle to compensate for the camera sphere tilt so that the camera is parallel to the plane

  biclops.setRobotState(vpRobot::STATE_POSITION_CONTROL) ;
  biclops.setPosition( vpRobot::ARTICULAR_FRAME, q );
  //biclops.setPositioningVelocity(50);
  biclops.getPosition(vpRobot::ARTICULAR_FRAME, qm);
  qm_pan = qm[0];

  // Now the head will be controlled in velocity
  biclops.setRobotState(vpRobot::STATE_VELOCITY_CONTROL) ;

  // Initialize the pioneer robot
  vpRobotPioneer pioneer;
  ArArgumentParser parser(&argc, argv);
  parser.loadDefaultArguments();

  // ArRobotConnector connects to the robot, gets some initial data from it such as type and name,
  // and then loads parameter files for this robot.
  ArRobotConnector robotConnector(&parser, &pioneer);
  if (!robotConnector.connectRobot())
  {
    ArLog::log(ArLog::Terse, "Could not connect to the pioneer robot.");
    if (parser.checkHelpAndWarnUnparsed())
    {
      Aria::logOptions();
      Aria::exit(1);
    }
  }
  if (!Aria::parseArgs())
  {
    Aria::logOptions();
    Aria::shutdown();
    return false;
  }

  pioneer.useSonar(false); // disable the sonar device usage

  // Wait 3 sec to be sure that the low level Aria thread used to control
  // the robot is started. Without this delay we experienced a delay (around 2.2 sec)
  // between the velocity sent to the robot and the velocity that is really applied
  // to the wheels.
  sleep(3);

  std::cout << "Pioneer robot connected" << std::endl;
#endif

  vpPioneerPan robot_pan; // Generic robot that computes the velocities for the pioneer and the biclops head

  // Camera parameters. In this experiment we don't need a precise calibration of the camera
  vpCameraParameters cam;
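  // The calls to initPersProjWithoutDistortion(px, py, u0, v0) below set the focal
  // lengths expressed in pixels and the principal point coordinates, using a perspective
  // projection model without distortion; approximate values are enough here.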

  // Create the camera framegrabber
#if defined(VISP_HAVE_V4L2)
  // Create a grabber based on v4l2 third party lib (for usb cameras under Linux)
  vpV4l2Grabber g;
  g.setScale(1);
  g.setInput(0);
  g.setDevice("/dev/video1");
  g.open(I);
  // Logitech sphere parameters
  cam.initPersProjWithoutDistortion(558, 555, 312, 210);
#elif defined(VISP_HAVE_DC1394_2)
  // Create a grabber based on libdc1394-2.x third party lib (for firewire cameras under Linux)
  vp1394TwoGrabber g(false);
  // AVT Pike 032C parameters
  cam.initPersProjWithoutDistortion(800, 795, 320, 216);
#elif defined(VISP_HAVE_CMU1394)
  // Create a grabber based on CMU 1394 third party lib (for firewire cameras under windows)
  vp1394CMUGrabber g;
  g.setVideoMode(0, 5); // 640x480 MONO8
  g.setFramerate(4); // 30 Hz
  g.open(I);
  // AVT Pike 032C parameters
  cam.initPersProjWithoutDistortion(800, 795, 320, 216);
#endif

  // Acquire an image from the grabber
  g.acquire(I);

  // Create an image viewer
#if defined(VISP_HAVE_X11)
  vpDisplayX d(I, 10, 10, "Current frame");
#elif defined(VISP_HAVE_GDI)
  vpDisplayGDI d(I, 10, 10, "Current frame");
#endif
  vpDisplay::display(I);
  vpDisplay::flush(I);

  // The 3D segment consists of two horizontal dots
  vpDot2 dot[2];
  for (int i=0; i <2; i++)
  {
    dot[i].setGraphics(true);
    dot[i].setComputeMoments(true);
    dot[i].setEllipsoidShapePrecision(0.); // to track a blob without any constraint on the shape
    dot[i].setGrayLevelPrecision(0.9); // to set the blob gray level bounds for binarisation
    dot[i].setEllipsoidBadPointsPercentage(0.5); // to accept up to 50% of inner and outside points with a bad gray level
    dot[i].initTracking(I);
    vpDisplay::flush(I);
  }
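  // Note that initTracking(I) called without an initial position waits for a mouse
  // click on each blob; click them in the same left-to-right order as the desired
  // points P[0] and P[1] defined below so that the tracked segment matches the
  // desired one.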

  vpServo task;
  task.setServo(vpServo::EYEINHAND_L_cVe_eJe) ;
  task.setInteractionMatrixType(vpServo::DESIRED, vpServo::PSEUDO_INVERSE) ;
  task.setLambda(lambda) ;
  vpVelocityTwistMatrix cVe ; // keep to identity
  cVe = robot_pan.get_cVe() ;
  task.set_cVe(cVe) ;

  std::cout << "cVe: \n" << cVe << std::endl;

  vpMatrix eJe;

  // Update the robot jacobian that depends on the pan position
  robot_pan.set_eJe(qm_pan);
  // Get the robot jacobian
  eJe = robot_pan.get_eJe() ;
  task.set_eJe(eJe) ;
  std::cout << "eJe: \n" << eJe << std::endl;
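  // With this configuration the control law computed below is
  //   v = -lambda * (L * cVe * eJe)^+ * (s - s*)
  // where L is the interaction matrix of the segment features, cVe twists velocities
  // from the end-effector frame to the camera frame, and eJe maps the control inputs
  // (vx, wz, qdot_pan) to the end-effector velocity.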

  // Define a 3D horizontal segment and its coordinates in the image plane
  vpPoint P[2];
  P[0].setWorldCoordinates(-L/2, 0, 0);
  P[1].setWorldCoordinates( L/2, 0, 0);
  // Define the desired camera position
  vpHomogeneousMatrix cMo(0, Y_d, Z_d, 0, 0, 0); // Here we are in front of the segment
  for (int i=0; i <2; i++)
  {
    P[i].changeFrame(cMo);
    P[i].project(); // Here the x,y parameters obtained by perspective projection are computed
  }
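  // After changeFrame() the points are expressed in the desired camera frame
  // (+-L/2, Y_d, Z_d); project() then computes their normalized image-plane
  // coordinates x = X/Z and y = Y/Z, which define the desired segment feature s*.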

  // Estimate the depth of the segment extremity points
  double surface[2];
  double Z[2]; // Depth of the segment points
  for (int i=0; i<2; i++)
  {
    // m00/(px*py) is the blob area in the normalized image plane; its inverse square root grows linearly with the blob depth
    surface[i] = 1./sqrt(dot[i].m00/(cam.get_px()*cam.get_py()));

    // Initial depth of the blob
    Z[i] = coef * surface[i] ;
  }

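  // Depth from apparent size: for a blob of fixed physical size the apparent area in the
  // normalized image plane scales as 1/Z^2, so 1/sqrt(area) is proportional to Z. The
  // proportionality constant coef is identified at a known distance (see the values
  // given at the top of main()).
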
  // Use here a feature segment builder
  vpFeatureSegment s_segment(normalized), s_segment_d(normalized); // From the segment we use the x_c, l and alpha features
  vpFeatureBuilder::create(s_segment, cam, dot[0], dot[1]);
  s_segment.setZ1(Z[0]);
  s_segment.setZ2(Z[1]);
  // Set the desired feature
  vpFeatureBuilder::create(s_segment_d, P[0], P[1]);
  s_segment_d.setZ1( P[0].get_Z() ); // Desired depth
  s_segment_d.setZ2( P[1].get_Z() );

  task.addFeature(s_segment, s_segment_d,
                  vpFeatureSegment::selectXc() | vpFeatureSegment::selectL() | vpFeatureSegment::selectAlpha());

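  // Since normalized = true, the selected features are actually x_c/l, 1/l and alpha
  // (segment center abscissa, inverse length and orientation in the image plane), which
  // is what the second plot below displays: three features for the three controlled
  // velocities.
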
#ifdef USE_PLOTTER
  // Create a window (500 by 500) at position (700, 10) with two graphics
  vpPlot graph(2, 500, 500, 700, 10, "Curves...");

  // Each of the two graphics contains 3 curves
  graph.initGraph(0,3);
  graph.initGraph(1,3);
  graph.setTitle(0, "Velocities");
  graph.setTitle(1, "Error s-s*");
  graph.setLegend(0, 0, "vx");
  graph.setLegend(0, 1, "wz");
  graph.setLegend(0, 2, "w_pan");
  graph.setLegend(1, 0, "xm/l");
  graph.setLegend(1, 1, "1/l");
  graph.setLegend(1, 2, "alpha");
#endif

  vpColVector v; // vx, wz, qdot_pan velocities computed by the task
  unsigned int iter = 0;
  try
  {
    while(1)
    {
#ifdef USE_REAL_ROBOT
      // Get the new pan position
      biclops.getPosition(vpRobot::ARTICULAR_FRAME, qm);
#endif
      qm_pan = qm[0];

      // Acquire a new image
      g.acquire(I);
      // Set the image as background of the viewer
      vpDisplay::display(I);

      // Display the desired position of the segment
      for (int i=0; i<2; i++)
        P[i].display(I, cam, vpColor::red, 3);

      // Does the blob tracking
      for (int i=0; i<2; i++)
        dot[i].track(I);

      for (int i=0; i<2; i++)
      {
        // m00/(px*py) is the blob area in the normalized image plane; its inverse square root grows linearly with the blob depth
        surface[i] = 1./sqrt(dot[i].m00/(cam.get_px()*cam.get_py()));

        // Current depth of the blob
        Z[i] = coef * surface[i] ;
      }

      // Update the features
      vpFeatureBuilder::create(s_segment, cam, dot[0], dot[1]);
      // Update the depth of the points. Useful only if the current interaction matrix is used
      // when task.setInteractionMatrixType(vpServo::CURRENT, vpServo::PSEUDO_INVERSE) is set
      s_segment.setZ1(Z[0]);
      s_segment.setZ2(Z[1]);

      robot_pan.get_cVe(cVe);
      task.set_cVe(cVe);

      // Update the robot jacobian that depends on the pan position
      robot_pan.set_eJe(qm_pan);
      // Get the robot jacobian
      eJe = robot_pan.get_eJe();
      // Update the jacobian that will be used to compute the control law
      task.set_eJe(eJe);

      // Compute the control law. Velocities are computed in the mobile robot reference frame
      v = task.computeControlLaw();

//      std::cout << "-----" << std::endl;
//      std::cout << "v: " << v.t() << std::endl;
//      std::cout << "error: " << task.getError().t() << std::endl;
//      std::cout << "L:\n " << task.getInteractionMatrix() << std::endl;
//      std::cout << "eJe:\n " << task.get_eJe() << std::endl;
//      std::cout << "cVe:\n " << task.get_cVe() << std::endl;
//      std::cout << "L_cVe_eJe:\n" << task.getInteractionMatrix() * task.get_cVe() * task.get_eJe() << std::endl;
//      task.print() ;
      if (task.getTaskRank() != 3)
        std::cout << "Warning: task is of rank " << task.getTaskRank() << std::endl;
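      // A rank lower than 3 indicates a singular task: this typically happens when the
      // optical axis crosses the segment (see the warning about Y_d at the top of main()),
      // in which case the three features no longer constrain the three velocities.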

#ifdef USE_PLOTTER
      graph.plot(0, iter, v); // plot velocities applied to the robot
      graph.plot(1, iter, task.getError()); // plot error vector
#endif

#ifdef USE_REAL_ROBOT
      // Send the velocity to the robot
      vpColVector v_pioneer(2); // vx, wz
      v_pioneer[0] = v[0];
      v_pioneer[1] = v[1];
      vpColVector v_biclops(2); // qdot pan and tilt
      v_biclops[0] = v[2];
      v_biclops[1] = 0;

      std::cout << "Send velocity to the pioneer: " << v_pioneer[0] << " m/s "
                << vpMath::deg(v_pioneer[1]) << " deg/s" << std::endl;
      std::cout << "Send velocity to the biclops head: " << vpMath::deg(v_biclops[0]) << " deg/s" << std::endl;

      pioneer.setVelocity(vpRobot::REFERENCE_FRAME, v_pioneer);
      biclops.setVelocity(vpRobot::ARTICULAR_FRAME, v_biclops) ;
#endif

      // Draw a vertical line which corresponds to the desired x coordinate of the dot cog
      vpDisplay::displayLine(I, 0, cam.get_u0(), 479, cam.get_u0(), vpColor::red);
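      // With the desired pose chosen above the segment center projects to x = 0, i.e. to
      // the image column u0, so this line materializes where the two dots should end up.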
      vpDisplay::flush(I);

      // A click in the viewer to exit
      if ( vpDisplay::getClick(I, false) )
        break;

      iter ++;
      //break;
    }
  }
  catch(...)
  {
  }

#ifdef USE_REAL_ROBOT
  std::cout << "Ending robot thread..." << std::endl;
  pioneer.stopRunning();

  // wait for the thread to stop
  pioneer.waitForRunExit();
#endif

  // Kill the servo task
  task.print() ;
  task.kill();

#endif
#endif
}
#else
int main()
{
  std::cout << "ViSP is not able to control the Pioneer robot and the Biclops head" << std::endl;
  return 0;
}
#endif