ViSP  2.6.2
servoPioneerPanSegment3D.cpp
/****************************************************************************
 *
 * $Id: servoPioneerPanSegment3D.cpp 3842 2012-07-13 22:21:42Z fspindle $
 *
 * This file is part of the ViSP software.
 * Copyright (C) 2005 - 2012 by INRIA. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * ("GPL") version 2 as published by the Free Software Foundation.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact INRIA about acquiring a ViSP Professional
 * Edition License.
 *
 * See http://www.irisa.fr/lagadic/visp/visp.html for more information.
 *
 * This software was developed at:
 * INRIA Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 * http://www.irisa.fr/lagadic
 *
 * If you have questions regarding the use of this file, please contact
 * INRIA at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 *
 * Description:
 * IBVS on Pioneer P3DX mobile platform
 *
 * Authors:
 * Fabien Spindler
 *
 *****************************************************************************/
#include <iostream>

#include <visp/vpConfig.h>

#include <visp/vpRobotPioneer.h> // Include before vpDisplayX to avoid build issues
#include <visp/vpRobotBiclops.h>
#include <visp/vpCameraParameters.h>
#include <visp/vpDisplayGDI.h>
#include <visp/vpDisplayX.h>
#include <visp/vpDot2.h>
#include <visp/vpFeatureBuilder.h>
#include <visp/vpFeatureSegment.h>
#include <visp/vpHomogeneousMatrix.h>
#include <visp/vpImage.h>
#include <visp/vp1394TwoGrabber.h>
#include <visp/vp1394CMUGrabber.h>
#include <visp/vpV4l2Grabber.h>
#include <visp/vpPioneerPan.h>
#include <visp/vpPlot.h>
#include <visp/vpServo.h>
#include <visp/vpVelocityTwistMatrix.h>

#define USE_REAL_ROBOT
#define USE_PLOTTER
#if defined(VISP_HAVE_PIONEER) && defined(VISP_HAVE_BICLOPS)
int main(int argc, char **argv)
{
#if defined(VISP_HAVE_DC1394_2) || defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_CMU1394)
#if defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)
  vpImage<unsigned char> I; // Create a gray level image container
  double lambda = 0.05;
  // Scale parameter used to estimate the depth Z of the blob from its surface
  double coef = 0.9/14.85; // At 0.9m, the blob has a surface of 14.85
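  // Depth model used below: the blob "surface" is computed as 1/sqrt(m00/(px*py)), i.e. from the
  // area moment m00 normalized by the pixel sizes, and the depth is then approximated linearly as
  // Z = coef * surface. The scale coef comes from a single calibration measurement: at Z = 0.9 m
  // the computed surface value is 14.85, hence coef = 0.9/14.85.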
  double L = 0.21; // 3D horizontal segment length
  double Z_d = 1.; // Desired distance along Z between camera and segment

  // Warning: To have a non-singular task of rank 3, Y_d should be different from 0 so that
  // the optical axis doesn't intersect the horizontal segment
  double Y_d = .18; // Desired distance along Y between camera and segment.
  vpColVector qm(2); // Measured head position
  qm = 0;
  double qm_pan = 0; // Measured pan position (tilt is not handled in this example)

#ifdef USE_REAL_ROBOT
  // Initialize the biclops head

  vpRobotBiclops biclops("/usr/share/BiclopsDefault.cfg");
  biclops.setDenavitHartenbergModel(vpBiclops::DH1);

  // Move to the initial position
  vpColVector q(2);

  q = 0;
  q[0] = vpMath::rad(63);
  q[1] = vpMath::rad(12); // introduce a tilt angle to compensate for the camera sphere tilt so that the camera is parallel to the plane

  biclops.setRobotState(vpRobot::STATE_POSITION_CONTROL);
  biclops.setPosition(vpRobot::ARTICULAR_FRAME, q);
  //biclops.setPositioningVelocity(50);
  biclops.getPosition(vpRobot::ARTICULAR_FRAME, qm);
  qm_pan = qm[0];

  // Now the head will be controlled in velocity
  biclops.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);

  // Initialize the pioneer robot
  vpRobotPioneer pioneer;
  ArArgumentParser parser(&argc, argv);
  parser.loadDefaultArguments();

  // ArRobotConnector connects to the robot, gets some initial data from it such as type and name,
  // and then loads parameter files for this robot.
  ArRobotConnector robotConnector(&parser, &pioneer);
  if (!robotConnector.connectRobot())
  {
    ArLog::log(ArLog::Terse, "Could not connect to the pioneer robot.");
    if (parser.checkHelpAndWarnUnparsed())
    {
      Aria::logOptions();
      Aria::exit(1);
    }
  }
  if (!Aria::parseArgs())
  {
    Aria::logOptions();
    Aria::shutdown();
    return false;
  }

  pioneer.useSonar(false); // disable the sonar device usage

  std::cout << "Pioneer robot connected" << std::endl;
#endif

  vpPioneerPan robot_pan; // Generic robot that computes the velocities for the pioneer and the biclops head

  // Create the camera framegrabber
#if defined(VISP_HAVE_V4L2)
  // Create a grabber based on v4l2 third party lib (for usb cameras under Linux)
  vpV4l2Grabber g;
  g.setScale(1);
  g.setInput(0);
  g.setDevice("/dev/video1");
  g.open(I);
#elif defined(VISP_HAVE_DC1394_2)
  // Create a grabber based on libdc1394-2.x third party lib (for firewire cameras under Linux)
  vp1394TwoGrabber g(false);
  g.open(I);
#elif defined(VISP_HAVE_CMU1394)
  // Create a grabber based on CMU 1394 third party lib (for firewire cameras under windows)
  vp1394CMUGrabber g;
  g.setVideoMode(0, 5); // 640x480 MONO8
  g.setFramerate(4); // 30 Hz
  g.open(I);
#endif

  // Acquire an image from the grabber
  g.acquire(I);

  // Create an image viewer
#if defined(VISP_HAVE_X11)
  vpDisplayX d(I, 10, 10, "Current frame");
#elif defined(VISP_HAVE_GDI)
  vpDisplayGDI d(I, 10, 10, "Current frame");
#endif
  vpDisplay::display(I);
  vpDisplay::flush(I);

  // The 3D segment consists of two horizontal dots
  vpDot2 dot[2];
  for (int i=0; i <2; i++)
  {
    dot[i].setGraphics(true);
    dot[i].setComputeMoments(true);
    dot[i].setEllipsoidShapePrecision(0.); // to track a blob without any constraint on the shape
    dot[i].setGrayLevelPrecision(0.9); // to set the blob gray level bounds for binarisation
    dot[i].setEllipsoidBadPointsPercentage(0.5); // to accept up to 50% of inner and outer points with a bad gray level
    dot[i].initTracking(I);
    vpDisplay::flush(I);
  }

  // Camera parameters. In this experiment we don't need a precise calibration of the camera
  vpCameraParameters cam;
  cam.initPersProjWithoutDistortion(558, 555, 312, 210);

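  // Visual servoing task: an eye-in-hand scheme of the form L * cVe * eJe is used, where L is the
  // interaction matrix of the segment features, cVe is the velocity twist transformation from the
  // end-effector (pan head) frame to the camera frame, and eJe is the jacobian of the pioneer +
  // pan head that maps the control inputs (vx, wz, qdot_pan) to the end-effector velocity. The
  // control law therefore directly outputs the platform and pan velocities.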
  vpServo task;
  task.setServo(vpServo::EYEINHAND_L_cVe_eJe);
  task.setInteractionMatrixType(vpServo::CURRENT, vpServo::PSEUDO_INVERSE);
  task.setLambda(lambda);
  vpVelocityTwistMatrix cVe; // keep to identity
  cVe = robot_pan.get_cVe();
  task.set_cVe(cVe);

  std::cout << "cVe: \n" << cVe << std::endl;

  vpMatrix eJe;

  // Update the robot jacobian that depends on the pan position
  robot_pan.set_eJe(qm_pan);
  // Get the robot jacobian
  eJe = robot_pan.get_eJe();
  task.set_eJe(eJe);
  std::cout << "eJe: \n" << eJe << std::endl;

  // Define a 3D horizontal segment and its coordinates in the image plane
  vpPoint P[2];
  P[0].setWorldCoordinates(-L/2, 0, 0);
  P[1].setWorldCoordinates( L/2, 0, 0);
  // Define the desired camera position
  vpHomogeneousMatrix cMo(0, Y_d, Z_d, 0, 0, 0); // Here we are in front of the segment
  for (int i=0; i <2; i++)
  {
    P[i].changeFrame(cMo);
    P[i].project(); // Here the x,y parameters obtained by perspective projection are computed
  }
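  // The projections computed just above, at the desired camera pose cMo, provide the desired
  // image coordinates and depths of the two segment end points; they are used below to build
  // the desired feature s_segment_d.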

  // Estimate the depth of the segment extremity points
  double surface[2];
  double Z[2]; // Depth of the segment points
  for (int i=0; i<2; i++)
  {
    // Surface of the blob estimated from the image moment m00 and converted to meters
    surface[i] = 1./sqrt(dot[i].m00/(cam.get_px()*cam.get_py()));

    // Initial depth of the blob
    Z[i] = coef * surface[i];
  }

  // Use here a feature segment builder
  bool normalized = true; // normalized segment features are used
  vpFeatureSegment s_segment(normalized), s_segment_d(normalized); // current and desired features
  vpFeatureBuilder::create(s_segment, cam, dot[0], dot[1]);
  s_segment.setZ1(Z[0]);
  s_segment.setZ2(Z[1]);
  // Set the desired feature
  vpFeatureBuilder::create(s_segment_d, P[0], P[1]);
  s_segment_d.setZ1( P[0].get_Z() ); // Desired depth
  s_segment_d.setZ2( P[1].get_Z() );

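  // Add the segment features to the task. With normalized features the selected components are
  // xc/l (normalized abscissa of the segment center), 1/l (inverse of the segment length) and
  // alpha (segment orientation), which yields a task of rank 3 controlling vx, wz and the pan
  // velocity.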
  task.addFeature(s_segment, s_segment_d,
                  vpFeatureSegment::selectXc() |
                  vpFeatureSegment::selectL() |
                  vpFeatureSegment::selectAlpha());

#ifdef USE_PLOTTER
  // Create a window (500 by 500) at position (700, 10) with two graphics
  vpPlot graph(2, 500, 500, 700, 10, "Curves...");

  // Each graphic contains 3 curves
  graph.initGraph(0,3);
  graph.initGraph(1,3);
  graph.setTitle(0, "Velocities");
  graph.setTitle(1, "Error s-s*");
  graph.setLegend(0, 0, "vx");
  graph.setLegend(0, 1, "wz");
  graph.setLegend(0, 2, "w_pan");
  graph.setLegend(1, 0, "xm/l");
  graph.setLegend(1, 1, "1/l");
  graph.setLegend(1, 2, "alpha");
#endif

  vpColVector v; // vx, wz, qdot_pan velocities computed by the servo task
  unsigned int iter = 0;
  try
  {
    while(1)
    {
#ifdef USE_REAL_ROBOT
      // Get the new pan position
      biclops.getPosition(vpRobot::ARTICULAR_FRAME, qm);
#endif
      qm_pan = qm[0];

      // Acquire a new image
      g.acquire(I);
      // Set the image as background of the viewer
      vpDisplay::display(I);

      // Display the desired position of the segment
      for (int i=0; i<2; i++)
        P[i].display(I, cam, vpColor::red, 3);

      // Do the blob tracking
      for (int i=0; i<2; i++)
        dot[i].track(I);

      for (int i=0; i<2; i++)
      {
        // Surface of the blob estimated from the image moment m00 and converted to meters
        surface[i] = 1./sqrt(dot[i].m00/(cam.get_px()*cam.get_py()));

        // Current depth of the blob
        Z[i] = coef * surface[i];
      }

      // Update the features
      vpFeatureBuilder::create(s_segment, cam, dot[0], dot[1]);
      // Update the depth of the point. Useful only if the current interaction matrix is used
      // when task.setInteractionMatrixType(vpServo::CURRENT, vpServo::PSEUDO_INVERSE) is set
      s_segment.setZ1(Z[0]);
      s_segment.setZ2(Z[1]);

      robot_pan.get_cVe(cVe);
      task.set_cVe(cVe);

      // Update the robot jacobian that depends on the pan position
      robot_pan.set_eJe(qm_pan);
      // Get the robot jacobian
      eJe = robot_pan.get_eJe();
      // Update the jacobian that will be used to compute the control law
      task.set_eJe(eJe);

      // Compute the control law. Velocities are computed in the mobile robot reference frame
      v = task.computeControlLaw();
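      // The task error e = s - s* is mapped through the pseudo-inverse of (L * cVe * eJe), so
      // the returned vector v has three components: v[0] = vx and v[1] = wz for the mobile
      // platform, and v[2] = qdot_pan for the pan axis of the head.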

//    std::cout << "-----" << std::endl;
//    std::cout << "v: " << v.t() << std::endl;
//    std::cout << "error: " << task.getError().t() << std::endl;
//    std::cout << "L:\n " << task.getInteractionMatrix() << std::endl;
//    std::cout << "eJe:\n " << task.get_eJe() << std::endl;
//    std::cout << "cVe:\n " << task.get_cVe() << std::endl;
//    std::cout << "L_cVe_eJe:\n" << task.getInteractionMatrix() * task.get_cVe() * task.get_eJe() << std::endl;
//    task.print();
      if (task.getTaskRank() != 3)
        std::cout << "Warning: task is of rank " << task.getTaskRank() << std::endl;
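      // A rank lower than 3 means the task has become singular; as noted with the Y_d parameter
      // above, this typically happens when the optical axis intersects the horizontal segment.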

#ifdef USE_PLOTTER
      graph.plot(0, iter, v); // plot velocities applied to the robot
      graph.plot(1, iter, task.getError()); // plot error vector
#endif

#ifdef USE_REAL_ROBOT
      // Send the velocity to the robot
      vpColVector v_pioneer(2); // vx, wz
      v_pioneer[0] = v[0];
      v_pioneer[1] = v[1];
      vpColVector v_biclops(2); // qdot pan and tilt
      v_biclops[0] = v[2];
      v_biclops[1] = 0;

      pioneer.setVelocity(vpRobot::REFERENCE_FRAME, v_pioneer);
      biclops.setVelocity(vpRobot::ARTICULAR_FRAME, v_biclops);
#endif

      // Draw a vertical line which corresponds to the desired x coordinate of the dot cog
      vpDisplay::displayLine(I, 0, cam.get_u0(), 479, cam.get_u0(), vpColor::red);
      vpDisplay::flush(I);

      // A click in the viewer to exit
      if ( vpDisplay::getClick(I, false) )
        break;

      iter ++;
      //break;
    }
  }
  catch(...)
  {
  }

#ifdef USE_REAL_ROBOT
  std::cout << "Ending robot thread..." << std::endl;
  pioneer.stopRunning();

  // wait for the thread to stop
  pioneer.waitForRunExit();
#endif

  // Kill the servo task
  task.print();
  task.kill();

#endif
#endif
}
#else
int main()
{
  std::cout << "ViSP is not able to control the Pioneer robot" << std::endl;
  return 0;
}
#endif