#include <visp/vpConfig.h>
#include <visp/vpDebug.h>

#if (defined (VISP_HAVE_AFMA6) && defined (VISP_HAVE_DC1394_2))

#include <visp/vp1394TwoGrabber.h>
#include <visp/vpImage.h>
#include <visp/vpImagePoint.h>
#include <visp/vpMath.h>
#include <visp/vpHomogeneousMatrix.h>
#include <visp/vpFeaturePoint.h>
#include <visp/vpPoint.h>
#include <visp/vpServo.h>
#include <visp/vpFeatureBuilder.h>
#include <visp/vpRobotAfma6.h>
#include <visp/vpException.h>
#include <visp/vpMatrixException.h>
#include <visp/vpServoDisplay.h>
#include <visp/vpDot.h>
#include <visp/vpPose.h>
#include <visp/vpImageIo.h>
#include <visp/vpDisplay.h>
#include <visp/vpDisplayX.h>
std::cout << std::endl ;
std::cout << "-------------------------------------------------------" << std::endl ;
std::cout << " Test program for vpServo " << std::endl ;
std::cout << " Eye-to-hand task control" << std::endl ;
std::cout << " Simulation " << std::endl ;
std::cout << " task : servo a point " << std::endl ;
std::cout << "-------------------------------------------------------" << std::endl ;
std::cout << std::endl ;
for (i=0 ; i < nbPoint ; i++)

for (i=0 ; i < nbPoint ; i++)

std::cout << cMo << std::endl ;
std::cout << " Learning 0/1 " << std::endl ;
char name[FILENAME_MAX] ;
sprintf(name, "cdMo.dat") ;
std::cin >> learning ;
vpTRACE("Save the location of the object in a file cdMo.dat") ;
std::ofstream f(name) ;

vpTRACE("Loading desired location from cdMo.dat") ;
std::ifstream f("cdMo.dat") ;
for (i=0 ; i < nbPoint ; i++)
vpTRACE("\t we want an eye-to-hand control law") ;
vpTRACE("\t robot is controlled in the camera frame") ;

for (i=0 ; i < nbPoint ; i++)

vpTRACE("Display task information") ;
double convergence_threshold = 0.00;

unsigned int iter = 0 ;

oMcamrobot[0][3] = -0.05 ;

double lambda_av = 0.1;

std::cout << "alpha 0.7" << std::endl;
std::cout << "beta 5" << std::endl;

std::list<vpImagePoint> Lcog ;
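// Lcog accumulates the centers of gravity returned by dot[i].getCog() at every
// iteration of the servo loop below; the list is then walked (see the
// const_iterator loop further down) to overlay the whole feature trajectory on
// the displayed image.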
while(error > convergence_threshold)

std::cout << "---------------------------------------------" << iter++ << std::endl ;

"Eye-To-Hand Visual Servoing",
"IRISA-INRIA Rennes, Lagadic project",

for (i=0 ; i < nbPoint ; i++)
Lcog.push_back( dot[i].getCog() );

vpTRACE("Error detected while tracking visual features") ;

for (i=0 ; i < nbPoint ; i++)

cMe = cMo * oMcamrobot * camrobotMe ;
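// A minimal sketch of how cMe usually feeds the eye-to-hand control law through
// the vpServo calls referenced by this example (set_cVe, set_eJe, get_eJe). The
// exact statements are omitted from the excerpt, so treat this as an assumption
// rather than the original code.
//
//   vpVelocityTwistMatrix cVe ;
//   cVe.buildFrom(cMe) ;    // twist transformation from end-effector to camera
//   task.set_cVe(cVe) ;
//
//   vpMatrix eJe ;
//   robot.get_eJe(eJe) ;    // robot Jacobian expressed in the end-effector frame
//   task.set_eJe(eJe) ;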
if (std::fabs(alpha) <= std::numeric_limits<double>::epsilon())

gain = alpha * exp(-beta * ( task.getError() ).sumSquare() ) + lambda_av;

else gain = lambda_av ;

vpTRACE("%f %f %f %f %f", alpha, beta, lambda_av, ( task.getError() ).sumSquare(), gain) ;
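// The three gain fragments above are not contiguous in the original file, so
// they should not be read as a single if/else. A sketch of the adaptive-gain
// scheme they implement (an assumption about the omitted branches): when alpha
// is effectively zero the constant gain lambda_av is used; otherwise the gain
// rises from about lambda_av (large error) towards alpha + lambda_av as the
// squared feature error approaches zero, which avoids the slow final
// convergence of a small constant gain. The gain is then passed to the task and
// the resulting velocity sent to the robot; the control frame used by
// setVelocity() is not shown in this excerpt.
//
//   double e2 = ( task.getError() ).sumSquare() ;
//   if (std::fabs(alpha) <= std::numeric_limits<double>::epsilon())
//     gain = lambda_av ;
//   else
//     gain = alpha * exp(-beta * e2) + lambda_av ;
//
//   task.setLambda(gain) ;
//   vpColVector v = task.computeControlLaw() ;
//   robot.setVelocity(..., v) ;   // control frame omitted here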
for (std::list<vpImagePoint>::const_iterator it_cog = Lcog.begin(); it_cog != Lcog.end(); ++it_cog)

error = ( task.getError() ).sumSquare() ;
std::cout << "|| s - s* || = " << error << std::endl ;
vpTRACE("Error detected while tracking visual features") ;

if ((SAVE == 1) && (iter % 3 == 0))

sprintf(name, "/tmp/marchand/image.%04d.ppm", it++) ;
vpERROR_TRACE("You do not have an afma6 robot or a firewire framegrabber connected to your computer...");