Example of an eye-in-hand control law. Here we control a real robot, the Afma6 (a Cartesian robot with 6 degrees of freedom); the velocity is computed in the camera frame. The visual features, built from four tracked lines, are the x and y coordinates of the rectangle center, log(Z/Z*) (the current depth relative to the desired depth) and the theta-u rotations.
#include <visp/vpConfig.h>
#include <visp/vpDebug.h>
#include <stdlib.h>
#include <cmath>
#include <limits>
#if (defined (VISP_HAVE_AFMA6) && defined (VISP_HAVE_DC1394_2))
#include <visp/vp1394TwoGrabber.h>
#include <visp/vpImage.h>
#include <visp/vpImagePoint.h>
#include <visp/vpImageIo.h>
#include <visp/vpDisplay.h>
#include <visp/vpDisplayX.h>
#include <visp/vpDisplayOpenCV.h>
#include <visp/vpDisplayGTK.h>
#include <visp/vpMath.h>
#include <visp/vpHomogeneousMatrix.h>
#include <visp/vpFeatureLine.h>
#include <visp/vpFeaturePoint.h>
#include <visp/vpFeatureDepth.h>
#include <visp/vpGenericFeature.h>
#include <visp/vpLine.h>
#include <visp/vpServo.h>
#include <visp/vpFeatureBuilder.h>
#include <visp/vpPose.h>
#include <visp/vpRobotAfma6.h>
#include <visp/vpException.h>
#include <visp/vpMatrixException.h>
#include <visp/vpServoDisplay.h>
#include <visp/vpDot2.h>
#include <visp/vpPoint.h>
#include <visp/vpHomogeneousMatrix.h>
// Entry point for the eye-in-hand Afma6 visual-servo example.
// NOTE(review): this chunk looks truncated -- several loop bodies are empty
// and `point`, `pointd`, `task`, `v`, `I`, `cam` are used without visible
// declarations, so the comments below describe only what is visible here.
int
main()
{
try
{
// Select a display backend depending on what the build provides.
// NOTE(review): the branches are empty in this chunk -- presumably the
// display object was constructed here in the full file; confirm upstream.
#ifdef VISP_HAVE_X11
#elif defined(VISP_HAVE_OPENCV)
#elif defined(VISP_HAVE_GTK)
#endif
// Print a banner describing the demo.
std::cout << std::endl ;
std::cout << "-------------------------------------------------------" << std::endl ;
std::cout << " Test program for vpServo " <<std::endl ;
std::cout << " Eye-in-hand task control, velocity computed in the camera frame" << std::endl ;
std::cout << " Simulation " << std::endl ;
std::cout << " task : servo a line " << std::endl ;
std::cout << "-------------------------------------------------------" << std::endl ;
std::cout << std::endl ;
// Number of tracked lines and of 3D points used to build the features.
int nbline =4 ;
int nbpoint =4 ;
vpTRACE(
"sets the desired position of the visual feature ") ;
// Geometry constant in meters -- presumably the half-size of the observed
// square; unused in this truncated chunk, TODO confirm against full file.
double L=0.05 ;
vpTRACE(
"Initialization of the tracking") ;
int i ;
// Per-line tracker initialization (body elided in this chunk).
for (i=0 ; i < nbline ; i++)
{
}
// Second initialization pass; terminates the program on failure.
for (i=0 ; i < nbline ; i++)
{
double x=0, y=0;
{
exit(-1);
}
}
// Build the current visual features from the tracked lines (body elided).
for (i=0 ; i < nbline ; i++)
{
}
vpTRACE(
"sets the current position of the visual feature ") ;
// Rectangle center: midpoint of the diagonal joining corners 0 and 2.
double xc = (point[0].
get_x()+point[2].
get_x())/2;
double yc = (point[0].
get_y()+point[2].
get_y())/2;
vpTRACE(
"\t we want an eye-in-hand control law") ;
vpTRACE(
"\t robot is controlled in the camera frame") ;
vpTRACE(
"\t we want to see a point on a point..") ;
std::cout << std::endl ;
vpTRACE(
"Display task information " ) ;
// Servo-loop state: iteration counter and adaptive-gain parameters for
// lambda(e) = alpha * exp(-beta * ||e||^2) + lambda_av (see gain below).
unsigned int iter=0 ;
double lambda_av =0.05;
double alpha = 0.05 ;
double beta =3 ;
// Infinite servo loop: track, update features, compute and send velocity.
for ( ; ; )
{
std::cout << "---------------------------------------------" << iter <<std::endl ;
try {
// Track the lines in the new image; abort on tracking failure.
for (i=0 ; i < nbline ; i++)
{
double x=0, y=0;
{
exit(-1);
}
}
// Overlay the desired point positions on the current image.
for (i = 0; i < nbpoint; i++) pointd[i].display(I, cam,
vpColor::red);
// Adaptive gain: constant lambda_av when alpha is numerically zero,
// otherwise raise the gain as the task error shrinks so convergence
// speeds up near the goal without overshooting far from it.
double gain ;
{
if (std::fabs(alpha) <= std::numeric_limits<double>::epsilon())
gain = lambda_av ;
else
{
gain = alpha * exp (-beta * ( task.
getError() ).sumSquare() ) + lambda_av ;
}
}
// Zero the velocity command (the actual computation is elided here).
{
v =0 ;
}
}
catch(...)
{
// Any failure inside the loop: stop the robot (zero velocity) and quit.
v =0 ;
exit(1) ;
}
iter++;
}
// Never reached: the loop above is infinite.
vpTRACE(
"Display task information " ) ;
}
catch (...)
{
// Top-level errors are swallowed and the program reports success.
return 0;
}
}
#else
// Fallback entry point used when the build lacks an Afma6 robot driver or a
// dc1394 (firewire) framegrabber: report the missing prerequisites and exit.
int
main()
{
  vpERROR_TRACE(
    "You do not have an afma6 robot or a firewire framegrabber connected to your computer...");
  return 0; // explicit success exit, matching the real-robot path above
}
#endif