Example of an eye-in-hand control law. A real robot, the Afma6 (a Cartesian robot with 6 degrees of freedom), is controlled here, and the velocity is computed in the camera frame. The visual features are built from four lines forming a rectangle and consist of the x and y coordinates of the rectangle center, log(Z/Z*) (the current depth relative to the desired depth) and the theta-u rotation, which makes this a 2 1/2 D visual servo.
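Before the full example, here is a minimal, self-contained sketch of how such a 2 1/2 D task can be assembled with ViSP. It is not the listing below: it assumes the current and desired center coordinates, the depths Z and Z*, and the theta-u rotation have already been estimated (for instance from a pose computed with vpPose), and it uses vpFeatureThetaU rather than the vpGenericFeature of the example so that all interaction matrices are handled by the library. The function name computeCameraVelocity is only illustrative.

#include <cmath>
#include <visp3/core/vpColVector.h>
#include <visp3/core/vpThetaUVector.h>
#include <visp3/visual_features/vpFeatureDepth.h>
#include <visp3/visual_features/vpFeaturePoint.h>
#include <visp3/visual_features/vpFeatureThetaU.h>
#include <visp3/vs/vpServo.h>

// Build the 2 1/2 D task (x, y, log(Z/Z*), theta-u) and return the camera
// velocity (vx, vy, vz, wx, wy, wz) that drives the features to their desired
// values. tu_cdRc is the rotation from the current to the desired camera frame.
vpColVector computeCameraVelocity(double x, double y, double Z,
                                  double xd, double yd, double Zd,
                                  vpThetaUVector tu_cdRc)
{
  vpServo task;
  task.setServo(vpServo::EYEINHAND_CAMERA);        // eye-in-hand control law
  task.setInteractionMatrixType(vpServo::CURRENT); // interaction matrix at the current position

  vpFeaturePoint p, pd;                            // x and y of the rectangle center
  p.buildFrom(x, y, Z);
  pd.buildFrom(xd, yd, Zd);

  vpFeatureDepth logZ;                             // s = log(Z/Z*), desired value is 0
  logZ.buildFrom(x, y, Z, log(Z / Zd));

  vpFeatureThetaU tu(vpFeatureThetaU::cdRc);       // rotation feature, desired value is the null rotation
  tu.buildFrom(tu_cdRc);

  task.addFeature(p, pd);
  task.addFeature(logZ);
  task.addFeature(tu);

  task.setLambda(0.2);                             // constant gain, for the sketch only
  return task.computeControlLaw();
}

In the example itself the same task is rebuilt at every iteration of the servo loop from the tracked image measurements, and the resulting velocity is sent to the robot with vpRobotAfma6::setVelocity() expressed in vpRobot::CAMERA_FRAME.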
#include <visp3/core/vpConfig.h>
#include <visp3/core/vpDebug.h>
#include <stdlib.h>
#include <cmath>
#include <limits>
#if (defined (VISP_HAVE_AFMA6) && defined (VISP_HAVE_DC1394))
#include <visp3/sensor/vp1394TwoGrabber.h>
#include <visp3/core/vpImage.h>
#include <visp3/core/vpImagePoint.h>
#include <visp3/io/vpImageIo.h>
#include <visp3/core/vpDisplay.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayGTK.h>
#include <visp3/core/vpMath.h>
#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/visual_features/vpFeatureLine.h>
#include <visp3/visual_features/vpFeaturePoint.h>
#include <visp3/visual_features/vpFeatureDepth.h>
#include <visp3/visual_features/vpGenericFeature.h>
#include <visp3/core/vpLine.h>
#include <visp3/vs/vpServo.h>
#include <visp3/visual_features/vpFeatureBuilder.h>
#include <visp3/vision/vpPose.h>
#include <visp3/robot/vpRobotAfma6.h>
#include <visp3/core/vpException.h>
#include <visp3/vs/vpServoDisplay.h>
#include <visp3/blob/vpDot2.h>
#include <visp3/core/vpPoint.h>
#include <visp3/core/vpCameraParameters.h>
#include <visp3/core/vpColVector.h>
#include <visp3/core/vpThetaUVector.h>
#include <visp3/visual_features/vpFeatureThetaU.h>
int
main()
{
try
{
// The image acquisition that was stripped from this listing is reconstructed
// here: the VISP_HAVE_DC1394 guard above implies a firewire camera, so a
// vp1394TwoGrabber is assumed.
vpImage<unsigned char> I ;
vp1394TwoGrabber g ;
g.open(I) ;
g.acquire(I) ;
#ifdef VISP_HAVE_X11
vpDisplayX display(I, 100, 100, "Current image") ;
#elif defined(VISP_HAVE_OPENCV)
vpDisplayOpenCV display(I, 100, 100, "Current image") ;
#elif defined(VISP_HAVE_GTK)
vpDisplayGTK display(I, 100, 100, "Current image") ;
#endif
vpDisplay::display(I) ;
vpDisplay::flush(I) ;
std::cout << std::endl ;
std::cout << "-------------------------------------------------------" << std::endl ;
std::cout << " Test program for vpServo " <<std::endl ;
std::cout << " Eye-in-hand task control, velocity computed in the camera frame" << std::endl ;
std::cout << " Simulation " << std::endl ;
std::cout << " task : servo a line " << std::endl ;
std::cout << "-------------------------------------------------------" << std::endl ;
std::cout << std::endl ;
int nbline =4 ;
int nbpoint =4 ;
vpTRACE("sets the desired position of the visual feature ") ;
// The tracked target is a 2L x 2L square (L = 5 cm). The desired feature values
// used on the real robot were stripped from this listing; plausible ones are
// used instead: the square centered in the image, seen at Z* = 0.5 m, with no
// rotation between the current and desired camera frames.
double L = 0.05 ;
double xd = 0, yd = 0 ;   // desired coordinates of the rectangle center
double Zd = 0.5 ;         // desired depth Z*
vpPoint P[4] ;            // corners of the square in the object frame, used for the pose
P[0].setWorldCoordinates(-L, -L, 0) ;
P[1].setWorldCoordinates( L, -L, 0) ;
P[2].setWorldCoordinates( L,  L, 0) ;
P[3].setWorldCoordinates(-L,  L, 0) ;
vpTRACE("Initialization of the tracking") ;
// The original initialization of the four trackers was stripped from this
// listing; four vpDot2 blob trackers, initialized by clicking on the corners of
// the square (in the same order as the P[] points above), are used here.
vpRobotAfma6 robot ;
vpCameraParameters cam ;
robot.getCameraParameters(cam, I) ;   // intrinsic parameters of the Afma6 camera
vpDot2 dot[4] ;
vpFeaturePoint point[4] ;             // current normalized coordinates of the corners
int i ;
for (i=0 ; i < nbline ; i++)
{
dot[i].initTracking(I) ;              // wait for a user click on corner i
vpFeatureBuilder::create(point[i], cam, dot[i]) ;
vpDisplay::flush(I) ;
}
// Initial pose of the square with respect to the camera, needed to get the
// current depth and the theta-u rotation.
vpPose pose ;
vpHomogeneousMatrix cMo ;
for (i=0 ; i < nbpoint ; i++)
{
P[i].set_x(point[i].get_x()) ;
P[i].set_y(point[i].get_y()) ;
pose.addPoint(P[i]) ;
}
pose.computePose(vpPose::DEMENTHON, cMo) ;
pose.computePose(vpPose::VIRTUAL_VS, cMo) ;
vpTRACE("sets the current position of the visual feature ") ;
double xc = (point[0].get_x() + point[2].get_x()) / 2 ;   // rectangle center = mean of two opposite corners
double yc = (point[0].get_y() + point[2].get_y()) / 2 ;
double Zc = cMo[2][3] ;               // current depth of the square center
vpFeaturePoint p, pd ;
p.buildFrom(xc, yc, Zc) ;
pd.buildFrom(xd, yd, Zd) ;
vpFeatureDepth logZ ;                 // s = log(Z/Z*)
logZ.buildFrom(xc, yc, Zc, log(Zc/Zd)) ;
// Rotation feature: the original example uses a vpGenericFeature with a hand-made
// interaction matrix; vpFeatureThetaU is used here instead.
vpHomogeneousMatrix cdMo(0, 0, Zd, 0, 0, 0) ;   // desired pose of the square in the camera frame
vpHomogeneousMatrix cdMc = cdMo * cMo.inverse() ;
vpThetaUVector tuv(cdMc) ;
vpFeatureThetaU tu(vpFeatureThetaU::cdRc) ;
tu.buildFrom(tuv) ;
vpTRACE("define the task") ;
vpTRACE("\t we want an eye-in-hand control law") ;
vpTRACE("\t robot is controlled in the camera frame") ;
vpServo task ;
task.setServo(vpServo::EYEINHAND_CAMERA) ;
task.setInteractionMatrixType(vpServo::CURRENT) ;
task.addFeature(p, pd) ;      // x and y of the rectangle center
task.addFeature(logZ) ;       // log(Z/Z*), desired value is 0
task.addFeature(tu) ;         // theta-u, desired value is the null rotation
std::cout << std::endl ;
vpTRACE("Display task information ") ;
task.print() ;
vpColVector v(6) ;            // camera velocity sent to the robot
robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL) ;
unsigned int iter=0 ;
double lambda_av =0.05;
double alpha = 0.05 ;
double beta =3 ;
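// Adaptive gain used in the loop below: lambda = alpha * exp(-beta * ||e||^2) + lambda_av.
// When the error e is large the gain stays close to lambda_av, and it grows up to
// alpha + lambda_av near convergence, which avoids large velocities at the beginning
// of the servo while keeping a fast convergence at the end.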
for ( ; ; )
{
std::cout << "---------------------------------------------" << iter <<std::endl ;
try {
// Acquire a new image, track the four corners and update the current features.
g.acquire(I) ;
vpDisplay::display(I) ;
for (i=0 ; i < nbline ; i++)
{
dot[i].track(I) ;                    // throws if a blob is lost; caught below
vpFeatureBuilder::create(point[i], cam, dot[i]) ;
point[i].display(cam, I, vpColor::green) ;
}
pd.display(cam, I, vpColor::red) ;   // desired position of the rectangle center
// Update the pose, then the three features.
pose.clearPoint() ;
for (i = 0; i < nbpoint; i++)
{
P[i].set_x(point[i].get_x()) ;
P[i].set_y(point[i].get_y()) ;
pose.addPoint(P[i]) ;
}
pose.computePose(vpPose::VIRTUAL_VS, cMo) ;
xc = (point[0].get_x() + point[2].get_x()) / 2 ;
yc = (point[0].get_y() + point[2].get_y()) / 2 ;
Zc = cMo[2][3] ;
p.buildFrom(xc, yc, Zc) ;
logZ.buildFrom(xc, yc, Zc, log(Zc/Zd)) ;
cdMc = cdMo * cMo.inverse() ;
tuv.buildFrom(cdMc) ;
tu.buildFrom(tuv) ;
// Adaptive gain: constant if alpha is zero, otherwise increases as the error decreases.
double gain ;
if (std::fabs(alpha) <= std::numeric_limits<double>::epsilon())
gain = lambda_av ;
else
gain = alpha * exp(-beta * task.getError().sumSquare()) + lambda_av ;
task.setLambda(gain) ;
// Compute the camera velocity and send it to the robot.
v = task.computeControlLaw() ;
robot.setVelocity(vpRobot::CAMERA_FRAME, v) ;
vpServoDisplay::display(task, cam, I) ;
vpDisplay::flush(I) ;
if (vpDisplay::getClick(I, false))   // a click in the image stops the servo
break ;
}
catch (...)
{
// Tracking lost: stop the robot before leaving.
v = 0 ;
robot.setVelocity(vpRobot::CAMERA_FRAME, v) ;
exit(1) ;
}
iter++;
}
// The loop was left on a user click: stop the robot and display the task state.
robot.stopMotion() ;
vpTRACE("Display task information ") ;
task.print() ;
}
catch (...)
{
vpERROR_TRACE("An exception was caught; the servo is aborted") ;
return EXIT_FAILURE ;
}
}
#else
int
main()
{
vpERROR_TRACE("You do not have an Afma6 robot or a firewire framegrabber connected to your computer...") ;
return EXIT_SUCCESS ;
}
#endif