Example of eye-in-hand control law. We control here a real robot, the Afma6 robot (Cartesian robot with 6 degrees of freedom). The velocity is computed in the camera frame. The visual features are the two lines corresponding to the edges of a cylinder.
This example illustrates, on the one hand, a classical visual servoing on a cylinder and, on the other hand, the behaviour of the robot when a secondary task is added to the primary one.
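The core of the scheme is summarized by the following minimal sketch (assuming the standard vpServo, vpColVector and vpRobotAfma6 API; p, pd, robot and the 0.02 m/s value are placeholders for objects and tunings built in the full listing below): the two line features define the primary task, and the desired secondary motion e1 is projected onto the null space of the task Jacobian so that it does not disturb the regulation of the lines.

// Minimal sketch of the control scheme, not the full program (see the complete listing below)
vpServo task;
task.setServo(vpServo::EYEINHAND_CAMERA);                        // eye-in-hand control law
task.setInteractionMatrixType(vpServo::DESIRED, vpServo::PSEUDO_INVERSE);
task.addFeature(p[0], pd[0]);                                    // current / desired line 1
task.addFeature(p[1], pd[1]);                                    // current / desired line 2

vpColVector v = task.computeControlLaw();                        // primary task velocity
vpColVector e1(6);                                               // desired secondary camera motion
e1[0] = 0.02;                                                    // e.g. 2 cm/s along the camera x axis (placeholder value)
v += task.secondaryTask(e1);                                     // projection onto the null space of the primary task
robot.setVelocity(vpRobot::CAMERA_FRAME, v);                     // velocity expressed in the camera frame
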
#include <cmath>
#include <iostream>
#include <limits>
#include <stdlib.h>
#include <visp3/core/vpConfig.h>
#include <visp3/core/vpDebug.h>
#if (defined(VISP_HAVE_AFMA6) && defined(VISP_HAVE_DC1394))
#include <visp3/core/vpDisplay.h>
#include <visp3/core/vpImage.h>
#include <visp3/gui/vpDisplayGTK.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/io/vpImageIo.h>
#include <visp3/sensor/vp1394TwoGrabber.h>
#include <visp3/core/vpCameraParameters.h>
#include <visp3/core/vpColVector.h>
#include <visp3/core/vpColor.h>
#include <visp3/core/vpCylinder.h>
#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/core/vpMath.h>
#include <visp3/me/vpMe.h>
#include <visp3/me/vpMeLine.h>
#include <visp3/visual_features/vpFeatureBuilder.h>
#include <visp3/visual_features/vpFeatureLine.h>
#include <visp3/vs/vpServo.h>
#include <visp3/robot/vpRobotAfma6.h>
#include <visp3/core/vpException.h>
#include <visp3/vs/vpServoDisplay.h>
int main()
{
try {
// Acquire images from the firewire camera mounted on the robot end-effector
// (640x480 MONO8 at 60 fps, the settings used in the ViSP Afma6 examples)
vpImage<unsigned char> I;
vp1394TwoGrabber g;
g.setVideoMode(vp1394TwoGrabber::vpVIDEO_MODE_640x480_MONO8);
g.setFramerate(vp1394TwoGrabber::vpFRAMERATE_60);
g.open(I);
g.acquire(I);

// Create a window to display the images; the GUI is selected at compile time
#ifdef VISP_HAVE_X11
vpDisplayX display(I, 100, 100, "Current image");
#elif defined(VISP_HAVE_OPENCV)
vpDisplayOpenCV display(I, 100, 100, "Current image");
#elif defined(VISP_HAVE_GTK)
vpDisplayGTK display(I, 100, 100, "Current image");
#endif
vpDisplay::display(I);
vpDisplay::flush(I);
std::cout << std::endl;
std::cout << "-------------------------------------------------------" << std::endl;
std::cout << " Test program for vpServo " << std::endl;
std::cout << " Eye-in-hand task control, velocity computed in the camera frame" << std::endl;
std::cout << " Simulation " << std::endl;
std::cout << " task : servo a point " << std::endl;
std::cout << "-------------------------------------------------------" << std::endl;
std::cout << std::endl;
int i;
int nbline = 2;

// Moving-edges trackers, one per edge of the cylinder
vpMeLine line[2];
vpMe me;
me.setRange(10);          // tuning values; adapt them to your images
me.setPointsToTrack(100);
me.setSampleStep(10);

// The user clicks two points on each edge of the cylinder to initialize the trackers
for (i = 0; i < nbline; i++) {
line[i].setDisplay(vpMeSite::RANGE_RESULT);
line[i].setMe(&me);
line[i].initTracking(I);
line[i].track(I);
}
// Connection to the robot, used here to retrieve the camera intrinsic parameters
vpRobotAfma6 robot;
vpCameraParameters cam;
robot.getCameraParameters(cam, I);

vpTRACE("sets the current position of the visual feature ");
vpFeatureLine p[2];
for (i = 0; i < nbline; i++)
vpFeatureBuilder::create(p[i], cam, line[i]);

vpTRACE("sets the desired position of the visual feature ");
// Desired features: the two limb lines of a cylinder model projected at the desired
// camera pose (the radius and pose below are assumed values; adapt them to your setup)
vpCylinder cyld(0, 1, 0, 0, 0, 0, 0.04);
vpHomogeneousMatrix cMo(0, 0, 0.5, 0, 0, 0);
cyld.project(cMo);

vpFeatureLine pd[2];
vpFeatureBuilder::create(pd[0], cyld, vpCylinder::line1);
vpFeatureBuilder::create(pd[1], cyld, vpCylinder::line2);

// Depending on the (rho, theta) conventions used by vpMeLine, the sign of rho and the
// value of theta of the desired features may need to be adjusted, for instance:
pd[0].setRhoTheta(-fabs(pd[0].getRho()), 0);
pd[1].setRhoTheta(fabs(pd[1].getRho()), M_PI);
vpTRACE("define the task");
vpTRACE("\t we want an eye-in-hand control law");
vpTRACE("\t robot is controlled in the camera frame");
vpServo task;
task.setServo(vpServo::EYEINHAND_CAMERA);
task.setInteractionMatrixType(vpServo::DESIRED, vpServo::PSEUDO_INVERSE);

vpTRACE("\t we want to see a line on a line..");
std::cout << std::endl;
for (i = 0; i < nbline; i++)
task.addFeature(p[i], pd[i]);

vpTRACE("Display task information ");
task.print();

// Switch the robot to velocity control
robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);

vpColVector v;
unsigned int iter = 0;
double lambda_av = 0.05;
double alpha = 0.02;
double beta = 3;
double erreur = 1;
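// Primary task loop: servo on the two edges until the visual error is small enough.
// The gain is adapted to the error: close to lambda_av when the error is large,
// close to lambda_av + alpha when it becomes small (alpha and beta tune this law).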
while (erreur > 0.00001) {
std::cout << "---------------------------------------------" << iter << std::endl;
try {
g.acquire(I);
vpDisplay::display(I);

// Track the two edges of the cylinder and update the current features
for (i = 0; i < nbline; i++) {
line[i].track(I);
line[i].display(I, vpColor::red);

vpFeatureBuilder::create(p[i], cam, line[i]);

p[i].display(cam, I, vpColor::red);
pd[i].display(cam, I, vpColor::green);
}
vpDisplay::flush(I);

// Adaptive gain
double gain;
{
if (std::fabs(alpha) <= std::numeric_limits<double>::epsilon())
gain = lambda_av;
else {
gain = alpha * exp(-beta * (task.getError()).sumSquare()) + lambda_av;
}
}
task.setLambda(gain);

// Compute the camera velocity and send it to the robot
v = task.computeControlLaw();
if (iter == 0) {
// Wait for a click in the image before the robot starts moving
vpDisplay::getClick(I);
}
robot.setVelocity(vpRobot::CAMERA_FRAME, v);

erreur = (task.getError()).sumSquare();
} catch (...) {
// In case of failure, stop the robot before leaving
v = 0;
robot.setVelocity(vpRobot::CAMERA_FRAME, v);
robot.stopMotion();
exit(1);
}
iter++;
}
vpTRACE("Secondary task: translate the camera while maintaining the visual task");
vpColVector e1(6);
e1 = 0;
vpColVector e2(6);
e2 = 0;
vpColVector proj_e1;
vpColVector proj_e2;
iter = 0;
double rapport = 0;
double vitesse = 0.02;
unsigned int tempo = 1200;
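// Secondary task loop: over each period of tempo = 1200 iterations the camera is asked
// to translate at vitesse (m/s) along +x, then +y, then -x, then -y of its own frame.
// Each desired motion (e1 or e2) is projected onto the null space of the primary task,
// and rapport rescales the projection so that the realized speed along the chosen axis
// equals vitesse; the visual task thus remains undisturbed.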
for (;;) {
std::cout << "---------------------------------------------" << iter << std::endl;
try {
g.acquire(I);
vpDisplay::display(I);

// Track the two edges and update the current features
for (i = 0; i < nbline; i++) {
line[i].track(I);
line[i].display(I, vpColor::red);

vpFeatureBuilder::create(p[i], cam, line[i]);

p[i].display(cam, I, vpColor::red);
pd[i].display(cam, I, vpColor::green);
}
vpDisplay::flush(I);

// Primary task velocity
v = task.computeControlLaw();
if (iter % tempo < 400 /*&& iter%tempo >= 0*/) {
e2 = 0;
e1[0] = fabs(vitesse);
proj_e1 = task.secondaryTask(e1);
rapport = vitesse / proj_e1[0];
proj_e1 *= rapport;
v += proj_e1;
if (iter == 199)
iter += 200;
}
if (iter % tempo < 600 && iter % tempo >= 400) {
e1 = 0;
e2[1] = fabs(vitesse);
proj_e2 = task.secondaryTask(e2);
rapport = vitesse / proj_e2[1];
proj_e2 *= rapport;
v += proj_e2;
}
if (iter % tempo < 1000 && iter % tempo >= 600) {
e2 = 0;
e1[0] = -fabs(vitesse);
proj_e1 = task.secondaryTask(e1);
rapport = -vitesse / proj_e1[0];
proj_e1 *= rapport;
v += proj_e1;
}
if (iter % tempo < 1200 && iter % tempo >= 1000) {
e1 = 0;
e2[1] = -fabs(vitesse);
proj_e2 = task.secondaryTask(e2);
rapport = -vitesse / proj_e2[1];
proj_e2 *= rapport;
v += proj_e2;
}
robot.setVelocity(vpRobot::CAMERA_FRAME, v);

// A click in the image stops the demonstration
if (vpDisplay::getClick(I, false))
break;
} catch (...) {
// In case of failure, stop the robot before leaving
v = 0;
robot.setVelocity(vpRobot::CAMERA_FRAME, v);
robot.stopMotion();
exit(1);
}
iter++;
}
// Stop the robot and display the final task information
robot.stopMotion();
vpTRACE("Display task information ");
task.print();
return EXIT_SUCCESS;
} catch (const vpException &e) {
std::cout << "Test failed with exception: " << e << std::endl;
return EXIT_FAILURE;
}
}
#else
int main()
{
vpERROR_TRACE(
"You do not have an afma6 robot or a firewire framegrabber " "connected to your computer...");
}
#endif