Example of an eye-in-hand visual servoing control law. Here we control a real robot, the ADEPT Viper 850 (a 6 degrees of freedom arm). The velocity is computed in the camera frame. The visual feature is the center of gravity of a dot tracked in the image. A linear Kalman filter with a constant velocity state model is used to estimate the motion of the moving target so that it can be compensated in the control law.
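The camera velocity applied to the robot is the sum of two terms: the usual visual servoing term v1 = -lambda * L^+ (s - s*) returned by vpServo::computeControlLaw(), and a feedforward term v2 = -L^+ de/dt, where de/dt is the Kalman estimate of the error variation caused by the target own motion. This is what the servo loop in the code below implements.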
#include <visp3/core/vpConfig.h>
#include <visp3/core/vpDebug.h>
#include <fstream>
#include <iostream>
#include <sstream>
#include <stdio.h>
#include <stdlib.h>
#if (defined(VISP_HAVE_VIPER850) && defined(VISP_HAVE_DC1394))
#include <visp3/blob/vpDot2.h>
#include <visp3/core/vpCameraParameters.h>
#include <visp3/core/vpColVector.h>
#include <visp3/core/vpDisplay.h>
#include <visp3/core/vpException.h>
#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/core/vpImage.h>
#include <visp3/core/vpIoTools.h>
#include <visp3/core/vpLinearKalmanFilterInstantiation.h>
#include <visp3/core/vpMath.h>
#include <visp3/core/vpMatrix.h>
#include <visp3/core/vpPoint.h>
#include <visp3/core/vpTime.h>
#include <visp3/gui/vpDisplayGTK.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/io/vpImageIo.h>
#include <visp3/robot/vpRobotViper850.h>
#include <visp3/sensor/vp1394TwoGrabber.h>
#include <visp3/visual_features/vpFeatureBuilder.h>
#include <visp3/visual_features/vpFeaturePoint.h>
#include <visp3/vs/vpAdaptiveGain.h>
#include <visp3/vs/vpServo.h>
#include <visp3/vs/vpServoDisplay.h>
int main()
{
// Log file creation in /tmp/$USERNAME/log.dat. Each line contains the 6
// computed camera velocities, the 6 measured joint velocities, the 6
// measured joint positions and the 2 components of the error s - s*.
std::string username;
// Get the user login name to build the log directory name
vpIoTools::getUserName(username);

std::string logdirname;
logdirname = "/tmp/" + username;

// If the output directory does not exist, try to create it
if (vpIoTools::checkDirectory(logdirname) == false) {
try {
vpIoTools::makeDirectory(logdirname);
} catch (...) {
std::cerr << std::endl << "ERROR:" << std::endl;
std::cerr << " Cannot create " << logdirname << std::endl;
exit(-1);
}
}
std::string logfilename;
logfilename = logdirname + "/log.dat";
std::ofstream flog(logfilename.c_str());
try {
// Kalman filter used to estimate de/dt, the error variation due to the
// target own motion: one filter per signal (x and y coordinates of the dot
// center of gravity), with a constant velocity state model
unsigned int nsignal = 2; // Number of signals to filter
double rho = 0.3;         // Correlation of the colored measurement noise
vpColVector sigma_state;
vpColVector sigma_measure(nsignal);
unsigned int state_size = 0; // Size of the state vector, given by the model

vpLinearKalmanFilterInstantiation kalman;
kalman.setStateModel(vpLinearKalmanFilterInstantiation::stateConstVelWithColoredNoise_MeasureVel);
state_size = kalman.getStateSize();

sigma_state.resize(state_size * nsignal);
sigma_state = 0.00001; // Same state variance for all the signals
sigma_measure = 0.05;  // Same measure variance for all the signals
double dummy = 0;      // Unused dt parameter for this state model
kalman.initFilter(nsignal, sigma_state, sigma_measure, rho, dummy);

bool reset = false; // Indicates if the filter has to be reinitialized
// Connect to the Viper 850 robot; the camera is mounted on its end-effector
vpRobotViper850 robot;

// Create the firewire grabber and open the connection with the camera
vpImage<unsigned char> I;
vp1394TwoGrabber g;
g.setVideoMode(vp1394TwoGrabber::vpVIDEO_MODE_640x480_MONO8);
g.setFramerate(vp1394TwoGrabber::vpFRAMERATE_60);
g.open(I);
g.acquire(I);

// Deduce the nominal loop time (in seconds) from the camera framerate
vp1394TwoGrabber::vp1394TwoFramerateType fps;
g.getFramerate(fps);
double Tloop = 1. / 80.f;
switch (fps) {
case vp1394TwoGrabber::vpFRAMERATE_15:
Tloop = 1.f / 15.f;
break;
case vp1394TwoGrabber::vpFRAMERATE_30:
Tloop = 1.f / 30.f;
break;
case vp1394TwoGrabber::vpFRAMERATE_60:
Tloop = 1.f / 60.f;
break;
case vp1394TwoGrabber::vpFRAMERATE_120:
Tloop = 1.f / 120.f;
break;
default:
break;
}

// Open a display window attached to the acquired image
#ifdef VISP_HAVE_X11
vpDisplayX display(I, 100, 100, "Current image");
#elif defined(VISP_HAVE_OPENCV)
vpDisplayOpenCV display(I, 100, 100, "Current image");
#elif defined(VISP_HAVE_GTK)
vpDisplayGTK display(I, 100, 100, "Current image");
#endif
// Acquire a few images so that the grabber pipeline is up and running,
// then show the last one
for (int i = 0; i < 10; i++)
g.acquire(I);
vpDisplay::display(I);
vpDisplay::flush(I);

// Initialize the tracker: the user has to click on the dot to track
std::cout << "Click on a dot..." << std::endl;
vpDot2 dot;
dot.initTracking(I);
vpDisplay::displayCross(I, dot.getCog(), 10, vpColor::blue);
vpDisplay::flush(I);

// Camera intrinsic parameters associated to the image
vpCameraParameters cam;
robot.getCameraParameters(cam, I);

// Current visual feature: x, y coordinates of the dot center of gravity
vpFeaturePoint p;
vpFeatureBuilder::create(p, cam, dot);

// Desired visual feature: the dot at the image center
vpFeaturePoint pd;
pd.buildFrom(0, 0, 1);

// Eye-in-hand task with the velocity computed in the camera frame
vpServo task;
task.setServo(vpServo::EYEINHAND_CAMERA);
task.setInteractionMatrixType(vpServo::DESIRED);

// Adaptive proportional gain of the control law (example values)
vpAdaptiveGain lambda(1.5, 0.2, 30);
task.addFeature(p, pd);
task.setLambda(lambda);
task.print();
std::cout << "\nHit CTRL-C to stop the loop...\n" << std::flush;
// The robot is now controlled in velocity
robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);

// Velocities and error terms used in the servo loop
vpColVector v(6), v1(6), v2(6);        // Total, servo and feedforward camera velocities
vpColVector err(2), err_1(2);          // Current and previous error s - s*
vpColVector dedt_mes(2), dedt_filt(2); // Measured and filtered de/dt
vpColVector qvel, q;                   // Measured joint velocities and positions

int iter = 0;
double t_0, t_1, Tv;
t_1 = vpTime::measureTimeMs(); // Date of the previous iteration
dc1394video_frame_t *frame = NULL;
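// Servo loop: at each iteration a new image is acquired, the dot is tracked,
// the servo and feedforward velocities are computed and their sum, expressed
// in the camera frame, is sent to the robot.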
for (;;) {
try {
t_0 = vpTime::measureTimeMs(); // Date of the current iteration

// Update the measured loop time (in seconds)
Tv = (double)(t_0 - t_1) / 1000.0;
t_1 = t_0;

// Camera velocity produced by the robot motion, needed to isolate the part
// of the error variation that is due to the target own motion
vpColVector vm(6);
robot.getVelocity(vpRobot::CAMERA_FRAME, vm);

// Acquire a new image, track the dot and update the current feature
frame = g.dequeue(I);
vpDisplay::display(I);
dot.track(I);
vpDisplay::displayCross(I, dot.getCog(), 10, vpColor::green);
vpFeatureBuilder::create(p, cam, dot);

// Compute the visual servoing term v1 = -lambda * L^+ (s - s*)
v1 = task.computeControlLaw();
err = task.getError();

// Task Jacobian (interaction matrix) used in the feedforward computation
vpMatrix J1 = task.getTaskJacobian();
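// Measured error variation due to the target own motion:
// de/dt = (e(t) - e(t-dt)) / Tv - J1 * vm, where J1 * vm is the error
// variation produced by the camera motion itself.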
if (iter == 0) {
err_1 = 0;
dedt_mes = 0;
} else {
dedt_mes = (err - err_1) / Tv - J1 * vm;
err_1 = err;
}
if (iter < 2)
dedt_mes = 0;
// Kalman filtering of de/dt; the estimated velocity of each signal is the
// first component of its state vector
kalman.filter(dedt_mes);
for (unsigned int i = 0; i < nsignal; i++) {
dedt_filt[i] = kalman.Xest[i * state_size];
}
if (iter < 2)
dedt_filt = 0;
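// Feedforward term: project the filtered de/dt through the pseudo-inverse of
// the task Jacobian to obtain the camera velocity that compensates for the
// target motion.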
vpMatrix J1p = task.getTaskJacobianPseudoInverse();
v2 = -J1p * dedt_filt;
v = v1 + v2;

// Send the resulting velocity, expressed in the camera frame, to the robot
robot.setVelocity(vpRobot::CAMERA_FRAME, v);

// Display the current and desired features, then update the display
vpServoDisplay::display(task, cam, I);
vpDisplay::flush(I);

// Release the ring buffer used by the grabber for this image
g.enqueue(frame);

iter++;
} catch (...) {
// The tracking may fail, for example if the dot leaves the image; in that
// case stop the robot by sending a zero velocity before leaving
std::cout << "Tracking failed... Stop the robot." << std::endl;
v = 0;
robot.setVelocity(vpRobot::CAMERA_FRAME, v);
flog.close();
return 0;
}
// Get the measured joint velocities and positions for logging
robot.getVelocity(vpRobot::ARTICULAR_FRAME, qvel);
robot.getPosition(vpRobot::ARTICULAR_FRAME, q);

// Log: applied camera velocity, joint velocities, joint positions, error
flog << v[0] << " " << v[1] << " " << v[2] << " " << v[3] << " " << v[4] << " " << v[5] << " ";
flog << qvel[0] << " " << qvel[1] << " " << qvel[2] << " " << qvel[3] << " " << qvel[4] << " " << qvel[5] << " ";
flog << q[0] << " " << q[1] << " " << q[2] << " " << q[3] << " " << q[4] << " " << q[5] << " ";
flog << (task.getError()).t() << std::endl;
}
flog.close();
task.print();
return EXIT_SUCCESS;
} catch (const vpException &e) {
flog.close(); // Close the log file
std::cout << "Catch an exception: " << e.getMessage() << std::endl;
return EXIT_FAILURE;
}
}
#else
int main()
{
std::cout << "You do not have an Viper 850 robot connected to your computer..." << std::endl;
return EXIT_SUCCESS;
}
#endif