Example of an eye-in-hand control law. We control here a real robot, the Viper S650 (an arm with 6 degrees of freedom). The velocity is computed in the camera frame. The inverse Jacobian that converts Cartesian velocities into joint velocities is implemented in the robot's low-level controller. The visual features are the image coordinates of 4 points. The target is made of 4 dots arranged as a 10 cm by 10 cm square. The device used to acquire images is a FireWire camera (PointGrey Flea2).
The camera extrinsic (eMc) and intrinsic parameters are retrieved from the robot's low-level driver, which is not public.
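For reference, the vpServo task used in this example implements the classical image-based control law v = -lambda * L^+ (s - s*), where L^+ is the pseudo-inverse of the interaction matrix and s - s* is the visual feature error. A minimal standalone sketch of that computation, separate from the full listing below (the function name and the 8x6 dimensions of Ls for four point features are illustrative, not part of the example):

#include <visp3/core/vpColVector.h>
#include <visp3/core/vpMatrix.h>

// Sketch of v = -lambda * Ls^+ (s - s*). For four point features, Ls would be
// the 8x6 interaction matrix stacking 2 rows per point.
vpColVector computeVelocitySketch(const vpMatrix &Ls, const vpColVector &s,
                                  const vpColVector &s_star, double lambda)
{
  vpColVector error = s - s_star;                // e = s - s*
  return -lambda * (Ls.pseudoInverse() * error); // v = -lambda * Ls^+ * e
}

In the full example below, this computation is performed internally by task.computeControlLaw().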
#include <fstream>
#include <iostream>
#include <sstream>
#include <stdio.h>
#include <stdlib.h>
#include <visp3/core/vpConfig.h>
#if defined(VISP_HAVE_VIPER650) && defined(VISP_HAVE_DC1394) && defined(VISP_HAVE_X11)
#include <visp3/blob/vpDot2.h>
#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/core/vpImage.h>
#include <visp3/core/vpIoTools.h>
#include <visp3/core/vpPixelMeterConversion.h>
#include <visp3/core/vpPoint.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/robot/vpRobotViper650.h>
#include <visp3/sensor/vp1394TwoGrabber.h>
#include <visp3/vision/vpPose.h>
#include <visp3/visual_features/vpFeatureBuilder.h>
#include <visp3/visual_features/vpFeaturePoint.h>
#include <visp3/vs/vpServo.h>
#include <visp3/vs/vpServoDisplay.h>
#define L 0.05 // to deal with a 10cm by 10cm square
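/*
  Estimate the pose cMo of the square target from the four tracked dots.
  When init is true, the pose is first initialized with the Dementhon and
  Lagrange linear methods, keeping the result with the lowest residual;
  it is then refined by a non-linear virtual visual servoing minimization.
*/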
void compute_pose(std::vector<vpPoint> &point, std::vector<vpDot2> &dot,
                  vpCameraParameters cam, vpHomogeneousMatrix &cMo, bool init)
{
  vpHomogeneousMatrix cMo_dementhon; // Pose computed with the Dementhon linear method
  vpHomogeneousMatrix cMo_lagrange;  // Pose computed with the Lagrange linear method
  vpPose pose;

  for (size_t i = 0; i < point.size(); i++) {
    double x = 0, y = 0;
    // Convert the dot center of gravity from pixels to normalized coordinates
    vpPixelMeterConversion::convertPoint(cam, dot[i].getCog(), x, y);
    point[i].set_x(x);
    point[i].set_y(y);
    pose.addPoint(point[i]);
  }

  if (init == true) {
    // Initialize with the linear method giving the lowest residual
    pose.computePose(vpPose::DEMENTHON, cMo_dementhon);
    double residual_dementhon = pose.computeResidual(cMo_dementhon);
    pose.computePose(vpPose::LAGRANGE, cMo_lagrange);
    double residual_lagrange = pose.computeResidual(cMo_lagrange);
    if (residual_lagrange < residual_dementhon)
      cMo = cMo_lagrange;
    else
      cMo = cMo_dementhon;
  }
  // Refine the pose with a non-linear virtual visual servoing minimization
  pose.computePose(vpPose::VIRTUAL_VS, cMo);
}
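// Note: vpPixelMeterConversion::convertPoint() used above maps a pixel
// position (u, v) to the normalized coordinates x = (u - u0)/px and
// y = (v - v0)/py expected by the pose estimator.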
int main()
{
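  // Each line of the log file /tmp/<username>/log.dat will contain:
  // - the 6 camera velocities sent to the robot (m/s, rad/s)
  // - the 6 measured joint velocities (rad/s)
  // - the 6 measured joint positions (rad)
  // - the feature error s - s*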
  std::string username;
  // Get the user login name
  vpIoTools::getUserName(username);

  // Create a log directory in /tmp dedicated to this user
  std::string logdirname;
  logdirname = "/tmp/" + username;

  // Test if the output path exists; if not, try to create it
  if (vpIoTools::checkDirectory(logdirname) == false) {
    try {
      vpIoTools::makeDirectory(logdirname);
    } catch (...) {
      std::cerr << std::endl << "ERROR:" << std::endl;
      std::cerr << "  Cannot create " << logdirname << std::endl;
      return EXIT_FAILURE;
    }
  }
  std::string logfilename;
  logfilename = logdirname + "/log.dat";

  // Open the log file
  std::ofstream flog(logfilename.c_str());
  try {
    // Connect to the robot and load the end-effector to camera transformation
    // (eMc) corresponding to the PTGrey Flea2 camera tool
    vpRobotViper650 robot;
    robot.init(vpViper650::TOOL_PTGREY_FLEA2_CAMERA, vpCameraParameters::perspectiveProjWithDistortion);

    vpHomogeneousMatrix eMc;
    robot.get_eMc(eMc);
    std::cout << "Camera extrinsic parameters (eMc): \n" << eMc << std::endl;

    // Open the FireWire grabber and acquire a first image
    vpImage<unsigned char> I;
    bool reset = false;
    vp1394TwoGrabber g(reset);
    g.open(I);
    g.acquire(I);

    vpDisplayX display(I, 100, 100, "Current image");
    vpDisplay::display(I);
    vpDisplay::flush(I);

    std::vector<vpDot2> dot(4);
    std::cout << "Click on the 4 dots clockwise starting from upper/left dot..." << std::endl;
    for (size_t i = 0; i < dot.size(); i++) {
      dot[i].setGraphics(true);
      dot[i].initTracking(I); // wait for a user click on each dot
    }

    // Retrieve the camera intrinsic parameters from the robot low-level driver
    vpCameraParameters cam;
    robot.getCameraParameters(cam, I);
    std::cout << "Camera intrinsic parameters: \n" << cam << std::endl;

    // Build the current visual features s = (x, y) from the tracked dots
    vpFeaturePoint p[4];
    for (size_t i = 0; i < dot.size(); i++)
      vpFeatureBuilder::create(p[i], cam, dot[i]);
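    // 3D model of the target: a 2L x 2L square whose frame origin is at the
    // center of the square, lying in the Z = 0 plane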
    std::vector<vpPoint> point(4);
    point[0].setWorldCoordinates(-L, -L, 0);
    point[1].setWorldCoordinates(L, -L, 0);
    point[2].setWorldCoordinates(L, L, 0);
    point[3].setWorldCoordinates(-L, L, 0);

    // Estimate the initial pose of the target
    vpHomogeneousMatrix cMo;
    compute_pose(point, dot, cam, cMo, true);
    std::cout << "Initial camera pose (cMo): \n" << cMo << std::endl;

    // Build the desired features s* from a desired pose cMo_d where the
    // target is centered and parallel to the image plane (the 0.5 m desired
    // depth is an assumption of this reconstruction)
    vpHomogeneousMatrix cMo_d(vpTranslationVector(0, 0, 0.5), vpRotationMatrix());
    vpFeaturePoint pd[4];
    for (int i = 0; i < 4; i++) {
      vpColVector cP, proj;
      point[i].changeFrame(cMo_d, cP); // point in the desired camera frame
      point[i].projection(cP, proj);   // perspective projection x = X/Z, y = Y/Z
      pd[i].set_x(proj[0]);
      pd[i].set_y(proj[1]);
      pd[i].set_Z(cP[2]);
    }
    // Define the eye-in-hand task: velocities computed in the camera frame,
    // interaction matrix evaluated at the current feature position
    vpServo task;
    task.setServo(vpServo::EYEINHAND_CAMERA);
    task.setInteractionMatrixType(vpServo::CURRENT);
    task.setLambda(0.3); // proportional gain (illustrative value)
    for (size_t i = 0; i < dot.size(); i++)
      task.addFeature(p[i], pd[i]); // each point should reach its desired position

    // Display task information
    task.print();

    // Switch the robot to velocity control
    robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);

    std::cout << "\nHit CTRL-C or click in the image to stop the loop...\n" << std::flush;
    for (;;) {
      // Acquire and display a new image
      g.acquire(I);
      vpDisplay::display(I);
      try {
        // Track the four dots in the new image
        for (size_t i = 0; i < dot.size(); i++) {
          dot[i].track(I);
        }
      } catch (...) {
        std::cout << "Error detected while tracking visual features.." << std::endl;
        robot.stopMotion(); // stop the robot before leaving the loop
        break;
      }
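      // Update the pose; with init = false the non-linear estimation starts
      // from the pose computed at the previous iteration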
      compute_pose(point, dot, cam, cMo, false);
      // Update each current feature: (x, y) from the tracked dot, depth Z
      // from the estimated pose
      for (size_t i = 0; i < dot.size(); i++) {
        vpFeatureBuilder::create(p[i], cam, dot[i]);
        vpColVector cP;
        point[i].changeFrame(cMo, cP);
        p[i].set_Z(cP[2]);
      }

      // Compute the camera velocity v = -lambda * L^+ (s - s*) and apply it
      vpColVector v = task.computeControlLaw();
      vpServoDisplay::display(task, cam, I);
      robot.setVelocity(vpRobot::CAMERA_FRAME, v);

      // Log the camera velocities sent to the robot (m/s, rad/s)
      flog << v[0] << " " << v[1] << " " << v[2] << " " << v[3] << " " << v[4] << " " << v[5] << " ";
      // Log the measured joint velocities (rad/s)
      vpColVector qvel;
      robot.getVelocity(vpRobot::ARTICULAR_FRAME, qvel);
      flog << qvel[0] << " " << qvel[1] << " " << qvel[2] << " " << qvel[3] << " " << qvel[4] << " " << qvel[5] << " ";
      // Log the measured joint positions (rad)
      vpColVector q;
      robot.getPosition(vpRobot::ARTICULAR_FRAME, q);
      flog << q[0] << " " << q[1] << " " << q[2] << " " << q[3] << " " << q[4] << " " << q[5] << " ";
      // Log the feature error s - s* and terminate the log line
      flog << (task.getError()).t() << std::endl;
      vpDisplay::flush(I);
      // Exit the loop on a user click in the image
      if (vpDisplay::getClick(I, false))
        break;
}
std::cout << "Display task information: " << std::endl;
    flog.close(); // Close the log file
    return EXIT_SUCCESS;
  } catch (const vpException &e) {
    flog.close(); // Close the log file
    std::cout << "Caught an exception: " << e.getMessage() << std::endl;
    return EXIT_FAILURE;
  }
}
#else
int main()
{
std::cout << "You do not have an Viper 650 robot connected to your computer..." << std::endl;
return EXIT_SUCCESS;
}
#endif