Example of an eye-in-hand control law. We control here a real robot, the ptu-46 robot (pan-tilt head provided by Directed Perception). The velocity is computed in the articular (joint) space. The visual feature is the center of gravity of a point.
#include <visp3/core/vpConfig.h>
#include <visp3/core/vpDebug.h>
#if !defined(_WIN32) && (defined(__unix__) || defined(__unix) || (defined(__APPLE__) && defined(__MACH__)))
#include <unistd.h>
#endif
#include <signal.h>
#if defined(VISP_HAVE_PTU46) && defined(VISP_HAVE_DC1394) && defined(VISP_HAVE_THREADS)
#include <mutex>
#include <visp3/core/vpDisplay.h>
#include <visp3/core/vpImage.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/sensor/vp1394TwoGrabber.h>
#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/core/vpMath.h>
#include <visp3/core/vpPoint.h>
#include <visp3/visual_features/vpFeatureBuilder.h>
#include <visp3/visual_features/vpFeaturePoint.h>
#include <visp3/vs/vpServo.h>
#include <visp3/robot/vpRobotPtu46.h>
#include <visp3/core/vpException.h>
#include <visp3/vs/vpServoDisplay.h>
#include <visp3/blob/vpDot2.h>
std::mutex mutexEndLoop;
// SIGINT (Ctrl-C) handler: releases mutexEndLoop, which main() locked before
// entering its servo loop; the loop spins on try-locking that mutex and
// terminates once this handler releases it.
// NOTE(review): std::mutex::unlock() is not async-signal-safe, and unlocking a
// mutex from a context that did not lock it is undefined behaviour — a
// volatile sig_atomic_t / std::atomic<bool> flag would be the safe idiom.
// Confirm before relying on this pattern outside of an example program.
void signalCtrC(int signumber)
{
// Parameter is required by the signal() handler signature but unused here.
(void)(signumber);
mutexEndLoop.unlock();
// Brief pause (~10 ms) — presumably to let the main loop observe the release
// before the trace is printed; TODO confirm the intent.
usleep(1000 * 10);
vpTRACE("Ctrl-C pressed...");
}
int main()
{
#ifdef ENABLE_VISP_NAMESPACE
#endif
std::cout << std::endl;
std::cout << "-------------------------------------------------------" << std::endl;
std::cout << " Test program for vpServo " << std::endl;
std::cout << " Eye-in-hand task control, velocity computed in the camera frame" << std::endl;
std::cout << " Simulation " << std::endl;
std::cout << " task : servo a point " << std::endl;
std::cout << "-------------------------------------------------------" << std::endl;
std::cout << std::endl;
try {
mutexEndLoop.lock();
signal(SIGINT, &signalCtrC);
{
q = 0;
}
try {
}
catch (...) {
vpERROR_TRACE(" Error caught");
return EXIT_FAILURE;
}
vpDisplayX display(I, 100, 100, "testDisplayX.cpp ");
vpTRACE(" ");
try {
}
catch (...) {
vpERROR_TRACE(" Error caught");
return EXIT_FAILURE;
}
try {
vpERROR_TRACE("start dot.initTracking(I) ");
vpDEBUG_TRACE(25, "Click!");
vpERROR_TRACE("after dot.initTracking(I) ");
}
catch (...) {
vpERROR_TRACE(" Error caught ");
return EXIT_FAILURE;
}
vpTRACE("sets the current position of the visual feature ");
vpTRACE("sets the desired position of the visual feature ");
vpTRACE("define the task");
vpTRACE("\t we want an eye-in-hand control law");
vpTRACE("\t articular velocity are computed");
vpTRACE("Set the position of the end-effector frame in the camera frame");
std::cout << cVe << std::endl;
vpTRACE("Set the Jacobian (expressed in the end-effector frame)");
vpTRACE("\t we want to see a point on a point..");
std::cout << std::endl;
vpTRACE("\t set the gain");
vpTRACE("Display task information ");
unsigned int iter = 0;
vpTRACE("\t loop");
while (0 != mutexEndLoop.trylock()) {
std::cout << "---------------------------------------------" << iter << std::endl;
vpTRACE(
"\t\t || s - s* || = %f ", (task.
getError()).sumSquare());
}
vpTRACE("Display task information ");
}
std::cout << "Sorry PtU46 not available. Got exception: " << e << std::endl;
return EXIT_FAILURE
}
return EXIT_SUCCESS;
}
#else
// Fallback entry point: this build of ViSP lacks PTU-46, dc1394 or thread
// support, so the servoing example cannot run.
int main()
{
  std::cout << "You do not have an PTU46 PT robot connected to your computer..." << std::endl;
  return 0;
}
#endif
Class for firewire ieee1394 video devices using libdc1394-2.x api.
void acquire(vpImage< unsigned char > &I)
void open(vpImage< unsigned char > &I)
Generic class defining intrinsic camera parameters.
Implementation of column vector and the associated operations.
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void flush(const vpImage< unsigned char > &I)
This tracker is meant to track a blob (connected pixels with the same gray level) on a vpImage.
void track(const vpImage< unsigned char > &I, bool canMakeTheWindowGrow=true)
void setCog(const vpImagePoint &ip)
error that can be emitted by ViSP classes.
static void create(vpFeaturePoint &s, const vpCameraParameters &cam, const vpImagePoint &t)
Class that defines a 2D point visual feature which is composed by two parameters that are the cartes...
vpFeaturePoint & buildFrom(const double &x, const double &y, const double &Z)
Implementation of an homogeneous matrix and operations on such kind of matrices.
Class that defines a 2D point in an image. This class is useful for image processing and stores only ...
Implementation of a matrix and operations on matrices.
void get_eJe(vpMatrix &eJe) VP_OVERRIDE
void setVelocity(const vpRobot::vpControlFrameType frame, const vpColVector &vel) VP_OVERRIDE
Interface for the Directed Perception PTU-46 pan-tilt head.
@ STATE_POSITION_CONTROL
Initialize the position controller.
@ STATE_VELOCITY_CONTROL
Initialize the velocity controller.
virtual vpRobotStateType setRobotState(const vpRobot::vpRobotStateType newState)
static void display(const vpServo &s, const vpCameraParameters &cam, const vpImage< unsigned char > &I, vpColor currentColor=vpColor::green, vpColor desiredColor=vpColor::red, unsigned int thickness=1)
void setInteractionMatrixType(const vpServoIteractionMatrixType &interactionMatrixType, const vpServoInversionType &interactionMatrixInversion=PSEUDO_INVERSE)
void addFeature(vpBasicFeature &s_cur, vpBasicFeature &s_star, unsigned int select=vpBasicFeature::FEATURE_ALL)
void set_cVe(const vpVelocityTwistMatrix &cVe_)
void print(const vpServo::vpServoPrintType display_level=ALL, std::ostream &os=std::cout)
void set_eJe(const vpMatrix &eJe_)
void setServo(const vpServoType &servo_type)
vpColVector getError() const
vpColVector computeControlLaw()
vpVelocityTwistMatrix get_cVe() const