ViSP  2.9.0
servoMomentPoints.cpp

Example of moment-based visual servoing with points

/****************************************************************************
*
* $Id: servoMomentPoints.cpp 4673 2014-02-17 09:06:49Z fspindle $
*
* This file is part of the ViSP software.
* Copyright (C) 2005 - 2014 by INRIA. All rights reserved.
*
* This software is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* ("GPL") version 2 as published by the Free Software Foundation.
* See the file LICENSE.txt at the root directory of this source
* distribution for additional information about the GNU GPL.
*
* For using ViSP with software that can not be combined with the GNU
* GPL, please contact INRIA about acquiring a ViSP Professional
* Edition License.
*
* See http://www.irisa.fr/lagadic/visp/visp.html for more information.
*
* This software was developed at:
* INRIA Rennes - Bretagne Atlantique
* Campus Universitaire de Beaulieu
* 35042 Rennes Cedex
* France
* http://www.irisa.fr/lagadic
*
* If you have questions regarding the use of this file, please contact
* INRIA at visp@inria.fr
*
* This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
* WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
*
*
* Description:
* Example of visual servoing with moments using discrete points as object
* container
*
* Authors:
* Filip Novotny
*
*****************************************************************************/
#include <iostream>
#include <limits>
#include <cmath>
#include <visp/vpDebug.h>
#include <visp/vpConfig.h>
#include <visp/vpHomogeneousMatrix.h>
#include <visp/vpMomentObject.h>
#include <visp/vpMomentDatabase.h>
#include <visp/vpMomentCommon.h>
#include <visp/vpFeatureMomentCommon.h>
#include <visp/vpDisplayX.h>
#include <visp/vpDisplayOpenCV.h>
#include <visp/vpDisplayGTK.h>
#include <visp/vpDisplayGDI.h>
#include <visp/vpDisplayD3D.h>
#include <visp/vpCameraParameters.h>
#include <visp/vpIoTools.h>
#include <visp/vpMath.h>
#include <visp/vpServo.h>
#include <visp/vpFeatureBuilder.h>
#include <visp/vpFeaturePoint.h>
#include <visp/vpImageSimulator.h>
#include <visp/vpSimulatorAfma6.h>
#include <visp/vpPlane.h>
#if !defined(_WIN32) && !defined(VISP_HAVE_PTHREAD)
// Robot simulator used in this example is not available
int main()
{
std::cout << "Can't run this example since vpSimulatorAfma6 capability is not available." << std::endl;
std::cout << "You should install pthread third-party library." << std::endl;
}
// No display available
#elif !defined(VISP_HAVE_X11) && !defined(VISP_HAVE_OPENCV) && !defined(VISP_HAVE_GDI) && !defined(VISP_HAVE_D3D9) && !defined(VISP_HAVE_GTK)
int main()
{
std::cout << "Can't run this example since no display capability is available." << std::endl;
std::cout << "You should install one of the following third-party library: X11, OpenCV, GDI, GTK." << std::endl;
}
#else
//setup robot parameters
void paramRobot();
//update moment objects and interface
void refreshScene(vpMomentObject &obj);
//initialize scene in the interface
void initScene();
//initialize the moment features
void initFeatures();
//initialize the simulation with the start and desired poses
void init(vpHomogeneousMatrix& cMo, vpHomogeneousMatrix& cdMo);
void execute(unsigned int nbIter); //launch the simulation
void setInteractionMatrixType(vpServo::vpServoIteractionMatrixType type);
double error();
void planeToABC(vpPlane& pl, double& A,double& B, double& C);
void removeJointLimits(vpSimulatorAfma6& robot);
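// Overall flow: main() defines the initial and desired camera poses, init()
// configures the simulated Afma6 robot, builds the point-based scene and the
// moment features, and execute() then runs the visual servoing loop for the
// requested number of iterations, storing the squared feature error in _error.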
int main()
{
try {
//initial pose
vpHomogeneousMatrix cMo(0.05,0.1,1.5,vpMath::rad(30),vpMath::rad(20),-vpMath::rad(15));
//desired pose (the exact values are assumed here: camera facing the object 1 m away, no rotation)
vpHomogeneousMatrix cdMo(0.0,0.0,1.0,vpMath::rad(0),vpMath::rad(0),vpMath::rad(0));
//init and run the simulation
init(cMo,cdMo);
execute(1500);
return 0;
}
catch(vpException &e) {
std::cout << "Catch an exception: " << e << std::endl;
return 1;
}
}
//init the right display
#if defined VISP_HAVE_X11
vpDisplayX displayInt;
#elif defined VISP_HAVE_OPENCV
vpDisplayOpenCV displayInt;
#elif defined VISP_HAVE_GDI
vpDisplayGDI displayInt;
#elif defined VISP_HAVE_D3D9
vpDisplayD3D displayInt;
#elif defined VISP_HAVE_GTK
vpDisplayGTK displayInt;
#endif
//start and destination positioning matrices
vpHomogeneousMatrix cMo;
vpHomogeneousMatrix cdMo;
vpSimulatorAfma6 robot(false);//robot used in this simulation
vpImage<vpRGBa> Iint(480,640, 255);//internal image used for interface display
vpServo::vpServoIteractionMatrixType interaction_type; //current or desired
vpServo task; //servoing task
vpCameraParameters cam;//robot camera parameters
double _error; //current error
vpImageSimulator imsim;//image simulator used to simulate the perspective-projection camera
//moment sets and their corresponding features
vpMomentCommon *moments;
vpMomentCommon *momentsDes;
vpFeatureMomentCommon *featureMoments;
vpFeatureMomentCommon *featureMomentsDes;
//source and destination objects for moment manipulation (moments up to order 6)
vpMomentObject src(6); //object built at the initial pose
vpMomentObject dst(6); //object built at the desired pose
vpMomentObject obj(6); //object rebuilt at the current pose at every iteration
void initScene(){
std::vector<vpPoint> src_pts;
std::vector<vpPoint> dst_pts;
double x[8] = { 1,3, 4,-1 ,-3,-2,-1,1};
double y[8] = { 0,1, 4, 4, -2,-2, 1,0};
int nbpoints = 8;
for (int i = 0 ; i < nbpoints ; i++){
vpPoint p;
p.setWorldCoordinates(x[i]/20,y[i]/20,0.0);
p.track(cMo) ;
src_pts.push_back(p);
}
src.setType(vpMomentObject::DISCRETE); //the object is a discrete set of points
src.fromVector(src_pts);
for (int i = 0 ; i < nbpoints ; i++){
vpPoint p;
p.setWorldCoordinates(x[i]/20,y[i]/20,0.0);
p.track(cdMo) ;
dst_pts.push_back(p);
}
dst.setType(vpMomentObject::DISCRETE);
dst.fromVector(dst_pts);
}
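// The four moment features stacked in initFeatures() below follow the usual
// moment-based visual servoing decomposition (this mapping is indicative, not
// part of the original comments): the normalized gravity center (xn, yn)
// mainly controls the translations along x and y, the normalized area an the
// translation along z (depth), two C-type invariants the rotations around x
// and y, and the orientation alpha the rotation around the optical axis,
// covering the 6 degrees of freedom of the camera.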
void initFeatures(){
//A,B,C parameters of source and destination plane
double A; double B; double C;
double Ad; double Bd; double Cd;
//Initializing A,B,C values from a regular plane (vpPlane uses the aX+bY+cZ+D=0 convention)
vpPlane pl;
pl.setABCD(0,0,1.0,0);
pl.changeFrame(cMo);
planeToABC(pl,A,B,C);
pl.setABCD(0,0,1.0,0);
pl.changeFrame(cdMo);
planeToABC(pl,Ad,Bd,Cd);
//extract the desired pose (actually we only care about the desired depth Zdst)
vpTranslationVector vec;
cdMo.extract(vec);
//no need to be specific: vpMomentCommon automatically loads Xg,Yg,An,Ci,Cj,Alpha moments.
//The reference surface, mu3 moments, alpha and depth are taken from the desired object.
moments = new vpMomentCommon(vpMomentCommon::getSurface(dst),vpMomentCommon::getMu3(dst),vpMomentCommon::getAlpha(dst),vec[2]);
momentsDes = new vpMomentCommon(vpMomentCommon::getSurface(dst),vpMomentCommon::getMu3(dst),vpMomentCommon::getAlpha(dst),vec[2]);
//same thing with the common features
featureMoments = new vpFeatureMomentCommon(*moments);
featureMomentsDes = new vpFeatureMomentCommon(*momentsDes);
moments->updateAll(src);
momentsDes->updateAll(dst);
featureMoments->updateAll(A,B,C);
featureMomentsDes->updateAll(Ad,Bd,Cd);
//setup the servo task: eye-in-hand, velocity computed in the camera frame
task.setServo(vpServo::EYEINHAND_CAMERA) ;
//setup the interaction type (current or desired)
task.setInteractionMatrixType(interaction_type) ;
task.addFeature(featureMoments->getFeatureGravityNormalized(),featureMomentsDes->getFeatureGravityNormalized());
task.addFeature(featureMoments->getFeatureAn(),featureMomentsDes->getFeatureAn());
task.addFeature(featureMoments->getFeatureCInvariant(),featureMomentsDes->getFeatureCInvariant(),(1 << 3) | (1 << 5)); //keep only two of the C-type invariants (bits 3 and 5 of the selector)
task.addFeature(featureMoments->getFeatureAlpha(),featureMomentsDes->getFeatureAlpha());
task.setLambda(1.) ;
}
void refreshScene(vpMomentObject &obj){
//double x[8] = { 0.05,0.15, 0.2,-0.05 ,-0.15,-0.1,-0.05,0.05};
//double y[8] = { 0,0.05, 0.2, 0.2, -0.1,-0.1, 0.05,0};
double x[8] = { 1,3, 4,-1 ,-3,-2,-1,1};
double y[8] = { 0,1, 4, 4, -2,-2, 1,0};
int nbpoints = 8;
std::vector<vpPoint> cur_pts;
for (int i = 0 ; i < nbpoints ; i++){
vpPoint p;
p.setWorldCoordinates(x[i]/20,y[i]/20,0.0);
p.track(cMo) ;
cur_pts.push_back(p);
}
obj.fromVector(cur_pts);
}
void init(vpHomogeneousMatrix& _cMo, vpHomogeneousMatrix& _cdMo)
{
cMo = _cMo; //init source matrix
cdMo = _cdMo; //init destination matrix
interaction_type = vpServo::CURRENT; //use interaction matrix for current position
displayInt.init(Iint,700,0, "Visual servoing with moments") ;
paramRobot(); //set up robot parameters
initScene(); //initialize graphical scene (for interface)
initFeatures();//initialize moment features
}
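// execute() runs the simulation loop: at each iteration the current camera
// pose cMo is read back from the robot simulator, the plane and the point
// object are rebuilt at that pose, the moments and then the moment features
// are updated (in that order), and the resulting velocity is integrated over
// the 10 ms sampling step as a small camera displacement.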
void execute(unsigned int nbIter){
//the object obj holds moments up to order 6 (see its global definition above)
//set the object type (discrete point set, as opposed to a dense/polygonal object)
obj.setType(vpMomentObject::DISCRETE);
vpTRACE("Display task information " ) ;
task.print() ;
robot.getInternalView(Iint);
unsigned int iter=0;
while(iter++<nbIter ){
//get the cMo
cMo = robot.get_cMo();
//setup the plane in A,B,C style
vpPlane pl;
double A,B,C;
pl.setABCD(0,0,1.0,0);
pl.changeFrame(cMo);
planeToABC(pl,A,B,C);
//track points, draw points and add refresh our object
refreshScene(obj);
//this is the most important thing to do: update our moments
moments->updateAll(obj);
//and update our features. Do it in that order. Features need to use the information computed by moments
featureMoments->updateAll(A,B,C);
robot.getInternalView(Iint);
//show the simulated camera view
vpDisplay::display(Iint);
vpDisplay::flush(Iint);
//compute the velocity control law from the current and desired features
vpColVector v = task.computeControlLaw() ;
//pilot robot using position control. The displacement is t*v with t=10ms step
//(the t parameter is implicitly introduced by the robot's constant sampling time)
robot.setPosition(vpRobot::CAMERA_FRAME,0.01*v);
_error = ( task.getError() ).sumSquare();
}
task.kill();
vpTRACE("\n\nClick in the internal view window to end...");
delete moments;
delete momentsDes;
delete featureMoments;
delete featureMomentsDes;
}
void removeJointLimits(vpSimulatorAfma6& robot_)
{
vpColVector limMin(6);
vpColVector limMax(6);
for (unsigned int i = 0; i < 6; i++) {
limMin[i] = vpMath::rad(-3600);
limMax[i] = vpMath::rad(3600);
}
robot_.setJointLimit(limMin,limMax);
robot_.setMaxRotationVelocity(99999);
robot_.setMaxTranslationVelocity(999999);
}
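// planeToABC() rewrites a plane given as a*X + b*Y + c*Z + d = 0 in the camera
// frame (the values returned by pl.getA(), getB(), getC(), getD()) into the
// depth parametrization 1/Z = A*x + B*y + C used by the moment features, where
// (x, y) = (X/Z, Y/Z) are normalized image coordinates. Dividing the plane
// equation by -d*Z gives 1/Z = -(a/d)*x - (b/d)*y - (c/d), hence A = -a/d,
// B = -b/d and C = -c/d, which is only defined when d != 0.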
void planeToABC(vpPlane& pl, double& A,double& B, double& C){
if(fabs(pl.getD())<std::numeric_limits<double>::epsilon()){
std::cout << "Invalid position:" << std::endl;
std::cout << cMo << std::endl;
std::cout << "Cannot put plane in the form 1/Z=Ax+By+C." << std::endl;
throw vpException(vpException::divideByZeroError,"invalid position!");
}
A=-pl.getA()/pl.getD();
B=-pl.getB()/pl.getD();
C=-pl.getC()/pl.getD();
}
void paramRobot(){
/*Initialise the robot and especially the camera*/
robot.setCurrentViewColor(vpColor(150,150,150));
robot.setDesiredViewColor(vpColor(200,200,200));
removeJointLimits(robot);
robot.setConstantSamplingTimeMode(true);
/*Initialise the position of the object relative to the pose of the robot's camera*/
robot.initialiseObjectRelativeToCamera(cMo);
/*Set the desired camera position (for the display part)*/
robot.setDesiredCameraPosition(cdMo);
robot.getCameraParameters(cam,Iint);
}
void setInteractionMatrixType(vpServo::vpServoIteractionMatrixType type){interaction_type=type;}
double error(){return _error;}
#endif