Visual Servoing Platform version 3.6.1 under development (2024-11-14)
mbot-apriltag-2D-half-vs.cpp
#include <visp3/core/vpConfig.h>
#include <visp3/core/vpPolygon.h>
#include <visp3/core/vpSerial.h>
#include <visp3/core/vpXmlParserCamera.h>
#include <visp3/detection/vpDetectorAprilTag.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/io/vpImageIo.h>
#include <visp3/robot/vpUnicycle.h>
#include <visp3/sensor/vpV4l2Grabber.h>
#include <visp3/visual_features/vpFeatureBuilder.h>
#include <visp3/visual_features/vpFeatureDepth.h>
#include <visp3/visual_features/vpFeaturePoint.h>
#include <visp3/vs/vpServo.h>
int main(int argc, const char **argv)
{
#if defined(VISP_HAVE_APRILTAG) && defined(VISP_HAVE_V4L2)
#ifdef ENABLE_VISP_NAMESPACE
using namespace VISP_NAMESPACE_NAME;
#endif
int device = 0;
vpDetectorAprilTag::vpAprilTagFamily tagFamily = vpDetectorAprilTag::TAG_36h11;
vpDetectorAprilTag::vpPoseEstimationMethod poseEstimationMethod = vpDetectorAprilTag::HOMOGRAPHY_VIRTUAL_VS;
double tagSize = 0.065;
float quad_decimate = 4.0;
int nThreads = 2;
std::string intrinsic_file = "";
std::string camera_name = "";
bool display_tag = false;
bool display_on = false;
bool serial_off = false;
bool use_pose = true;
bool save_image = false; // Only possible if display_on = true
for (int i = 1; i < argc; i++) {
if (std::string(argv[i]) == "--without_pose_computation") {
use_pose = false;
}
else if (std::string(argv[i]) == "--tag_size" && i + 1 < argc) {
tagSize = std::atof(argv[i + 1]);
}
else if (std::string(argv[i]) == "--input" && i + 1 < argc) {
device = std::atoi(argv[i + 1]);
}
else if (std::string(argv[i]) == "--quad_decimate" && i + 1 < argc) {
quad_decimate = (float)atof(argv[i + 1]);
}
else if (std::string(argv[i]) == "--nthreads" && i + 1 < argc) {
nThreads = std::atoi(argv[i + 1]);
}
else if (std::string(argv[i]) == "--intrinsic" && i + 1 < argc) {
intrinsic_file = std::string(argv[i + 1]);
}
else if (std::string(argv[i]) == "--camera_name" && i + 1 < argc) {
camera_name = std::string(argv[i + 1]);
}
else if (std::string(argv[i]) == "--display_tag") {
display_tag = true;
#if defined(VISP_HAVE_X11)
}
else if (std::string(argv[i]) == "--display_on") {
display_on = true;
}
else if (std::string(argv[i]) == "--save_image") {
save_image = true;
#endif
}
else if (std::string(argv[i]) == "--serial_off") {
serial_off = true;
}
else if (std::string(argv[i]) == "--tag_family" && i + 1 < argc) {
tagFamily = (vpDetectorAprilTag::vpAprilTagFamily)atoi(argv[i + 1]);
}
else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
std::cout << "Usage: " << argv[0]
<< " [--input <camera input>] [--tag_size <tag_size in m>]"
" [--quad_decimate <quad_decimate>] [--nthreads <nb>]"
" [--intrinsic <intrinsic file>] [--camera_name <camera name>] [--without_pose_computation]"
" [--tag_family <family> (0: TAG_36h11, 1: TAG_36h10, 2: TAG_36ARTOOLKIT,"
" 3: TAG_25h9, 4: TAG_25h7, 5: TAG_16h5)]"
" [--display_tag]";
#if defined(VISP_HAVE_X11)
std::cout << " [--display_on] [--save_image]";
#endif
std::cout << " [--serial_off] [--help]" << std::endl;
return EXIT_SUCCESS;
}
}
// Me Auriga led ring
// if serial com ok: led 1 green
// if exception: led 1 red
// if tag detected: led 2 green, else led 2 red
// if motor left: led 3 blue
// if motor right: led 4 blue
vpSerial *serial = nullptr;
if (!serial_off) {
serial = new vpSerial("/dev/ttyAMA0", 115200);
serial->write("LED_RING=0,0,0,0\n"); // Switch off all led
serial->write("LED_RING=1,0,10,0\n"); // Switch on led 1 to green: serial ok
}
try {
vpImage<unsigned char> I;
vpV4l2Grabber g;
std::ostringstream device_name;
device_name << "/dev/video" << device;
g.setDevice(device_name.str());
g.setScale(1);
g.acquire(I);
vpDisplay *d = nullptr;
#ifdef VISP_HAVE_X11
if (display_on) {
d = new vpDisplayX(I);
}
#endif
// Default intrinsics (focal lengths and principal point in pixels), overridden below if an XML calibration file is given
vpCameraParameters cam;
cam.initPersProjWithoutDistortion(615.1674805, 615.1675415, I.getWidth() / 2., I.getHeight() / 2.);
vpXmlParserCamera parser;
if (!intrinsic_file.empty() && !camera_name.empty())
parser.parse(cam, intrinsic_file, camera_name, vpCameraParameters::perspectiveProjWithoutDistortion);
std::cout << "cam:\n" << cam << std::endl;
std::cout << "use pose: " << use_pose << std::endl;
std::cout << "tagFamily: " << tagFamily << std::endl;
vpDetectorAprilTag detector(tagFamily);
detector.setAprilTagQuadDecimate(quad_decimate);
if (use_pose)
detector.setAprilTagPoseEstimationMethod(poseEstimationMethod);
detector.setAprilTagNbThreads(nThreads);
detector.setDisplayTag(display_tag);
vpServo task;
vpAdaptiveGain lambda;
if (display_on)
lambda.initStandard(2.5, 0.4, 30); // lambda(0)=2.5, lambda(oo)=0.4 and lambda'(0)=30
else
lambda.initStandard(4, 0.4, 30); // lambda(0)=4, lambda(oo)=0.4 and lambda'(0)=30
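// initStandard(l0, linf, slope0) sets an adaptive gain that decreases with the task error norm x,
// roughly lambda(x) = (l0 - linf) * exp(-slope0 * x / (l0 - linf)) + linf:
// high gain near convergence, lower gain while the error is still large (lower still when no display slows the loop)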
vpUnicycle robot;
task.setServo(vpServo::EYEINHAND_L_cVe_eJe);
task.setInteractionMatrixType(vpServo::CURRENT, vpServo::PSEUDO_INVERSE);
task.setLambda(lambda);
vpRotationMatrix cRe;
cRe[0][0] = 0;
cRe[0][1] = -1;
cRe[0][2] = 0;
cRe[1][0] = 0;
cRe[1][1] = 0;
cRe[1][2] = -1;
cRe[2][0] = 1;
cRe[2][1] = 0;
cRe[2][2] = 0;
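// cRe maps vectors from the end-effector (robot) frame to the camera frame:
// robot forward axis x_e -> camera optical axis z_c, robot y_e -> -x_c, robot z_e -> -y_c,
// i.e. the camera looks along the robot's direction of motion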
// Build cVe from cRe, assuming here a negligible translation between camera and robot frames
vpHomogeneousMatrix cMe(vpTranslationVector(), cRe);
vpVelocityTwistMatrix cVe(cMe);
task.set_cVe(cVe);
vpMatrix eJe(6, 2, 0);
eJe[0][0] = eJe[5][1] = 1.0;
std::cout << "eJe: \n" << eJe << std::endl;
// Current and desired visual feature associated to the x coordinate of the point
vpFeaturePoint s_x, s_xd;
double Z, Z_d;
Z = Z_d = 0.4;
// Create the current x visual feature; the point is a placeholder here and is
// updated in the servo loop with the detected tag center of gravity
vpImagePoint cog(0, 0);
vpFeatureBuilder::create(s_x, cam, cog);
// Create the desired x* visual feature
s_xd.buildFrom(0, 0, Z_d);
// Add the point feature
task.addFeature(s_x, s_xd, vpFeaturePoint::selectX());
// Create the log(Z/Z*) visual feature
vpFeatureDepth s_Z, s_Z_d;
std::cout << "Z " << Z << std::endl;
s_Z.buildFrom(s_x.get_x(), s_x.get_y(), Z, 0); // log(Z/Z*) = 0 that's why the last parameter is 0
s_Z_d.buildFrom(0, 0, Z_d, 0); // The desired feature s* is 0 since the desired depth equals Z_d
// Add the feature
task.addFeature(s_Z, s_Z_d);
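// This is the "2D half" scheme: the x point feature mainly drives the robot heading (rotation),
// while the log(Z/Z*) depth feature drives the forward motion, matching the 2 dof of the unicycle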
std::vector<double> time_vec;
for (;;) {
g.acquire(I);
vpDisplay::display(I);
double t = vpTime::measureTimeMs();
std::vector<vpHomogeneousMatrix> cMo_vec;
if (use_pose)
detector.detect(I, tagSize, cam, cMo_vec);
else
detector.detect(I);
t = vpTime::measureTimeMs() - t; // Detection time in ms
time_vec.push_back(t);
{
std::stringstream ss;
ss << "Detection time: " << t << " ms";
vpDisplay::displayText(I, 40, 20, ss.str(), vpColor::red);
}
if (detector.getNbObjects() == 1) {
// Display visual features
vpHomogeneousMatrix cdMo(0, 0, Z_d, 0, 0, 0);
vpDisplay::displayFrame(I, cdMo, cam, tagSize / 3, vpColor::red, 3);
vpDisplay::displayCross(I, detector.getCog(0), 15, vpColor::green, 3); // Current tag center of gravity
vpDisplay::displayLine(I, 0, static_cast<int>(cam.get_u0()), static_cast<int>(I.getHeight()) - 1,
static_cast<int>(cam.get_u0()), vpColor::red, 3); // Vertical line as desired x position
if (use_pose) {
// Display visual features
vpDisplay::displayFrame(I, cMo_vec[0], cam, tagSize / 2, vpColor::none, 3);
}
if (!serial_off) {
serial->write("LED_RING=2,0,10,0\n"); // Switch on led 2 to green: tag detected
}
if (use_pose) {
Z = cMo_vec[0][2][3];
}
else {
vpPolygon polygon(detector.getPolygon(0));
double surface = polygon.getArea();
std::cout << "Surface: " << surface << std::endl;
// Compute the distance from target surface and 3D size
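// A square tag of side tagSize at depth Z projects to about tagSize * px / Z pixels per side,
// so its image area is roughly (tagSize * px / Z)^2; inverting gives the estimate used on the next line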
Z = tagSize * cam.get_px() / sqrt(surface);
}
vpFeatureBuilder::create(s_x, cam, detector.getCog(0));
s_x.set_Z(Z);
// Update log(Z/Z*) feature
s_Z.buildFrom(s_x.get_x(), s_x.get_y(), Z, log(Z / Z_d));
std::cout << "cog: " << detector.getCog(0) << " Z: " << Z << std::endl;
task.set_cVe(cVe);
task.set_eJe(eJe);
// Compute the control law. Velocities are computed in the mobile robot reference frame
vpColVector v = task.computeControlLaw();
std::cout << "Send velocity to the mbot: " << v[0] << " m/s " << vpMath::deg(v[1]) << " deg/s" << std::endl;
task.print();
double radius = 0.0325;
double L = 0.0725;
double motor_left = (-v[0] - L * v[1]) / radius;
double motor_right = (v[0] - L * v[1]) / radius;
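// Differential-drive kinematics: each wheel angular velocity is (v +/- L * w) / radius,
// with radius the wheel radius and L the half distance between the wheels; the sign flip on
// the left motor presumably accounts for its mirrored mounting on the mBot chassis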
std::cout << "motor left vel: " << motor_left << " motor right vel: " << motor_right << std::endl;
if (!serial_off) {
// serial->write("LED_RING=3,0,0,10\n"); // Switch on led 3 to blue: motor left servoed
// serial->write("LED_RING=4,0,0,10\n"); // Switch on led 4 to blue: motor right servoed
}
std::stringstream ss;
double rpm_left = motor_left * 30. / M_PI;
double rpm_right = motor_right * 30. / M_PI;
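// Convert rad/s to rpm: 1 rad/s = 60 / (2*pi) = 30/pi ~= 9.55 rpm, hence the factor 30/M_PI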
ss << "MOTOR_RPM=" << vpMath::round(rpm_left) << "," << vpMath::round(rpm_right) << "\n";
std::cout << "Send: " << ss.str() << std::endl;
if (!serial_off) {
serial->write(ss.str());
}
}
else {
// stop the robot
if (!serial_off) {
serial->write("LED_RING=2,10,0,0\n"); // Switch on led 2 to red: tag not detected
// serial->write("LED_RING=3,0,0,0\n"); // Switch on led 3 to blue: motor left not servoed
// serial->write("LED_RING=4,0,0,0\n"); // Switch on led 4 to blue: motor right not servoed
serial->write("MOTOR_RPM=0,-0\n"); // Stop the robot
}
}
vpDisplay::displayText(I, 20, 20, "Click to quit.", vpColor::red);
vpDisplay::flush(I);
if (display_on && save_image) {
vpImage<vpRGBa> O;
vpDisplay::getImage(I, O); // Get the displayed image with its overlay
vpImageIo::write(O, "image.png");
}
if (vpDisplay::getClick(I, false))
break;
}
if (!serial_off) {
serial->write("LED_RING=0,0,0,0\n"); // Switch off all led
}
std::cout << "Benchmark computation time" << std::endl;
std::cout << "Mean / Median / Std: " << vpMath::getMean(time_vec) << " ms"
<< " ; " << vpMath::getMedian(time_vec) << " ms"
<< " ; " << vpMath::getStdev(time_vec) << " ms" << std::endl;
if (display_on)
delete d;
if (!serial_off) {
delete serial;
}
}
catch (const vpException &e) {
std::cerr << "Catch an exception: " << e.getMessage() << std::endl;
if (!serial_off) {
serial->write("LED_RING=1,10,0,0\n"); // Switch on led 1 to red
}
}
return EXIT_SUCCESS;
#else
(void)argc;
(void)argv;
#ifndef VISP_HAVE_APRILTAG
std::cout << "ViSP is not build with Apriltag support" << std::endl;
#endif
#ifndef VISP_HAVE_V4L2
std::cout << "ViSP is not build with v4l2 support" << std::endl;
#endif
std::cout << "Install missing 3rd parties, configure and build ViSP to run this tutorial" << std::endl;
return EXIT_SUCCESS;
#endif
}