65 #include <visp3/core/vpCameraParameters.h>
66 #include <visp3/detection/vpDetectorAprilTag.h>
67 #include <visp3/gui/vpDisplayGDI.h>
68 #include <visp3/gui/vpDisplayX.h>
69 #include <visp3/gui/vpPlot.h>
70 #include <visp3/io/vpImageIo.h>
71 #include <visp3/robot/vpRobotAfma6.h>
72 #include <visp3/sensor/vpRealSense2.h>
73 #include <visp3/visual_features/vpFeatureThetaU.h>
74 #include <visp3/visual_features/vpFeatureTranslation.h>
75 #include <visp3/vs/vpServo.h>
76 #include <visp3/vs/vpServoDisplay.h>
77 #include <visp3/core/vpImageFilter.h>
78 #include <visp3/io/vpVideoWriter.h>
81 #if defined(VISP_HAVE_REALSENSE2) && ((__cplusplus >= 201703L) || (defined(_MSVC_LANG) && (_MSVC_LANG >= 201703L))) && \
82 (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)) && defined(VISP_HAVE_AFMA6) && defined(VISP_HAVE_MODULE_DNN_TRACKER)
85 #include <visp3/io/vpJsonArgumentParser.h>
86 #include <visp3/dnn_tracker/vpMegaPoseTracker.h>
88 #ifdef VISP_HAVE_NLOHMANN_JSON
89 using json = nlohmann::json;
92 std::optional<vpRect> detectObjectForInitMegaposeClick(
const vpImage<vpRGBa> &I)
108 vpRect bb(topLeft, bottomRight);
119 int main(
int argc,
const char *argv[])
121 bool opt_verbose =
true;
122 bool opt_plot =
true;
123 double convergence_threshold_t = 0.0005;
124 double convergence_threshold_tu = 0.5;
126 unsigned width = 640, height = 480;
127 std::string megaposeAddress =
"127.0.0.1";
128 unsigned megaposePort = 5555;
129 int refinerIterations = 1, coarseNumSamples = 1024;
130 std::string objectName =
"";
132 std::string desiredPosFile =
"desired.pos";
133 std::string initialPosFile =
"init.pos";
135 #ifdef VISP_HAVE_NLOHMANN_JSON
136 vpJsonArgumentParser parser(
"Pose-based visual servoing with Megapose on an Afma6, with a Realsense D435.",
"--config",
"/");
138 .addArgument(
"initialPose", initialPosFile,
true,
"Path to the file that contains that the desired pose. Can be acquired with Afma6_office.")
139 .addArgument(
"desiredPose", desiredPosFile,
true,
"Path to the file that contains that the desired pose. Can be acquired with Afma6_office.")
140 .addArgument(
"object", objectName,
true,
"Name of the object to track with megapose.")
141 .addArgument(
"megapose/address", megaposeAddress,
true,
"IP address of the Megapose server.")
142 .addArgument(
"megapose/port", megaposePort,
true,
"Port on which the Megapose server listens for connections.")
143 .addArgument(
"megapose/refinerIterations", refinerIterations,
false,
"Number of Megapose refiner model iterations."
144 "A higher count may lead to better accuracy, at the cost of more processing time")
145 .addArgument(
"megapose/initialisationNumSamples", coarseNumSamples,
false,
"Number of Megapose renderings used for the initial pose estimation.");
146 parser.parse(argc, argv);
152 std::cout <<
"WARNING: This example will move the robot! "
153 <<
"Please make sure to have the user stop button at hand!" << std::endl
154 <<
"Press Enter to continue..." << std::endl;
156 std::vector<vpColVector> velocities;
157 std::vector<vpPoseVector> error;
166 robot.setPositioningVelocity(10.0);
167 robot.readPosFile(desiredPosFile, q);
170 std::cout <<
"Move to joint position: " << q.t() << std::endl;
176 config.enable_stream(RS2_STREAM_COLOR, width, height, RS2_FORMAT_RGBA8, 30);
182 std::cout <<
"cam:\n" << cam <<
"\n";
184 std::shared_ptr<vpMegaPose> megapose;
186 megapose = std::make_shared<vpMegaPose>(megaposeAddress, megaposePort, cam, height, width);
193 megapose->setCoarseNumSamples(coarseNumSamples);
194 const std::vector<std::string> allObjects = megapose->getObjectNames();
195 if (std::find(allObjects.begin(), allObjects.end(), objectName) == allObjects.end()) {
198 std::future<vpMegaPoseEstimate> trackerFuture;
202 #if defined(VISP_HAVE_X11)
204 #elif defined(VISP_HAVE_GDI)
208 std::optional<vpRect> detection;
212 detection = detectObjectForInitMegaposeClick(I);
219 robot.readPosFile(initialPosFile, q);
222 std::cout <<
"Move to joint position: " << q.t() << std::endl;
226 detection = std::nullopt;
230 detection = detectObjectForInitMegaposeClick(I);
233 auto est = megaposeTracker.init(I, *detection).get();
235 std::cout <<
"Estimate score = " << est.score << std::endl;
257 vpPlot *plotter =
nullptr;
261 plotter =
new vpPlot(2,
static_cast<int>(250 * 2), 500,
static_cast<int>(I.
getWidth()) + 80, 10,
262 "Real time curves plotter");
263 plotter->
setTitle(0,
"Visual features error");
264 plotter->
setTitle(1,
"Camera velocities");
267 plotter->
setLegend(0, 0,
"error_feat_tx");
268 plotter->
setLegend(0, 1,
"error_feat_ty");
269 plotter->
setLegend(0, 2,
"error_feat_tz");
270 plotter->
setLegend(0, 3,
"error_feat_theta_ux");
271 plotter->
setLegend(0, 4,
"error_feat_theta_uy");
272 plotter->
setLegend(0, 5,
"error_feat_theta_uz");
281 bool final_quit =
false;
282 bool has_converged =
false;
283 bool send_velocities =
false;
292 bool callMegapose =
true;
295 while (!has_converged && !final_quit) {
300 if (!callMegapose && trackerFuture.wait_for(std::chrono::milliseconds(0)) == std::future_status::ready) {
301 megaposeEstimate = trackerFuture.get();
303 cTo = megaposeEstimate.
cTo;
305 if (megaposeEstimate.
score < 0.2) {
307 std::cout <<
"Low confidence, exiting" << std::endl;
312 std::cout <<
"Calling megapose" << std::endl;
313 trackerFuture = megaposeTracker.track(I);
314 callMegapose =
false;
317 std::stringstream ss;
318 ss <<
"Left click to " << (send_velocities ?
"stop the robot" :
"servo the robot") <<
", right click to quit.";
326 velocities.push_back(v);
331 cTw = robot.get_fMc(q).inverse();
332 cdTc_true = cdTw * cTw.
inverse();
334 error.push_back(cdrc);
342 plotter->
plot(1, iter_plot, v);
347 std::cout <<
"v: " << v.t() << std::endl;
352 double error_tr = sqrt(cd_t_c.
sumSquare());
356 double error_tr_true = sqrt(cd_t_c_true.
sumSquare());
360 ss <<
"Predicted error_t: " << error_tr <<
", True error_t:" << error_tr_true;
363 ss <<
"Predicted error_tu: " << error_tu <<
", True error_tu:" << error_tu_true;
367 std::cout <<
"error translation: " << error_tr <<
" ; error rotation: " << error_tu << std::endl;
369 if (error_tr < convergence_threshold_t && error_tu < convergence_threshold_tu) {
370 has_converged =
true;
371 std::cout <<
"Servo task has converged" << std::endl;
390 send_velocities = !send_velocities;
403 std::cout <<
"Stop the robot " << std::endl;
406 #ifdef VISP_HAVE_NLOHMANN_JSON
409 {
"velocities", velocities},
412 std::ofstream jsonFile;
413 jsonFile.open(
"results.json");
414 jsonFile << j.dump(4);
418 if (opt_plot && plotter !=
nullptr) {
424 while (!final_quit) {
440 std::cout <<
"ViSP exception: " << e.
what() << std::endl;
441 std::cout <<
"Stop the robot " << std::endl;
445 catch (
const std::exception &e) {
446 std::cout <<
"ur_rtde exception: " << e.what() << std::endl;
455 #if !defined(VISP_HAVE_REALSENSE2)
456 std::cout <<
"Install librealsense-2.x" << std::endl;
458 #if !((__cplusplus >= 201703L) || (defined(_MSVC_LANG) && (_MSVC_LANG >= 201703L)))
459 std::cout <<
"Build ViSP with c++17 or higher compiler flag (cmake -DUSE_CXX_STANDARD=17)." << std::endl;
461 #if !defined(VISP_HAVE_AFMA6)
462 std::cout <<
"ViSP is not built with Afma-6 robot support..." << std::endl;
Generic class defining intrinsic camera parameters.
@ perspectiveProjWithoutDistortion
Perspective projection without distortion model.
Implementation of a column vector and the associated operations.
static const vpColor none
static const vpColor yellow
Display for windows using GDI (available on any windows 32 platform).
Use the X11 console to display images on unix-like OS. Thus to enable this class X11 should be instal...
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void displayFrame(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, double size, const vpColor &color=vpColor::none, unsigned int thickness=1, const vpImagePoint &offset=vpImagePoint(0, 0), const std::string &frameName="", const vpColor &textColor=vpColor::black, const vpImagePoint &textOffset=vpImagePoint(15, 15))
static void getImage(const vpImage< unsigned char > &Is, vpImage< vpRGBa > &Id)
static void displayCross(const vpImage< unsigned char > &I, const vpImagePoint &ip, unsigned int size, const vpColor &color, unsigned int thickness=1)
static void flush(const vpImage< unsigned char > &I)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
Error that can be emitted by ViSP classes.
@ badValue
Used to indicate that a value is not in the allowed range.
const char * what() const
Class that defines a 3D visual feature from an axis/angle parametrization that represents the rotatio...
Class that defines the translation visual feature .
Implementation of an homogeneous matrix and operations on such kind of matrices.
vpThetaUVector getThetaUVector() const
vpHomogeneousMatrix inverse() const
vpTranslationVector getTranslationVector() const
void buildFrom(const vpTranslationVector &t, const vpRotationMatrix &R)
vp_deprecated void init()
Class that defines a 2D point in an image. This class is useful for image processing and stores only ...
unsigned int getWidth() const
Command line argument parsing with support for JSON files. If a JSON file is supplied,...
static double deg(double rad)
A simplified interface to track a single object with MegaPose. This tracker works asynchronously: A c...
This class enables real-time drawing of 2D or 3D graphics. An instance of the class opens a window whi...
void initGraph(unsigned int graphNum, unsigned int curveNbr)
void setLegend(unsigned int graphNum, unsigned int curveNum, const std::string &legend)
void plot(unsigned int graphNum, unsigned int curveNum, double x, double y)
void setTitle(unsigned int graphNum, const std::string &title)
Implementation of a pose vector and operations on poses.
vpCameraParameters getCameraParameters(const rs2_stream &stream, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithDistortion, int index=-1) const
void acquire(vpImage< unsigned char > &grey, double *ts=nullptr)
bool open(const rs2::config &cfg=rs2::config())
Defines a rectangle in the plane.
Control of Irisa's gantry robot named Afma6.
void setVelocity(const vpRobot::vpControlFrameType frame, const vpColVector &vel) vp_override
@ STATE_POSITION_CONTROL
Initialize the position controller.
@ STATE_VELOCITY_CONTROL
Initialize the velocity controller.
@ STATE_STOP
Stops robot motion especially in velocity and acceleration control.
virtual vpRobotStateType setRobotState(const vpRobot::vpRobotStateType newState)
void setInteractionMatrixType(const vpServoIteractionMatrixType &interactionMatrixType, const vpServoInversionType &interactionMatrixInversion=PSEUDO_INVERSE)
void addFeature(vpBasicFeature &s_cur, vpBasicFeature &s_star, unsigned int select=vpBasicFeature::FEATURE_ALL)
void setServo(const vpServoType &servo_type)
vpColVector getError() const
vpColVector computeControlLaw()
Implementation of a rotation vector as axis-angle minimal representation.
Class that considers the case of a translation vector.
Class that enables easy writing of a video file or a sequence of images.
void saveFrame(vpImage< vpRGBa > &I)
void setFileName(const std::string &filename)
void open(vpImage< vpRGBa > &I)
VISP_EXPORT double measureTimeMs()