Visual Servoing Platform  version 3.6.1 under development (2025-03-13)
tutorial-rbt-realsense.cpp
#include <iostream>

#include <visp3/core/vpConfig.h>

#ifdef ENABLE_VISP_NAMESPACE
using namespace VISP_NAMESPACE_NAME;
#endif

#ifndef VISP_HAVE_REALSENSE2

int main()
{
  std::cerr << "To run this tutorial, recompile ViSP with the Realsense third party library" << std::endl;
  return EXIT_SUCCESS;
}

#else
#include <visp3/sensor/vpRealSense2.h>
#include <visp3/io/vpParseArgv.h>

#include <visp3/ar/vpPanda3DFrameworkManager.h>

#include <visp3/rbt/vpRBTracker.h>

#include "render-based-tutorial-utils.h"

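// Command-line options specific to the RealSense grabber: requested image height, width and framerate.
// They are registered with the same vpJsonArgumentParser as the generic tutorial arguments.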
#ifndef DOXYGEN_SHOULD_SKIP_THIS
struct CmdArguments
{
  CmdArguments() : height(480), width(848), fps(60)
  { }

  void registerArguments(vpJsonArgumentParser &parser)
  {
    parser.addArgument("--height", height, false, "Realsense requested image height")
      .addArgument("--width", width, false, "Realsense requested image width")
      .addArgument("--fps", fps, false, "Realsense requested framerate");
  }

  unsigned int height, width, fps;
};
#endif

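// Convert the raw 16-bit depth map into a metric depth image using the sensor depth scale,
// and build an 8-bit visualization where depth values above maxZDisplay are shown as black.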
void updateDepth(const vpImage<uint16_t> &depthRaw, float depthScale, float maxZDisplay, vpImage<float> &depth, vpImage<unsigned char> &IdepthDisplay)
{
  depth.resize(depthRaw.getHeight(), depthRaw.getWidth());
#ifdef VISP_HAVE_OPENMP
#pragma omp parallel for
#endif
  for (unsigned int i = 0; i < depthRaw.getSize(); ++i) {
    depth.bitmap[i] = depthScale * static_cast<float>(depthRaw.bitmap[i]);
    IdepthDisplay.bitmap[i] = depth.bitmap[i] > maxZDisplay ? 0 : static_cast<unsigned int>((depth.bitmap[i] / maxZDisplay) * 255.f);
  }
}

int main(int argc, const char **argv)
{
  // Read the command line options
  vpRBTrackerTutorial::BaseArguments baseArgs;
  CmdArguments realsenseArgs;
  vpRBTrackerTutorial::vpRBExperimentLogger logger;
  vpRBTrackerTutorial::vpRBExperimentPlotter plotter;

  vpJsonArgumentParser parser(
    "Tutorial showing the usage of the Render-Based tracker with a RealSense camera",
    "--config", "/"
  );

  baseArgs.registerArguments(parser);
  realsenseArgs.registerArguments(parser);
  logger.registerArguments(parser);
  plotter.registerArguments(parser);

  parser.parse(argc, argv);

  baseArgs.postProcessArguments();
  plotter.postProcessArguments(baseArgs.display);

  if (baseArgs.enableRenderProfiling) {
    vpRBTrackerTutorial::enableRendererProfiling();
  }

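  // Load the render-based tracker settings from the JSON configuration file, start the tracker,
  // then allocate the input and visualization images at the requested resolution.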
  std::cout << "Loading tracker: " << baseArgs.trackerConfiguration << std::endl;
  vpRBTracker tracker;
  tracker.loadConfigurationFile(baseArgs.trackerConfiguration);
  tracker.startTracking();
  const unsigned int width = realsenseArgs.width, height = realsenseArgs.height;
  const unsigned int fps = realsenseArgs.fps;

  vpImage<unsigned char> Id(height, width);
  vpImage<vpRGBa> Icol(height, width);
  vpImage<uint16_t> depthRaw(height, width);
  vpImage<float> depth(height, width);
  vpImage<unsigned char> IdepthDisplay(height, width);
  vpImage<unsigned char> IProbaDisplay(height, width);
  vpImage<unsigned char> cannyDisplay(height, width);
  vpImage<vpRGBa> InormDisplay(height, width);

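  // Configure the RealSense grabber: color (RGBA) and depth (Z16) streams at the requested
  // resolution and framerate, with depth frames aligned to the color stream.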
  vpRealSense2 realsense;

  std::cout << "Opening realsense with " << width << "x" << height << " @ " << fps << "fps" << std::endl;
  rs2::config config;
  config.enable_stream(RS2_STREAM_COLOR, width, height, RS2_FORMAT_RGBA8, fps);
  config.enable_stream(RS2_STREAM_DEPTH, width, height, RS2_FORMAT_Z16, fps);
  rs2::align align_to(RS2_STREAM_COLOR);
  try {
    realsense.open(config);
  }
  catch (const vpException &e) {
    std::cout << "Caught an exception: " << e.what() << std::endl;
    std::cout << "Check if the Realsense camera is connected..." << std::endl;
    return EXIT_SUCCESS;
  }

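  // Get the scale factor that converts raw 16-bit depth values to meters, let the camera warm up,
  // then give the tracker the color camera intrinsics at the streamed resolution.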
  float depthScale = realsense.getDepthScale();
  // Camera warmup
  for (int i = 0; i < 10; ++i) {
    realsense.acquire(Icol);
  }
  vpImageConvert::convert(Icol, Id);

  vpCameraParameters cam = realsense.getCameraParameters(RS2_STREAM_COLOR, vpCameraParameters::perspectiveProjWithoutDistortion);
  tracker.setCameraParameters(cam, height, width);

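  // Create the display windows for the color, grayscale, depth and mask images,
  // plus optional debug views (object-frame normals and depth canny edges).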
  std::cout << "Creating displays" << std::endl;
  std::vector<std::shared_ptr<vpDisplay>> displays, displaysDebug;

  if (baseArgs.display) {
    displays = vpRBTrackerTutorial::createDisplays(Id, Icol, IdepthDisplay, IProbaDisplay);
    if (baseArgs.debugDisplay) {
      displaysDebug = vpDisplayFactory::makeDisplayGrid(1, 2,
        0, 0,
        20, 20,
        "Normals in object frame", InormDisplay,
        "Depth canny", cannyDisplay
      );
    }
    plotter.init(displays);
  }

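  // When no inline initial pose is provided, stream and display images until the user clicks
  // in the grayscale window; the click starts the manual initialization below.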
  if (baseArgs.display && !baseArgs.hasInlineInit()) {
    bool ready = false;
    while (!ready) {
      realsense.acquire((unsigned char *)Icol.bitmap, (unsigned char *)depthRaw.bitmap, nullptr, nullptr, &align_to);
      updateDepth(depthRaw, depthScale, baseArgs.maxDepthDisplay, depth, IdepthDisplay);
      vpImageConvert::convert(Icol, Id);
      vpDisplay::display(Icol); vpDisplay::display(Id); vpDisplay::display(IdepthDisplay);
      vpDisplay::flush(Icol); vpDisplay::flush(Id); vpDisplay::flush(IdepthDisplay);
      if (vpDisplay::getClick(Id, false)) {
        ready = true;
      }
      else {
        vpTime::wait(1000.0 / fps);
      }
    }
  }

  updateDepth(depthRaw, depthScale, baseArgs.maxDepthDisplay, depth, IdepthDisplay);
  vpHomogeneousMatrix cMo;

  // Manual initialization of the tracker
  std::cout << "Starting init" << std::endl;
  if (baseArgs.hasInlineInit()) {
    tracker.setPose(baseArgs.cMoInit);
  }
  else if (baseArgs.display) {
    tracker.initClick(Id, baseArgs.initFile, true);
    tracker.getPose(cMo);
  }
  else {
    throw vpException(vpException::notImplementedError, "Cannot initialize tracking: no auto init function provided");
  }

  std::cout << "Starting pose: " << vpPoseVector(cMo).t() << std::endl;

  if (baseArgs.display) {
    vpDisplay::flush(Id);
  }

  //vpRBTrackerFilter &ukfm = tracker.getFilter();
  logger.startLog();
  unsigned int iter = 1;
  // Main tracking loop
  double expStart = vpTime::measureTimeMs();
  while (true) {
    double frameStart = vpTime::measureTimeMs();
    // Acquire images
    realsense.acquire((unsigned char *)Icol.bitmap, (unsigned char *)depthRaw.bitmap, nullptr, nullptr, &align_to);
    updateDepth(depthRaw, depthScale, baseArgs.maxDepthDisplay, depth, IdepthDisplay);
    vpImageConvert::convert(Icol, Id);

    // Pose tracking
    double trackingStart = vpTime::measureTimeMs();
    tracker.track(Id, Icol, depth);
    double trackingEnd = vpTime::measureTimeMs();
    tracker.getPose(cMo);
    double displayStart = vpTime::measureTimeMs();
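    // Display: tracker feature overlays on the grayscale and color images, the estimated object frame,
    // the per-pixel object mask, and (optionally) the silhouette canny edges used for debugging.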
    if (baseArgs.display) {
      if (baseArgs.debugDisplay) {
        const vpRBFeatureTrackerInput &lastFrame = tracker.getMostRecentFrame();

        vpRBTrackerTutorial::displayCanny(lastFrame.renders.silhouetteCanny, cannyDisplay, lastFrame.renders.isSilhouette);
      }

      vpDisplay::display(IdepthDisplay);
      vpDisplay::display(Id);
      vpDisplay::display(Icol);
      tracker.display(Id, Icol, IdepthDisplay);
      vpDisplay::displayFrame(Icol, cMo, cam, 0.05, vpColor::none, 2);
      vpDisplay::displayText(Id, 20, 5, "Right click to exit", vpColor::red);
      vpMouseButton::vpMouseButtonType button;
      if (vpDisplay::getClick(Id, button, false)) {
        if (button == vpMouseButton::button3) {
          break;
        }
      }
      tracker.displayMask(IProbaDisplay);
      vpDisplay::display(IProbaDisplay);

      vpDisplay::flush(Id); vpDisplay::flush(Icol);
      vpDisplay::flush(IdepthDisplay); vpDisplay::flush(IProbaDisplay);
    }
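    // Log the images and tracker state for this frame, print per-iteration timings,
    // and update the live plots.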
    logger.logFrame(tracker, iter, Id, Icol, IdepthDisplay, IProbaDisplay);
    const double displayEnd = vpTime::measureTimeMs();

    // ukfm.filter(cMo, 0.05);
    // const vpHomogeneousMatrix cMoFiltered = ukfm.getFilteredPose();
    // vpDisplay::displayFrame(Icol, cMoFiltered, cam, 0.05, vpColor::yellow, 2);

    const double frameEnd = vpTime::measureTimeMs();
    std::cout << "Iter " << iter << ": " << round(frameEnd - frameStart) << "ms" << std::endl;
    std::cout << "- Tracking: " << round(trackingEnd - trackingStart) << "ms" << std::endl;
    std::cout << "- Display: " << round(displayEnd - displayStart) << "ms" << std::endl;
    plotter.plot(tracker, (frameEnd - expStart) / 1000.0);
    iter++;
  }

  logger.close();
  // Release the Panda3D framework used by the renderer (cleanup call assumed for this ViSP version)
  vpPanda3DFrameworkManager::getInstance().exit();
  return EXIT_SUCCESS;
}
#endif