Visual Servoing Platform  version 3.3.0 under development (2020-02-17)
vpRealSense2.cpp
1 /****************************************************************************
2  *
3  * ViSP, open source Visual Servoing Platform software.
4  * Copyright (C) 2005 - 2019 by Inria. All rights reserved.
5  *
6  * This software is free software; you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License as published by
8  * the Free Software Foundation; either version 2 of the License, or
9  * (at your option) any later version.
10  * See the file LICENSE.txt at the root directory of this source
11  * distribution for additional information about the GNU GPL.
12  *
13  * For using ViSP with software that can not be combined with the GNU
14  * GPL, please contact Inria about acquiring a ViSP Professional
15  * Edition License.
16  *
17  * See http://visp.inria.fr for more information.
18  *
19  * This software was developed at:
20  * Inria Rennes - Bretagne Atlantique
21  * Campus Universitaire de Beaulieu
22  * 35042 Rennes Cedex
23  * France
24  *
25  * If you have questions regarding the use of this file, please contact
26  * Inria at visp@inria.fr
27  *
28  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
29  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
30  *
31  * Description:
32  * librealSense2 interface.
33  *
34  *****************************************************************************/
35 
36 #include <visp3/core/vpConfig.h>
37 
38 #if defined(VISP_HAVE_REALSENSE2) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
39 #include <iomanip>
40 #include <map>
41 #include <set>
42 #include <cstring>
43 #include <visp3/core/vpImageConvert.h>
44 #include <visp3/sensor/vpRealSense2.h>
45 
46 #define MANUAL_POINTCLOUD 1
47 
48 namespace {
49 bool operator==(const rs2_extrinsics &lhs, const rs2_extrinsics &rhs)
50 {
51  for (int i = 0; i < 3; i++) {
52  for (int j = 0; j < 3; j++) {
53  if (std::fabs(lhs.rotation[i*3 + j] - rhs.rotation[i*3 + j]) >
54  std::numeric_limits<float>::epsilon()) {
55  return false;
56  }
57  }
58 
59  if (std::fabs(lhs.translation[i] - rhs.translation[i]) >
60  std::numeric_limits<float>::epsilon()) {
61  return false;
62  }
63  }
64 
65  return true;
66 }
67 }
68 
73  : m_depthScale(0.0f), m_invalidDepthValue(0.0f),
74  m_max_Z(8.0f), m_pipe(), m_pipelineProfile(), m_pointcloud(), m_points()
75 {
76 }
77 
83 
89 {
90  auto data = m_pipe.wait_for_frames();
91  auto color_frame = data.get_color_frame();
92  getGreyFrame(color_frame, grey);
93 }
94 
100 {
101  auto data = m_pipe.wait_for_frames();
102  auto color_frame = data.get_color_frame();
103  getColorFrame(color_frame, color);
104 }
105 
115 void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
116  std::vector<vpColVector> *const data_pointCloud, unsigned char *const data_infrared,
117  rs2::align *const align_to)
118 {
119  acquire(data_image, data_depth, data_pointCloud, data_infrared, NULL, align_to);
120 }
121 
178 void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
179  std::vector<vpColVector> *const data_pointCloud, unsigned char *const data_infrared1,
180  unsigned char *const data_infrared2, rs2::align *const align_to)
181 {
182  auto data = m_pipe.wait_for_frames();
183  if (align_to != NULL) {
184  // Infrared stream is not aligned
185  // see https://github.com/IntelRealSense/librealsense/issues/1556#issuecomment-384919994
186 #if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
187  data = align_to->process(data);
188 #else
189  data = align_to->proccess(data);
190 #endif
191  }
192 
193  if (data_image != NULL) {
194  auto color_frame = data.get_color_frame();
195  getNativeFrameData(color_frame, data_image);
196  }
197 
198  if (data_depth != NULL || data_pointCloud != NULL) {
199  auto depth_frame = data.get_depth_frame();
200  if (data_depth != NULL) {
201  getNativeFrameData(depth_frame, data_depth);
202  }
203 
204  if (data_pointCloud != NULL) {
205  getPointcloud(depth_frame, *data_pointCloud);
206  }
207  }
208 
209  if (data_infrared1 != NULL) {
210  auto infrared_frame = data.first(RS2_STREAM_INFRARED);
211  getNativeFrameData(infrared_frame, data_infrared1);
212  }
213 
214  if (data_infrared2 != NULL) {
215  auto infrared_frame = data.get_infrared_frame(2);
216  getNativeFrameData(infrared_frame, data_infrared2);
217  }
218 }
219 
220 #ifdef VISP_HAVE_PCL
221 
232 void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
233  std::vector<vpColVector> *const data_pointCloud,
234  pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud, unsigned char *const data_infrared,
235  rs2::align *const align_to)
236 {
237  acquire(data_image, data_depth, data_pointCloud, pointcloud, data_infrared, NULL, align_to);
238 }
239 
253 void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
254  std::vector<vpColVector> *const data_pointCloud,
255  pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud, unsigned char *const data_infrared1,
256  unsigned char *const data_infrared2, rs2::align *const align_to)
257 {
258  auto data = m_pipe.wait_for_frames();
259  if (align_to != NULL) {
260  // Infrared stream is not aligned
261  // see https://github.com/IntelRealSense/librealsense/issues/1556#issuecomment-384919994
262 #if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
263  data = align_to->process(data);
264 #else
265  data = align_to->proccess(data);
266 #endif
267  }
268 
269  if (data_image != NULL) {
270  auto color_frame = data.get_color_frame();
271  getNativeFrameData(color_frame, data_image);
272  }
273 
274  if (data_depth != NULL || data_pointCloud != NULL || pointcloud != NULL) {
275  auto depth_frame = data.get_depth_frame();
276  if (data_depth != NULL) {
277  getNativeFrameData(depth_frame, data_depth);
278  }
279 
280  if (data_pointCloud != NULL) {
281  getPointcloud(depth_frame, *data_pointCloud);
282  }
283 
284  if (pointcloud != NULL) {
285  getPointcloud(depth_frame, pointcloud);
286  }
287  }
288 
289  if (data_infrared1 != NULL) {
290  auto infrared_frame = data.first(RS2_STREAM_INFRARED);
291  getNativeFrameData(infrared_frame, data_infrared1);
292  }
293 
294  if (data_infrared2 != NULL) {
295  auto infrared_frame = data.get_infrared_frame(2);
296  getNativeFrameData(infrared_frame, data_infrared2);
297  }
298 }
299 
311 void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
312  std::vector<vpColVector> *const data_pointCloud,
313  pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud, unsigned char *const data_infrared,
314  rs2::align *const align_to)
315 {
316  acquire(data_image, data_depth, data_pointCloud, pointcloud, data_infrared, NULL, align_to);
317 }
318 
332 void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
333  std::vector<vpColVector> *const data_pointCloud,
334  pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud, unsigned char *const data_infrared1,
335  unsigned char *const data_infrared2, rs2::align *const align_to)
336 {
337  auto data = m_pipe.wait_for_frames();
338  if (align_to != NULL) {
339  // Infrared stream is not aligned
340  // see https://github.com/IntelRealSense/librealsense/issues/1556#issuecomment-384919994
341 #if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
342  data = align_to->process(data);
343 #else
344  data = align_to->proccess(data);
345 #endif
346  }
347 
348  auto color_frame = data.get_color_frame();
349  if (data_image != NULL) {
350  getNativeFrameData(color_frame, data_image);
351  }
352 
353  if (data_depth != NULL || data_pointCloud != NULL || pointcloud != NULL) {
354  auto depth_frame = data.get_depth_frame();
355  if (data_depth != NULL) {
356  getNativeFrameData(depth_frame, data_depth);
357  }
358 
359  if (data_pointCloud != NULL) {
360  getPointcloud(depth_frame, *data_pointCloud);
361  }
362 
363  if (pointcloud != NULL) {
364  getPointcloud(depth_frame, color_frame, pointcloud);
365  }
366  }
367 
368  if (data_infrared1 != NULL) {
369  auto infrared_frame = data.first(RS2_STREAM_INFRARED);
370  getNativeFrameData(infrared_frame, data_infrared1);
371  }
372 
373  if (data_infrared2 != NULL) {
374  auto infrared_frame = data.get_infrared_frame(2);
375  getNativeFrameData(infrared_frame, data_infrared2);
376  }
377 }
378 #endif
379 
391 void vpRealSense2::close() { m_pipe.stop(); }
392 
403 {
404  auto rs_stream = m_pipelineProfile.get_stream(stream).as<rs2::video_stream_profile>();
405  auto intrinsics = rs_stream.get_intrinsics();
406 
407  vpCameraParameters cam;
408  double u0 = intrinsics.ppx;
409  double v0 = intrinsics.ppy;
410  double px = intrinsics.fx;
411  double py = intrinsics.fy;
412 
414  double kdu = intrinsics.coeffs[0];
415  cam.initPersProjWithDistortion(px, py, u0, v0, -kdu, kdu);
416  } else {
417  cam.initPersProjWithoutDistortion(px, py, u0, v0);
418  }
419 
420  return cam;
421 }
422 
430 rs2_intrinsics vpRealSense2::getIntrinsics(const rs2_stream &stream) const
431 {
432  auto vsp = m_pipelineProfile.get_stream(stream).as<rs2::video_stream_profile>();
433  return vsp.get_intrinsics();
434 }
435 
436 void vpRealSense2::getColorFrame(const rs2::frame &frame, vpImage<vpRGBa> &color)
437 {
438  auto vf = frame.as<rs2::video_frame>();
439  unsigned int width = (unsigned int)vf.get_width();
440  unsigned int height = (unsigned int)vf.get_height();
441  color.resize(height, width);
442 
443  if (frame.get_profile().format() == RS2_FORMAT_RGB8) {
444  vpImageConvert::RGBToRGBa(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
445  reinterpret_cast<unsigned char *>(color.bitmap), width, height);
446  } else if (frame.get_profile().format() == RS2_FORMAT_RGBA8) {
447  memcpy(reinterpret_cast<unsigned char *>(color.bitmap),
448  const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
449  width * height * sizeof(vpRGBa));
450  } else if (frame.get_profile().format() == RS2_FORMAT_BGR8) {
451  vpImageConvert::BGRToRGBa(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
452  reinterpret_cast<unsigned char *>(color.bitmap), width, height);
453  } else {
454  throw vpException(vpException::fatalError, "RealSense Camera - color stream not supported!");
455  }
456 }
457 
463 {
464  return m_depthScale;
465 }
466 
467 void vpRealSense2::getGreyFrame(const rs2::frame &frame, vpImage<unsigned char> &grey)
468 {
469  auto vf = frame.as<rs2::video_frame>();
470  unsigned int width = (unsigned int)vf.get_width();
471  unsigned int height = (unsigned int)vf.get_height();
472  grey.resize(height, width);
473 
474  if (frame.get_profile().format() == RS2_FORMAT_RGB8) {
475  vpImageConvert::RGBToGrey(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
476  grey.bitmap, width, height);
477  } else if (frame.get_profile().format() == RS2_FORMAT_RGBA8) {
478  vpImageConvert::RGBaToGrey(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
479  grey.bitmap, width * height);
480  } else if (frame.get_profile().format() == RS2_FORMAT_BGR8) {
481  vpImageConvert::BGRToGrey(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
482  grey.bitmap, width, height);
483  } else {
484  throw vpException(vpException::fatalError, "RealSense Camera - color stream not supported!");
485  }
486 }
487 
488 void vpRealSense2::getNativeFrameData(const rs2::frame &frame, unsigned char *const data)
489 {
490  auto vf = frame.as<rs2::video_frame>();
491  int size = vf.get_width() * vf.get_height();
492 
493  switch (frame.get_profile().format()) {
494  case RS2_FORMAT_RGB8:
495  case RS2_FORMAT_BGR8:
496  memcpy(data, frame.get_data(), size * 3);
497  break;
498 
499  case RS2_FORMAT_RGBA8:
500  case RS2_FORMAT_BGRA8:
501  memcpy(data, frame.get_data(), size * 4);
502  break;
503 
504  case RS2_FORMAT_Y16:
505  case RS2_FORMAT_Z16:
506  memcpy(data, frame.get_data(), size * 2);
507  break;
508 
509  case RS2_FORMAT_Y8:
510  memcpy(data, frame.get_data(), size);
511  break;
512 
513  default:
514  break;
515  }
516 }
517 
518 void vpRealSense2::getPointcloud(const rs2::depth_frame &depth_frame, std::vector<vpColVector> &pointcloud)
519 {
520  if (m_depthScale <= std::numeric_limits<float>::epsilon()) {
521  std::stringstream ss;
522  ss << "Error, depth scale <= 0: " << m_depthScale;
523  throw vpException(vpException::fatalError, ss.str());
524  }
525 
526  auto vf = depth_frame.as<rs2::video_frame>();
527  const int width = vf.get_width();
528  const int height = vf.get_height();
529  pointcloud.resize((size_t)(width * height));
530 
531  const uint16_t *p_depth_frame = reinterpret_cast<const uint16_t *>(depth_frame.get_data());
532  const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
533 
534  // Multi-threading if OpenMP
535  // Concurrent writes at different locations are safe
536  #pragma omp parallel for schedule(dynamic)
537  for (int i = 0; i < height; i++) {
538  auto depth_pixel_index = i * width;
539 
540  for (int j = 0; j < width; j++, depth_pixel_index++) {
541  if (p_depth_frame[depth_pixel_index] == 0) {
542  pointcloud[(size_t)depth_pixel_index].resize(4, false);
543  pointcloud[(size_t)depth_pixel_index][0] = m_invalidDepthValue;
544  pointcloud[(size_t)depth_pixel_index][1] = m_invalidDepthValue;
545  pointcloud[(size_t)depth_pixel_index][2] = m_invalidDepthValue;
546  pointcloud[(size_t)depth_pixel_index][3] = 1.0;
547  continue;
548  }
549 
550  // Get the depth value of the current pixel
551  auto pixels_distance = m_depthScale * p_depth_frame[depth_pixel_index];
552 
553  float points[3];
554  const float pixel[] = {(float)j, (float)i};
555  rs2_deproject_pixel_to_point(points, &depth_intrinsics, pixel, pixels_distance);
556 
557  if (pixels_distance > m_max_Z)
558  points[0] = points[1] = points[2] = m_invalidDepthValue;
559 
560  pointcloud[(size_t)depth_pixel_index].resize(4, false);
561  pointcloud[(size_t)depth_pixel_index][0] = points[0];
562  pointcloud[(size_t)depth_pixel_index][1] = points[1];
563  pointcloud[(size_t)depth_pixel_index][2] = points[2];
564  pointcloud[(size_t)depth_pixel_index][3] = 1.0;
565  }
566  }
567 }
568 
569 
570 #ifdef VISP_HAVE_PCL
571 void vpRealSense2::getPointcloud(const rs2::depth_frame &depth_frame, pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud)
572 {
573  if (m_depthScale <= std::numeric_limits<float>::epsilon()) {
574  std::stringstream ss;
575  ss << "Error, depth scale <= 0: " << m_depthScale;
576  throw vpException(vpException::fatalError, ss.str());
577  }
578 
579  auto vf = depth_frame.as<rs2::video_frame>();
580  const int width = vf.get_width();
581  const int height = vf.get_height();
582  pointcloud->width = (uint32_t)width;
583  pointcloud->height = (uint32_t)height;
584  pointcloud->resize((size_t)(width * height));
585 
586 #if MANUAL_POINTCLOUD // faster to compute manually when tested
587  const uint16_t *p_depth_frame = reinterpret_cast<const uint16_t *>(depth_frame.get_data());
588  const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
589 
590  // Multi-threading if OpenMP
591  // Concurrent writes at different locations are safe
592 #pragma omp parallel for schedule(dynamic)
593  for (int i = 0; i < height; i++) {
594  auto depth_pixel_index = i * width;
595 
596  for (int j = 0; j < width; j++, depth_pixel_index++) {
597  if (p_depth_frame[depth_pixel_index] == 0) {
598  pointcloud->points[(size_t)(depth_pixel_index)].x = m_invalidDepthValue;
599  pointcloud->points[(size_t)(depth_pixel_index)].y = m_invalidDepthValue;
600  pointcloud->points[(size_t)(depth_pixel_index)].z = m_invalidDepthValue;
601  continue;
602  }
603 
604  // Get the depth value of the current pixel
605  auto pixels_distance = m_depthScale * p_depth_frame[depth_pixel_index];
606 
607  float points[3];
608  const float pixel[] = {(float)j, (float)i};
609  rs2_deproject_pixel_to_point(points, &depth_intrinsics, pixel, pixels_distance);
610 
611  if (pixels_distance > m_max_Z)
612  points[0] = points[1] = points[2] = m_invalidDepthValue;
613 
614  pointcloud->points[(size_t)(depth_pixel_index)].x = points[0];
615  pointcloud->points[(size_t)(depth_pixel_index)].y = points[1];
616  pointcloud->points[(size_t)(depth_pixel_index)].z = points[2];
617  }
618  }
619 #else
620  m_points = m_pointcloud.calculate(depth_frame);
621  auto vertices = m_points.get_vertices();
622 
623  for (size_t i = 0; i < m_points.size(); i++) {
624  if (vertices[i].z <= 0.0f || vertices[i].z > m_max_Z) {
625  pointcloud->points[i].x = m_invalidDepthValue;
626  pointcloud->points[i].y = m_invalidDepthValue;
627  pointcloud->points[i].z = m_invalidDepthValue;
628  } else {
629  pointcloud->points[i].x = vertices[i].x;
630  pointcloud->points[i].y = vertices[i].y;
631  pointcloud->points[i].z = vertices[i].z;
632  }
633  }
634 #endif
635 }
636 
637 void vpRealSense2::getPointcloud(const rs2::depth_frame &depth_frame, const rs2::frame &color_frame,
638  pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud)
639 {
640  if (m_depthScale <= std::numeric_limits<float>::epsilon()) {
641  throw vpException(vpException::fatalError, "Error, depth scale <= 0: %f", m_depthScale);
642  }
643 
644  auto vf = depth_frame.as<rs2::video_frame>();
645  const int depth_width = vf.get_width();
646  const int depth_height = vf.get_height();
647  pointcloud->width = static_cast<uint32_t>(depth_width);
648  pointcloud->height = static_cast<uint32_t>(depth_height);
649  pointcloud->resize(static_cast<uint32_t>(depth_width * depth_height));
650 
651  vf = color_frame.as<rs2::video_frame>();
652  const int color_width = vf.get_width();
653  const int color_height = vf.get_height();
654 
655  const uint16_t *p_depth_frame = reinterpret_cast<const uint16_t *>(depth_frame.get_data());
656  const rs2_extrinsics depth2ColorExtrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().
657  get_extrinsics_to(color_frame.get_profile().as<rs2::video_stream_profile>());
658  const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
659  const rs2_intrinsics color_intrinsics = color_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
660 
661  auto color_format = color_frame.as<rs2::video_frame>().get_profile().format();
662  const bool swap_rb = color_format == RS2_FORMAT_BGR8 || color_format == RS2_FORMAT_BGRA8;
663  unsigned int nb_color_pixel = (color_format == RS2_FORMAT_RGB8 || color_format == RS2_FORMAT_BGR8) ? 3 : 4;
664  const unsigned char *p_color_frame = static_cast<const unsigned char *>(color_frame.get_data());
665  rs2_extrinsics identity;
666  memset(identity.rotation, 0, sizeof(identity.rotation));
667  memset(identity.translation, 0, sizeof(identity.translation));
668  for (int i = 0; i < 3; i++) {
669  identity.rotation[i*3 + i] = 1;
670  }
671  const bool registered_streams = (depth2ColorExtrinsics == identity) &&
672  (color_width == depth_width) && (color_height == depth_height);
673 
674  // Multi-threading if OpenMP
675  // Concurrent writes at different locations are safe
676 #pragma omp parallel for schedule(dynamic)
677  for (int i = 0; i < depth_height; i++) {
678  auto depth_pixel_index = i * depth_width;
679 
680  for (int j = 0; j < depth_width; j++, depth_pixel_index++) {
681  if (p_depth_frame[depth_pixel_index] == 0) {
682  pointcloud->points[(size_t)depth_pixel_index].x = m_invalidDepthValue;
683  pointcloud->points[(size_t)depth_pixel_index].y = m_invalidDepthValue;
684  pointcloud->points[(size_t)depth_pixel_index].z = m_invalidDepthValue;
685 
686  // For out of bounds color data, default to a shade of blue in order to
687  // visually distinguish holes. This color value is same as the librealsense
688  // out of bounds color value.
689 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
690  unsigned int r = 96, g = 157, b = 198;
691  uint32_t rgb = (static_cast<uint32_t>(r) << 16 | static_cast<uint32_t>(g) << 8 | static_cast<uint32_t>(b));
692 
693  pointcloud->points[(size_t)depth_pixel_index].rgb = *reinterpret_cast<float *>(&rgb);
694 #else
695  pointcloud->points[(size_t)depth_pixel_index].r = (uint8_t)96;
696  pointcloud->points[(size_t)depth_pixel_index].g = (uint8_t)157;
697  pointcloud->points[(size_t)depth_pixel_index].b = (uint8_t)198;
698 #endif
699  continue;
700  }
701 
702  // Get the depth value of the current pixel
703  auto pixels_distance = m_depthScale * p_depth_frame[depth_pixel_index];
704 
705  float depth_point[3];
706  const float pixel[] = {(float)j, (float)i};
707  rs2_deproject_pixel_to_point(depth_point, &depth_intrinsics, pixel, pixels_distance);
708 
709  if (pixels_distance > m_max_Z) {
710  depth_point[0] = depth_point[1] = depth_point[2] = m_invalidDepthValue;
711  }
712 
713  pointcloud->points[(size_t)depth_pixel_index].x = depth_point[0];
714  pointcloud->points[(size_t)depth_pixel_index].y = depth_point[1];
715  pointcloud->points[(size_t)depth_pixel_index].z = depth_point[2];
716 
717  if (!registered_streams) {
718  float color_point[3];
719  rs2_transform_point_to_point(color_point, &depth2ColorExtrinsics, depth_point);
720  float color_pixel[2];
721  rs2_project_point_to_pixel(color_pixel, &color_intrinsics, color_point);
722 
723  if (color_pixel[1] < 0 || color_pixel[1] >= color_height || color_pixel[0] < 0 || color_pixel[0] >= color_width) {
724  // For out of bounds color data, default to a shade of blue in order to
725  // visually distinguish holes. This color value is same as the librealsense
726  // out of bounds color value.
727 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
728  unsigned int r = 96, g = 157, b = 198;
729  uint32_t rgb = (static_cast<uint32_t>(r) << 16 | static_cast<uint32_t>(g) << 8 | static_cast<uint32_t>(b));
730 
731  pointcloud->points[(size_t)depth_pixel_index].rgb = *reinterpret_cast<float *>(&rgb);
732 #else
733  pointcloud->points[(size_t)depth_pixel_index].r = (uint8_t)96;
734  pointcloud->points[(size_t)depth_pixel_index].g = (uint8_t)157;
735  pointcloud->points[(size_t)depth_pixel_index].b = (uint8_t)198;
736 #endif
737  } else {
738  unsigned int i_ = (unsigned int)color_pixel[1];
739  unsigned int j_ = (unsigned int)color_pixel[0];
740 
741 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
742  uint32_t rgb = 0;
743  if (swap_rb) {
744  rgb =
745  (static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel]) |
746  static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1]) << 8 |
747  static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2]) << 16);
748  } else {
749  rgb = (static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel]) << 16 |
750  static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1]) << 8 |
751  static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2]));
752  }
753 
754  pointcloud->points[(size_t)(i * depth_width + j)].rgb = *reinterpret_cast<float *>(&rgb);
755 #else
756  if (swap_rb) {
757  pointcloud->points[(size_t)depth_pixel_index].b =
758  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel];
759  pointcloud->points[(size_t)depth_pixel_index].g =
760  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1];
761  pointcloud->points[(size_t)depth_pixel_index].r =
762  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2];
763  } else {
764  pointcloud->points[(size_t)depth_pixel_index].r =
765  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel];
766  pointcloud->points[(size_t)depth_pixel_index].g =
767  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1];
768  pointcloud->points[(size_t)depth_pixel_index].b =
769  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2];
770  }
771 #endif
772  }
773  } else {
774 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
775  uint32_t rgb = 0;
776  if (swap_rb) {
777  rgb =
778  (static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel]) |
779  static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1]) << 8 |
780  static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2]) << 16);
781  } else {
782  rgb = (static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel]) << 16 |
783  static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1]) << 8 |
784  static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2]));
785  }
786 
787  pointcloud->points[(size_t)(i * depth_width + j)].rgb = *reinterpret_cast<float *>(&rgb);
788 #else
789  if (swap_rb) {
790  pointcloud->points[(size_t)depth_pixel_index].b =
791  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel];
792  pointcloud->points[(size_t)depth_pixel_index].g =
793  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1];
794  pointcloud->points[(size_t)depth_pixel_index].r =
795  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2];
796  } else {
797  pointcloud->points[(size_t)depth_pixel_index].r =
798  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel];
799  pointcloud->points[(size_t)depth_pixel_index].g =
800  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1];
801  pointcloud->points[(size_t)depth_pixel_index].b =
802  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2];
803  }
804 #endif
805  }
806  }
807  }
808 }
809 
810 #endif
811 
/*!
  Build the homogeneous transformation ^{to}M_{from} between two streams of the
  active pipeline profile, from the librealsense extrinsics.
  \param from : source stream.
  \param to : destination stream.
  \return the rigid transformation expressing \e from frame coordinates in the
  \e to frame.
 */
817 vpHomogeneousMatrix vpRealSense2::getTransformation(const rs2_stream &from, const rs2_stream &to) const
818 {
819  auto from_stream = m_pipelineProfile.get_stream(from);
820  auto to_stream = m_pipelineProfile.get_stream(to);
821  rs2_extrinsics extrinsics = from_stream.get_extrinsics_to(to_stream);
822 
 // NOTE(review): original lines 823-824 are missing from this extract;
 // presumably they declare the translation vector t (vpTranslationVector) and
 // rotation matrix R (vpRotationMatrix) used below -- confirm against the
 // upstream file.
825  for (unsigned int i = 0; i < 3; i++) {
826  t[i] = extrinsics.translation[i];
827  for (unsigned int j = 0; j < 3; j++)
828  R[i][j] = extrinsics.rotation[j * 3 + i]; //rotation is column-major order
829  }
830 
831  vpHomogeneousMatrix to_M_from(t, R);
832  return to_M_from;
833 }
834 
838 void vpRealSense2::open(const rs2::config &cfg)
839 {
840  m_pipelineProfile = m_pipe.start(cfg);
841 
842  rs2::device dev = m_pipelineProfile.get_device();
843 
844  // Go over the device's sensors
845  for (rs2::sensor &sensor : dev.query_sensors()) {
846  // Check if the sensor is a depth sensor
847  if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
848  m_depthScale = dpt.get_depth_scale();
849  }
850  }
851 }
852 
853 namespace
854 {
855 // Helper functions to print information about the RealSense device
856 void print(const rs2_extrinsics &extrinsics, std::ostream &os)
857 {
858  std::stringstream ss;
859  ss << "Rotation Matrix:\n";
860 
861  for (auto i = 0; i < 3; ++i) {
862  for (auto j = 0; j < 3; ++j) {
863  ss << std::left << std::setw(15) << std::setprecision(5) << extrinsics.rotation[j * 3 + i];
864  }
865  ss << std::endl;
866  }
867 
868  ss << "\nTranslation Vector: ";
869  for (size_t i = 0; i < sizeof(extrinsics.translation) / sizeof(extrinsics.translation[0]); ++i)
870  ss << std::setprecision(15) << extrinsics.translation[i] << " ";
871 
872  os << ss.str() << "\n\n";
873 }
874 
875 void print(const rs2_intrinsics &intrinsics, std::ostream &os)
876 {
877  std::stringstream ss;
878  ss << std::left << std::setw(14) << "Width: "
879  << "\t" << intrinsics.width << "\n"
880  << std::left << std::setw(14) << "Height: "
881  << "\t" << intrinsics.height << "\n"
882  << std::left << std::setw(14) << "PPX: "
883  << "\t" << std::setprecision(15) << intrinsics.ppx << "\n"
884  << std::left << std::setw(14) << "PPY: "
885  << "\t" << std::setprecision(15) << intrinsics.ppy << "\n"
886  << std::left << std::setw(14) << "Fx: "
887  << "\t" << std::setprecision(15) << intrinsics.fx << "\n"
888  << std::left << std::setw(14) << "Fy: "
889  << "\t" << std::setprecision(15) << intrinsics.fy << "\n"
890  << std::left << std::setw(14) << "Distortion: "
891  << "\t" << rs2_distortion_to_string(intrinsics.model) << "\n"
892  << std::left << std::setw(14) << "Coeffs: ";
893 
894  for (size_t i = 0; i < sizeof(intrinsics.coeffs) / sizeof(intrinsics.coeffs[0]); ++i)
895  ss << "\t" << std::setprecision(15) << intrinsics.coeffs[i] << " ";
896 
897  os << ss.str() << "\n\n";
898 }
899 
900 void safe_get_intrinsics(const rs2::video_stream_profile &profile, rs2_intrinsics &intrinsics)
901 {
902  try {
903  intrinsics = profile.get_intrinsics();
904  } catch (...) {
905  }
906 }
907 
908 bool operator==(const rs2_intrinsics &lhs, const rs2_intrinsics &rhs)
909 {
910  return lhs.width == rhs.width && lhs.height == rhs.height && lhs.ppx == rhs.ppx && lhs.ppy == rhs.ppy &&
911  lhs.fx == rhs.fx && lhs.fy == rhs.fy && lhs.model == rhs.model &&
912  !std::memcmp(lhs.coeffs, rhs.coeffs, sizeof(rhs.coeffs));
913 }
914 
915 std::string get_str_formats(const std::set<rs2_format> &formats)
916 {
917  std::stringstream ss;
918  for (auto format = formats.begin(); format != formats.end(); ++format) {
919  ss << *format << ((format != formats.end()) && (next(format) == formats.end()) ? "" : "/");
920  }
921  return ss.str();
922 }
923 
924 struct stream_and_resolution {
925  rs2_stream stream;
926  int stream_index;
927  int width;
928  int height;
929  std::string stream_name;
930 
931  bool operator<(const stream_and_resolution &obj) const
932  {
933  return (std::make_tuple(stream, stream_index, width, height) <
934  std::make_tuple(obj.stream, obj.stream_index, obj.width, obj.height));
935  }
936 };
937 
938 struct stream_and_index {
939  rs2_stream stream;
940  int stream_index;
941 
942  bool operator<(const stream_and_index &obj) const
943  {
944  return (std::make_tuple(stream, stream_index) < std::make_tuple(obj.stream, obj.stream_index));
945  }
946 };
947 } // anonymous namespace
948 
/*!
  Dump on \e os a human-readable description of the RealSense device attached
  to \e rs: device info, per-sensor options, supported stream modes, then the
  per-stream intrinsics and pairwise extrinsics.
  NOTE(review): reads rs.m_pipelineProfile directly, so the device is presumed
  already open()'ed -- confirm the friend declaration in the header.
 */
969 std::ostream &operator<<(std::ostream &os, const vpRealSense2 &rs)
970 {
 // Header line: name / serial / firmware columns.
971  os << std::left << std::setw(30) << "Device Name" << std::setw(20) << "Serial Number" << std::setw(20)
972  << "Firmware Version" << std::endl;
973 
974  rs2::device dev = rs.m_pipelineProfile.get_device();
975  os << std::left << std::setw(30) << dev.get_info(RS2_CAMERA_INFO_NAME) << std::setw(20)
976  << dev.get_info(RS2_CAMERA_INFO_SERIAL_NUMBER) << std::setw(20) << dev.get_info(RS2_CAMERA_INFO_FIRMWARE_VERSION)
977  << std::endl;
978 
979  // Show which options are supported by this device
980  os << " Device info: \n";
981  for (auto j = 0; j < RS2_CAMERA_INFO_COUNT; ++j) {
982  auto param = static_cast<rs2_camera_info>(j);
983  if (dev.supports(param))
984  os << " " << std::left << std::setw(30) << rs2_camera_info_to_string(rs2_camera_info(param)) << ": \t"
985  << dev.get_info(param) << "\n";
986  }
987 
988  os << "\n";
989 
 // Per-sensor option ranges (min / max / step / default).
990  for (auto &&sensor : dev.query_sensors()) {
991  os << "Options for " << sensor.get_info(RS2_CAMERA_INFO_NAME) << std::endl;
992 
993  os << std::setw(55) << " Supported options:" << std::setw(10) << "min" << std::setw(10) << " max" << std::setw(6)
994  << " step" << std::setw(10) << " default" << std::endl;
995  for (auto j = 0; j < RS2_OPTION_COUNT; ++j) {
996  auto opt = static_cast<rs2_option>(j);
997  if (sensor.supports(opt)) {
998  auto range = sensor.get_option_range(opt);
999  os << " " << std::left << std::setw(50) << opt << " : " << std::setw(5) << range.min << "... "
1000  << std::setw(12) << range.max << std::setw(6) << range.step << std::setw(10) << range.def << "\n";
1001  }
1002  }
1003 
1004  os << "\n";
1005  }
1006 
 // Per-sensor supported stream modes (resolution / fps / pixel format).
1007  for (auto &&sensor : dev.query_sensors()) {
1008  os << "Stream Profiles supported by " << sensor.get_info(RS2_CAMERA_INFO_NAME) << "\n";
1009 
1010  os << std::setw(55) << " Supported modes:" << std::setw(10) << "stream" << std::setw(10) << " resolution"
1011  << std::setw(6) << " fps" << std::setw(10) << " format"
1012  << "\n";
1013  // Show which streams are supported by this device
1014  for (auto &&profile : sensor.get_stream_profiles()) {
1015  if (auto video = profile.as<rs2::video_stream_profile>()) {
1016  os << " " << profile.stream_name() << "\t " << video.width() << "x" << video.height() << "\t@ "
1017  << profile.fps() << "Hz\t" << profile.format() << "\n";
1018  } else {
1019  os << " " << profile.stream_name() << "\t@ " << profile.fps() << "Hz\t" << profile.format() << "\n";
1020  }
1021  }
1022 
1023  os << "\n";
1024  }
1025 
 // Collect, per (stream, index, resolution), the distinct intrinsics together
 // with the set of pixel formats sharing each; also remember one profile per
 // stream for the extrinsics dump below.
1026  std::map<stream_and_index, rs2::stream_profile> streams;
1027  std::map<stream_and_resolution, std::vector<std::pair<std::set<rs2_format>, rs2_intrinsics> > > intrinsics_map;
1028  for (auto &&sensor : dev.query_sensors()) {
1029  // Intrinsics
1030  for (auto &&profile : sensor.get_stream_profiles()) {
1031  if (auto video = profile.as<rs2::video_stream_profile>()) {
1032  if (streams.find(stream_and_index{profile.stream_type(), profile.stream_index()}) == streams.end()) {
1033  streams[stream_and_index{profile.stream_type(), profile.stream_index()}] = profile;
1034  }
1035 
1036  rs2_intrinsics intrinsics{};
1037  stream_and_resolution stream_res{profile.stream_type(), profile.stream_index(), video.width(), video.height(),
1038  profile.stream_name()};
1039  safe_get_intrinsics(video, intrinsics);
1040  auto it = std::find_if(
1041  (intrinsics_map[stream_res]).begin(), (intrinsics_map[stream_res]).end(),
1042  [&](const std::pair<std::set<rs2_format>, rs2_intrinsics> &kvp) { return intrinsics == kvp.second; });
1043  if (it == (intrinsics_map[stream_res]).end()) {
1044  (intrinsics_map[stream_res]).push_back({{profile.format()}, intrinsics});
1045  } else {
1046  it->first.insert(profile.format()); // If the intrinsics are equals,
1047  // add the profile format to
1048  // format set
1049  }
1050  }
1051  }
1052  }
1053 
1054  os << "Provided Intrinsic:\n";
1055  for (auto &kvp : intrinsics_map) {
1056  auto stream_res = kvp.first;
1057  for (auto &intrinsics : kvp.second) {
1058  auto formats = get_str_formats(intrinsics.first);
1059  os << "Intrinsic of \"" << stream_res.stream_name << "\"\t " << stream_res.width << "x" << stream_res.height
1060  << "\t " << formats << "\n";
 // A still value-initialized rs2_intrinsics means safe_get_intrinsics()
 // could not retrieve anything for this profile.
1061  if (intrinsics.second == rs2_intrinsics{}) {
1062  os << "Intrinsic NOT available!\n\n";
1063  } else {
1064  print(intrinsics.second, os);
1065  }
1066  }
1067  }
1068 
1069  // Print Extrinsics
1070  os << "\nProvided Extrinsic:\n";
1071  for (auto kvp1 = streams.begin(); kvp1 != streams.end(); ++kvp1) {
1072  for (auto kvp2 = streams.begin(); kvp2 != streams.end(); ++kvp2) {
1073  os << "Extrinsic from \"" << kvp1->second.stream_name() << "\"\t "
1074  << "To"
1075  << "\t \"" << kvp2->second.stream_name() << "\"\n";
1076  auto extrinsics = kvp1->second.get_extrinsics_to(kvp2->second);
1077  print(extrinsics, os);
1078  }
1079  }
1080 
1081  return os;
1082 }
1083 
1084 #elif !defined(VISP_BUILD_SHARED_LIBS)
1085 // Work arround to avoid warning: libvisp_sensor.a(vpRealSense2.cpp.o) has no
1086 // symbols
1087 void dummy_vpRealSense2(){};
1088 #endif
static void BGRToRGBa(unsigned char *bgr, unsigned char *rgba, unsigned int width, unsigned int height, bool flip=false)
rs2_intrinsics getIntrinsics(const rs2_stream &stream) const
static void RGBToGrey(unsigned char *rgb, unsigned char *grey, unsigned int size)
void resize(unsigned int h, unsigned int w)
resize the image : Image initialization
Definition: vpImage.h:879
Implementation of an homogeneous matrix and operations on such kind of matrices.
static void RGBToRGBa(unsigned char *rgb, unsigned char *rgba, unsigned int size)
static void BGRToGrey(unsigned char *bgr, unsigned char *grey, unsigned int width, unsigned int height, bool flip=false)
Type * bitmap
points toward the bitmap
Definition: vpImage.h:141
virtual ~vpRealSense2()
error that can be emitted by ViSP classes.
Definition: vpException.h:71
float m_depthScale
Definition: vpRealSense2.h:354
void open(const rs2::config &cfg=rs2::config())
void initPersProjWithDistortion(double px, double py, double u0, double v0, double kud, double kdu)
Definition: vpRGBa.h:66
Implementation of a rotation matrix and operations on such kind of matrices.
float getDepthScale()
rs2::pipeline_profile m_pipelineProfile
Definition: vpRealSense2.h:358
void getGreyFrame(const rs2::frame &frame, vpImage< unsigned char > &grey)
Generic class defining intrinsic camera parameters.
void acquire(vpImage< unsigned char > &grey)
rs2::pointcloud m_pointcloud
Definition: vpRealSense2.h:359
void getPointcloud(const rs2::depth_frame &depth_frame, std::vector< vpColVector > &pointcloud)
void initPersProjWithoutDistortion(double px, double py, double u0, double v0)
rs2::pipeline m_pipe
Definition: vpRealSense2.h:357
rs2::points m_points
Definition: vpRealSense2.h:360
vpCameraParameters getCameraParameters(const rs2_stream &stream, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithDistortion) const
float m_invalidDepthValue
Definition: vpRealSense2.h:355
void getNativeFrameData(const rs2::frame &frame, unsigned char *const data)
friend VISP_EXPORT std::ostream & operator<<(std::ostream &os, const vpRealSense2 &rs)
vpHomogeneousMatrix getTransformation(const rs2_stream &from, const rs2_stream &to) const
static void RGBaToGrey(unsigned char *rgba, unsigned char *grey, unsigned int size)
void getColorFrame(const rs2::frame &frame, vpImage< vpRGBa > &color)
Class that consider the case of a translation vector.