// Visual Servoing Platform version 3.6.1 under development (2024-04-16)
// vpRealSense2.cpp
1 /****************************************************************************
2  *
3  * ViSP, open source Visual Servoing Platform software.
4  * Copyright (C) 2005 - 2023 by Inria. All rights reserved.
5  *
6  * This software is free software; you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License as published by
8  * the Free Software Foundation; either version 2 of the License, or
9  * (at your option) any later version.
10  * See the file LICENSE.txt at the root directory of this source
11  * distribution for additional information about the GNU GPL.
12  *
13  * For using ViSP with software that can not be combined with the GNU
14  * GPL, please contact Inria about acquiring a ViSP Professional
15  * Edition License.
16  *
17  * See https://visp.inria.fr for more information.
18  *
19  * This software was developed at:
20  * Inria Rennes - Bretagne Atlantique
21  * Campus Universitaire de Beaulieu
22  * 35042 Rennes Cedex
23  * France
24  *
25  * If you have questions regarding the use of this file, please contact
26  * Inria at visp@inria.fr
27  *
28  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
29  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
30  *
31  * Description:
32  * librealSense2 interface.
33  *
34 *****************************************************************************/
35 
36 #include <visp3/core/vpConfig.h>
37 
38 #if defined(VISP_HAVE_REALSENSE2) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
39 #include <cstring>
40 #include <iomanip>
41 #include <map>
42 #include <set>
43 #include <visp3/core/vpImageConvert.h>
44 #include <visp3/sensor/vpRealSense2.h>
45 
46 #define MANUAL_POINTCLOUD 1
47 
48 namespace
49 {
50 bool operator==(const rs2_extrinsics &lhs, const rs2_extrinsics &rhs)
51 {
52  for (int i = 0; i < 3; i++) {
53  for (int j = 0; j < 3; j++) {
54  if (std::fabs(lhs.rotation[i * 3 + j] - rhs.rotation[i * 3 + j]) > std::numeric_limits<float>::epsilon()) {
55  return false;
56  }
57  }
58 
59  if (std::fabs(lhs.translation[i] - rhs.translation[i]) > std::numeric_limits<float>::epsilon()) {
60  return false;
61  }
62  }
63 
64  return true;
65 }
66 } // namespace
67 
72  : m_depthScale(0.0f), m_invalidDepthValue(0.0f), m_max_Z(8.0f), m_pipe(), m_pipelineProfile(), m_pointcloud(),
73  m_points(), m_pos(), m_quat(), m_rot(), m_product_line(), m_init(false)
74 { }
75 
81 
88 {
89  auto data = m_pipe.wait_for_frames();
90  auto color_frame = data.get_color_frame();
91  getGreyFrame(color_frame, grey);
92  if (ts != nullptr) {
93  *ts = data.get_timestamp();
94  }
95 }
96 
102 void vpRealSense2::acquire(vpImage<vpRGBa> &color, double *ts)
103 {
104  auto data = m_pipe.wait_for_frames();
105  auto color_frame = data.get_color_frame();
106  getColorFrame(color_frame, color);
107  if (ts != nullptr) {
108  *ts = data.get_timestamp();
109  }
110 }
111 
122 void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
123  std::vector<vpColVector> *const data_pointCloud, unsigned char *const data_infrared,
124  rs2::align *const align_to, double *ts)
125 {
126  acquire(data_image, data_depth, data_pointCloud, data_infrared, nullptr, align_to, ts);
127 }
128 
186 void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
187  std::vector<vpColVector> *const data_pointCloud, unsigned char *const data_infrared1,
188  unsigned char *const data_infrared2, rs2::align *const align_to, double *ts)
189 {
190  auto data = m_pipe.wait_for_frames();
191  if (align_to != nullptr) {
192  // Infrared stream is not aligned
193  // see https://github.com/IntelRealSense/librealsense/issues/1556#issuecomment-384919994
194 #if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
195  data = align_to->process(data);
196 #else
197  data = align_to->proccess(data);
198 #endif
199  }
200 
201  if (data_image != nullptr) {
202  auto color_frame = data.get_color_frame();
203  getNativeFrameData(color_frame, data_image);
204  }
205 
206  if (data_depth != nullptr || data_pointCloud != nullptr) {
207  auto depth_frame = data.get_depth_frame();
208  if (data_depth != nullptr) {
209  getNativeFrameData(depth_frame, data_depth);
210  }
211 
212  if (data_pointCloud != nullptr) {
213  getPointcloud(depth_frame, *data_pointCloud);
214  }
215  }
216 
217  if (data_infrared1 != nullptr) {
218  auto infrared_frame = data.first(RS2_STREAM_INFRARED);
219  getNativeFrameData(infrared_frame, data_infrared1);
220  }
221 
222  if (data_infrared2 != nullptr) {
223  auto infrared_frame = data.get_infrared_frame(2);
224  getNativeFrameData(infrared_frame, data_infrared2);
225  }
226 
227  if (ts != nullptr) {
228  *ts = data.get_timestamp();
229  }
230 }
231 
232 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
254 {
255  auto data = m_pipe.wait_for_frames();
256 
257  if (left != nullptr) {
258  auto left_fisheye_frame = data.get_fisheye_frame(1);
259  unsigned int width = static_cast<unsigned int>(left_fisheye_frame.get_width());
260  unsigned int height = static_cast<unsigned int>(left_fisheye_frame.get_height());
261  left->resize(height, width);
262  getNativeFrameData(left_fisheye_frame, (*left).bitmap);
263  }
264 
265  if (right != nullptr) {
266  auto right_fisheye_frame = data.get_fisheye_frame(2);
267  unsigned int width = static_cast<unsigned int>(right_fisheye_frame.get_width());
268  unsigned int height = static_cast<unsigned int>(right_fisheye_frame.get_height());
269  right->resize(height, width);
270  getNativeFrameData(right_fisheye_frame, (*right).bitmap);
271  }
272 
273  if (ts != nullptr) {
274  *ts = data.get_timestamp();
275  }
276 }
277 
307  vpColVector *odo_vel, vpColVector *odo_acc, unsigned int *confidence, double *ts)
308 {
309  auto data = m_pipe.wait_for_frames();
310 
311  if (left != nullptr) {
312  auto left_fisheye_frame = data.get_fisheye_frame(1);
313  unsigned int width = static_cast<unsigned int>(left_fisheye_frame.get_width());
314  unsigned int height = static_cast<unsigned int>(left_fisheye_frame.get_height());
315  left->resize(height, width);
316  getNativeFrameData(left_fisheye_frame, (*left).bitmap);
317  }
318 
319  if (right != nullptr) {
320  auto right_fisheye_frame = data.get_fisheye_frame(2);
321  unsigned int width = static_cast<unsigned int>(right_fisheye_frame.get_width());
322  unsigned int height = static_cast<unsigned int>(right_fisheye_frame.get_height());
323  right->resize(height, width);
324  getNativeFrameData(right_fisheye_frame, (*right).bitmap);
325  }
326 
327  auto pose_frame = data.first_or_default(RS2_STREAM_POSE);
328  auto pose_data = pose_frame.as<rs2::pose_frame>().get_pose_data();
329 
330  if (ts != nullptr) {
331  *ts = data.get_timestamp();
332  }
333 
334  if (cMw != nullptr) {
335  m_pos[0] = static_cast<double>(pose_data.translation.x);
336  m_pos[1] = static_cast<double>(pose_data.translation.y);
337  m_pos[2] = static_cast<double>(pose_data.translation.z);
338 
339  m_quat[0] = static_cast<double>(pose_data.rotation.x);
340  m_quat[1] = static_cast<double>(pose_data.rotation.y);
341  m_quat[2] = static_cast<double>(pose_data.rotation.z);
342  m_quat[3] = static_cast<double>(pose_data.rotation.w);
343 
345  }
346 
347  if (odo_vel != nullptr) {
348  odo_vel->resize(6, false);
349  (*odo_vel)[0] = static_cast<double>(pose_data.velocity.x);
350  (*odo_vel)[1] = static_cast<double>(pose_data.velocity.y);
351  (*odo_vel)[2] = static_cast<double>(pose_data.velocity.z);
352  (*odo_vel)[3] = static_cast<double>(pose_data.angular_velocity.x);
353  (*odo_vel)[4] = static_cast<double>(pose_data.angular_velocity.y);
354  (*odo_vel)[5] = static_cast<double>(pose_data.angular_velocity.z);
355  }
356 
357  if (odo_acc != nullptr) {
358  odo_acc->resize(6, false);
359  (*odo_acc)[0] = static_cast<double>(pose_data.acceleration.x);
360  (*odo_acc)[1] = static_cast<double>(pose_data.acceleration.y);
361  (*odo_acc)[2] = static_cast<double>(pose_data.acceleration.z);
362  (*odo_acc)[3] = static_cast<double>(pose_data.angular_acceleration.x);
363  (*odo_acc)[4] = static_cast<double>(pose_data.angular_acceleration.y);
364  (*odo_acc)[5] = static_cast<double>(pose_data.angular_acceleration.z);
365  }
366 
367  if (confidence != nullptr) {
368  *confidence = pose_data.tracker_confidence;
369  }
370 }
371 
386  vpColVector *odo_vel, vpColVector *odo_acc, vpColVector *imu_vel, vpColVector *imu_acc,
387  unsigned int *confidence, double *ts)
388 {
389  auto data = m_pipe.wait_for_frames();
390 
391  if (left != nullptr) {
392  auto left_fisheye_frame = data.get_fisheye_frame(1);
393  unsigned int width = static_cast<unsigned int>(left_fisheye_frame.get_width());
394  unsigned int height = static_cast<unsigned int>(left_fisheye_frame.get_height());
395  left->resize(height, width);
396  getNativeFrameData(left_fisheye_frame, (*left).bitmap);
397  }
398 
399  if (right != nullptr) {
400  auto right_fisheye_frame = data.get_fisheye_frame(2);
401  unsigned int width = static_cast<unsigned int>(right_fisheye_frame.get_width());
402  unsigned int height = static_cast<unsigned int>(right_fisheye_frame.get_height());
403  right->resize(height, width);
404  getNativeFrameData(right_fisheye_frame, (*right).bitmap);
405  }
406 
407  auto pose_frame = data.first_or_default(RS2_STREAM_POSE);
408  auto pose_data = pose_frame.as<rs2::pose_frame>().get_pose_data();
409 
410  if (ts != nullptr) {
411  *ts = data.get_timestamp();
412  }
413 
414  if (cMw != nullptr) {
415  m_pos[0] = static_cast<double>(pose_data.translation.x);
416  m_pos[1] = static_cast<double>(pose_data.translation.y);
417  m_pos[2] = static_cast<double>(pose_data.translation.z);
418 
419  m_quat[0] = static_cast<double>(pose_data.rotation.x);
420  m_quat[1] = static_cast<double>(pose_data.rotation.y);
421  m_quat[2] = static_cast<double>(pose_data.rotation.z);
422  m_quat[3] = static_cast<double>(pose_data.rotation.w);
423 
425  }
426 
427  if (odo_vel != nullptr) {
428  odo_vel->resize(6, false);
429  (*odo_vel)[0] = static_cast<double>(pose_data.velocity.x);
430  (*odo_vel)[1] = static_cast<double>(pose_data.velocity.y);
431  (*odo_vel)[2] = static_cast<double>(pose_data.velocity.z);
432  (*odo_vel)[3] = static_cast<double>(pose_data.angular_velocity.x);
433  (*odo_vel)[4] = static_cast<double>(pose_data.angular_velocity.y);
434  (*odo_vel)[5] = static_cast<double>(pose_data.angular_velocity.z);
435  }
436 
437  if (odo_acc != nullptr) {
438  odo_acc->resize(6, false);
439  (*odo_acc)[0] = static_cast<double>(pose_data.acceleration.x);
440  (*odo_acc)[1] = static_cast<double>(pose_data.acceleration.y);
441  (*odo_acc)[2] = static_cast<double>(pose_data.acceleration.z);
442  (*odo_acc)[3] = static_cast<double>(pose_data.angular_acceleration.x);
443  (*odo_acc)[4] = static_cast<double>(pose_data.angular_acceleration.y);
444  (*odo_acc)[5] = static_cast<double>(pose_data.angular_acceleration.z);
445  }
446 
447  auto accel_frame = data.first_or_default(RS2_STREAM_ACCEL);
448  auto accel_data = accel_frame.as<rs2::motion_frame>().get_motion_data();
449 
450  if (imu_acc != nullptr) {
451  imu_acc->resize(3, false);
452  (*imu_acc)[0] = static_cast<double>(accel_data.x);
453  (*imu_acc)[1] = static_cast<double>(accel_data.y);
454  (*imu_acc)[2] = static_cast<double>(accel_data.z);
455  }
456 
457  auto gyro_frame = data.first_or_default(RS2_STREAM_GYRO);
458  auto gyro_data = gyro_frame.as<rs2::motion_frame>().get_motion_data();
459 
460  if (imu_vel != nullptr) {
461  imu_vel->resize(3, false);
462  (*imu_vel)[0] = static_cast<double>(gyro_data.x);
463  (*imu_vel)[1] = static_cast<double>(gyro_data.y);
464  (*imu_vel)[2] = static_cast<double>(gyro_data.z);
465  }
466 
467  if (confidence != nullptr) {
468  *confidence = pose_data.tracker_confidence;
469  }
470 }
471 #endif // #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
472 
473 #ifdef VISP_HAVE_PCL
486 void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
487  std::vector<vpColVector> *const data_pointCloud,
488  pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud, unsigned char *const data_infrared,
489  rs2::align *const align_to, double *ts)
490 {
491  acquire(data_image, data_depth, data_pointCloud, pointcloud, data_infrared, nullptr, align_to, ts);
492 }
493 
508 void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
509  std::vector<vpColVector> *const data_pointCloud,
510  pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud, unsigned char *const data_infrared1,
511  unsigned char *const data_infrared2, rs2::align *const align_to, double *ts)
512 {
513  auto data = m_pipe.wait_for_frames();
514  if (align_to != nullptr) {
515  // Infrared stream is not aligned
516  // see https://github.com/IntelRealSense/librealsense/issues/1556#issuecomment-384919994
517 #if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
518  data = align_to->process(data);
519 #else
520  data = align_to->proccess(data);
521 #endif
522  }
523 
524  if (data_image != nullptr) {
525  auto color_frame = data.get_color_frame();
526  getNativeFrameData(color_frame, data_image);
527  }
528 
529  if (data_depth != nullptr || data_pointCloud != nullptr || pointcloud != nullptr) {
530  auto depth_frame = data.get_depth_frame();
531  if (data_depth != nullptr) {
532  getNativeFrameData(depth_frame, data_depth);
533  }
534 
535  if (data_pointCloud != nullptr) {
536  getPointcloud(depth_frame, *data_pointCloud);
537  }
538 
539  if (pointcloud != nullptr) {
540  getPointcloud(depth_frame, pointcloud);
541  }
542  }
543 
544  if (data_infrared1 != nullptr) {
545  auto infrared_frame = data.first(RS2_STREAM_INFRARED);
546  getNativeFrameData(infrared_frame, data_infrared1);
547  }
548 
549  if (data_infrared2 != nullptr) {
550  auto infrared_frame = data.get_infrared_frame(2);
551  getNativeFrameData(infrared_frame, data_infrared2);
552  }
553 
554  if (ts != nullptr) {
555  *ts = data.get_timestamp();
556  }
557 }
558 
571 void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
572  std::vector<vpColVector> *const data_pointCloud,
573  pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud, unsigned char *const data_infrared,
574  rs2::align *const align_to, double *ts)
575 {
576  acquire(data_image, data_depth, data_pointCloud, pointcloud, data_infrared, nullptr, align_to, ts);
577 }
578 
/*!
  Grab raw image, depth, point-cloud (as vpColVector vector and/or as a
  textured PCL XYZRGB cloud) and both infrared streams. Any output pointer
  may be nullptr to skip the corresponding data.
  \param data_image : Raw color buffer or nullptr.
  \param data_depth : Raw depth buffer or nullptr.
  \param data_pointCloud : Point cloud vector or nullptr.
  \param pointcloud : Textured PCL point cloud (colored from the color stream) or nullptr.
  \param data_infrared1 : First infrared buffer or nullptr.
  \param data_infrared2 : Second infrared buffer or nullptr.
  \param align_to : Optional stream aligner, applied before extraction.
  \param ts : Optional output frameset timestamp.
*/
void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
                           std::vector<vpColVector> *const data_pointCloud,
                           pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud, unsigned char *const data_infrared1,
                           unsigned char *const data_infrared2, rs2::align *const align_to, double *ts)
{
  auto data = m_pipe.wait_for_frames();
  if (align_to != nullptr) {
    // Infrared stream is not aligned
    // see https://github.com/IntelRealSense/librealsense/issues/1556#issuecomment-384919994
#if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
    data = align_to->process(data);
#else
    data = align_to->proccess(data);
#endif
  }

  // The color frame is fetched unconditionally: it is also needed below to
  // texture the PCL point cloud, not only to fill data_image.
  auto color_frame = data.get_color_frame();
  if (data_image != nullptr) {
    getNativeFrameData(color_frame, data_image);
  }

  if (data_depth != nullptr || data_pointCloud != nullptr || pointcloud != nullptr) {
    auto depth_frame = data.get_depth_frame();
    if (data_depth != nullptr) {
      getNativeFrameData(depth_frame, data_depth);
    }

    if (data_pointCloud != nullptr) {
      getPointcloud(depth_frame, *data_pointCloud);
    }

    // Textured variant: combines depth geometry with the color frame.
    if (pointcloud != nullptr) {
      getPointcloud(depth_frame, color_frame, pointcloud);
    }
  }

  if (data_infrared1 != nullptr) {
    auto infrared_frame = data.first(RS2_STREAM_INFRARED);
    getNativeFrameData(infrared_frame, data_infrared1);
  }

  if (data_infrared2 != nullptr) {
    auto infrared_frame = data.get_infrared_frame(2);
    getNativeFrameData(infrared_frame, data_infrared2);
  }

  if (ts != nullptr) {
    *ts = data.get_timestamp();
  }
}
643 #endif
644 
657 {
658  if (m_init) {
659  m_pipe.stop();
660  m_init = false;
661  }
662 }
663 
675  int index) const
676 {
677  auto rs_stream = m_pipelineProfile.get_stream(stream, index).as<rs2::video_stream_profile>();
678  auto intrinsics = rs_stream.get_intrinsics();
679 
680  vpCameraParameters cam;
681  double u0 = intrinsics.ppx;
682  double v0 = intrinsics.ppy;
683  double px = intrinsics.fx;
684  double py = intrinsics.fy;
685 
686  switch (type) {
688  double kdu = intrinsics.coeffs[0];
689  cam.initPersProjWithDistortion(px, py, u0, v0, -kdu, kdu);
690  } break;
691 
693  std::vector<double> tmp_coefs;
694  tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[0]));
695  tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[1]));
696  tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[2]));
697  tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[3]));
698  tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[4]));
699 
700  cam.initProjWithKannalaBrandtDistortion(px, py, u0, v0, tmp_coefs);
701  } break;
702 
704  default:
705  cam.initPersProjWithoutDistortion(px, py, u0, v0);
706  break;
707  }
708 
709  return cam;
710 }
711 
720 rs2_intrinsics vpRealSense2::getIntrinsics(const rs2_stream &stream, int index) const
721 {
722  auto vsp = m_pipelineProfile.get_stream(stream, index).as<rs2::video_stream_profile>();
723  return vsp.get_intrinsics();
724 }
725 
726 void vpRealSense2::getColorFrame(const rs2::frame &frame, vpImage<vpRGBa> &color)
727 {
728  auto vf = frame.as<rs2::video_frame>();
729  unsigned int width = (unsigned int)vf.get_width();
730  unsigned int height = (unsigned int)vf.get_height();
731  color.resize(height, width);
732 
733  if (frame.get_profile().format() == RS2_FORMAT_RGB8) {
734  vpImageConvert::RGBToRGBa(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
735  reinterpret_cast<unsigned char *>(color.bitmap), width, height);
736  }
737  else if (frame.get_profile().format() == RS2_FORMAT_RGBA8) {
738  memcpy(reinterpret_cast<unsigned char *>(color.bitmap),
739  const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
740  width * height * sizeof(vpRGBa));
741  }
742  else if (frame.get_profile().format() == RS2_FORMAT_BGR8) {
743  vpImageConvert::BGRToRGBa(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
744  reinterpret_cast<unsigned char *>(color.bitmap), width, height);
745  }
746  else {
747  throw vpException(vpException::fatalError, "RealSense Camera - color stream not supported!");
748  }
749 }
750 
756 {
757  if (!m_init) { // If pipe is not yet created, create it. Otherwise, we already know depth scale.
758  rs2::pipeline *pipe = new rs2::pipeline;
759  rs2::pipeline_profile *pipelineProfile = new rs2::pipeline_profile;
760  *pipelineProfile = pipe->start();
761 
762  rs2::device dev = pipelineProfile->get_device();
763 
764  // Go over the device's sensors
765  for (rs2::sensor &sensor : dev.query_sensors()) {
766  // Check if the sensor is a depth sensor
767  if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
768  m_depthScale = dpt.get_depth_scale();
769  }
770  }
771 
772  pipe->stop();
773  delete pipe;
774  delete pipelineProfile;
775  }
776 
777  return m_depthScale;
778 }
779 
780 void vpRealSense2::getGreyFrame(const rs2::frame &frame, vpImage<unsigned char> &grey)
781 {
782  auto vf = frame.as<rs2::video_frame>();
783  unsigned int width = (unsigned int)vf.get_width();
784  unsigned int height = (unsigned int)vf.get_height();
785  grey.resize(height, width);
786 
787  if (frame.get_profile().format() == RS2_FORMAT_RGB8) {
788  vpImageConvert::RGBToGrey(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
789  grey.bitmap, width, height);
790  }
791  else if (frame.get_profile().format() == RS2_FORMAT_RGBA8) {
792  vpImageConvert::RGBaToGrey(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
793  grey.bitmap, width * height);
794  }
795  else if (frame.get_profile().format() == RS2_FORMAT_BGR8) {
796  vpImageConvert::BGRToGrey(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
797  grey.bitmap, width, height);
798  }
799  else {
800  throw vpException(vpException::fatalError, "RealSense Camera - color stream not supported!");
801  }
802 }
803 
804 void vpRealSense2::getNativeFrameData(const rs2::frame &frame, unsigned char *const data)
805 {
806  auto vf = frame.as<rs2::video_frame>();
807  int size = vf.get_width() * vf.get_height();
808 
809  switch (frame.get_profile().format()) {
810  case RS2_FORMAT_RGB8:
811  case RS2_FORMAT_BGR8:
812  memcpy(data, frame.get_data(), size * 3);
813  break;
814 
815  case RS2_FORMAT_RGBA8:
816  case RS2_FORMAT_BGRA8:
817  memcpy(data, frame.get_data(), size * 4);
818  break;
819 
820  case RS2_FORMAT_Y16:
821  case RS2_FORMAT_Z16:
822  memcpy(data, frame.get_data(), size * 2);
823  break;
824 
825  case RS2_FORMAT_Y8:
826  memcpy(data, frame.get_data(), size);
827  break;
828 
829  default:
830  break;
831  }
832 }
833 
/*!
  Fill \e pointcloud from a depth frame: one homogeneous 4-vector
  (X, Y, Z, 1) per pixel, in meters, expressed in the depth camera frame.
  Pixels with no measurement (raw depth == 0) or farther than m_max_Z get
  their X, Y, Z set to m_invalidDepthValue.
  \param depth_frame : Input depth frame.
  \param pointcloud : Output vector, resized to width * height entries.
  \throws vpException::fatalError if the depth scale is not strictly positive.
*/
void vpRealSense2::getPointcloud(const rs2::depth_frame &depth_frame, std::vector<vpColVector> &pointcloud)
{
  // A non-positive depth scale means the device was not properly queried.
  if (m_depthScale <= std::numeric_limits<float>::epsilon()) {
    std::stringstream ss;
    ss << "Error, depth scale <= 0: " << m_depthScale;
    throw vpException(vpException::fatalError, ss.str());
  }

  auto vf = depth_frame.as<rs2::video_frame>();
  const int width = vf.get_width();
  const int height = vf.get_height();
  pointcloud.resize((size_t)(width * height));

  const uint16_t *p_depth_frame = reinterpret_cast<const uint16_t *>(depth_frame.get_data());
  const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();

// Multi-threading if OpenMP
// Concurrent writes at different locations are safe
#pragma omp parallel for schedule(dynamic)
  for (int i = 0; i < height; i++) {
    auto depth_pixel_index = i * width;

    for (int j = 0; j < width; j++, depth_pixel_index++) {
      // Raw depth of 0 means no measurement: flag the point as invalid.
      if (p_depth_frame[depth_pixel_index] == 0) {
        pointcloud[(size_t)depth_pixel_index].resize(4, false);
        pointcloud[(size_t)depth_pixel_index][0] = m_invalidDepthValue;
        pointcloud[(size_t)depth_pixel_index][1] = m_invalidDepthValue;
        pointcloud[(size_t)depth_pixel_index][2] = m_invalidDepthValue;
        pointcloud[(size_t)depth_pixel_index][3] = 1.0;
        continue;
      }

      // Get the depth value of the current pixel, converted to meters.
      auto pixels_distance = m_depthScale * p_depth_frame[depth_pixel_index];

      // Back-project pixel (j, i) to a 3D point using the depth intrinsics.
      float points[3];
      const float pixel[] = { (float)j, (float)i };
      rs2_deproject_pixel_to_point(points, &depth_intrinsics, pixel, pixels_distance);

      // Clip measurements beyond the configured maximum range.
      if (pixels_distance > m_max_Z)
        points[0] = points[1] = points[2] = m_invalidDepthValue;

      pointcloud[(size_t)depth_pixel_index].resize(4, false);
      pointcloud[(size_t)depth_pixel_index][0] = points[0];
      pointcloud[(size_t)depth_pixel_index][1] = points[1];
      pointcloud[(size_t)depth_pixel_index][2] = points[2];
      pointcloud[(size_t)depth_pixel_index][3] = 1.0;
    }
  }
}
884 
885 #ifdef VISP_HAVE_PCL
/*!
  Fill a PCL XYZ point cloud from a depth frame, in meters, expressed in
  the depth camera frame. Pixels with no measurement or farther than
  m_max_Z are set to m_invalidDepthValue.
  Two implementations are available: a manual deprojection (default, see
  MANUAL_POINTCLOUD) and one based on rs2::pointcloud::calculate.
  \param depth_frame : Input depth frame.
  \param pointcloud : Output organized cloud, resized to width x height.
  \throws vpException::fatalError if the depth scale is not strictly positive.
*/
void vpRealSense2::getPointcloud(const rs2::depth_frame &depth_frame, pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud)
{
  // A non-positive depth scale means the device was not properly queried.
  if (m_depthScale <= std::numeric_limits<float>::epsilon()) {
    std::stringstream ss;
    ss << "Error, depth scale <= 0: " << m_depthScale;
    throw vpException(vpException::fatalError, ss.str());
  }

  auto vf = depth_frame.as<rs2::video_frame>();
  const int width = vf.get_width();
  const int height = vf.get_height();
  // Keep the cloud organized (width x height) to preserve pixel adjacency.
  pointcloud->width = (uint32_t)width;
  pointcloud->height = (uint32_t)height;
  pointcloud->resize((size_t)(width * height));

#if MANUAL_POINTCLOUD // faster to compute manually when tested
  const uint16_t *p_depth_frame = reinterpret_cast<const uint16_t *>(depth_frame.get_data());
  const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();

  // Multi-threading if OpenMP
  // Concurrent writes at different locations are safe
#pragma omp parallel for schedule(dynamic)
  for (int i = 0; i < height; i++) {
    auto depth_pixel_index = i * width;

    for (int j = 0; j < width; j++, depth_pixel_index++) {
      // Raw depth of 0 means no measurement: flag the point as invalid.
      if (p_depth_frame[depth_pixel_index] == 0) {
        pointcloud->points[(size_t)(depth_pixel_index)].x = m_invalidDepthValue;
        pointcloud->points[(size_t)(depth_pixel_index)].y = m_invalidDepthValue;
        pointcloud->points[(size_t)(depth_pixel_index)].z = m_invalidDepthValue;
        continue;
      }

      // Get the depth value of the current pixel, converted to meters.
      auto pixels_distance = m_depthScale * p_depth_frame[depth_pixel_index];

      // Back-project pixel (j, i) to a 3D point using the depth intrinsics.
      float points[3];
      const float pixel[] = { (float)j, (float)i };
      rs2_deproject_pixel_to_point(points, &depth_intrinsics, pixel, pixels_distance);

      // Clip measurements beyond the configured maximum range.
      if (pixels_distance > m_max_Z)
        points[0] = points[1] = points[2] = m_invalidDepthValue;

      pointcloud->points[(size_t)(depth_pixel_index)].x = points[0];
      pointcloud->points[(size_t)(depth_pixel_index)].y = points[1];
      pointcloud->points[(size_t)(depth_pixel_index)].z = points[2];
    }
  }
#else
  // Library-based alternative: let librealsense compute the vertices.
  m_points = m_pointcloud.calculate(depth_frame);
  auto vertices = m_points.get_vertices();

  for (size_t i = 0; i < m_points.size(); i++) {
    // Non-positive or out-of-range depth is flagged as invalid.
    if (vertices[i].z <= 0.0f || vertices[i].z > m_max_Z) {
      pointcloud->points[i].x = m_invalidDepthValue;
      pointcloud->points[i].y = m_invalidDepthValue;
      pointcloud->points[i].z = m_invalidDepthValue;
    }
    else {
      pointcloud->points[i].x = vertices[i].x;
      pointcloud->points[i].y = vertices[i].y;
      pointcloud->points[i].z = vertices[i].z;
    }
  }
#endif
}
952 
953 void vpRealSense2::getPointcloud(const rs2::depth_frame &depth_frame, const rs2::frame &color_frame,
954  pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud)
955 {
956  if (m_depthScale <= std::numeric_limits<float>::epsilon()) {
957  throw vpException(vpException::fatalError, "Error, depth scale <= 0: %f", m_depthScale);
958  }
959 
960  auto vf = depth_frame.as<rs2::video_frame>();
961  const int depth_width = vf.get_width();
962  const int depth_height = vf.get_height();
963  pointcloud->width = static_cast<uint32_t>(depth_width);
964  pointcloud->height = static_cast<uint32_t>(depth_height);
965  pointcloud->resize(static_cast<uint32_t>(depth_width * depth_height));
966 
967  vf = color_frame.as<rs2::video_frame>();
968  const int color_width = vf.get_width();
969  const int color_height = vf.get_height();
970 
971  const uint16_t *p_depth_frame = reinterpret_cast<const uint16_t *>(depth_frame.get_data());
972  const rs2_extrinsics depth2ColorExtrinsics =
973  depth_frame.get_profile().as<rs2::video_stream_profile>().get_extrinsics_to(
974  color_frame.get_profile().as<rs2::video_stream_profile>());
975  const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
976  const rs2_intrinsics color_intrinsics = color_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
977 
978  auto color_format = color_frame.as<rs2::video_frame>().get_profile().format();
979  const bool swap_rb = color_format == RS2_FORMAT_BGR8 || color_format == RS2_FORMAT_BGRA8;
980  unsigned int nb_color_pixel = (color_format == RS2_FORMAT_RGB8 || color_format == RS2_FORMAT_BGR8) ? 3 : 4;
981  const unsigned char *p_color_frame = static_cast<const unsigned char *>(color_frame.get_data());
982  rs2_extrinsics identity;
983  memset(identity.rotation, 0, sizeof(identity.rotation));
984  memset(identity.translation, 0, sizeof(identity.translation));
985  for (int i = 0; i < 3; i++) {
986  identity.rotation[i * 3 + i] = 1;
987  }
988  const bool registered_streams =
989  (depth2ColorExtrinsics == identity) && (color_width == depth_width) && (color_height == depth_height);
990 
991 // Multi-threading if OpenMP
992 // Concurrent writes at different locations are safe
993 #pragma omp parallel for schedule(dynamic)
994  for (int i = 0; i < depth_height; i++) {
995  auto depth_pixel_index = i * depth_width;
996 
997  for (int j = 0; j < depth_width; j++, depth_pixel_index++) {
998  if (p_depth_frame[depth_pixel_index] == 0) {
999  pointcloud->points[(size_t)depth_pixel_index].x = m_invalidDepthValue;
1000  pointcloud->points[(size_t)depth_pixel_index].y = m_invalidDepthValue;
1001  pointcloud->points[(size_t)depth_pixel_index].z = m_invalidDepthValue;
1002 
1003  // For out of bounds color data, default to a shade of blue in order to
1004  // visually distinguish holes. This color value is same as the librealsense
1005  // out of bounds color value.
1006 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1007  unsigned int r = 96, g = 157, b = 198;
1008  uint32_t rgb = (static_cast<uint32_t>(r) << 16 | static_cast<uint32_t>(g) << 8 | static_cast<uint32_t>(b));
1009 
1010  pointcloud->points[(size_t)depth_pixel_index].rgb = *reinterpret_cast<float *>(&rgb);
1011 #else
1012  pointcloud->points[(size_t)depth_pixel_index].r = (uint8_t)96;
1013  pointcloud->points[(size_t)depth_pixel_index].g = (uint8_t)157;
1014  pointcloud->points[(size_t)depth_pixel_index].b = (uint8_t)198;
1015 #endif
1016  continue;
1017  }
1018 
1019  // Get the depth value of the current pixel
1020  auto pixels_distance = m_depthScale * p_depth_frame[depth_pixel_index];
1021 
1022  float depth_point[3];
1023  const float pixel[] = { (float)j, (float)i };
1024  rs2_deproject_pixel_to_point(depth_point, &depth_intrinsics, pixel, pixels_distance);
1025 
1026  if (pixels_distance > m_max_Z) {
1027  depth_point[0] = depth_point[1] = depth_point[2] = m_invalidDepthValue;
1028  }
1029 
1030  pointcloud->points[(size_t)depth_pixel_index].x = depth_point[0];
1031  pointcloud->points[(size_t)depth_pixel_index].y = depth_point[1];
1032  pointcloud->points[(size_t)depth_pixel_index].z = depth_point[2];
1033 
1034  if (!registered_streams) {
1035  float color_point[3];
1036  rs2_transform_point_to_point(color_point, &depth2ColorExtrinsics, depth_point);
1037  float color_pixel[2];
1038  rs2_project_point_to_pixel(color_pixel, &color_intrinsics, color_point);
1039 
1040  if (color_pixel[1] < 0 || color_pixel[1] >= color_height || color_pixel[0] < 0 ||
1041  color_pixel[0] >= color_width) {
1042  // For out of bounds color data, default to a shade of blue in order to
1043  // visually distinguish holes. This color value is same as the librealsense
1044  // out of bounds color value.
1045 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1046  unsigned int r = 96, g = 157, b = 198;
1047  uint32_t rgb = (static_cast<uint32_t>(r) << 16 | static_cast<uint32_t>(g) << 8 | static_cast<uint32_t>(b));
1048 
1049  pointcloud->points[(size_t)depth_pixel_index].rgb = *reinterpret_cast<float *>(&rgb);
1050 #else
1051  pointcloud->points[(size_t)depth_pixel_index].r = (uint8_t)96;
1052  pointcloud->points[(size_t)depth_pixel_index].g = (uint8_t)157;
1053  pointcloud->points[(size_t)depth_pixel_index].b = (uint8_t)198;
1054 #endif
1055  }
1056  else {
1057  unsigned int i_ = (unsigned int)color_pixel[1];
1058  unsigned int j_ = (unsigned int)color_pixel[0];
1059 
1060 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1061  uint32_t rgb = 0;
1062  if (swap_rb) {
1063  rgb =
1064  (static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel]) |
1065  static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1]) << 8 |
1066  static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2])
1067  << 16);
1068  }
1069  else {
1070  rgb =
1071  (static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel]) << 16 |
1072  static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1]) << 8 |
1073  static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2]));
1074  }
1075 
1076  pointcloud->points[(size_t)(i * depth_width + j)].rgb = *reinterpret_cast<float *>(&rgb);
1077 #else
1078  if (swap_rb) {
1079  pointcloud->points[(size_t)depth_pixel_index].b =
1080  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel];
1081  pointcloud->points[(size_t)depth_pixel_index].g =
1082  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1];
1083  pointcloud->points[(size_t)depth_pixel_index].r =
1084  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2];
1085  }
1086  else {
1087  pointcloud->points[(size_t)depth_pixel_index].r =
1088  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel];
1089  pointcloud->points[(size_t)depth_pixel_index].g =
1090  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1];
1091  pointcloud->points[(size_t)depth_pixel_index].b =
1092  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2];
1093  }
1094 #endif
1095  }
1096  }
1097  else {
1098 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1099  uint32_t rgb = 0;
1100  if (swap_rb) {
1101  rgb = (static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel]) |
1102  static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1]) << 8 |
1103  static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2]) << 16);
1104  }
1105  else {
1106  rgb = (static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel]) << 16 |
1107  static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1]) << 8 |
1108  static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2]));
1109  }
1110 
1111  pointcloud->points[(size_t)(i * depth_width + j)].rgb = *reinterpret_cast<float *>(&rgb);
1112 #else
1113  if (swap_rb) {
1114  pointcloud->points[(size_t)depth_pixel_index].b =
1115  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel];
1116  pointcloud->points[(size_t)depth_pixel_index].g =
1117  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1];
1118  pointcloud->points[(size_t)depth_pixel_index].r =
1119  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2];
1120  }
1121  else {
1122  pointcloud->points[(size_t)depth_pixel_index].r =
1123  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel];
1124  pointcloud->points[(size_t)depth_pixel_index].g =
1125  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1];
1126  pointcloud->points[(size_t)depth_pixel_index].b =
1127  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2];
1128  }
1129 #endif
1130  }
1131  }
1132  }
1133 }
1134 
1135 #endif
1136 
1144 vpHomogeneousMatrix vpRealSense2::getTransformation(const rs2_stream &from, const rs2_stream &to, int from_index) const
1145 {
1146  int to_index = -1;
1147 
1148  if (from_index != -1) // If we have to specify indices for streams. (Ex.: T265 device having 2 fisheyes)
1149  {
1150  if (from_index == 1) // From left => To right.
1151  to_index = 2;
1152  else if (from_index == 2) // From right => To left.
1153  to_index = 1;
1154  }
1155 
1156  auto from_stream = m_pipelineProfile.get_stream(from, from_index);
1157  auto to_stream = m_pipelineProfile.get_stream(to, to_index);
1158 
1159  rs2_extrinsics extrinsics = from_stream.get_extrinsics_to(to_stream);
1160 
1162  vpRotationMatrix R;
1163  for (unsigned int i = 0; i < 3; i++) {
1164  t[i] = extrinsics.translation[i];
1165  for (unsigned int j = 0; j < 3; j++)
1166  R[i][j] = extrinsics.rotation[j * 3 + i]; // rotation is column-major order
1167  }
1168 
1169  vpHomogeneousMatrix to_M_from(t, R);
1170  return to_M_from;
1171 }
1172 
1173 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1184  double *ts)
1185 {
1186  auto frame = m_pipe.wait_for_frames();
1187  auto f = frame.first_or_default(RS2_STREAM_POSE);
1188  auto pose_data = f.as<rs2::pose_frame>().get_pose_data();
1189 
1190  if (ts != nullptr)
1191  *ts = frame.get_timestamp();
1192 
1193  if (cMw != nullptr) {
1194  m_pos[0] = static_cast<double>(pose_data.translation.x);
1195  m_pos[1] = static_cast<double>(pose_data.translation.y);
1196  m_pos[2] = static_cast<double>(pose_data.translation.z);
1197 
1198  m_quat[0] = static_cast<double>(pose_data.rotation.x);
1199  m_quat[1] = static_cast<double>(pose_data.rotation.y);
1200  m_quat[2] = static_cast<double>(pose_data.rotation.z);
1201  m_quat[3] = static_cast<double>(pose_data.rotation.w);
1202 
1203  *cMw = vpHomogeneousMatrix(m_pos, m_quat);
1204  }
1205 
1206  if (odo_vel != nullptr) {
1207  odo_vel->resize(6, false);
1208  (*odo_vel)[0] = static_cast<double>(pose_data.velocity.x);
1209  (*odo_vel)[1] = static_cast<double>(pose_data.velocity.y);
1210  (*odo_vel)[2] = static_cast<double>(pose_data.velocity.z);
1211  (*odo_vel)[3] = static_cast<double>(pose_data.angular_velocity.x);
1212  (*odo_vel)[4] = static_cast<double>(pose_data.angular_velocity.y);
1213  (*odo_vel)[5] = static_cast<double>(pose_data.angular_velocity.z);
1214  }
1215 
1216  if (odo_acc != nullptr) {
1217  odo_acc->resize(6, false);
1218  (*odo_acc)[0] = static_cast<double>(pose_data.acceleration.x);
1219  (*odo_acc)[1] = static_cast<double>(pose_data.acceleration.y);
1220  (*odo_acc)[2] = static_cast<double>(pose_data.acceleration.z);
1221  (*odo_acc)[3] = static_cast<double>(pose_data.angular_acceleration.x);
1222  (*odo_acc)[4] = static_cast<double>(pose_data.angular_acceleration.y);
1223  (*odo_acc)[5] = static_cast<double>(pose_data.angular_acceleration.z);
1224  }
1225 
1226  return pose_data.tracker_confidence;
1227 }
1228 
1250 {
1251  auto frame = m_pipe.wait_for_frames();
1252  auto f = frame.first_or_default(RS2_STREAM_ACCEL);
1253  auto imu_acc_data = f.as<rs2::motion_frame>().get_motion_data();
1254 
1255  if (ts != nullptr)
1256  *ts = f.get_timestamp();
1257 
1258  if (imu_acc != nullptr) {
1259  imu_acc->resize(3, false);
1260  (*imu_acc)[0] = static_cast<double>(imu_acc_data.x);
1261  (*imu_acc)[1] = static_cast<double>(imu_acc_data.y);
1262  (*imu_acc)[2] = static_cast<double>(imu_acc_data.z);
1263  }
1264 }
1265 
1286 void vpRealSense2::getIMUVelocity(vpColVector *imu_vel, double *ts)
1287 {
1288  auto frame = m_pipe.wait_for_frames();
1289  auto f = frame.first_or_default(RS2_STREAM_GYRO);
1290  auto imu_vel_data = f.as<rs2::motion_frame>().get_motion_data();
1291 
1292  if (ts != nullptr)
1293  *ts = f.get_timestamp();
1294 
1295  if (imu_vel != nullptr) {
1296  imu_vel->resize(3, false);
1297  (*imu_vel)[0] = static_cast<double>(imu_vel_data.x);
1298  (*imu_vel)[1] = static_cast<double>(imu_vel_data.x);
1299  (*imu_vel)[2] = static_cast<double>(imu_vel_data.x);
1300  }
1301 }
1302 
1323 void vpRealSense2::getIMUData(vpColVector *imu_acc, vpColVector *imu_vel, double *ts)
1324 {
1325  auto data = m_pipe.wait_for_frames();
1326 
1327  if (ts != nullptr)
1328  *ts = data.get_timestamp();
1329 
1330  if (imu_acc != nullptr) {
1331  auto acc_data = data.first_or_default(RS2_STREAM_ACCEL);
1332  auto imu_acc_data = acc_data.as<rs2::motion_frame>().get_motion_data();
1333 
1334  imu_acc->resize(3, false);
1335  (*imu_acc)[0] = static_cast<double>(imu_acc_data.x);
1336  (*imu_acc)[1] = static_cast<double>(imu_acc_data.y);
1337  (*imu_acc)[2] = static_cast<double>(imu_acc_data.z);
1338  }
1339 
1340  if (imu_vel != nullptr) {
1341  auto vel_data = data.first_or_default(RS2_STREAM_GYRO);
1342  auto imu_vel_data = vel_data.as<rs2::motion_frame>().get_motion_data();
1343 
1344  imu_vel->resize(3, false);
1345  (*imu_vel)[0] = static_cast<double>(imu_vel_data.x);
1346  (*imu_vel)[1] = static_cast<double>(imu_vel_data.y);
1347  (*imu_vel)[2] = static_cast<double>(imu_vel_data.z);
1348  }
1349 }
1350 #endif // #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1351 
1355 bool vpRealSense2::open(const rs2::config &cfg)
1356 {
1357  if (m_init) {
1358  close();
1359  }
1360 
1361  m_pipelineProfile = m_pipe.start(cfg);
1362 
1363  rs2::device dev = m_pipelineProfile.get_device();
1364 
1365 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1366  // Query device product line D400/SR300/L500/T200
1367  m_product_line = dev.get_info(RS2_CAMERA_INFO_PRODUCT_LINE);
1368 #endif
1369 
1370  // Go over the device's sensors
1371  for (rs2::sensor &sensor : dev.query_sensors()) {
1372  // Check if the sensor is a depth sensor
1373  if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
1374  m_depthScale = dpt.get_depth_scale();
1375  }
1376  }
1377 
1378  m_init = true;
1379  return m_init;
1380 }
1381 
1388 bool vpRealSense2::open(const rs2::config &cfg, std::function<void(rs2::frame)> &callback)
1389 {
1390  if (m_init) {
1391  close();
1392  }
1393 
1394  m_pipelineProfile = m_pipe.start(cfg, callback);
1395 
1396  rs2::device dev = m_pipelineProfile.get_device();
1397 
1398 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1399  // Query device product line D400/SR300/L500/T200
1400  m_product_line = dev.get_info(RS2_CAMERA_INFO_PRODUCT_LINE);
1401 #endif
1402 
1403  // Go over the device's sensors
1404  for (rs2::sensor &sensor : dev.query_sensors()) {
1405  // Check if the sensor is a depth sensor
1406  if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
1407  m_depthScale = dpt.get_depth_scale();
1408  }
1409  }
1410 
1411  m_init = true;
1412  return m_init;
1413 }
1414 
1420 {
1421 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1422  // With previous code, example/device/framegrabber/grabRealSense2.cpp does not work with D455
1423  // Error: Frame didn't arrive within 15000
1424  // Following code from:
1425  // https://github.com/IntelRealSense/librealsense/blob/4673a37d981164af8eeb8e296e430fc1427e008d/unit-tests/live/memory/test-extrinsics.cpp#L119
1426 
1427  // Reset product line info
1428  m_product_line = "unknown";
1429 
1430  rs2::context ctx;
1431  auto list = ctx.query_devices();
1432  if (list.size() > 0) {
1433  // Only one plugged sensor supported
1434  auto dev = list.front();
1435  auto sensors = dev.query_sensors();
1436  if (dev.supports(RS2_CAMERA_INFO_PRODUCT_LINE)) {
1437  m_product_line = dev.get_info(RS2_CAMERA_INFO_PRODUCT_LINE);
1438  }
1439  }
1440 
1441  return m_product_line;
1442 #else
1443  return (std::string("unknown"));
1444 #endif
1445 }
1446 
1457 {
1458  std::ostringstream oss;
1459  oss << *this;
1460  return oss.str();;
1461 }
1462 
namespace
{
// Helper functions to print information about the RealSense device

/*!
 * Pretty-print a librealsense extrinsics structure (3x3 rotation + translation
 * vector) to the given output stream.
 */
void print(const rs2_extrinsics &extrinsics, std::ostream &os)
{
  std::stringstream ss;
  ss << "Rotation Matrix:\n";

  for (auto i = 0; i < 3; ++i) {
    for (auto j = 0; j < 3; ++j) {
      // extrinsics.rotation is column-major: element (i,j) is at index j*3+i
      ss << std::left << std::setw(15) << std::setprecision(5) << extrinsics.rotation[j * 3 + i];
    }
    ss << std::endl;
  }

  ss << "\nTranslation Vector: ";
  for (size_t i = 0; i < sizeof(extrinsics.translation) / sizeof(extrinsics.translation[0]); ++i)
    ss << std::setprecision(15) << extrinsics.translation[i] << " ";

  os << ss.str() << "\n\n";
}

/*!
 * Pretty-print a librealsense intrinsics structure (image size, principal
 * point, focal lengths, distortion model and coefficients) to the stream.
 */
void print(const rs2_intrinsics &intrinsics, std::ostream &os)
{
  std::stringstream ss;
  ss << std::left << std::setw(14) << "Width: "
    << "\t" << intrinsics.width << "\n"
    << std::left << std::setw(14) << "Height: "
    << "\t" << intrinsics.height << "\n"
    << std::left << std::setw(14) << "PPX: "
    << "\t" << std::setprecision(15) << intrinsics.ppx << "\n"
    << std::left << std::setw(14) << "PPY: "
    << "\t" << std::setprecision(15) << intrinsics.ppy << "\n"
    << std::left << std::setw(14) << "Fx: "
    << "\t" << std::setprecision(15) << intrinsics.fx << "\n"
    << std::left << std::setw(14) << "Fy: "
    << "\t" << std::setprecision(15) << intrinsics.fy << "\n"
    << std::left << std::setw(14) << "Distortion: "
    << "\t" << rs2_distortion_to_string(intrinsics.model) << "\n"
    << std::left << std::setw(14) << "Coeffs: ";

  for (size_t i = 0; i < sizeof(intrinsics.coeffs) / sizeof(intrinsics.coeffs[0]); ++i)
    ss << "\t" << std::setprecision(15) << intrinsics.coeffs[i] << " ";

  os << ss.str() << "\n\n";
}

/*!
 * Fetch the intrinsics of a video stream profile, leaving \e intrinsics
 * untouched when the query throws (some profiles expose no intrinsics).
 */
void safe_get_intrinsics(const rs2::video_stream_profile &profile, rs2_intrinsics &intrinsics)
{
  try {
    intrinsics = profile.get_intrinsics();
  }
  catch (...) {
    // Intentionally ignored: this profile has no intrinsics available.
  }
}

// Exact member-wise comparison of two intrinsics, used to group stream
// profiles that share identical calibration data.
bool operator==(const rs2_intrinsics &lhs, const rs2_intrinsics &rhs)
{
  return lhs.width == rhs.width && lhs.height == rhs.height && lhs.ppx == rhs.ppx && lhs.ppy == rhs.ppy &&
    lhs.fx == rhs.fx && lhs.fy == rhs.fy && lhs.model == rhs.model &&
    !std::memcmp(lhs.coeffs, rhs.coeffs, sizeof(rhs.coeffs));
}

// Join a set of stream formats into a "/"-separated string.
std::string get_str_formats(const std::set<rs2_format> &formats)
{
  std::stringstream ss;
  for (auto format = formats.begin(); format != formats.end(); ++format) {
    // Separator after every element except the last one.
    ss << *format << ((format != formats.end()) && (next(format) == formats.end()) ? "" : "/");
  }
  return ss.str();
}

// Key identifying a stream at a given resolution; used to group intrinsics.
struct stream_and_resolution
{
  rs2_stream stream;
  int stream_index;
  int width;
  int height;
  std::string stream_name;

  // Lexicographic ordering so the struct can be used as a std::map key.
  bool operator<(const stream_and_resolution &obj) const
  {
    return (std::make_tuple(stream, stream_index, width, height) <
      std::make_tuple(obj.stream, obj.stream_index, obj.width, obj.height));
  }
};

// Key identifying a stream by type and index; used to group extrinsics.
struct stream_and_index
{
  rs2_stream stream;
  int stream_index;

  // Lexicographic ordering so the struct can be used as a std::map key.
  bool operator<(const stream_and_index &obj) const
  {
    return (std::make_tuple(stream, stream_index) < std::make_tuple(obj.stream, obj.stream_index));
  }
};
} // anonymous namespace
1561 
/*!
 * Stream insertion operator: dump a full description of the RealSense device
 * attached to \e rs (device info, supported options, stream profiles,
 * intrinsics and extrinsics) into \e os.
 */
std::ostream &operator<<(std::ostream &os, const vpRealSense2 &rs)
{
  rs2::device dev = rs.m_pipelineProfile.get_device();
  // Header line: device name, serial number and firmware version.
  os << std::left << std::setw(30) << dev.get_info(RS2_CAMERA_INFO_NAME) << std::setw(20)
    << dev.get_info(RS2_CAMERA_INFO_SERIAL_NUMBER) << std::setw(20) << dev.get_info(RS2_CAMERA_INFO_FIRMWARE_VERSION)
    << std::endl;

  // Show which options are supported by this device
  os << " Device info: \n";
  for (auto j = 0; j < RS2_CAMERA_INFO_COUNT; ++j) {
    auto param = static_cast<rs2_camera_info>(j);
    if (dev.supports(param))
      os << " " << std::left << std::setw(30) << rs2_camera_info_to_string(rs2_camera_info(param)) << ": \t"
      << dev.get_info(param) << "\n";
  }

  os << "\n";

  // Per-sensor list of supported options with their min/max/step/default range.
  for (auto &&sensor : dev.query_sensors()) {
    os << "Options for " << sensor.get_info(RS2_CAMERA_INFO_NAME) << std::endl;

    os << std::setw(55) << " Supported options:" << std::setw(10) << "min" << std::setw(10) << " max" << std::setw(6)
      << " step" << std::setw(10) << " default" << std::endl;
    for (auto j = 0; j < RS2_OPTION_COUNT; ++j) {
      auto opt = static_cast<rs2_option>(j);
      if (sensor.supports(opt)) {
        auto range = sensor.get_option_range(opt);
        os << " " << std::left << std::setw(50) << opt << " : " << std::setw(5) << range.min << "... "
          << std::setw(12) << range.max << std::setw(6) << range.step << std::setw(10) << range.def << "\n";
      }
    }

    os << "\n";
  }

  // Per-sensor list of supported stream profiles (stream, resolution, fps, format).
  for (auto &&sensor : dev.query_sensors()) {
    os << "Stream Profiles supported by " << sensor.get_info(RS2_CAMERA_INFO_NAME) << "\n";

    os << std::setw(55) << " Supported modes:" << std::setw(10) << "stream" << std::setw(10) << " resolution"
      << std::setw(6) << " fps" << std::setw(10) << " format"
      << "\n";
    // Show which streams are supported by this device
    for (auto &&profile : sensor.get_stream_profiles()) {
      if (auto video = profile.as<rs2::video_stream_profile>()) {
        os << " " << profile.stream_name() << "\t " << video.width() << "x" << video.height() << "\t@ "
          << profile.fps() << "Hz\t" << profile.format() << "\n";
      }
      else {
        os << " " << profile.stream_name() << "\t@ " << profile.fps() << "Hz\t" << profile.format() << "\n";
      }
    }

    os << "\n";
  }

  // Collect one representative profile per (stream, index) for the extrinsics
  // dump, and group intrinsics by (stream, index, resolution): profiles that
  // share identical intrinsics share an entry whose format set accumulates.
  std::map<stream_and_index, rs2::stream_profile> streams;
  std::map<stream_and_resolution, std::vector<std::pair<std::set<rs2_format>, rs2_intrinsics> > > intrinsics_map;
  for (auto &&sensor : dev.query_sensors()) {
    // Intrinsics
    for (auto &&profile : sensor.get_stream_profiles()) {
      if (auto video = profile.as<rs2::video_stream_profile>()) {
        if (streams.find(stream_and_index { profile.stream_type(), profile.stream_index() }) == streams.end()) {
          streams[stream_and_index { profile.stream_type(), profile.stream_index() }] = profile;
        }

        rs2_intrinsics intrinsics {};
        stream_and_resolution stream_res { profile.stream_type(), profile.stream_index(), video.width(), video.height(),
          profile.stream_name() };
        safe_get_intrinsics(video, intrinsics);
        auto it = std::find_if(
          (intrinsics_map[stream_res]).begin(), (intrinsics_map[stream_res]).end(),
          [&](const std::pair<std::set<rs2_format>, rs2_intrinsics> &kvp) { return intrinsics == kvp.second; });
        if (it == (intrinsics_map[stream_res]).end()) {
          (intrinsics_map[stream_res]).push_back({ {profile.format()}, intrinsics });
        }
        else {
          it->first.insert(profile.format()); // If the intrinsics are equals,
          // add the profile format to
          // format set
        }
      }
    }
  }

  os << "Provided Intrinsic:\n";
  for (auto &kvp : intrinsics_map) {
    auto stream_res = kvp.first;
    for (auto &intrinsics : kvp.second) {
      auto formats = get_str_formats(intrinsics.first);
      os << "Intrinsic of \"" << stream_res.stream_name << "\"\t " << stream_res.width << "x" << stream_res.height
        << "\t " << formats << "\n";
      // A value-initialized rs2_intrinsics means safe_get_intrinsics() could
      // not retrieve calibration data for this profile.
      if (intrinsics.second == rs2_intrinsics {}) {
        os << "Intrinsic NOT available!\n\n";
      }
      else {
        print(intrinsics.second, os);
      }
    }
  }

  // Print Extrinsics
  os << "\nProvided Extrinsic:\n";
  for (auto kvp1 = streams.begin(); kvp1 != streams.end(); ++kvp1) {
    for (auto kvp2 = streams.begin(); kvp2 != streams.end(); ++kvp2) {
      os << "Extrinsic from \"" << kvp1->second.stream_name() << "\"\t "
        << "To"
        << "\t \"" << kvp2->second.stream_name() << "\"\n";
      auto extrinsics = kvp1->second.get_extrinsics_to(kvp2->second);
      print(extrinsics, os);
    }
  }

  return os;
}
1696 
1697 #elif !defined(VISP_BUILD_SHARED_LIBS)
1698 // Work around to avoid warning: libvisp_sensor.a(vpRealSense2.cpp.o) has symbols
1699 void dummy_vpRealSense2() { };
1700 #endif
bool operator==(const vpArray2D< double > &A) const
Definition: vpArray2D.h:1251
friend std::ostream & operator<<(std::ostream &s, const vpArray2D< Type > &A)
Definition: vpArray2D.h:587
Generic class defining intrinsic camera parameters.
void initPersProjWithoutDistortion(double px, double py, double u0, double v0)
@ perspectiveProjWithDistortion
Perspective projection with distortion model.
@ ProjWithKannalaBrandtDistortion
Projection with Kannala-Brandt distortion model.
@ perspectiveProjWithoutDistortion
Perspective projection without distortion model.
void initPersProjWithDistortion(double px, double py, double u0, double v0, double kud, double kdu)
void initProjWithKannalaBrandtDistortion(double px, double py, double u0, double v0, const std::vector< double > &distortion_coefficients)
Implementation of column vector and the associated operations.
Definition: vpColVector.h:163
void resize(unsigned int i, bool flagNullify=true)
Definition: vpColVector.h:1056
error that can be emitted by ViSP classes.
Definition: vpException.h:59
@ fatalError
Fatal error.
Definition: vpException.h:84
Implementation of an homogeneous matrix and operations on such kind of matrices.
static void RGBToGrey(unsigned char *rgb, unsigned char *grey, unsigned int width, unsigned int height, bool flip=false)
static void RGBaToGrey(unsigned char *rgba, unsigned char *grey, unsigned int width, unsigned int height, unsigned int nThreads=0)
static void RGBToRGBa(unsigned char *rgb, unsigned char *rgba, unsigned int size)
static void BGRToGrey(unsigned char *bgr, unsigned char *grey, unsigned int width, unsigned int height, bool flip=false, unsigned int nThreads=0)
static void BGRToRGBa(unsigned char *bgr, unsigned char *rgba, unsigned int width, unsigned int height, bool flip=false)
void resize(unsigned int h, unsigned int w)
resize the image : Image initialization
Definition: vpImage.h:783
Type * bitmap
points toward the bitmap
Definition: vpImage.h:139
Definition: vpRGBa.h:61
void getNativeFrameData(const rs2::frame &frame, unsigned char *const data)
vpCameraParameters getCameraParameters(const rs2_stream &stream, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithDistortion, int index=-1) const
std::string getSensorInfo()
virtual ~vpRealSense2()
void acquire(vpImage< unsigned char > &grey, double *ts=nullptr)
vpQuaternionVector m_quat
Definition: vpRealSense2.h:393
rs2::pipeline m_pipe
Definition: vpRealSense2.h:388
bool open(const rs2::config &cfg=rs2::config())
rs2_intrinsics getIntrinsics(const rs2_stream &stream, int index=-1) const
float getDepthScale()
rs2::pointcloud m_pointcloud
Definition: vpRealSense2.h:390
void getIMUAcceleration(vpColVector *imu_acc, double *ts)
void getPointcloud(const rs2::depth_frame &depth_frame, std::vector< vpColVector > &pointcloud)
void getColorFrame(const rs2::frame &frame, vpImage< vpRGBa > &color)
void getIMUVelocity(vpColVector *imu_vel, double *ts)
float m_depthScale
Definition: vpRealSense2.h:385
vpTranslationVector m_pos
Definition: vpRealSense2.h:392
std::string m_product_line
Definition: vpRealSense2.h:395
std::string getProductLine()
unsigned int getOdometryData(vpHomogeneousMatrix *cMw, vpColVector *odo_vel, vpColVector *odo_acc, double *ts=nullptr)
void getIMUData(vpColVector *imu_vel, vpColVector *imu_acc, double *ts)
vpHomogeneousMatrix getTransformation(const rs2_stream &from, const rs2_stream &to, int from_index=-1) const
void getGreyFrame(const rs2::frame &frame, vpImage< unsigned char > &grey)
rs2::points m_points
Definition: vpRealSense2.h:391
float m_invalidDepthValue
Definition: vpRealSense2.h:386
rs2::pipeline_profile m_pipelineProfile
Definition: vpRealSense2.h:389
Implementation of a rotation matrix and operations on such kind of matrices.
Class that consider the case of a translation vector.