36 #include <visp3/core/vpConfig.h>
38 #if defined(VISP_HAVE_REALSENSE2) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
43 #include <visp3/core/vpImageConvert.h>
44 #include <visp3/sensor/vpRealSense2.h>
46 #define MANUAL_POINTCLOUD 1
50 bool operator==(
const rs2_extrinsics &lhs,
const rs2_extrinsics &rhs)
52 for (
int i = 0; i < 3; i++) {
53 for (
int j = 0; j < 3; j++) {
54 if (std::fabs(lhs.rotation[i * 3 + j] - rhs.rotation[i * 3 + j]) > std::numeric_limits<float>::epsilon()) {
59 if (std::fabs(lhs.translation[i] - rhs.translation[i]) > std::numeric_limits<float>::epsilon()) {
73 : m_depthScale(0.0f), m_invalidDepthValue(0.0f), m_max_Z(8.0f), m_pipe(), m_pipelineProfile(), m_pointcloud(),
74 m_points(), m_pos(), m_quat(), m_rot(), m_product_line(), m_init(false)
90 auto data =
m_pipe.wait_for_frames();
91 auto color_frame = data.get_color_frame();
94 *ts = data.get_timestamp();
105 auto data =
m_pipe.wait_for_frames();
106 auto color_frame = data.get_color_frame();
109 *ts = data.get_timestamp();
124 std::vector<vpColVector> *
const data_pointCloud,
unsigned char *
const data_infrared,
125 rs2::align *
const align_to,
double *ts)
127 acquire(data_image, data_depth, data_pointCloud, data_infrared,
nullptr, align_to, ts);
193 std::vector<vpColVector> *
const data_pointCloud,
unsigned char *
const data_infrared1,
194 unsigned char *
const data_infrared2, rs2::align *
const align_to,
double *ts)
196 auto data =
m_pipe.wait_for_frames();
197 if (align_to !=
nullptr) {
200 #if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
201 data = align_to->process(data);
203 data = align_to->proccess(data);
207 if (data_image !=
nullptr) {
208 auto color_frame = data.get_color_frame();
212 if (data_depth !=
nullptr || data_pointCloud !=
nullptr) {
213 auto depth_frame = data.get_depth_frame();
214 if (data_depth !=
nullptr) {
218 if (data_pointCloud !=
nullptr) {
223 if (data_infrared1 !=
nullptr) {
224 auto infrared_frame = data.first(RS2_STREAM_INFRARED);
228 if (data_infrared2 !=
nullptr) {
229 auto infrared_frame = data.get_infrared_frame(2);
234 *ts = data.get_timestamp();
238 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
261 auto data =
m_pipe.wait_for_frames();
263 if (left !=
nullptr) {
264 auto left_fisheye_frame = data.get_fisheye_frame(1);
265 unsigned int width =
static_cast<unsigned int>(left_fisheye_frame.get_width());
266 unsigned int height =
static_cast<unsigned int>(left_fisheye_frame.get_height());
267 left->
resize(height, width);
271 if (right !=
nullptr) {
272 auto right_fisheye_frame = data.get_fisheye_frame(2);
273 unsigned int width =
static_cast<unsigned int>(right_fisheye_frame.get_width());
274 unsigned int height =
static_cast<unsigned int>(right_fisheye_frame.get_height());
275 right->
resize(height, width);
280 *ts = data.get_timestamp();
315 auto data =
m_pipe.wait_for_frames();
317 if (left !=
nullptr) {
318 auto left_fisheye_frame = data.get_fisheye_frame(1);
319 unsigned int width =
static_cast<unsigned int>(left_fisheye_frame.get_width());
320 unsigned int height =
static_cast<unsigned int>(left_fisheye_frame.get_height());
321 left->
resize(height, width);
325 if (right !=
nullptr) {
326 auto right_fisheye_frame = data.get_fisheye_frame(2);
327 unsigned int width =
static_cast<unsigned int>(right_fisheye_frame.get_width());
328 unsigned int height =
static_cast<unsigned int>(right_fisheye_frame.get_height());
329 right->
resize(height, width);
333 auto pose_frame = data.first_or_default(RS2_STREAM_POSE);
334 auto pose_data = pose_frame.as<rs2::pose_frame>().get_pose_data();
337 *ts = data.get_timestamp();
340 if (cMw !=
nullptr) {
341 m_pos[0] =
static_cast<double>(pose_data.translation.x);
342 m_pos[1] =
static_cast<double>(pose_data.translation.y);
343 m_pos[2] =
static_cast<double>(pose_data.translation.z);
345 m_quat[0] =
static_cast<double>(pose_data.rotation.x);
346 m_quat[1] =
static_cast<double>(pose_data.rotation.y);
347 m_quat[2] =
static_cast<double>(pose_data.rotation.z);
348 m_quat[3] =
static_cast<double>(pose_data.rotation.w);
353 if (odo_vel !=
nullptr) {
354 odo_vel->
resize(6,
false);
355 (*odo_vel)[0] =
static_cast<double>(pose_data.velocity.x);
356 (*odo_vel)[1] =
static_cast<double>(pose_data.velocity.y);
357 (*odo_vel)[2] =
static_cast<double>(pose_data.velocity.z);
358 (*odo_vel)[3] =
static_cast<double>(pose_data.angular_velocity.x);
359 (*odo_vel)[4] =
static_cast<double>(pose_data.angular_velocity.y);
360 (*odo_vel)[5] =
static_cast<double>(pose_data.angular_velocity.z);
363 if (odo_acc !=
nullptr) {
364 odo_acc->
resize(6,
false);
365 (*odo_acc)[0] =
static_cast<double>(pose_data.acceleration.x);
366 (*odo_acc)[1] =
static_cast<double>(pose_data.acceleration.y);
367 (*odo_acc)[2] =
static_cast<double>(pose_data.acceleration.z);
368 (*odo_acc)[3] =
static_cast<double>(pose_data.angular_acceleration.x);
369 (*odo_acc)[4] =
static_cast<double>(pose_data.angular_acceleration.y);
370 (*odo_acc)[5] =
static_cast<double>(pose_data.angular_acceleration.z);
373 if (confidence !=
nullptr) {
374 *confidence = pose_data.tracker_confidence;
393 unsigned int *confidence,
double *ts)
395 auto data =
m_pipe.wait_for_frames();
397 if (left !=
nullptr) {
398 auto left_fisheye_frame = data.get_fisheye_frame(1);
399 unsigned int width =
static_cast<unsigned int>(left_fisheye_frame.get_width());
400 unsigned int height =
static_cast<unsigned int>(left_fisheye_frame.get_height());
401 left->
resize(height, width);
405 if (right !=
nullptr) {
406 auto right_fisheye_frame = data.get_fisheye_frame(2);
407 unsigned int width =
static_cast<unsigned int>(right_fisheye_frame.get_width());
408 unsigned int height =
static_cast<unsigned int>(right_fisheye_frame.get_height());
409 right->
resize(height, width);
413 auto pose_frame = data.first_or_default(RS2_STREAM_POSE);
414 auto pose_data = pose_frame.as<rs2::pose_frame>().get_pose_data();
417 *ts = data.get_timestamp();
420 if (cMw !=
nullptr) {
421 m_pos[0] =
static_cast<double>(pose_data.translation.x);
422 m_pos[1] =
static_cast<double>(pose_data.translation.y);
423 m_pos[2] =
static_cast<double>(pose_data.translation.z);
425 m_quat[0] =
static_cast<double>(pose_data.rotation.x);
426 m_quat[1] =
static_cast<double>(pose_data.rotation.y);
427 m_quat[2] =
static_cast<double>(pose_data.rotation.z);
428 m_quat[3] =
static_cast<double>(pose_data.rotation.w);
433 if (odo_vel !=
nullptr) {
434 odo_vel->
resize(6,
false);
435 (*odo_vel)[0] =
static_cast<double>(pose_data.velocity.x);
436 (*odo_vel)[1] =
static_cast<double>(pose_data.velocity.y);
437 (*odo_vel)[2] =
static_cast<double>(pose_data.velocity.z);
438 (*odo_vel)[3] =
static_cast<double>(pose_data.angular_velocity.x);
439 (*odo_vel)[4] =
static_cast<double>(pose_data.angular_velocity.y);
440 (*odo_vel)[5] =
static_cast<double>(pose_data.angular_velocity.z);
443 if (odo_acc !=
nullptr) {
444 odo_acc->
resize(6,
false);
445 (*odo_acc)[0] =
static_cast<double>(pose_data.acceleration.x);
446 (*odo_acc)[1] =
static_cast<double>(pose_data.acceleration.y);
447 (*odo_acc)[2] =
static_cast<double>(pose_data.acceleration.z);
448 (*odo_acc)[3] =
static_cast<double>(pose_data.angular_acceleration.x);
449 (*odo_acc)[4] =
static_cast<double>(pose_data.angular_acceleration.y);
450 (*odo_acc)[5] =
static_cast<double>(pose_data.angular_acceleration.z);
453 auto accel_frame = data.first_or_default(RS2_STREAM_ACCEL);
454 auto accel_data = accel_frame.as<rs2::motion_frame>().get_motion_data();
456 if (imu_acc !=
nullptr) {
457 imu_acc->
resize(3,
false);
458 (*imu_acc)[0] =
static_cast<double>(accel_data.x);
459 (*imu_acc)[1] =
static_cast<double>(accel_data.y);
460 (*imu_acc)[2] =
static_cast<double>(accel_data.z);
463 auto gyro_frame = data.first_or_default(RS2_STREAM_GYRO);
464 auto gyro_data = gyro_frame.as<rs2::motion_frame>().get_motion_data();
466 if (imu_vel !=
nullptr) {
467 imu_vel->
resize(3,
false);
468 (*imu_vel)[0] =
static_cast<double>(gyro_data.x);
469 (*imu_vel)[1] =
static_cast<double>(gyro_data.y);
470 (*imu_vel)[2] =
static_cast<double>(gyro_data.z);
473 if (confidence !=
nullptr) {
474 *confidence = pose_data.tracker_confidence;
479 #if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
493 std::vector<vpColVector> *
const data_pointCloud,
494 pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud,
unsigned char *
const data_infrared,
495 rs2::align *
const align_to,
double *ts)
497 acquire(data_image, data_depth, data_pointCloud, pointcloud, data_infrared,
nullptr, align_to, ts);
515 std::vector<vpColVector> *
const data_pointCloud,
516 pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud,
unsigned char *
const data_infrared1,
517 unsigned char *
const data_infrared2, rs2::align *
const align_to,
double *ts)
519 auto data =
m_pipe.wait_for_frames();
520 if (align_to !=
nullptr) {
523 #if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
524 data = align_to->process(data);
526 data = align_to->proccess(data);
530 if (data_image !=
nullptr) {
531 auto color_frame = data.get_color_frame();
535 if (data_depth !=
nullptr || data_pointCloud !=
nullptr || pointcloud !=
nullptr) {
536 auto depth_frame = data.get_depth_frame();
537 if (data_depth !=
nullptr) {
541 if (data_pointCloud !=
nullptr) {
545 if (pointcloud !=
nullptr) {
550 if (data_infrared1 !=
nullptr) {
551 auto infrared_frame = data.first(RS2_STREAM_INFRARED);
555 if (data_infrared2 !=
nullptr) {
556 auto infrared_frame = data.get_infrared_frame(2);
561 *ts = data.get_timestamp();
578 std::vector<vpColVector> *
const data_pointCloud,
579 pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud,
unsigned char *
const data_infrared,
580 rs2::align *
const align_to,
double *ts)
582 acquire(data_image, data_depth, data_pointCloud, pointcloud, data_infrared,
nullptr, align_to, ts);
600 std::vector<vpColVector> *
const data_pointCloud,
601 pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud,
unsigned char *
const data_infrared1,
602 unsigned char *
const data_infrared2, rs2::align *
const align_to,
double *ts)
604 auto data =
m_pipe.wait_for_frames();
605 if (align_to !=
nullptr) {
608 #if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
609 data = align_to->process(data);
611 data = align_to->proccess(data);
615 auto color_frame = data.get_color_frame();
616 if (data_image !=
nullptr) {
620 if (data_depth !=
nullptr || data_pointCloud !=
nullptr || pointcloud !=
nullptr) {
621 auto depth_frame = data.get_depth_frame();
622 if (data_depth !=
nullptr) {
626 if (data_pointCloud !=
nullptr) {
630 if (pointcloud !=
nullptr) {
635 if (data_infrared1 !=
nullptr) {
636 auto infrared_frame = data.first(RS2_STREAM_INFRARED);
640 if (data_infrared2 !=
nullptr) {
641 auto infrared_frame = data.get_infrared_frame(2);
646 *ts = data.get_timestamp();
683 auto rs_stream =
m_pipelineProfile.get_stream(stream, index).as<rs2::video_stream_profile>();
684 auto intrinsics = rs_stream.get_intrinsics();
687 double u0 = intrinsics.ppx;
688 double v0 = intrinsics.ppy;
689 double px = intrinsics.fx;
690 double py = intrinsics.fy;
694 double kdu = intrinsics.coeffs[0];
699 std::vector<double> tmp_coefs;
700 tmp_coefs.push_back(
static_cast<double>(intrinsics.coeffs[0]));
701 tmp_coefs.push_back(
static_cast<double>(intrinsics.coeffs[1]));
702 tmp_coefs.push_back(
static_cast<double>(intrinsics.coeffs[2]));
703 tmp_coefs.push_back(
static_cast<double>(intrinsics.coeffs[3]));
704 tmp_coefs.push_back(
static_cast<double>(intrinsics.coeffs[4]));
728 auto vsp =
m_pipelineProfile.get_stream(stream, index).as<rs2::video_stream_profile>();
729 return vsp.get_intrinsics();
734 auto vf = frame.as<rs2::video_frame>();
735 unsigned int width = (
unsigned int)vf.get_width();
736 unsigned int height = (
unsigned int)vf.get_height();
737 color.
resize(height, width);
739 if (frame.get_profile().format() == RS2_FORMAT_RGB8) {
741 reinterpret_cast<unsigned char *
>(color.
bitmap), width, height);
743 else if (frame.get_profile().format() == RS2_FORMAT_RGBA8) {
744 memcpy(
reinterpret_cast<unsigned char *
>(color.
bitmap),
745 const_cast<unsigned char *
>(
static_cast<const unsigned char *
>(frame.get_data())),
746 width * height *
sizeof(
vpRGBa));
748 else if (frame.get_profile().format() == RS2_FORMAT_BGR8) {
750 reinterpret_cast<unsigned char *
>(color.
bitmap), width, height);
764 rs2::pipeline *pipe =
new rs2::pipeline;
765 rs2::pipeline_profile *pipelineProfile =
new rs2::pipeline_profile;
766 *pipelineProfile = pipe->start();
768 rs2::device dev = pipelineProfile->get_device();
771 for (rs2::sensor &sensor : dev.query_sensors()) {
773 if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
780 delete pipelineProfile;
788 auto vf = frame.as<rs2::video_frame>();
789 unsigned int width = (
unsigned int)vf.get_width();
790 unsigned int height = (
unsigned int)vf.get_height();
791 grey.
resize(height, width);
793 if (frame.get_profile().format() == RS2_FORMAT_RGB8) {
795 grey.
bitmap, width, height);
797 else if (frame.get_profile().format() == RS2_FORMAT_RGBA8) {
799 grey.
bitmap, width * height);
801 else if (frame.get_profile().format() == RS2_FORMAT_BGR8) {
803 grey.
bitmap, width, height);
812 auto vf = frame.as<rs2::video_frame>();
813 int size = vf.get_width() * vf.get_height();
815 switch (frame.get_profile().format()) {
816 case RS2_FORMAT_RGB8:
817 case RS2_FORMAT_BGR8:
818 memcpy(data, frame.get_data(), size * 3);
821 case RS2_FORMAT_RGBA8:
822 case RS2_FORMAT_BGRA8:
823 memcpy(data, frame.get_data(), size * 4);
828 memcpy(data, frame.get_data(), size * 2);
832 memcpy(data, frame.get_data(), size);
842 if (
m_depthScale <= std::numeric_limits<float>::epsilon()) {
843 std::stringstream ss;
848 auto vf = depth_frame.as<rs2::video_frame>();
849 const int width = vf.get_width();
850 const int height = vf.get_height();
851 pointcloud.resize((
size_t)(width * height));
853 const uint16_t *p_depth_frame =
reinterpret_cast<const uint16_t *
>(depth_frame.get_data());
854 const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
858 #pragma omp parallel for schedule(dynamic)
859 for (
int i = 0; i < height; i++) {
860 auto depth_pixel_index = i * width;
862 for (
int j = 0; j < width; j++, depth_pixel_index++) {
863 if (p_depth_frame[depth_pixel_index] == 0) {
864 pointcloud[(size_t)depth_pixel_index].resize(4,
false);
868 pointcloud[(size_t)depth_pixel_index][3] = 1.0;
873 auto pixels_distance =
m_depthScale * p_depth_frame[depth_pixel_index];
876 const float pixel[] = { (float)j, (
float)i };
877 rs2_deproject_pixel_to_point(points, &depth_intrinsics, pixel, pixels_distance);
882 pointcloud[(size_t)depth_pixel_index].resize(4,
false);
883 pointcloud[(size_t)depth_pixel_index][0] = points[0];
884 pointcloud[(size_t)depth_pixel_index][1] = points[1];
885 pointcloud[(size_t)depth_pixel_index][2] = points[2];
886 pointcloud[(size_t)depth_pixel_index][3] = 1.0;
891 #if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
894 if (
m_depthScale <= std::numeric_limits<float>::epsilon()) {
895 std::stringstream ss;
900 auto vf = depth_frame.as<rs2::video_frame>();
901 const int width = vf.get_width();
902 const int height = vf.get_height();
903 pointcloud->width = (uint32_t)width;
904 pointcloud->height = (uint32_t)height;
905 pointcloud->resize((
size_t)(width * height));
907 #if MANUAL_POINTCLOUD
908 const uint16_t *p_depth_frame =
reinterpret_cast<const uint16_t *
>(depth_frame.get_data());
909 const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
913 #pragma omp parallel for schedule(dynamic)
914 for (
int i = 0; i < height; i++) {
915 auto depth_pixel_index = i * width;
917 for (
int j = 0; j < width; j++, depth_pixel_index++) {
918 if (p_depth_frame[depth_pixel_index] == 0) {
926 auto pixels_distance =
m_depthScale * p_depth_frame[depth_pixel_index];
929 const float pixel[] = { (float)j, (
float)i };
930 rs2_deproject_pixel_to_point(points, &depth_intrinsics, pixel, pixels_distance);
935 pointcloud->points[(size_t)(depth_pixel_index)].x = points[0];
936 pointcloud->points[(size_t)(depth_pixel_index)].y = points[1];
937 pointcloud->points[(size_t)(depth_pixel_index)].z = points[2];
942 auto vertices =
m_points.get_vertices();
944 for (
size_t i = 0; i <
m_points.size(); i++) {
945 if (vertices[i].z <= 0.0f || vertices[i].z >
m_max_Z) {
951 pointcloud->points[i].x = vertices[i].x;
952 pointcloud->points[i].y = vertices[i].y;
953 pointcloud->points[i].z = vertices[i].z;
960 pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud)
962 if (
m_depthScale <= std::numeric_limits<float>::epsilon()) {
966 auto vf = depth_frame.as<rs2::video_frame>();
967 const int depth_width = vf.get_width();
968 const int depth_height = vf.get_height();
969 pointcloud->width =
static_cast<uint32_t
>(depth_width);
970 pointcloud->height =
static_cast<uint32_t
>(depth_height);
971 pointcloud->resize(
static_cast<uint32_t
>(depth_width * depth_height));
973 vf = color_frame.as<rs2::video_frame>();
974 const int color_width = vf.get_width();
975 const int color_height = vf.get_height();
977 const uint16_t *p_depth_frame =
reinterpret_cast<const uint16_t *
>(depth_frame.get_data());
978 const rs2_extrinsics depth2ColorExtrinsics =
979 depth_frame.get_profile().as<rs2::video_stream_profile>().get_extrinsics_to(
980 color_frame.get_profile().as<rs2::video_stream_profile>());
981 const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
982 const rs2_intrinsics color_intrinsics = color_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
984 auto color_format = color_frame.as<rs2::video_frame>().get_profile().format();
985 const bool swap_rb = color_format == RS2_FORMAT_BGR8 || color_format == RS2_FORMAT_BGRA8;
986 unsigned int nb_color_pixel = (color_format == RS2_FORMAT_RGB8 || color_format == RS2_FORMAT_BGR8) ? 3 : 4;
987 const unsigned char *p_color_frame =
static_cast<const unsigned char *
>(color_frame.get_data());
988 rs2_extrinsics identity;
989 memset(identity.rotation, 0,
sizeof(identity.rotation));
990 memset(identity.translation, 0,
sizeof(identity.translation));
991 for (
int i = 0; i < 3; i++) {
992 identity.rotation[i * 3 + i] = 1;
994 const bool registered_streams =
995 (depth2ColorExtrinsics == identity) && (color_width == depth_width) && (color_height == depth_height);
999 #pragma omp parallel for schedule(dynamic)
1000 for (
int i = 0; i < depth_height; i++) {
1001 auto depth_pixel_index = i * depth_width;
1003 for (
int j = 0; j < depth_width; j++, depth_pixel_index++) {
1004 if (p_depth_frame[depth_pixel_index] == 0) {
1012 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1013 unsigned int r = 96, g = 157, b = 198;
1014 uint32_t rgb = (
static_cast<uint32_t
>(r) << 16 |
static_cast<uint32_t
>(g) << 8 |
static_cast<uint32_t
>(b));
1016 pointcloud->points[(size_t)depth_pixel_index].rgb = *
reinterpret_cast<float *
>(&rgb);
1018 pointcloud->points[(size_t)depth_pixel_index].r = (uint8_t)96;
1019 pointcloud->points[(size_t)depth_pixel_index].g = (uint8_t)157;
1020 pointcloud->points[(size_t)depth_pixel_index].b = (uint8_t)198;
1026 auto pixels_distance =
m_depthScale * p_depth_frame[depth_pixel_index];
1028 float depth_point[3];
1029 const float pixel[] = { (float)j, (
float)i };
1030 rs2_deproject_pixel_to_point(depth_point, &depth_intrinsics, pixel, pixels_distance);
1032 if (pixels_distance >
m_max_Z) {
1036 pointcloud->points[(size_t)depth_pixel_index].x = depth_point[0];
1037 pointcloud->points[(size_t)depth_pixel_index].y = depth_point[1];
1038 pointcloud->points[(size_t)depth_pixel_index].z = depth_point[2];
1040 if (!registered_streams) {
1041 float color_point[3];
1042 rs2_transform_point_to_point(color_point, &depth2ColorExtrinsics, depth_point);
1043 float color_pixel[2];
1044 rs2_project_point_to_pixel(color_pixel, &color_intrinsics, color_point);
1046 if (color_pixel[1] < 0 || color_pixel[1] >= color_height || color_pixel[0] < 0 ||
1047 color_pixel[0] >= color_width) {
1051 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1052 unsigned int r = 96, g = 157, b = 198;
1053 uint32_t rgb = (
static_cast<uint32_t
>(r) << 16 |
static_cast<uint32_t
>(g) << 8 |
static_cast<uint32_t
>(b));
1055 pointcloud->points[(size_t)depth_pixel_index].rgb = *
reinterpret_cast<float *
>(&rgb);
1057 pointcloud->points[(size_t)depth_pixel_index].r = (uint8_t)96;
1058 pointcloud->points[(size_t)depth_pixel_index].g = (uint8_t)157;
1059 pointcloud->points[(size_t)depth_pixel_index].b = (uint8_t)198;
1063 unsigned int i_ = (
unsigned int)color_pixel[1];
1064 unsigned int j_ = (
unsigned int)color_pixel[0];
1066 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1070 (
static_cast<uint32_t
>(p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel]) |
1071 static_cast<uint32_t
>(p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 1]) << 8 |
1072 static_cast<uint32_t
>(p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 2])
1077 (
static_cast<uint32_t
>(p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel]) << 16 |
1078 static_cast<uint32_t
>(p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 1]) << 8 |
1079 static_cast<uint32_t
>(p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 2]));
1082 pointcloud->points[(size_t)(i * depth_width + j)].rgb = *
reinterpret_cast<float *
>(&rgb);
1085 pointcloud->points[(size_t)depth_pixel_index].b =
1086 p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel];
1087 pointcloud->points[(size_t)depth_pixel_index].g =
1088 p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 1];
1089 pointcloud->points[(size_t)depth_pixel_index].r =
1090 p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 2];
1093 pointcloud->points[(size_t)depth_pixel_index].r =
1094 p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel];
1095 pointcloud->points[(size_t)depth_pixel_index].g =
1096 p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 1];
1097 pointcloud->points[(size_t)depth_pixel_index].b =
1098 p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 2];
1104 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1107 rgb = (
static_cast<uint32_t
>(p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel]) |
1108 static_cast<uint32_t
>(p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 1]) << 8 |
1109 static_cast<uint32_t
>(p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 2]) << 16);
1112 rgb = (
static_cast<uint32_t
>(p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel]) << 16 |
1113 static_cast<uint32_t
>(p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 1]) << 8 |
1114 static_cast<uint32_t
>(p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 2]));
1117 pointcloud->points[(size_t)(i * depth_width + j)].rgb = *
reinterpret_cast<float *
>(&rgb);
1120 pointcloud->points[(size_t)depth_pixel_index].b =
1121 p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel];
1122 pointcloud->points[(size_t)depth_pixel_index].g =
1123 p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 1];
1124 pointcloud->points[(size_t)depth_pixel_index].r =
1125 p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 2];
1128 pointcloud->points[(size_t)depth_pixel_index].r =
1129 p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel];
1130 pointcloud->points[(size_t)depth_pixel_index].g =
1131 p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 1];
1132 pointcloud->points[(size_t)depth_pixel_index].b =
1133 p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 2];
1154 if (from_index != -1)
1156 if (from_index == 1)
1158 else if (from_index == 2)
1165 rs2_extrinsics extrinsics = from_stream.get_extrinsics_to(to_stream);
1169 for (
unsigned int i = 0; i < 3; i++) {
1170 t[i] = extrinsics.translation[i];
1171 for (
unsigned int j = 0; j < 3; j++)
1172 R[i][j] = extrinsics.rotation[j * 3 + i];
1179 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1192 auto frame =
m_pipe.wait_for_frames();
1193 auto f = frame.first_or_default(RS2_STREAM_POSE);
1194 auto pose_data = f.as<rs2::pose_frame>().get_pose_data();
1197 *ts = frame.get_timestamp();
1199 if (cMw !=
nullptr) {
1200 m_pos[0] =
static_cast<double>(pose_data.translation.x);
1201 m_pos[1] =
static_cast<double>(pose_data.translation.y);
1202 m_pos[2] =
static_cast<double>(pose_data.translation.z);
1204 m_quat[0] =
static_cast<double>(pose_data.rotation.x);
1205 m_quat[1] =
static_cast<double>(pose_data.rotation.y);
1206 m_quat[2] =
static_cast<double>(pose_data.rotation.z);
1207 m_quat[3] =
static_cast<double>(pose_data.rotation.w);
1212 if (odo_vel !=
nullptr) {
1213 odo_vel->
resize(6,
false);
1214 (*odo_vel)[0] =
static_cast<double>(pose_data.velocity.x);
1215 (*odo_vel)[1] =
static_cast<double>(pose_data.velocity.y);
1216 (*odo_vel)[2] =
static_cast<double>(pose_data.velocity.z);
1217 (*odo_vel)[3] =
static_cast<double>(pose_data.angular_velocity.x);
1218 (*odo_vel)[4] =
static_cast<double>(pose_data.angular_velocity.y);
1219 (*odo_vel)[5] =
static_cast<double>(pose_data.angular_velocity.z);
1222 if (odo_acc !=
nullptr) {
1223 odo_acc->
resize(6,
false);
1224 (*odo_acc)[0] =
static_cast<double>(pose_data.acceleration.x);
1225 (*odo_acc)[1] =
static_cast<double>(pose_data.acceleration.y);
1226 (*odo_acc)[2] =
static_cast<double>(pose_data.acceleration.z);
1227 (*odo_acc)[3] =
static_cast<double>(pose_data.angular_acceleration.x);
1228 (*odo_acc)[4] =
static_cast<double>(pose_data.angular_acceleration.y);
1229 (*odo_acc)[5] =
static_cast<double>(pose_data.angular_acceleration.z);
1232 return pose_data.tracker_confidence;
1257 auto frame =
m_pipe.wait_for_frames();
1258 auto f = frame.first_or_default(RS2_STREAM_ACCEL);
1259 auto imu_acc_data = f.as<rs2::motion_frame>().get_motion_data();
1262 *ts = f.get_timestamp();
1264 if (imu_acc !=
nullptr) {
1265 imu_acc->
resize(3,
false);
1266 (*imu_acc)[0] =
static_cast<double>(imu_acc_data.x);
1267 (*imu_acc)[1] =
static_cast<double>(imu_acc_data.y);
1268 (*imu_acc)[2] =
static_cast<double>(imu_acc_data.z);
1294 auto frame =
m_pipe.wait_for_frames();
1295 auto f = frame.first_or_default(RS2_STREAM_GYRO);
1296 auto imu_vel_data = f.as<rs2::motion_frame>().get_motion_data();
1299 *ts = f.get_timestamp();
1301 if (imu_vel !=
nullptr) {
1302 imu_vel->
resize(3,
false);
1303 (*imu_vel)[0] =
static_cast<double>(imu_vel_data.x);
1304 (*imu_vel)[1] =
static_cast<double>(imu_vel_data.x);
1305 (*imu_vel)[2] =
static_cast<double>(imu_vel_data.x);
1331 auto data =
m_pipe.wait_for_frames();
1334 *ts = data.get_timestamp();
1336 if (imu_acc !=
nullptr) {
1337 auto acc_data = data.first_or_default(RS2_STREAM_ACCEL);
1338 auto imu_acc_data = acc_data.as<rs2::motion_frame>().get_motion_data();
1340 imu_acc->
resize(3,
false);
1341 (*imu_acc)[0] =
static_cast<double>(imu_acc_data.x);
1342 (*imu_acc)[1] =
static_cast<double>(imu_acc_data.y);
1343 (*imu_acc)[2] =
static_cast<double>(imu_acc_data.z);
1346 if (imu_vel !=
nullptr) {
1347 auto vel_data = data.first_or_default(RS2_STREAM_GYRO);
1348 auto imu_vel_data = vel_data.as<rs2::motion_frame>().get_motion_data();
1350 imu_vel->
resize(3,
false);
1351 (*imu_vel)[0] =
static_cast<double>(imu_vel_data.x);
1352 (*imu_vel)[1] =
static_cast<double>(imu_vel_data.y);
1353 (*imu_vel)[2] =
static_cast<double>(imu_vel_data.z);
1371 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1377 for (rs2::sensor &sensor : dev.query_sensors()) {
1379 if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
1404 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1410 for (rs2::sensor &sensor : dev.query_sensors()) {
1412 if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
1427 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1437 auto list = ctx.query_devices();
1438 if (list.size() > 0) {
1440 auto dev = list.front();
1441 auto sensors = dev.query_sensors();
1442 if (dev.supports(RS2_CAMERA_INFO_PRODUCT_LINE)) {
1449 return (std::string(
"unknown"));
1464 std::ostringstream oss;
1472 void print(
const rs2_extrinsics &extrinsics, std::ostream &os)
1474 std::stringstream ss;
1475 ss <<
"Rotation Matrix:\n";
1477 for (
auto i = 0; i < 3; ++i) {
1478 for (
auto j = 0; j < 3; ++j) {
1479 ss << std::left << std::setw(15) << std::setprecision(5) << extrinsics.rotation[j * 3 + i];
1484 ss <<
"\nTranslation Vector: ";
1485 for (
size_t i = 0; i <
sizeof(extrinsics.translation) /
sizeof(extrinsics.translation[0]); ++i)
1486 ss << std::setprecision(15) << extrinsics.translation[i] <<
" ";
1488 os << ss.str() <<
"\n\n";
1491 void print(
const rs2_intrinsics &intrinsics, std::ostream &os)
1493 std::stringstream ss;
1494 ss << std::left << std::setw(14) <<
"Width: "
1495 <<
"\t" << intrinsics.width <<
"\n"
1496 << std::left << std::setw(14) <<
"Height: "
1497 <<
"\t" << intrinsics.height <<
"\n"
1498 << std::left << std::setw(14) <<
"PPX: "
1499 <<
"\t" << std::setprecision(15) << intrinsics.ppx <<
"\n"
1500 << std::left << std::setw(14) <<
"PPY: "
1501 <<
"\t" << std::setprecision(15) << intrinsics.ppy <<
"\n"
1502 << std::left << std::setw(14) <<
"Fx: "
1503 <<
"\t" << std::setprecision(15) << intrinsics.fx <<
"\n"
1504 << std::left << std::setw(14) <<
"Fy: "
1505 <<
"\t" << std::setprecision(15) << intrinsics.fy <<
"\n"
1506 << std::left << std::setw(14) <<
"Distortion: "
1507 <<
"\t" << rs2_distortion_to_string(intrinsics.model) <<
"\n"
1508 << std::left << std::setw(14) <<
"Coeffs: ";
1510 for (
size_t i = 0; i <
sizeof(intrinsics.coeffs) /
sizeof(intrinsics.coeffs[0]); ++i)
1511 ss <<
"\t" << std::setprecision(15) << intrinsics.coeffs[i] <<
" ";
1513 os << ss.str() <<
"\n\n";
1516 void safe_get_intrinsics(
const rs2::video_stream_profile &profile, rs2_intrinsics &intrinsics)
1519 intrinsics = profile.get_intrinsics();
1525 bool operator==(
const rs2_intrinsics &lhs,
const rs2_intrinsics &rhs)
1527 return lhs.width == rhs.width && lhs.height == rhs.height && lhs.ppx == rhs.ppx && lhs.ppy == rhs.ppy &&
1528 lhs.fx == rhs.fx && lhs.fy == rhs.fy && lhs.model == rhs.model &&
1529 !std::memcmp(lhs.coeffs, rhs.coeffs,
sizeof(rhs.coeffs));
1532 std::string get_str_formats(
const std::set<rs2_format> &formats)
1534 std::stringstream ss;
1535 for (
auto format = formats.begin(); format != formats.end(); ++format) {
1536 ss << *format << ((format != formats.end()) && (next(format) == formats.end()) ?
"" :
"/");
1541 struct stream_and_resolution
1547 std::string stream_name;
1549 bool operator<(
const stream_and_resolution &obj)
const
1551 return (std::make_tuple(stream, stream_index, width, height) <
1552 std::make_tuple(obj.stream, obj.stream_index, obj.width, obj.height));
1556 struct stream_and_index
1561 bool operator<(
const stream_and_index &obj)
const
1563 return (std::make_tuple(stream, stream_index) < std::make_tuple(obj.stream, obj.stream_index));
// NOTE(review): the lines below are doxygen-extraction residue — the numeric
// tokens fused at the start of lines ("1595", "1600", ...) are original source
// line numbers, not code, and the enclosing function signature was lost by the
// extractor. Judging from the `os`/`dev` locals this is presumably the ViSP
// stream-insertion operator that prints a full RealSense device description —
// confirm against the complete source. Code left byte-identical; only
// comments added.
//
// --- Device header: name, serial number, firmware version -------------------
1595 os << std::left << std::setw(30) << dev.get_info(RS2_CAMERA_INFO_NAME) << std::setw(20)
1596 << dev.get_info(RS2_CAMERA_INFO_SERIAL_NUMBER) << std::setw(20) << dev.get_info(RS2_CAMERA_INFO_FIRMWARE_VERSION)
// --- Device info: every rs2_camera_info field the device supports -----------
1600 os <<
" Device info: \n";
1601 for (
auto j = 0; j < RS2_CAMERA_INFO_COUNT; ++j) {
1602 auto param =
static_cast<rs2_camera_info
>(j);
// Only print fields this particular device actually exposes.
1603 if (dev.supports(param))
1604 os <<
" " << std::left << std::setw(30) << rs2_camera_info_to_string(rs2_camera_info(param)) <<
": \t"
1605 << dev.get_info(param) <<
"\n";
// --- Per-sensor options: min / max / step / default for each rs2_option -----
1610 for (
auto &&sensor : dev.query_sensors()) {
1611 os <<
"Options for " << sensor.get_info(RS2_CAMERA_INFO_NAME) << std::endl;
1613 os << std::setw(55) <<
" Supported options:" << std::setw(10) <<
"min" << std::setw(10) <<
" max" << std::setw(6)
1614 <<
" step" << std::setw(10) <<
" default" << std::endl;
1615 for (
auto j = 0; j < RS2_OPTION_COUNT; ++j) {
1616 auto opt =
static_cast<rs2_option
>(j);
1617 if (sensor.supports(opt)) {
1618 auto range = sensor.get_option_range(opt);
1619 os <<
" " << std::left << std::setw(50) << opt <<
" : " << std::setw(5) << range.min <<
"... "
1620 << std::setw(12) << range.max << std::setw(6) << range.step << std::setw(10) << range.def <<
"\n";
// --- Per-sensor stream profiles: resolution / fps / format -------------------
1627 for (
auto &&sensor : dev.query_sensors()) {
1628 os <<
"Stream Profiles supported by " << sensor.get_info(RS2_CAMERA_INFO_NAME) <<
"\n";
1630 os << std::setw(55) <<
" Supported modes:" << std::setw(10) <<
"stream" << std::setw(10) <<
" resolution"
1631 << std::setw(6) <<
" fps" << std::setw(10) <<
" format"
1634 for (
auto &&profile : sensor.get_stream_profiles()) {
// Video profiles have a resolution; non-video profiles (motion, pose)
// fall through to the branch without width x height below.
1635 if (
auto video = profile.as<rs2::video_stream_profile>()) {
1636 os <<
" " << profile.stream_name() <<
"\t " << video.width() <<
"x" << video.height() <<
"\t@ "
1637 << profile.fps() <<
"Hz\t" << profile.format() <<
"\n";
1640 os <<
" " << profile.stream_name() <<
"\t@ " << profile.fps() <<
"Hz\t" << profile.format() <<
"\n";
// --- Collect one representative profile per stream, and deduplicate the
// intrinsics: profiles sharing (stream, index, resolution) and identical
// intrinsics are merged, accumulating their pixel formats in a set. -----------
1647 std::map<stream_and_index, rs2::stream_profile> streams;
1648 std::map<stream_and_resolution, std::vector<std::pair<std::set<rs2_format>, rs2_intrinsics> > > intrinsics_map;
1649 for (
auto &&sensor : dev.query_sensors()) {
1651 for (
auto &&profile : sensor.get_stream_profiles()) {
1652 if (
auto video = profile.as<rs2::video_stream_profile>()) {
// First profile seen for a (stream, index) pair becomes its representative.
1653 if (streams.find(stream_and_index { profile.stream_type(), profile.stream_index() }) == streams.end()) {
1654 streams[stream_and_index { profile.stream_type(), profile.stream_index() }] = profile;
1657 rs2_intrinsics intrinsics {};
1658 stream_and_resolution stream_res { profile.stream_type(), profile.stream_index(), video.width(), video.height(),
1659 profile.stream_name() };
// safe_get_intrinsics leaves `intrinsics` zero-initialized when the
// profile cannot provide them (detected by comparing against
// rs2_intrinsics{} when printing below).
1660 safe_get_intrinsics(video, intrinsics);
// Look for an already-recorded identical intrinsics entry at this resolution.
1661 auto it = std::find_if(
1662 (intrinsics_map[stream_res]).begin(), (intrinsics_map[stream_res]).end(),
1663 [&](
const std::pair<std::set<rs2_format>, rs2_intrinsics> &kvp) {
return intrinsics == kvp.second; });
1664 if (it == (intrinsics_map[stream_res]).end()) {
1665 (intrinsics_map[stream_res]).push_back({ {profile.format()}, intrinsics });
// Same intrinsics already known: just record this profile's format too.
1668 it->first.insert(profile.format());
// --- Print the deduplicated intrinsics ---------------------------------------
1676 os <<
"Provided Intrinsic:\n";
1677 for (
auto &kvp : intrinsics_map) {
1678 auto stream_res = kvp.first;
1679 for (
auto &intrinsics : kvp.second) {
1680 auto formats = get_str_formats(intrinsics.first);
1681 os <<
"Intrinsic of \"" << stream_res.stream_name <<
"\"\t " << stream_res.width <<
"x" << stream_res.height
1682 <<
"\t " << formats <<
"\n";
// A default-constructed (all-zero) intrinsics means none was available.
1683 if (intrinsics.second == rs2_intrinsics {}) {
1684 os <<
"Intrinsic NOT available!\n\n";
1687 print(intrinsics.second, os);
// --- Print extrinsics between every ordered pair of representative streams ---
1693 os <<
"\nProvided Extrinsic:\n";
1694 for (
auto kvp1 = streams.begin(); kvp1 != streams.end(); ++kvp1) {
1695 for (
auto kvp2 = streams.begin(); kvp2 != streams.end(); ++kvp2) {
1696 os <<
"Extrinsic from \"" << kvp1->second.stream_name() <<
"\"\t "
1698 <<
"\t \"" << kvp2->second.stream_name() <<
"\"\n";
1699 auto extrinsics = kvp1->second.get_extrinsics_to(kvp2->second);
1700 print(extrinsics, os);
#elif !defined(VISP_BUILD_SHARED_LIBS)
// When librealsense2 support is disabled and ViSP is built as a static
// library, emit one dummy symbol so this translation unit is not empty
// (avoids "no symbols" warnings from ranlib/linkers).
1709 void dummy_vpRealSense2() { };
bool operator==(const vpArray2D< double > &A) const
friend std::ostream & operator<<(std::ostream &s, const vpArray2D< Type > &A)
Generic class defining intrinsic camera parameters.
void initPersProjWithoutDistortion(double px, double py, double u0, double v0)
vpCameraParametersProjType
@ perspectiveProjWithDistortion
Perspective projection with distortion model.
@ ProjWithKannalaBrandtDistortion
Projection with Kannala-Brandt distortion model.
@ perspectiveProjWithoutDistortion
Perspective projection without distortion model.
void initPersProjWithDistortion(double px, double py, double u0, double v0, double kud, double kdu)
void initProjWithKannalaBrandtDistortion(double px, double py, double u0, double v0, const std::vector< double > &distortion_coefficients)
Implementation of column vector and the associated operations.
void resize(unsigned int i, bool flagNullify=true)
error that can be emitted by ViSP classes.
Implementation of an homogeneous matrix and operations on such kind of matrices.
static void RGBToGrey(unsigned char *rgb, unsigned char *grey, unsigned int width, unsigned int height, bool flip=false)
static void RGBaToGrey(unsigned char *rgba, unsigned char *grey, unsigned int width, unsigned int height, unsigned int nThreads=0)
static void RGBToRGBa(unsigned char *rgb, unsigned char *rgba, unsigned int size)
static void BGRToGrey(unsigned char *bgr, unsigned char *grey, unsigned int width, unsigned int height, bool flip=false, unsigned int nThreads=0)
static void BGRToRGBa(unsigned char *bgr, unsigned char *rgba, unsigned int width, unsigned int height, bool flip=false)
void resize(unsigned int h, unsigned int w)
resize the image : Image initialization
Type * bitmap
points toward the bitmap
void getNativeFrameData(const rs2::frame &frame, unsigned char *const data)
vpCameraParameters getCameraParameters(const rs2_stream &stream, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithDistortion, int index=-1) const
std::string getSensorInfo()
void acquire(vpImage< unsigned char > &grey, double *ts=nullptr)
vpQuaternionVector m_quat
bool open(const rs2::config &cfg=rs2::config())
rs2_intrinsics getIntrinsics(const rs2_stream &stream, int index=-1) const
rs2::pointcloud m_pointcloud
void getIMUAcceleration(vpColVector *imu_acc, double *ts)
void getPointcloud(const rs2::depth_frame &depth_frame, std::vector< vpColVector > &pointcloud)
void getColorFrame(const rs2::frame &frame, vpImage< vpRGBa > &color)
void getIMUVelocity(vpColVector *imu_vel, double *ts)
vpTranslationVector m_pos
std::string m_product_line
std::string getProductLine()
unsigned int getOdometryData(vpHomogeneousMatrix *cMw, vpColVector *odo_vel, vpColVector *odo_acc, double *ts=nullptr)
void getIMUData(vpColVector *imu_vel, vpColVector *imu_acc, double *ts)
vpHomogeneousMatrix getTransformation(const rs2_stream &from, const rs2_stream &to, int from_index=-1) const
void getGreyFrame(const rs2::frame &frame, vpImage< unsigned char > &grey)
float m_invalidDepthValue
rs2::pipeline_profile m_pipelineProfile
Implementation of a rotation matrix and operations on such kind of matrices.
Class that consider the case of a translation vector.