36 #include <visp3/core/vpConfig.h>
38 #if defined(VISP_HAVE_REALSENSE2) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
43 #include <visp3/core/vpImageConvert.h>
44 #include <visp3/sensor/vpRealSense2.h>
46 #define MANUAL_POINTCLOUD 1
48 #if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
51 bool operator==(
const rs2_extrinsics &lhs,
const rs2_extrinsics &rhs)
53 for (
int i = 0; i < 3; i++) {
54 for (
int j = 0; j < 3; j++) {
55 if (std::fabs(lhs.rotation[i * 3 + j] - rhs.rotation[i * 3 + j]) > std::numeric_limits<float>::epsilon()) {
60 if (std::fabs(lhs.translation[i] - rhs.translation[i]) > std::numeric_limits<float>::epsilon()) {
75 : m_depthScale(0.0f), m_invalidDepthValue(0.0f), m_max_Z(8.0f), m_pipe(), m_pipelineProfile(), m_pointcloud(),
76 m_points(), m_pos(), m_quat(), m_rot(), m_product_line(), m_init(false)
92 auto data =
m_pipe.wait_for_frames();
93 auto color_frame = data.get_color_frame();
96 *ts = data.get_timestamp();
107 auto data =
m_pipe.wait_for_frames();
108 auto color_frame = data.get_color_frame();
111 *ts = data.get_timestamp();
126 std::vector<vpColVector> *
const data_pointCloud,
unsigned char *
const data_infrared,
127 rs2::align *
const align_to,
double *ts)
129 acquire(data_image, data_depth, data_pointCloud, data_infrared,
nullptr, align_to, ts);
195 std::vector<vpColVector> *
const data_pointCloud,
unsigned char *
const data_infrared1,
196 unsigned char *
const data_infrared2, rs2::align *
const align_to,
double *ts)
198 auto data =
m_pipe.wait_for_frames();
199 if (align_to !=
nullptr) {
202 #if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
203 data = align_to->process(data);
205 data = align_to->proccess(data);
209 if (data_image !=
nullptr) {
210 auto color_frame = data.get_color_frame();
214 if (data_depth !=
nullptr || data_pointCloud !=
nullptr) {
215 auto depth_frame = data.get_depth_frame();
216 if (data_depth !=
nullptr) {
220 if (data_pointCloud !=
nullptr) {
225 if (data_infrared1 !=
nullptr) {
226 auto infrared_frame = data.first(RS2_STREAM_INFRARED);
230 if (data_infrared2 !=
nullptr) {
231 auto infrared_frame = data.get_infrared_frame(2);
236 *ts = data.get_timestamp();
240 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
263 auto data =
m_pipe.wait_for_frames();
265 if (left !=
nullptr) {
266 auto left_fisheye_frame = data.get_fisheye_frame(1);
267 unsigned int width =
static_cast<unsigned int>(left_fisheye_frame.get_width());
268 unsigned int height =
static_cast<unsigned int>(left_fisheye_frame.get_height());
269 left->
resize(height, width);
273 if (right !=
nullptr) {
274 auto right_fisheye_frame = data.get_fisheye_frame(2);
275 unsigned int width =
static_cast<unsigned int>(right_fisheye_frame.get_width());
276 unsigned int height =
static_cast<unsigned int>(right_fisheye_frame.get_height());
277 right->
resize(height, width);
282 *ts = data.get_timestamp();
317 auto data =
m_pipe.wait_for_frames();
319 if (left !=
nullptr) {
320 auto left_fisheye_frame = data.get_fisheye_frame(1);
321 unsigned int width =
static_cast<unsigned int>(left_fisheye_frame.get_width());
322 unsigned int height =
static_cast<unsigned int>(left_fisheye_frame.get_height());
323 left->
resize(height, width);
327 if (right !=
nullptr) {
328 auto right_fisheye_frame = data.get_fisheye_frame(2);
329 unsigned int width =
static_cast<unsigned int>(right_fisheye_frame.get_width());
330 unsigned int height =
static_cast<unsigned int>(right_fisheye_frame.get_height());
331 right->
resize(height, width);
335 auto pose_frame = data.first_or_default(RS2_STREAM_POSE);
336 auto pose_data = pose_frame.as<rs2::pose_frame>().get_pose_data();
339 *ts = data.get_timestamp();
342 if (cMw !=
nullptr) {
343 m_pos[0] =
static_cast<double>(pose_data.translation.x);
344 m_pos[1] =
static_cast<double>(pose_data.translation.y);
345 m_pos[2] =
static_cast<double>(pose_data.translation.z);
347 m_quat[0] =
static_cast<double>(pose_data.rotation.x);
348 m_quat[1] =
static_cast<double>(pose_data.rotation.y);
349 m_quat[2] =
static_cast<double>(pose_data.rotation.z);
350 m_quat[3] =
static_cast<double>(pose_data.rotation.w);
355 if (odo_vel !=
nullptr) {
356 odo_vel->
resize(6,
false);
357 (*odo_vel)[0] =
static_cast<double>(pose_data.velocity.x);
358 (*odo_vel)[1] =
static_cast<double>(pose_data.velocity.y);
359 (*odo_vel)[2] =
static_cast<double>(pose_data.velocity.z);
360 (*odo_vel)[3] =
static_cast<double>(pose_data.angular_velocity.x);
361 (*odo_vel)[4] =
static_cast<double>(pose_data.angular_velocity.y);
362 (*odo_vel)[5] =
static_cast<double>(pose_data.angular_velocity.z);
365 if (odo_acc !=
nullptr) {
366 odo_acc->
resize(6,
false);
367 (*odo_acc)[0] =
static_cast<double>(pose_data.acceleration.x);
368 (*odo_acc)[1] =
static_cast<double>(pose_data.acceleration.y);
369 (*odo_acc)[2] =
static_cast<double>(pose_data.acceleration.z);
370 (*odo_acc)[3] =
static_cast<double>(pose_data.angular_acceleration.x);
371 (*odo_acc)[4] =
static_cast<double>(pose_data.angular_acceleration.y);
372 (*odo_acc)[5] =
static_cast<double>(pose_data.angular_acceleration.z);
375 if (confidence !=
nullptr) {
376 *confidence = pose_data.tracker_confidence;
395 unsigned int *confidence,
double *ts)
397 auto data =
m_pipe.wait_for_frames();
399 if (left !=
nullptr) {
400 auto left_fisheye_frame = data.get_fisheye_frame(1);
401 unsigned int width =
static_cast<unsigned int>(left_fisheye_frame.get_width());
402 unsigned int height =
static_cast<unsigned int>(left_fisheye_frame.get_height());
403 left->
resize(height, width);
407 if (right !=
nullptr) {
408 auto right_fisheye_frame = data.get_fisheye_frame(2);
409 unsigned int width =
static_cast<unsigned int>(right_fisheye_frame.get_width());
410 unsigned int height =
static_cast<unsigned int>(right_fisheye_frame.get_height());
411 right->
resize(height, width);
415 auto pose_frame = data.first_or_default(RS2_STREAM_POSE);
416 auto pose_data = pose_frame.as<rs2::pose_frame>().get_pose_data();
419 *ts = data.get_timestamp();
422 if (cMw !=
nullptr) {
423 m_pos[0] =
static_cast<double>(pose_data.translation.x);
424 m_pos[1] =
static_cast<double>(pose_data.translation.y);
425 m_pos[2] =
static_cast<double>(pose_data.translation.z);
427 m_quat[0] =
static_cast<double>(pose_data.rotation.x);
428 m_quat[1] =
static_cast<double>(pose_data.rotation.y);
429 m_quat[2] =
static_cast<double>(pose_data.rotation.z);
430 m_quat[3] =
static_cast<double>(pose_data.rotation.w);
435 if (odo_vel !=
nullptr) {
436 odo_vel->
resize(6,
false);
437 (*odo_vel)[0] =
static_cast<double>(pose_data.velocity.x);
438 (*odo_vel)[1] =
static_cast<double>(pose_data.velocity.y);
439 (*odo_vel)[2] =
static_cast<double>(pose_data.velocity.z);
440 (*odo_vel)[3] =
static_cast<double>(pose_data.angular_velocity.x);
441 (*odo_vel)[4] =
static_cast<double>(pose_data.angular_velocity.y);
442 (*odo_vel)[5] =
static_cast<double>(pose_data.angular_velocity.z);
445 if (odo_acc !=
nullptr) {
446 odo_acc->
resize(6,
false);
447 (*odo_acc)[0] =
static_cast<double>(pose_data.acceleration.x);
448 (*odo_acc)[1] =
static_cast<double>(pose_data.acceleration.y);
449 (*odo_acc)[2] =
static_cast<double>(pose_data.acceleration.z);
450 (*odo_acc)[3] =
static_cast<double>(pose_data.angular_acceleration.x);
451 (*odo_acc)[4] =
static_cast<double>(pose_data.angular_acceleration.y);
452 (*odo_acc)[5] =
static_cast<double>(pose_data.angular_acceleration.z);
455 auto accel_frame = data.first_or_default(RS2_STREAM_ACCEL);
456 auto accel_data = accel_frame.as<rs2::motion_frame>().get_motion_data();
458 if (imu_acc !=
nullptr) {
459 imu_acc->
resize(3,
false);
460 (*imu_acc)[0] =
static_cast<double>(accel_data.x);
461 (*imu_acc)[1] =
static_cast<double>(accel_data.y);
462 (*imu_acc)[2] =
static_cast<double>(accel_data.z);
465 auto gyro_frame = data.first_or_default(RS2_STREAM_GYRO);
466 auto gyro_data = gyro_frame.as<rs2::motion_frame>().get_motion_data();
468 if (imu_vel !=
nullptr) {
469 imu_vel->
resize(3,
false);
470 (*imu_vel)[0] =
static_cast<double>(gyro_data.x);
471 (*imu_vel)[1] =
static_cast<double>(gyro_data.y);
472 (*imu_vel)[2] =
static_cast<double>(gyro_data.z);
475 if (confidence !=
nullptr) {
476 *confidence = pose_data.tracker_confidence;
481 #if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
495 std::vector<vpColVector> *
const data_pointCloud,
496 pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud,
unsigned char *
const data_infrared,
497 rs2::align *
const align_to,
double *ts)
499 acquire(data_image, data_depth, data_pointCloud, pointcloud, data_infrared,
nullptr, align_to, ts);
517 std::vector<vpColVector> *
const data_pointCloud,
518 pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud,
unsigned char *
const data_infrared1,
519 unsigned char *
const data_infrared2, rs2::align *
const align_to,
double *ts)
521 auto data =
m_pipe.wait_for_frames();
522 if (align_to !=
nullptr) {
525 #if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
526 data = align_to->process(data);
528 data = align_to->proccess(data);
532 if (data_image !=
nullptr) {
533 auto color_frame = data.get_color_frame();
537 if (data_depth !=
nullptr || data_pointCloud !=
nullptr || pointcloud !=
nullptr) {
538 auto depth_frame = data.get_depth_frame();
539 if (data_depth !=
nullptr) {
543 if (data_pointCloud !=
nullptr) {
547 if (pointcloud !=
nullptr) {
552 if (data_infrared1 !=
nullptr) {
553 auto infrared_frame = data.first(RS2_STREAM_INFRARED);
557 if (data_infrared2 !=
nullptr) {
558 auto infrared_frame = data.get_infrared_frame(2);
563 *ts = data.get_timestamp();
580 std::vector<vpColVector> *
const data_pointCloud,
581 pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud,
unsigned char *
const data_infrared,
582 rs2::align *
const align_to,
double *ts)
584 acquire(data_image, data_depth, data_pointCloud, pointcloud, data_infrared,
nullptr, align_to, ts);
602 std::vector<vpColVector> *
const data_pointCloud,
603 pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud,
unsigned char *
const data_infrared1,
604 unsigned char *
const data_infrared2, rs2::align *
const align_to,
double *ts)
606 auto data =
m_pipe.wait_for_frames();
607 if (align_to !=
nullptr) {
610 #if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
611 data = align_to->process(data);
613 data = align_to->proccess(data);
617 auto color_frame = data.get_color_frame();
618 if (data_image !=
nullptr) {
622 if (data_depth !=
nullptr || data_pointCloud !=
nullptr || pointcloud !=
nullptr) {
623 auto depth_frame = data.get_depth_frame();
624 if (data_depth !=
nullptr) {
628 if (data_pointCloud !=
nullptr) {
632 if (pointcloud !=
nullptr) {
637 if (data_infrared1 !=
nullptr) {
638 auto infrared_frame = data.first(RS2_STREAM_INFRARED);
642 if (data_infrared2 !=
nullptr) {
643 auto infrared_frame = data.get_infrared_frame(2);
648 *ts = data.get_timestamp();
685 auto rs_stream =
m_pipelineProfile.get_stream(stream, index).as<rs2::video_stream_profile>();
686 auto intrinsics = rs_stream.get_intrinsics();
689 double u0 = intrinsics.ppx;
690 double v0 = intrinsics.ppy;
691 double px = intrinsics.fx;
692 double py = intrinsics.fy;
696 double kdu = intrinsics.coeffs[0];
701 std::vector<double> tmp_coefs;
702 tmp_coefs.push_back(
static_cast<double>(intrinsics.coeffs[0]));
703 tmp_coefs.push_back(
static_cast<double>(intrinsics.coeffs[1]));
704 tmp_coefs.push_back(
static_cast<double>(intrinsics.coeffs[2]));
705 tmp_coefs.push_back(
static_cast<double>(intrinsics.coeffs[3]));
706 tmp_coefs.push_back(
static_cast<double>(intrinsics.coeffs[4]));
730 auto vsp =
m_pipelineProfile.get_stream(stream, index).as<rs2::video_stream_profile>();
731 return vsp.get_intrinsics();
736 auto vf = frame.as<rs2::video_frame>();
737 unsigned int width = (
unsigned int)vf.get_width();
738 unsigned int height = (
unsigned int)vf.get_height();
739 color.
resize(height, width);
741 if (frame.get_profile().format() == RS2_FORMAT_RGB8) {
743 reinterpret_cast<unsigned char *
>(color.
bitmap), width, height);
745 else if (frame.get_profile().format() == RS2_FORMAT_RGBA8) {
746 memcpy(
reinterpret_cast<unsigned char *
>(color.
bitmap),
747 const_cast<unsigned char *
>(
static_cast<const unsigned char *
>(frame.get_data())),
748 width * height *
sizeof(
vpRGBa));
750 else if (frame.get_profile().format() == RS2_FORMAT_BGR8) {
752 reinterpret_cast<unsigned char *
>(color.
bitmap), width, height);
766 rs2::pipeline *pipe =
new rs2::pipeline;
767 rs2::pipeline_profile *pipelineProfile =
new rs2::pipeline_profile;
768 *pipelineProfile = pipe->start();
770 rs2::device dev = pipelineProfile->get_device();
773 for (rs2::sensor &sensor : dev.query_sensors()) {
775 if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
782 delete pipelineProfile;
790 auto vf = frame.as<rs2::video_frame>();
791 unsigned int width = (
unsigned int)vf.get_width();
792 unsigned int height = (
unsigned int)vf.get_height();
793 grey.
resize(height, width);
795 if (frame.get_profile().format() == RS2_FORMAT_RGB8) {
797 grey.
bitmap, width, height);
799 else if (frame.get_profile().format() == RS2_FORMAT_RGBA8) {
801 grey.
bitmap, width * height);
803 else if (frame.get_profile().format() == RS2_FORMAT_BGR8) {
805 grey.
bitmap, width, height);
814 auto vf = frame.as<rs2::video_frame>();
815 int size = vf.get_width() * vf.get_height();
817 switch (frame.get_profile().format()) {
818 case RS2_FORMAT_RGB8:
819 case RS2_FORMAT_BGR8:
820 memcpy(data, frame.get_data(), size * 3);
823 case RS2_FORMAT_RGBA8:
824 case RS2_FORMAT_BGRA8:
825 memcpy(data, frame.get_data(), size * 4);
830 memcpy(data, frame.get_data(), size * 2);
834 memcpy(data, frame.get_data(), size);
844 if (
m_depthScale <= std::numeric_limits<float>::epsilon()) {
845 std::stringstream ss;
850 auto vf = depth_frame.as<rs2::video_frame>();
851 const int width = vf.get_width();
852 const int height = vf.get_height();
853 pointcloud.resize((
size_t)(width * height));
855 const uint16_t *p_depth_frame =
reinterpret_cast<const uint16_t *
>(depth_frame.get_data());
856 const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
860 #pragma omp parallel for schedule(dynamic)
861 for (
int i = 0; i < height; i++) {
862 auto depth_pixel_index = i * width;
864 for (
int j = 0; j < width; j++, depth_pixel_index++) {
865 if (p_depth_frame[depth_pixel_index] == 0) {
866 pointcloud[(size_t)depth_pixel_index].resize(4,
false);
870 pointcloud[(size_t)depth_pixel_index][3] = 1.0;
875 auto pixels_distance =
m_depthScale * p_depth_frame[depth_pixel_index];
878 const float pixel[] = { (float)j, (
float)i };
879 rs2_deproject_pixel_to_point(points, &depth_intrinsics, pixel, pixels_distance);
884 pointcloud[(size_t)depth_pixel_index].resize(4,
false);
885 pointcloud[(size_t)depth_pixel_index][0] = points[0];
886 pointcloud[(size_t)depth_pixel_index][1] = points[1];
887 pointcloud[(size_t)depth_pixel_index][2] = points[2];
888 pointcloud[(size_t)depth_pixel_index][3] = 1.0;
893 #if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
896 if (
m_depthScale <= std::numeric_limits<float>::epsilon()) {
897 std::stringstream ss;
902 auto vf = depth_frame.as<rs2::video_frame>();
903 const int width = vf.get_width();
904 const int height = vf.get_height();
905 pointcloud->width = (uint32_t)width;
906 pointcloud->height = (uint32_t)height;
907 pointcloud->resize((
size_t)(width * height));
909 #if MANUAL_POINTCLOUD
910 const uint16_t *p_depth_frame =
reinterpret_cast<const uint16_t *
>(depth_frame.get_data());
911 const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
915 #pragma omp parallel for schedule(dynamic)
916 for (
int i = 0; i < height; i++) {
917 auto depth_pixel_index = i * width;
919 for (
int j = 0; j < width; j++, depth_pixel_index++) {
920 if (p_depth_frame[depth_pixel_index] == 0) {
928 auto pixels_distance =
m_depthScale * p_depth_frame[depth_pixel_index];
931 const float pixel[] = { (float)j, (
float)i };
932 rs2_deproject_pixel_to_point(points, &depth_intrinsics, pixel, pixels_distance);
937 pointcloud->points[(size_t)(depth_pixel_index)].x = points[0];
938 pointcloud->points[(size_t)(depth_pixel_index)].y = points[1];
939 pointcloud->points[(size_t)(depth_pixel_index)].z = points[2];
944 auto vertices =
m_points.get_vertices();
946 for (
size_t i = 0; i <
m_points.size(); i++) {
947 if (vertices[i].z <= 0.0f || vertices[i].z >
m_max_Z) {
953 pointcloud->points[i].x = vertices[i].x;
954 pointcloud->points[i].y = vertices[i].y;
955 pointcloud->points[i].z = vertices[i].z;
962 pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud)
964 if (
m_depthScale <= std::numeric_limits<float>::epsilon()) {
968 auto vf = depth_frame.as<rs2::video_frame>();
969 const int depth_width = vf.get_width();
970 const int depth_height = vf.get_height();
971 pointcloud->width =
static_cast<uint32_t
>(depth_width);
972 pointcloud->height =
static_cast<uint32_t
>(depth_height);
973 pointcloud->resize(
static_cast<uint32_t
>(depth_width * depth_height));
975 vf = color_frame.as<rs2::video_frame>();
976 const int color_width = vf.get_width();
977 const int color_height = vf.get_height();
979 const uint16_t *p_depth_frame =
reinterpret_cast<const uint16_t *
>(depth_frame.get_data());
980 const rs2_extrinsics depth2ColorExtrinsics =
981 depth_frame.get_profile().as<rs2::video_stream_profile>().get_extrinsics_to(
982 color_frame.get_profile().as<rs2::video_stream_profile>());
983 const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
984 const rs2_intrinsics color_intrinsics = color_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
986 auto color_format = color_frame.as<rs2::video_frame>().get_profile().format();
987 const bool swap_rb = color_format == RS2_FORMAT_BGR8 || color_format == RS2_FORMAT_BGRA8;
988 unsigned int nb_color_pixel = (color_format == RS2_FORMAT_RGB8 || color_format == RS2_FORMAT_BGR8) ? 3 : 4;
989 const unsigned char *p_color_frame =
static_cast<const unsigned char *
>(color_frame.get_data());
990 rs2_extrinsics identity;
991 memset(identity.rotation, 0,
sizeof(identity.rotation));
992 memset(identity.translation, 0,
sizeof(identity.translation));
993 for (
int i = 0; i < 3; i++) {
994 identity.rotation[i * 3 + i] = 1;
996 const bool registered_streams =
997 (depth2ColorExtrinsics == identity) && (color_width == depth_width) && (color_height == depth_height);
1001 #pragma omp parallel for schedule(dynamic)
1002 for (
int i = 0; i < depth_height; i++) {
1003 auto depth_pixel_index = i * depth_width;
1005 for (
int j = 0; j < depth_width; j++, depth_pixel_index++) {
1006 if (p_depth_frame[depth_pixel_index] == 0) {
1014 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1015 unsigned int r = 96, g = 157, b = 198;
1016 uint32_t rgb = (
static_cast<uint32_t
>(r) << 16 |
static_cast<uint32_t
>(g) << 8 |
static_cast<uint32_t
>(b));
1018 pointcloud->points[(size_t)depth_pixel_index].rgb = *
reinterpret_cast<float *
>(&rgb);
1020 pointcloud->points[(size_t)depth_pixel_index].r = (uint8_t)96;
1021 pointcloud->points[(size_t)depth_pixel_index].g = (uint8_t)157;
1022 pointcloud->points[(size_t)depth_pixel_index].b = (uint8_t)198;
1028 auto pixels_distance =
m_depthScale * p_depth_frame[depth_pixel_index];
1030 float depth_point[3];
1031 const float pixel[] = { (float)j, (
float)i };
1032 rs2_deproject_pixel_to_point(depth_point, &depth_intrinsics, pixel, pixels_distance);
1034 if (pixels_distance >
m_max_Z) {
1038 pointcloud->points[(size_t)depth_pixel_index].x = depth_point[0];
1039 pointcloud->points[(size_t)depth_pixel_index].y = depth_point[1];
1040 pointcloud->points[(size_t)depth_pixel_index].z = depth_point[2];
1042 if (!registered_streams) {
1043 float color_point[3];
1044 rs2_transform_point_to_point(color_point, &depth2ColorExtrinsics, depth_point);
1045 float color_pixel[2];
1046 rs2_project_point_to_pixel(color_pixel, &color_intrinsics, color_point);
1048 if (color_pixel[1] < 0 || color_pixel[1] >= color_height || color_pixel[0] < 0 ||
1049 color_pixel[0] >= color_width) {
1053 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1054 unsigned int r = 96, g = 157, b = 198;
1055 uint32_t rgb = (
static_cast<uint32_t
>(r) << 16 |
static_cast<uint32_t
>(g) << 8 |
static_cast<uint32_t
>(b));
1057 pointcloud->points[(size_t)depth_pixel_index].rgb = *
reinterpret_cast<float *
>(&rgb);
1059 pointcloud->points[(size_t)depth_pixel_index].r = (uint8_t)96;
1060 pointcloud->points[(size_t)depth_pixel_index].g = (uint8_t)157;
1061 pointcloud->points[(size_t)depth_pixel_index].b = (uint8_t)198;
1065 unsigned int i_ = (
unsigned int)color_pixel[1];
1066 unsigned int j_ = (
unsigned int)color_pixel[0];
1068 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1072 (
static_cast<uint32_t
>(p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel]) |
1073 static_cast<uint32_t
>(p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 1]) << 8 |
1074 static_cast<uint32_t
>(p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 2])
1079 (
static_cast<uint32_t
>(p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel]) << 16 |
1080 static_cast<uint32_t
>(p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 1]) << 8 |
1081 static_cast<uint32_t
>(p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 2]));
1084 pointcloud->points[(size_t)(i * depth_width + j)].rgb = *
reinterpret_cast<float *
>(&rgb);
1087 pointcloud->points[(size_t)depth_pixel_index].b =
1088 p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel];
1089 pointcloud->points[(size_t)depth_pixel_index].g =
1090 p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 1];
1091 pointcloud->points[(size_t)depth_pixel_index].r =
1092 p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 2];
1095 pointcloud->points[(size_t)depth_pixel_index].r =
1096 p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel];
1097 pointcloud->points[(size_t)depth_pixel_index].g =
1098 p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 1];
1099 pointcloud->points[(size_t)depth_pixel_index].b =
1100 p_color_frame[(i_ * (
unsigned int)color_width + j_) * nb_color_pixel + 2];
1106 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1109 rgb = (
static_cast<uint32_t
>(p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel]) |
1110 static_cast<uint32_t
>(p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 1]) << 8 |
1111 static_cast<uint32_t
>(p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 2]) << 16);
1114 rgb = (
static_cast<uint32_t
>(p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel]) << 16 |
1115 static_cast<uint32_t
>(p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 1]) << 8 |
1116 static_cast<uint32_t
>(p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 2]));
1119 pointcloud->points[(size_t)(i * depth_width + j)].rgb = *
reinterpret_cast<float *
>(&rgb);
1122 pointcloud->points[(size_t)depth_pixel_index].b =
1123 p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel];
1124 pointcloud->points[(size_t)depth_pixel_index].g =
1125 p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 1];
1126 pointcloud->points[(size_t)depth_pixel_index].r =
1127 p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 2];
1130 pointcloud->points[(size_t)depth_pixel_index].r =
1131 p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel];
1132 pointcloud->points[(size_t)depth_pixel_index].g =
1133 p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 1];
1134 pointcloud->points[(size_t)depth_pixel_index].b =
1135 p_color_frame[(i * (
unsigned int)color_width + j) * nb_color_pixel + 2];
1156 if (from_index != -1)
1158 if (from_index == 1)
1160 else if (from_index == 2)
1167 rs2_extrinsics extrinsics = from_stream.get_extrinsics_to(to_stream);
1171 for (
unsigned int i = 0; i < 3; i++) {
1172 t[i] = extrinsics.translation[i];
1173 for (
unsigned int j = 0; j < 3; j++)
1174 R[i][j] = extrinsics.rotation[j * 3 + i];
1181 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1194 auto frame =
m_pipe.wait_for_frames();
1195 auto f = frame.first_or_default(RS2_STREAM_POSE);
1196 auto pose_data = f.as<rs2::pose_frame>().get_pose_data();
1199 *ts = frame.get_timestamp();
1201 if (cMw !=
nullptr) {
1202 m_pos[0] =
static_cast<double>(pose_data.translation.x);
1203 m_pos[1] =
static_cast<double>(pose_data.translation.y);
1204 m_pos[2] =
static_cast<double>(pose_data.translation.z);
1206 m_quat[0] =
static_cast<double>(pose_data.rotation.x);
1207 m_quat[1] =
static_cast<double>(pose_data.rotation.y);
1208 m_quat[2] =
static_cast<double>(pose_data.rotation.z);
1209 m_quat[3] =
static_cast<double>(pose_data.rotation.w);
1214 if (odo_vel !=
nullptr) {
1215 odo_vel->
resize(6,
false);
1216 (*odo_vel)[0] =
static_cast<double>(pose_data.velocity.x);
1217 (*odo_vel)[1] =
static_cast<double>(pose_data.velocity.y);
1218 (*odo_vel)[2] =
static_cast<double>(pose_data.velocity.z);
1219 (*odo_vel)[3] =
static_cast<double>(pose_data.angular_velocity.x);
1220 (*odo_vel)[4] =
static_cast<double>(pose_data.angular_velocity.y);
1221 (*odo_vel)[5] =
static_cast<double>(pose_data.angular_velocity.z);
1224 if (odo_acc !=
nullptr) {
1225 odo_acc->
resize(6,
false);
1226 (*odo_acc)[0] =
static_cast<double>(pose_data.acceleration.x);
1227 (*odo_acc)[1] =
static_cast<double>(pose_data.acceleration.y);
1228 (*odo_acc)[2] =
static_cast<double>(pose_data.acceleration.z);
1229 (*odo_acc)[3] =
static_cast<double>(pose_data.angular_acceleration.x);
1230 (*odo_acc)[4] =
static_cast<double>(pose_data.angular_acceleration.y);
1231 (*odo_acc)[5] =
static_cast<double>(pose_data.angular_acceleration.z);
1234 return pose_data.tracker_confidence;
1257 void vpRealSense2::getIMUAcceleration(
vpColVector *imu_acc,
double *ts)
1259 auto frame =
m_pipe.wait_for_frames();
1260 auto f = frame.first_or_default(RS2_STREAM_ACCEL);
1261 auto imu_acc_data = f.as<rs2::motion_frame>().get_motion_data();
1264 *ts = f.get_timestamp();
1266 if (imu_acc !=
nullptr) {
1267 imu_acc->
resize(3,
false);
1268 (*imu_acc)[0] =
static_cast<double>(imu_acc_data.x);
1269 (*imu_acc)[1] =
static_cast<double>(imu_acc_data.y);
1270 (*imu_acc)[2] =
static_cast<double>(imu_acc_data.z);
1294 void vpRealSense2::getIMUVelocity(
vpColVector *imu_vel,
double *ts)
1296 auto frame =
m_pipe.wait_for_frames();
1297 auto f = frame.first_or_default(RS2_STREAM_GYRO);
1298 auto imu_vel_data = f.as<rs2::motion_frame>().get_motion_data();
1301 *ts = f.get_timestamp();
1303 if (imu_vel !=
nullptr) {
1304 imu_vel->
resize(3,
false);
1305 (*imu_vel)[0] =
static_cast<double>(imu_vel_data.x);
1306 (*imu_vel)[1] =
static_cast<double>(imu_vel_data.x);
1307 (*imu_vel)[2] =
static_cast<double>(imu_vel_data.x);
1333 auto data =
m_pipe.wait_for_frames();
1336 *ts = data.get_timestamp();
1338 if (imu_acc !=
nullptr) {
1339 auto acc_data = data.first_or_default(RS2_STREAM_ACCEL);
1340 auto imu_acc_data = acc_data.as<rs2::motion_frame>().get_motion_data();
1342 imu_acc->
resize(3,
false);
1343 (*imu_acc)[0] =
static_cast<double>(imu_acc_data.x);
1344 (*imu_acc)[1] =
static_cast<double>(imu_acc_data.y);
1345 (*imu_acc)[2] =
static_cast<double>(imu_acc_data.z);
1348 if (imu_vel !=
nullptr) {
1349 auto vel_data = data.first_or_default(RS2_STREAM_GYRO);
1350 auto imu_vel_data = vel_data.as<rs2::motion_frame>().get_motion_data();
1352 imu_vel->
resize(3,
false);
1353 (*imu_vel)[0] =
static_cast<double>(imu_vel_data.x);
1354 (*imu_vel)[1] =
static_cast<double>(imu_vel_data.y);
1355 (*imu_vel)[2] =
static_cast<double>(imu_vel_data.z);
1373 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1379 for (rs2::sensor &sensor : dev.query_sensors()) {
1381 if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
1406 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1412 for (rs2::sensor &sensor : dev.query_sensors()) {
1414 if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
1429 #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1439 auto list = ctx.query_devices();
1440 if (list.size() > 0) {
1442 auto dev = list.front();
1443 auto sensors = dev.query_sensors();
1444 if (dev.supports(RS2_CAMERA_INFO_PRODUCT_LINE)) {
1451 return (std::string(
"unknown"));
1466 std::ostringstream oss;
1474 void print(
const rs2_extrinsics &extrinsics, std::ostream &os)
1476 std::stringstream ss;
1477 ss <<
"Rotation Matrix:\n";
1479 for (
auto i = 0; i < 3; ++i) {
1480 for (
auto j = 0; j < 3; ++j) {
1481 ss << std::left << std::setw(15) << std::setprecision(5) << extrinsics.rotation[j * 3 + i];
1486 ss <<
"\nTranslation Vector: ";
1487 for (
size_t i = 0; i <
sizeof(extrinsics.translation) /
sizeof(extrinsics.translation[0]); ++i)
1488 ss << std::setprecision(15) << extrinsics.translation[i] <<
" ";
1490 os << ss.str() <<
"\n\n";
1493 void print(
const rs2_intrinsics &intrinsics, std::ostream &os)
1495 std::stringstream ss;
1496 ss << std::left << std::setw(14) <<
"Width: "
1497 <<
"\t" << intrinsics.width <<
"\n"
1498 << std::left << std::setw(14) <<
"Height: "
1499 <<
"\t" << intrinsics.height <<
"\n"
1500 << std::left << std::setw(14) <<
"PPX: "
1501 <<
"\t" << std::setprecision(15) << intrinsics.ppx <<
"\n"
1502 << std::left << std::setw(14) <<
"PPY: "
1503 <<
"\t" << std::setprecision(15) << intrinsics.ppy <<
"\n"
1504 << std::left << std::setw(14) <<
"Fx: "
1505 <<
"\t" << std::setprecision(15) << intrinsics.fx <<
"\n"
1506 << std::left << std::setw(14) <<
"Fy: "
1507 <<
"\t" << std::setprecision(15) << intrinsics.fy <<
"\n"
1508 << std::left << std::setw(14) <<
"Distortion: "
1509 <<
"\t" << rs2_distortion_to_string(intrinsics.model) <<
"\n"
1510 << std::left << std::setw(14) <<
"Coeffs: ";
1512 for (
size_t i = 0; i <
sizeof(intrinsics.coeffs) /
sizeof(intrinsics.coeffs[0]); ++i)
1513 ss <<
"\t" << std::setprecision(15) << intrinsics.coeffs[i] <<
" ";
1515 os << ss.str() <<
"\n\n";
1518 void safe_get_intrinsics(
const rs2::video_stream_profile &profile, rs2_intrinsics &intrinsics)
1521 intrinsics = profile.get_intrinsics();
1527 bool operator==(
const rs2_intrinsics &lhs,
const rs2_intrinsics &rhs)
1529 return lhs.width == rhs.width && lhs.height == rhs.height && lhs.ppx == rhs.ppx && lhs.ppy == rhs.ppy &&
1530 lhs.fx == rhs.fx && lhs.fy == rhs.fy && lhs.model == rhs.model &&
1531 !std::memcmp(lhs.coeffs, rhs.coeffs,
sizeof(rhs.coeffs));
1534 std::string get_str_formats(
const std::set<rs2_format> &formats)
1536 std::stringstream ss;
1537 for (
auto format = formats.begin(); format != formats.end(); ++format) {
1538 ss << *format << ((format != formats.end()) && (next(format) == formats.end()) ?
"" :
"/");
1543 struct stream_and_resolution
1549 std::string stream_name;
1551 bool operator<(
const stream_and_resolution &obj)
const
1553 return (std::make_tuple(stream, stream_index, width, height) <
1554 std::make_tuple(obj.stream, obj.stream_index, obj.width, obj.height));
1558 struct stream_and_index
1563 bool operator<(
const stream_and_index &obj)
const
1565 return (std::make_tuple(stream, stream_index) < std::make_tuple(obj.stream, obj.stream_index));
// NOTE(review): this span is the interior of the device pretty-printer
// (presumably operator<<(std::ostream &, const vpRealSense2 &)); its
// signature, the `dev` declaration and several lines were lost by the
// extraction, so the code is kept byte-identical and only annotated.
// -- Device identity: name, serial number and firmware version in columns.
1597 os << std::left << std::setw(30) << dev.get_info(RS2_CAMERA_INFO_NAME) << std::setw(20)
1598 << dev.get_info(RS2_CAMERA_INFO_SERIAL_NUMBER) << std::setw(20) << dev.get_info(RS2_CAMERA_INFO_FIRMWARE_VERSION)
// -- Enumerate every rs2_camera_info field the device reports support for.
1602 os <<
" Device info: \n";
1603 for (
auto j = 0; j < RS2_CAMERA_INFO_COUNT; ++j) {
1604 auto param =
static_cast<rs2_camera_info
>(j);
1605 if (dev.supports(param))
1606 os <<
" " << std::left << std::setw(30) << rs2_camera_info_to_string(rs2_camera_info(param)) <<
": \t"
1607 << dev.get_info(param) <<
"\n";
// -- Per-sensor option table: min / max / step / default for each
//    rs2_option the sensor supports.
1612 for (
auto &&sensor : dev.query_sensors()) {
1613 os <<
"Options for " << sensor.get_info(RS2_CAMERA_INFO_NAME) << std::endl;
1615 os << std::setw(55) <<
" Supported options:" << std::setw(10) <<
"min" << std::setw(10) <<
" max" << std::setw(6)
1616 <<
" step" << std::setw(10) <<
" default" << std::endl;
1617 for (
auto j = 0; j < RS2_OPTION_COUNT; ++j) {
1618 auto opt =
static_cast<rs2_option
>(j);
1619 if (sensor.supports(opt)) {
1620 auto range = sensor.get_option_range(opt);
1621 os <<
" " << std::left << std::setw(50) << opt <<
" : " << std::setw(5) << range.min <<
"... "
1622 << std::setw(12) << range.max << std::setw(6) << range.step << std::setw(10) << range.def <<
"\n";
// -- Per-sensor stream profiles: resolution, fps and pixel format
//    (video profiles print WxH, other profiles only fps/format).
1629 for (
auto &&sensor : dev.query_sensors()) {
1630 os <<
"Stream Profiles supported by " << sensor.get_info(RS2_CAMERA_INFO_NAME) <<
"\n";
1632 os << std::setw(55) <<
" Supported modes:" << std::setw(10) <<
"stream" << std::setw(10) <<
" resolution"
1633 << std::setw(6) <<
" fps" << std::setw(10) <<
" format"
1636 for (
auto &&profile : sensor.get_stream_profiles()) {
1637 if (
auto video = profile.as<rs2::video_stream_profile>()) {
1638 os <<
" " << profile.stream_name() <<
"\t " << video.width() <<
"x" << video.height() <<
"\t@ "
1639 << profile.fps() <<
"Hz\t" << profile.format() <<
"\n";
1642 os <<
" " << profile.stream_name() <<
"\t@ " << profile.fps() <<
"Hz\t" << profile.format() <<
"\n";
// -- Collect one representative profile per (stream, index) and group the
//    intrinsics by (stream, index, resolution); formats whose intrinsics
//    compare equal are merged into the same std::set<rs2_format>.
1649 std::map<stream_and_index, rs2::stream_profile> streams;
1650 std::map<stream_and_resolution, std::vector<std::pair<std::set<rs2_format>, rs2_intrinsics> > > intrinsics_map;
1651 for (
auto &&sensor : dev.query_sensors()) {
1653 for (
auto &&profile : sensor.get_stream_profiles()) {
1654 if (
auto video = profile.as<rs2::video_stream_profile>()) {
1655 if (streams.find(stream_and_index { profile.stream_type(), profile.stream_index() }) == streams.end()) {
1656 streams[stream_and_index { profile.stream_type(), profile.stream_index() }] = profile;
1659 rs2_intrinsics intrinsics {};
1660 stream_and_resolution stream_res { profile.stream_type(), profile.stream_index(), video.width(), video.height(),
1661 profile.stream_name() };
1662 safe_get_intrinsics(video, intrinsics);
1663 auto it = std::find_if(
1664 (intrinsics_map[stream_res]).begin(), (intrinsics_map[stream_res]).end(),
1665 [&](
const std::pair<std::set<rs2_format>, rs2_intrinsics> &kvp) {
return intrinsics == kvp.second; });
1666 if (it == (intrinsics_map[stream_res]).end()) {
1667 (intrinsics_map[stream_res]).push_back({ {profile.format()}, intrinsics });
1670 it->first.insert(profile.format());
// -- Dump the collected intrinsics; a value-initialized rs2_intrinsics {}
//    marks intrinsics that could not be retrieved.
1678 os <<
"Provided Intrinsic:\n";
1679 for (
auto &kvp : intrinsics_map) {
1680 auto stream_res = kvp.first;
1681 for (
auto &intrinsics : kvp.second) {
1682 auto formats = get_str_formats(intrinsics.first);
1683 os <<
"Intrinsic of \"" << stream_res.stream_name <<
"\"\t " << stream_res.width <<
"x" << stream_res.height
1684 <<
"\t " << formats <<
"\n";
1685 if (intrinsics.second == rs2_intrinsics {}) {
1686 os <<
"Intrinsic NOT available!\n\n";
1689 print(intrinsics.second, os);
// -- Dump pairwise extrinsics between every pair of collected streams.
1695 os <<
"\nProvided Extrinsic:\n";
1696 for (
auto kvp1 = streams.begin(); kvp1 != streams.end(); ++kvp1) {
1697 for (
auto kvp2 = streams.begin(); kvp2 != streams.end(); ++kvp2) {
1698 os <<
"Extrinsic from \"" << kvp1->second.stream_name() <<
"\"\t "
1700 <<
"\t \"" << kvp2->second.stream_name() <<
"\"\n";
1701 auto extrinsics = kvp1->second.get_extrinsics_to(kvp2->second);
1702 print(extrinsics, os);
1709 #elif !defined(VISP_BUILD_SHARED_LIBS)
// Dummy symbol emitted when librealsense2 support is disabled, so that this
// translation unit is never empty in a static-library build.
// Fix: dropped the stray ';' after the function body (an extra empty
// declaration that -Wextra-semi / pedantic builds warn about).
void dummy_vpRealSense2() { }
bool operator==(const vpArray2D< double > &A) const
friend std::ostream & operator<<(std::ostream &s, const vpArray2D< Type > &A)
Generic class defining intrinsic camera parameters.
void initPersProjWithoutDistortion(double px, double py, double u0, double v0)
vpCameraParametersProjType
@ perspectiveProjWithDistortion
Perspective projection with distortion model.
@ ProjWithKannalaBrandtDistortion
Projection with Kannala-Brandt distortion model.
@ perspectiveProjWithoutDistortion
Perspective projection without distortion model.
void initPersProjWithDistortion(double px, double py, double u0, double v0, double kud, double kdu)
void initProjWithKannalaBrandtDistortion(double px, double py, double u0, double v0, const std::vector< double > &distortion_coefficients)
Implementation of column vector and the associated operations.
void resize(unsigned int i, bool flagNullify=true)
error that can be emitted by ViSP classes.
Implementation of an homogeneous matrix and operations on such kind of matrices.
static void RGBToGrey(unsigned char *rgb, unsigned char *grey, unsigned int width, unsigned int height, bool flip=false)
static void RGBaToGrey(unsigned char *rgba, unsigned char *grey, unsigned int width, unsigned int height, unsigned int nThreads=0)
static void RGBToRGBa(unsigned char *rgb, unsigned char *rgba, unsigned int size)
static void BGRToGrey(unsigned char *bgr, unsigned char *grey, unsigned int width, unsigned int height, bool flip=false, unsigned int nThreads=0)
static void BGRToRGBa(unsigned char *bgr, unsigned char *rgba, unsigned int width, unsigned int height, bool flip=false)
void resize(unsigned int h, unsigned int w)
resize the image : Image initialization
Type * bitmap
points toward the bitmap
void getNativeFrameData(const rs2::frame &frame, unsigned char *const data)
vpCameraParameters getCameraParameters(const rs2_stream &stream, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithDistortion, int index=-1) const
std::string getSensorInfo()
void acquire(vpImage< unsigned char > &grey, double *ts=nullptr)
vpQuaternionVector m_quat
bool open(const rs2::config &cfg=rs2::config())
rs2_intrinsics getIntrinsics(const rs2_stream &stream, int index=-1) const
rs2::pointcloud m_pointcloud
void getPointcloud(const rs2::depth_frame &depth_frame, std::vector< vpColVector > &pointcloud)
void getColorFrame(const rs2::frame &frame, vpImage< vpRGBa > &color)
vpTranslationVector m_pos
std::string m_product_line
std::string getProductLine()
vpHomogeneousMatrix getTransformation(const rs2_stream &from, const rs2_stream &to, int from_index=-1) const
void getGreyFrame(const rs2::frame &frame, vpImage< unsigned char > &grey)
float m_invalidDepthValue
rs2::pipeline_profile m_pipelineProfile
Implementation of a rotation matrix and operations on such kind of matrices.
Class that consider the case of a translation vector.