#include <visp3/core/vpConfig.h>

#if defined(VISP_HAVE_REALSENSE2) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
#include <cstring>
#include <iomanip>
#include <map>
#include <set>
#include <visp3/core/vpImageConvert.h>
#include <visp3/sensor/vpRealSense2.h>

#define MANUAL_POINTCLOUD 1

bool operator==(const rs2_extrinsics &lhs, const rs2_extrinsics &rhs)
{
  for (int i = 0; i < 3; i++) {
    for (int j = 0; j < 3; j++) {
      if (std::fabs(lhs.rotation[i * 3 + j] - rhs.rotation[i * 3 + j]) > std::numeric_limits<float>::epsilon()) {
        return false;
      }
    }

    if (std::fabs(lhs.translation[i] - rhs.translation[i]) > std::numeric_limits<float>::epsilon()) {
      return false;
    }
  }

  return true;
}
vpRealSense2::vpRealSense2()
  : m_depthScale(0.0f), m_invalidDepthValue(0.0f), m_max_Z(8.0f), m_pipe(), m_pipelineProfile(), m_pointcloud(),
    m_points()
{
}
void vpRealSense2::acquire(vpImage<unsigned char> &grey)
{
  auto data = m_pipe.wait_for_frames();
  auto color_frame = data.get_color_frame();
  getGreyFrame(color_frame, grey);
}

void vpRealSense2::acquire(vpImage<vpRGBa> &color)
{
  auto data = m_pipe.wait_for_frames();
  auto color_frame = data.get_color_frame();
  getColorFrame(color_frame, color);
}
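// Usage sketch (illustration, not part of the original file): grabbing colour and
// grey-level images with the two overloads above. The stream resolution, format and
// frame rate passed to rs2::config below are assumptions.
//
//   #include <visp3/sensor/vpRealSense2.h>
//
//   int main()
//   {
//     vpRealSense2 rs;
//     rs2::config cfg;
//     cfg.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30);
//     rs.open(cfg);
//
//     vpImage<vpRGBa> color;
//     vpImage<unsigned char> grey;
//     for (int i = 0; i < 100; i++) {
//       rs.acquire(color); // blocks on wait_for_frames(), converts to RGBa
//       rs.acquire(grey);  // blocks on wait_for_frames(), converts to grey level
//     }
//     return 0;
//   }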
void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
                           std::vector<vpColVector> *const data_pointCloud, unsigned char *const data_infrared,
                           rs2::align *const align_to)
{
  acquire(data_image, data_depth, data_pointCloud, data_infrared, NULL, align_to);
}
void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
                           std::vector<vpColVector> *const data_pointCloud, unsigned char *const data_infrared1,
                           unsigned char *const data_infrared2, rs2::align *const align_to)
{
  auto data = m_pipe.wait_for_frames();
  if (align_to != NULL) {
    // Align depth to the requested stream. Older librealsense releases (<= 2.9.0)
    // expose the misspelled proccess() method.
#if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
    data = align_to->process(data);
#else
    data = align_to->proccess(data);
#endif
  }

  if (data_image != NULL) {
    auto color_frame = data.get_color_frame();
    getNativeFrameData(color_frame, data_image);
  }

  if (data_depth != NULL || data_pointCloud != NULL) {
    auto depth_frame = data.get_depth_frame();
    if (data_depth != NULL) {
      getNativeFrameData(depth_frame, data_depth);
    }

    if (data_pointCloud != NULL) {
      getPointcloud(depth_frame, *data_pointCloud);
    }
  }

  if (data_infrared1 != NULL) {
    auto infrared_frame = data.first(RS2_STREAM_INFRARED);
    getNativeFrameData(infrared_frame, data_infrared1);
  }

  if (data_infrared2 != NULL) {
    auto infrared_frame = data.get_infrared_frame(2);
    getNativeFrameData(infrared_frame, data_infrared2);
  }
}
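// Usage sketch (illustration, not part of the original file): filling raw colour and
// depth buffers, with depth aligned to the colour stream. Buffer sizes follow the
// assumed 640x480 RGBA8/Z16 profiles and must match what was enabled in open().
//
//   vpRealSense2 rs;
//   rs2::config cfg;
//   cfg.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30);
//   cfg.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, 30);
//   rs.open(cfg);
//
//   std::vector<unsigned char> color(640 * 480 * 4); // RGBA8: 4 bytes per pixel
//   std::vector<unsigned char> depth(640 * 480 * 2); // Z16:   2 bytes per pixel
//   rs2::align align_to(RS2_STREAM_COLOR);
//   rs.acquire(color.data(), depth.data(), NULL, NULL, &align_to);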
void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
                           std::vector<vpColVector> *const data_pointCloud,
                           pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud, unsigned char *const data_infrared,
                           rs2::align *const align_to)
{
  acquire(data_image, data_depth, data_pointCloud, pointcloud, data_infrared, NULL, align_to);
}
void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
                           std::vector<vpColVector> *const data_pointCloud,
                           pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud, unsigned char *const data_infrared1,
                           unsigned char *const data_infrared2, rs2::align *const align_to)
{
  auto data = m_pipe.wait_for_frames();
  if (align_to != NULL) {
#if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
    data = align_to->process(data);
#else
    data = align_to->proccess(data);
#endif
  }

  if (data_image != NULL) {
    auto color_frame = data.get_color_frame();
    getNativeFrameData(color_frame, data_image);
  }

  if (data_depth != NULL || data_pointCloud != NULL || pointcloud != NULL) {
    auto depth_frame = data.get_depth_frame();
    if (data_depth != NULL) {
      getNativeFrameData(depth_frame, data_depth);
    }

    if (data_pointCloud != NULL) {
      getPointcloud(depth_frame, *data_pointCloud);
    }

    if (pointcloud != NULL) {
      getPointcloud(depth_frame, pointcloud);
    }
  }

  if (data_infrared1 != NULL) {
    auto infrared_frame = data.first(RS2_STREAM_INFRARED);
    getNativeFrameData(infrared_frame, data_infrared1);
  }

  if (data_infrared2 != NULL) {
    auto infrared_frame = data.get_infrared_frame(2);
    getNativeFrameData(infrared_frame, data_infrared2);
  }
}
void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
                           std::vector<vpColVector> *const data_pointCloud,
                           pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud, unsigned char *const data_infrared,
                           rs2::align *const align_to)
{
  acquire(data_image, data_depth, data_pointCloud, pointcloud, data_infrared, NULL, align_to);
}
void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
                           std::vector<vpColVector> *const data_pointCloud,
                           pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud, unsigned char *const data_infrared1,
                           unsigned char *const data_infrared2, rs2::align *const align_to)
{
  auto data = m_pipe.wait_for_frames();
  if (align_to != NULL) {
#if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
    data = align_to->process(data);
#else
    data = align_to->proccess(data);
#endif
  }

  auto color_frame = data.get_color_frame();
  if (data_image != NULL) {
    getNativeFrameData(color_frame, data_image);
  }

  if (data_depth != NULL || data_pointCloud != NULL || pointcloud != NULL) {
    auto depth_frame = data.get_depth_frame();
    if (data_depth != NULL) {
      getNativeFrameData(depth_frame, data_depth);
    }

    if (data_pointCloud != NULL) {
      getPointcloud(depth_frame, *data_pointCloud);
    }

    if (pointcloud != NULL) {
      getPointcloud(depth_frame, color_frame, pointcloud);
    }
  }

  if (data_infrared1 != NULL) {
    auto infrared_frame = data.first(RS2_STREAM_INFRARED);
    getNativeFrameData(infrared_frame, data_infrared1);
  }

  if (data_infrared2 != NULL) {
    auto infrared_frame = data.get_infrared_frame(2);
    getNativeFrameData(infrared_frame, data_infrared2);
  }
}
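// Usage sketch (illustration, not part of the original file): retrieving a textured
// PCL point cloud (requires ViSP built with PCL support). Stream settings are
// assumptions; unused outputs are passed as NULL.
//
//   vpRealSense2 rs;
//   rs2::config cfg;
//   cfg.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGB8, 30);
//   cfg.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, 30);
//   rs.open(cfg);
//
//   pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZRGB>);
//   rs2::align align_to(RS2_STREAM_COLOR);
//   rs.acquire(NULL, NULL, NULL, cloud, NULL, &align_to);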
vpCameraParameters vpRealSense2::getCameraParameters(const rs2_stream &stream,
                                                     vpCameraParameters::vpCameraParametersProjType type) const
{
  auto rs_stream = m_pipelineProfile.get_stream(stream).as<rs2::video_stream_profile>();
  auto intrinsics = rs_stream.get_intrinsics();

  vpCameraParameters cam;
  double u0 = intrinsics.ppx;
  double v0 = intrinsics.ppy;
  double px = intrinsics.fx;
  double py = intrinsics.fy;

  if (type == vpCameraParameters::perspectiveProjWithDistortion) {
    double kdu = intrinsics.coeffs[0];
    cam.initPersProjWithDistortion(px, py, u0, v0, -kdu, kdu);
  } else {
    cam.initPersProjWithoutDistortion(px, py, u0, v0);
  }

  return cam;
}

rs2_intrinsics vpRealSense2::getIntrinsics(const rs2_stream &stream) const
{
  auto vsp = m_pipelineProfile.get_stream(stream).as<rs2::video_stream_profile>();
  return vsp.get_intrinsics();
}
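// Usage sketch (illustration, not part of the original file): converting stream
// intrinsics into ViSP camera parameters, given a vpRealSense2 instance rs whose
// pipeline has been opened with colour and depth streams.
//
//   vpCameraParameters cam_dist = rs.getCameraParameters(RS2_STREAM_COLOR);
//   vpCameraParameters cam = rs.getCameraParameters(RS2_STREAM_COLOR,
//                                                   vpCameraParameters::perspectiveProjWithoutDistortion);
//   rs2_intrinsics depth_intrinsics = rs.getIntrinsics(RS2_STREAM_DEPTH); // raw librealsense intrinsics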
void vpRealSense2::getColorFrame(const rs2::frame &frame, vpImage<vpRGBa> &color)
{
  auto vf = frame.as<rs2::video_frame>();
  unsigned int width = (unsigned int)vf.get_width();
  unsigned int height = (unsigned int)vf.get_height();
  color.resize(height, width);

  if (frame.get_profile().format() == RS2_FORMAT_RGB8) {
    vpImageConvert::RGBToRGBa(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
                              reinterpret_cast<unsigned char *>(color.bitmap), width, height);
  } else if (frame.get_profile().format() == RS2_FORMAT_RGBA8) {
    memcpy(reinterpret_cast<unsigned char *>(color.bitmap),
           const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
           width * height * sizeof(vpRGBa));
  } else if (frame.get_profile().format() == RS2_FORMAT_BGR8) {
    vpImageConvert::BGRToRGBa(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
                              reinterpret_cast<unsigned char *>(color.bitmap), width, height);
  } else {
    throw vpException(vpException::fatalError, "RealSense Camera - color stream not supported!");
  }
}
void vpRealSense2::getGreyFrame(const rs2::frame &frame, vpImage<unsigned char> &grey)
{
  auto vf = frame.as<rs2::video_frame>();
  unsigned int width = (unsigned int)vf.get_width();
  unsigned int height = (unsigned int)vf.get_height();
  grey.resize(height, width);

  if (frame.get_profile().format() == RS2_FORMAT_RGB8) {
    vpImageConvert::RGBToGrey(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
                              grey.bitmap, width, height);
  } else if (frame.get_profile().format() == RS2_FORMAT_RGBA8) {
    vpImageConvert::RGBaToGrey(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
                               grey.bitmap, width * height);
  } else if (frame.get_profile().format() == RS2_FORMAT_BGR8) {
    vpImageConvert::BGRToGrey(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
                              grey.bitmap, width, height);
  } else {
    throw vpException(vpException::fatalError, "RealSense Camera - color stream not supported!");
  }
}
void vpRealSense2::getNativeFrameData(const rs2::frame &frame, unsigned char *const data)
{
  auto vf = frame.as<rs2::video_frame>();
  int size = vf.get_width() * vf.get_height();

  switch (frame.get_profile().format()) {
  case RS2_FORMAT_RGB8:
  case RS2_FORMAT_BGR8:
    memcpy(data, frame.get_data(), size * 3);
    break;

  case RS2_FORMAT_RGBA8:
  case RS2_FORMAT_BGRA8:
    memcpy(data, frame.get_data(), size * 4);
    break;

  case RS2_FORMAT_Y16:
  case RS2_FORMAT_Z16:
    memcpy(data, frame.get_data(), size * 2);
    break;

  case RS2_FORMAT_Y8:
    memcpy(data, frame.get_data(), size);
    break;

  default:
    break;
  }
}
void vpRealSense2::getPointcloud(const rs2::depth_frame &depth_frame, std::vector<vpColVector> &pointcloud)
{
  if (m_depthScale <= std::numeric_limits<float>::epsilon()) {
    std::stringstream ss;
    ss << "Error, depth scale <= 0: " << m_depthScale;
    throw vpException(vpException::fatalError, ss.str());
  }

  auto vf = depth_frame.as<rs2::video_frame>();
  const int width = vf.get_width();
  const int height = vf.get_height();
  pointcloud.resize((size_t)(width * height));

  const uint16_t *p_depth_frame = reinterpret_cast<const uint16_t *>(depth_frame.get_data());
  const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();

  // Multi-threading if OpenMP; concurrent writes at different locations are safe
#pragma omp parallel for schedule(dynamic)
  for (int i = 0; i < height; i++) {
    auto depth_pixel_index = i * width;

    for (int j = 0; j < width; j++, depth_pixel_index++) {
      if (p_depth_frame[depth_pixel_index] == 0) {
        pointcloud[(size_t)depth_pixel_index].resize(4, false);
        pointcloud[(size_t)depth_pixel_index][0] = m_invalidDepthValue;
        pointcloud[(size_t)depth_pixel_index][1] = m_invalidDepthValue;
        pointcloud[(size_t)depth_pixel_index][2] = m_invalidDepthValue;
        pointcloud[(size_t)depth_pixel_index][3] = 1.0;
        continue;
      }

      // Get the depth value of the current pixel
      auto pixels_distance = m_depthScale * p_depth_frame[depth_pixel_index];

      float points[3];
      const float pixel[] = { (float)j, (float)i };
      rs2_deproject_pixel_to_point(points, &depth_intrinsics, pixel, pixels_distance);

      if (pixels_distance > m_max_Z)
        points[0] = points[1] = points[2] = m_invalidDepthValue;

      pointcloud[(size_t)depth_pixel_index].resize(4, false);
      pointcloud[(size_t)depth_pixel_index][0] = points[0];
      pointcloud[(size_t)depth_pixel_index][1] = points[1];
      pointcloud[(size_t)depth_pixel_index][2] = points[2];
      pointcloud[(size_t)depth_pixel_index][3] = 1.0;
    }
  }
}
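// Consumption sketch (illustration, not part of the original file): the acquire()
// overloads fill this structure so that each entry is a homogeneous 4-vector
// (X, Y, Z, 1) expressed in the depth camera frame; pixels with zero depth or
// beyond m_max_Z carry m_invalidDepthValue (0 by default) in X, Y, Z. Given an
// opened vpRealSense2 instance rs:
//
//   std::vector<vpColVector> pointcloud;
//   rs2::align align_to(RS2_STREAM_COLOR);
//   rs.acquire(NULL, NULL, &pointcloud, NULL, &align_to);
//   for (size_t i = 0; i < pointcloud.size(); i++) {
//     double X = pointcloud[i][0], Y = pointcloud[i][1], Z = pointcloud[i][2];
//     // skip the point when Z equals the invalid value
//   }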
void vpRealSense2::getPointcloud(const rs2::depth_frame &depth_frame, pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud)
{
  if (m_depthScale <= std::numeric_limits<float>::epsilon()) {
    std::stringstream ss;
    ss << "Error, depth scale <= 0: " << m_depthScale;
    throw vpException(vpException::fatalError, ss.str());
  }

  auto vf = depth_frame.as<rs2::video_frame>();
  const int width = vf.get_width();
  const int height = vf.get_height();
  pointcloud->width = (uint32_t)width;
  pointcloud->height = (uint32_t)height;
  pointcloud->resize((size_t)(width * height));

#if MANUAL_POINTCLOUD // faster to compute manually when tested
  const uint16_t *p_depth_frame = reinterpret_cast<const uint16_t *>(depth_frame.get_data());
  const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();

  // Multi-threading if OpenMP
#pragma omp parallel for schedule(dynamic)
  for (int i = 0; i < height; i++) {
    auto depth_pixel_index = i * width;

    for (int j = 0; j < width; j++, depth_pixel_index++) {
      if (p_depth_frame[depth_pixel_index] == 0) {
        pointcloud->points[(size_t)(depth_pixel_index)].x = m_invalidDepthValue;
        pointcloud->points[(size_t)(depth_pixel_index)].y = m_invalidDepthValue;
        pointcloud->points[(size_t)(depth_pixel_index)].z = m_invalidDepthValue;
        continue;
      }

      // Get the depth value of the current pixel
      auto pixels_distance = m_depthScale * p_depth_frame[depth_pixel_index];

      float points[3];
      const float pixel[] = { (float)j, (float)i };
      rs2_deproject_pixel_to_point(points, &depth_intrinsics, pixel, pixels_distance);

      if (pixels_distance > m_max_Z)
        points[0] = points[1] = points[2] = m_invalidDepthValue;

      pointcloud->points[(size_t)(depth_pixel_index)].x = points[0];
      pointcloud->points[(size_t)(depth_pixel_index)].y = points[1];
      pointcloud->points[(size_t)(depth_pixel_index)].z = points[2];
    }
  }
#else
  m_points = m_pointcloud.calculate(depth_frame);
  auto vertices = m_points.get_vertices();

  for (size_t i = 0; i < m_points.size(); i++) {
    if (vertices[i].z <= 0.0f || vertices[i].z > m_max_Z) {
      pointcloud->points[i].x = m_invalidDepthValue;
      pointcloud->points[i].y = m_invalidDepthValue;
      pointcloud->points[i].z = m_invalidDepthValue;
    } else {
      pointcloud->points[i].x = vertices[i].x;
      pointcloud->points[i].y = vertices[i].y;
      pointcloud->points[i].z = vertices[i].z;
    }
  }
#endif
}
void vpRealSense2::getPointcloud(const rs2::depth_frame &depth_frame, const rs2::frame &color_frame,
                                 pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud)
{
  if (m_depthScale <= std::numeric_limits<float>::epsilon()) {
    throw vpException(vpException::fatalError, "Error, depth scale <= 0: %f", m_depthScale);
  }

  auto vf = depth_frame.as<rs2::video_frame>();
  const int depth_width = vf.get_width();
  const int depth_height = vf.get_height();
  pointcloud->width = static_cast<uint32_t>(depth_width);
  pointcloud->height = static_cast<uint32_t>(depth_height);
  pointcloud->resize(static_cast<uint32_t>(depth_width * depth_height));

  vf = color_frame.as<rs2::video_frame>();
  const int color_width = vf.get_width();
  const int color_height = vf.get_height();

  const uint16_t *p_depth_frame = reinterpret_cast<const uint16_t *>(depth_frame.get_data());
  const rs2_extrinsics depth2ColorExtrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().
      get_extrinsics_to(color_frame.get_profile().as<rs2::video_stream_profile>());
  const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
  const rs2_intrinsics color_intrinsics = color_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();

  auto color_format = color_frame.as<rs2::video_frame>().get_profile().format();
  const bool swap_rb = color_format == RS2_FORMAT_BGR8 || color_format == RS2_FORMAT_BGRA8;
  unsigned int nb_color_pixel = (color_format == RS2_FORMAT_RGB8 || color_format == RS2_FORMAT_BGR8) ? 3 : 4;
  const unsigned char *p_color_frame = static_cast<const unsigned char *>(color_frame.get_data());

  // Depth and color are considered registered when the extrinsics are the identity
  // and the resolutions match
  rs2_extrinsics identity;
  memset(identity.rotation, 0, sizeof(identity.rotation));
  memset(identity.translation, 0, sizeof(identity.translation));
  for (int i = 0; i < 3; i++) {
    identity.rotation[i * 3 + i] = 1;
  }
  const bool registered_streams = (depth2ColorExtrinsics == identity) &&
                                  (color_width == depth_width) && (color_height == depth_height);

  // Multi-threading if OpenMP
#pragma omp parallel for schedule(dynamic)
  for (int i = 0; i < depth_height; i++) {
    auto depth_pixel_index = i * depth_width;

    for (int j = 0; j < depth_width; j++, depth_pixel_index++) {
      if (p_depth_frame[depth_pixel_index] == 0) {
        pointcloud->points[(size_t)depth_pixel_index].x = m_invalidDepthValue;
        pointcloud->points[(size_t)depth_pixel_index].y = m_invalidDepthValue;
        pointcloud->points[(size_t)depth_pixel_index].z = m_invalidDepthValue;

#if PCL_VERSION_COMPARE(<, 1, 1, 0)
        unsigned int r = 96, g = 157, b = 198;
        uint32_t rgb = (static_cast<uint32_t>(r) << 16 | static_cast<uint32_t>(g) << 8 | static_cast<uint32_t>(b));
        pointcloud->points[(size_t)depth_pixel_index].rgb = *reinterpret_cast<float *>(&rgb);
#else
        pointcloud->points[(size_t)depth_pixel_index].r = (uint8_t)96;
        pointcloud->points[(size_t)depth_pixel_index].g = (uint8_t)157;
        pointcloud->points[(size_t)depth_pixel_index].b = (uint8_t)198;
#endif
        continue;
      }

      // Get the depth value of the current pixel
      auto pixels_distance = m_depthScale * p_depth_frame[depth_pixel_index];

      float depth_point[3];
      const float pixel[] = { (float)j, (float)i };
      rs2_deproject_pixel_to_point(depth_point, &depth_intrinsics, pixel, pixels_distance);

      if (pixels_distance > m_max_Z) {
        depth_point[0] = depth_point[1] = depth_point[2] = m_invalidDepthValue;
      }

      pointcloud->points[(size_t)depth_pixel_index].x = depth_point[0];
      pointcloud->points[(size_t)depth_pixel_index].y = depth_point[1];
      pointcloud->points[(size_t)depth_pixel_index].z = depth_point[2];

      if (!registered_streams) {
        float color_point[3];
        rs2_transform_point_to_point(color_point, &depth2ColorExtrinsics, depth_point);
        float color_pixel[2];
        rs2_project_point_to_pixel(color_pixel, &color_intrinsics, color_point);

        if (color_pixel[1] < 0 || color_pixel[1] >= color_height || color_pixel[0] < 0 ||
            color_pixel[0] >= color_width) {
          // Use a default color for pixels that project outside of the color image
#if PCL_VERSION_COMPARE(<, 1, 1, 0)
          unsigned int r = 96, g = 157, b = 198;
          uint32_t rgb = (static_cast<uint32_t>(r) << 16 | static_cast<uint32_t>(g) << 8 | static_cast<uint32_t>(b));
          pointcloud->points[(size_t)depth_pixel_index].rgb = *reinterpret_cast<float *>(&rgb);
#else
          pointcloud->points[(size_t)depth_pixel_index].r = (uint8_t)96;
          pointcloud->points[(size_t)depth_pixel_index].g = (uint8_t)157;
          pointcloud->points[(size_t)depth_pixel_index].b = (uint8_t)198;
#endif
        } else {
          unsigned int i_ = (unsigned int)color_pixel[1];
          unsigned int j_ = (unsigned int)color_pixel[0];

#if PCL_VERSION_COMPARE(<, 1, 1, 0)
          uint32_t rgb = 0;
          if (swap_rb) {
            rgb =
                (static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel]) |
                 static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1]) << 8 |
                 static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2]) << 16);
          } else {
            rgb = (static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel]) << 16 |
                   static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1]) << 8 |
                   static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2]));
          }

          pointcloud->points[(size_t)(i * depth_width + j)].rgb = *reinterpret_cast<float *>(&rgb);
#else
          if (swap_rb) {
            pointcloud->points[(size_t)depth_pixel_index].b =
                p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel];
            pointcloud->points[(size_t)depth_pixel_index].g =
                p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1];
            pointcloud->points[(size_t)depth_pixel_index].r =
                p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2];
          } else {
            pointcloud->points[(size_t)depth_pixel_index].r =
                p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel];
            pointcloud->points[(size_t)depth_pixel_index].g =
                p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1];
            pointcloud->points[(size_t)depth_pixel_index].b =
                p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2];
          }
#endif
        }
      } else {
#if PCL_VERSION_COMPARE(<, 1, 1, 0)
        uint32_t rgb = 0;
        if (swap_rb) {
          rgb = (static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel]) |
                 static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1]) << 8 |
                 static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2]) << 16);
        } else {
          rgb = (static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel]) << 16 |
                 static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1]) << 8 |
                 static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2]));
        }

        pointcloud->points[(size_t)(i * depth_width + j)].rgb = *reinterpret_cast<float *>(&rgb);
#else
        if (swap_rb) {
          pointcloud->points[(size_t)depth_pixel_index].b =
              p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel];
          pointcloud->points[(size_t)depth_pixel_index].g =
              p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1];
          pointcloud->points[(size_t)depth_pixel_index].r =
              p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2];
        } else {
          pointcloud->points[(size_t)depth_pixel_index].r =
              p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel];
          pointcloud->points[(size_t)depth_pixel_index].g =
              p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1];
          pointcloud->points[(size_t)depth_pixel_index].b =
              p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2];
        }
#endif
      }
    }
  }
}
vpHomogeneousMatrix vpRealSense2::getTransformation(const rs2_stream &from, const rs2_stream &to) const
{
  auto from_stream = m_pipelineProfile.get_stream(from);
  auto to_stream = m_pipelineProfile.get_stream(to);
  rs2_extrinsics extrinsics = from_stream.get_extrinsics_to(to_stream);

  vpTranslationVector t;
  vpRotationMatrix R;
  for (unsigned int i = 0; i < 3; i++) {
    t[i] = extrinsics.translation[i];
    for (unsigned int j = 0; j < 3; j++)
      R[i][j] = extrinsics.rotation[j * 3 + i];
  }

  vpHomogeneousMatrix to_M_from(t, R);
  return to_M_from;
}
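// Usage sketch (illustration, not part of the original file): expressing the extrinsics
// between two streams as a ViSP homogeneous matrix. With the to_M_from convention used
// above, the call below returns the pose of the depth frame expressed in the colour
// frame. Assumes an opened vpRealSense2 instance rs.
//
//   vpHomogeneousMatrix color_M_depth = rs.getTransformation(RS2_STREAM_DEPTH, RS2_STREAM_COLOR);
//   std::cout << "color_M_depth:\n" << color_M_depth << std::endl;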
void vpRealSense2::open(const rs2::config &cfg)
{
  m_pipelineProfile = m_pipe.start(cfg);

  rs2::device dev = m_pipelineProfile.get_device();

  // Go over the device's sensors to retrieve the depth scale
  for (rs2::sensor &sensor : dev.query_sensors()) {
    // Check if the sensor is a depth sensor
    if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
      m_depthScale = dpt.get_depth_scale();
    }
  }
}
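// Usage sketch (illustration, not part of the original file): opening the pipeline
// either with the default configuration or with explicitly enabled streams (the
// resolutions and rates below are assumptions, not requirements).
//
//   vpRealSense2 rs;
//   rs.open(); // default rs2::config
//
//   // or with an explicit configuration:
//   rs2::config cfg;
//   cfg.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30);
//   cfg.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, 30);
//   cfg.enable_stream(RS2_STREAM_INFRARED, 640, 480, RS2_FORMAT_Y8, 30);
//   rs.open(cfg);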
void print(const rs2_extrinsics &extrinsics, std::ostream &os)
{
  std::stringstream ss;
  ss << "Rotation Matrix:\n";

  for (auto i = 0; i < 3; ++i) {
    for (auto j = 0; j < 3; ++j) {
      ss << std::left << std::setw(15) << std::setprecision(5) << extrinsics.rotation[j * 3 + i];
    }
    ss << std::endl;
  }

  ss << "\nTranslation Vector: ";
  for (size_t i = 0; i < sizeof(extrinsics.translation) / sizeof(extrinsics.translation[0]); ++i)
    ss << std::setprecision(15) << extrinsics.translation[i] << "  ";

  os << ss.str() << "\n\n";
}
void print(const rs2_intrinsics &intrinsics, std::ostream &os)
{
  std::stringstream ss;
  ss << std::left << std::setw(14) << "Width: "
     << "\t" << intrinsics.width << "\n"
     << std::left << std::setw(14) << "Height: "
     << "\t" << intrinsics.height << "\n"
     << std::left << std::setw(14) << "PPX: "
     << "\t" << std::setprecision(15) << intrinsics.ppx << "\n"
     << std::left << std::setw(14) << "PPY: "
     << "\t" << std::setprecision(15) << intrinsics.ppy << "\n"
     << std::left << std::setw(14) << "Fx: "
     << "\t" << std::setprecision(15) << intrinsics.fx << "\n"
     << std::left << std::setw(14) << "Fy: "
     << "\t" << std::setprecision(15) << intrinsics.fy << "\n"
     << std::left << std::setw(14) << "Distortion: "
     << "\t" << rs2_distortion_to_string(intrinsics.model) << "\n"
     << std::left << std::setw(14) << "Coeffs: ";

  for (size_t i = 0; i < sizeof(intrinsics.coeffs) / sizeof(intrinsics.coeffs[0]); ++i)
    ss << "\t" << std::setprecision(15) << intrinsics.coeffs[i] << "  ";

  os << ss.str() << "\n\n";
}
void safe_get_intrinsics(const rs2::video_stream_profile &profile, rs2_intrinsics &intrinsics)
{
  try {
    intrinsics = profile.get_intrinsics();
  } catch (...) {
  }
}
bool operator==(const rs2_intrinsics &lhs, const rs2_intrinsics &rhs)
{
  return lhs.width == rhs.width && lhs.height == rhs.height && lhs.ppx == rhs.ppx && lhs.ppy == rhs.ppy &&
         lhs.fx == rhs.fx && lhs.fy == rhs.fy && lhs.model == rhs.model &&
         !std::memcmp(lhs.coeffs, rhs.coeffs, sizeof(rhs.coeffs));
}
std::string get_str_formats(const std::set<rs2_format> &formats)
{
  std::stringstream ss;
  for (auto format = formats.begin(); format != formats.end(); ++format) {
    ss << *format << ((format != formats.end()) && (next(format) == formats.end()) ? "" : "/");
  }
  return ss.str();
}
struct stream_and_resolution {
  rs2_stream stream;
  int stream_index;
  int width;
  int height;
  std::string stream_name;

  bool operator<(const stream_and_resolution &obj) const
  {
    return (std::make_tuple(stream, stream_index, width, height) <
            std::make_tuple(obj.stream, obj.stream_index, obj.width, obj.height));
  }
};

struct stream_and_index {
  rs2_stream stream;
  int stream_index;

  bool operator<(const stream_and_index &obj) const
  {
    return (std::make_tuple(stream, stream_index) < std::make_tuple(obj.stream, obj.stream_index));
  }
};
std::ostream &operator<<(std::ostream &os, const vpRealSense2 &rs)
{
  os << std::left << std::setw(30) << "Device Name" << std::setw(20) << "Serial Number" << std::setw(20)
     << "Firmware Version" << std::endl;

  rs2::device dev = rs.m_pipelineProfile.get_device();
  os << std::left << std::setw(30) << dev.get_info(RS2_CAMERA_INFO_NAME) << std::setw(20)
     << dev.get_info(RS2_CAMERA_INFO_SERIAL_NUMBER) << std::setw(20) << dev.get_info(RS2_CAMERA_INFO_FIRMWARE_VERSION)
     << std::endl;

  // Show which camera info is supported by this device
  os << " Device info: \n";
  for (auto j = 0; j < RS2_CAMERA_INFO_COUNT; ++j) {
    auto param = static_cast<rs2_camera_info>(j);
    if (dev.supports(param))
      os << "    " << std::left << std::setw(30) << rs2_camera_info_to_string(rs2_camera_info(param)) << ": \t"
         << dev.get_info(param) << "\n";
  }

  os << "\n";

  for (auto &&sensor : dev.query_sensors()) {
    os << "Options for " << sensor.get_info(RS2_CAMERA_INFO_NAME) << std::endl;

    os << std::setw(55) << " Supported options:" << std::setw(10) << "min" << std::setw(10) << " max" << std::setw(6)
       << " step" << std::setw(10) << " default" << std::endl;
    for (auto j = 0; j < RS2_OPTION_COUNT; ++j) {
      auto opt = static_cast<rs2_option>(j);
      if (sensor.supports(opt)) {
        auto range = sensor.get_option_range(opt);
        os << "    " << std::left << std::setw(50) << opt << " : " << std::setw(5) << range.min << "... "
           << std::setw(12) << range.max << std::setw(6) << range.step << std::setw(10) << range.def << "\n";
      }
    }

    os << "\n";
  }

  for (auto &&sensor : dev.query_sensors()) {
    os << "Stream Profiles supported by " << sensor.get_info(RS2_CAMERA_INFO_NAME) << "\n";

    os << std::setw(55) << " Supported modes:" << std::setw(10) << "stream" << std::setw(10) << " resolution"
       << std::setw(6) << " fps" << std::setw(10) << " format" << std::endl;

    for (auto &&profile : sensor.get_stream_profiles()) {
      if (auto video = profile.as<rs2::video_stream_profile>()) {
        os << "    " << profile.stream_name() << "\t  " << video.width() << "x" << video.height() << "\t@ "
           << profile.fps() << "Hz\t" << profile.format() << "\n";
      } else {
        os << "    " << profile.stream_name() << "\t@ " << profile.fps() << "Hz\t" << profile.format() << "\n";
      }
    }

    os << "\n";
  }

  std::map<stream_and_index, rs2::stream_profile> streams;
  std::map<stream_and_resolution, std::vector<std::pair<std::set<rs2_format>, rs2_intrinsics> > > intrinsics_map;
  for (auto &&sensor : dev.query_sensors()) {
    // Intrinsics
    for (auto &&profile : sensor.get_stream_profiles()) {
      if (auto video = profile.as<rs2::video_stream_profile>()) {
        if (streams.find(stream_and_index{profile.stream_type(), profile.stream_index()}) == streams.end()) {
          streams[stream_and_index{profile.stream_type(), profile.stream_index()}] = profile;
        }

        rs2_intrinsics intrinsics{};
        stream_and_resolution stream_res{profile.stream_type(), profile.stream_index(), video.width(), video.height(),
                                         profile.stream_name()};
        safe_get_intrinsics(video, intrinsics);
        auto it = std::find_if(
            (intrinsics_map[stream_res]).begin(), (intrinsics_map[stream_res]).end(),
            [&](const std::pair<std::set<rs2_format>, rs2_intrinsics> &kvp) { return intrinsics == kvp.second; });
        if (it == (intrinsics_map[stream_res]).end()) {
          (intrinsics_map[stream_res]).push_back({ {profile.format()}, intrinsics });
        } else {
          it->first.insert(profile.format());
        }
      }
    }
  }

  os << "Provided Intrinsic:\n";
  for (auto &kvp : intrinsics_map) {
    auto stream_res = kvp.first;
    for (auto &intrinsics : kvp.second) {
      auto formats = get_str_formats(intrinsics.first);
      os << "Intrinsic of \"" << stream_res.stream_name << "\"\t  " << stream_res.width << "x" << stream_res.height
         << "\t  " << formats << "\n";
      if (intrinsics.second == rs2_intrinsics{}) {
        os << "Intrinsic NOT available!\n\n";
      } else {
        print(intrinsics.second, os);
      }
    }
  }

  // Print extrinsics between each pair of streams
  os << "\nProvided Extrinsic:\n";
  for (auto kvp1 = streams.begin(); kvp1 != streams.end(); ++kvp1) {
    for (auto kvp2 = streams.begin(); kvp2 != streams.end(); ++kvp2) {
      os << "Extrinsic from \"" << kvp1->second.stream_name() << "\"\t  "
         << "To"
         << "\t  \"" << kvp2->second.stream_name() << "\"\n";
      auto extrinsics = kvp1->second.get_extrinsics_to(kvp2->second);
      print(extrinsics, os);
    }
  }

  return os;
}
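// Usage sketch (illustration, not part of the original file): dumping device name,
// serial number, firmware version, supported options, stream profiles, intrinsics
// and extrinsics to a stream.
//
//   vpRealSense2 rs;
//   rs.open();
//   std::cout << rs << std::endl;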
#elif !defined(VISP_BUILD_SHARED_LIBS)
// Work around to avoid warning: libvisp_sensor.a(vpRealSense2.cpp.o) has no symbols
void dummy_vpRealSense2(){};
#endif