// Visual Servoing Platform, version 3.6.1 under development (2024-07-27)
// vpOccipitalStructure.cpp
1 /****************************************************************************
2  *
3  * ViSP, open source Visual Servoing Platform software.
4  * Copyright (C) 2005 - 2023 by Inria. All rights reserved.
5  *
6  * This software is free software; you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License as published by
8  * the Free Software Foundation; either version 2 of the License, or
9  * (at your option) any later version.
10  * See the file LICENSE.txt at the root directory of this source
11  * distribution for additional information about the GNU GPL.
12  *
13  * For using ViSP with software that can not be combined with the GNU
14  * GPL, please contact Inria about acquiring a ViSP Professional
15  * Edition License.
16  *
17  * See https://visp.inria.fr for more information.
18  *
19  * This software was developed at:
20  * Inria Rennes - Bretagne Atlantique
21  * Campus Universitaire de Beaulieu
22  * 35042 Rennes Cedex
23  * France
24  *
25  * If you have questions regarding the use of this file, please contact
26  * Inria at visp@inria.fr
27  *
28  * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
29  * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
30  *
31  * Description:
32  * libStructure interface.
33  *
34 *****************************************************************************/
35 
36 #include <visp3/core/vpConfig.h>
37 
38 #if defined(VISP_HAVE_OCCIPITAL_STRUCTURE) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11) && defined(VISP_HAVE_THREADS)
39 #include <cstring>
40 #include <functional>
41 #include <iomanip>
42 #include <map>
43 #include <set>
44 #include <thread>
45 
46 #include <ST/Utilities.h>
47 
48 #include <visp3/core/vpImageConvert.h>
49 #include <visp3/core/vpMeterPixelConversion.h>
50 #include <visp3/core/vpPoint.h>
51 #include <visp3/sensor/vpOccipitalStructure.h>
52 
53 #define MANUAL_POINTCLOUD 1
54 
59 vpOccipitalStructure::vpOccipitalStructure() : m_invalidDepthValue(0.0f), m_maxZ(15000.0f) { }
60 
66 
73 void vpOccipitalStructure::acquire(vpImage<unsigned char> &gray, bool undistorted, double *ts)
74 {
75  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
76  m_delegate.cv_sampleLock.wait(u);
77 
78  if (m_delegate.m_visibleFrame.isValid()) {
79  if (!undistorted)
80  memcpy(gray.bitmap, m_delegate.m_visibleFrame.yData(), m_delegate.m_visibleFrame.ySize());
81  else
82  memcpy(gray.bitmap, m_delegate.m_visibleFrame.undistorted().yData(), m_delegate.m_visibleFrame.ySize());
83 
84  if (ts != nullptr)
85  *ts = m_delegate.m_visibleFrame.arrivalTimestamp();
86  }
87 
88  u.unlock();
89 }
90 
97 void vpOccipitalStructure::acquire(vpImage<vpRGBa> &rgb, bool undistorted, double *ts)
98 {
99  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
100  m_delegate.cv_sampleLock.wait(u);
101 
102  if (m_delegate.m_visibleFrame.isValid()) {
103  // Detecting if it's a Color Structure Core device.
104  if (m_delegate.m_cameraType == ST::StructureCoreCameraType::Color)
105  vpImageConvert::RGBToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.rgbData()),
106  reinterpret_cast<unsigned char *>(rgb.bitmap),
107  m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
108 
109  else // If it's a monochrome Structure Core device.
110  {
111  if (!undistorted)
112  vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.yData()),
113  reinterpret_cast<unsigned char *>(rgb.bitmap),
114  m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
115  else
116  vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.undistorted().yData()),
117  reinterpret_cast<unsigned char *>(rgb.bitmap),
118  m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
119  }
120 
121  if (ts != nullptr)
122  *ts = m_delegate.m_visibleFrame.arrivalTimestamp();
123  }
124 
125  u.unlock();
126 }
127 
138  vpColVector *gyroscope_data, bool undistorted, double *ts)
139 {
140  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
141  m_delegate.cv_sampleLock.wait(u);
142 
143  if (rgb != nullptr && m_delegate.m_visibleFrame.isValid()) {
144  // Detecting if it's a Color Structure Core device.
145  if (m_delegate.m_cameraType == ST::StructureCoreCameraType::Color)
146  vpImageConvert::RGBToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.rgbData()),
147  reinterpret_cast<unsigned char *>(rgb->bitmap),
148  m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
149 
150  else // If it's a monochrome Structure Core device.
151  {
152  if (!undistorted)
153  vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.yData()),
154  reinterpret_cast<unsigned char *>(rgb->bitmap),
155  m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
156  else
157  vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.undistorted().yData()),
158  reinterpret_cast<unsigned char *>(rgb->bitmap),
159  m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
160  }
161 
162  if (ts != nullptr)
163  *ts = m_delegate.m_visibleFrame.arrivalTimestamp();
164  }
165 
166  if (depth != nullptr && m_delegate.m_depthFrame.isValid())
167  memcpy((unsigned char *)depth->bitmap, m_delegate.m_depthFrame.convertDepthToRgba(),
168  m_delegate.m_depthFrame.width() * m_delegate.m_depthFrame.height() * 4);
169 
170  if (acceleration_data != nullptr) {
171  ST::Acceleration accel = m_delegate.m_accelerometerEvent.acceleration();
172  acceleration_data[0] = accel.x;
173  acceleration_data[1] = accel.y;
174  acceleration_data[2] = accel.z;
175  }
176 
177  if (gyroscope_data != nullptr) {
178  ST::RotationRate gyro_data = m_delegate.m_gyroscopeEvent.rotationRate();
179  gyroscope_data[0] = gyro_data.x;
180  gyroscope_data[1] = gyro_data.y;
181  gyroscope_data[2] = gyro_data.z;
182  }
183 
184  if (ts != nullptr) // By default, frames are synchronized.
185  *ts = m_delegate.m_visibleFrame.arrivalTimestamp();
186 
187  u.unlock();
188 }
189 
200  vpColVector *gyroscope_data, bool undistorted, double *ts)
201 {
202  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
203  m_delegate.cv_sampleLock.wait(u);
204 
205  if (gray != nullptr && m_delegate.m_visibleFrame.isValid()) {
206  if (!undistorted)
207  memcpy(gray->bitmap, m_delegate.m_visibleFrame.yData(), m_delegate.m_visibleFrame.ySize());
208  else
209  memcpy(gray->bitmap, m_delegate.m_visibleFrame.undistorted().yData(), m_delegate.m_visibleFrame.ySize());
210 
211  if (ts != nullptr)
212  *ts = m_delegate.m_visibleFrame.arrivalTimestamp();
213  }
214 
215  if (depth != nullptr && m_delegate.m_depthFrame.isValid())
216  memcpy((unsigned char *)depth->bitmap, m_delegate.m_depthFrame.convertDepthToRgba(),
217  m_delegate.m_depthFrame.width() * m_delegate.m_depthFrame.height() * 4);
218 
219  if (acceleration_data != nullptr) {
220  ST::Acceleration accel = m_delegate.m_accelerometerEvent.acceleration();
221  acceleration_data[0] = accel.x;
222  acceleration_data[1] = accel.y;
223  acceleration_data[2] = accel.z;
224  }
225 
226  if (gyroscope_data != nullptr) {
227  ST::RotationRate gyro_data = m_delegate.m_gyroscopeEvent.rotationRate();
228  gyroscope_data[0] = gyro_data.x;
229  gyroscope_data[1] = gyro_data.y;
230  gyroscope_data[2] = gyro_data.z;
231  }
232 
233  if (ts != nullptr) // By default, frames are synchronized.
234  *ts = m_delegate.m_visibleFrame.arrivalTimestamp();
235 
236  u.unlock();
237 }
238 
250 void vpOccipitalStructure::acquire(unsigned char *const data_image, unsigned char *const data_depth,
251  std::vector<vpColVector> *const data_pointCloud, unsigned char *const data_infrared,
252  vpColVector *acceleration_data, vpColVector *gyroscope_data, bool undistorted,
253  double *ts)
254 {
255  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
256  m_delegate.cv_sampleLock.wait(u);
257 
258  if (m_delegate.m_depthFrame.isValid() && data_depth != nullptr)
259  memcpy(data_depth, m_delegate.m_depthFrame.depthInMillimeters(),
260  m_delegate.m_depthFrame.width() * m_delegate.m_depthFrame.height() * sizeof(float));
261 
262  if (m_delegate.m_visibleFrame.isValid() && data_image != nullptr) {
263  if (m_delegate.m_cameraType == ST::StructureCoreCameraType::Color)
264  vpImageConvert::RGBToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.rgbData()),
265  reinterpret_cast<unsigned char *>(data_image),
266  m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
267 
268  else // If it's a monochrome Structure Core device.
269  {
270  if (!undistorted)
271  vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.yData()),
272  reinterpret_cast<unsigned char *>(data_image),
273  m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
274  else
275  vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.undistorted().yData()),
276  reinterpret_cast<unsigned char *>(data_image),
277  m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
278  }
279  }
280 
281  if (m_delegate.m_infraredFrame.isValid() && data_infrared != nullptr)
282  memcpy(data_infrared, m_delegate.m_infraredFrame.data(),
283  m_delegate.m_infraredFrame.width() * m_delegate.m_infraredFrame.height() * sizeof(uint16_t));
284 
285  if (data_pointCloud != nullptr)
286  getPointcloud(*data_pointCloud);
287 
288  if (acceleration_data != nullptr) {
289  ST::Acceleration accel = m_delegate.m_accelerometerEvent.acceleration();
290  acceleration_data[0] = accel.x;
291  acceleration_data[1] = accel.y;
292  acceleration_data[2] = accel.z;
293  }
294 
295  if (gyroscope_data != nullptr) {
296  ST::RotationRate gyro_data = m_delegate.m_gyroscopeEvent.rotationRate();
297  gyroscope_data[0] = gyro_data.x;
298  gyroscope_data[1] = gyro_data.y;
299  gyroscope_data[2] = gyro_data.z;
300  }
301 
302  if (ts != nullptr) // By default, frames are synchronized.
303  *ts = m_delegate.m_visibleFrame.arrivalTimestamp();
304 
305  u.unlock();
306 }
307 
308 #if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
321 void vpOccipitalStructure::acquire(unsigned char *const data_image, unsigned char *const data_depth,
322  std::vector<vpColVector> *const data_pointCloud,
323  pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud, unsigned char *const data_infrared,
324  vpColVector *acceleration_data, vpColVector *gyroscope_data, bool undistorted,
325  double *ts)
326 {
327  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
328  m_delegate.cv_sampleLock.wait(u);
329 
330  if (m_delegate.m_visibleFrame.isValid()) {
331  if (m_delegate.m_cameraType == ST::StructureCoreCameraType::Color)
332  vpImageConvert::RGBToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.rgbData()),
333  reinterpret_cast<unsigned char *>(data_image),
334  m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
335 
336  else // If it's a monochrome Structure Core device.
337  {
338  if (!undistorted)
339  vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.yData()),
340  reinterpret_cast<unsigned char *>(data_image),
341  m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
342  else
343  vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.undistorted().yData()),
344  reinterpret_cast<unsigned char *>(data_image),
345  m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
346  }
347  }
348 
349  if (m_delegate.m_depthFrame.isValid() && data_depth != nullptr)
350  memcpy(data_depth, m_delegate.m_depthFrame.depthInMillimeters(),
351  m_delegate.m_depthFrame.width() * m_delegate.m_depthFrame.height() * sizeof(float));
352 
353  if (m_delegate.m_infraredFrame.isValid() && data_infrared != nullptr)
354  memcpy(data_infrared, m_delegate.m_infraredFrame.data(),
355  m_delegate.m_infraredFrame.width() * m_delegate.m_infraredFrame.height() * sizeof(uint16_t));
356 
357  if (data_pointCloud != nullptr)
358  getPointcloud(*data_pointCloud);
359 
360  if (m_delegate.m_depthFrame.isValid() && pointcloud != nullptr)
361  getPointcloud(pointcloud);
362 
363  if (acceleration_data != nullptr) {
364  ST::Acceleration accel = m_delegate.m_accelerometerEvent.acceleration();
365  acceleration_data[0] = accel.x;
366  acceleration_data[1] = accel.y;
367  acceleration_data[2] = accel.z;
368  }
369 
370  if (gyroscope_data != nullptr) {
371  ST::RotationRate gyro_data = m_delegate.m_gyroscopeEvent.rotationRate();
372  gyroscope_data[0] = gyro_data.x;
373  gyroscope_data[1] = gyro_data.y;
374  gyroscope_data[2] = gyro_data.z;
375  }
376 
377  if (ts != nullptr) // By default, frames are synchronized.
378  *ts = m_delegate.m_visibleFrame.arrivalTimestamp();
379 
380  u.unlock();
381 }
382 
395 void vpOccipitalStructure::acquire(unsigned char *const data_image, unsigned char *const data_depth,
396  std::vector<vpColVector> *const data_pointCloud,
397  pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud,
398  unsigned char *const data_infrared, vpColVector *acceleration_data,
399  vpColVector *gyroscope_data, bool undistorted, double *ts)
400 {
401  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
402  m_delegate.cv_sampleLock.wait(u);
403 
404  if (m_delegate.m_depthFrame.isValid() && data_depth != nullptr)
405  memcpy(data_depth, m_delegate.m_depthFrame.depthInMillimeters(),
406  m_delegate.m_depthFrame.width() * m_delegate.m_depthFrame.height() * sizeof(float));
407 
408  if (m_delegate.m_visibleFrame.isValid() && data_image != nullptr) {
409  if (m_delegate.m_cameraType == ST::StructureCoreCameraType::Color)
410  vpImageConvert::RGBToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.rgbData()),
411  reinterpret_cast<unsigned char *>(data_image),
412  m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
413 
414  else // If it's a monochrome Structure Core device.
415  {
416  if (!undistorted)
417  vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.yData()),
418  reinterpret_cast<unsigned char *>(data_image),
419  m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
420  else
421  vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.undistorted().yData()),
422  reinterpret_cast<unsigned char *>(data_image),
423  m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
424  }
425  }
426 
427  if (m_delegate.m_infraredFrame.isValid() && data_infrared != nullptr)
428  memcpy(data_infrared, m_delegate.m_infraredFrame.data(),
429  m_delegate.m_infraredFrame.width() * m_delegate.m_infraredFrame.height() * sizeof(uint16_t));
430 
431  if (m_delegate.m_depthFrame.isValid() && data_pointCloud != nullptr)
432  getPointcloud(*data_pointCloud);
433 
434  if (m_delegate.m_depthFrame.isValid() && m_delegate.m_visibleFrame.isValid() && pointcloud != nullptr)
435  getColoredPointcloud(pointcloud);
436 
437  if (acceleration_data != nullptr) {
438  ST::Acceleration accel = m_delegate.m_accelerometerEvent.acceleration();
439  acceleration_data[0] = accel.x;
440  acceleration_data[1] = accel.y;
441  acceleration_data[2] = accel.z;
442  }
443 
444  if (gyroscope_data != nullptr) {
445  ST::RotationRate gyro_data = m_delegate.m_gyroscopeEvent.rotationRate();
446  gyroscope_data[0] = gyro_data.x;
447  gyroscope_data[1] = gyro_data.y;
448  gyroscope_data[2] = gyro_data.z;
449  }
450 
451  if (ts != nullptr) // By default, frames are synchronized.
452  *ts = m_delegate.m_visibleFrame.arrivalTimestamp();
453 
454  u.unlock();
455 }
456 
457 #endif
458 
481 {
482  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
483  m_delegate.cv_sampleLock.wait(u);
484 
485  if (imu_vel != nullptr) {
486  imu_vel->resize(3, false);
487  ST::RotationRate imu_rotationRate = m_delegate.m_gyroscopeEvent.rotationRate();
488  (*imu_vel)[0] = imu_rotationRate.x;
489  (*imu_vel)[1] = imu_rotationRate.y;
490  (*imu_vel)[2] = imu_rotationRate.z;
491  }
492 
493  if (ts != nullptr)
494  *ts = m_delegate.m_gyroscopeEvent.arrivalTimestamp();
495 
496  u.unlock();
497 }
498 
521 {
522  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
523  m_delegate.cv_sampleLock.wait(u);
524 
525  if (imu_acc != nullptr) {
526  imu_acc->resize(3, false);
527  ST::Acceleration imu_acceleration = m_delegate.m_accelerometerEvent.acceleration();
528  (*imu_acc)[0] = imu_acceleration.x;
529  (*imu_acc)[1] = imu_acceleration.y;
530  (*imu_acc)[2] = imu_acceleration.z;
531  }
532 
533  if (ts != nullptr)
534  *ts = m_delegate.m_accelerometerEvent.arrivalTimestamp();
535 
536  u.unlock();
537 }
538 
562 void vpOccipitalStructure::getIMUData(vpColVector *imu_vel, vpColVector *imu_acc, double *ts)
563 {
564  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
565  m_delegate.cv_sampleLock.wait(u);
566  double imu_vel_timestamp, imu_acc_timestamp;
567 
568  if (imu_vel != nullptr) {
569  imu_vel->resize(3, false);
570  ST::RotationRate imu_rotationRate = m_delegate.m_gyroscopeEvent.rotationRate();
571  (*imu_vel)[0] = imu_rotationRate.x;
572  (*imu_vel)[1] = imu_rotationRate.y;
573  (*imu_vel)[2] = imu_rotationRate.z;
574  imu_vel_timestamp =
575  m_delegate.m_gyroscopeEvent.arrivalTimestamp(); // Relative to an unspecified epoch. (see documentation).
576  }
577 
578  if (imu_acc != nullptr) {
579  imu_acc->resize(3, false);
580  ST::Acceleration imu_acceleration = m_delegate.m_accelerometerEvent.acceleration();
581  (*imu_acc)[0] = imu_acceleration.x;
582  (*imu_acc)[1] = imu_acceleration.y;
583  (*imu_acc)[2] = imu_acceleration.z;
584  imu_acc_timestamp = m_delegate.m_accelerometerEvent.arrivalTimestamp();
585  }
586 
587  if (ts != nullptr)
588  *ts = std::max<double>(imu_vel_timestamp, imu_acc_timestamp);
589 
590  u.unlock();
591 }
592 
597 bool vpOccipitalStructure::open(const ST::CaptureSessionSettings &settings)
598 {
599  if (m_init) {
600  close();
601  }
602 
603  m_captureSession.setDelegate(&m_delegate);
604 
605  if (!m_captureSession.startMonitoring(settings)) {
606  std::cout << "Failed to initialize capture session!" << std::endl;
607  return false;
608  }
609 
610  // This will lock the open() function until the CaptureSession
611  // recieves the first frame or a timeout.
612  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
613  std::cv_status var =
614  m_delegate.cv_sampleLock.wait_for(u, std::chrono::seconds(20)); // Make sure a device is connected.
615 
616 // In case the device is not connected, open() should return false.
617  if (var == std::cv_status::timeout) {
618  m_init = false;
619  return m_init;
620  }
621 
622  m_init = true;
623  m_captureSessionSettings = settings;
624  return m_init;
625 }
626 
638 {
639  if (m_init) {
640  m_captureSession.stopStreaming();
641  m_init = false;
642  }
643 }
644 
650 {
651  switch (stream_type) {
652  case vpOccipitalStructureStream::visible:
653  if (m_delegate.m_visibleFrame.isValid())
654  return m_delegate.m_visibleFrame.width();
655  break;
656 
657  case vpOccipitalStructureStream::depth:
658  if (m_delegate.m_depthFrame.isValid())
659  return m_delegate.m_depthFrame.width();
660  break;
661 
662  case vpOccipitalStructureStream::infrared:
663  if (m_delegate.m_infraredFrame.isValid())
664  return m_delegate.m_infraredFrame.width();
665  break;
666 
667  default:
668  break;
669  }
670 
671  return 0;
672 }
673 
679 {
680  switch (stream_type) {
681  case vpOccipitalStructureStream::visible:
682  if (m_delegate.m_visibleFrame.isValid())
683  return m_delegate.m_visibleFrame.height();
684  break;
685 
686  case vpOccipitalStructureStream::depth:
687  if (m_delegate.m_depthFrame.isValid())
688  return m_delegate.m_depthFrame.height();
689  break;
690 
691  case vpOccipitalStructureStream::infrared:
692  if (m_delegate.m_infraredFrame.isValid())
693  return m_delegate.m_infraredFrame.height();
694  break;
695 
696  default:
697  break;
698  }
699 
700  return 0;
701 }
702 
709 {
710  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
711  m_delegate.cv_sampleLock.wait(u);
712 
713  if (m_delegate.m_depthFrame.isValid())
714  return m_delegate.m_depthFrame(x, y);
715 
716  else
717  return 0.;
718 
719  u.unlock();
720 }
721 
727 // Return vpColVector
729 {
730  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
731  m_delegate.cv_sampleLock.wait(u);
732 
733  if (m_delegate.m_depthFrame.isValid()) {
734  ST::Vector3f point_3D = m_delegate.m_depthFrame.unprojectPoint(row, col);
735  return vpPoint(point_3D.x, point_3D.y, point_3D.z);
736  }
737 
738  else // Should be returning invalid vpPoint.
739  return vpPoint(0., 0., 0.);
740 
741  u.unlock();
742 }
743 
751 {
752  vpHomogeneousMatrix result;
753 
754  switch (from) {
756  if (to == vpOccipitalStructure::visible) {
757  ST::Matrix4 v_M_d = m_delegate.m_depthFrame.visibleCameraPoseInDepthCoordinateFrame();
758 
759  result[0][0] = v_M_d.m00;
760  result[0][1] = v_M_d.m10;
761  result[0][2] = v_M_d.m20;
762  result[0][3] = v_M_d.m30;
763  result[1][0] = v_M_d.m01;
764  result[1][1] = v_M_d.m11;
765  result[1][2] = v_M_d.m21;
766  result[1][3] = v_M_d.m31;
767  result[2][0] = v_M_d.m02;
768  result[2][1] = v_M_d.m12;
769  result[2][2] = v_M_d.m22;
770  result[2][3] = v_M_d.m32;
771  }
772 
773  if (to == vpOccipitalStructure::imu) {
774  ST::Matrix4 imu_M_d = m_captureSession.getImuFromDepthExtrinsics().inversed();
775 
776  result[0][0] = imu_M_d.m00;
777  result[0][1] = imu_M_d.m10;
778  result[0][2] = imu_M_d.m20;
779  result[0][3] = imu_M_d.m30;
780  result[1][0] = imu_M_d.m01;
781  result[1][1] = imu_M_d.m11;
782  result[1][2] = imu_M_d.m21;
783  result[1][3] = imu_M_d.m31;
784  result[2][0] = imu_M_d.m02;
785  result[2][1] = imu_M_d.m12;
786  result[2][2] = imu_M_d.m22;
787  result[2][3] = imu_M_d.m32;
788  }
789  break;
790 
792  if (to == vpOccipitalStructure::depth) {
793  ST::Matrix4 d_M_v = m_delegate.m_depthFrame.visibleCameraPoseInDepthCoordinateFrame().inversed();
794 
795  result[0][0] = d_M_v.m00;
796  result[0][1] = d_M_v.m10;
797  result[0][2] = d_M_v.m20;
798  result[0][3] = d_M_v.m30;
799  result[1][0] = d_M_v.m01;
800  result[1][1] = d_M_v.m11;
801  result[1][2] = d_M_v.m21;
802  result[1][3] = d_M_v.m31;
803  result[2][0] = d_M_v.m02;
804  result[2][1] = d_M_v.m12;
805  result[2][2] = d_M_v.m22;
806  result[2][3] = d_M_v.m32;
807  }
808 
809  if (to == vpOccipitalStructure::imu) {
810  ST::Matrix4 imu_M_v = m_captureSession.getImuFromVisibleExtrinsics().inversed();
811 
812  result[0][0] = imu_M_v.m00;
813  result[0][1] = imu_M_v.m10;
814  result[0][2] = imu_M_v.m20;
815  result[0][3] = imu_M_v.m30;
816  result[1][0] = imu_M_v.m01;
817  result[1][1] = imu_M_v.m11;
818  result[1][2] = imu_M_v.m21;
819  result[1][3] = imu_M_v.m31;
820  result[2][0] = imu_M_v.m02;
821  result[2][1] = imu_M_v.m12;
822  result[2][2] = imu_M_v.m22;
823  result[2][3] = imu_M_v.m32;
824  }
825  break;
826 
828  break;
829 
831  if (to == vpOccipitalStructure::depth) {
832  ST::Matrix4 d_M_imu = m_captureSession.getImuFromDepthExtrinsics();
833 
834  result[0][0] = d_M_imu.m00;
835  result[0][1] = d_M_imu.m10;
836  result[0][2] = d_M_imu.m20;
837  result[0][3] = d_M_imu.m30;
838  result[1][0] = d_M_imu.m01;
839  result[1][1] = d_M_imu.m11;
840  result[1][2] = d_M_imu.m21;
841  result[1][3] = d_M_imu.m31;
842  result[2][0] = d_M_imu.m02;
843  result[2][1] = d_M_imu.m12;
844  result[2][2] = d_M_imu.m22;
845  result[2][3] = d_M_imu.m32;
846  }
847 
848  if (to == vpOccipitalStructure::visible) {
849  ST::Matrix4 v_M_imu = m_captureSession.getImuFromVisibleExtrinsics();
850 
851  result[0][0] = v_M_imu.m00;
852  result[0][1] = v_M_imu.m10;
853  result[0][2] = v_M_imu.m20;
854  result[0][3] = v_M_imu.m30;
855  result[1][0] = v_M_imu.m01;
856  result[1][1] = v_M_imu.m11;
857  result[1][2] = v_M_imu.m21;
858  result[1][3] = v_M_imu.m31;
859  result[2][0] = v_M_imu.m02;
860  result[2][1] = v_M_imu.m12;
861  result[2][2] = v_M_imu.m22;
862  result[2][3] = v_M_imu.m32;
863  }
864  break;
865 
866  default:
867  break;
868  }
869 
870  return result;
871 }
872 
878 {
879  ST::Intrinsics result;
880 
881  switch (stream_type) {
883  result = m_delegate.m_visibleFrame.intrinsics();
884  break;
885 
887  result = m_delegate.m_depthFrame.intrinsics();
888  break;
889 
891  result = m_delegate.m_infraredFrame.intrinsics();
892  break;
893 
894  default:
895  // Deal with exceptions.
896  break;
897  }
898 
899  return result;
900 }
901 
908 {
909  m_delegate.m_depthFrame.saveImageAsPointCloudMesh(filename.c_str());
910 }
911 
919 {
920  ST::Intrinsics cam_intrinsics;
921  switch (stream_type) {
922  case vpOccipitalStructureStream::visible:
923  cam_intrinsics = m_delegate.m_visibleFrame.intrinsics();
925  m_visible_camera_parameters = vpCameraParameters(cam_intrinsics.fx, cam_intrinsics.fy, cam_intrinsics.cx,
926  cam_intrinsics.cy, cam_intrinsics.k1, -cam_intrinsics.k1);
927 
930  vpCameraParameters(cam_intrinsics.fx, cam_intrinsics.fy, cam_intrinsics.cx, cam_intrinsics.cy);
931 
933  break;
934 
935  case vpOccipitalStructureStream::depth:
936  cam_intrinsics = m_delegate.m_depthFrame.intrinsics();
938  vpCameraParameters(cam_intrinsics.fx, cam_intrinsics.fy, cam_intrinsics.cx, cam_intrinsics.cy);
939 
941  break;
942 
943  default: // Should throw exception for not having this type of extrinsic transforms.
944  return vpCameraParameters();
945  break;
946  }
947 }
948 
949 // Protected functions.
950 void vpOccipitalStructure::getPointcloud(std::vector<vpColVector> &pointcloud)
951 {
952  ST::DepthFrame last_df = m_delegate.m_depthFrame;
953  const int width = last_df.width();
954  const int height = last_df.height();
955  pointcloud.resize((size_t)(width * height));
956 
957  const float *p_depth_frame = reinterpret_cast<const float *>(last_df.depthInMillimeters());
958 
959 // Multi-threading if OpenMP
960 // Concurrent writes at different locations are safe
961 #pragma omp parallel for schedule(dynamic)
962  for (int i = 0; i < height; i++) {
963  auto depth_pixel_index = i * width;
964 
965  for (int j = 0; j < width; j++, depth_pixel_index++) {
966  if (std::isnan(p_depth_frame[depth_pixel_index])) {
967  pointcloud[(size_t)depth_pixel_index].resize(4, false);
968  pointcloud[(size_t)depth_pixel_index][0] = m_invalidDepthValue;
969  pointcloud[(size_t)depth_pixel_index][1] = m_invalidDepthValue;
970  pointcloud[(size_t)depth_pixel_index][2] = m_invalidDepthValue;
971  pointcloud[(size_t)depth_pixel_index][3] = 1.0;
972  continue;
973  }
974 
975  // Get the depth value of the current pixel
976  auto pixels_distance = p_depth_frame[depth_pixel_index];
977 
978  ST::Vector3f point_3D = last_df.unprojectPoint(i, j);
979 
980  if (pixels_distance > m_maxZ)
981  point_3D.x = point_3D.y = point_3D.z = m_invalidDepthValue;
982 
983  pointcloud[(size_t)depth_pixel_index].resize(4, false);
984  pointcloud[(size_t)depth_pixel_index][0] = point_3D.x * 0.001;
985  pointcloud[(size_t)depth_pixel_index][1] = point_3D.y * 0.001;
986  pointcloud[(size_t)depth_pixel_index][2] = point_3D.z * 0.001;
987  pointcloud[(size_t)depth_pixel_index][3] = 1.0;
988  }
989  }
990 }
991 
992 #if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
993 void vpOccipitalStructure::getPointcloud(pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud)
994 {
995  ST::DepthFrame last_df = m_delegate.m_depthFrame;
996  const int width = last_df.width();
997  const int height = last_df.height();
998  pointcloud->width = (uint32_t)width;
999  pointcloud->height = (uint32_t)height;
1000  pointcloud->resize((size_t)(width * height));
1001 
1002 #if MANUAL_POINTCLOUD // faster to compute manually when tested
1003  const float *p_depth_frame = reinterpret_cast<const float *>(last_df.depthInMillimeters());
1004 
1005  // Multi-threading if OpenMP
1006  // Concurrent writes at different locations are safe
1007 #pragma omp parallel for schedule(dynamic)
1008  for (int i = 0; i < height; i++) {
1009  auto depth_pixel_index = i * width;
1010 
1011  for (int j = 0; j < width; j++, depth_pixel_index++) {
1012  if (p_depth_frame[depth_pixel_index] == 0) {
1013  pointcloud->points[(size_t)(depth_pixel_index)].x = m_invalidDepthValue;
1014  pointcloud->points[(size_t)(depth_pixel_index)].y = m_invalidDepthValue;
1015  pointcloud->points[(size_t)(depth_pixel_index)].z = m_invalidDepthValue;
1016  continue;
1017  }
1018 
1019  // Get the depth value of the current pixel
1020  auto pixels_distance = p_depth_frame[depth_pixel_index];
1021 
1022  // Get 3D coordinates in depth frame. (in mm)
1023  ST::Vector3f point_3D = last_df.unprojectPoint(i, j);
1024 
1025  if (pixels_distance > m_maxZ)
1026  point_3D.x = point_3D.y = point_3D.z = m_invalidDepthValue;
1027 
1028  pointcloud->points[(size_t)(depth_pixel_index)].x = point_3D.x * 0.001;
1029  pointcloud->points[(size_t)(depth_pixel_index)].y = point_3D.y * 0.001;
1030  pointcloud->points[(size_t)(depth_pixel_index)].z = point_3D.z * 0.001;
1031  }
1032  }
1033 // #else
1034 // m_points = m_pointcloud.calculate(depth_frame);
1035 // auto vertices = m_points.get_vertices();
1036 
1037 // for (size_t i = 0; i < m_points.size(); i++) {
1038 // if (vertices[i].z <= 0.0f || vertices[i].z > m_maxZ) {
1039 // pointcloud->points[i].x = m_invalidDepthValue;
1040 // pointcloud->points[i].y = m_invalidDepthValue;
1041 // pointcloud->points[i].z = m_invalidDepthValue;
1042 // } else {
1043 // pointcloud->points[i].x = vertices[i].x;
1044 // pointcloud->points[i].y = vertices[i].y;
1045 // pointcloud->points[i].z = vertices[i].z;
1046 // }
1047 // }
1048 #endif
1049 }
1050 
1051 void vpOccipitalStructure::getColoredPointcloud(pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud)
1052 {
1053  ST::DepthFrame last_df = m_delegate.m_depthFrame;
1054  ST::ColorFrame last_cf = m_delegate.m_visibleFrame;
1055 
1056  const int depth_width = last_df.width();
1057  const int depth_height = last_df.height();
1058  pointcloud->width = static_cast<uint32_t>(depth_width);
1059  pointcloud->height = static_cast<uint32_t>(depth_height);
1060  pointcloud->resize(static_cast<uint32_t>(depth_width * depth_height));
1061 
1062  const int color_width = last_cf.width();
1063  const int color_height = last_cf.height();
1064 
1065  const float *p_depth_frame = reinterpret_cast<const float *>(last_df.depthInMillimeters());
1066  const ST::Matrix4 depth2ColorExtrinsics = last_df.visibleCameraPoseInDepthCoordinateFrame(); // c_M_d
1067 
1068  const bool swap_rb = false;
1069  unsigned int nb_color_pixel = 3; // RGB image.
1070  const uint8_t *p_color_frame = static_cast<const uint8_t *>(last_cf.rgbData());
1071 
1072  const bool registered_streams = last_df.isRegisteredTo(last_cf);
1073 
1074  // Multi-threading if OpenMP
1075  // Concurrent writes at different locations are safe
1076 #pragma omp parallel for schedule(dynamic)
1077  for (int i = 0; i < depth_height; i++) {
1078  auto depth_pixel_index = i * depth_width;
1079 
1080  for (int j = 0; j < depth_width; j++, depth_pixel_index++) {
1081  if (std::isnan(p_depth_frame[depth_pixel_index])) {
1082  pointcloud->points[(size_t)depth_pixel_index].x = m_invalidDepthValue;
1083  pointcloud->points[(size_t)depth_pixel_index].y = m_invalidDepthValue;
1084  pointcloud->points[(size_t)depth_pixel_index].z = m_invalidDepthValue;
1085 
1086  // For out of bounds color data, default to black.
1087 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1088  unsigned int r = 0, g = 0, b = 0;
1089  uint32_t rgb = (static_cast<uint32_t>(r) << 16 | static_cast<uint32_t>(g) << 8 | static_cast<uint32_t>(b));
1090 
1091  pointcloud->points[(size_t)depth_pixel_index].rgb = *reinterpret_cast<float *>(&rgb);
1092 #else
1093  pointcloud->points[(size_t)depth_pixel_index].r = (uint8_t)0;
1094  pointcloud->points[(size_t)depth_pixel_index].g = (uint8_t)0;
1095  pointcloud->points[(size_t)depth_pixel_index].b = (uint8_t)0;
1096 #endif
1097  continue;
1098  }
1099 
1100  // Get the depth value of the current pixel
1101  auto pixels_distance = p_depth_frame[depth_pixel_index];
1102 
1103  ST::Vector3f depth_point_3D = last_df.unprojectPoint(i, j);
1104 
1105  if (pixels_distance > m_maxZ)
1106  depth_point_3D.x = depth_point_3D.y = depth_point_3D.z = m_invalidDepthValue;
1107 
1108  pointcloud->points[(size_t)depth_pixel_index].x = depth_point_3D.x * 0.001;
1109  pointcloud->points[(size_t)depth_pixel_index].y = depth_point_3D.y * 0.001;
1110  pointcloud->points[(size_t)depth_pixel_index].z = depth_point_3D.z * 0.001;
1111 
1112  if (!registered_streams) {
1113 
1114  ST::Vector3f color_point_3D = depth2ColorExtrinsics * depth_point_3D;
1115 
1116  ST::Vector2f color_pixel;
1117  if (color_point_3D.z != 0) {
1118  double x, y, pixel_x, pixel_y;
1119  x = static_cast<double>(color_point_3D.y / color_point_3D.z);
1120  y = static_cast<double>(color_point_3D.x / color_point_3D.z);
1122  color_pixel.x = pixel_x;
1123  color_pixel.y = pixel_y;
1124  }
1125 
1126  if (color_pixel.y < 0 || color_pixel.y >= color_height || color_pixel.x < 0 || color_pixel.x >= color_width) {
1127  // For out of bounds color data, default to a shade of blue in order to
1128  // visually distinguish holes.
1129 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1130  unsigned int r = 0, g = 0, b = 0;
1131  uint32_t rgb = (static_cast<uint32_t>(r) << 16 | static_cast<uint32_t>(g) << 8 | static_cast<uint32_t>(b));
1132 
1133  pointcloud->points[(size_t)depth_pixel_index].rgb = *reinterpret_cast<float *>(&rgb);
1134 #else
1135  pointcloud->points[(size_t)depth_pixel_index].r = (uint8_t)0;
1136  pointcloud->points[(size_t)depth_pixel_index].g = (uint8_t)0;
1137  pointcloud->points[(size_t)depth_pixel_index].b = (uint8_t)0;
1138 #endif
1139  }
1140  else {
1141  unsigned int i_ = (unsigned int)color_pixel.x;
1142  unsigned int j_ = (unsigned int)color_pixel.y;
1143 
1144 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1145  uint32_t rgb = 0;
1146  if (swap_rb) {
1147  rgb =
1148  (static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel]) |
1149  static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1]) << 8 |
1150  static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2])
1151  << 16);
1152  }
1153  else {
1154  rgb =
1155  (static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel]) << 16 |
1156  static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1]) << 8 |
1157  static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2]));
1158  }
1159 
1160  pointcloud->points[(size_t)(i * depth_width + j)].rgb = *reinterpret_cast<float *>(&rgb);
1161 #else
1162  if (swap_rb) {
1163  pointcloud->points[(size_t)depth_pixel_index].b =
1164  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel];
1165  pointcloud->points[(size_t)depth_pixel_index].g =
1166  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1];
1167  pointcloud->points[(size_t)depth_pixel_index].r =
1168  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2];
1169  }
1170  else {
1171  pointcloud->points[(size_t)depth_pixel_index].r =
1172  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel];
1173  pointcloud->points[(size_t)depth_pixel_index].g =
1174  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1];
1175  pointcloud->points[(size_t)depth_pixel_index].b =
1176  p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2];
1177  }
1178 #endif
1179  }
1180  }
1181  else {
1182 #if PCL_VERSION_COMPARE(<, 1, 1, 0)
1183  uint32_t rgb = 0;
1184  if (swap_rb) {
1185  rgb = (static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel]) |
1186  static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1]) << 8 |
1187  static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2]) << 16);
1188  }
1189  else {
1190  rgb = (static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel]) << 16 |
1191  static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1]) << 8 |
1192  static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2]));
1193  }
1194 
1195  pointcloud->points[(size_t)(i * depth_width + j)].rgb = *reinterpret_cast<float *>(&rgb);
1196 #else
1197  if (swap_rb) {
1198  pointcloud->points[(size_t)depth_pixel_index].b =
1199  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel];
1200  pointcloud->points[(size_t)depth_pixel_index].g =
1201  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1];
1202  pointcloud->points[(size_t)depth_pixel_index].r =
1203  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2];
1204  }
1205  else {
1206  pointcloud->points[(size_t)depth_pixel_index].r =
1207  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel];
1208  pointcloud->points[(size_t)depth_pixel_index].g =
1209  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1];
1210  pointcloud->points[(size_t)depth_pixel_index].b =
1211  p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2];
1212  }
1213 #endif
1214  }
1215  }
1216  }
1217 }
1218 
1219 #endif
1220 END_VISP_NAMESPACE
1221 #endif
Generic class defining intrinsic camera parameters.
@ perspectiveProjWithDistortion
Perspective projection with distortion model.
@ perspectiveProjWithoutDistortion
Perspective projection without distortion model.
Implementation of column vector and the associated operations.
Definition: vpColVector.h:191
void resize(unsigned int i, bool flagNullify=true)
Definition: vpColVector.h:1143
Implementation of an homogeneous matrix and operations on such kind of matrices.
static void GreyToRGBa(unsigned char *grey, unsigned char *rgba, unsigned int width, unsigned int height)
static void RGBToRGBa(unsigned char *rgb, unsigned char *rgba, unsigned int size)
Type * bitmap
points toward the bitmap
Definition: vpImage.h:135
static void convertPoint(const vpCameraParameters &cam, const double &x, const double &y, double &u, double &v)
vpPoint unprojectPoint(int row, int col)
void getIMUData(vpColVector *imu_vel, vpColVector *imu_acc, double *ts=nullptr)
unsigned int getHeight(vpOccipitalStructureStream stream_type)
vpCameraParameters getCameraParameters(const vpOccipitalStructureStream stream_type, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithoutDistortion)
void acquire(vpImage< unsigned char > &gray, bool undistorted=false, double *ts=nullptr)
vpCameraParameters m_visible_camera_parameters
vpCameraParameters m_depth_camera_parameters
void saveDepthImageAsPointCloudMesh(std::string &filename)
float getDepth(int x, int y)
ST::Intrinsics getIntrinsics(const vpOccipitalStructureStream stream_type) const
void getColoredPointcloud(pcl::PointCloud< pcl::PointXYZRGB >::Ptr &pointcloud)
ST::CaptureSession m_captureSession
void getPointcloud(std::vector< vpColVector > &pointcloud)
unsigned int getWidth(vpOccipitalStructureStream stream_type)
ST::CaptureSessionSettings m_captureSessionSettings
@ infrared
Infrared stream.
vpHomogeneousMatrix getTransform(const vpOccipitalStructureStream from, const vpOccipitalStructureStream to)
void getIMUAcceleration(vpColVector *imu_acc, double *ts)
void getIMUVelocity(vpColVector *imu_vel, double *ts)
bool open(const ST::CaptureSessionSettings &settings)
Class that defines a 3D point in the object frame and allows forward projection of a 3D point in the ...
Definition: vpPoint.h:79