Visual Servoing Platform version 3.5.0 under development (2022-02-15)
vpOccipitalStructure.cpp
/****************************************************************************
 *
 * ViSP, open source Visual Servoing Platform software.
 * Copyright (C) 2005 - 2019 by Inria. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact Inria about acquiring a ViSP Professional
 * Edition License.
 *
 * See http://visp.inria.fr for more information.
 *
 * This software was developed at:
 * Inria Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 *
 * If you have questions regarding the use of this file, please contact
 * Inria at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Description:
 * libStructure interface.
 *
 *****************************************************************************/

#include <visp3/core/vpConfig.h>

#if defined(VISP_HAVE_OCCIPITAL_STRUCTURE) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
#include <iomanip>
#include <map>
#include <set>
#include <cstring>
#include <thread>
#include <functional>

#include <ST/Utilities.h>

#include <visp3/core/vpImageConvert.h>
#include <visp3/core/vpMeterPixelConversion.h>
#include <visp3/core/vpPoint.h>
#include <visp3/sensor/vpOccipitalStructure.h>

#define MANUAL_POINTCLOUD 1

vpOccipitalStructure::vpOccipitalStructure() : m_invalidDepthValue(0.0f), m_maxZ(15000.0f)
{
}

vpOccipitalStructure::~vpOccipitalStructure() { close(); }

void vpOccipitalStructure::acquire(vpImage<unsigned char> &gray, bool undistorted, double *ts)
{
  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
  m_delegate.cv_sampleLock.wait(u);

  if(m_delegate.m_visibleFrame.isValid())
  {
    if(!undistorted)
      memcpy(gray.bitmap, m_delegate.m_visibleFrame.yData(), m_delegate.m_visibleFrame.ySize());
    else
      memcpy(gray.bitmap, m_delegate.m_visibleFrame.undistorted().yData(), m_delegate.m_visibleFrame.ySize());

    if(ts != NULL)
      *ts = m_delegate.m_visibleFrame.arrivalTimestamp();
  }

  u.unlock();
}
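
/*
 * Usage sketch (not part of the original file): grabbing the visible stream
 * as a grayscale image. Assumes `sc` was successfully open()'ed with the
 * visible stream enabled; see open() further below.
 *
 *   vpOccipitalStructure sc;
 *   // ... sc.open(settings); ...
 *   vpImage<unsigned char> I(sc.getHeight(vpOccipitalStructureStream::visible),
 *                            sc.getWidth(vpOccipitalStructureStream::visible));
 *   double ts;
 *   sc.acquire(I, false, &ts); // blocks until the delegate signals a new sample
 */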

void vpOccipitalStructure::acquire(vpImage<vpRGBa> &rgb, bool undistorted, double *ts)
{
  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
  m_delegate.cv_sampleLock.wait(u);

  if(m_delegate.m_visibleFrame.isValid())
  {
    // Detecting if it's a Color Structure Core device.
    if(m_delegate.m_cameraType == ST::StructureCoreCameraType::Color)
      vpImageConvert::RGBToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.rgbData()),
                                reinterpret_cast<unsigned char *>(rgb.bitmap),
                                m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());

    else // If it's a monochrome Structure Core device.
    {
      if(!undistorted)
        vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.yData()),
                                   reinterpret_cast<unsigned char *>(rgb.bitmap),
                                   m_delegate.m_visibleFrame.width(), m_delegate.m_visibleFrame.height());
      else
        vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.undistorted().yData()),
                                   reinterpret_cast<unsigned char *>(rgb.bitmap),
                                   m_delegate.m_visibleFrame.width(), m_delegate.m_visibleFrame.height());
    }

    if(ts != NULL)
      *ts = m_delegate.m_visibleFrame.arrivalTimestamp();
  }

  u.unlock();
}

void vpOccipitalStructure::acquire(vpImage<vpRGBa> *rgb, vpImage<vpRGBa> *depth, vpColVector *acceleration_data,
                                   vpColVector *gyroscope_data, bool undistorted, double *ts)
{
  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
  m_delegate.cv_sampleLock.wait(u);

  if(rgb != NULL && m_delegate.m_visibleFrame.isValid())
  {
    // Detecting if it's a Color Structure Core device.
    if(m_delegate.m_cameraType == ST::StructureCoreCameraType::Color)
      vpImageConvert::RGBToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.rgbData()),
                                reinterpret_cast<unsigned char *>(rgb->bitmap),
                                m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());

    else // If it's a monochrome Structure Core device.
    {
      if(!undistorted)
        vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.yData()),
                                   reinterpret_cast<unsigned char *>(rgb->bitmap),
                                   m_delegate.m_visibleFrame.width(), m_delegate.m_visibleFrame.height());
      else
        vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.undistorted().yData()),
                                   reinterpret_cast<unsigned char *>(rgb->bitmap),
                                   m_delegate.m_visibleFrame.width(), m_delegate.m_visibleFrame.height());
    }

    if(ts != NULL)
      *ts = m_delegate.m_visibleFrame.arrivalTimestamp();
  }

  if(depth != NULL && m_delegate.m_depthFrame.isValid())
    memcpy((unsigned char *)depth->bitmap, m_delegate.m_depthFrame.convertDepthToRgba(),
           m_delegate.m_depthFrame.width() * m_delegate.m_depthFrame.height() * 4);

  if(acceleration_data != NULL)
  {
    acceleration_data->resize(3, false);
    ST::Acceleration accel = m_delegate.m_accelerometerEvent.acceleration();
    (*acceleration_data)[0] = accel.x; (*acceleration_data)[1] = accel.y; (*acceleration_data)[2] = accel.z;
  }

  if(gyroscope_data != NULL)
  {
    gyroscope_data->resize(3, false);
    ST::RotationRate gyro_data = m_delegate.m_gyroscopeEvent.rotationRate();
    (*gyroscope_data)[0] = gyro_data.x; (*gyroscope_data)[1] = gyro_data.y; (*gyroscope_data)[2] = gyro_data.z;
  }

  if(ts != NULL) // By default, frames are synchronized.
    *ts = m_delegate.m_visibleFrame.arrivalTimestamp();

  u.unlock();
}

void vpOccipitalStructure::acquire(vpImage<unsigned char> *gray, vpImage<vpRGBa> *depth,
                                   vpColVector *acceleration_data, vpColVector *gyroscope_data,
                                   bool undistorted, double *ts)
{
  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
  m_delegate.cv_sampleLock.wait(u);

  if(gray != NULL && m_delegate.m_visibleFrame.isValid())
  {
    if(!undistorted)
      memcpy(gray->bitmap, m_delegate.m_visibleFrame.yData(), m_delegate.m_visibleFrame.ySize());
    else
      memcpy(gray->bitmap, m_delegate.m_visibleFrame.undistorted().yData(), m_delegate.m_visibleFrame.ySize());

    if(ts != NULL)
      *ts = m_delegate.m_visibleFrame.arrivalTimestamp();
  }

  if(depth != NULL && m_delegate.m_depthFrame.isValid())
    memcpy((unsigned char *)depth->bitmap, m_delegate.m_depthFrame.convertDepthToRgba(),
           m_delegate.m_depthFrame.width() * m_delegate.m_depthFrame.height() * 4);

  if(acceleration_data != NULL)
  {
    acceleration_data->resize(3, false);
    ST::Acceleration accel = m_delegate.m_accelerometerEvent.acceleration();
    (*acceleration_data)[0] = accel.x; (*acceleration_data)[1] = accel.y; (*acceleration_data)[2] = accel.z;
  }

  if(gyroscope_data != NULL)
  {
    gyroscope_data->resize(3, false);
    ST::RotationRate gyro_data = m_delegate.m_gyroscopeEvent.rotationRate();
    (*gyroscope_data)[0] = gyro_data.x; (*gyroscope_data)[1] = gyro_data.y; (*gyroscope_data)[2] = gyro_data.z;
  }

  if(ts != NULL) // By default, frames are synchronized.
    *ts = m_delegate.m_visibleFrame.arrivalTimestamp();

  u.unlock();
}

void vpOccipitalStructure::acquire(unsigned char *const data_image, unsigned char *const data_depth,
                                   std::vector<vpColVector> *const data_pointCloud,
                                   unsigned char *const data_infrared, vpColVector *acceleration_data,
                                   vpColVector *gyroscope_data, bool undistorted, double *ts)
{
  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
  m_delegate.cv_sampleLock.wait(u);

  if(m_delegate.m_depthFrame.isValid() && data_depth != NULL)
    memcpy(data_depth, m_delegate.m_depthFrame.depthInMillimeters(),
           m_delegate.m_depthFrame.width() * m_delegate.m_depthFrame.height() * sizeof(float));

  if(m_delegate.m_visibleFrame.isValid() && data_image != NULL)
  {
    if(m_delegate.m_cameraType == ST::StructureCoreCameraType::Color)
      vpImageConvert::RGBToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.rgbData()),
                                reinterpret_cast<unsigned char *>(data_image),
                                m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());

    else // If it's a monochrome Structure Core device.
    {
      if(!undistorted)
        vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.yData()),
                                   reinterpret_cast<unsigned char *>(data_image),
                                   m_delegate.m_visibleFrame.width(), m_delegate.m_visibleFrame.height());
      else
        vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.undistorted().yData()),
                                   reinterpret_cast<unsigned char *>(data_image),
                                   m_delegate.m_visibleFrame.width(), m_delegate.m_visibleFrame.height());
    }
  }

  if(m_delegate.m_infraredFrame.isValid() && data_infrared != NULL)
    memcpy(data_infrared, m_delegate.m_infraredFrame.data(),
           m_delegate.m_infraredFrame.width() * m_delegate.m_infraredFrame.height() * sizeof(uint16_t));

  if(data_pointCloud != NULL)
    getPointcloud(*data_pointCloud);

  if(acceleration_data != NULL)
  {
    acceleration_data->resize(3, false);
    ST::Acceleration accel = m_delegate.m_accelerometerEvent.acceleration();
    (*acceleration_data)[0] = accel.x; (*acceleration_data)[1] = accel.y; (*acceleration_data)[2] = accel.z;
  }

  if(gyroscope_data != NULL)
  {
    gyroscope_data->resize(3, false);
    ST::RotationRate gyro_data = m_delegate.m_gyroscopeEvent.rotationRate();
    (*gyroscope_data)[0] = gyro_data.x; (*gyroscope_data)[1] = gyro_data.y; (*gyroscope_data)[2] = gyro_data.z;
  }

  if(ts != NULL) // By default, frames are synchronized.
    *ts = m_delegate.m_visibleFrame.arrivalTimestamp();

  u.unlock();
}
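
/*
 * Buffer-size note for the raw overload above: the caller owns the buffers,
 * so (following the memcpy calls) they must hold at least
 *   data_depth    : width * height * sizeof(float)    // depth in millimeters
 *   data_image    : width * height * 4                // RGBa output
 *   data_infrared : width * height * sizeof(uint16_t)
 * where width and height are queried per stream with getWidth()/getHeight().
 */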

#ifdef VISP_HAVE_PCL

void vpOccipitalStructure::acquire(unsigned char *const data_image, unsigned char *const data_depth,
                                   std::vector<vpColVector> *const data_pointCloud,
                                   pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud,
                                   unsigned char *const data_infrared, vpColVector *acceleration_data,
                                   vpColVector *gyroscope_data, bool undistorted, double *ts)
{
  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
  m_delegate.cv_sampleLock.wait(u);

  if(m_delegate.m_visibleFrame.isValid())
  {
    if(m_delegate.m_cameraType == ST::StructureCoreCameraType::Color)
      vpImageConvert::RGBToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.rgbData()),
                                reinterpret_cast<unsigned char *>(data_image),
                                m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());

    else // If it's a monochrome Structure Core device.
    {
      if(!undistorted)
        vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.yData()),
                                   reinterpret_cast<unsigned char *>(data_image),
                                   m_delegate.m_visibleFrame.width(), m_delegate.m_visibleFrame.height());
      else
        vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.undistorted().yData()),
                                   reinterpret_cast<unsigned char *>(data_image),
                                   m_delegate.m_visibleFrame.width(), m_delegate.m_visibleFrame.height());
    }
  }

  if(m_delegate.m_depthFrame.isValid() && data_depth != NULL)
    memcpy(data_depth, m_delegate.m_depthFrame.depthInMillimeters(),
           m_delegate.m_depthFrame.width() * m_delegate.m_depthFrame.height() * sizeof(float));

  if(m_delegate.m_infraredFrame.isValid() && data_infrared != NULL)
    memcpy(data_infrared, m_delegate.m_infraredFrame.data(),
           m_delegate.m_infraredFrame.width() * m_delegate.m_infraredFrame.height() * sizeof(uint16_t));

  if(data_pointCloud != NULL)
    getPointcloud(*data_pointCloud);

  if(m_delegate.m_depthFrame.isValid() && pointcloud != NULL)
    getPointcloud(pointcloud);

  if(acceleration_data != NULL)
  {
    acceleration_data->resize(3, false);
    ST::Acceleration accel = m_delegate.m_accelerometerEvent.acceleration();
    (*acceleration_data)[0] = accel.x; (*acceleration_data)[1] = accel.y; (*acceleration_data)[2] = accel.z;
  }

  if(gyroscope_data != NULL)
  {
    gyroscope_data->resize(3, false);
    ST::RotationRate gyro_data = m_delegate.m_gyroscopeEvent.rotationRate();
    (*gyroscope_data)[0] = gyro_data.x; (*gyroscope_data)[1] = gyro_data.y; (*gyroscope_data)[2] = gyro_data.z;
  }

  if(ts != NULL) // By default, frames are synchronized.
    *ts = m_delegate.m_visibleFrame.arrivalTimestamp();

  u.unlock();
}

void vpOccipitalStructure::acquire(unsigned char *const data_image, unsigned char *const data_depth,
                                   std::vector<vpColVector> *const data_pointCloud,
                                   pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud,
                                   unsigned char *const data_infrared, vpColVector *acceleration_data,
                                   vpColVector *gyroscope_data, bool undistorted, double *ts)
{
  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
  m_delegate.cv_sampleLock.wait(u);

  if(m_delegate.m_depthFrame.isValid() && data_depth != NULL)
    memcpy(data_depth, m_delegate.m_depthFrame.depthInMillimeters(),
           m_delegate.m_depthFrame.width() * m_delegate.m_depthFrame.height() * sizeof(float));

  if(m_delegate.m_visibleFrame.isValid() && data_image != NULL)
  {
    if(m_delegate.m_cameraType == ST::StructureCoreCameraType::Color)
      vpImageConvert::RGBToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.rgbData()),
                                reinterpret_cast<unsigned char *>(data_image),
                                m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());

    else // If it's a monochrome Structure Core device.
    {
      if(!undistorted)
        vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.yData()),
                                   reinterpret_cast<unsigned char *>(data_image),
                                   m_delegate.m_visibleFrame.width(), m_delegate.m_visibleFrame.height());
      else
        vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.undistorted().yData()),
                                   reinterpret_cast<unsigned char *>(data_image),
                                   m_delegate.m_visibleFrame.width(), m_delegate.m_visibleFrame.height());
    }
  }

  if(m_delegate.m_infraredFrame.isValid() && data_infrared != NULL)
    memcpy(data_infrared, m_delegate.m_infraredFrame.data(),
           m_delegate.m_infraredFrame.width() * m_delegate.m_infraredFrame.height() * sizeof(uint16_t));

  if(m_delegate.m_depthFrame.isValid() && data_pointCloud != NULL)
    getPointcloud(*data_pointCloud);

  if(m_delegate.m_depthFrame.isValid() && m_delegate.m_visibleFrame.isValid() && pointcloud != NULL)
    getColoredPointcloud(pointcloud);

  if(acceleration_data != NULL)
  {
    acceleration_data->resize(3, false);
    ST::Acceleration accel = m_delegate.m_accelerometerEvent.acceleration();
    (*acceleration_data)[0] = accel.x; (*acceleration_data)[1] = accel.y; (*acceleration_data)[2] = accel.z;
  }

  if(gyroscope_data != NULL)
  {
    gyroscope_data->resize(3, false);
    ST::RotationRate gyro_data = m_delegate.m_gyroscopeEvent.rotationRate();
    (*gyroscope_data)[0] = gyro_data.x; (*gyroscope_data)[1] = gyro_data.y; (*gyroscope_data)[2] = gyro_data.z;
  }

  if(ts != NULL) // By default, frames are synchronized.
    *ts = m_delegate.m_visibleFrame.arrivalTimestamp();

  u.unlock();
}
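
/*
 * PCL usage sketch (only when ViSP is built with PCL), filling a colored
 * cloud and disabling every other output; assumes the default values of the
 * trailing undistorted/ts parameters declared in the header.
 *
 *   pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZRGB>);
 *   sc.acquire(NULL, NULL, NULL, cloud, NULL, NULL, NULL);
 */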

#endif

void vpOccipitalStructure::getIMUVelocity(vpColVector *imu_vel, double *ts)
{
  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
  m_delegate.cv_sampleLock.wait(u);

  if(imu_vel != NULL)
  {
    imu_vel->resize(3, false);
    ST::RotationRate imu_rotationRate = m_delegate.m_gyroscopeEvent.rotationRate();
    (*imu_vel)[0] = imu_rotationRate.x; (*imu_vel)[1] = imu_rotationRate.y; (*imu_vel)[2] = imu_rotationRate.z;
  }

  if(ts != NULL)
    *ts = m_delegate.m_gyroscopeEvent.arrivalTimestamp();

  u.unlock();
}

void vpOccipitalStructure::getIMUAcceleration(vpColVector *imu_acc, double *ts)
{
  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
  m_delegate.cv_sampleLock.wait(u);

  if(imu_acc != NULL)
  {
    imu_acc->resize(3, false);
    ST::Acceleration imu_acceleration = m_delegate.m_accelerometerEvent.acceleration();
    (*imu_acc)[0] = imu_acceleration.x; (*imu_acc)[1] = imu_acceleration.y; (*imu_acc)[2] = imu_acceleration.z;
  }

  if(ts != NULL)
    *ts = m_delegate.m_accelerometerEvent.arrivalTimestamp();

  u.unlock();
}

void vpOccipitalStructure::getIMUData(vpColVector *imu_vel, vpColVector *imu_acc, double *ts)
{
  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
  m_delegate.cv_sampleLock.wait(u);
  double imu_vel_timestamp = 0., imu_acc_timestamp = 0.; // Initialized in case one of the two outputs is not requested.

  if(imu_vel != NULL)
  {
    imu_vel->resize(3, false);
    ST::RotationRate imu_rotationRate = m_delegate.m_gyroscopeEvent.rotationRate();
    (*imu_vel)[0] = imu_rotationRate.x; (*imu_vel)[1] = imu_rotationRate.y; (*imu_vel)[2] = imu_rotationRate.z;
    imu_vel_timestamp = m_delegate.m_gyroscopeEvent.arrivalTimestamp(); // Relative to an unspecified epoch (see documentation).
  }

  if(imu_acc != NULL)
  {
    imu_acc->resize(3, false);
    ST::Acceleration imu_acceleration = m_delegate.m_accelerometerEvent.acceleration();
    (*imu_acc)[0] = imu_acceleration.x; (*imu_acc)[1] = imu_acceleration.y; (*imu_acc)[2] = imu_acceleration.z;
    imu_acc_timestamp = m_delegate.m_accelerometerEvent.arrivalTimestamp();
  }

  if(ts != NULL)
    *ts = std::max(imu_vel_timestamp, imu_acc_timestamp);

  u.unlock();
}
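
/*
 * IMU sketch: the three getters above block on the same condition variable
 * as image acquisition, and assume the session was opened with the gyroscope
 * and accelerometer streams enabled in the ST::CaptureSessionSettings.
 *
 *   vpColVector imu_vel, imu_acc;
 *   double ts;
 *   sc.getIMUData(&imu_vel, &imu_acc, &ts); // latest rotation rate and acceleration
 */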

bool vpOccipitalStructure::open(const ST::CaptureSessionSettings &settings)
{
  if (m_init) {
    close();
  }

  m_captureSession.setDelegate(&m_delegate);

  if (!m_captureSession.startMonitoring(settings)) {
    std::cout << "Failed to initialize capture session!" << std::endl;
    return false;
  }

  // This will lock the open() function until the CaptureSession
  // receives the first frame or a timeout.
  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
  std::cv_status var = m_delegate.cv_sampleLock.wait_for(u, std::chrono::seconds(20)); // Make sure a device is connected.

  // In case the device is not connected, open() should return false.
  if(var == std::cv_status::timeout)
  {
    m_init = false;
    return m_init;
  }

  m_init = true;
  m_captureSessionSettings = settings;
  return m_init;
}
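
/*
 * Opening sketch: the settings fields below follow the Occipital Structure
 * SDK and may differ between SDK versions.
 *
 *   vpOccipitalStructure sc;
 *   ST::CaptureSessionSettings settings;
 *   settings.source = ST::CaptureSessionSourceId::StructureCore;
 *   settings.structureCore.visibleEnabled = true;
 *   settings.structureCore.depthEnabled = true;
 *   if(!sc.open(settings))
 *     std::cout << "No device detected within the 20 s timeout." << std::endl;
 */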

void vpOccipitalStructure::close()
{
  if (m_init)
  {
    m_captureSession.stopStreaming();
    m_init = false;
  }
}

unsigned int vpOccipitalStructure::getWidth(vpOccipitalStructureStream stream_type)
{
  switch(stream_type)
  {
  case vpOccipitalStructureStream::visible :
    if(m_delegate.m_visibleFrame.isValid())
      return m_delegate.m_visibleFrame.width();
    break;

  case vpOccipitalStructureStream::depth :
    if(m_delegate.m_depthFrame.isValid())
      return m_delegate.m_depthFrame.width();
    break;

  case vpOccipitalStructureStream::infrared :
    if(m_delegate.m_infraredFrame.isValid())
      return m_delegate.m_infraredFrame.width();
    break;

  default:
    break;
  }

  return 0;
}

unsigned int vpOccipitalStructure::getHeight(vpOccipitalStructureStream stream_type)
{
  switch(stream_type)
  {
  case vpOccipitalStructureStream::visible :
    if(m_delegate.m_visibleFrame.isValid())
      return m_delegate.m_visibleFrame.height();
    break;

  case vpOccipitalStructureStream::depth :
    if(m_delegate.m_depthFrame.isValid())
      return m_delegate.m_depthFrame.height();
    break;

  case vpOccipitalStructureStream::infrared :
    if(m_delegate.m_infraredFrame.isValid())
      return m_delegate.m_infraredFrame.height();
    break;

  default:
    break;
  }

  return 0;
}

float vpOccipitalStructure::getDepth(int x, int y)
{
  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
  m_delegate.cv_sampleLock.wait(u);

  // The mutex is released when `u` goes out of scope, so no explicit
  // unlock is needed after the return statements.
  if(m_delegate.m_depthFrame.isValid())
    return m_delegate.m_depthFrame(x, y);
  else
    return 0.f;
}

// Return vpColVector
vpPoint vpOccipitalStructure::unprojectPoint(int row, int col)
{
  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
  m_delegate.cv_sampleLock.wait(u);

  if(m_delegate.m_depthFrame.isValid())
  {
    ST::Vector3f point_3D = m_delegate.m_depthFrame.unprojectPoint(row, col);
    return vpPoint(point_3D.x, point_3D.y, point_3D.z);
  }
  else // Should be returning an invalid vpPoint.
    return vpPoint(0., 0., 0.);
}
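
/*
 * Point-query sketch: getDepth() returns the raw depth value at a pixel and
 * unprojectPoint() the corresponding 3D point in the depth camera frame;
 * each call waits for the next sample.
 *
 *   float z = sc.getDepth(320, 240);         // pixel (x = 320, y = 240)
 *   vpPoint P = sc.unprojectPoint(240, 320); // same pixel as (row, col)
 */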

vpHomogeneousMatrix vpOccipitalStructure::getTransform(const vpOccipitalStructureStream from,
                                                       const vpOccipitalStructureStream to)
{
  vpHomogeneousMatrix result;

  switch(from)
  {
  case vpOccipitalStructure::depth :
    if(to == vpOccipitalStructure::visible)
    {
      ST::Matrix4 v_M_d = m_delegate.m_depthFrame.visibleCameraPoseInDepthCoordinateFrame();

      result[0][0] = v_M_d.m00; result[0][1] = v_M_d.m10; result[0][2] = v_M_d.m20; result[0][3] = v_M_d.m30;
      result[1][0] = v_M_d.m01; result[1][1] = v_M_d.m11; result[1][2] = v_M_d.m21; result[1][3] = v_M_d.m31;
      result[2][0] = v_M_d.m02; result[2][1] = v_M_d.m12; result[2][2] = v_M_d.m22; result[2][3] = v_M_d.m32;
    }

    if(to == vpOccipitalStructure::imu)
    {
      ST::Matrix4 imu_M_d = m_captureSession.getImuFromDepthExtrinsics().inversed();

      result[0][0] = imu_M_d.m00; result[0][1] = imu_M_d.m10; result[0][2] = imu_M_d.m20; result[0][3] = imu_M_d.m30;
      result[1][0] = imu_M_d.m01; result[1][1] = imu_M_d.m11; result[1][2] = imu_M_d.m21; result[1][3] = imu_M_d.m31;
      result[2][0] = imu_M_d.m02; result[2][1] = imu_M_d.m12; result[2][2] = imu_M_d.m22; result[2][3] = imu_M_d.m32;
    }
    break;

  case vpOccipitalStructure::visible :
    if(to == vpOccipitalStructure::depth)
    {
      ST::Matrix4 d_M_v = m_delegate.m_depthFrame.visibleCameraPoseInDepthCoordinateFrame().inversed();

      result[0][0] = d_M_v.m00; result[0][1] = d_M_v.m10; result[0][2] = d_M_v.m20; result[0][3] = d_M_v.m30;
      result[1][0] = d_M_v.m01; result[1][1] = d_M_v.m11; result[1][2] = d_M_v.m21; result[1][3] = d_M_v.m31;
      result[2][0] = d_M_v.m02; result[2][1] = d_M_v.m12; result[2][2] = d_M_v.m22; result[2][3] = d_M_v.m32;
    }

    if(to == vpOccipitalStructure::imu)
    {
      ST::Matrix4 imu_M_v = m_captureSession.getImuFromVisibleExtrinsics().inversed();

      result[0][0] = imu_M_v.m00; result[0][1] = imu_M_v.m10; result[0][2] = imu_M_v.m20; result[0][3] = imu_M_v.m30;
      result[1][0] = imu_M_v.m01; result[1][1] = imu_M_v.m11; result[1][2] = imu_M_v.m21; result[1][3] = imu_M_v.m31;
      result[2][0] = imu_M_v.m02; result[2][1] = imu_M_v.m12; result[2][2] = imu_M_v.m22; result[2][3] = imu_M_v.m32;
    }
    break;

  case vpOccipitalStructure::infrared :
    break;

  case vpOccipitalStructure::imu :
    if(to == vpOccipitalStructure::depth)
    {
      ST::Matrix4 d_M_imu = m_captureSession.getImuFromDepthExtrinsics();

      result[0][0] = d_M_imu.m00; result[0][1] = d_M_imu.m10; result[0][2] = d_M_imu.m20; result[0][3] = d_M_imu.m30;
      result[1][0] = d_M_imu.m01; result[1][1] = d_M_imu.m11; result[1][2] = d_M_imu.m21; result[1][3] = d_M_imu.m31;
      result[2][0] = d_M_imu.m02; result[2][1] = d_M_imu.m12; result[2][2] = d_M_imu.m22; result[2][3] = d_M_imu.m32;
    }

    if(to == vpOccipitalStructure::visible)
    {
      ST::Matrix4 v_M_imu = m_captureSession.getImuFromVisibleExtrinsics();

      result[0][0] = v_M_imu.m00; result[0][1] = v_M_imu.m10; result[0][2] = v_M_imu.m20; result[0][3] = v_M_imu.m30;
      result[1][0] = v_M_imu.m01; result[1][1] = v_M_imu.m11; result[1][2] = v_M_imu.m21; result[1][3] = v_M_imu.m31;
      result[2][0] = v_M_imu.m02; result[2][1] = v_M_imu.m12; result[2][2] = v_M_imu.m22; result[2][3] = v_M_imu.m32;
    }
    break;

  default:
    break;
  }

  return result;
}
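
/*
 * Extrinsics sketch: express the depth frame in the visible (color) camera
 * frame, e.g. to transfer a 3D point between the two sensors.
 *
 *   vpHomogeneousMatrix v_M_d =
 *       sc.getTransform(vpOccipitalStructure::depth, vpOccipitalStructure::visible);
 */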

ST::Intrinsics vpOccipitalStructure::getIntrinsics(const vpOccipitalStructureStream stream_type) const
{
  ST::Intrinsics result;

  switch(stream_type)
  {
  case vpOccipitalStructureStream::visible :
    result = m_delegate.m_visibleFrame.intrinsics();
    break;

  case vpOccipitalStructureStream::depth :
    result = m_delegate.m_depthFrame.intrinsics();
    break;

  case vpOccipitalStructureStream::infrared :
    result = m_delegate.m_infraredFrame.intrinsics();
    break;

  default:
    // Deal with exceptions.
    break;
  }

  return result;
}

void vpOccipitalStructure::saveDepthImageAsPointCloudMesh(std::string &filename)
{
  m_delegate.m_depthFrame.saveImageAsPointCloudMesh(filename.c_str());
}

vpCameraParameters vpOccipitalStructure::getCameraParameters(const vpOccipitalStructureStream stream_type,
                                                             vpCameraParameters::vpCameraParametersProjType type)
{
  ST::Intrinsics cam_intrinsics;
  switch(stream_type)
  {
  case vpOccipitalStructureStream::visible :
    cam_intrinsics = m_delegate.m_visibleFrame.intrinsics();
    if(type == vpCameraParameters::perspectiveProjWithDistortion)
      m_visible_camera_parameters = vpCameraParameters(cam_intrinsics.fx, cam_intrinsics.fy, cam_intrinsics.cx,
                                                       cam_intrinsics.cy, cam_intrinsics.k1, -cam_intrinsics.k1);

    if(type == vpCameraParameters::perspectiveProjWithoutDistortion)
      m_visible_camera_parameters =
          vpCameraParameters(cam_intrinsics.fx, cam_intrinsics.fy, cam_intrinsics.cx, cam_intrinsics.cy);

    return m_visible_camera_parameters;

  case vpOccipitalStructureStream::depth :
    cam_intrinsics = m_delegate.m_depthFrame.intrinsics();
    m_depth_camera_parameters =
        vpCameraParameters(cam_intrinsics.fx, cam_intrinsics.fy, cam_intrinsics.cx, cam_intrinsics.cy);

    return m_depth_camera_parameters;

  default: // Should throw an exception for not having this type of intrinsics.
    return vpCameraParameters();
  }
}
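
/*
 * Intrinsics sketch: retrieve the visible-stream parameters and project a
 * normalized image point (x, y) to pixel coordinates (u, v), mirroring what
 * getColoredPointcloud() does below.
 *
 *   vpCameraParameters cam = sc.getCameraParameters(vpOccipitalStructureStream::visible);
 *   double u, v;
 *   vpMeterPixelConversion::convertPoint(cam, x, y, u, v);
 */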

// Protected functions.
void vpOccipitalStructure::getPointcloud(std::vector<vpColVector> &pointcloud)
{
  ST::DepthFrame last_df = m_delegate.m_depthFrame;
  const int width = last_df.width();
  const int height = last_df.height();
  pointcloud.resize((size_t)(width * height));

  const float *p_depth_frame = reinterpret_cast<const float *>(last_df.depthInMillimeters());

  // Multi-threading if OpenMP
  // Concurrent writes at different locations are safe
#pragma omp parallel for schedule(dynamic)
  for (int i = 0; i < height; i++) {
    auto depth_pixel_index = i * width;

    for (int j = 0; j < width; j++, depth_pixel_index++) {
      if (std::isnan(p_depth_frame[depth_pixel_index])) {
        pointcloud[(size_t)depth_pixel_index].resize(4, false);
        pointcloud[(size_t)depth_pixel_index][0] = m_invalidDepthValue;
        pointcloud[(size_t)depth_pixel_index][1] = m_invalidDepthValue;
        pointcloud[(size_t)depth_pixel_index][2] = m_invalidDepthValue;
        pointcloud[(size_t)depth_pixel_index][3] = 1.0;
        continue;
      }

      // Get the depth value of the current pixel
      auto pixels_distance = p_depth_frame[depth_pixel_index];

      ST::Vector3f point_3D = last_df.unprojectPoint(i, j);

      if (pixels_distance > m_maxZ)
        point_3D.x = point_3D.y = point_3D.z = m_invalidDepthValue;

      pointcloud[(size_t)depth_pixel_index].resize(4, false);
      pointcloud[(size_t)depth_pixel_index][0] = point_3D.x * 0.001;
      pointcloud[(size_t)depth_pixel_index][1] = point_3D.y * 0.001;
      pointcloud[(size_t)depth_pixel_index][2] = point_3D.z * 0.001;
      pointcloud[(size_t)depth_pixel_index][3] = 1.0;
    }
  }
}
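
/*
 * Output-format note: each entry filled above is a homogeneous 4-vector
 * (X, Y, Z, 1) in meters, stored row-major over the depth image; invalid
 * pixels carry m_invalidDepthValue in X, Y and Z.
 */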

#ifdef VISP_HAVE_PCL
void vpOccipitalStructure::getPointcloud(pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud)
{
  ST::DepthFrame last_df = m_delegate.m_depthFrame;
  const int width = last_df.width();
  const int height = last_df.height();
  pointcloud->width = (uint32_t)width;
  pointcloud->height = (uint32_t)height;
  pointcloud->resize((size_t)(width * height));

#if MANUAL_POINTCLOUD // faster to compute manually when tested
  const float *p_depth_frame = reinterpret_cast<const float *>(last_df.depthInMillimeters());

  // Multi-threading if OpenMP
  // Concurrent writes at different locations are safe
#pragma omp parallel for schedule(dynamic)
  for (int i = 0; i < height; i++) {
    auto depth_pixel_index = i * width;

    for (int j = 0; j < width; j++, depth_pixel_index++) {
      if (p_depth_frame[depth_pixel_index] == 0) {
        pointcloud->points[(size_t)(depth_pixel_index)].x = m_invalidDepthValue;
        pointcloud->points[(size_t)(depth_pixel_index)].y = m_invalidDepthValue;
        pointcloud->points[(size_t)(depth_pixel_index)].z = m_invalidDepthValue;
        continue;
      }

      // Get the depth value of the current pixel
      auto pixels_distance = p_depth_frame[depth_pixel_index];

      // Get 3D coordinates in depth frame. (in mm)
      ST::Vector3f point_3D = last_df.unprojectPoint(i, j);

      if (pixels_distance > m_maxZ)
        point_3D.x = point_3D.y = point_3D.z = m_invalidDepthValue;

      pointcloud->points[(size_t)(depth_pixel_index)].x = point_3D.x * 0.001;
      pointcloud->points[(size_t)(depth_pixel_index)].y = point_3D.y * 0.001;
      pointcloud->points[(size_t)(depth_pixel_index)].z = point_3D.z * 0.001;
    }
  }
// #else
//   m_points = m_pointcloud.calculate(depth_frame);
//   auto vertices = m_points.get_vertices();
//
//   for (size_t i = 0; i < m_points.size(); i++) {
//     if (vertices[i].z <= 0.0f || vertices[i].z > m_maxZ) {
//       pointcloud->points[i].x = m_invalidDepthValue;
//       pointcloud->points[i].y = m_invalidDepthValue;
//       pointcloud->points[i].z = m_invalidDepthValue;
//     } else {
//       pointcloud->points[i].x = vertices[i].x;
//       pointcloud->points[i].y = vertices[i].y;
//       pointcloud->points[i].z = vertices[i].z;
//     }
//   }
#endif
}

void vpOccipitalStructure::getColoredPointcloud(pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud)
{
  ST::DepthFrame last_df = m_delegate.m_depthFrame;
  ST::ColorFrame last_cf = m_delegate.m_visibleFrame;

  const int depth_width = last_df.width();
  const int depth_height = last_df.height();
  pointcloud->width = static_cast<uint32_t>(depth_width);
  pointcloud->height = static_cast<uint32_t>(depth_height);
  pointcloud->resize(static_cast<uint32_t>(depth_width * depth_height));

  const int color_width = last_cf.width();
  const int color_height = last_cf.height();

  const float *p_depth_frame = reinterpret_cast<const float *>(last_df.depthInMillimeters());
  const ST::Matrix4 depth2ColorExtrinsics = last_df.visibleCameraPoseInDepthCoordinateFrame(); // c_M_d

  const bool swap_rb = false;
  unsigned int nb_color_pixel = 3; // RGB image.
  const uint8_t *p_color_frame = static_cast<const uint8_t *>(last_cf.rgbData());

  const bool registered_streams = last_df.isRegisteredTo(last_cf);

  // Multi-threading if OpenMP
  // Concurrent writes at different locations are safe
#pragma omp parallel for schedule(dynamic)
  for (int i = 0; i < depth_height; i++) {
    auto depth_pixel_index = i * depth_width;

    for (int j = 0; j < depth_width; j++, depth_pixel_index++) {
      if (std::isnan(p_depth_frame[depth_pixel_index])) {
        pointcloud->points[(size_t)depth_pixel_index].x = m_invalidDepthValue;
        pointcloud->points[(size_t)depth_pixel_index].y = m_invalidDepthValue;
        pointcloud->points[(size_t)depth_pixel_index].z = m_invalidDepthValue;

        // For invalid depth, default the color to black.
#if PCL_VERSION_COMPARE(<, 1, 1, 0)
        unsigned int r = 0, g = 0, b = 0;
        uint32_t rgb = (static_cast<uint32_t>(r) << 16 | static_cast<uint32_t>(g) << 8 | static_cast<uint32_t>(b));

        pointcloud->points[(size_t)depth_pixel_index].rgb = *reinterpret_cast<float *>(&rgb);
#else
        pointcloud->points[(size_t)depth_pixel_index].r = (uint8_t)0;
        pointcloud->points[(size_t)depth_pixel_index].g = (uint8_t)0;
        pointcloud->points[(size_t)depth_pixel_index].b = (uint8_t)0;
#endif
        continue;
      }

      // Get the depth value of the current pixel
      auto pixels_distance = p_depth_frame[depth_pixel_index];

      ST::Vector3f depth_point_3D = last_df.unprojectPoint(i, j);

      if (pixels_distance > m_maxZ)
        depth_point_3D.x = depth_point_3D.y = depth_point_3D.z = m_invalidDepthValue;

      pointcloud->points[(size_t)depth_pixel_index].x = depth_point_3D.x * 0.001;
      pointcloud->points[(size_t)depth_pixel_index].y = depth_point_3D.y * 0.001;
      pointcloud->points[(size_t)depth_pixel_index].z = depth_point_3D.z * 0.001;

      if (!registered_streams) {

        ST::Vector3f color_point_3D = depth2ColorExtrinsics * depth_point_3D;

        ST::Vector2f color_pixel;
        if (color_point_3D.z != 0) {
          double x, y, pixel_x, pixel_y;
          x = static_cast<double>(color_point_3D.y / color_point_3D.z);
          y = static_cast<double>(color_point_3D.x / color_point_3D.z);
          vpMeterPixelConversion::convertPoint(m_visible_camera_parameters, x, y, pixel_x, pixel_y);
          color_pixel.x = pixel_x;
          color_pixel.y = pixel_y;
        }

        if (color_pixel.y < 0 || color_pixel.y >= color_height || color_pixel.x < 0 || color_pixel.x >= color_width) {
          // For out of bounds color data, default to black.
#if PCL_VERSION_COMPARE(<, 1, 1, 0)
          unsigned int r = 0, g = 0, b = 0;
          uint32_t rgb = (static_cast<uint32_t>(r) << 16 | static_cast<uint32_t>(g) << 8 | static_cast<uint32_t>(b));

          pointcloud->points[(size_t)depth_pixel_index].rgb = *reinterpret_cast<float *>(&rgb);
#else
          pointcloud->points[(size_t)depth_pixel_index].r = (uint8_t)0;
          pointcloud->points[(size_t)depth_pixel_index].g = (uint8_t)0;
          pointcloud->points[(size_t)depth_pixel_index].b = (uint8_t)0;
#endif
        } else {
          unsigned int i_ = (unsigned int)color_pixel.x;
          unsigned int j_ = (unsigned int)color_pixel.y;

#if PCL_VERSION_COMPARE(<, 1, 1, 0)
          uint32_t rgb = 0;
          if (swap_rb) {
            rgb =
                (static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel]) |
                 static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1]) << 8 |
                 static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2]) << 16);
          } else {
            rgb = (static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel]) << 16 |
                   static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1]) << 8 |
                   static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2]));
          }

          pointcloud->points[(size_t)(i * depth_width + j)].rgb = *reinterpret_cast<float *>(&rgb);
#else
          if (swap_rb) {
            pointcloud->points[(size_t)depth_pixel_index].b =
                p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel];
            pointcloud->points[(size_t)depth_pixel_index].g =
                p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1];
            pointcloud->points[(size_t)depth_pixel_index].r =
                p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2];
          } else {
            pointcloud->points[(size_t)depth_pixel_index].r =
                p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel];
            pointcloud->points[(size_t)depth_pixel_index].g =
                p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1];
            pointcloud->points[(size_t)depth_pixel_index].b =
                p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2];
          }
#endif
        }
      } else {
#if PCL_VERSION_COMPARE(<, 1, 1, 0)
        uint32_t rgb = 0;
        if (swap_rb) {
          rgb =
              (static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel]) |
               static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1]) << 8 |
               static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2]) << 16);
        } else {
          rgb = (static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel]) << 16 |
                 static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1]) << 8 |
                 static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2]));
        }

        pointcloud->points[(size_t)(i * depth_width + j)].rgb = *reinterpret_cast<float *>(&rgb);
#else
        if (swap_rb) {
          pointcloud->points[(size_t)depth_pixel_index].b =
              p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel];
          pointcloud->points[(size_t)depth_pixel_index].g =
              p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1];
          pointcloud->points[(size_t)depth_pixel_index].r =
              p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2];
        } else {
          pointcloud->points[(size_t)depth_pixel_index].r =
              p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel];
          pointcloud->points[(size_t)depth_pixel_index].g =
              p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1];
          pointcloud->points[(size_t)depth_pixel_index].b =
              p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2];
        }
#endif
      }
    }
  }
}

#endif

#endif