Visual Servoing Platform version 3.7.0
vpRealSense2.cpp
1/*
2 * ViSP, open source Visual Servoing Platform software.
3 * Copyright (C) 2005 - 2025 by Inria. All rights reserved.
4 *
5 * This software is free software; you can redistribute it and/or modify
6 * it under the terms of the GNU General Public License as published by
7 * the Free Software Foundation; either version 2 of the License, or
8 * (at your option) any later version.
9 * See the file LICENSE.txt at the root directory of this source
10 * distribution for additional information about the GNU GPL.
11 *
12 * For using ViSP with software that can not be combined with the GNU
13 * GPL, please contact Inria about acquiring a ViSP Professional
14 * Edition License.
15 *
16 * See https://visp.inria.fr for more information.
17 *
18 * This software was developed at:
19 * Inria Rennes - Bretagne Atlantique
20 * Campus Universitaire de Beaulieu
21 * 35042 Rennes Cedex
22 * France
23 *
24 * If you have questions regarding the use of this file, please contact
25 * Inria at visp@inria.fr
26 *
27 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
28 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
29 *
30 * Description:
31 * librealSense2 interface.
32 */
33
34#include <visp3/core/vpConfig.h>
35
36#if defined(VISP_HAVE_REALSENSE2) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
37#include <cstring>
38#include <iomanip>
39#include <map>
40#include <set>
41#include <visp3/core/vpImageConvert.h>
42#include <visp3/sensor/vpRealSense2.h>
43
44#define MANUAL_POINTCLOUD 1
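// When MANUAL_POINTCLOUD is 1, the PCL getPointcloud() overload below deprojects each depth
// pixel itself (OpenMP-parallelized loop); when 0, it falls back to rs2::pointcloud::calculate().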
45
46#if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
47namespace
48{
49bool operator==(const rs2_extrinsics &lhs, const rs2_extrinsics &rhs)
50{
51 for (int i = 0; i < 3; i++) {
52 for (int j = 0; j < 3; j++) {
53 if (std::fabs(lhs.rotation[i * 3 + j] - rhs.rotation[i * 3 + j]) > std::numeric_limits<float>::epsilon()) {
54 return false;
55 }
56 }
57
58 if (std::fabs(lhs.translation[i] - rhs.translation[i]) > std::numeric_limits<float>::epsilon()) {
59 return false;
60 }
61 }
62
63 return true;
64}
65} // namespace
66#endif
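// The operator== above is only used to detect whether the depth-to-color extrinsics are the
// identity transform, i.e. whether the two streams are already registered (see the textured
// point cloud overload of getPointcloud() further down).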
67
 68BEGIN_VISP_NAMESPACE
 88void vpRealSense2::acquire(vpImage<unsigned char> &grey, double *ts)
 89{
90 auto data = m_pipe.wait_for_frames();
91 auto color_frame = data.get_color_frame();
92 getGreyFrame(color_frame, grey);
93 if (ts != nullptr) {
94 *ts = data.get_timestamp();
95 }
96}
97
 103void vpRealSense2::acquire(vpImage<vpRGBa> &color, double *ts)
 104{
105 auto data = m_pipe.wait_for_frames();
106 auto color_frame = data.get_color_frame();
107 getColorFrame(color_frame, color);
108 if (ts != nullptr) {
109 *ts = data.get_timestamp();
110 }
111}
112
123void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
124 std::vector<vpColVector> *const data_pointCloud, unsigned char *const data_infrared,
125 rs2::align *const align_to, double *ts)
126{
127 acquire(data_image, data_depth, data_pointCloud, data_infrared, nullptr, align_to, ts);
128}
129
192void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
193 std::vector<vpColVector> *const data_pointCloud, unsigned char *const data_infrared1,
194 unsigned char *const data_infrared2, rs2::align *const align_to, double *ts)
195{
196 auto data = m_pipe.wait_for_frames();
197 if (align_to != nullptr) {
198 // Infrared stream is not aligned
199 // see https://github.com/IntelRealSense/librealsense/issues/1556#issuecomment-384919994
200#if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
201 data = align_to->process(data);
202#else
203 data = align_to->proccess(data);
204#endif
205 }
206
207 if (data_image != nullptr) {
208 auto color_frame = data.get_color_frame();
209 getNativeFrameData(color_frame, data_image);
210 }
211
212 if (data_depth != nullptr || data_pointCloud != nullptr) {
213 auto depth_frame = data.get_depth_frame();
214 if (data_depth != nullptr) {
215 getNativeFrameData(depth_frame, data_depth);
216 }
217
218 if (data_pointCloud != nullptr) {
219 getPointcloud(depth_frame, *data_pointCloud);
220 }
221 }
222
223 if (data_infrared1 != nullptr) {
224 auto infrared_frame = data.first(RS2_STREAM_INFRARED);
225 getNativeFrameData(infrared_frame, data_infrared1);
226 }
227
228 if (data_infrared2 != nullptr) {
229 auto infrared_frame = data.get_infrared_frame(2);
230 getNativeFrameData(infrared_frame, data_infrared2);
231 }
232
233 if (ts != nullptr) {
234 *ts = data.get_timestamp();
235 }
236}
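// Usage sketch (illustrative only, buffer sizes depend on the enabled streams):
//   vpRealSense2 rs;
//   rs2::config cfg;
//   cfg.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30);
//   cfg.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, 30);
//   rs.open(cfg);
//   std::vector<unsigned char> color(640 * 480 * 4), depth(640 * 480 * 2);
//   rs2::align align_to(RS2_STREAM_COLOR);
//   rs.acquire(color.data(), depth.data(), nullptr, nullptr, &align_to, nullptr);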
237
238#if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
 259void vpRealSense2::acquire(vpImage<unsigned char> *left, vpImage<unsigned char> *right, double *ts)
 260{
261 auto data = m_pipe.wait_for_frames();
262
263 if (left != nullptr) {
264 auto left_fisheye_frame = data.get_fisheye_frame(1);
265 unsigned int width = static_cast<unsigned int>(left_fisheye_frame.get_width());
266 unsigned int height = static_cast<unsigned int>(left_fisheye_frame.get_height());
267 left->resize(height, width);
268 getNativeFrameData(left_fisheye_frame, (*left).bitmap);
269 }
270
271 if (right != nullptr) {
272 auto right_fisheye_frame = data.get_fisheye_frame(2);
273 unsigned int width = static_cast<unsigned int>(right_fisheye_frame.get_width());
274 unsigned int height = static_cast<unsigned int>(right_fisheye_frame.get_height());
275 right->resize(height, width);
276 getNativeFrameData(right_fisheye_frame, (*right).bitmap);
277 }
278
279 if (ts != nullptr) {
280 *ts = data.get_timestamp();
281 }
282}
283
 312void vpRealSense2::acquire(vpImage<unsigned char> *left, vpImage<unsigned char> *right, vpHomogeneousMatrix *cMw,
 313                           vpColVector *odo_vel, vpColVector *odo_acc, unsigned int *confidence, double *ts)
314{
315 auto data = m_pipe.wait_for_frames();
316
317 if (left != nullptr) {
318 auto left_fisheye_frame = data.get_fisheye_frame(1);
319 unsigned int width = static_cast<unsigned int>(left_fisheye_frame.get_width());
320 unsigned int height = static_cast<unsigned int>(left_fisheye_frame.get_height());
321 left->resize(height, width);
322 getNativeFrameData(left_fisheye_frame, (*left).bitmap);
323 }
324
325 if (right != nullptr) {
326 auto right_fisheye_frame = data.get_fisheye_frame(2);
327 unsigned int width = static_cast<unsigned int>(right_fisheye_frame.get_width());
328 unsigned int height = static_cast<unsigned int>(right_fisheye_frame.get_height());
329 right->resize(height, width);
330 getNativeFrameData(right_fisheye_frame, (*right).bitmap);
331 }
332
333 auto pose_frame = data.first_or_default(RS2_STREAM_POSE);
334 auto pose_data = pose_frame.as<rs2::pose_frame>().get_pose_data();
335
336 if (ts != nullptr) {
337 *ts = data.get_timestamp();
338 }
339
340 if (cMw != nullptr) {
341 m_pos[0] = static_cast<double>(pose_data.translation.x);
342 m_pos[1] = static_cast<double>(pose_data.translation.y);
343 m_pos[2] = static_cast<double>(pose_data.translation.z);
344
345 m_quat[0] = static_cast<double>(pose_data.rotation.x);
346 m_quat[1] = static_cast<double>(pose_data.rotation.y);
347 m_quat[2] = static_cast<double>(pose_data.rotation.z);
348 m_quat[3] = static_cast<double>(pose_data.rotation.w);
349
 350    *cMw = vpHomogeneousMatrix(m_pos, m_quat);
 351  }
352
353 if (odo_vel != nullptr) {
354 odo_vel->resize(6, false);
355 (*odo_vel)[0] = static_cast<double>(pose_data.velocity.x);
356 (*odo_vel)[1] = static_cast<double>(pose_data.velocity.y);
357 (*odo_vel)[2] = static_cast<double>(pose_data.velocity.z);
358 (*odo_vel)[3] = static_cast<double>(pose_data.angular_velocity.x);
359 (*odo_vel)[4] = static_cast<double>(pose_data.angular_velocity.y);
360 (*odo_vel)[5] = static_cast<double>(pose_data.angular_velocity.z);
361 }
362
363 if (odo_acc != nullptr) {
364 odo_acc->resize(6, false);
365 (*odo_acc)[0] = static_cast<double>(pose_data.acceleration.x);
366 (*odo_acc)[1] = static_cast<double>(pose_data.acceleration.y);
367 (*odo_acc)[2] = static_cast<double>(pose_data.acceleration.z);
368 (*odo_acc)[3] = static_cast<double>(pose_data.angular_acceleration.x);
369 (*odo_acc)[4] = static_cast<double>(pose_data.angular_acceleration.y);
370 (*odo_acc)[5] = static_cast<double>(pose_data.angular_acceleration.z);
371 }
372
373 if (confidence != nullptr) {
374 *confidence = pose_data.tracker_confidence;
375 }
376}
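// Note: pose_data.tracker_confidence follows the librealsense convention for the pose stream
// (0: failed, 1: low, 2: medium, 3: high). The returned pose is built from m_pos/m_quat, i.e.
// the translation and quaternion reported by the T265 pose frame.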
377
 391void vpRealSense2::acquire(vpImage<unsigned char> *left, vpImage<unsigned char> *right, vpHomogeneousMatrix *cMw,
 392                           vpColVector *odo_vel, vpColVector *odo_acc, vpColVector *imu_vel, vpColVector *imu_acc,
393 unsigned int *confidence, double *ts)
394{
395 auto data = m_pipe.wait_for_frames();
396
397 if (left != nullptr) {
398 auto left_fisheye_frame = data.get_fisheye_frame(1);
399 unsigned int width = static_cast<unsigned int>(left_fisheye_frame.get_width());
400 unsigned int height = static_cast<unsigned int>(left_fisheye_frame.get_height());
401 left->resize(height, width);
402 getNativeFrameData(left_fisheye_frame, (*left).bitmap);
403 }
404
405 if (right != nullptr) {
406 auto right_fisheye_frame = data.get_fisheye_frame(2);
407 unsigned int width = static_cast<unsigned int>(right_fisheye_frame.get_width());
408 unsigned int height = static_cast<unsigned int>(right_fisheye_frame.get_height());
409 right->resize(height, width);
410 getNativeFrameData(right_fisheye_frame, (*right).bitmap);
411 }
412
413 auto pose_frame = data.first_or_default(RS2_STREAM_POSE);
414 auto pose_data = pose_frame.as<rs2::pose_frame>().get_pose_data();
415
416 if (ts != nullptr) {
417 *ts = data.get_timestamp();
418 }
419
420 if (cMw != nullptr) {
421 m_pos[0] = static_cast<double>(pose_data.translation.x);
422 m_pos[1] = static_cast<double>(pose_data.translation.y);
423 m_pos[2] = static_cast<double>(pose_data.translation.z);
424
425 m_quat[0] = static_cast<double>(pose_data.rotation.x);
426 m_quat[1] = static_cast<double>(pose_data.rotation.y);
427 m_quat[2] = static_cast<double>(pose_data.rotation.z);
428 m_quat[3] = static_cast<double>(pose_data.rotation.w);
429
 430    *cMw = vpHomogeneousMatrix(m_pos, m_quat);
 431  }
432
433 if (odo_vel != nullptr) {
434 odo_vel->resize(6, false);
435 (*odo_vel)[0] = static_cast<double>(pose_data.velocity.x);
436 (*odo_vel)[1] = static_cast<double>(pose_data.velocity.y);
437 (*odo_vel)[2] = static_cast<double>(pose_data.velocity.z);
438 (*odo_vel)[3] = static_cast<double>(pose_data.angular_velocity.x);
439 (*odo_vel)[4] = static_cast<double>(pose_data.angular_velocity.y);
440 (*odo_vel)[5] = static_cast<double>(pose_data.angular_velocity.z);
441 }
442
443 if (odo_acc != nullptr) {
444 odo_acc->resize(6, false);
445 (*odo_acc)[0] = static_cast<double>(pose_data.acceleration.x);
446 (*odo_acc)[1] = static_cast<double>(pose_data.acceleration.y);
447 (*odo_acc)[2] = static_cast<double>(pose_data.acceleration.z);
448 (*odo_acc)[3] = static_cast<double>(pose_data.angular_acceleration.x);
449 (*odo_acc)[4] = static_cast<double>(pose_data.angular_acceleration.y);
450 (*odo_acc)[5] = static_cast<double>(pose_data.angular_acceleration.z);
451 }
452
453 auto accel_frame = data.first_or_default(RS2_STREAM_ACCEL);
454 auto accel_data = accel_frame.as<rs2::motion_frame>().get_motion_data();
455
456 if (imu_acc != nullptr) {
457 imu_acc->resize(3, false);
458 (*imu_acc)[0] = static_cast<double>(accel_data.x);
459 (*imu_acc)[1] = static_cast<double>(accel_data.y);
460 (*imu_acc)[2] = static_cast<double>(accel_data.z);
461 }
462
463 auto gyro_frame = data.first_or_default(RS2_STREAM_GYRO);
464 auto gyro_data = gyro_frame.as<rs2::motion_frame>().get_motion_data();
465
466 if (imu_vel != nullptr) {
467 imu_vel->resize(3, false);
468 (*imu_vel)[0] = static_cast<double>(gyro_data.x);
469 (*imu_vel)[1] = static_cast<double>(gyro_data.y);
470 (*imu_vel)[2] = static_cast<double>(gyro_data.z);
471 }
472
473 if (confidence != nullptr) {
474 *confidence = pose_data.tracker_confidence;
475 }
476}
477#endif // #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
478
479#if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
492void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
493 std::vector<vpColVector> *const data_pointCloud,
494 pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud, unsigned char *const data_infrared,
495 rs2::align *const align_to, double *ts)
496{
497 acquire(data_image, data_depth, data_pointCloud, pointcloud, data_infrared, nullptr, align_to, ts);
498}
499
514void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
515 std::vector<vpColVector> *const data_pointCloud,
516 pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud, unsigned char *const data_infrared1,
517 unsigned char *const data_infrared2, rs2::align *const align_to, double *ts)
518{
519 auto data = m_pipe.wait_for_frames();
520 if (align_to != nullptr) {
521 // Infrared stream is not aligned
522 // see https://github.com/IntelRealSense/librealsense/issues/1556#issuecomment-384919994
523#if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
524 data = align_to->process(data);
525#else
526 data = align_to->proccess(data);
527#endif
528 }
529
530 if (data_image != nullptr) {
531 auto color_frame = data.get_color_frame();
532 getNativeFrameData(color_frame, data_image);
533 }
534
535 if (data_depth != nullptr || data_pointCloud != nullptr || pointcloud != nullptr) {
536 auto depth_frame = data.get_depth_frame();
537 if (data_depth != nullptr) {
538 getNativeFrameData(depth_frame, data_depth);
539 }
540
541 if (data_pointCloud != nullptr) {
542 getPointcloud(depth_frame, *data_pointCloud);
543 }
544
545 if (pointcloud != nullptr) {
546 getPointcloud(depth_frame, pointcloud);
547 }
548 }
549
550 if (data_infrared1 != nullptr) {
551 auto infrared_frame = data.first(RS2_STREAM_INFRARED);
552 getNativeFrameData(infrared_frame, data_infrared1);
553 }
554
555 if (data_infrared2 != nullptr) {
556 auto infrared_frame = data.get_infrared_frame(2);
557 getNativeFrameData(infrared_frame, data_infrared2);
558 }
559
560 if (ts != nullptr) {
561 *ts = data.get_timestamp();
562 }
563}
564
577void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
578 std::vector<vpColVector> *const data_pointCloud,
579 pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud, unsigned char *const data_infrared,
580 rs2::align *const align_to, double *ts)
581{
582 acquire(data_image, data_depth, data_pointCloud, pointcloud, data_infrared, nullptr, align_to, ts);
583}
584
599void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
600 std::vector<vpColVector> *const data_pointCloud,
601 pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud, unsigned char *const data_infrared1,
602 unsigned char *const data_infrared2, rs2::align *const align_to, double *ts)
603{
604 auto data = m_pipe.wait_for_frames();
605 if (align_to != nullptr) {
606 // Infrared stream is not aligned
607 // see https://github.com/IntelRealSense/librealsense/issues/1556#issuecomment-384919994
608#if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
609 data = align_to->process(data);
610#else
611 data = align_to->proccess(data);
612#endif
613 }
614
615 auto color_frame = data.get_color_frame();
616 if (data_image != nullptr) {
617 getNativeFrameData(color_frame, data_image);
618 }
619
620 if (data_depth != nullptr || data_pointCloud != nullptr || pointcloud != nullptr) {
621 auto depth_frame = data.get_depth_frame();
622 if (data_depth != nullptr) {
623 getNativeFrameData(depth_frame, data_depth);
624 }
625
626 if (data_pointCloud != nullptr) {
627 getPointcloud(depth_frame, *data_pointCloud);
628 }
629
630 if (pointcloud != nullptr) {
631 getPointcloud(depth_frame, color_frame, pointcloud);
632 }
633 }
634
635 if (data_infrared1 != nullptr) {
636 auto infrared_frame = data.first(RS2_STREAM_INFRARED);
637 getNativeFrameData(infrared_frame, data_infrared1);
638 }
639
640 if (data_infrared2 != nullptr) {
641 auto infrared_frame = data.get_infrared_frame(2);
642 getNativeFrameData(infrared_frame, data_infrared2);
643 }
644
645 if (ts != nullptr) {
646 *ts = data.get_timestamp();
647 }
648}
649#endif
650
 662void vpRealSense2::close()
 663{
664 if (m_init) {
665 m_pipe.stop();
666 m_init = false;
667 }
668}
669
 679vpCameraParameters vpRealSense2::getCameraParameters(const rs2_stream &stream,
 680                                                     vpCameraParameters::vpCameraParametersProjType type,
 681                                                     int index) const
 682{
683 auto rs_stream = m_pipelineProfile.get_stream(stream, index).as<rs2::video_stream_profile>();
684 auto intrinsics = rs_stream.get_intrinsics();
685
 686  vpCameraParameters cam;
 687  double u0 = intrinsics.ppx;
688 double v0 = intrinsics.ppy;
689 double px = intrinsics.fx;
690 double py = intrinsics.fy;
691
 692  switch (type) {
 693  case vpCameraParameters::perspectiveProjWithDistortion: {
694 double kdu = intrinsics.coeffs[0];
695 cam.initPersProjWithDistortion(px, py, u0, v0, -kdu, kdu);
696 } break;
697
 698  case vpCameraParameters::ProjWithKannalaBrandtDistortion: {
 699    std::vector<double> tmp_coefs;
700 tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[0]));
701 tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[1]));
702 tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[2]));
703 tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[3]));
704 tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[4]));
705
706 cam.initProjWithKannalaBrandtDistortion(px, py, u0, v0, tmp_coefs);
707 } break;
708
 709  case vpCameraParameters::perspectiveProjWithoutDistortion:
 710  default:
711 cam.initPersProjWithoutDistortion(px, py, u0, v0);
712 break;
713 }
714
715 return cam;
716}
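// Usage sketch (illustrative only): retrieve ViSP intrinsics for the color stream
//   vpCameraParameters cam =
//     rs.getCameraParameters(RS2_STREAM_COLOR, vpCameraParameters::perspectiveProjWithDistortion);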
717
726rs2_intrinsics vpRealSense2::getIntrinsics(const rs2_stream &stream, int index) const
727{
728 auto vsp = m_pipelineProfile.get_stream(stream, index).as<rs2::video_stream_profile>();
729 return vsp.get_intrinsics();
730}
731
732void vpRealSense2::getColorFrame(const rs2::frame &frame, vpImage<vpRGBa> &color)
733{
734 auto vf = frame.as<rs2::video_frame>();
735 unsigned int width = static_cast<unsigned int>(vf.get_width());
736 unsigned int height = static_cast<unsigned int>(vf.get_height());
737 color.resize(height, width);
738
739 if (frame.get_profile().format() == RS2_FORMAT_RGB8) {
740 vpImageConvert::RGBToRGBa(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
741 reinterpret_cast<unsigned char *>(color.bitmap), width, height);
742 }
743 else if (frame.get_profile().format() == RS2_FORMAT_RGBA8) {
744 memcpy(reinterpret_cast<unsigned char *>(color.bitmap),
745 const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
746 width * height * sizeof(vpRGBa));
747 }
748 else if (frame.get_profile().format() == RS2_FORMAT_BGR8) {
749 vpImageConvert::BGRToRGBa(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
750 reinterpret_cast<unsigned char *>(color.bitmap), width, height);
751 }
752 else {
753 throw vpException(vpException::fatalError, "RealSense Camera - color stream not supported!");
754 }
755}
756
 761float vpRealSense2::getDepthScale()
 762{
763 if (!m_init) { // If pipe is not yet created, create it. Otherwise, we already know depth scale.
764 rs2::pipeline *pipe = new rs2::pipeline;
765 rs2::pipeline_profile *pipelineProfile = new rs2::pipeline_profile;
766 *pipelineProfile = pipe->start();
767
768 rs2::device dev = pipelineProfile->get_device();
769
770 // Go over the device's sensors
771 for (rs2::sensor &sensor : dev.query_sensors()) {
772 // Check if the sensor is a depth sensor
773 if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
774 m_depthScale = dpt.get_depth_scale();
775 }
776 }
777
778 pipe->stop();
779 delete pipe;
780 delete pipelineProfile;
781 }
782
783 return m_depthScale;
784}
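// The depth scale converts raw 16-bit depth values to meters (typically 0.001 on D4xx devices),
// so that depth_in_meters = m_depthScale * raw_depth; see getPointcloud() below.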
785
786void vpRealSense2::getGreyFrame(const rs2::frame &frame, vpImage<unsigned char> &grey)
787{
788 auto vf = frame.as<rs2::video_frame>();
789 unsigned int width = static_cast<unsigned int>(vf.get_width());
790 unsigned int height = static_cast<unsigned int>(vf.get_height());
791 grey.resize(height, width);
792
793 if (frame.get_profile().format() == RS2_FORMAT_RGB8) {
794 vpImageConvert::RGBToGrey(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
795 grey.bitmap, width, height);
796 }
797 else if (frame.get_profile().format() == RS2_FORMAT_RGBA8) {
798 vpImageConvert::RGBaToGrey(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
799 grey.bitmap, width * height);
800 }
801 else if (frame.get_profile().format() == RS2_FORMAT_BGR8) {
802 vpImageConvert::BGRToGrey(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
803 grey.bitmap, width, height);
804 }
805 else {
806 throw vpException(vpException::fatalError, "RealSense Camera - color stream not supported!");
807 }
808}
809
810void vpRealSense2::getNativeFrameData(const rs2::frame &frame, unsigned char *const data)
811{
812 auto vf = frame.as<rs2::video_frame>();
813 int size = vf.get_width() * vf.get_height();
814
815 switch (frame.get_profile().format()) {
816 case RS2_FORMAT_RGB8:
817 case RS2_FORMAT_BGR8:
818 memcpy(data, frame.get_data(), size * 3);
819 break;
820
821 case RS2_FORMAT_RGBA8:
822 case RS2_FORMAT_BGRA8:
823 memcpy(data, frame.get_data(), size * 4);
824 break;
825
826 case RS2_FORMAT_Y16:
827 case RS2_FORMAT_Z16:
828 memcpy(data, frame.get_data(), size * 2);
829 break;
830
831 case RS2_FORMAT_Y8:
832 memcpy(data, frame.get_data(), size);
833 break;
834
835 default:
836 break;
837 }
838}
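// Note: getNativeFrameData() performs a raw memcpy; the caller must provide a buffer of at least
// width * height * bytes-per-pixel for the stream format (2 bytes for Z16/Y16, 3 for RGB8/BGR8,
// 4 for RGBA8/BGRA8, 1 for Y8).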
839
840void vpRealSense2::getPointcloud(const rs2::depth_frame &depth_frame, std::vector<vpColVector> &pointcloud)
841{
842 if (m_depthScale <= std::numeric_limits<float>::epsilon()) {
843 std::stringstream ss;
844 ss << "Error, depth scale <= 0: " << m_depthScale;
845 throw vpException(vpException::fatalError, ss.str());
846 }
847
848 auto vf = depth_frame.as<rs2::video_frame>();
849 const int width = vf.get_width();
850 const int height = vf.get_height();
851 pointcloud.resize(static_cast<size_t>(width * height));
852
853 const uint16_t *p_depth_frame = reinterpret_cast<const uint16_t *>(depth_frame.get_data());
854 const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
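  // rs2_deproject_pixel_to_point() applies the inverse pinhole model: for an undistorted stream,
  // X = (u - ppx) / fx * Z, Y = (v - ppy) / fy * Z, with Z = m_depthScale * raw_depth (in meters).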
855
856// Multi-threading if OpenMP
857// Concurrent writes at different locations are safe
858#pragma omp parallel for schedule(dynamic)
859 for (int i = 0; i < height; i++) {
860 auto depth_pixel_index = i * width;
861
862 for (int j = 0; j < width; j++, depth_pixel_index++) {
863 if (p_depth_frame[depth_pixel_index] == 0) {
864 pointcloud[static_cast<size_t>(depth_pixel_index)].resize(4, false);
865 pointcloud[static_cast<size_t>(depth_pixel_index)][0] = m_invalidDepthValue;
866 pointcloud[static_cast<size_t>(depth_pixel_index)][1] = m_invalidDepthValue;
867 pointcloud[static_cast<size_t>(depth_pixel_index)][2] = m_invalidDepthValue;
868 pointcloud[static_cast<size_t>(depth_pixel_index)][3] = 1.0;
869 continue;
870 }
871
872 // Get the depth value of the current pixel
873 auto pixels_distance = m_depthScale * p_depth_frame[depth_pixel_index];
874
875 float points[3];
876 const float pixel[] = { static_cast<float>(j), static_cast<float>(i) };
877 rs2_deproject_pixel_to_point(points, &depth_intrinsics, pixel, pixels_distance);
878
879 if (pixels_distance > m_max_Z)
880 points[0] = points[1] = points[2] = m_invalidDepthValue;
881
882 pointcloud[static_cast<size_t>(depth_pixel_index)].resize(4, false);
883 pointcloud[static_cast<size_t>(depth_pixel_index)][0] = points[0];
884 pointcloud[static_cast<size_t>(depth_pixel_index)][1] = points[1];
885 pointcloud[static_cast<size_t>(depth_pixel_index)][2] = points[2];
886 pointcloud[static_cast<size_t>(depth_pixel_index)][3] = 1.0;
887 }
888 }
889}
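// Each point is stored as a homogeneous column vector (X, Y, Z, 1) in meters, indexed row-major
// as i * width + j; pixels with no depth (or beyond m_max_Z) are set to m_invalidDepthValue.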
890
891#if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
892void vpRealSense2::getPointcloud(const rs2::depth_frame &depth_frame, pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud)
893{
894 if (m_depthScale <= std::numeric_limits<float>::epsilon()) {
895 std::stringstream ss;
896 ss << "Error, depth scale <= 0: " << m_depthScale;
897 throw vpException(vpException::fatalError, ss.str());
898 }
899
900 auto vf = depth_frame.as<rs2::video_frame>();
901 const int width = vf.get_width();
902 const int height = vf.get_height();
903 pointcloud->width = static_cast<uint32_t>(width);
904 pointcloud->height = static_cast<uint32_t>(height);
905 pointcloud->resize(static_cast<size_t>(width * height));
906
907#if MANUAL_POINTCLOUD // faster to compute manually when tested
908 const uint16_t *p_depth_frame = reinterpret_cast<const uint16_t *>(depth_frame.get_data());
909 const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
910
911 // Multi-threading if OpenMP
912 // Concurrent writes at different locations are safe
913#pragma omp parallel for schedule(dynamic)
914 for (int i = 0; i < height; i++) {
915 auto depth_pixel_index = i * width;
916
917 for (int j = 0; j < width; j++, depth_pixel_index++) {
918 if (p_depth_frame[depth_pixel_index] == 0) {
919 pointcloud->points[static_cast<size_t>(depth_pixel_index)].x = m_invalidDepthValue;
920 pointcloud->points[static_cast<size_t>(depth_pixel_index)].y = m_invalidDepthValue;
921 pointcloud->points[static_cast<size_t>(depth_pixel_index)].z = m_invalidDepthValue;
922 continue;
923 }
924
925 // Get the depth value of the current pixel
926 auto pixels_distance = m_depthScale * p_depth_frame[depth_pixel_index];
927
928 float points[3];
929 const float pixel[] = { static_cast<float>(j), static_cast<float>(i) };
930 rs2_deproject_pixel_to_point(points, &depth_intrinsics, pixel, pixels_distance);
931
932 if (pixels_distance > m_max_Z)
933 points[0] = points[1] = points[2] = m_invalidDepthValue;
934
935 pointcloud->points[static_cast<size_t>(depth_pixel_index)].x = points[0];
936 pointcloud->points[static_cast<size_t>(depth_pixel_index)].y = points[1];
937 pointcloud->points[static_cast<size_t>(depth_pixel_index)].z = points[2];
938 }
939 }
940#else
941 m_points = m_pointcloud.calculate(depth_frame);
942 auto vertices = m_points.get_vertices();
943
944 for (size_t i = 0; i < m_points.size(); i++) {
945 if (vertices[i].z <= 0.0f || vertices[i].z > m_max_Z) {
946 pointcloud->points[i].x = m_invalidDepthValue;
947 pointcloud->points[i].y = m_invalidDepthValue;
948 pointcloud->points[i].z = m_invalidDepthValue;
949 }
950 else {
951 pointcloud->points[i].x = vertices[i].x;
952 pointcloud->points[i].y = vertices[i].y;
953 pointcloud->points[i].z = vertices[i].z;
954 }
955 }
956#endif
957}
958
959void vpRealSense2::getPointcloud(const rs2::depth_frame &depth_frame, const rs2::frame &color_frame,
960 pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud)
961{
962 if (m_depthScale <= std::numeric_limits<float>::epsilon()) {
963 throw vpException(vpException::fatalError, "Error, depth scale <= 0: %f", m_depthScale);
964 }
965
966 auto vf = depth_frame.as<rs2::video_frame>();
967 const int depth_width = vf.get_width();
968 const int depth_height = vf.get_height();
969 pointcloud->width = static_cast<uint32_t>(depth_width);
970 pointcloud->height = static_cast<uint32_t>(depth_height);
971 pointcloud->resize(static_cast<uint32_t>(depth_width * depth_height));
972
973 vf = color_frame.as<rs2::video_frame>();
974 const int color_width = vf.get_width();
975 const int color_height = vf.get_height();
976
977 const uint16_t *p_depth_frame = reinterpret_cast<const uint16_t *>(depth_frame.get_data());
978 const rs2_extrinsics depth2ColorExtrinsics =
979 depth_frame.get_profile().as<rs2::video_stream_profile>().get_extrinsics_to(
980 color_frame.get_profile().as<rs2::video_stream_profile>());
981 const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
982 const rs2_intrinsics color_intrinsics = color_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
983
984 auto color_format = color_frame.as<rs2::video_frame>().get_profile().format();
985 const bool swap_rb = color_format == RS2_FORMAT_BGR8 || color_format == RS2_FORMAT_BGRA8;
986 unsigned int nb_color_pixel = (color_format == RS2_FORMAT_RGB8 || color_format == RS2_FORMAT_BGR8) ? 3 : 4;
987 const unsigned char *p_color_frame = static_cast<const unsigned char *>(color_frame.get_data());
988 rs2_extrinsics identity;
989 memset(identity.rotation, 0, sizeof(identity.rotation));
990 memset(identity.translation, 0, sizeof(identity.translation));
991 for (int i = 0; i < 3; i++) {
992 identity.rotation[i * 3 + i] = 1;
993 }
994 const bool registered_streams =
995 (depth2ColorExtrinsics == identity) && (color_width == depth_width) && (color_height == depth_height);
996
997// Multi-threading if OpenMP
998// Concurrent writes at different locations are safe
999#pragma omp parallel for schedule(dynamic)
1000 for (int i = 0; i < depth_height; i++) {
1001 auto depth_pixel_index = i * depth_width;
1002
1003 for (int j = 0; j < depth_width; j++, depth_pixel_index++) {
1004 if (p_depth_frame[depth_pixel_index] == 0) {
1005 pointcloud->points[static_cast<size_t>(depth_pixel_index)].x = m_invalidDepthValue;
1006 pointcloud->points[static_cast<size_t>(depth_pixel_index)].y = m_invalidDepthValue;
1007 pointcloud->points[static_cast<size_t>(depth_pixel_index)].z = m_invalidDepthValue;
1008
1009 // For out of bounds color data, default to a shade of blue in order to
1010 // visually distinguish holes. This color value is same as the librealsense
1011 // out of bounds color value.
1012#if (VISP_HAVE_PCL_VERSION < 0x010100) // 1.1.0
1013 unsigned int r = 96, g = 157, b = 198;
1014 uint32_t rgb = (static_cast<uint32_t>(r) << 16 | static_cast<uint32_t>(g) << 8 | static_cast<uint32_t>(b));
1015
1016 pointcloud->points[static_cast<size_t>(depth_pixel_index)].rgb = *reinterpret_cast<float *>(&rgb);
1017#else
1018 pointcloud->points[static_cast<size_t>(depth_pixel_index)].r = (uint8_t)96;
1019 pointcloud->points[static_cast<size_t>(depth_pixel_index)].g = (uint8_t)157;
1020 pointcloud->points[static_cast<size_t>(depth_pixel_index)].b = (uint8_t)198;
1021#endif
1022 continue;
1023 }
1024
1025 // Get the depth value of the current pixel
1026 auto pixels_distance = m_depthScale * p_depth_frame[depth_pixel_index];
1027
1028 float depth_point[3];
1029 const float pixel[] = { static_cast<float>(j), static_cast<float>(i) };
1030 rs2_deproject_pixel_to_point(depth_point, &depth_intrinsics, pixel, pixels_distance);
1031
1032 if (pixels_distance > m_max_Z) {
1033 depth_point[0] = depth_point[1] = depth_point[2] = m_invalidDepthValue;
1034 }
1035
1036 pointcloud->points[static_cast<size_t>(depth_pixel_index)].x = depth_point[0];
1037 pointcloud->points[static_cast<size_t>(depth_pixel_index)].y = depth_point[1];
1038 pointcloud->points[static_cast<size_t>(depth_pixel_index)].z = depth_point[2];
1039
1040 if (!registered_streams) {
1041 float color_point[3];
1042 rs2_transform_point_to_point(color_point, &depth2ColorExtrinsics, depth_point);
1043 float color_pixel[2];
1044 rs2_project_point_to_pixel(color_pixel, &color_intrinsics, color_point);
1045
1046 if (color_pixel[1] < 0 || color_pixel[1] >= color_height || color_pixel[0] < 0 ||
1047 color_pixel[0] >= color_width) {
1048 // For out of bounds color data, default to a shade of blue in order to
1049 // visually distinguish holes. This color value is same as the librealsense
1050 // out of bounds color value.
1051#if (VISP_HAVE_PCL_VERSION < 0x010100) // 1.1.0
1052 unsigned int r = 96, g = 157, b = 198;
1053 uint32_t rgb = (static_cast<uint32_t>(r) << 16 | static_cast<uint32_t>(g) << 8 | static_cast<uint32_t>(b));
1054
1055 pointcloud->points[static_cast<size_t>(depth_pixel_index)].rgb = *reinterpret_cast<float *>(&rgb);
1056#else
1057 pointcloud->points[static_cast<size_t>(depth_pixel_index)].r = (uint8_t)96;
1058 pointcloud->points[static_cast<size_t>(depth_pixel_index)].g = (uint8_t)157;
1059 pointcloud->points[static_cast<size_t>(depth_pixel_index)].b = (uint8_t)198;
1060#endif
1061 }
1062 else {
1063 unsigned int i_ = static_cast<unsigned int>(color_pixel[1]);
1064 unsigned int j_ = static_cast<unsigned int>(color_pixel[0]);
1065
1066#if (VISP_HAVE_PCL_VERSION < 0x010100) // 1.1.0
1067 uint32_t rgb = 0;
1068 if (swap_rb) {
1069 rgb =
1070 (static_cast<uint32_t>(p_color_frame[(i_ * static_cast<unsigned int>(color_width) + j_) * nb_color_pixel]) |
1071 static_cast<uint32_t>(p_color_frame[(i_ * static_cast<unsigned int>(color_width) + j_) * nb_color_pixel + 1]) << 8 |
1072 static_cast<uint32_t>(p_color_frame[(i_ * static_cast<unsigned int>(color_width) + j_) * nb_color_pixel + 2])
1073 << 16);
1074 }
1075 else {
1076 rgb =
1077 (static_cast<uint32_t>(p_color_frame[(i_ * static_cast<unsigned int>(color_width) + j_) * nb_color_pixel]) << 16 |
1078 static_cast<uint32_t>(p_color_frame[(i_ * static_cast<unsigned int>(color_width) + j_) * nb_color_pixel + 1]) << 8 |
1079 static_cast<uint32_t>(p_color_frame[(i_ * static_cast<unsigned int>(color_width) + j_) * nb_color_pixel + 2]));
1080 }
1081
1082 pointcloud->points[static_cast<size_t>(i * depth_width + j)].rgb = *reinterpret_cast<float *>(&rgb);
1083#else
1084 if (swap_rb) {
1085 pointcloud->points[static_cast<size_t>(depth_pixel_index)].b =
1086 p_color_frame[(i_ * static_cast<unsigned int>(color_width) + j_) * nb_color_pixel];
1087 pointcloud->points[static_cast<size_t>(depth_pixel_index)].g =
1088 p_color_frame[(i_ * static_cast<unsigned int>(color_width) + j_) * nb_color_pixel + 1];
1089 pointcloud->points[static_cast<size_t>(depth_pixel_index)].r =
1090 p_color_frame[(i_ * static_cast<unsigned int>(color_width) + j_) * nb_color_pixel + 2];
1091 }
1092 else {
1093 pointcloud->points[static_cast<size_t>(depth_pixel_index)].r =
1094 p_color_frame[(i_ * static_cast<unsigned int>(color_width) + j_) * nb_color_pixel];
1095 pointcloud->points[static_cast<size_t>(depth_pixel_index)].g =
1096 p_color_frame[(i_ * static_cast<unsigned int>(color_width) + j_) * nb_color_pixel + 1];
1097 pointcloud->points[static_cast<size_t>(depth_pixel_index)].b =
1098 p_color_frame[(i_ * static_cast<unsigned int>(color_width) + j_) * nb_color_pixel + 2];
1099 }
1100#endif
1101 }
1102 }
1103 else {
1104#if (VISP_HAVE_PCL_VERSION < 0x010100) // 1.1.0
1105 uint32_t rgb = 0;
1106 if (swap_rb) {
1107 rgb = (static_cast<uint32_t>(p_color_frame[(i * static_cast<unsigned int>(color_width) + j) * nb_color_pixel]) |
1108 static_cast<uint32_t>(p_color_frame[(i * static_cast<unsigned int>(color_width) + j) * nb_color_pixel + 1]) << 8 |
1109 static_cast<uint32_t>(p_color_frame[(i * static_cast<unsigned int>(color_width) + j) * nb_color_pixel + 2]) << 16);
1110 }
1111 else {
1112 rgb = (static_cast<uint32_t>(p_color_frame[(i * static_cast<unsigned int>(color_width) + j) * nb_color_pixel]) << 16 |
1113 static_cast<uint32_t>(p_color_frame[(i * static_cast<unsigned int>(color_width) + j) * nb_color_pixel + 1]) << 8 |
1114 static_cast<uint32_t>(p_color_frame[(i * static_cast<unsigned int>(color_width) + j) * nb_color_pixel + 2]));
1115 }
1116
1117 pointcloud->points[static_cast<size_t>(i * depth_width + j)].rgb = *reinterpret_cast<float *>(&rgb);
1118#else
1119 if (swap_rb) {
1120 pointcloud->points[static_cast<size_t>(depth_pixel_index)].b =
1121 p_color_frame[(i * static_cast<unsigned int>(color_width) + j) * nb_color_pixel];
1122 pointcloud->points[static_cast<size_t>(depth_pixel_index)].g =
1123 p_color_frame[(i * static_cast<unsigned int>(color_width) + j) * nb_color_pixel + 1];
1124 pointcloud->points[static_cast<size_t>(depth_pixel_index)].r =
1125 p_color_frame[(i * static_cast<unsigned int>(color_width) + j) * nb_color_pixel + 2];
1126 }
1127 else {
1128 pointcloud->points[static_cast<size_t>(depth_pixel_index)].r =
1129 p_color_frame[(i * static_cast<unsigned int>(color_width) + j) * nb_color_pixel];
1130 pointcloud->points[static_cast<size_t>(depth_pixel_index)].g =
1131 p_color_frame[(i * static_cast<unsigned int>(color_width) + j) * nb_color_pixel + 1];
1132 pointcloud->points[static_cast<size_t>(depth_pixel_index)].b =
1133 p_color_frame[(i * static_cast<unsigned int>(color_width) + j) * nb_color_pixel + 2];
1134 }
1135#endif
1136 }
1137 }
1138 }
1139}
1140
1141#endif
1142
1151vpHomogeneousMatrix vpRealSense2::getTransformation(const rs2_stream &from, const rs2_stream &to, int from_index) const
1152{
1153 int to_index = -1;
1154
1155 if (from_index != -1) // If we have to specify indices for streams. (Ex.: T265 device having 2 fisheyes)
1156 {
1157 if (from_index == 1) // From left => To right.
1158 to_index = 2;
1159 else if (from_index == 2) // From right => To left.
1160 to_index = 1;
1161 }
1162
1163 auto from_stream = m_pipelineProfile.get_stream(from, from_index);
1164 auto to_stream = m_pipelineProfile.get_stream(to, to_index);
1165
1166 rs2_extrinsics extrinsics = from_stream.get_extrinsics_to(to_stream);
1167
 1168  vpTranslationVector t;
 1169  vpRotationMatrix R;
 1170  for (unsigned int i = 0; i < 3; i++) {
1171 t[i] = extrinsics.translation[i];
1172 for (unsigned int j = 0; j < 3; j++)
1173 R[i][j] = extrinsics.rotation[j * 3 + i]; // rotation is column-major order
1174 }
1175
1176 vpHomogeneousMatrix to_M_from(t, R);
1177 return to_M_from;
1178}
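// Usage sketch (illustrative only): extrinsic transformation from the depth frame to the color frame
//   vpHomogeneousMatrix color_M_depth = rs.getTransformation(RS2_STREAM_DEPTH, RS2_STREAM_COLOR);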
1179
1180#if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
 1190unsigned int vpRealSense2::getOdometryData(vpHomogeneousMatrix *cMw, vpColVector *odo_vel, vpColVector *odo_acc,
 1191                                           double *ts)
1192{
1193 auto frame = m_pipe.wait_for_frames();
1194 auto f = frame.first_or_default(RS2_STREAM_POSE);
1195 auto pose_data = f.as<rs2::pose_frame>().get_pose_data();
1196
1197 if (ts != nullptr)
1198 *ts = frame.get_timestamp();
1199
1200 if (cMw != nullptr) {
1201 m_pos[0] = static_cast<double>(pose_data.translation.x);
1202 m_pos[1] = static_cast<double>(pose_data.translation.y);
1203 m_pos[2] = static_cast<double>(pose_data.translation.z);
1204
1205 m_quat[0] = static_cast<double>(pose_data.rotation.x);
1206 m_quat[1] = static_cast<double>(pose_data.rotation.y);
1207 m_quat[2] = static_cast<double>(pose_data.rotation.z);
1208 m_quat[3] = static_cast<double>(pose_data.rotation.w);
1209
 1210    *cMw = vpHomogeneousMatrix(m_pos, m_quat);
 1211  }
1212
1213 if (odo_vel != nullptr) {
1214 odo_vel->resize(6, false);
1215 (*odo_vel)[0] = static_cast<double>(pose_data.velocity.x);
1216 (*odo_vel)[1] = static_cast<double>(pose_data.velocity.y);
1217 (*odo_vel)[2] = static_cast<double>(pose_data.velocity.z);
1218 (*odo_vel)[3] = static_cast<double>(pose_data.angular_velocity.x);
1219 (*odo_vel)[4] = static_cast<double>(pose_data.angular_velocity.y);
1220 (*odo_vel)[5] = static_cast<double>(pose_data.angular_velocity.z);
1221 }
1222
1223 if (odo_acc != nullptr) {
1224 odo_acc->resize(6, false);
1225 (*odo_acc)[0] = static_cast<double>(pose_data.acceleration.x);
1226 (*odo_acc)[1] = static_cast<double>(pose_data.acceleration.y);
1227 (*odo_acc)[2] = static_cast<double>(pose_data.acceleration.z);
1228 (*odo_acc)[3] = static_cast<double>(pose_data.angular_acceleration.x);
1229 (*odo_acc)[4] = static_cast<double>(pose_data.angular_acceleration.y);
1230 (*odo_acc)[5] = static_cast<double>(pose_data.angular_acceleration.z);
1231 }
1232
1233 return pose_data.tracker_confidence;
1234}
1235
 1256void vpRealSense2::getIMUAcceleration(vpColVector *imu_acc, double *ts)
 1257{
1258 auto frame = m_pipe.wait_for_frames();
1259 auto f = frame.first_or_default(RS2_STREAM_ACCEL);
1260 auto imu_acc_data = f.as<rs2::motion_frame>().get_motion_data();
1261
1262 if (ts != nullptr)
1263 *ts = f.get_timestamp();
1264
1265 if (imu_acc != nullptr) {
1266 imu_acc->resize(3, false);
1267 (*imu_acc)[0] = static_cast<double>(imu_acc_data.x);
1268 (*imu_acc)[1] = static_cast<double>(imu_acc_data.y);
1269 (*imu_acc)[2] = static_cast<double>(imu_acc_data.z);
1270 }
1271}
1272
 1293void vpRealSense2::getIMUVelocity(vpColVector *imu_vel, double *ts)
 1294{
1295 auto frame = m_pipe.wait_for_frames();
1296 auto f = frame.first_or_default(RS2_STREAM_GYRO);
1297 auto imu_vel_data = f.as<rs2::motion_frame>().get_motion_data();
1298
1299 if (ts != nullptr)
1300 *ts = f.get_timestamp();
1301
1302 if (imu_vel != nullptr) {
1303 imu_vel->resize(3, false);
1304 (*imu_vel)[0] = static_cast<double>(imu_vel_data.x);
 1305    (*imu_vel)[1] = static_cast<double>(imu_vel_data.y);
 1306    (*imu_vel)[2] = static_cast<double>(imu_vel_data.z);
1307 }
1308}
1309
1330void vpRealSense2::getIMUData(vpColVector *imu_acc, vpColVector *imu_vel, double *ts)
1331{
1332 auto data = m_pipe.wait_for_frames();
1333
1334 if (ts != nullptr)
1335 *ts = data.get_timestamp();
1336
1337 if (imu_acc != nullptr) {
1338 auto acc_data = data.first_or_default(RS2_STREAM_ACCEL);
1339 auto imu_acc_data = acc_data.as<rs2::motion_frame>().get_motion_data();
1340
1341 imu_acc->resize(3, false);
1342 (*imu_acc)[0] = static_cast<double>(imu_acc_data.x);
1343 (*imu_acc)[1] = static_cast<double>(imu_acc_data.y);
1344 (*imu_acc)[2] = static_cast<double>(imu_acc_data.z);
1345 }
1346
1347 if (imu_vel != nullptr) {
1348 auto vel_data = data.first_or_default(RS2_STREAM_GYRO);
1349 auto imu_vel_data = vel_data.as<rs2::motion_frame>().get_motion_data();
1350
1351 imu_vel->resize(3, false);
1352 (*imu_vel)[0] = static_cast<double>(imu_vel_data.x);
1353 (*imu_vel)[1] = static_cast<double>(imu_vel_data.y);
1354 (*imu_vel)[2] = static_cast<double>(imu_vel_data.z);
1355 }
1356}
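// Note: per the librealsense convention, accelerometer data are expressed in m/s^2 and gyroscope
// data in rad/s, both in the IMU frame of the device.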
1357#endif // #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1358
1362bool vpRealSense2::open(const rs2::config &cfg)
1363{
1364 if (m_init) {
1365 close();
1366 }
1367
1368 m_pipelineProfile = m_pipe.start(cfg);
1369
1370 rs2::device dev = m_pipelineProfile.get_device();
1371
1372#if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1373 // Query device product line D400/SR300/L500/T200
1374 m_product_line = dev.get_info(RS2_CAMERA_INFO_PRODUCT_LINE);
1375#endif
1376
1377 // Go over the device's sensors
1378 for (rs2::sensor &sensor : dev.query_sensors()) {
1379 // Check if the sensor is a depth sensor
1380 if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
1381 m_depthScale = dpt.get_depth_scale();
1382 }
1383 }
1384
1385 m_init = true;
1386 return m_init;
1387}
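// Usage sketch (illustrative only): open the device with an explicit configuration
//   vpRealSense2 rs;
//   rs2::config cfg;
//   cfg.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30);
//   rs.open(cfg);
//   vpImage<vpRGBa> I(480, 640);
//   rs.acquire(I);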
1388
1395bool vpRealSense2::open(const rs2::config &cfg, std::function<void(rs2::frame)> &callback)
1396{
1397 if (m_init) {
1398 close();
1399 }
1400
1401 m_pipelineProfile = m_pipe.start(cfg, callback);
1402
1403 rs2::device dev = m_pipelineProfile.get_device();
1404
1405#if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1406 // Query device product line D400/SR300/L500/T200
1407 m_product_line = dev.get_info(RS2_CAMERA_INFO_PRODUCT_LINE);
1408#endif
1409
1410 // Go over the device's sensors
1411 for (rs2::sensor &sensor : dev.query_sensors()) {
1412 // Check if the sensor is a depth sensor
1413 if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
1414 m_depthScale = dpt.get_depth_scale();
1415 }
1416 }
1417
1418 m_init = true;
1419 return m_init;
1420}
1421
 1426std::string vpRealSense2::getProductLine()
 1427{
1428#if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1429 // With previous code, example/device/framegrabber/grabRealSense2.cpp does not work with D455
1430 // Error: Frame didn't arrive within 15000
1431 // Following code from:
1432 // https://github.com/IntelRealSense/librealsense/blob/4673a37d981164af8eeb8e296e430fc1427e008d/unit-tests/live/memory/test-extrinsics.cpp#L119
1433
1434 // Reset product line info
1435 m_product_line = "unknown";
1436
1437 rs2::context ctx;
1438 auto list = ctx.query_devices();
1439 if (list.size() > 0) {
1440 // Only one plugged sensor supported
1441 auto dev = list.front();
1442 auto sensors = dev.query_sensors();
1443 if (dev.supports(RS2_CAMERA_INFO_PRODUCT_LINE)) {
1444 m_product_line = dev.get_info(RS2_CAMERA_INFO_PRODUCT_LINE);
1445 }
1446 }
1447
1448 return m_product_line;
1449#else
1450 return (std::string("unknown"));
1451#endif
1452}
1453
 1463std::string vpRealSense2::getSensorInfo()
 1464{
1465 std::ostringstream oss;
1466 oss << *this;
1467 return oss.str();
1468}
1469
1470namespace
1471{
1472// Helper functions to print information about the RealSense device
1473void print(const rs2_extrinsics &extrinsics, std::ostream &os)
1474{
1475 std::stringstream ss;
1476 ss << "Rotation Matrix:\n";
1477
1478 for (auto i = 0; i < 3; ++i) {
1479 for (auto j = 0; j < 3; ++j) {
1480 ss << std::left << std::setw(15) << std::setprecision(5) << extrinsics.rotation[j * 3 + i];
1481 }
1482 ss << std::endl;
1483 }
1484
1485 ss << "\nTranslation Vector: ";
1486 for (size_t i = 0; i < sizeof(extrinsics.translation) / sizeof(extrinsics.translation[0]); ++i)
1487 ss << std::setprecision(15) << extrinsics.translation[i] << " ";
1488
1489 os << ss.str() << "\n\n";
1490}
1491
1492void print(const rs2_intrinsics &intrinsics, std::ostream &os)
1493{
1494 std::stringstream ss;
1495 ss << std::left << std::setw(14) << "Width: "
1496 << "\t" << intrinsics.width << "\n"
1497 << std::left << std::setw(14) << "Height: "
1498 << "\t" << intrinsics.height << "\n"
1499 << std::left << std::setw(14) << "PPX: "
1500 << "\t" << std::setprecision(15) << intrinsics.ppx << "\n"
1501 << std::left << std::setw(14) << "PPY: "
1502 << "\t" << std::setprecision(15) << intrinsics.ppy << "\n"
1503 << std::left << std::setw(14) << "Fx: "
1504 << "\t" << std::setprecision(15) << intrinsics.fx << "\n"
1505 << std::left << std::setw(14) << "Fy: "
1506 << "\t" << std::setprecision(15) << intrinsics.fy << "\n"
1507 << std::left << std::setw(14) << "Distortion: "
1508 << "\t" << rs2_distortion_to_string(intrinsics.model) << "\n"
1509 << std::left << std::setw(14) << "Coeffs: ";
1510
1511 for (size_t i = 0; i < sizeof(intrinsics.coeffs) / sizeof(intrinsics.coeffs[0]); ++i)
1512 ss << "\t" << std::setprecision(15) << intrinsics.coeffs[i] << " ";
1513
1514 os << ss.str() << "\n\n";
1515}
1516
1517void safe_get_intrinsics(const rs2::video_stream_profile &profile, rs2_intrinsics &intrinsics)
1518{
1519 try {
1520 intrinsics = profile.get_intrinsics();
1521 }
1522 catch (...) {
1523 }
1524}
1525
1526bool operator==(const rs2_intrinsics &lhs, const rs2_intrinsics &rhs)
1527{
1528 return lhs.width == rhs.width && lhs.height == rhs.height && lhs.ppx == rhs.ppx && lhs.ppy == rhs.ppy &&
1529 lhs.fx == rhs.fx && lhs.fy == rhs.fy && lhs.model == rhs.model &&
1530 !std::memcmp(lhs.coeffs, rhs.coeffs, sizeof(rhs.coeffs));
1531}
1532
1533std::string get_str_formats(const std::set<rs2_format> &formats)
1534{
1535 std::stringstream ss;
1536 for (auto format = formats.begin(); format != formats.end(); ++format) {
1537 ss << *format << ((format != formats.end()) && (next(format) == formats.end()) ? "" : "/");
1538 }
1539 return ss.str();
1540}
1541
1542struct stream_and_resolution
1543{
1544 rs2_stream stream;
1545 int stream_index;
1546 int width;
1547 int height;
1548 std::string stream_name;
1549
1550 bool operator<(const stream_and_resolution &obj) const
1551 {
1552 return (std::make_tuple(stream, stream_index, width, height) <
1553 std::make_tuple(obj.stream, obj.stream_index, obj.width, obj.height));
1554 }
1555};
1556
1557struct stream_and_index
1558{
1559 rs2_stream stream;
1560 int stream_index;
1561
1562 bool operator<(const stream_and_index &obj) const
1563 {
1564 return (std::make_tuple(stream, stream_index) < std::make_tuple(obj.stream, obj.stream_index));
1565 }
1566};
1567} // anonymous namespace
1568
1593std::ostream &operator<<(std::ostream &os, const vpRealSense2 &rs)
1594{
1595 rs2::device dev = rs.m_pipelineProfile.get_device();
1596 os << std::left << std::setw(30) << dev.get_info(RS2_CAMERA_INFO_NAME) << std::setw(20)
1597 << dev.get_info(RS2_CAMERA_INFO_SERIAL_NUMBER) << std::setw(20) << dev.get_info(RS2_CAMERA_INFO_FIRMWARE_VERSION)
1598 << std::endl;
1599
1600 // Show which options are supported by this device
1601 os << " Device info: \n";
1602 for (auto j = 0; j < RS2_CAMERA_INFO_COUNT; ++j) {
1603 auto param = static_cast<rs2_camera_info>(j);
1604 if (dev.supports(param))
1605 os << " " << std::left << std::setw(30) << rs2_camera_info_to_string(rs2_camera_info(param)) << ": \t"
1606 << dev.get_info(param) << "\n";
1607 }
1608
1609 os << "\n";
1610
1611 for (auto &&sensor : dev.query_sensors()) {
1612 os << "Options for " << sensor.get_info(RS2_CAMERA_INFO_NAME) << std::endl;
1613
1614 os << std::setw(55) << " Supported options:" << std::setw(10) << "min" << std::setw(10) << " max" << std::setw(6)
1615 << " step" << std::setw(10) << " default" << std::endl;
1616 for (auto j = 0; j < RS2_OPTION_COUNT; ++j) {
1617 auto opt = static_cast<rs2_option>(j);
1618 if (sensor.supports(opt)) {
1619 auto range = sensor.get_option_range(opt);
1620 os << " " << std::left << std::setw(50) << opt << " : " << std::setw(5) << range.min << "... "
1621 << std::setw(12) << range.max << std::setw(6) << range.step << std::setw(10) << range.def << "\n";
1622 }
1623 }
1624
1625 os << "\n";
1626 }
1627
1628 for (auto &&sensor : dev.query_sensors()) {
1629 os << "Stream Profiles supported by " << sensor.get_info(RS2_CAMERA_INFO_NAME) << "\n";
1630
1631 os << std::setw(55) << " Supported modes:" << std::setw(10) << "stream" << std::setw(10) << " resolution"
1632 << std::setw(6) << " fps" << std::setw(10) << " format"
1633 << "\n";
1634 // Show which streams are supported by this device
1635 for (auto &&profile : sensor.get_stream_profiles()) {
1636 if (auto video = profile.as<rs2::video_stream_profile>()) {
1637 os << " " << profile.stream_name() << "\t " << video.width() << "x" << video.height() << "\t@ "
1638 << profile.fps() << "Hz\t" << profile.format() << "\n";
1639 }
1640 else {
1641 os << " " << profile.stream_name() << "\t@ " << profile.fps() << "Hz\t" << profile.format() << "\n";
1642 }
1643 }
1644
1645 os << "\n";
1646 }
1647
1648 std::map<stream_and_index, rs2::stream_profile> streams;
1649 std::map<stream_and_resolution, std::vector<std::pair<std::set<rs2_format>, rs2_intrinsics> > > intrinsics_map;
1650 for (auto &&sensor : dev.query_sensors()) {
1651 // Intrinsics
1652 for (auto &&profile : sensor.get_stream_profiles()) {
1653 if (auto video = profile.as<rs2::video_stream_profile>()) {
1654 if (streams.find(stream_and_index { profile.stream_type(), profile.stream_index() }) == streams.end()) {
1655 streams[stream_and_index { profile.stream_type(), profile.stream_index() }] = profile;
1656 }
1657
1658 rs2_intrinsics intrinsics {};
1659 stream_and_resolution stream_res { profile.stream_type(), profile.stream_index(), video.width(), video.height(),
1660 profile.stream_name() };
1661 safe_get_intrinsics(video, intrinsics);
1662 auto it = std::find_if(
1663 (intrinsics_map[stream_res]).begin(), (intrinsics_map[stream_res]).end(),
1664 [&](const std::pair<std::set<rs2_format>, rs2_intrinsics> &kvp) { return intrinsics == kvp.second; });
1665 if (it == (intrinsics_map[stream_res]).end()) {
1666 (intrinsics_map[stream_res]).push_back({ {profile.format()}, intrinsics });
1667 }
1668 else {
1669 it->first.insert(profile.format()); // If the intrinsics are equals,
1670 // add the profile format to
1671 // format set
1672 }
1673 }
1674 }
1675 }
1676
1677 os << "Provided Intrinsic:\n";
1678 for (auto &kvp : intrinsics_map) {
1679 auto stream_res = kvp.first;
1680 for (auto &intrinsics : kvp.second) {
1681 auto formats = get_str_formats(intrinsics.first);
1682 os << "Intrinsic of \"" << stream_res.stream_name << "\"\t " << stream_res.width << "x" << stream_res.height
1683 << "\t " << formats << "\n";
1684 if (intrinsics.second == rs2_intrinsics {}) {
1685 os << "Intrinsic NOT available!\n\n";
1686 }
1687 else {
1688 print(intrinsics.second, os);
1689 }
1690 }
1691 }
1692
1693 // Print Extrinsics
1694 os << "\nProvided Extrinsic:\n";
1695 for (auto kvp1 = streams.begin(); kvp1 != streams.end(); ++kvp1) {
1696 for (auto kvp2 = streams.begin(); kvp2 != streams.end(); ++kvp2) {
1697 os << "Extrinsic from \"" << kvp1->second.stream_name() << "\"\t "
1698 << "To"
1699 << "\t \"" << kvp2->second.stream_name() << "\"\n";
1700 auto extrinsics = kvp1->second.get_extrinsics_to(kvp2->second);
1701 print(extrinsics, os);
1702 }
1703 }
1704
1705 return os;
1706}
1707END_VISP_NAMESPACE
1708#elif !defined(VISP_BUILD_SHARED_LIBS)
1709// Work around to avoid warning: libvisp_sensor.a(vpRealSense2.cpp.o) has symbols
1710void dummy_vpRealSense2() { }
1711#endif