Visual Servoing Platform version 3.7.0
Loading...
Searching...
No Matches
servoPixhawkDroneIBVS.cpp
1/*
2 * ViSP, open source Visual Servoing Platform software.
3 * Copyright (C) 2005 - 2025 by Inria. All rights reserved.
4 *
5 * This software is free software; you can redistribute it and/or modify
6 * it under the terms of the GNU General Public License as published by
7 * the Free Software Foundation; either version 2 of the License, or
8 * (at your option) any later version.
9 * See the file LICENSE.txt at the root directory of this source
10 * distribution for additional information about the GNU GPL.
11 *
12 * For using ViSP with software that can not be combined with the GNU
13 * GPL, please contact Inria about acquiring a ViSP Professional
14 * Edition License.
15 *
16 * See https://visp.inria.fr for more information.
17 *
18 * This software was developed at:
19 * Inria Rennes - Bretagne Atlantique
20 * Campus Universitaire de Beaulieu
21 * 35042 Rennes Cedex
22 * France
23 *
24 * If you have questions regarding the use of this file, please contact
25 * Inria at visp@inria.fr
26 *
27 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
28 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
29 *
30 * Description:
31 * Example that shows how to do visual servoing with a drone equipped with a Pixhawk.
32 */
33
34#include <iostream>
35
36#include <visp3/core/vpConfig.h>
37
38// Check if std:c++17 or higher
39#if defined(VISP_HAVE_MAVSDK) && ((__cplusplus >= 201703L) || (defined(_MSVC_LANG) && (_MSVC_LANG >= 201703L))) && \
40 defined(VISP_HAVE_REALSENSE2) && defined(VISP_HAVE_PUGIXML)
41
42#include <visp3/core/vpImageConvert.h>
43#include <visp3/core/vpMomentAreaNormalized.h>
44#include <visp3/core/vpMomentBasic.h>
45#include <visp3/core/vpMomentCentered.h>
46#include <visp3/core/vpMomentDatabase.h>
47#include <visp3/core/vpMomentGravityCenter.h>
48#include <visp3/core/vpMomentGravityCenterNormalized.h>
49#include <visp3/core/vpMomentObject.h>
50#include <visp3/core/vpPixelMeterConversion.h>
51#include <visp3/core/vpPoint.h>
52#include <visp3/core/vpTime.h>
53#include <visp3/core/vpXmlParserCamera.h>
54#include <visp3/detection/vpDetectorAprilTag.h>
55#include <visp3/gui/vpDisplayFactory.h>
56#include <visp3/gui/vpPlot.h>
57#include <visp3/robot/vpRobotMavsdk.h>
58#include <visp3/sensor/vpRealSense2.h>
59#include <visp3/visual_features/vpFeatureBuilder.h>
60#include <visp3/visual_features/vpFeatureMomentAreaNormalized.h>
61#include <visp3/visual_features/vpFeatureMomentGravityCenterNormalized.h>
62#include <visp3/visual_features/vpFeatureVanishingPoint.h>
63#include <visp3/vs/vpServo.h>
64#include <visp3/vs/vpServoDisplay.h>
65
66// Comment next line to disable sending commands to the robot
67#define CONTROL_UAV
68
69#ifdef ENABLE_VISP_NAMESPACE
70using namespace VISP_NAMESPACE_NAME;
71#endif
72
73bool compareImagePoint(std::pair<size_t, vpImagePoint> p1, std::pair<size_t, vpImagePoint> p2)
74{
75 return (p1.second.get_v() < p2.second.get_v());
76};
77
94int main(int argc, char **argv)
95{
96#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
97 std::shared_ptr<vpDisplay> display;
98#else
99 vpDisplay *display = nullptr;
100#endif
101 try {
102 std::string opt_connecting_info = "udp://:192.168.30.111:14552";
103 double tagSize = -1;
104 double opt_distance_to_tag = -1;
105 bool opt_has_distance_to_tag = false;
106 int opt_display_fps = 10;
107 bool opt_verbose = false;
108
109 int acq_fps = 30;
110
111 if (argc >= 3 && std::string(argv[1]) == "--tag-size") {
112 tagSize = std::atof(argv[2]); // Tag size option is required
113 if (tagSize <= 0) {
114 std::cout << "Error : invalid tag size." << std::endl << "See " << argv[0] << " --help" << std::endl;
115 return EXIT_FAILURE;
116 }
117 for (int i = 3; i < argc; i++) {
118 if (std::string(argv[i]) == "--co" && i + 1 < argc) {
119 opt_connecting_info = std::string(argv[i + 1]);
120 i++;
121 }
122 else if (std::string(argv[i]) == "--distance-to-tag" && i + 1 < argc) {
123 opt_distance_to_tag = std::atof(argv[i + 1]);
124 if (opt_distance_to_tag <= 0) {
125 std::cout << "Error : invalid distance to tag." << std::endl << "See " << argv[0] << " --help" << std::endl;
126 return EXIT_FAILURE;
127 }
128 opt_has_distance_to_tag = true;
129 i++;
130
131 }
132 else if (std::string(argv[i]) == "--display-fps" && i + 1 < argc) {
133 opt_display_fps = std::stoi(std::string(argv[i + 1]));
134 i++;
135 }
136 else if (std::string(argv[i]) == "--verbose" || std::string(argv[i]) == "-v") {
137 opt_verbose = true;
138 }
139 else {
140 std::cout << "Error : unknown parameter " << argv[i] << std::endl
141 << "See " << argv[0] << " --help" << std::endl;
142 return EXIT_FAILURE;
143 }
144 }
145 }
146 else if (argc >= 2 && (std::string(argv[1]) == "--help" || std::string(argv[1]) == "-h")) {
147 std::cout << "\nUsage:\n"
148 << " " << argv[0]
149 << " [--tag-size <tag size [m]>] [--co <connection information>] [--distance-to-tag <distance>]"
150 << " [--display-fps <display fps>] [--verbose] [-v] [--help] [-h]\n"
151 << std::endl
152 << "Description:\n"
153 << " --tag-size <size>\n"
154 << " The size of the tag to detect in meters, required.\n\n"
155 << " --co <connection information>\n"
156 << " - UDP: udp://[host][:port]\n"
157 << " - TCP: tcp://[host][:port]\n"
158 << " - serial: serial://[path][:baudrate]\n"
159 << " - Default: udp://192.168.30.111:14552).\n\n"
160 << " --distance-to-tag <distance>\n"
161 << " The desired distance to the tag in meters (default: 1 meter).\n\n"
162 << " --display-fps <display_fps>\n"
163 << " The desired fps rate for the video display (default: 10 fps).\n\n"
164 << " --verbose, -v\n"
165 << " Enables verbosity (drone information messages and velocity commands\n"
166 << " are then displayed).\n\n"
167 << " --help, -h\n"
168 << " Print help message.\n"
169 << std::endl;
170 return EXIT_SUCCESS;
171
172 }
173 else {
174 std::cout << "Error : tag size parameter required." << std::endl << "See " << argv[0] << " --help" << std::endl;
175 return EXIT_FAILURE;
176 }
177
178 std::cout << std::endl
179 << "WARNING:" << std::endl
180 << " - This program does no sensing or avoiding of obstacles, " << std::endl
181 << " the drone WILL collide with any objects in the way! Make sure the " << std::endl
182 << " drone has approximately 3 meters of free space on all sides." << std::endl
183 << " - The drone uses a forward-facing camera for Apriltag detection," << std::endl
184 << " make sure the drone flies above a non-uniform flooring," << std::endl
185 << " or its movement will be inacurate and dangerous !" << std::endl
186 << std::endl;
187
188// Connect to the drone
189 vpRobotMavsdk drone(opt_connecting_info);
190
191 if (drone.isRunning()) {
192 vpRealSense2 rs;
193
194 std::string product_line = rs.getProductLine();
195 if (opt_verbose) {
196 std::cout << "Product line: " << product_line << std::endl;
197 }
198
199 if (product_line == "T200") {
200 std::cout << "This example doesn't support T200 product line family !" << std::endl;
201 return EXIT_SUCCESS;
202 }
203 rs2::config config;
204
205 config.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, acq_fps);
206 config.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, acq_fps);
207 config.enable_stream(RS2_STREAM_INFRARED, 640, 480, RS2_FORMAT_Y8, acq_fps);
208
209 rs.open(config);
210 vpCameraParameters cam = rs.getCameraParameters(RS2_STREAM_COLOR);
211
212 if (opt_verbose) {
213 cam.printParameters();
214 }
215
216#ifdef CONTROL_UAV
217 drone.doFlatTrim(); // Flat trim calibration
218 drone.takeOff(); // Take off
219#endif
220
221 vpImage<unsigned char> I(rs.getIntrinsics(RS2_STREAM_COLOR).height, rs.getIntrinsics(RS2_STREAM_COLOR).width);
222
223 int orig_displayX = 100;
224 int orig_displayY = 100;
225#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
226 display = vpDisplayFactory::createDisplay(I, orig_displayX, orig_displayY, "DRONE VIEW");
227#else
228 display = vpDisplayFactory::allocateDisplay(I, orig_displayX, orig_displayY, "DRONE VIEW");
229#endif
232 double time_since_last_display = vpTime::measureTimeMs();
233
234 vpPlot plotter(1, 700, 700, orig_displayX + static_cast<int>(I.getWidth()) + 20, orig_displayY,
235 "Visual servoing tasks");
236 unsigned int iter = 0;
237
240 vpDetectorAprilTag detector(tagFamily); // The detector used to detect Apritags
242 detector.setAprilTagQuadDecimate(4.0);
243 detector.setAprilTagNbThreads(4);
244 detector.setDisplayTag(true);
245
246 vpServo task; // Visual servoing task
247
248 // double lambda = 0.5;
249 vpAdaptiveGain lambda = vpAdaptiveGain(1.5, 0.7, 30);
251 task.setInteractionMatrixType(vpServo::CURRENT);
252 task.setLambda(lambda);
253
255 /*
256 * In the following section, (c1) is an intermediate frame attached to the camera
257 * that has axis aligned with the FLU body frame. The real camera frame is denoted (c).
258 * The FLU body-frame of the drone denoted (e) is the one in which we need to convert
259 * every velocity command computed by visual servoing.
260 *
261 * We can easily estimate the homogeneous matrix between (c1) and (c) where
262 * in our case we have -10 degrees around X camera axis.
263 * Then for the transformation between (e) and (c1) frames we can consider only translations.
264 *
265 * Using those matrices, we can in the end obtain the homogeneous matrix between (c) and (e) frames.
266 * This homogeneous matrix is then used to compute the velocity twist matrix cVe.
267 */
268 vpRxyzVector c1_rxyz_c(vpMath::rad(-10.0), vpMath::rad(0), 0);
269 vpRotationMatrix c1Rc(c1_rxyz_c); // Rotation between (c1) and (c)
270 vpHomogeneousMatrix c1Mc(vpTranslationVector(), c1Rc); // Homogeneous matrix between (c1) and (c)
271
272 vpRotationMatrix c1Re { 1, 0, 0, 0, 0, 1, 0, -1, 0 }; // Rotation between (c1) and (e)
273 vpTranslationVector c1te(0, -0.03, -0.07); // Translation between (c1) and (e)
274 vpHomogeneousMatrix c1Me(c1te, c1Re); // Homogeneous matrix between (c1) and (e)
275
276 vpHomogeneousMatrix cMe = c1Mc.inverse() * c1Me; // Homogeneous matrix between (c) and (e)
277
278 vpVelocityTwistMatrix cVe(cMe);
280 task.set_cVe(cVe);
281
282 vpMatrix eJe(6, 4, 0);
283
284 eJe[0][0] = 1;
285 eJe[1][1] = 1;
286 eJe[2][2] = 1;
287 eJe[5][3] = 1;
288
289 // double Z_d = 1.; // Desired distance to the target
290 double Z_d = (opt_has_distance_to_tag ? opt_distance_to_tag : 1.);
291
292 // Define the desired polygon corresponding the the AprilTag CLOCKWISE
293 double X[4] = { tagSize / 2., tagSize / 2., -tagSize / 2., -tagSize / 2. };
294 double Y[4] = { tagSize / 2., -tagSize / 2., -tagSize / 2., tagSize / 2. };
295 std::vector<vpPoint> vec_P, vec_P_d;
296
297 for (int i = 0; i < 4; i++) {
298 vpPoint P_d(X[i], Y[i], 0);
299 vpHomogeneousMatrix cdMo(0, 0, Z_d, 0, 0, 0);
300 P_d.track(cdMo); //
301 vec_P_d.push_back(P_d);
302 }
303 vpMomentObject m_obj(3), m_obj_d(3);
304 vpMomentDatabase mdb, mdb_d;
305 vpMomentBasic mb_d; // Here only to get the desired area m00
306 vpMomentGravityCenter mg, mg_d;
307 vpMomentCentered mc, mc_d;
308 vpMomentAreaNormalized man(0, Z_d), man_d(0, Z_d); // Declare normalized area updated below with m00
309 vpMomentGravityCenterNormalized mgn, mgn_d; // Declare normalized gravity center
310
311 // Desired moments
312 m_obj_d.setType(vpMomentObject::DENSE_POLYGON); // Consider the AprilTag as a polygon
313 m_obj_d.fromVector(vec_P_d); // Initialize the object with the points coordinates
314
315 mb_d.linkTo(mdb_d); // Add basic moments to database
316 mg_d.linkTo(mdb_d); // Add gravity center to database
317 mc_d.linkTo(mdb_d); // Add centered moments to database
318 man_d.linkTo(mdb_d); // Add area normalized to database
319 mgn_d.linkTo(mdb_d); // Add gravity center normalized to database
320 mdb_d.updateAll(m_obj_d); // All of the moments must be updated, not just an_d
321 mg_d.compute(); // Compute gravity center moment
322 mc_d.compute(); // Compute centered moments AFTER gravity center
323
324 double area = 0;
325 if (m_obj_d.getType() == vpMomentObject::DISCRETE)
326 area = mb_d.get(2, 0) + mb_d.get(0, 2);
327 else
328 area = mb_d.get(0, 0);
329 // Update moment with the desired area
330 man_d.setDesiredArea(area);
331
332 man_d.compute(); // Compute area normalized moment AFTER centered moments
333 mgn_d.compute(); // Compute gravity center normalized moment AFTER area normalized
334 // moment
335
336 // Desired plane
337 double A = 0.0;
338 double B = 0.0;
339 double C = 1.0 / Z_d;
340
341 // Construct area normalized features
342 vpFeatureMomentGravityCenterNormalized s_mgn(mdb, A, B, C), s_mgn_d(mdb_d, A, B, C);
343 vpFeatureMomentAreaNormalized s_man(mdb, A, B, C), s_man_d(mdb_d, A, B, C);
344 vpFeatureVanishingPoint s_vp, s_vp_d;
345
346 // Add the features
347 task.addFeature(s_mgn, s_mgn_d);
348 task.addFeature(s_man, s_man_d);
349 task.addFeature(s_vp, s_vp_d, vpFeatureVanishingPoint::selectAtanOneOverRho());
350
351 plotter.initGraph(0, 4);
352 plotter.setLegend(0, 0, "Xn"); // Distance from center on X axis feature
353 plotter.setLegend(0, 1, "Yn"); // Distance from center on Y axis feature
354 plotter.setLegend(0, 2, "an"); // Tag area feature
355 plotter.setLegend(0, 3, "atan(1/rho)"); // Vanishing point feature
356
357 // Update desired gravity center normalized feature
358 s_mgn_d.update(A, B, C);
359 s_mgn_d.compute_interaction();
360 // Update desired area normalized feature
361 s_man_d.update(A, B, C);
362 s_man_d.compute_interaction();
363
364 // Update desired vanishing point feature for the horizontal line
365 s_vp_d.setAtanOneOverRho(0);
366 s_vp_d.setAlpha(0);
367
368 bool condition;
369 bool runLoop = true;
370 bool vec_ip_has_been_sorted = false;
371 bool send_velocities = false;
372 std::vector<std::pair<size_t, vpImagePoint> > vec_ip_sorted;
373
374 //** Visual servoing loop **//
375 while (drone.isRunning() && runLoop) {
376
377 double startTime = vpTime::measureTimeMs();
378
379 // drone.getGrayscaleImage(I);
380 rs.acquire(I);
381
382 condition = (startTime - time_since_last_display) > 1000. / opt_display_fps ? true : false;
383 if (condition) {
385 time_since_last_display = vpTime::measureTimeMs();
386 }
387
388 std::vector<vpHomogeneousMatrix> cMo_vec;
389 detector.detect(I, tagSize, cam, cMo_vec); // Detect AprilTags in current image
390 double t = vpTime::measureTimeMs() - startTime;
391
392 if (condition) {
393 std::stringstream ss;
394 ss << "Detection time: " << t << " ms";
395 vpDisplay::displayText(I, 40, 20, ss.str(), vpColor::red);
396 }
397
398 if (detector.getNbObjects() != 0) {
399
400 // Update current points used to compute the moments
401 std::vector<vpImagePoint> vec_ip = detector.getPolygon(0);
402 vec_P.clear();
403 for (size_t i = 0; i < vec_ip.size(); i++) { // size = 4
404 double x = 0, y = 0;
405 vpPixelMeterConversion::convertPoint(cam, vec_ip[i], x, y);
406 vpPoint P;
407 P.set_x(x);
408 P.set_y(y);
409 vec_P.push_back(P);
410 }
411
412 // Current moments
413 m_obj.setType(vpMomentObject::DENSE_POLYGON); // Consider the AprilTag as a polygon
414 m_obj.fromVector(vec_P); // Initialize the object with the points coordinates
415
416 mg.linkTo(mdb); // Add gravity center to database
417 mc.linkTo(mdb); // Add centered moments to database
418 man.linkTo(mdb); // Add area normalized to database
419 mgn.linkTo(mdb); // Add gravity center normalized to database
420 mdb.updateAll(m_obj); // All of the moments must be updated, not just an_d
421 mg.compute(); // Compute gravity center moment
422 mc.compute(); // Compute centered moments AFTER gravity center
423 man.setDesiredArea(area); // Desired area was init at 0 (unknow at construction),
424 // need to be updated here
425 man.compute(); // Compute area normalized moment AFTER centered moment
426 mgn.compute(); // Compute gravity center normalized moment AFTER area normalized
427 // moment
428
429 s_mgn.update(A, B, C);
430 s_mgn.compute_interaction();
431 s_man.update(A, B, C);
432 s_man.compute_interaction();
433
434 /* Sort points from their height in the image, and keep original indexes.
435 This is done once, in order to be independent from the orientation of the tag
436 when detecting vanishing points. */
437 if (!vec_ip_has_been_sorted) {
438 for (size_t i = 0; i < vec_ip.size(); i++) {
439
440 // Add the points and their corresponding index
441 std::pair<size_t, vpImagePoint> index_pair = std::pair<size_t, vpImagePoint>(i, vec_ip[i]);
442 vec_ip_sorted.push_back(index_pair);
443 }
444
445 // Sort the points and indexes from the v value of the points
446 std::sort(vec_ip_sorted.begin(), vec_ip_sorted.end(), compareImagePoint);
447
448 vec_ip_has_been_sorted = true;
449 }
450
451 // Use the two highest points for the first line, and the two others for the second
452 // line.
453 vpFeatureBuilder::create(s_vp, cam, vec_ip[vec_ip_sorted[0].first], vec_ip[vec_ip_sorted[1].first],
454 vec_ip[vec_ip_sorted[2].first], vec_ip[vec_ip_sorted[3].first],
456
457 task.set_cVe(cVe);
458 task.set_eJe(eJe);
459
460 // Compute the control law. Velocities are computed in the mobile robot reference
461 // frame
462 vpColVector ve = task.computeControlLaw();
463 if (!send_velocities) {
464 ve = 0;
465 }
466
467 // Sending the control law to the drone
468 if (opt_verbose) {
469 std::cout << "ve: " << ve.t() << std::endl;
470 }
471
472#ifdef CONTROL_UAV
473 drone.setVelocity(ve);
474#endif
475
476 if (condition) {
477 for (size_t i = 0; i < 4; i++) {
478 vpDisplay::displayCross(I, vec_ip[i], 15, vpColor::red, 1);
479 std::stringstream ss;
480 ss << i;
481 vpDisplay::displayText(I, vec_ip[i] + vpImagePoint(15, 15), ss.str(), vpColor::green);
482 }
483
484 // Display visual features
486 3); // Current polygon used to compure an moment
487 vpDisplay::displayCross(I, detector.getCog(0), 15, vpColor::green,
488 3); // Current polygon used to compute a moment
489 vpDisplay::displayLine(I, 0, static_cast<int>(cam.get_u0()), static_cast<int>(I.getHeight()) - 1,
490 static_cast<int>(cam.get_u0()), vpColor::red,
491 3); // Vertical line as desired x position
492 vpDisplay::displayLine(I, static_cast<int>(cam.get_v0()), 0, static_cast<int>(cam.get_v0()),
493 static_cast<int>(I.getWidth()) - 1, vpColor::red,
494 3); // Horizontal line as desired y position
495
496 // Display lines corresponding to the vanishing point for the horizontal lines
497 vpDisplay::displayLine(I, vec_ip[vec_ip_sorted[0].first], vec_ip[vec_ip_sorted[1].first], vpColor::red, 1,
498 false);
499 vpDisplay::displayLine(I, vec_ip[vec_ip_sorted[2].first], vec_ip[vec_ip_sorted[3].first], vpColor::red, 1,
500 false);
501 }
502
503 }
504 else {
505
506 std::stringstream sserr;
507 sserr << "Failed to detect an Apriltag, or detected multiple ones";
508 if (condition) {
509 vpDisplay::displayText(I, 120, 20, sserr.str(), vpColor::red);
511 }
512 else {
513 std::cout << sserr.str() << std::endl;
514 }
515#ifdef CONTROL_UAV
516 drone.stopMoving(); // In this case, we stop the drone
517#endif
518 }
519
520 if (condition) {
521 {
522 std::stringstream ss;
523 ss << "Left click to " << (send_velocities ? "stop the robot" : "servo the robot")
524 << ", right click to quit.";
525 vpDisplay::displayText(I, 20, 20, ss.str(), vpColor::red);
526 }
528
529 plotter.plot(0, iter, task.getError());
530 }
531
533 if (vpDisplay::getClick(I, button, false)) {
534 switch (button) {
536 send_velocities = !send_velocities;
537 break;
538
540 drone.land();
541 runLoop = false;
542 break;
543
544 default:
545 break;
546 }
547 }
548
549 double totalTime = vpTime::measureTimeMs() - startTime;
550 std::stringstream sstime;
551 sstime << "Total time: " << totalTime << " ms";
552 if (condition) {
553 vpDisplay::displayText(I, 80, 20, sstime.str(), vpColor::red);
555 }
556
557 iter++;
558 vpTime::wait(startTime, 1000. / acq_fps);
559 }
560
561#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
562 if (display != nullptr) {
563 delete display;
564 }
565#endif
566 return EXIT_SUCCESS;
567 }
568 else {
569 std::cout << "ERROR : failed to setup drone control." << std::endl;
570#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
571 if (display != nullptr) {
572 delete display;
573 }
574#endif
575 return EXIT_FAILURE;
576 }
577 }
578 catch (const vpException &e) {
579 std::cout << "Caught an exception: " << e << std::endl;
580#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
581 if (display != nullptr) {
582 delete display;
583 }
584#endif
585 return EXIT_FAILURE;
586 }
587}
588
589#else
590
// Fallback entry point, compiled when MAVSDK, librealsense2, pugixml or the
// C++17 standard is not available: report each missing prerequisite on
// standard output and exit successfully.
int main()
{
#if !defined(VISP_HAVE_MAVSDK)
  std::cout << "\nThis example requires mavsdk library. You should install it, configure and rebuild ViSP.\n" << std::endl;
#endif
#if !defined(VISP_HAVE_REALSENSE2)
  std::cout << "\nThis example requires librealsense2 library. You should install it, configure and rebuild ViSP.\n" << std::endl;
#endif
#ifndef VISP_HAVE_PUGIXML
  std::cout << "\nThis example requires pugixml built-in 3rdparty." << std::endl;
#endif
#if !((__cplusplus >= 201703L) || (defined(_MSVC_LANG) && (_MSVC_LANG >= 201703L)))
  std::cout << "\nThis example requires at least cxx17. You should enable cxx17 during ViSP configuration with cmake and rebuild ViSP.\n"
            << std::endl;
#endif
  return EXIT_SUCCESS;
}
610
611#endif // #if defined(VISP_HAVE_MAVSDK)
Adaptive gain computation.
Generic class defining intrinsic camera parameters.
Implementation of column vector and the associated operations.
vpRowVector t() const
static const vpColor red
Definition vpColor.h:198
static const vpColor green
Definition vpColor.h:201
@ TAG_36h11
AprilTag 36h11 pattern (recommended).
Class that defines generic functionalities for display.
Definition vpDisplay.h:171
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void displayLine(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color, unsigned int thickness=1, bool segment=true)
static void displayCross(const vpImage< unsigned char > &I, const vpImagePoint &ip, unsigned int size, const vpColor &color, unsigned int thickness=1)
static void flush(const vpImage< unsigned char > &I)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
static void displayPolygon(const vpImage< unsigned char > &I, const std::vector< vpImagePoint > &vip, const vpColor &color, unsigned int thickness=1, bool closed=true)
error that can be emitted by ViSP classes.
Definition vpException.h:60
static void create(vpFeaturePoint &s, const vpCameraParameters &cam, const vpDot &d)
Functionality computation for normalized surface moment feature. Computes the interaction matrix asso...
Functionality computation for centered and normalized moment feature. Computes the interaction matrix...
static unsigned int selectAtanOneOverRho()
void setAlpha(double alpha)
Set vanishing point feature value.
void setAtanOneOverRho(double atan_one_over_rho)
Set vanishing point feature value.
Implementation of an homogeneous matrix and operations on such kind of matrices.
vpHomogeneousMatrix inverse() const
Class that defines a 2D point in an image. This class is useful for image processing and stores only ...
Definition of the vpImage class member functions.
Definition vpImage.h:131
static double rad(double deg)
Definition vpMath.h:129
Implementation of a matrix and operations on matrices.
Definition vpMatrix.h:175
Class handling the normalized surface moment that is invariant in scale and used to estimate depth.
This class defines the 2D basic moment . This class is a wrapper for vpMomentObject which allows to u...
const std::vector< double > & get() const
This class defines the double-indexed centered moment descriptor .
void compute() VP_OVERRIDE
This class allows registering all vpMoments so they can access each other according to their dependencies.
virtual void updateAll(vpMomentObject &object)
Class describing 2D normalized gravity center moment.
Class describing 2D gravity center moment.
Class for generic objects.
void linkTo(vpMomentDatabase &moments)
Definition vpMoment.cpp:114
static void convertPoint(const vpCameraParameters &cam, const double &u, const double &v, double &x, double &y)
This class enables real time drawing of 2D or 3D graphics. An instance of the class open a window whi...
Definition vpPlot.h:117
Class that defines a 3D point in the object frame and allows forward projection of a 3D point in the ...
Definition vpPoint.h:79
void set_x(double x)
Set the point x coordinate in the image plane.
Definition vpPoint.cpp:471
void set_y(double y)
Set the point y coordinate in the image plane.
Definition vpPoint.cpp:473
vpCameraParameters getCameraParameters(const rs2_stream &stream, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithDistortion, int index=-1) const
void acquire(vpImage< unsigned char > &grey, double *ts=nullptr)
bool open(const rs2::config &cfg=rs2::config())
rs2_intrinsics getIntrinsics(const rs2_stream &stream, int index=-1) const
std::string getProductLine()
Implementation of a rotation matrix and operations on such kind of matrices.
Implementation of a rotation vector as Euler angle minimal representation.
@ EYEINHAND_L_cVe_eJe
Definition vpServo.h:183
@ CURRENT
Definition vpServo.h:217
Class that consider the case of a translation vector.
std::shared_ptr< vpDisplay > createDisplay()
Return a smart pointer vpDisplay specialization if a GUI library is available or nullptr otherwise.
vpDisplay * allocateDisplay()
Return a newly allocated vpDisplay specialization if a GUI library is available or nullptr otherwise.
VISP_EXPORT double measureTimeMs()
VISP_EXPORT int wait(double t0, double t)