Visual Servoing Platform version 3.7.0
Loading...
Searching...
No Matches
servoUniversalRobotsIBVS.cpp
1/*
2 * ViSP, open source Visual Servoing Platform software.
3 * Copyright (C) 2005 - 2025 by Inria. All rights reserved.
4 *
5 * This software is free software; you can redistribute it and/or modify
6 * it under the terms of the GNU General Public License as published by
7 * the Free Software Foundation; either version 2 of the License, or
8 * (at your option) any later version.
9 * See the file LICENSE.txt at the root directory of this source
10 * distribution for additional information about the GNU GPL.
11 *
12 * For using ViSP with software that can not be combined with the GNU
13 * GPL, please contact Inria about acquiring a ViSP Professional
14 * Edition License.
15 *
16 * See https://visp.inria.fr for more information.
17 *
18 * This software was developed at:
19 * Inria Rennes - Bretagne Atlantique
20 * Campus Universitaire de Beaulieu
21 * 35042 Rennes Cedex
22 * France
23 *
24 * If you have questions regarding the use of this file, please contact
25 * Inria at visp@inria.fr
26 *
27 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
28 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
29 *
30 * Description:
31 * tests the control law
32 * eye-in-hand control
33 * velocity computed in the camera frame
34 */
35
57
58#include <iostream>
59
60#include <visp3/core/vpConfig.h>
61
62#if defined(VISP_HAVE_REALSENSE2) && defined(VISP_HAVE_DISPLAY) && defined(VISP_HAVE_UR_RTDE)
63
64#include <visp3/core/vpCameraParameters.h>
65#include <visp3/detection/vpDetectorAprilTag.h>
66#include <visp3/gui/vpDisplayFactory.h>
67#include <visp3/gui/vpPlot.h>
68#include <visp3/io/vpImageIo.h>
69#include <visp3/robot/vpRobotUniversalRobots.h>
70#include <visp3/sensor/vpRealSense2.h>
71#include <visp3/visual_features/vpFeatureBuilder.h>
72#include <visp3/visual_features/vpFeaturePoint.h>
73#include <visp3/vs/vpServo.h>
74#include <visp3/vs/vpServoDisplay.h>
75
76#ifdef ENABLE_VISP_NAMESPACE
77using namespace VISP_NAMESPACE_NAME;
78#endif
79
80void display_point_trajectory(const vpImage<unsigned char> &I, const std::vector<vpImagePoint> &vip,
81 std::vector<vpImagePoint> *traj_vip)
82{
83 for (size_t i = 0; i < vip.size(); i++) {
84 if (traj_vip[i].size()) {
85 // Add the point only if distance with the previous > 1 pixel
86 if (vpImagePoint::distance(vip[i], traj_vip[i].back()) > 1.) {
87 traj_vip[i].push_back(vip[i]);
88 }
89 }
90 else {
91 traj_vip[i].push_back(vip[i]);
92 }
93 }
94 for (size_t i = 0; i < vip.size(); i++) {
95 for (size_t j = 1; j < traj_vip[i].size(); j++) {
96 vpDisplay::displayLine(I, traj_vip[i][j - 1], traj_vip[i][j], vpColor::green, 2);
97 }
98 }
99}
100
101int main(int argc, char **argv)
102{
103 double opt_tagSize = 0.120;
104 std::string opt_robot_ip = "192.168.0.100";
105 std::string opt_eMc_filename = "";
106 bool display_tag = true;
107 int opt_quad_decimate = 2;
108 bool opt_verbose = false;
109 bool opt_plot = false;
110 bool opt_adaptive_gain = false;
111 bool opt_task_sequencing = false;
112 double convergence_threshold = 0.00005;
113
114 for (int i = 1; i < argc; i++) {
115 if (std::string(argv[i]) == "--tag-size" && i + 1 < argc) {
116 opt_tagSize = std::stod(argv[++i]);
117 }
118 else if (std::string(argv[i]) == "--tag-quad-decimate" && i + 1 < argc) {
119 opt_quad_decimate = std::stoi(argv[++i]);
120 }
121 else if (std::string(argv[i]) == "--ip" && i + 1 < argc) {
122 opt_robot_ip = std::string(argv[++i]);
123 }
124 else if (std::string(argv[i]) == "--eMc" && i + 1 < argc) {
125 opt_eMc_filename = std::string(argv[++i]);
126 }
127 else if (std::string(argv[i]) == "--verbose") {
128 opt_verbose = true;
129 }
130 else if (std::string(argv[i]) == "--plot") {
131 opt_plot = true;
132 }
133 else if (std::string(argv[i]) == "--adpative-gain") {
134 opt_adaptive_gain = true;
135 }
136 else if (std::string(argv[i]) == "--task-sequencing") {
137 opt_task_sequencing = true;
138 }
139 else if (std::string(argv[i]) == "--no-convergence-threshold") {
140 convergence_threshold = 0.;
141 }
142 else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
143 std::cout
144 << argv[0] << " [--ip <default " << opt_robot_ip << ">] [--tag-size <marker size in meter; default "
145 << opt_tagSize << ">] [--eMc <eMc extrinsic file>] "
146 << "[--tag-quad-decimate <decimation; default " << opt_quad_decimate
147 << ">] [--adpative-gain] [--plot] [--task-sequencing] [--no-convergence-threshold] [--verbose] [--help] [-h]"
148 << "\n";
149 return EXIT_SUCCESS;
150 }
151 }
152
154
155#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
156 std::shared_ptr<vpDisplay> display;
157#else
158 vpDisplay *display = nullptr;
159#endif
160 try {
161 robot.connect(opt_robot_ip);
162
163 std::cout << "WARNING: This example will move the robot! "
164 << "Please make sure to have the user stop button at hand!" << std::endl
165 << "Press Enter to continue..." << std::endl;
166 std::cin.ignore();
167
168 /*
169 * Move to a safe position
170 */
171 vpColVector q(6, 0);
172 q[0] = -vpMath::rad(16);
173 q[1] = -vpMath::rad(120);
174 q[2] = vpMath::rad(120);
175 q[3] = -vpMath::rad(90);
176 q[4] = -vpMath::rad(90);
177 q[5] = 0;
178 std::cout << "Move to joint position: " << q.t() << std::endl;
179 robot.setRobotState(vpRobot::STATE_POSITION_CONTROL);
180 robot.setPosition(vpRobot::JOINT_STATE, q);
181
182 vpRealSense2 rs;
183 rs2::config config;
184 unsigned int width = 640, height = 480;
185 config.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30);
186 config.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, 30);
187 config.enable_stream(RS2_STREAM_INFRARED, 640, 480, RS2_FORMAT_Y8, 30);
188 rs.open(config);
189
190 // Get camera extrinsics
191 vpPoseVector ePc;
192 // Set camera extrinsics default values
193 ePc[0] = -0.0312543;
194 ePc[1] = -0.0584638;
195 ePc[2] = 0.0309834;
196 ePc[3] = -0.00506562;
197 ePc[4] = -0.00293325;
198 ePc[5] = 0.0117901;
199
200 // If provided, read camera extrinsics from --eMc <file>
201 if (!opt_eMc_filename.empty()) {
202 ePc.loadYAML(opt_eMc_filename, ePc);
203 }
204 else {
205 std::cout << "Warning, opt_eMc_filename is empty! Use hard coded values."
206 << "\n";
207 }
208 vpHomogeneousMatrix eMc(ePc);
209 std::cout << "eMc:\n" << eMc << "\n";
210
211 // Get camera intrinsics
214 std::cout << "cam:\n" << cam << "\n";
215
216 vpImage<unsigned char> I(height, width);
217
218#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
219 display = vpDisplayFactory::createDisplay(I, 10, 10, "Color image");
220#else
221 display = vpDisplayFactory::allocateDisplay(I, 10, 10, "Color image");
222#endif
223
226 // vpDetectorAprilTag::vpPoseEstimationMethod poseEstimationMethod = vpDetectorAprilTag::BEST_RESIDUAL_VIRTUAL_VS;
227 vpDetectorAprilTag detector(tagFamily);
228 detector.setAprilTagPoseEstimationMethod(poseEstimationMethod);
229 detector.setDisplayTag(display_tag);
230 detector.setAprilTagQuadDecimate(opt_quad_decimate);
231
232 // Servo
233 vpHomogeneousMatrix cdMc, cMo, oMo;
234
235 // Desired pose used to compute the desired features
236 vpHomogeneousMatrix cdMo(vpTranslationVector(0, 0, opt_tagSize * 3), // 3 times tag with along camera z axis
237 vpRotationMatrix({ 1, 0, 0, 0, -1, 0, 0, 0, -1 }));
238
239 // Create visual features
240 std::vector<vpFeaturePoint> p(4), pd(4); // We use 4 points
241
242 // Define 4 3D points corresponding to the CAD model of the Apriltag
243 std::vector<vpPoint> point(4);
244 point[0].setWorldCoordinates(-opt_tagSize / 2., -opt_tagSize / 2., 0);
245 point[1].setWorldCoordinates(opt_tagSize / 2., -opt_tagSize / 2., 0);
246 point[2].setWorldCoordinates(opt_tagSize / 2., opt_tagSize / 2., 0);
247 point[3].setWorldCoordinates(-opt_tagSize / 2., opt_tagSize / 2., 0);
248
250 // Add the 4 visual feature points
251 for (size_t i = 0; i < p.size(); i++) {
252 task.addFeature(p[i], pd[i]);
253 }
255 task.setInteractionMatrixType(vpServo::CURRENT);
256
257 if (opt_adaptive_gain) {
258 vpAdaptiveGain lambda(1.5, 0.4, 30); // lambda(0)=4, lambda(oo)=0.4 and lambda'(0)=30
259 task.setLambda(lambda);
260 }
261 else {
262 task.setLambda(0.5);
263 }
264
265 vpPlot *plotter = nullptr;
266 int iter_plot = 0;
267
268 if (opt_plot) {
269 plotter = new vpPlot(2, static_cast<int>(250 * 2), 500, static_cast<int>(I.getWidth()) + 80, 10,
270 "Real time curves plotter");
271 plotter->setTitle(0, "Visual features error");
272 plotter->setTitle(1, "Camera velocities");
273 plotter->initGraph(0, 8);
274 plotter->initGraph(1, 6);
275 plotter->setLegend(0, 0, "error_feat_p1_x");
276 plotter->setLegend(0, 1, "error_feat_p1_y");
277 plotter->setLegend(0, 2, "error_feat_p2_x");
278 plotter->setLegend(0, 3, "error_feat_p2_y");
279 plotter->setLegend(0, 4, "error_feat_p3_x");
280 plotter->setLegend(0, 5, "error_feat_p3_y");
281 plotter->setLegend(0, 6, "error_feat_p4_x");
282 plotter->setLegend(0, 7, "error_feat_p4_y");
283 plotter->setLegend(1, 0, "vc_x");
284 plotter->setLegend(1, 1, "vc_y");
285 plotter->setLegend(1, 2, "vc_z");
286 plotter->setLegend(1, 3, "wc_x");
287 plotter->setLegend(1, 4, "wc_y");
288 plotter->setLegend(1, 5, "wc_z");
289 }
290
291 bool final_quit = false;
292 bool has_converged = false;
293 bool send_velocities = false;
294 bool servo_started = false;
295 std::vector<vpImagePoint> *traj_corners = nullptr; // To memorize point trajectory
296
297 static double t_init_servo = vpTime::measureTimeMs();
298
299 robot.set_eMc(eMc); // Set location of the camera wrt end-effector frame
300 robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);
301
302 while (!has_converged && !final_quit) {
303 double t_start = vpTime::measureTimeMs();
304
305 rs.acquire(I);
306
308
309 std::vector<vpHomogeneousMatrix> cMo_vec;
310 detector.detect(I, opt_tagSize, cam, cMo_vec);
311
312 {
313 std::stringstream ss;
314 ss << "Left click to " << (send_velocities ? "stop the robot" : "servo the robot") << ", right click to quit.";
315 vpDisplay::displayText(I, 20, 20, ss.str(), vpColor::red);
316 }
317
318 vpColVector v_c(6);
319
320 // Only one tag is detected
321 if (cMo_vec.size() == 1) {
322 cMo = cMo_vec[0];
323
324 static bool first_time = true;
325 if (first_time) {
326 // Introduce security wrt tag positioning in order to avoid PI rotation
327 std::vector<vpHomogeneousMatrix> v_oMo(2), v_cdMc(2);
328 v_oMo[1].buildFrom(0, 0, 0, 0, 0, M_PI);
329 for (size_t i = 0; i < 2; i++) {
330 v_cdMc[i] = cdMo * v_oMo[i] * cMo.inverse();
331 }
332 if (std::fabs(v_cdMc[0].getThetaUVector().getTheta()) < std::fabs(v_cdMc[1].getThetaUVector().getTheta())) {
333 oMo = v_oMo[0];
334 }
335 else {
336 std::cout << "Desired frame modified to avoid PI rotation of the camera" << std::endl;
337 oMo = v_oMo[1]; // Introduce PI rotation
338 }
339
340 // Compute the desired position of the features from the desired pose
341 for (size_t i = 0; i < point.size(); i++) {
342 vpColVector cP, p_;
343 point[i].changeFrame(cdMo * oMo, cP);
344 point[i].projection(cP, p_);
345
346 pd[i].set_x(p_[0]);
347 pd[i].set_y(p_[1]);
348 pd[i].set_Z(cP[2]);
349 }
350 }
351
352 // Get tag corners
353 std::vector<vpImagePoint> corners = detector.getPolygon(0);
354
355 // Update visual features
356 for (size_t i = 0; i < corners.size(); i++) {
357 // Update the point feature from the tag corners location
358 vpFeatureBuilder::create(p[i], cam, corners[i]);
359 // Set the feature Z coordinate from the pose
360 vpColVector cP;
361 point[i].changeFrame(cMo, cP);
362
363 p[i].set_Z(cP[2]);
364 }
365
366 if (opt_task_sequencing) {
367 if (!servo_started) {
368 if (send_velocities) {
369 servo_started = true;
370 }
371 t_init_servo = vpTime::measureTimeMs();
372 }
373 v_c = task.computeControlLaw((vpTime::measureTimeMs() - t_init_servo) / 1000.);
374 }
375 else {
376 v_c = task.computeControlLaw();
377 }
378
379 // Display the current and desired feature points in the image display
380 vpServoDisplay::display(task, cam, I);
381 for (size_t i = 0; i < corners.size(); i++) {
382 std::stringstream ss;
383 ss << i;
384 // Display current point indexes
385 vpDisplay::displayText(I, corners[i] + vpImagePoint(15, 15), ss.str(), vpColor::red);
386 // Display desired point indexes
387 vpImagePoint ip;
388 vpMeterPixelConversion::convertPoint(cam, pd[i].get_x(), pd[i].get_y(), ip);
389 vpDisplay::displayText(I, ip + vpImagePoint(15, 15), ss.str(), vpColor::red);
390 }
391 if (first_time) {
392 traj_corners = new std::vector<vpImagePoint>[corners.size()];
393 }
394 // Display the trajectory of the points used as features
395 display_point_trajectory(I, corners, traj_corners);
396
397 if (opt_plot) {
398 plotter->plot(0, iter_plot, task.getError());
399 plotter->plot(1, iter_plot, v_c);
400 iter_plot++;
401 }
402
403 if (opt_verbose) {
404 std::cout << "v_c: " << v_c.t() << std::endl;
405 }
406
407 double error = task.getError().sumSquare();
408 std::stringstream ss;
409 ss << "error: " << error;
410 vpDisplay::displayText(I, 20, static_cast<int>(I.getWidth()) - 150, ss.str(), vpColor::red);
411
412 if (opt_verbose)
413 std::cout << "error: " << error << std::endl;
414
415 if (error < convergence_threshold) {
416 has_converged = true;
417 std::cout << "Servo task has converged"
418 << "\n";
419 vpDisplay::displayText(I, 100, 20, "Servo task has converged", vpColor::red);
420 }
421 if (first_time) {
422 first_time = false;
423 }
424 } // end if (cMo_vec.size() == 1)
425 else {
426 v_c = 0;
427 }
428
429 if (!send_velocities) {
430 v_c = 0;
431 }
432
433 // Send to the robot
434 robot.setVelocity(vpRobot::CAMERA_FRAME, v_c);
435
436 {
437 std::stringstream ss;
438 ss << "Loop time: " << vpTime::measureTimeMs() - t_start << " ms";
439 vpDisplay::displayText(I, 40, 20, ss.str(), vpColor::red);
440 }
442
444 if (vpDisplay::getClick(I, button, false)) {
445 switch (button) {
447 send_velocities = !send_velocities;
448 break;
449
451 final_quit = true;
452 v_c = 0;
453 break;
454
455 default:
456 break;
457 }
458 }
459 }
460 std::cout << "Stop the robot " << std::endl;
461 robot.setRobotState(vpRobot::STATE_STOP);
462
463 if (opt_plot && plotter != nullptr) {
464 delete plotter;
465 plotter = nullptr;
466 }
467
468 if (!final_quit) {
469 while (!final_quit) {
470 rs.acquire(I);
472
473 vpDisplay::displayText(I, 20, 20, "Click to quit the program.", vpColor::red);
474 vpDisplay::displayText(I, 40, 20, "Visual servo converged.", vpColor::red);
475
476 if (vpDisplay::getClick(I, false)) {
477 final_quit = true;
478 }
479
481 }
482 }
483 if (traj_corners) {
484 delete[] traj_corners;
485 }
486 }
487 catch (const vpException &e) {
488 std::cout << "ViSP exception: " << e.what() << std::endl;
489 std::cout << "Stop the robot " << std::endl;
490 robot.setRobotState(vpRobot::STATE_STOP);
491#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
492 if (display != nullptr) {
493 delete display;
494 }
495#endif
496 return EXIT_FAILURE;
497 }
498 catch (const std::exception &e) {
499 std::cout << "ur_rtde exception: " << e.what() << std::endl;
500#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
501 if (display != nullptr) {
502 delete display;
503 }
504#endif
505 return EXIT_FAILURE;
506 }
507
508#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
509 if (display != nullptr) {
510 delete display;
511 }
512#endif
513 return EXIT_SUCCESS;
514}
515#else
// Fallback entry point compiled when the required third-party libraries
// (librealsense2, a display library, ur_rtde) are not available.
// Prints which dependency is missing so the user knows what to install.
int main()
{
#if !defined(VISP_HAVE_REALSENSE2)
  std::cout << "Install librealsense-2.x" << std::endl;
#endif
#if !defined(VISP_HAVE_UR_RTDE)
  // Fixed grammar in the user-facing message: "build" -> "built".
  std::cout << "ViSP is not built with libur_rtde 3rd party used to control a robot from Universal Robots..."
    << std::endl;
#endif
  return EXIT_SUCCESS;
}
527#endif
Adaptive gain computation.
static bool loadYAML(const std::string &filename, vpArray2D< Type > &A, char *header=nullptr)
Definition vpArray2D.h:874
Generic class defining intrinsic camera parameters.
@ perspectiveProjWithDistortion
Perspective projection with distortion model.
Implementation of column vector and the associated operations.
static const vpColor red
Definition vpColor.h:198
static const vpColor green
Definition vpColor.h:201
@ TAG_36h11
AprilTag 36h11 pattern (recommended).
Class that defines generic functionalities for display.
Definition vpDisplay.h:171
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void displayLine(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color, unsigned int thickness=1, bool segment=true)
static void flush(const vpImage< unsigned char > &I)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
error that can be emitted by ViSP classes.
Definition vpException.h:60
static void create(vpFeaturePoint &s, const vpCameraParameters &cam, const vpDot &d)
Implementation of an homogeneous matrix and operations on such kind of matrices.
Class that defines a 2D point in an image. This class is useful for image processing and stores only ...
static double distance(const vpImagePoint &iP1, const vpImagePoint &iP2)
Definition of the vpImage class member functions.
Definition vpImage.h:131
static double rad(double deg)
Definition vpMath.h:129
static void convertPoint(const vpCameraParameters &cam, const double &x, const double &y, double &u, double &v)
This class enables real time drawing of 2D or 3D graphics. An instance of the class open a window whi...
Definition vpPlot.h:117
Implementation of a pose vector and operations on poses.
vpCameraParameters getCameraParameters(const rs2_stream &stream, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithDistortion, int index=-1) const
void acquire(vpImage< unsigned char > &grey, double *ts=nullptr)
bool open(const rs2::config &cfg=rs2::config())
@ JOINT_STATE
Definition vpRobot.h:79
@ CAMERA_FRAME
Definition vpRobot.h:81
@ STATE_POSITION_CONTROL
Initialize the position controller.
Definition vpRobot.h:65
@ STATE_VELOCITY_CONTROL
Initialize the velocity controller.
Definition vpRobot.h:64
@ STATE_STOP
Stops robot motion especially in velocity and acceleration control.
Definition vpRobot.h:63
Implementation of a rotation matrix and operations on such kind of matrices.
static void display(const vpServo &s, const vpCameraParameters &cam, const vpImage< unsigned char > &I, vpColor currentColor=vpColor::green, vpColor desiredColor=vpColor::red, unsigned int thickness=1)
@ EYEINHAND_CAMERA
Definition vpServo.h:176
@ CURRENT
Definition vpServo.h:217
Class that consider the case of a translation vector.
std::shared_ptr< vpDisplay > createDisplay()
Return a smart pointer vpDisplay specialization if a GUI library is available or nullptr otherwise.
vpDisplay * allocateDisplay()
Return a newly allocated vpDisplay specialization if a GUI library is available or nullptr otherwise.
VISP_EXPORT double measureTimeMs()