Visual Servoing Platform version 3.7.0
Loading...
Searching...
No Matches
servoUniversalRobotsPBVS.cpp
1/*
2 * ViSP, open source Visual Servoing Platform software.
3 * Copyright (C) 2005 - 2025 by Inria. All rights reserved.
4 *
5 * This software is free software; you can redistribute it and/or modify
6 * it under the terms of the GNU General Public License as published by
7 * the Free Software Foundation; either version 2 of the License, or
8 * (at your option) any later version.
9 * See the file LICENSE.txt at the root directory of this source
10 * distribution for additional information about the GNU GPL.
11 *
12 * For using ViSP with software that can not be combined with the GNU
13 * GPL, please contact Inria about acquiring a ViSP Professional
14 * Edition License.
15 *
16 * See https://visp.inria.fr for more information.
17 *
18 * This software was developed at:
19 * Inria Rennes - Bretagne Atlantique
20 * Campus Universitaire de Beaulieu
21 * 35042 Rennes Cedex
22 * France
23 *
24 * If you have questions regarding the use of this file, please contact
25 * Inria at visp@inria.fr
26 *
27 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
28 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
29 *
30 * Description:
31 * Data acquisition with RealSense RGB-D sensor and Universal UR5 robot.
32 */
33
53
54#include <iostream>
55
56#include <visp3/core/vpConfig.h>
57
58#if defined(VISP_HAVE_REALSENSE2) && defined(VISP_HAVE_DISPLAY) && defined(VISP_HAVE_UR_RTDE)
59
60#include <visp3/core/vpCameraParameters.h>
61#include <visp3/detection/vpDetectorAprilTag.h>
62#include <visp3/gui/vpDisplayFactory.h>
63#include <visp3/gui/vpPlot.h>
64#include <visp3/io/vpImageIo.h>
65#include <visp3/robot/vpRobotUniversalRobots.h>
66#include <visp3/sensor/vpRealSense2.h>
67#include <visp3/visual_features/vpFeatureThetaU.h>
68#include <visp3/visual_features/vpFeatureTranslation.h>
69#include <visp3/vs/vpServo.h>
70#include <visp3/vs/vpServoDisplay.h>
71
72#ifdef ENABLE_VISP_NAMESPACE
73using namespace VISP_NAMESPACE_NAME;
74#endif
75
76void display_point_trajectory(const vpImage<unsigned char> &I, const std::vector<vpImagePoint> &vip,
77 std::vector<vpImagePoint> *traj_vip)
78{
79 for (size_t i = 0; i < vip.size(); i++) {
80 if (traj_vip[i].size()) {
81 // Add the point only if distance with the previous > 1 pixel
82 if (vpImagePoint::distance(vip[i], traj_vip[i].back()) > 1.) {
83 traj_vip[i].push_back(vip[i]);
84 }
85 }
86 else {
87 traj_vip[i].push_back(vip[i]);
88 }
89 }
90 for (size_t i = 0; i < vip.size(); i++) {
91 for (size_t j = 1; j < traj_vip[i].size(); j++) {
92 vpDisplay::displayLine(I, traj_vip[i][j - 1], traj_vip[i][j], vpColor::green, 2);
93 }
94 }
95}
96
// Pose-based visual servoing (PBVS) example: detects an AprilTag with a
// RealSense color stream and drives a Universal Robots arm in velocity mode
// until the camera reaches a desired pose relative to the tag (translation
// and theta-u rotation features).
// NOTE(review): this extract is missing several source lines (the numbering
// fused into each line has gaps); each gap is flagged below — confirm
// against the full file before relying on this listing.
97int main(int argc, char **argv)
98{
99 double opt_tagSize = 0.120;
100 std::string opt_robot_ip = "192.168.0.100";
101 std::string opt_eMc_filename = "";
102 bool display_tag = true;
103 int opt_quad_decimate = 2;
104 bool opt_verbose = false;
105 bool opt_plot = false;
106 bool opt_adaptive_gain = false;
107 bool opt_task_sequencing = false;
108 double convergence_threshold_t = 0.0005; // Value in [m]
109 double convergence_threshold_tu = 0.5; // Value in [deg]
110
// Minimal hand-rolled CLI parsing; flags without a following value are
// silently ignored when the value is missing (i + 1 < argc guard).
111 for (int i = 1; i < argc; i++) {
112 if (std::string(argv[i]) == "--tag-size" && i + 1 < argc) {
113 opt_tagSize = std::stod(argv[++i]);
114 }
115 else if (std::string(argv[i]) == "--tag-quad-decimate" && i + 1 < argc) {
116 opt_quad_decimate = std::stoi(argv[++i]);
117 }
118 else if (std::string(argv[i]) == "--ip" && i + 1 < argc) {
119 opt_robot_ip = std::string(argv[++i]);
120 }
121 else if (std::string(argv[i]) == "--eMc" && i + 1 < argc) {
122 opt_eMc_filename = std::string(argv[++i]);
123 }
124 else if (std::string(argv[i]) == "--verbose") {
125 opt_verbose = true;
126 }
127 else if (std::string(argv[i]) == "--plot") {
128 opt_plot = true;
129 }
// NOTE(review): flag is misspelled "--adpative-gain" upstream (also in the
// help text below); kept as-is for CLI compatibility.
130 else if (std::string(argv[i]) == "--adpative-gain") {
131 opt_adaptive_gain = true;
132 }
133 else if (std::string(argv[i]) == "--task-sequencing") {
134 opt_task_sequencing = true;
135 }
136 else if (std::string(argv[i]) == "--no-convergence-threshold") {
137 convergence_threshold_t = 0.;
138 convergence_threshold_tu = 0.;
139 }
140 else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
141 std::cout
142 << argv[0] << " [--ip <default " << opt_robot_ip << ">] [--tag-size <marker size in meter; default "
143 << opt_tagSize << ">] [--eMc <eMc extrinsic file>] "
144 << "[--tag-quad-decimate <decimation; default " << opt_quad_decimate
145 << ">] [--adpative-gain] [--plot] [--task-sequencing] [--no-convergence-threshold] [--verbose] [--help] [-h]"
146 << "\n";
147 return EXIT_SUCCESS;
148 }
149 }
150
// NOTE(review): line 151 missing from this extract — presumably declares
// `robot` (used below as a vpRobotUniversalRobots); confirm in the full file.
152
153#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
154 std::shared_ptr<vpDisplay> display;
155#else
156 vpDisplay *display = nullptr;
157#endif
158
159 try {
160 robot.connect(opt_robot_ip);
161
162 std::cout << "WARNING: This example will move the robot! "
163 << "Please make sure to have the user stop button at hand!" << std::endl
164 << "Press Enter to continue..." << std::endl;
165 std::cin.ignore();
166
167 /*
168 * Move to a safe position
169 */
// Joint target in radians; values chosen as a known-safe starting posture.
170 vpColVector q(6, 0);
171 q[0] = -vpMath::rad(16);
172 q[1] = -vpMath::rad(120);
173 q[2] = vpMath::rad(120);
174 q[3] = -vpMath::rad(90);
175 q[4] = -vpMath::rad(90);
176 q[5] = 0;
177 std::cout << "Move to joint position: " << q.t() << std::endl;
178 robot.setRobotState(vpRobot::STATE_POSITION_CONTROL);
179 robot.setPosition(vpRobot::JOINT_STATE, q);
180
// Configure the RealSense for 640x480 color/depth/IR at 30 fps; only the
// color stream is acquired in the loop below.
181 vpRealSense2 rs;
182 rs2::config config;
183 unsigned int width = 640, height = 480;
184 config.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30);
185 config.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, 30);
186 config.enable_stream(RS2_STREAM_INFRARED, 640, 480, RS2_FORMAT_Y8, 30);
187 rs.open(config);
188
189 // Get camera extrinsics
190 vpPoseVector ePc;
191 // Set camera extrinsics default values
192 ePc[0] = -0.0312543;
193 ePc[1] = -0.0584638;
194 ePc[2] = 0.0309834;
195 ePc[3] = -0.00506562;
196 ePc[4] = -0.00293325;
197 ePc[5] = 0.0117901;
198
199 // If provided, read camera extrinsics from --eMc <file>
200 if (!opt_eMc_filename.empty()) {
201 ePc.loadYAML(opt_eMc_filename, ePc);
202 }
203 else {
204 std::cout << "Warning, opt_eMc_filename is empty! Use hard coded values."
205 << "\n";
206 }
207 vpHomogeneousMatrix eMc(ePc);
208 std::cout << "eMc:\n" << eMc << "\n";
209
210 // Get camera intrinsics
// NOTE(review): lines 211-212 missing from this extract — presumably obtain
// `cam` from rs.getCameraParameters(); confirm in the full file.
213 std::cout << "cam:\n" << cam << "\n";
214
215 vpImage<unsigned char> I(height, width);
216
217#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
218 display = vpDisplayFactory::createDisplay(I, 10, 10, "Color image");
219#else
220 display = vpDisplayFactory::allocateDisplay(I, 10, 10, "Color image");
221#endif
222
// NOTE(review): lines 223-224 missing from this extract — presumably declare
// `tagFamily` and `poseEstimationMethod` used just below; confirm upstream.
225 // vpDetectorAprilTag::vpPoseEstimationMethod poseEstimationMethod = vpDetectorAprilTag::BEST_RESIDUAL_VIRTUAL_VS;
226 vpDetectorAprilTag detector(tagFamily);
227 detector.setAprilTagPoseEstimationMethod(poseEstimationMethod);
228 detector.setDisplayTag(display_tag);
229 detector.setAprilTagQuadDecimate(opt_quad_decimate);
230
231 // Servo
232 vpHomogeneousMatrix cdMc, cMo, oMo;
233
234 // Desired pose to reach
235 vpHomogeneousMatrix cdMo(vpTranslationVector(0, 0, opt_tagSize * 3), // 3 times tag with along camera z axis
236 vpRotationMatrix({ 1, 0, 0, 0, -1, 0, 0, 0, -1 }));
237
238 cdMc = cdMo * cMo.inverse();
// NOTE(review): lines 239-240 missing from this extract — presumably declare
// the current features `t` (translation) and `tu` (theta-u); confirm upstream.
241 t.buildFrom(cdMc);
242 tu.buildFrom(cdMc);
243
// NOTE(review): lines 244-247 and 250 missing from this extract — presumably
// declare the desired features `td`/`tud`, the `task` (vpServo) and set the
// servo type (EYEINHAND_CAMERA per the includes); confirm upstream.
246
248 task.addFeature(t, td);
249 task.addFeature(tu, tud);
251 task.setInteractionMatrixType(vpServo::CURRENT);
252
253 if (opt_adaptive_gain) {
254 vpAdaptiveGain lambda(1.5, 0.4, 30); // lambda(0)=4, lambda(oo)=0.4 and lambda'(0)=30
255 task.setLambda(lambda);
256 }
257 else {
258 task.setLambda(0.5);
259 }
260
// Optional real-time plot of the 6 feature errors and 6 camera velocities.
261 vpPlot *plotter = nullptr;
262 int iter_plot = 0;
263
264 if (opt_plot) {
265 plotter = new vpPlot(2, static_cast<int>(250 * 2), 500, static_cast<int>(I.getWidth()) + 80, 10,
266 "Real time curves plotter");
267 plotter->setTitle(0, "Visual features error");
268 plotter->setTitle(1, "Camera velocities");
269 plotter->initGraph(0, 6);
270 plotter->initGraph(1, 6);
271 plotter->setLegend(0, 0, "error_feat_tx");
272 plotter->setLegend(0, 1, "error_feat_ty");
273 plotter->setLegend(0, 2, "error_feat_tz");
274 plotter->setLegend(0, 3, "error_feat_theta_ux");
275 plotter->setLegend(0, 4, "error_feat_theta_uy");
276 plotter->setLegend(0, 5, "error_feat_theta_uz");
277 plotter->setLegend(1, 0, "vc_x");
278 plotter->setLegend(1, 1, "vc_y");
279 plotter->setLegend(1, 2, "vc_z");
280 plotter->setLegend(1, 3, "wc_x");
281 plotter->setLegend(1, 4, "wc_y");
282 plotter->setLegend(1, 5, "wc_z");
283 }
284
285 bool final_quit = false;
286 bool has_converged = false;
287 bool send_velocities = false;
288 bool servo_started = false;
289 std::vector<vpImagePoint> *traj_vip = nullptr; // To memorize point trajectory
290
291 static double t_init_servo = vpTime::measureTimeMs();
292
293 robot.set_eMc(eMc); // Set location of the camera wrt end-effector frame
294 robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);
295
// Servo loop: runs until convergence or the operator right-clicks to quit.
296 while (!has_converged && !final_quit) {
297 double t_start = vpTime::measureTimeMs();
298
299 rs.acquire(I);
300
// NOTE(review): line 301 missing from this extract — presumably
// vpDisplay::display(I); confirm in the full file.
302
303 std::vector<vpHomogeneousMatrix> cMo_vec;
304 detector.detect(I, opt_tagSize, cam, cMo_vec);
305
306 std::stringstream ss;
307 ss << "Left click to " << (send_velocities ? "stop the robot" : "servo the robot") << ", right click to quit.";
308 vpDisplay::displayText(I, 20, 20, ss.str(), vpColor::red);
309
310 vpColVector v_c(6);
311
312 // Only one tag is detected
313 if (cMo_vec.size() == 1) {
314 cMo = cMo_vec[0];
315
316 static bool first_time = true;
317 if (first_time) {
318 // Introduce security wrt tag positioning in order to avoid PI rotation
// Two candidate object-frame orientations (identity and a PI rotation about
// z) are compared; the one minimizing the residual theta-u angle is kept.
319 std::vector<vpHomogeneousMatrix> v_oMo(2), v_cdMc(2);
320 v_oMo[1].buildFrom(0, 0, 0, 0, 0, M_PI);
321 for (size_t i = 0; i < 2; i++) {
322 v_cdMc[i] = cdMo * v_oMo[i] * cMo.inverse();
323 }
324 if (std::fabs(v_cdMc[0].getThetaUVector().getTheta()) < std::fabs(v_cdMc[1].getThetaUVector().getTheta())) {
325 oMo = v_oMo[0];
326 }
327 else {
328 std::cout << "Desired frame modified to avoid PI rotation of the camera" << std::endl;
329 oMo = v_oMo[1]; // Introduce PI rotation
330 }
331 }
332
333 // Update visual features
334 cdMc = cdMo * oMo * cMo.inverse();
335 t.buildFrom(cdMc);
336 tu.buildFrom(cdMc);
337
// Task sequencing ramps the gain from t=0 once the operator enables motion;
// the servo clock keeps resetting until velocities are actually sent.
338 if (opt_task_sequencing) {
339 if (!servo_started) {
340 if (send_velocities) {
341 servo_started = true;
342 }
343 t_init_servo = vpTime::measureTimeMs();
344 }
345 v_c = task.computeControlLaw((vpTime::measureTimeMs() - t_init_servo) / 1000.);
346 }
347 else {
348 v_c = task.computeControlLaw();
349 }
350
351 // Display desired and current pose features
352 vpDisplay::displayFrame(I, cdMo * oMo, cam, opt_tagSize / 1.5, vpColor::yellow, 2);
353 vpDisplay::displayFrame(I, cMo, cam, opt_tagSize / 2, vpColor::none, 3);
354 // Get tag corners
355 std::vector<vpImagePoint> vip = detector.getPolygon(0);
356 // Get the tag cog corresponding to the projection of the tag frame in the image
357 vip.push_back(detector.getCog(0));
358 // Display the trajectory of the points
// Allocated once on the first detection; released after the loop (see below).
359 if (first_time) {
360 traj_vip = new std::vector<vpImagePoint>[vip.size()];
361 }
362 display_point_trajectory(I, vip, traj_vip);
363
364 if (opt_plot) {
365 plotter->plot(0, iter_plot, task.getError());
366 plotter->plot(1, iter_plot, v_c);
367 iter_plot++;
368 }
369
370 if (opt_verbose) {
371 std::cout << "v_c: " << v_c.t() << std::endl;
372 }
373
// Convergence test on the norm of the residual translation [m] and
// rotation [deg] of the desired-to-current camera transform.
374 vpTranslationVector cd_t_c = cdMc.getTranslationVector();
375 vpThetaUVector cd_tu_c = cdMc.getThetaUVector();
376 double error_tr = sqrt(cd_t_c.sumSquare());
377 double error_tu = vpMath::deg(sqrt(cd_tu_c.sumSquare()));
378
379 ss.str("");
380 ss << "error_t: " << error_tr;
381 vpDisplay::displayText(I, 20, static_cast<int>(I.getWidth()) - 150, ss.str(), vpColor::red);
382 ss.str("");
383 ss << "error_tu: " << error_tu;
384 vpDisplay::displayText(I, 40, static_cast<int>(I.getWidth()) - 150, ss.str(), vpColor::red);
385
386 if (opt_verbose)
387 std::cout << "error translation: " << error_tr << " ; error rotation: " << error_tu << std::endl;
388
389 if (error_tr < convergence_threshold_t && error_tu < convergence_threshold_tu) {
390 has_converged = true;
391 std::cout << "Servo task has converged" << std::endl;
392 vpDisplay::displayText(I, 100, 20, "Servo task has converged", vpColor::red);
393 }
394
395 if (first_time) {
396 first_time = false;
397 }
398 } // end if (cMo_vec.size() == 1)
399 else {
400 v_c = 0;
401 }
402
// Safety: do not move until the operator has left-clicked to enable motion.
403 if (!send_velocities) {
404 v_c = 0;
405 }
406
407 // Send to the robot
408 robot.setVelocity(vpRobot::CAMERA_FRAME, v_c);
409
410 ss.str("");
411 ss << "Loop time: " << vpTime::measureTimeMs() - t_start << " ms";
412 vpDisplay::displayText(I, 40, 20, ss.str(), vpColor::red);
// NOTE(review): lines 413, 415, 418 and 422 missing from this extract —
// presumably the vpDisplay::flush(I) call, the declaration of `button`, and
// the button1/button3 case labels of the switch; confirm in the full file.
414
416 if (vpDisplay::getClick(I, button, false)) {
417 switch (button) {
419 send_velocities = !send_velocities;
420 break;
421
423 final_quit = true;
424 v_c = 0;
425 break;
426
427 default:
428 break;
429 }
430 }
431 }
432 std::cout << "Stop the robot " << std::endl;
433 robot.setRobotState(vpRobot::STATE_STOP);
434
435 if (opt_plot && plotter != nullptr) {
436 delete plotter;
437 plotter = nullptr;
438 }
439
// After convergence, keep displaying the stream until the operator clicks.
440 if (!final_quit) {
441 while (!final_quit) {
442 rs.acquire(I);
// NOTE(review): lines 443 and 452 missing from this extract — presumably the
// vpDisplay::display(I)/flush(I) pair of this loop; confirm in the full file.
444
445 vpDisplay::displayText(I, 20, 20, "Click to quit the program.", vpColor::red);
446 vpDisplay::displayText(I, 40, 20, "Visual servo converged.", vpColor::red);
447
448 if (vpDisplay::getClick(I, false)) {
449 final_quit = true;
450 }
451
453 }
454 }
455
456 if (traj_vip) {
457 delete[] traj_vip;
458 }
459 }
460 catch (const vpException &e) {
// On a ViSP error, stop the robot before exiting: it may still be moving.
461 std::cout << "ViSP exception: " << e.what() << std::endl;
462 std::cout << "Stop the robot " << std::endl;
463 robot.setRobotState(vpRobot::STATE_STOP);
464#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
465 if (display != nullptr) {
466 delete display;
467 }
468#endif
469 return EXIT_FAILURE;
470 }
471 catch (const std::exception &e) {
472 std::cout << "ur_rtde exception: " << e.what() << std::endl;
473#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
474 if (display != nullptr) {
475 delete display;
476 }
477#endif
478 return EXIT_FAILURE;
479 }
480
481#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
482 if (display != nullptr) {
483 delete display;
484 }
485#endif
486 return EXIT_SUCCESS;
487}
488#else
/*
 * Fallback entry point used when a required 3rd party is missing.
 * The real example above is guarded by VISP_HAVE_REALSENSE2,
 * VISP_HAVE_DISPLAY and VISP_HAVE_UR_RTDE; report each dependency that is
 * absent so the user gets a diagnostic instead of a silent no-op.
 */
int main()
{
#if !defined(VISP_HAVE_REALSENSE2)
  std::cout << "Install librealsense-2.x" << std::endl;
#endif
#if !defined(VISP_HAVE_DISPLAY)
  // The guard on the real example also requires a display library; without
  // this branch that case produced no diagnostic at all.
  std::cout << "ViSP is not built with a 3rd party display library (X11, GDI, OpenCV, ...)" << std::endl;
#endif
#if !defined(VISP_HAVE_UR_RTDE)
  // Typo fixed: "is not build with" -> "is not built with".
  std::cout << "ViSP is not built with libur_rtde 3rd party used to control a robot from Universal Robots..."
            << std::endl;
#endif
  return EXIT_SUCCESS;
}
500#endif
Adaptive gain computation.
static bool loadYAML(const std::string &filename, vpArray2D< Type > &A, char *header=nullptr)
Definition vpArray2D.h:874
Generic class defining intrinsic camera parameters.
@ perspectiveProjWithDistortion
Perspective projection with distortion model.
Implementation of column vector and the associated operations.
static const vpColor red
Definition vpColor.h:198
static const vpColor none
Definition vpColor.h:210
static const vpColor yellow
Definition vpColor.h:206
static const vpColor green
Definition vpColor.h:201
@ TAG_36h11
AprilTag 36h11 pattern (recommended).
Class that defines generic functionalities for display.
Definition vpDisplay.h:171
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void displayLine(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color, unsigned int thickness=1, bool segment=true)
static void displayFrame(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, double size, const vpColor &color=vpColor::none, unsigned int thickness=1, const vpImagePoint &offset=vpImagePoint(0, 0), const std::string &frameName="", const vpColor &textColor=vpColor::black, const vpImagePoint &textOffset=vpImagePoint(15, 15))
static void flush(const vpImage< unsigned char > &I)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
error that can be emitted by ViSP classes.
Definition vpException.h:60
Class that defines a 3D visual feature from an axis/angle parametrization that represents the rotati...
Class that defines the translation visual feature .
Implementation of an homogeneous matrix and operations on such kind of matrices.
static double distance(const vpImagePoint &iP1, const vpImagePoint &iP2)
Definition of the vpImage class member functions.
Definition vpImage.h:131
static double rad(double deg)
Definition vpMath.h:129
static double deg(double rad)
Definition vpMath.h:119
This class enables real time drawing of 2D or 3D graphics. An instance of the class opens a window whi...
Definition vpPlot.h:117
Implementation of a pose vector and operations on poses.
vpCameraParameters getCameraParameters(const rs2_stream &stream, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithDistortion, int index=-1) const
void acquire(vpImage< unsigned char > &grey, double *ts=nullptr)
bool open(const rs2::config &cfg=rs2::config())
@ JOINT_STATE
Definition vpRobot.h:79
@ CAMERA_FRAME
Definition vpRobot.h:81
@ STATE_POSITION_CONTROL
Initialize the position controller.
Definition vpRobot.h:65
@ STATE_VELOCITY_CONTROL
Initialize the velocity controller.
Definition vpRobot.h:64
@ STATE_STOP
Stops robot motion especially in velocity and acceleration control.
Definition vpRobot.h:63
Implementation of a rotation matrix and operations on such kind of matrices.
double sumSquare() const
@ EYEINHAND_CAMERA
Definition vpServo.h:176
@ CURRENT
Definition vpServo.h:217
Implementation of a rotation vector as axis-angle minimal representation.
Class that consider the case of a translation vector.
std::shared_ptr< vpDisplay > createDisplay()
Return a smart pointer vpDisplay specialization if a GUI library is available or nullptr otherwise.
vpDisplay * allocateDisplay()
Return a newly allocated vpDisplay specialization if a GUI library is available or nullptr otherwise.
VISP_EXPORT double measureTimeMs()