Visual Servoing Platform version 3.7.0
Loading...
Searching...
No Matches
servoAfma6MegaposePBVS.cpp
1/*
2 * ViSP, open source Visual Servoing Platform software.
3 * Copyright (C) 2005 - 2024 by Inria. All rights reserved.
4 *
5 * This software is free software; you can redistribute it and/or modify
6 * it under the terms of the GNU General Public License as published by
7 * the Free Software Foundation; either version 2 of the License, or
8 * (at your option) any later version.
9 * See the file LICENSE.txt at the root directory of this source
10 * distribution for additional information about the GNU GPL.
11 *
12 * For using ViSP with software that can not be combined with the GNU
13 * GPL, please contact Inria about acquiring a ViSP Professional
14 * Edition License.
15 *
16 * See https://visp.inria.fr for more information.
17 *
18 * This software was developed at:
19 * Inria Rennes - Bretagne Atlantique
20 * Campus Universitaire de Beaulieu
21 * 35042 Rennes Cedex
22 * France
23 *
24 * If you have questions regarding the use of this file, please contact
25 * Inria at visp@inria.fr
26 *
27 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
28 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
29 *
30 * Description:
31 * Pose-based visual servoing using MegaPose, on an Afma6 platform.
32 */
33
60
61#include <iostream>
62
63#include <visp3/core/vpCameraParameters.h>
64#include <visp3/core/vpConfig.h>
65#include <visp3/detection/vpDetectorAprilTag.h>
66#include <visp3/gui/vpDisplayFactory.h>
67#include <visp3/gui/vpPlot.h>
68#include <visp3/io/vpImageIo.h>
69#include <visp3/robot/vpRobotAfma6.h>
70#include <visp3/sensor/vpRealSense2.h>
71#include <visp3/visual_features/vpFeatureThetaU.h>
72#include <visp3/visual_features/vpFeatureTranslation.h>
73#include <visp3/vs/vpServo.h>
74#include <visp3/vs/vpServoDisplay.h>
75#include <visp3/core/vpImageFilter.h>
76#include <visp3/io/vpVideoWriter.h>
77
78// Check if the c++17 standard (or higher) is enabled, i.e. -std=c++17
79#if defined(VISP_HAVE_REALSENSE2) && ((__cplusplus >= 201703L) || (defined(_MSVC_LANG) && (_MSVC_LANG >= 201703L))) && \
80 defined(VISP_HAVE_DISPLAY) && defined(VISP_HAVE_AFMA6) && defined(VISP_HAVE_MODULE_DNN_TRACKER)
81
82#include <optional>
83#include <visp3/io/vpJsonArgumentParser.h>
84#include <visp3/dnn_tracker/vpMegaPoseTracker.h>
85
86#ifdef VISP_HAVE_NLOHMANN_JSON
87using json = nlohmann::json;
88#endif
89
90#ifdef ENABLE_VISP_NAMESPACE
91using namespace VISP_NAMESPACE_NAME;
92#endif
93
94std::optional<vpRect> detectObjectForInitMegaposeClick(const vpImage<vpRGBa> &I)
95{
96 const bool startLabelling = vpDisplay::getClick(I, false);
97
98 const vpImagePoint textPosition(10.0, 20.0);
99
100 if (startLabelling) {
101 vpImagePoint topLeft, bottomRight;
102 vpDisplay::displayText(I, textPosition, "Click the upper left corner of the bounding box", vpColor::red);
104 vpDisplay::getClick(I, topLeft, true);
106 vpDisplay::displayCross(I, topLeft, 5, vpColor::red, 2);
107 vpDisplay::displayText(I, textPosition, "Click the bottom right corner of the bounding box", vpColor::red);
109 vpDisplay::getClick(I, bottomRight, true);
110 vpRect bb(topLeft, bottomRight);
111 return bb;
112 }
113 else {
115 vpDisplay::displayText(I, textPosition, "Click when the object is visible and static to start reinitializing megapose.", vpColor::red);
117 return std::nullopt;
118 }
119}
120
// Entry point of the PBVS example: estimates the tracked object's pose with a
// MegaPose server (asynchronously), builds translation + theta-u visual
// features from the current-to-desired camera displacement cdTc, and drives an
// Afma6 robot in velocity until the predicted error falls below the
// thresholds. Ground-truth error is logged from robot odometry for comparison.
// NOTE(review): this listing was extracted from generated documentation and
// several original source lines are missing (the embedded numbering has gaps);
// comments below flag identifiers whose declarations fell into those gaps.
121int main(int argc, const char *argv[])
122{
123 bool opt_verbose = true;
124 bool opt_plot = true;
  // Convergence thresholds on the MegaPose-predicted error (not ground truth).
125 double convergence_threshold_t = 0.0005; // Value in [m]
126 double convergence_threshold_tu = 0.5; // Value in [deg]
127
128 unsigned width = 640, height = 480;
129 std::string megaposeAddress = "127.0.0.1";
130 unsigned megaposePort = 5555;
131 int refinerIterations = 1, coarseNumSamples = 1024;
132 std::string objectName = "";
133
134 std::string desiredPosFile = "desired.pos";
135 std::string initialPosFile = "init.pos";
136
137#ifdef VISP_HAVE_NLOHMANN_JSON
  // Command-line / JSON configuration. NOTE(review): the "initialPose" help
  // string below looks copy-pasted from "desiredPose" ("the desired pose"
  // and the doubled "that") — cannot be corrected here without changing a
  // runtime string; confirm and fix upstream.
138 vpJsonArgumentParser parser("Pose-based visual servoing with Megapose on an Afma6, with a Realsense D435.", "--config", "/");
139 parser
140 .addArgument("initialPose", initialPosFile, true, "Path to the file that contains that the desired pose. Can be acquired with Afma6_office.")
141 .addArgument("desiredPose", desiredPosFile, true, "Path to the file that contains that the desired pose. Can be acquired with Afma6_office.")
142 .addArgument("object", objectName, true, "Name of the object to track with megapose.")
143 .addArgument("megapose/address", megaposeAddress, true, "IP address of the Megapose server.")
144 .addArgument("megapose/port", megaposePort, true, "Port on which the Megapose server listens for connections.")
145 .addArgument("megapose/refinerIterations", refinerIterations, false, "Number of Megapose refiner model iterations."
146 "A higher count may lead to better accuracy, at the cost of more processing time")
147 .addArgument("megapose/initialisationNumSamples", coarseNumSamples, false, "Number of Megapose renderings used for the initial pose estimation.");
148 parser.parse(argc, argv);
149#endif
150
151 vpRobotAfma6 robot;
152
153#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
154 std::shared_ptr<vpDisplay> display;
155#else
156 vpDisplay *display = nullptr;
157#endif
158 try {
159 std::cout << "WARNING: This example will move the robot! "
160 << "Please make sure to have the user stop button at hand!" << std::endl
161 << "Press Enter to continue..." << std::endl;
162 std::cin.ignore();
  // Logs saved to results.json at the end of the servo loop.
163 std::vector<vpColVector> velocities;
164 std::vector<vpPoseVector> error;
165 /*
166 * Move to a safe position
167 */
168 vpColVector q(6, 0);
169
170 vpVideoWriter writer;
171
172 // Go to desired pose, save true camera pose wrt world frame
173 robot.setPositioningVelocity(10.0); // In %
174 robot.readPosFile(desiredPosFile, q);
175 robot.setRobotState(vpRobot::STATE_POSITION_CONTROL);
176 robot.setPosition(vpRobot::ARTICULAR_FRAME, q); // Move to the joint position
177 std::cout << "Move to joint position: " << q.t() << std::endl;
  // cdTw: desired camera pose w.r.t. world, from forward kinematics (ground truth).
178 vpHomogeneousMatrix cdTw = robot.get_fMc(q).inverse();
179
180 // Setup camera
181 vpRealSense2 rs;
182 rs2::config config;
183 config.enable_stream(RS2_STREAM_COLOR, width, height, RS2_FORMAT_RGBA8, 30);
184 rs.open(config);
185
186 // Get camera intrinsics
  // NOTE(review): the declaration of `cam` (original lines 187-188, presumably
  // rs.getCameraParameters(...)) is missing from this extraction.
189 std::cout << "cam:\n" << cam << std::endl;
190 // Initialize Megapose
191 std::shared_ptr<vpMegaPose> megapose;
192 try {
193 megapose = std::make_shared<vpMegaPose>(megaposeAddress, megaposePort, cam, height, width);
194 }
195 catch (...) {
196 throw vpException(vpException::ioError, "Could not connect to Megapose server at " + megaposeAddress + " on port " + std::to_string(megaposePort));
197 }
198
199 vpMegaPoseTracker megaposeTracker(megapose, objectName, refinerIterations);
200 megapose->setCoarseNumSamples(coarseNumSamples);
  // Fail fast if the requested object is not served by this MegaPose instance.
201 const std::vector<std::string> allObjects = megapose->getObjectNames();
202 if (std::find(allObjects.begin(), allObjects.end(), objectName) == allObjects.end()) {
203 throw vpException(vpException::badValue, "Object " + objectName + " is not known by the Megapose server!");
204 }
  // Future holding the asynchronous MegaPose estimation result.
205 std::future<vpMegaPoseEstimate> trackerFuture;
206
207 vpImage<vpRGBa> I(height, width);
208
209#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
210 display = vpDisplayFactory::createDisplay(I, 10, 10, "Color image");
211#else
212 display = vpDisplayFactory::allocateDisplay(I, 10, 10, "Color image");
213#endif
214
  // At the desired pose: let the user delimit the object, then query MegaPose
  // for the desired camera-to-object pose cdTo.
215 std::optional<vpRect> detection;
216 while (!detection) {
217 rs.acquire(I);
219 detection = detectObjectForInitMegaposeClick(I);
221 }
222
223 vpHomogeneousMatrix cdTo = megaposeTracker.init(I, *detection).get().cTo; //get camera pose relative to object, not world
224
225 // Go to starting pose, save true starting pose in world frame
226 robot.readPosFile(initialPosFile, q);
227 robot.setRobotState(vpRobot::STATE_POSITION_CONTROL);
228 robot.setPosition(vpRobot::ARTICULAR_FRAME, q); // Move to the joint position
229 std::cout << "Move to joint position: " << q.t() << std::endl;
230 vpHomogeneousMatrix cTw = robot.get_fMc(q).inverse();
231 vpHomogeneousMatrix cdTc_true = cdTw * cTw.inverse(); // ground truth error
232
  // Re-run the interactive initialization at the starting pose to get cTo.
233 detection = std::nullopt;
234 while (!detection) {
235 rs.acquire(I);
237 detection = detectObjectForInitMegaposeClick(I);
239 }
240 auto est = megaposeTracker.init(I, *detection).get();
241 vpHomogeneousMatrix cTo = est.cTo;
242 std::cout << "Estimate score = " << est.score << std::endl;
  // Record the annotated display frames as an image sequence.
243 writer.setFileName("video/I%05d.png");
244 //writer.setFramerate(60.0);
245 writer.open(I);
246
247 //vpHomogeneousMatrix oTw = cTo.inverse() * cTw;
  // cdTc: current-to-desired camera displacement predicted from MegaPose poses.
248 vpHomogeneousMatrix cdTc = cdTo * cTo.inverse();
  // NOTE(review): declarations of features `t` and `tu` (original lines
  // 249-250) are missing from this extraction.
251 t.buildFrom(cdTc);
252 tu.buildFrom(cdTc);
253
  // NOTE(review): declarations of `task`, `td`, `tud` (original lines 254-257
  // and 260, presumably vpServo setup with desired features at identity) are
  // missing from this extraction.
256
258 task.addFeature(t, td);
259 task.addFeature(tu, tud);
261 task.setInteractionMatrixType(vpServo::CURRENT);
262 task.setLambda(0.2);
263
264 vpPlot *plotter = nullptr;
265 int iter_plot = 0;
266
  // NOTE(review): `plotter` is only deleted on the success path below; if an
  // exception is thrown inside the servo loop it leaks. A std::unique_ptr
  // would fix this — confirm upstream.
267 if (opt_plot) {
268 plotter = new vpPlot(2, static_cast<int>(250 * 2), 500, static_cast<int>(I.getWidth()) + 80, 10,
269 "Real time curves plotter");
270 plotter->setTitle(0, "Visual features error");
271 plotter->setTitle(1, "Camera velocities");
272 plotter->initGraph(0, 6);
273 plotter->initGraph(1, 6);
274 plotter->setLegend(0, 0, "error_feat_tx");
275 plotter->setLegend(0, 1, "error_feat_ty");
276 plotter->setLegend(0, 2, "error_feat_tz");
277 plotter->setLegend(0, 3, "error_feat_theta_ux");
278 plotter->setLegend(0, 4, "error_feat_theta_uy");
279 plotter->setLegend(0, 5, "error_feat_theta_uz");
280 plotter->setLegend(1, 0, "vc_x");
281 plotter->setLegend(1, 1, "vc_y");
282 plotter->setLegend(1, 2, "vc_z");
283 plotter->setLegend(1, 3, "wc_x");
284 plotter->setLegend(1, 4, "wc_y");
285 plotter->setLegend(1, 5, "wc_z");
286 }
287
288 bool final_quit = false;
289 bool has_converged = false;
290 bool send_velocities = false;
291
292 robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);
  // NOTE(review): `vLastUpdate` and `prev_cTo` appear unused in the visible
  // code — possibly leftovers or used on lines lost in extraction.
293 vpColVector vLastUpdate(6);
294
295 vpHomogeneousMatrix prev_cTo = cTo;
296
297 vpColVector v(6);
298
299 bool callMegapose = true;
300 vpMegaPoseEstimate megaposeEstimate;
301
  // Main servo loop: runs until convergence or user quit / low confidence.
302 while (!has_converged && !final_quit) {
303 double t_start = vpTime::measureTimeMs();
304
305 rs.acquire(I);
  // Harvest the asynchronous MegaPose result when ready, then immediately
  // resubmit a tracking request on the freshest frame.
307 if (!callMegapose && trackerFuture.wait_for(std::chrono::milliseconds(0)) == std::future_status::ready) {
308 megaposeEstimate = trackerFuture.get();
309
310 cTo = megaposeEstimate.cTo;
311 callMegapose = true;
312 if (megaposeEstimate.score < 0.2) { // If confidence is low, exit
313 final_quit = true;
314 std::cout << "Low confidence, exiting" << std::endl;
315 }
316 }
317
318 if (callMegapose) {
319 std::cout << "Calling megapose" << std::endl;
320 trackerFuture = megaposeTracker.track(I);
321 callMegapose = false;
322 }
323
324 std::stringstream ss;
325 ss << "Left click to " << (send_velocities ? "stop the robot" : "servo the robot") << ", right click to quit.";
326 vpDisplay::displayText(I, 20, 20, ss.str(), vpColor::red);
327
328 // Update visual features
329 cdTc = cdTo * cTo.inverse();
330 t.buildFrom(cdTc);
331 tu.buildFrom(cdTc);
332 v = task.computeControlLaw();
333 velocities.push_back(v);
334
335 // Update true pose
  // Ground-truth cdTc from odometry, logged alongside the predicted one.
337 robot.getPosition(vpRobot::ARTICULAR_FRAME, q);
338 cTw = robot.get_fMc(q).inverse();
339 cdTc_true = cdTw * cTw.inverse();
340 vpPoseVector cdrc(cdTc_true);
341 error.push_back(cdrc);
342
343 // Display desired and current pose features
344 vpDisplay::displayFrame(I, cdTo, cam, 0.05, vpColor::yellow, 2);
345 vpDisplay::displayFrame(I, cTo, cam, 0.05, vpColor::none, 3);
346
347 if (opt_plot) {
348 plotter->plot(0, iter_plot, task.getError());
349 plotter->plot(1, iter_plot, v);
350 iter_plot++;
351 }
352
353 if (opt_verbose) {
354 std::cout << "v: " << v.t() << std::endl;
355 }
356
  // NOTE(review): `cd_t_c` is used below but its declaration (original line
  // 357, presumably cdTc.getTranslationVector()) is missing from this
  // extraction.
358 vpThetaUVector cd_tu_c = cdTc.getThetaUVector();
359 double error_tr = sqrt(cd_t_c.sumSquare());
360 double error_tu = vpMath::deg(sqrt(cd_tu_c.sumSquare()));
361 vpTranslationVector cd_t_c_true = cdTc_true.getTranslationVector();
362 vpThetaUVector cd_tu_c_true = cdTc_true.getThetaUVector();
363 double error_tr_true = sqrt(cd_t_c_true.sumSquare());
364 double error_tu_true = vpMath::deg(sqrt(cd_tu_c_true.sumSquare()));
365
366 ss.str("");
367 ss << "Predicted error_t: " << error_tr << ", True error_t:" << error_tr_true;
368 vpDisplay::displayText(I, 20, static_cast<int>(I.getWidth()) - 300, ss.str(), vpColor::red);
369 ss.str("");
370 ss << "Predicted error_tu: " << error_tu << ", True error_tu:" << error_tu_true;
371 vpDisplay::displayText(I, 40, static_cast<int>(I.getWidth()) - 300, ss.str(), vpColor::red);
372
373 if (opt_verbose)
374 std::cout << "error translation: " << error_tr << " ; error rotation: " << error_tu << std::endl;
375
  // Convergence test uses the PREDICTED error; true error is display-only.
376 if (error_tr < convergence_threshold_t && error_tu < convergence_threshold_tu) {
377 has_converged = true;
378 std::cout << "Servo task has converged" << std::endl;
379 vpDisplay::displayText(I, 100, 20, "Servo task has converged", vpColor::red);
380 }
381
382 // Send to the robot
  // NOTE(review): in the visible code the computed velocity is sent even when
  // send_velocities is false; sibling ViSP examples zero v when not servoing.
  // A guard may have been on a line lost in extraction — confirm upstream.
383 robot.setVelocity(vpRobot::CAMERA_FRAME, v);
384
385 ss.str("");
386 ss << "Loop time: " << vpTime::measureTimeMs() - t_start << " ms";
387 vpDisplay::displayText(I, 40, 20, ss.str(), vpColor::red);
  // Save the annotated frame (overlays included) to the output sequence.
389 vpImage<vpRGBa> displayImage;
390 vpDisplay::getImage(I, displayImage);
391 writer.saveFrame(displayImage);
392
  // NOTE(review): the declaration of `button` (original line 393) and the
  // `case` labels (original lines 396 and 400, presumably vpMouseButton::
  // button1 / button3) are missing from this extraction.
394 if (vpDisplay::getClick(I, button, false)) {
395 switch (button) {
397 send_velocities = !send_velocities;
398 break;
399
401 final_quit = true;
402 v = 0;
403 break;
404
405 default:
406 break;
407 }
408 }
409 }
410 std::cout << "Stop the robot " << std::endl;
411 robot.setRobotState(vpRobot::STATE_STOP);
412
413#ifdef VISP_HAVE_NLOHMANN_JSON
414 // Save results to JSON
415 json j = json {
416 {"velocities", velocities},
417 {"error", error}
418 };
419 std::ofstream jsonFile;
420 jsonFile.open("results.json");
421 jsonFile << j.dump(4);
422 jsonFile.close();
423#endif
424
425 if (opt_plot && plotter != nullptr) {
426 delete plotter;
427 plotter = nullptr;
428 }
429
  // After convergence, keep displaying frames until the user clicks to quit.
430 if (!final_quit) {
431 while (!final_quit) {
432 rs.acquire(I);
434
435 vpDisplay::displayText(I, 20, 20, "Click to quit the program.", vpColor::red);
436 vpDisplay::displayText(I, 40, 20, "Visual servo converged.", vpColor::red);
437
438 if (vpDisplay::getClick(I, false)) {
439 final_quit = true;
440 }
441
443 }
444 }
445 }
446 catch (const vpException &e) {
447 std::cout << "ViSP exception: " << e.what() << std::endl;
448 std::cout << "Stop the robot " << std::endl;
449 robot.setRobotState(vpRobot::STATE_STOP);
450#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
451 if (display != nullptr) {
452 delete display;
453 }
454#endif
455 return EXIT_FAILURE;
456 }
  // NOTE(review): "ur_rtde exception" looks copy-pasted from a UR-robot
  // example — misleading on an Afma6. Also, unlike the vpException handler,
  // this path does not stop the robot. Both are runtime-behavior changes, so
  // they are only flagged here — confirm and fix upstream.
457 catch (const std::exception &e) {
458 std::cout << "ur_rtde exception: " << e.what() << std::endl;
459#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
460 if (display != nullptr) {
461 delete display;
462 }
463#endif
464 return EXIT_FAILURE;
465 }
466
467#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
468 if (display != nullptr) {
469 delete display;
470 }
471#endif
472 return EXIT_SUCCESS;
473}
474#else
/**
 * Fallback entry point compiled when one of the example's prerequisites is
 * missing (see the guard at the top of the file). Prints which prerequisite
 * is absent and exits successfully so CI treats the example as skipped.
 *
 * Fix: the compile-time guard requires five conditions (librealsense2, C++17,
 * a display backend, Afma6 support, and the dnn_tracker module) but the
 * original stub only diagnosed three of them; the two missing diagnostics
 * are added below.
 */
int main()
{
#if !defined(VISP_HAVE_REALSENSE2)
  std::cout << "Install librealsense-2.x" << std::endl;
#endif
#if !((__cplusplus >= 201703L) || (defined(_MSVC_LANG) && (_MSVC_LANG >= 201703L)))
  std::cout << "Build ViSP with c++17 or higher compiler flag (cmake -DUSE_CXX_STANDARD=17)." << std::endl;
#endif
#if !defined(VISP_HAVE_AFMA6)
  std::cout << "ViSP is not built with Afma-6 robot support..." << std::endl;
#endif
#if !defined(VISP_HAVE_DISPLAY)
  // Added: display backend is part of the guard but was not diagnosed.
  std::cout << "ViSP is not built with a display library (X11, GDI, OpenCV, ...)..." << std::endl;
#endif
#if !defined(VISP_HAVE_MODULE_DNN_TRACKER)
  // Added: the dnn_tracker module (MegaPose client) is required by the guard.
  std::cout << "ViSP is not built with the dnn_tracker module..." << std::endl;
#endif
  return EXIT_SUCCESS;
}
488#endif
Generic class defining intrinsic camera parameters.
@ perspectiveProjWithoutDistortion
Perspective projection without distortion model.
Implementation of column vector and the associated operations.
static const vpColor red
Definition vpColor.h:198
static const vpColor none
Definition vpColor.h:210
static const vpColor yellow
Definition vpColor.h:206
Class that defines generic functionalities for display.
Definition vpDisplay.h:171
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void displayFrame(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, double size, const vpColor &color=vpColor::none, unsigned int thickness=1, const vpImagePoint &offset=vpImagePoint(0, 0), const std::string &frameName="", const vpColor &textColor=vpColor::black, const vpImagePoint &textOffset=vpImagePoint(15, 15))
static void getImage(const vpImage< unsigned char > &Is, vpImage< vpRGBa > &Id)
static void displayCross(const vpImage< unsigned char > &I, const vpImagePoint &ip, unsigned int size, const vpColor &color, unsigned int thickness=1)
static void flush(const vpImage< unsigned char > &I)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
error that can be emitted by ViSP classes.
Definition vpException.h:60
@ ioError
I/O error.
Definition vpException.h:67
@ badValue
Used to indicate that a value is not in the allowed range.
Definition vpException.h:73
Class that defines a 3D visual feature from an axis/angle parametrization that represents the rotation between two frames.
Class that defines the translation visual feature .
Implementation of an homogeneous matrix and operations on such kind of matrices.
vpThetaUVector getThetaUVector() const
VP_DEPRECATED void init()
vpHomogeneousMatrix inverse() const
vpTranslationVector getTranslationVector() const
Class that defines a 2D point in an image. This class is useful for image processing and stores only ...
Definition of the vpImage class member functions.
Definition vpImage.h:131
Command line argument parsing with support for JSON files. If a JSON file is supplied,...
static double deg(double rad)
Definition vpMath.h:119
vpHomogeneousMatrix cTo
Definition vpMegaPose.h:76
A simplified interface to track a single object with MegaPose. This tracker works asynchronously: A c...
This class enables real time drawing of 2D or 3D graphics. An instance of the class open a window whi...
Definition vpPlot.h:117
Implementation of a pose vector and operations on poses.
vpCameraParameters getCameraParameters(const rs2_stream &stream, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithDistortion, int index=-1) const
void acquire(vpImage< unsigned char > &grey, double *ts=nullptr)
bool open(const rs2::config &cfg=rs2::config())
Defines a rectangle in the plane.
Definition vpRect.h:79
Control of Irisa's gantry robot named Afma6.
@ ARTICULAR_FRAME
Definition vpRobot.h:77
@ CAMERA_FRAME
Definition vpRobot.h:81
@ STATE_POSITION_CONTROL
Initialize the position controller.
Definition vpRobot.h:65
@ STATE_VELOCITY_CONTROL
Initialize the velocity controller.
Definition vpRobot.h:64
@ STATE_STOP
Stops robot motion especially in velocity and acceleration control.
Definition vpRobot.h:63
double sumSquare() const
@ EYEINHAND_CAMERA
Definition vpServo.h:176
@ CURRENT
Definition vpServo.h:217
Implementation of a rotation vector as axis-angle minimal representation.
Class that consider the case of a translation vector.
Class that enables to write easily a video file or a sequence of images.
void saveFrame(vpImage< vpRGBa > &I)
void setFileName(const std::string &filename)
void open(vpImage< vpRGBa > &I)
std::shared_ptr< vpDisplay > createDisplay()
Return a smart pointer vpDisplay specialization if a GUI library is available or nullptr otherwise.
vpDisplay * allocateDisplay()
Return a newly allocated vpDisplay specialization if a GUI library is available or nullptr otherwise.
VISP_EXPORT double measureTimeMs()