Visual Servoing Platform version 3.7.0
Loading...
Searching...
No Matches
servoPioneerPoint2DDepth.cpp
1/*
2 * ViSP, open source Visual Servoing Platform software.
3 * Copyright (C) 2005 - 2025 by Inria. All rights reserved.
4 *
5 * This software is free software; you can redistribute it and/or modify
6 * it under the terms of the GNU General Public License as published by
7 * the Free Software Foundation; either version 2 of the License, or
8 * (at your option) any later version.
9 * See the file LICENSE.txt at the root directory of this source
10 * distribution for additional information about the GNU GPL.
11 *
12 * For using ViSP with software that can not be combined with the GNU
13 * GPL, please contact Inria about acquiring a ViSP Professional
14 * Edition License.
15 *
16 * See https://visp.inria.fr for more information.
17 *
18 * This software was developed at:
19 * Inria Rennes - Bretagne Atlantique
20 * Campus Universitaire de Beaulieu
21 * 35042 Rennes Cedex
22 * France
23 *
24 * If you have questions regarding the use of this file, please contact
25 * Inria at visp@inria.fr
26 *
27 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
28 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
29 *
30 * Description:
31 * IBVS on Pioneer P3DX mobile platform
32 */
33#include <iostream>
34
35#include <visp3/core/vpConfig.h>
36
38// Comment / uncomment following lines to use the specific 3rd party compatible with your camera
39// #undef VISP_HAVE_V4L2
40// #undef VISP_HAVE_DC1394
41// #undef VISP_HAVE_CMU1394
42// #undef HAVE_OPENCV_HIGHGUI
43// #undef HAVE_OPENCV_VIDEOIO
45
46#include <visp3/blob/vpDot2.h>
47#include <visp3/core/vpCameraParameters.h>
48#include <visp3/core/vpHomogeneousMatrix.h>
49#include <visp3/core/vpImage.h>
50#include <visp3/core/vpImageConvert.h>
51#include <visp3/core/vpVelocityTwistMatrix.h>
52#include <visp3/gui/vpDisplayFactory.h>
53#include <visp3/robot/vpRobotPioneer.h> // Include first to avoid build issues with Status, None, isfinite
54#include <visp3/sensor/vp1394CMUGrabber.h>
55#include <visp3/sensor/vp1394TwoGrabber.h>
56#include <visp3/sensor/vpV4l2Grabber.h>
57#include <visp3/visual_features/vpFeatureBuilder.h>
58#include <visp3/visual_features/vpFeatureDepth.h>
59#include <visp3/visual_features/vpFeaturePoint.h>
60#include <visp3/vs/vpServo.h>
61
62#if defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION < 0x030000) && defined(HAVE_OPENCV_HIGHGUI)
63#include <opencv2/highgui/highgui.hpp> // for cv::VideoCapture
64#elif defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x030000) && defined(HAVE_OPENCV_VIDEOIO)
65#include <opencv2/videoio/videoio.hpp> // for cv::VideoCapture
66#endif
67
68#if defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_CMU1394) || defined(VISP_HAVE_OPENCV) && \
69 (((VISP_HAVE_OPENCV_VERSION < 0x030000) && defined(HAVE_OPENCV_HIGHGUI)) || \
70 ((VISP_HAVE_OPENCV_VERSION >= 0x030000) && defined(HAVE_OPENCV_VIDEOIO)))
71#if defined(VISP_HAVE_DISPLAY)
72#if defined(VISP_HAVE_PIONEER)
73#define TEST_COULD_BE_ACHIEVED
74#endif
75#endif
76#endif
77
100#ifdef TEST_COULD_BE_ACHIEVED
// Entry point: IBVS (image-based visual servoing) on a Pioneer P3DX mobile robot.
// Two visual features are servoed: the x coordinate of a tracked blob (vpFeaturePoint)
// and log(Z/Z*) (vpFeatureDepth), where the depth Z is estimated from the blob
// surface (image moment m00). The computed velocity v (v[0] in m/s, v[1] printed in
// deg/s) is sent to the robot in the reference frame; a click in the viewer exits.
// NOTE(review): this listing omits some original source lines (the declarations of
// 'cam', the V4L2/CMU grabbers 'g', 'task', 'cVe', and the vpDisplay::display/flush
// calls) — confirm against the original example before editing code.
101int main(int argc, char **argv)
102{
103#ifdef ENABLE_VISP_NAMESPACE
104 using namespace VISP_NAMESPACE_NAME;
105#endif
106
107#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
108 std::shared_ptr<vpDisplay> display;
109#else
110 vpDisplay *display = nullptr;
111#endif
112 try {
113 vpImage<unsigned char> I; // Create a gray level image container
114 double depth = 1.;
115 double lambda = 0.6;
116 double coef = 1. / 6.77; // Scale parameter used to estimate the depth Z
117 // of the blob from its surface
118
119 vpRobotPioneer robot;
120 ArArgumentParser parser(&argc, argv);
121 parser.loadDefaultArguments();
122
123 // ArRobotConnector connects to the robot, get some initial data from it
124 // such as type and name, and then loads parameter files for this robot.
125 ArRobotConnector robotConnector(&parser, &robot);
126 if (!robotConnector.connectRobot()) {
127 ArLog::log(ArLog::Terse, "Could not connect to the robot.");
128 if (parser.checkHelpAndWarnUnparsed()) {
129 Aria::logOptions();
130 Aria::exit(1);
131 }
132 }
133 if (!Aria::parseArgs()) {
134 Aria::logOptions();
135 Aria::shutdown();
 // NOTE(review): 'return false' converts to exit code 0 (success) even though
 // argument parsing failed — presumably EXIT_FAILURE was intended; confirm.
136 return false;
137 }
138
139 // Wait 3 sec to be sure that the low level Aria thread used to control
140 // the robot is started. Without this delay we experienced a delay
141 // (around 2.2 sec) between the velocity sent to the robot and the
142 // velocity that is really applied to the wheels.
143 vpTime::sleepMs(3000);
144
145 std::cout << "Robot connected" << std::endl;
146
147 // Camera parameters. In this experiment we don't need a precise
148 // calibration of the camera
 // NOTE(review): the declaration of 'cam' (vpCameraParameters) sits on a line
 // missing from this listing.
150
151 // Create the camera framegrabber
152#if defined(VISP_HAVE_OPENCV) && \
153 (((VISP_HAVE_OPENCV_VERSION < 0x030000) && defined(HAVE_OPENCV_HIGHGUI)) || \
154 ((VISP_HAVE_OPENCV_VERSION >= 0x030000) && defined(HAVE_OPENCV_VIDEOIO)))
155 int device = 1;
156 std::cout << "Use device: " << device << std::endl;
157 cv::VideoCapture g(device); // open the default camera
 // NOTE(review): CV_CAP_PROP_* are the legacy C constant names; OpenCV 3+/4 use
 // cv::CAP_PROP_FRAME_WIDTH / cv::CAP_PROP_FRAME_HEIGHT — confirm this builds
 // against the targeted OpenCV version.
158 g.set(CV_CAP_PROP_FRAME_WIDTH, 640);
159 g.set(CV_CAP_PROP_FRAME_HEIGHT, 480);
160 if (!g.isOpened()) // check if we succeeded
161 return EXIT_FAILURE;
162 cv::Mat frame;
163 g >> frame; // get a new frame from camera
164 vpImageConvert::convert(frame, I);
165
166 // Logitech sphere camera parameters
167 cam.initPersProjWithoutDistortion(558, 555, 312, 210);
168#elif defined(VISP_HAVE_V4L2)
169 // Create a grabber based on v4l2 third party lib (for usb cameras under
170 // Linux)
 // NOTE(review): the declaration of the vpV4l2Grabber 'g' sits on a line missing
 // from this listing.
172 g.setScale(1);
173 g.setInput(0);
174 g.setDevice("/dev/video1");
175 g.open(I);
176 // Logitech sphere camera parameters
177 cam.initPersProjWithoutDistortion(558, 555, 312, 210);
178#elif defined(VISP_HAVE_DC1394)
179 // Create a grabber based on libdc1394-2.x third party lib (for firewire
180 // cameras under Linux)
181 vp1394TwoGrabber g(false);
184 // AVT Pike 032C parameters
185 cam.initPersProjWithoutDistortion(800, 795, 320, 216);
186#elif defined(VISP_HAVE_CMU1394)
187 // Create a grabber based on CMU 1394 third party lib (for firewire
188 // cameras under windows)
 // NOTE(review): the declaration of the vp1394CMUGrabber 'g' sits on a line
 // missing from this listing.
190 g.setVideoMode(0, 5); // 640x480 MONO8
191 g.setFramerate(4); // 30 Hz
192 g.open(I);
193 // AVT Pike 032C parameters
194 cam.initPersProjWithoutDistortion(800, 795, 320, 216);
195#endif
196
197 // Acquire an image from the grabber
198#if defined(VISP_HAVE_OPENCV) && \
199 (((VISP_HAVE_OPENCV_VERSION < 0x030000) && defined(HAVE_OPENCV_HIGHGUI)) || \
200 ((VISP_HAVE_OPENCV_VERSION >= 0x030000) && defined(HAVE_OPENCV_VIDEOIO)))
201 g >> frame; // get a new frame from camera
202 vpImageConvert::convert(frame, I);
203#else
204 g.acquire(I);
205#endif
206
207 // Create an image viewer
208#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
209 display = vpDisplayFactory::createDisplay(I, 10, 10, "Current frame");
210#else
211 display = vpDisplayFactory::allocateDisplay(I, 10, 10, "Current frame");
212#endif
215
216 // Create a blob tracker
217 vpDot2 dot;
218 dot.setGraphics(true);
219 dot.setComputeMoments(true);
220 dot.setEllipsoidShapePrecision(0.); // to track a blob without any constraint on the shape
221 dot.setGrayLevelPrecision(0.9); // to set the blob gray level bounds for binarisation
222 dot.setEllipsoidBadPointsPercentage(0.5); // to accept 50% of bad inner
223 // and outside points with bad
224 // gray level
225 dot.initTracking(I);
227
 // NOTE(review): the declaration of 'task' (vpServo) and its
 // task.setServo(vpServo::EYEINHAND_L_cVe_eJe) setup sit on lines missing from
 // this listing (the cross-reference section confirms EYEINHAND_L_cVe_eJe is used).
230 task.setInteractionMatrixType(vpServo::DESIRED, vpServo::PSEUDO_INVERSE);
231 task.setLambda(lambda);
 // NOTE(review): the declaration of 'cVe' (vpVelocityTwistMatrix) sits on a line
 // missing from this listing.
233 cVe = robot.get_cVe();
234 task.set_cVe(cVe);
235
236 std::cout << "cVe: \n" << cVe << std::endl;
237
 // Robot Jacobian expressed in the end-effector (mobile base) frame.
238 vpMatrix eJe;
239 robot.get_eJe(eJe);
240 task.set_eJe(eJe);
241 std::cout << "eJe: \n" << eJe << std::endl;
242
243 // Current and desired visual feature associated to the x coordinate of
244 // the point
245 vpFeaturePoint s_x, s_xd;
246
247 // Create the current x visual feature
248 vpFeatureBuilder::create(s_x, cam, dot);
249
250 // Create the desired x* visual feature
251 s_xd.buildFrom(0, 0, depth);
252
253 // Add the feature
254 task.addFeature(s_x, s_xd);
255
256 // Create the current log(Z/Z*) visual feature
257 vpFeatureDepth s_Z, s_Zd;
258 // Surface of the blob estimated from the image moment m00 and converted
259 // in meters
260 double surface = 1. / sqrt(dot.m00 / (cam.get_px() * cam.get_py()));
261 double Z, Zd;
262 // Initial depth of the blob in front of the camera
263 Z = coef * surface;
264 // Desired depth Z* of the blob. This depth is learned and equal to the
265 // initial depth
266 Zd = Z;
267
268 std::cout << "Z " << Z << std::endl;
269 s_Z.buildFrom(s_x.get_x(), s_x.get_y(), Z,
270 0); // log(Z/Z*) = 0 that's why the last parameter is 0
271 s_Zd.buildFrom(s_x.get_x(), s_x.get_y(), Zd,
272 0); // log(Z/Z*) = 0 that's why the last parameter is 0
273
274 // Add the feature
275 task.addFeature(s_Z, s_Zd);
276
 // Velocity skew computed by the control law: v[0] translation, v[1] rotation.
277 vpColVector v; // vz, wx
278
 // Servo loop: acquire, track, update features, compute and send velocities.
279 while (1) {
280 // Acquire a new image
281#if defined(VISP_HAVE_OPENCV) && \
282 (((VISP_HAVE_OPENCV_VERSION < 0x030000) && defined(HAVE_OPENCV_HIGHGUI)) || \
283 ((VISP_HAVE_OPENCV_VERSION >= 0x030000) && defined(HAVE_OPENCV_VIDEOIO)))
284 g >> frame; // get a new frame from camera
285 vpImageConvert::convert(frame, I);
286#else
287 g.acquire(I);
288#endif
289 // Set the image as background of the viewer
 // NOTE(review): the vpDisplay::display(I) call sits on a line missing from
 // this listing.
291
292 // Do the blob tracking
293 dot.track(I);
294 // Update the current x feature
295 vpFeatureBuilder::create(s_x, cam, dot);
296
297 // Update log(Z/Z*) feature. Since the depth Z change, we need to update
298 // the interaction matrix
299 surface = 1. / sqrt(dot.m00 / (cam.get_px() * cam.get_py()));
300 Z = coef * surface;
301 s_Z.buildFrom(s_x.get_x(), s_x.get_y(), Z, log(Z / Zd));
302
 // Refresh cVe and eJe each iteration before computing the control law.
303 robot.get_cVe(cVe);
304 task.set_cVe(cVe);
305
306 robot.get_eJe(eJe);
307 task.set_eJe(eJe);
308
309 // Compute the control law. Velocities are computed in the mobile robot
310 // reference frame
311 v = task.computeControlLaw();
312
313 std::cout << "Send velocity to the pioneer: " << v[0] << " m/s " << vpMath::deg(v[1]) << " deg/s" << std::endl;
314
315 // Send the velocity to the robot
316 robot.setVelocity(vpRobot::REFERENCE_FRAME, v);
317
318 // Draw a vertical line which corresponds to the desired x coordinate of
319 // the dot cog
320 vpDisplay::displayLine(I, 0, 320, 479, 320, vpColor::red);
 // NOTE(review): the vpDisplay::flush(I) call sits on a line missing from
 // this listing.
322
323 // A click in the viewer to exit
324 if (vpDisplay::getClick(I, false))
325 break;
326 }
327
328 std::cout << "Ending robot thread..." << std::endl;
329 robot.stopRunning();
330
331 // wait for the thread to stop
332 robot.waitForRunExit();
333
334 // Kill the servo task
335 task.print();
336#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
337 if (display != nullptr) {
338 delete display;
339 }
340#endif
341 return EXIT_SUCCESS;
342 }
343 catch (const vpException &e) {
344 std::cout << "Catch an exception: " << e << std::endl;
345#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
346 if (display != nullptr) {
347 delete display;
348 }
349#endif
350 return EXIT_FAILURE;
351 }
352}
353#else
// Fallback entry point: compiled when the required 3rd party libraries
// (framegrabber, display, Aria/Pioneer) are not available.
int main()
{
  const char *msg = "You don't have the right 3rd party libraries to run this example...";
  std::cout << msg << std::endl;
  return EXIT_SUCCESS;
}
359#endif
Firewire cameras video capture based on CMU 1394 Digital Camera SDK.
void setVideoMode(unsigned long format, unsigned long mode)
void acquire(vpImage< unsigned char > &I)
void setFramerate(unsigned long fps)
void open(vpImage< unsigned char > &I)
Class for firewire ieee1394 video devices using libdc1394-2.x api.
Generic class defining intrinsic camera parameters.
Implementation of column vector and the associated operations.
static const vpColor red
Definition vpColor.h:198
Class that defines generic functionalities for display.
Definition vpDisplay.h:171
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void displayLine(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color, unsigned int thickness=1, bool segment=true)
static void flush(const vpImage< unsigned char > &I)
This tracker is meant to track a blob (connex pixels with same gray level) on a vpImage.
Definition vpDot2.h:127
void track(const vpImage< unsigned char > &I, bool canMakeTheWindowGrow=true)
Definition vpDot2.cpp:441
void setGraphics(bool activate)
Definition vpDot2.h:320
double m00
Definition vpDot2.h:394
void setGrayLevelPrecision(const double &grayLevelPrecision)
Definition vpDot2.cpp:715
void setEllipsoidBadPointsPercentage(const double &percentage=0.0)
Definition vpDot2.h:292
void setEllipsoidShapePrecision(const double &ellipsoidShapePrecision)
Definition vpDot2.cpp:790
void setComputeMoments(bool activate)
Definition vpDot2.h:278
void initTracking(const vpImage< unsigned char > &I, unsigned int size=0)
Definition vpDot2.cpp:263
error that can be emitted by ViSP classes.
Definition vpException.h:60
static void create(vpFeaturePoint &s, const vpCameraParameters &cam, const vpDot &d)
Class that defines a 3D point visual feature which is composed of one parameter, log(Z/Z*), that defines the current depth relative to the desired depth.
vpFeatureDepth & buildFrom(const double &x, const double &y, const double &Z, const double &LogZoverZstar)
Class that defines a 2D point visual feature which is composed of two parameters that are the Cartesian coordinates x and y of the point.
vpFeaturePoint & buildFrom(const double &x, const double &y, const double &Z)
double get_y() const
double get_x() const
static void convert(const vpImage< unsigned char > &src, vpImage< vpRGBa > &dest)
Definition of the vpImage class member functions.
Definition vpImage.h:131
static double deg(double rad)
Definition vpMath.h:119
Implementation of a matrix and operations on matrices.
Definition vpMatrix.h:175
Interface for Pioneer mobile robots based on Aria 3rd party library.
@ REFERENCE_FRAME
Definition vpRobot.h:75
@ EYEINHAND_L_cVe_eJe
Definition vpServo.h:183
@ PSEUDO_INVERSE
Definition vpServo.h:250
@ DESIRED
Definition vpServo.h:223
Class that is a wrapper over the Video4Linux2 (V4L2) driver.
void setFramerate(vpV4l2FramerateType framerate)
void setInput(unsigned input=vpV4l2Grabber::DEFAULT_INPUT)
void open(vpImage< unsigned char > &I)
void setScale(unsigned scale=vpV4l2Grabber::DEFAULT_SCALE)
void setDevice(const std::string &devname)
std::shared_ptr< vpDisplay > createDisplay()
Return a smart pointer vpDisplay specialization if a GUI library is available or nullptr otherwise.
vpDisplay * allocateDisplay()
Return a newly allocated vpDisplay specialization if a GUI library is available or nullptr otherwise.
VISP_EXPORT void sleepMs(double t)