Visual Servoing Platform version 3.6.0
Loading...
Searching...
No Matches
servoAfma6AprilTagIBVS.cpp
1/****************************************************************************
2 *
3 * ViSP, open source Visual Servoing Platform software.
4 * Copyright (C) 2005 - 2023 by Inria. All rights reserved.
5 *
6 * This software is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 * See the file LICENSE.txt at the root directory of this source
11 * distribution for additional information about the GNU GPL.
12 *
13 * For using ViSP with software that can not be combined with the GNU
14 * GPL, please contact Inria about acquiring a ViSP Professional
15 * Edition License.
16 *
17 * See https://visp.inria.fr for more information.
18 *
19 * This software was developed at:
20 * Inria Rennes - Bretagne Atlantique
21 * Campus Universitaire de Beaulieu
22 * 35042 Rennes Cedex
23 * France
24 *
25 * If you have questions regarding the use of this file, please contact
26 * Inria at visp@inria.fr
27 *
28 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
29 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
30 *
31 * Description:
32 * tests the control law
33 * eye-in-hand control
34 * velocity computed in the camera frame
35 *
36*****************************************************************************/
54#include <iostream>
55
56#include <visp3/core/vpCameraParameters.h>
57#include <visp3/detection/vpDetectorAprilTag.h>
58#include <visp3/gui/vpDisplayGDI.h>
59#include <visp3/gui/vpDisplayX.h>
60#include <visp3/gui/vpPlot.h>
61#include <visp3/io/vpImageIo.h>
62#include <visp3/robot/vpRobotAfma6.h>
63#include <visp3/sensor/vpRealSense2.h>
64#include <visp3/visual_features/vpFeatureBuilder.h>
65#include <visp3/visual_features/vpFeaturePoint.h>
66#include <visp3/vs/vpServo.h>
67#include <visp3/vs/vpServoDisplay.h>
68
69#if defined(VISP_HAVE_REALSENSE2) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11) && \
70 (defined(VISP_HAVE_X11) || defined(VISP_HAVE_GDI)) && defined(VISP_HAVE_AFMA6)
71
72void display_point_trajectory(const vpImage<unsigned char> &I, const std::vector<vpImagePoint> &vip,
73 std::vector<vpImagePoint> *traj_vip)
74{
75 for (size_t i = 0; i < vip.size(); i++) {
76 if (traj_vip[i].size()) {
77 // Add the point only if distance with the previous > 1 pixel
78 if (vpImagePoint::distance(vip[i], traj_vip[i].back()) > 1.) {
79 traj_vip[i].push_back(vip[i]);
80 }
81 } else {
82 traj_vip[i].push_back(vip[i]);
83 }
84 }
85 for (size_t i = 0; i < vip.size(); i++) {
86 for (size_t j = 1; j < traj_vip[i].size(); j++) {
87 vpDisplay::displayLine(I, traj_vip[i][j - 1], traj_vip[i][j], vpColor::green, 2);
88 }
89 }
90}
91
92int main(int argc, char **argv)
93{
94 double opt_tagSize = 0.120;
95 bool display_tag = true;
96 int opt_quad_decimate = 2;
97 bool opt_verbose = false;
98 bool opt_plot = false;
99 bool opt_adaptive_gain = false;
100 bool opt_task_sequencing = false;
101 double convergence_threshold = 0.00005;
102
103 for (int i = 1; i < argc; i++) {
104 if (std::string(argv[i]) == "--tag_size" && i + 1 < argc) {
105 opt_tagSize = std::stod(argv[i + 1]);
106 } else if (std::string(argv[i]) == "--verbose") {
107 opt_verbose = true;
108 } else if (std::string(argv[i]) == "--plot") {
109 opt_plot = true;
110 } else if (std::string(argv[i]) == "--adaptive_gain") {
111 opt_adaptive_gain = true;
112 } else if (std::string(argv[i]) == "--task_sequencing") {
113 opt_task_sequencing = true;
114 } else if (std::string(argv[i]) == "--quad_decimate" && i + 1 < argc) {
115 opt_quad_decimate = std::stoi(argv[i + 1]);
116 } else if (std::string(argv[i]) == "--no-convergence-threshold") {
117 convergence_threshold = 0.;
118 } else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
119 std::cout
120 << argv[0] << " --tag_size <marker size in meter; default " << opt_tagSize << ">] "
121 << "[--quad_decimate <decimation; default " << opt_quad_decimate
122 << ">] [--adaptive_gain] [--plot] [--task_sequencing] [--no-convergence-threshold] [--verbose] [--help] [-h]"
123 << "\n";
124 return EXIT_SUCCESS;
125 }
126 }
127
128 vpRobotAfma6 robot;
129
130 try {
131 std::cout << "WARNING: This example will move the robot! "
132 << "Please make sure to have the user stop button at hand!" << std::endl
133 << "Press Enter to continue..." << std::endl;
134 std::cin.ignore();
135
136 /*
137 * Move to a safe position
138 */
139 vpColVector q(6, 0);
140 q[0] = 0.;
141 q[1] = 0.;
142 q[2] = 0.;
143 q[3] = vpMath::rad(0.);
144 q[4] = vpMath::rad(0.);
145 q[5] = vpMath::rad(0.);
146
147 std::cout << "Move to joint position: " << q.t() << std::endl;
149 robot.setPosition(vpRobot::JOINT_STATE, q);
150
151 vpRealSense2 rs;
152 rs2::config config;
153 unsigned int width = 640, height = 480;
154 config.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30);
155 config.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, 30);
156 config.enable_stream(RS2_STREAM_INFRARED, 640, 480, RS2_FORMAT_Y8, 30);
157 rs.open(config);
158
159 // Get camera intrinsics
162 std::cout << "cam:\n" << cam << "\n";
163
164 vpImage<unsigned char> I(height, width);
165
166#if defined(VISP_HAVE_X11)
167 vpDisplayX dc(I, 10, 10, "Color image");
168#elif defined(VISP_HAVE_GDI)
169 vpDisplayGDI dc(I, 10, 10, "Color image");
170#endif
171
174 // vpDetectorAprilTag::vpPoseEstimationMethod poseEstimationMethod = vpDetectorAprilTag::BEST_RESIDUAL_VIRTUAL_VS;
175 vpDetectorAprilTag detector(tagFamily);
176 detector.setAprilTagPoseEstimationMethod(poseEstimationMethod);
177 detector.setDisplayTag(display_tag);
178 detector.setAprilTagQuadDecimate(opt_quad_decimate);
179
180 // Servo
181 vpHomogeneousMatrix cdMc, cMo, oMo;
182
183 // Desired pose used to compute the desired features
184 vpHomogeneousMatrix cdMo(vpTranslationVector(0, 0, opt_tagSize * 3), // 3 times tag with along camera z axis
185 vpRotationMatrix({1, 0, 0, 0, -1, 0, 0, 0, -1}));
186
187 // Create visual features
188 std::vector<vpFeaturePoint> p(4), pd(4); // We use 4 points
189
190 // Define 4 3D points corresponding to the CAD model of the Apriltag
191 std::vector<vpPoint> point(4);
192 point[0].setWorldCoordinates(-opt_tagSize / 2., -opt_tagSize / 2., 0);
193 point[1].setWorldCoordinates(opt_tagSize / 2., -opt_tagSize / 2., 0);
194 point[2].setWorldCoordinates(opt_tagSize / 2., opt_tagSize / 2., 0);
195 point[3].setWorldCoordinates(-opt_tagSize / 2., opt_tagSize / 2., 0);
196
197 vpServo task;
198 // Add the 4 visual feature points
199 for (size_t i = 0; i < p.size(); i++) {
200 task.addFeature(p[i], pd[i]);
201 }
204
205 if (opt_adaptive_gain) {
206 vpAdaptiveGain lambda(1.5, 0.4, 30); // lambda(0)=4, lambda(oo)=0.4 and lambda'(0)=30
207 task.setLambda(lambda);
208 } else {
209 task.setLambda(0.5);
210 }
211
212 vpPlot *plotter = nullptr;
213 int iter_plot = 0;
214
215 if (opt_plot) {
216 plotter = new vpPlot(2, static_cast<int>(250 * 2), 500, static_cast<int>(I.getWidth()) + 80, 10,
217 "Real time curves plotter");
218 plotter->setTitle(0, "Visual features error");
219 plotter->setTitle(1, "Camera velocities");
220 plotter->initGraph(0, 8);
221 plotter->initGraph(1, 6);
222 plotter->setLegend(0, 0, "error_feat_p1_x");
223 plotter->setLegend(0, 1, "error_feat_p1_y");
224 plotter->setLegend(0, 2, "error_feat_p2_x");
225 plotter->setLegend(0, 3, "error_feat_p2_y");
226 plotter->setLegend(0, 4, "error_feat_p3_x");
227 plotter->setLegend(0, 5, "error_feat_p3_y");
228 plotter->setLegend(0, 6, "error_feat_p4_x");
229 plotter->setLegend(0, 7, "error_feat_p4_y");
230 plotter->setLegend(1, 0, "vc_x");
231 plotter->setLegend(1, 1, "vc_y");
232 plotter->setLegend(1, 2, "vc_z");
233 plotter->setLegend(1, 3, "wc_x");
234 plotter->setLegend(1, 4, "wc_y");
235 plotter->setLegend(1, 5, "wc_z");
236 }
237
238 bool final_quit = false;
239 bool has_converged = false;
240 bool send_velocities = false;
241 bool servo_started = false;
242 std::vector<vpImagePoint> *traj_corners = nullptr; // To memorize point trajectory
243
244 static double t_init_servo = vpTime::measureTimeMs();
245
247
248 while (!has_converged && !final_quit) {
249 double t_start = vpTime::measureTimeMs();
250
251 rs.acquire(I);
252
254
255 std::vector<vpHomogeneousMatrix> cMo_vec;
256 detector.detect(I, opt_tagSize, cam, cMo_vec);
257
258 {
259 std::stringstream ss;
260 ss << "Left click to " << (send_velocities ? "stop the robot" : "servo the robot") << ", right click to quit.";
261 vpDisplay::displayText(I, 20, 20, ss.str(), vpColor::red);
262 }
263
264 vpColVector v_c(6);
265
266 // Only one tag is detected
267 if (cMo_vec.size() == 1) {
268 cMo = cMo_vec[0];
269
270 static bool first_time = true;
271 if (first_time) {
272 // Introduce security wrt tag positioning in order to avoid PI rotation
273 std::vector<vpHomogeneousMatrix> v_oMo(2), v_cdMc(2);
274 v_oMo[1].buildFrom(0, 0, 0, 0, 0, M_PI);
275 for (size_t i = 0; i < 2; i++) {
276 v_cdMc[i] = cdMo * v_oMo[i] * cMo.inverse();
277 }
278 if (std::fabs(v_cdMc[0].getThetaUVector().getTheta()) < std::fabs(v_cdMc[1].getThetaUVector().getTheta())) {
279 oMo = v_oMo[0];
280 } else {
281 std::cout << "Desired frame modified to avoid PI rotation of the camera" << std::endl;
282 oMo = v_oMo[1]; // Introduce PI rotation
283 }
284
285 // Compute the desired position of the features from the desired pose
286 for (size_t i = 0; i < point.size(); i++) {
287 vpColVector cP, p_;
288 point[i].changeFrame(cdMo * oMo, cP);
289 point[i].projection(cP, p_);
290
291 pd[i].set_x(p_[0]);
292 pd[i].set_y(p_[1]);
293 pd[i].set_Z(cP[2]);
294 }
295 }
296
297 // Get tag corners
298 std::vector<vpImagePoint> corners = detector.getPolygon(0);
299
300 // Update visual features
301 for (size_t i = 0; i < corners.size(); i++) {
302 // Update the point feature from the tag corners location
303 vpFeatureBuilder::create(p[i], cam, corners[i]);
304 // Set the feature Z coordinate from the pose
305 vpColVector cP;
306 point[i].changeFrame(cMo, cP);
307
308 p[i].set_Z(cP[2]);
309 }
310
311 if (opt_task_sequencing) {
312 if (!servo_started) {
313 if (send_velocities) {
314 servo_started = true;
315 }
316 t_init_servo = vpTime::measureTimeMs();
317 }
318 v_c = task.computeControlLaw((vpTime::measureTimeMs() - t_init_servo) / 1000.);
319 } else {
320 v_c = task.computeControlLaw();
321 }
322
323 // Display the current and desired feature points in the image display
324 vpServoDisplay::display(task, cam, I);
325 for (size_t i = 0; i < corners.size(); i++) {
326 std::stringstream ss;
327 ss << i;
328 // Display current point indexes
329 vpDisplay::displayText(I, corners[i] + vpImagePoint(15, 15), ss.str(), vpColor::red);
330 // Display desired point indexes
331 vpImagePoint ip;
332 vpMeterPixelConversion::convertPoint(cam, pd[i].get_x(), pd[i].get_y(), ip);
333 vpDisplay::displayText(I, ip + vpImagePoint(15, 15), ss.str(), vpColor::red);
334 }
335 if (first_time) {
336 traj_corners = new std::vector<vpImagePoint>[corners.size()];
337 }
338 // Display the trajectory of the points used as features
339 display_point_trajectory(I, corners, traj_corners);
340
341 if (opt_plot) {
342 plotter->plot(0, iter_plot, task.getError());
343 plotter->plot(1, iter_plot, v_c);
344 iter_plot++;
345 }
346
347 if (opt_verbose) {
348 std::cout << "v_c: " << v_c.t() << std::endl;
349 }
350
351 double error = task.getError().sumSquare();
352 std::stringstream ss;
353 ss << "error: " << error;
354 vpDisplay::displayText(I, 20, static_cast<int>(I.getWidth()) - 150, ss.str(), vpColor::red);
355
356 if (opt_verbose)
357 std::cout << "error: " << error << std::endl;
358
359 if (error < convergence_threshold) {
360 has_converged = true;
361 std::cout << "Servo task has converged"
362 << "\n";
363 vpDisplay::displayText(I, 100, 20, "Servo task has converged", vpColor::red);
364 }
365 if (first_time) {
366 first_time = false;
367 }
368 } // end if (cMo_vec.size() == 1)
369 else {
370 v_c = 0;
371 }
372
373 if (!send_velocities) {
374 v_c = 0;
375 }
376
377 // Send to the robot
379
380 {
381 std::stringstream ss;
382 ss << "Loop time: " << vpTime::measureTimeMs() - t_start << " ms";
383 vpDisplay::displayText(I, 40, 20, ss.str(), vpColor::red);
384 }
386
388 if (vpDisplay::getClick(I, button, false)) {
389 switch (button) {
391 send_velocities = !send_velocities;
392 break;
393
395 final_quit = true;
396 v_c = 0;
397 break;
398
399 default:
400 break;
401 }
402 }
403 }
404 std::cout << "Stop the robot " << std::endl;
406
407 if (opt_plot && plotter != nullptr) {
408 delete plotter;
409 plotter = nullptr;
410 }
411
412 if (!final_quit) {
413 while (!final_quit) {
414 rs.acquire(I);
416
417 vpDisplay::displayText(I, 20, 20, "Click to quit the program.", vpColor::red);
418 vpDisplay::displayText(I, 40, 20, "Visual servo converged.", vpColor::red);
419
420 if (vpDisplay::getClick(I, false)) {
421 final_quit = true;
422 }
423
425 }
426 }
427 if (traj_corners) {
428 delete[] traj_corners;
429 }
430 } catch (const vpException &e) {
431 std::cout << "ViSP exception: " << e.what() << std::endl;
432 std::cout << "Stop the robot " << std::endl;
434 return EXIT_FAILURE;
435 } catch (const std::exception &e) {
436 std::cout << "ur_rtde exception: " << e.what() << std::endl;
437 return EXIT_FAILURE;
438 }
439
440 return EXIT_SUCCESS;
441}
442#else
// Fallback build when required dependencies are missing: explain what to
// install/enable and exit successfully. (Fixes the scraped listing, which had
// Doxygen line numbers fused into the code.)
int main()
{
#if !defined(VISP_HAVE_REALSENSE2)
  std::cout << "Install librealsense-2.x" << std::endl;
#endif
#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
  std::cout << "Build ViSP with c++11 or higher compiler flag (cmake -DUSE_CXX_STANDARD=11)." << std::endl;
#endif
#if !defined(VISP_HAVE_AFMA6)
  std::cout << "ViSP is not build with Afma6 robot support..." << std::endl;
#endif
  return EXIT_SUCCESS;
}
456#endif
Adaptive gain computation.
Generic class defining intrinsic camera parameters.
@ perspectiveProjWithDistortion
Perspective projection with distortion model.
Implementation of column vector and the associated operations.
double sumSquare() const
static const vpColor red
Definition vpColor.h:211
static const vpColor green
Definition vpColor.h:214
@ TAG_36h11
AprilTag 36h11 pattern (recommended)
Display for windows using GDI (available on any windows 32 platform).
Use the X11 console to display images on unix-like OS. Thus to enable this class X11 should be instal...
Definition vpDisplayX.h:132
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void displayLine(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color, unsigned int thickness=1, bool segment=true)
static void flush(const vpImage< unsigned char > &I)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
error that can be emitted by ViSP classes.
Definition vpException.h:59
const char * what() const
static void create(vpFeaturePoint &s, const vpCameraParameters &cam, const vpDot &d)
Implementation of an homogeneous matrix and operations on such kind of matrices.
vpHomogeneousMatrix inverse() const
Class that defines a 2D point in an image. This class is useful for image processing and stores only ...
static double distance(const vpImagePoint &iP1, const vpImagePoint &iP2)
Definition of the vpImage class member functions.
Definition vpImage.h:135
unsigned int getWidth() const
Definition vpImage.h:242
static double rad(double deg)
Definition vpMath.h:116
static void convertPoint(const vpCameraParameters &cam, const double &x, const double &y, double &u, double &v)
This class enables real time drawing of 2D or 3D graphics. An instance of the class open a window whi...
Definition vpPlot.h:113
void initGraph(unsigned int graphNum, unsigned int curveNbr)
Definition vpPlot.cpp:202
void setLegend(unsigned int graphNum, unsigned int curveNum, const std::string &legend)
Definition vpPlot.cpp:545
void plot(unsigned int graphNum, unsigned int curveNum, double x, double y)
Definition vpPlot.cpp:269
void setTitle(unsigned int graphNum, const std::string &title)
Definition vpPlot.cpp:503
void acquire(vpImage< unsigned char > &grey, double *ts=NULL)
vpCameraParameters getCameraParameters(const rs2_stream &stream, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithDistortion, int index=-1) const
bool open(const rs2::config &cfg=rs2::config())
Control of Irisa's gantry robot named Afma6.
void setVelocity(const vpRobot::vpControlFrameType frame, const vpColVector &vel)
@ JOINT_STATE
Definition vpRobot.h:78
@ CAMERA_FRAME
Definition vpRobot.h:80
@ STATE_POSITION_CONTROL
Initialize the position controller.
Definition vpRobot.h:65
@ STATE_VELOCITY_CONTROL
Initialize the velocity controller.
Definition vpRobot.h:64
@ STATE_STOP
Stops robot motion especially in velocity and acceleration control.
Definition vpRobot.h:63
virtual vpRobotStateType setRobotState(const vpRobot::vpRobotStateType newState)
Definition vpRobot.cpp:198
Implementation of a rotation matrix and operations on such kind of matrices.
static void display(const vpServo &s, const vpCameraParameters &cam, const vpImage< unsigned char > &I, vpColor currentColor=vpColor::green, vpColor desiredColor=vpColor::red, unsigned int thickness=1)
void setInteractionMatrixType(const vpServoIteractionMatrixType &interactionMatrixType, const vpServoInversionType &interactionMatrixInversion=PSEUDO_INVERSE)
Definition vpServo.cpp:564
@ EYEINHAND_CAMERA
Definition vpServo.h:151
void setLambda(double c)
Definition vpServo.h:403
void setServo(const vpServoType &servo_type)
Definition vpServo.cpp:210
vpColVector getError() const
Definition vpServo.h:276
vpColVector computeControlLaw()
Definition vpServo.cpp:930
@ CURRENT
Definition vpServo.h:179
void addFeature(vpBasicFeature &s, vpBasicFeature &s_star, unsigned int select=vpBasicFeature::FEATURE_ALL)
Definition vpServo.cpp:487
Class that consider the case of a translation vector.
VISP_EXPORT double measureTimeMs()