Visual Servoing Platform version 3.6.0
Loading...
Searching...
No Matches
servoPixhawkDroneIBVS.cpp
1/****************************************************************************
2 *
3 * ViSP, open source Visual Servoing Platform software.
4 * Copyright (C) 2005 - 2023 by Inria. All rights reserved.
5 *
6 * This software is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 * See the file LICENSE.txt at the root directory of this source
11 * distribution for additional information about the GNU GPL.
12 *
13 * For using ViSP with software that can not be combined with the GNU
14 * GPL, please contact Inria about acquiring a ViSP Professional
15 * Edition License.
16 *
17 * See https://visp.inria.fr for more information.
18 *
19 * This software was developed at:
20 * Inria Rennes - Bretagne Atlantique
21 * Campus Universitaire de Beaulieu
22 * 35042 Rennes Cedex
23 * France
24 *
25 * If you have questions regarding the use of this file, please contact
26 * Inria at visp@inria.fr
27 *
28 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
29 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
30 *
31 * Description:
32 * Example that shows how to do visual servoing with a drone equipped with a Pixhawk.
33 *
34*****************************************************************************/
35
36#include <iostream>
37
38#include <visp3/core/vpConfig.h>
39
40#if defined(VISP_HAVE_MAVSDK) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_17) && defined(VISP_HAVE_REALSENSE2)
41
42#include <visp3/core/vpImageConvert.h>
43#include <visp3/core/vpMomentAreaNormalized.h>
44#include <visp3/core/vpMomentBasic.h>
45#include <visp3/core/vpMomentCentered.h>
46#include <visp3/core/vpMomentDatabase.h>
47#include <visp3/core/vpMomentGravityCenter.h>
48#include <visp3/core/vpMomentGravityCenterNormalized.h>
49#include <visp3/core/vpMomentObject.h>
50#include <visp3/core/vpPixelMeterConversion.h>
51#include <visp3/core/vpPoint.h>
52#include <visp3/core/vpTime.h>
53#include <visp3/core/vpXmlParserCamera.h>
54#include <visp3/detection/vpDetectorAprilTag.h>
55#include <visp3/gui/vpDisplayGDI.h>
56#include <visp3/gui/vpDisplayX.h>
57#include <visp3/gui/vpPlot.h>
58#include <visp3/robot/vpRobotMavsdk.h>
59#include <visp3/sensor/vpRealSense2.h>
60#include <visp3/visual_features/vpFeatureBuilder.h>
61#include <visp3/visual_features/vpFeatureMomentAreaNormalized.h>
62#include <visp3/visual_features/vpFeatureMomentGravityCenterNormalized.h>
63#include <visp3/visual_features/vpFeatureVanishingPoint.h>
64#include <visp3/vs/vpServo.h>
65#include <visp3/vs/vpServoDisplay.h>
66
67// Comment next line to disable sending commands to the robot
68#define CONTROL_UAV
69
70bool compareImagePoint(std::pair<size_t, vpImagePoint> p1, std::pair<size_t, vpImagePoint> p2)
71{
72 return (p1.second.get_v() < p2.second.get_v());
73};
74
91int main(int argc, char **argv)
92{
// Overview: parse command-line options, connect to a Pixhawk-based drone
// through MAVSDK (vpRobotMavsdk), grab grayscale images from a RealSense
// camera, detect an AprilTag, and run an image-based visual-servoing loop
// built on normalized image moments (gravity center xn/yn, area an) plus a
// vanishing-point feature; the computed velocity is sent to the drone.
// NOTE(review): this listing is a Doxygen extract — each code line below is
// prefixed with its original source line number, and several original lines
// are missing wherever those numbers skip. The missing spots are flagged
// inline; verify against the full example file before building.
 93 try {
 94 std::string opt_connecting_info = "udp://:192.168.30.111:14552";
 95 double tagSize = -1;
 96 double opt_distance_to_tag = -1;
 97 bool opt_has_distance_to_tag = false;
 98 int opt_display_fps = 10;
 99 bool opt_verbose = false;
 100
 101 int acq_fps = 30;
 102
 103 if (argc >= 3 && std::string(argv[1]) == "--tag-size") {
 104 tagSize = std::atof(argv[2]); // Tag size option is required
 105 if (tagSize <= 0) {
 106 std::cout << "Error : invalid tag size." << std::endl << "See " << argv[0] << " --help" << std::endl;
 107 return EXIT_FAILURE;
 108 }
 109 for (int i = 3; i < argc; i++) {
 110 if (std::string(argv[i]) == "--co" && i + 1 < argc) {
 111 opt_connecting_info = std::string(argv[i + 1]);
 112 i++;
 113 } else if (std::string(argv[i]) == "--distance-to-tag" && i + 1 < argc) {
 114 opt_distance_to_tag = std::atof(argv[i + 1]);
 115 if (opt_distance_to_tag <= 0) {
 116 std::cout << "Error : invalid distance to tag." << std::endl << "See " << argv[0] << " --help" << std::endl;
 117 return EXIT_FAILURE;
 118 }
 119 opt_has_distance_to_tag = true;
 120 i++;
 121
 122 } else if (std::string(argv[i]) == "--display-fps" && i + 1 < argc) {
 123 opt_display_fps = std::stoi(std::string(argv[i + 1]));
 124 i++;
 125 } else if (std::string(argv[i]) == "--verbose" || std::string(argv[i]) == "-v") {
 126 opt_verbose = true;
 127 } else {
 128 std::cout << "Error : unknown parameter " << argv[i] << std::endl
 129 << "See " << argv[0] << " --help" << std::endl;
 130 return EXIT_FAILURE;
 131 }
 132 }
 133 } else if (argc >= 2 && (std::string(argv[1]) == "--help" || std::string(argv[1]) == "-h")) {
 134 std::cout << "\nUsage:\n"
 135 << " " << argv[0]
 136 << " [--tag-size <tag size [m]>] [--co <connection information>] [--distance-to-tag <distance>]"
 137 << " [--display-fps <display fps>] [--verbose] [-v] [--help] [-h]\n"
 138 << std::endl
 139 << "Description:\n"
 140 << " --tag-size <size>\n"
 141 << " The size of the tag to detect in meters, required.\n\n"
 142 << " --co <connection information>\n"
 143 << " - UDP: udp://[host][:port]\n"
 144 << " - TCP: tcp://[host][:port]\n"
 145 << " - serial: serial://[path][:baudrate]\n"
 146 << " - Default: udp://192.168.30.111:14552).\n\n"
 147 << " --distance-to-tag <distance>\n"
 148 << " The desired distance to the tag in meters (default: 1 meter).\n\n"
 149 << " --display-fps <display_fps>\n"
 150 << " The desired fps rate for the video display (default: 10 fps).\n\n"
 151 << " --verbose, -v\n"
 152 << " Enables verbosity (drone information messages and velocity commands\n"
 153 << " are then displayed).\n\n"
 154 << " --help, -h\n"
 155 << " Print help message.\n"
 156 << std::endl;
 157 return EXIT_SUCCESS;
 158
 159 } else {
 160 std::cout << "Error : tag size parameter required." << std::endl << "See " << argv[0] << " --help" << std::endl;
 161 return EXIT_FAILURE;
 162 }
 163
 164 std::cout << std::endl
 165 << "WARNING:" << std::endl
 166 << " - This program does no sensing or avoiding of obstacles, " << std::endl
 167 << " the drone WILL collide with any objects in the way! Make sure the " << std::endl
 168 << " drone has approximately 3 meters of free space on all sides." << std::endl
 169 << " - The drone uses a forward-facing camera for Apriltag detection," << std::endl
 170 << " make sure the drone flies above a non-uniform flooring," << std::endl
 171 << " or its movement will be inacurate and dangerous !" << std::endl
 172 << std::endl;
 173
 174 // Connect to the drone
 175 vpRobotMavsdk drone(opt_connecting_info);
 176
 177 if (drone.isRunning()) {
 178 vpRealSense2 rs;
 179
 180 std::string product_line = rs.getProductLine();
 181 if (opt_verbose) {
 182 std::cout << "Product line: " << product_line << std::endl;
 183 }
 184
// T200 (tracking camera family) provides no color/depth streams usable here.
 185 if (product_line == "T200") {
 186 std::cout << "This example doesn't support T200 product line family !" << std::endl;
 187 return EXIT_SUCCESS;
 188 }
 189 rs2::config config;
 190
 191 config.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, acq_fps);
 192 config.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, acq_fps);
 193 config.enable_stream(RS2_STREAM_INFRARED, 640, 480, RS2_FORMAT_Y8, acq_fps);
 194
 195 rs.open(config);
 196 vpCameraParameters cam = rs.getCameraParameters(RS2_STREAM_COLOR);
 197
 198 if (opt_verbose) {
 199 cam.printParameters();
 200 }
 201
 202#ifdef CONTROL_UAV
 203 drone.doFlatTrim(); // Flat trim calibration
 204 drone.takeOff(); // Take off
 205#endif
 206
 207 vpImage<unsigned char> I(rs.getIntrinsics(RS2_STREAM_COLOR).height, rs.getIntrinsics(RS2_STREAM_COLOR).width);
 208
 209#if defined(VISP_HAVE_X11)
 210 vpDisplayX display;
 211#elif defined(VISP_HAVE_GTK)
 212 vpDisplayGTK display;
 213#elif defined(VISP_HAVE_GDI)
 214 vpDisplayGDI display;
 215#elif defined(HAVE_OPENCV_HIGHGUI)
 216 vpDisplayOpenCV display;
 217#endif
 218 int orig_displayX = 100;
 219 int orig_displayY = 100;
 220 display.init(I, orig_displayX, orig_displayY, "DRONE VIEW");
// NOTE(review): original lines 221-222 are missing from this extract
// (presumably a first vpDisplay::display(I)/vpDisplay::flush(I) pair).
 223 double time_since_last_display = vpTime::measureTimeMs();
 224
 225 vpPlot plotter(1, 700, 700, orig_displayX + static_cast<int>(I.getWidth()) + 20, orig_displayY,
 226 "Visual servoing tasks");
 227 unsigned int iter = 0;
 228
// NOTE(review): original lines 229-230 are missing from this extract —
// they should declare 'tagFamily' (presumably vpDetectorAprilTag::TAG_36h11);
// verify against the full example file.
 231 vpDetectorAprilTag detector(tagFamily); // The detector used to detect Apritags
 233 detector.setAprilTagQuadDecimate(4.0);
 234 detector.setAprilTagNbThreads(4);
 235 detector.setDisplayTag(true);
 236
 237 vpServo task; // Visual servoing task
 238
 239 // double lambda = 0.5;
 240 vpAdaptiveGain lambda = vpAdaptiveGain(1.5, 0.7, 30);
// NOTE(review): original lines 241-242 are missing here — presumably
// task.setServo(vpServo::EYEINHAND_L_cVe_eJe) and
// task.setInteractionMatrixType(vpServo::CURRENT); confirm in the full file.
 243 task.setLambda(lambda);
 244
 246 /*
 247 * In the following section, (c1) is an intermediate frame attached to the camera
 248 * that has axis aligned with the FLU body frame. The real camera frame is denoted (c).
 249 * The FLU body-frame of the drone denoted (e) is the one in which we need to convert
 250 * every velocity command computed by visual servoing.
 251 *
 252 * We can easily estimate the homogeneous matrix between (c1) and (c) where
 253 * in our case we have -10 degrees around X camera axis.
 254 * Then for the transformation between (e) and (c1) frames we can consider only translations.
 255 *
 256 * Using those matrices, we can in the end obtain the homogeneous matrix between (c) and (e) frames.
 257 * This homogeneous matrix is then used to compute the velocity twist matrix cVe.
 258 */
 259 vpRxyzVector c1_rxyz_c(vpMath::rad(-10.0), vpMath::rad(0), 0);
 260 vpRotationMatrix c1Rc(c1_rxyz_c); // Rotation between (c1) and (c)
 261 vpHomogeneousMatrix c1Mc(vpTranslationVector(), c1Rc); // Homogeneous matrix between (c1) and (c)
 262
 263 vpRotationMatrix c1Re{1, 0, 0, 0, 0, 1, 0, -1, 0}; // Rotation between (c1) and (e)
 264 vpTranslationVector c1te(0, -0.03, -0.07); // Translation between (c1) and (e)
 265 vpHomogeneousMatrix c1Me(c1te, c1Re); // Homogeneous matrix between (c1) and (e)
 266
 267 vpHomogeneousMatrix cMe = c1Mc.inverse() * c1Me; // Homogeneous matrix between (c) and (e)
 268
 269 vpVelocityTwistMatrix cVe(cMe);
 271 task.set_cVe(cVe);
 272
// Robot Jacobian: 6x4, because only 4 DOF are controlled. Rows 0,1,2 map the
// three body-frame translational velocities; row 5 maps the rotation about
// the body z axis (yaw). Rows 3 and 4 (roll/pitch rates) stay zero.
 273 vpMatrix eJe(6, 4, 0);
 274
 275 eJe[0][0] = 1;
 276 eJe[1][1] = 1;
 277 eJe[2][2] = 1;
 278 eJe[5][3] = 1;
 279
 280 // double Z_d = 1.; // Desired distance to the target
 281 double Z_d = (opt_has_distance_to_tag ? opt_distance_to_tag : 1.);
 282
 283 // Define the desired polygon corresponding to the AprilTag, CLOCKWISE
 284 double X[4] = {tagSize / 2., tagSize / 2., -tagSize / 2., -tagSize / 2.};
 285 double Y[4] = {tagSize / 2., -tagSize / 2., -tagSize / 2., tagSize / 2.};
 286 std::vector<vpPoint> vec_P, vec_P_d;
 287
 288 for (int i = 0; i < 4; i++) {
 289 vpPoint P_d(X[i], Y[i], 0);
 290 vpHomogeneousMatrix cdMo(0, 0, Z_d, 0, 0, 0);
 291 P_d.track(cdMo); // Project the desired corner using the desired camera pose cdMo
 292 vec_P_d.push_back(P_d);
 293 }
 294 vpMomentObject m_obj(3), m_obj_d(3);
 295 vpMomentDatabase mdb, mdb_d;
 296 vpMomentBasic mb_d; // Here only to get the desired area m00
 297 vpMomentGravityCenter mg, mg_d;
 298 vpMomentCentered mc, mc_d;
 299 vpMomentAreaNormalized man(0, Z_d), man_d(0, Z_d); // Declare normalized area updated below with m00
 300 vpMomentGravityCenterNormalized mgn, mgn_d; // Declare normalized gravity center
 301
 302 // Desired moments
 303 m_obj_d.setType(vpMomentObject::DENSE_POLYGON); // Consider the AprilTag as a polygon
 304 m_obj_d.fromVector(vec_P_d); // Initialize the object with the points coordinates
 305
 306 mb_d.linkTo(mdb_d); // Add basic moments to database
 307 mg_d.linkTo(mdb_d); // Add gravity center to database
 308 mc_d.linkTo(mdb_d); // Add centered moments to database
 309 man_d.linkTo(mdb_d); // Add area normalized to database
 310 mgn_d.linkTo(mdb_d); // Add gravity center normalized to database
 311 mdb_d.updateAll(m_obj_d); // All of the moments must be updated, not just an_d
 312 mg_d.compute(); // Compute gravity center moment
 313 mc_d.compute(); // Compute centered moments AFTER gravity center
 314
 315 double area = 0;
 316 if (m_obj_d.getType() == vpMomentObject::DISCRETE)
 317 area = mb_d.get(2, 0) + mb_d.get(0, 2);
 318 else
 319 area = mb_d.get(0, 0);
 320 // Update moment with the desired area
 321 man_d.setDesiredArea(area);
 322
 323 man_d.compute(); // Compute area normalized moment AFTER centered moments
 324 mgn_d.compute(); // Compute gravity center normalized moment AFTER area normalized
 325 // moment
 326
 327 // Desired plane
 328 double A = 0.0;
 329 double B = 0.0;
 330 double C = 1.0 / Z_d;
 331
 332 // Construct area normalized features
 333 vpFeatureMomentGravityCenterNormalized s_mgn(mdb, A, B, C), s_mgn_d(mdb_d, A, B, C);
 334 vpFeatureMomentAreaNormalized s_man(mdb, A, B, C), s_man_d(mdb_d, A, B, C);
 335 vpFeatureVanishingPoint s_vp, s_vp_d;
 336
 337 // Add the features
 338 task.addFeature(s_mgn, s_mgn_d);
 339 task.addFeature(s_man, s_man_d);
// NOTE(review): original line 340 is missing from this extract — the
// vanishing-point pair (s_vp, s_vp_d) is prepared below, so the full example
// presumably adds it here with task.addFeature(s_vp, s_vp_d,
// vpFeatureVanishingPoint::selectAtanOneOverRho()); verify before building.
 341
 342 plotter.initGraph(0, 4);
 343 plotter.setLegend(0, 0, "Xn"); // Distance from center on X axis feature
 344 plotter.setLegend(0, 1, "Yn"); // Distance from center on Y axis feature
 345 plotter.setLegend(0, 2, "an"); // Tag area feature
 346 plotter.setLegend(0, 3, "atan(1/rho)"); // Vanishing point feature
 347
 348 // Update desired gravity center normalized feature
 349 s_mgn_d.update(A, B, C);
 350 s_mgn_d.compute_interaction();
 351 // Update desired area normalized feature
 352 s_man_d.update(A, B, C);
 353 s_man_d.compute_interaction();
 354
 355 // Update desired vanishing point feature for the horizontal line
 356 s_vp_d.setAtanOneOverRho(0);
 357 s_vp_d.setAlpha(0);
 358
 359 bool condition;
 360 bool runLoop = true;
 361 bool vec_ip_has_been_sorted = false;
 362 bool send_velocities = false;
 363 std::vector<std::pair<size_t, vpImagePoint> > vec_ip_sorted;
 364
 365 //** Visual servoing loop **//
 366 while (drone.isRunning() && runLoop) {
 367
 368 double startTime = vpTime::measureTimeMs();
 369
 370 // drone.getGrayscaleImage(I);
 371 rs.acquire(I);
 372
 373 condition = (startTime - time_since_last_display) > 1000. / opt_display_fps ? true : false;
// 'condition' throttles all display/plot work to opt_display_fps so the
// servo loop itself can run at the acquisition rate.
 374 if (condition) {
// NOTE(review): original line 375 is missing from this extract (presumably
// vpDisplay::display(I)).
 376 time_since_last_display = vpTime::measureTimeMs();
 377 }
 378
 379 std::vector<vpHomogeneousMatrix> cMo_vec;
 380 detector.detect(I, tagSize, cam, cMo_vec); // Detect AprilTags in current image
 381 double t = vpTime::measureTimeMs() - startTime;
 382
 383 if (condition) {
 384 std::stringstream ss;
 385 ss << "Detection time: " << t << " ms";
 386 vpDisplay::displayText(I, 40, 20, ss.str(), vpColor::red);
 387 }
 388
 389 if (detector.getNbObjects() != 0) {
 390
 391 // Update current points used to compute the moments
 392 std::vector<vpImagePoint> vec_ip = detector.getPolygon(0);
 393 vec_P.clear();
 394 for (size_t i = 0; i < vec_ip.size(); i++) { // size = 4
 395 double x = 0, y = 0;
 396 vpPixelMeterConversion::convertPoint(cam, vec_ip[i], x, y);
 397 vpPoint P;
 398 P.set_x(x);
 399 P.set_y(y);
 400 vec_P.push_back(P);
 401 }
 402
 403 // Current moments
 404 m_obj.setType(vpMomentObject::DENSE_POLYGON); // Consider the AprilTag as a polygon
 405 m_obj.fromVector(vec_P); // Initialize the object with the points coordinates
 406
 407 mg.linkTo(mdb); // Add gravity center to database
 408 mc.linkTo(mdb); // Add centered moments to database
 409 man.linkTo(mdb); // Add area normalized to database
 410 mgn.linkTo(mdb); // Add gravity center normalized to database
 411 mdb.updateAll(m_obj); // All of the moments must be updated, not just an_d
 412 mg.compute(); // Compute gravity center moment
 413 mc.compute(); // Compute centered moments AFTER gravity center
 414 man.setDesiredArea(area); // Desired area was init at 0 (unknown at construction),
 415 // need to be updated here
 416 man.compute(); // Compute area normalized moment AFTER centered moment
 417 mgn.compute(); // Compute gravity center normalized moment AFTER area normalized
 418 // moment
 419
 420 s_mgn.update(A, B, C);
 421 s_mgn.compute_interaction();
 422 s_man.update(A, B, C);
 423 s_man.compute_interaction();
 424
 425 /* Sort points from their height in the image, and keep original indexes.
 426 This is done once, in order to be independent from the orientation of the tag
 427 when detecting vanishing points. */
 428 if (!vec_ip_has_been_sorted) {
 429 for (size_t i = 0; i < vec_ip.size(); i++) {
 430
 431 // Add the points and their corresponding index
 432 std::pair<size_t, vpImagePoint> index_pair = std::pair<size_t, vpImagePoint>(i, vec_ip[i]);
 433 vec_ip_sorted.push_back(index_pair);
 434 }
 435
 436 // Sort the points and indexes from the v value of the points
 437 std::sort(vec_ip_sorted.begin(), vec_ip_sorted.end(), compareImagePoint);
 438
 439 vec_ip_has_been_sorted = true;
 440 }
 441
 442 // Use the two highest points for the first line, and the two others for the second
 443 // line.
 444 vpFeatureBuilder::create(s_vp, cam, vec_ip[vec_ip_sorted[0].first], vec_ip[vec_ip_sorted[1].first],
 445 vec_ip[vec_ip_sorted[2].first], vec_ip[vec_ip_sorted[3].first],
// NOTE(review): this vpFeatureBuilder::create(...) call is truncated in the
// extract — its last argument and closing parenthesis (original line 446,
// presumably vpFeatureVanishingPoint::selectAtanOneOverRho());) are missing.
 447
 448 task.set_cVe(cVe);
 449 task.set_eJe(eJe);
 450
 451 // Compute the control law. Velocities are computed in the mobile robot reference
 452 // frame
 453 vpColVector ve = task.computeControlLaw();
 454 if (!send_velocities) {
// Until the operator left-clicks, compute but do not apply velocities.
 455 ve = 0;
 456 }
 457
 458 // Sending the control law to the drone
 459 if (opt_verbose) {
 460 std::cout << "ve: " << ve.t() << std::endl;
 461 }
 462
 463#ifdef CONTROL_UAV
 464 drone.setVelocity(ve);
 465#endif
 466
 467 if (condition) {
 468 for (size_t i = 0; i < 4; i++) {
 469 vpDisplay::displayCross(I, vec_ip[i], 15, vpColor::red, 1);
 470 std::stringstream ss;
 471 ss << i;
 472 vpDisplay::displayText(I, vec_ip[i] + vpImagePoint(15, 15), ss.str(), vpColor::green);
 473 }
 474
 475 // Display visual features
// NOTE(review): original line 476 (start of a vpDisplay::displayPolygon
// call) is missing from this extract; the next line is its orphaned
// continuation.
 477 3); // Current polygon used to compute a moment
 478 vpDisplay::displayCross(I, detector.getCog(0), 15, vpColor::green,
 479 3); // Current polygon used to compute a moment
 480 vpDisplay::displayLine(I, 0, static_cast<int>(cam.get_u0()), static_cast<int>(I.getHeight()) - 1,
 481 static_cast<int>(cam.get_u0()), vpColor::red,
 482 3); // Vertical line as desired x position
 483 vpDisplay::displayLine(I, static_cast<int>(cam.get_v0()), 0, static_cast<int>(cam.get_v0()),
 484 static_cast<int>(I.getWidth()) - 1, vpColor::red,
 485 3); // Horizontal line as desired y position
 486
 487 // Display lines corresponding to the vanishing point for the horizontal lines
 488 vpDisplay::displayLine(I, vec_ip[vec_ip_sorted[0].first], vec_ip[vec_ip_sorted[1].first], vpColor::red, 1,
 489 false);
 490 vpDisplay::displayLine(I, vec_ip[vec_ip_sorted[2].first], vec_ip[vec_ip_sorted[3].first], vpColor::red, 1,
 491 false);
 492 }
 493
 494 } else {
 495
 496 std::stringstream sserr;
 497 sserr << "Failed to detect an Apriltag, or detected multiple ones";
 498 if (condition) {
 499 vpDisplay::displayText(I, 120, 20, sserr.str(), vpColor::red);
// NOTE(review): original line 500 is missing from this extract (presumably
// vpDisplay::flush(I)).
 501 } else {
 502 std::cout << sserr.str() << std::endl;
 503 }
 504#ifdef CONTROL_UAV
 505 drone.stopMoving(); // In this case, we stop the drone
 506#endif
 507 }
 508
 509 if (condition) {
 510 {
 511 std::stringstream ss;
 512 ss << "Left click to " << (send_velocities ? "stop the robot" : "servo the robot")
 513 << ", right click to quit.";
 514 vpDisplay::displayText(I, 20, 20, ss.str(), vpColor::red);
 515 }
// NOTE(review): original lines 516-517 are missing from this extract
// (presumably vpDisplay::flush(I) and a blank line).
 518 plotter.plot(0, iter, task.getError());
 519 }
 520
// NOTE(review): original line 521 is missing — the declaration of 'button'
// (presumably vpMouseButton::vpMouseButtonType button;).
 522 if (vpDisplay::getClick(I, button, false)) {
 523 switch (button) {
// NOTE(review): the case labels (original lines 524 and 528, presumably
// vpMouseButton::button1 and vpMouseButton::button3) are missing from this
// extract: left click toggles velocity sending, right click lands and quits.
 525 send_velocities = !send_velocities;
 526 break;
 527
 529 drone.land();
 530 runLoop = false;
 531 break;
 532
 533 default:
 534 break;
 535 }
 536 }
 537
 538 double totalTime = vpTime::measureTimeMs() - startTime;
 539 std::stringstream sstime;
 540 sstime << "Total time: " << totalTime << " ms";
 541 if (condition) {
 542 vpDisplay::displayText(I, 80, 20, sstime.str(), vpColor::red);
// NOTE(review): original line 543 is missing from this extract (presumably
// vpDisplay::flush(I)).
 544 }
 545
 546 iter++;
// Pace the loop to the camera acquisition rate.
 547 vpTime::wait(startTime, 1000. / acq_fps);
 548 }
 549
 550 return EXIT_SUCCESS;
 551 } else {
 552 std::cout << "ERROR : failed to setup drone control." << std::endl;
 553 return EXIT_FAILURE;
 554 }
 555 } catch (const vpException &e) {
 556 std::cout << "Caught an exception: " << e << std::endl;
 557 return EXIT_FAILURE;
 558 }
 559}
560
561#else
562
// Fallback entry point used when ViSP was built without the features this
// example needs (MAVSDK, librealsense2, or C++17 support): print which
// prerequisite is missing and exit successfully so the build stays green.
int main()
{
#ifndef VISP_HAVE_MAVSDK
  // Fixed typo in the user-facing message: "rebuid" -> "rebuild".
  std::cout << "\nThis example requires mavsdk library. You should install it, configure and rebuild ViSP.\n"
            << std::endl;
#endif
#ifndef VISP_HAVE_REALSENSE2
  std::cout << "\nThis example requires librealsense2 library. You should install it, configure and rebuild ViSP.\n"
            << std::endl;
#endif
#if !(VISP_CXX_STANDARD >= VISP_CXX_STANDARD_17)
  std::cout
    << "\nThis example requires at least cxx17. You should enable cxx17 during ViSP configuration with cmake and "
       "rebuild ViSP.\n"
    << std::endl;
#endif
  return EXIT_SUCCESS;
}
581
582#endif // #if defined(VISP_HAVE_MAVSDK)
Adaptive gain computation.
Generic class defining intrinsic camera parameters.
Implementation of column vector and the associated operations.
vpRowVector t() const
static const vpColor red
Definition vpColor.h:211
static const vpColor green
Definition vpColor.h:214
@ TAG_36h11
AprilTag 36h11 pattern (recommended)
Display for windows using GDI (available on any windows 32 platform).
The vpDisplayGTK allows to display image using the GTK 3rd party library. Thus to enable this class G...
The vpDisplayOpenCV allows to display image using the OpenCV library. Thus to enable this class OpenC...
Use the X11 console to display images on unix-like OS. Thus to enable this class X11 should be instal...
Definition vpDisplayX.h:132
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void displayLine(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color, unsigned int thickness=1, bool segment=true)
static void displayCross(const vpImage< unsigned char > &I, const vpImagePoint &ip, unsigned int size, const vpColor &color, unsigned int thickness=1)
static void flush(const vpImage< unsigned char > &I)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
static void displayPolygon(const vpImage< unsigned char > &I, const std::vector< vpImagePoint > &vip, const vpColor &color, unsigned int thickness=1, bool closed=true)
error that can be emitted by ViSP classes.
Definition vpException.h:59
static void create(vpFeaturePoint &s, const vpCameraParameters &cam, const vpDot &d)
Functionality computation for normalized surface moment feature. Computes the interaction matrix asso...
Functionality computation for centered and normalized moment feature. Computes the interaction matrix...
static unsigned int selectAtanOneOverRho()
void setAlpha(double alpha)
Set vanishing point feature value.
void setAtanOneOverRho(double atan_one_over_rho)
Set vanishing point feature value.
Implementation of an homogeneous matrix and operations on such kind of matrices.
vpHomogeneousMatrix inverse() const
Class that defines a 2D point in an image. This class is useful for image processing and stores only ...
Definition of the vpImage class member functions.
Definition vpImage.h:135
unsigned int getWidth() const
Definition vpImage.h:242
unsigned int getHeight() const
Definition vpImage.h:184
static double rad(double deg)
Definition vpMath.h:116
Implementation of a matrix and operations on matrices.
Definition vpMatrix.h:152
Class handling the normalized surface moment that is invariant in scale and used to estimate depth.
This class defines the 2D basic moment . This class is a wrapper for vpMomentObject which allows to us...
const std::vector< double > & get() const
This class defines the double-indexed centered moment descriptor .
This class allows to register all vpMoments so they can access each other according to their dependen...
virtual void updateAll(vpMomentObject &object)
Class describing 2D normalized gravity center moment.
Class describing 2D gravity center moment.
Class for generic objects.
void linkTo(vpMomentDatabase &moments)
Definition vpMoment.cpp:98
static void convertPoint(const vpCameraParameters &cam, const double &u, const double &v, double &x, double &y)
This class enables real time drawing of 2D or 3D graphics. An instance of the class open a window whi...
Definition vpPlot.h:113
Class that defines a 3D point in the object frame and allows forward projection of a 3D point in the ...
Definition vpPoint.h:77
void set_x(double x)
Set the point x coordinate in the image plane.
Definition vpPoint.cpp:508
void set_y(double y)
Set the point y coordinate in the image plane.
Definition vpPoint.cpp:510
void acquire(vpImage< unsigned char > &grey, double *ts=NULL)
vpCameraParameters getCameraParameters(const rs2_stream &stream, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithDistortion, int index=-1) const
bool open(const rs2::config &cfg=rs2::config())
rs2_intrinsics getIntrinsics(const rs2_stream &stream, int index=-1) const
std::string getProductLine()
Implementation of a rotation matrix and operations on such kind of matrices.
Implementation of a rotation vector as Euler angle minimal representation.
void setInteractionMatrixType(const vpServoIteractionMatrixType &interactionMatrixType, const vpServoInversionType &interactionMatrixInversion=PSEUDO_INVERSE)
Definition vpServo.cpp:564
@ EYEINHAND_L_cVe_eJe
Definition vpServo.h:155
void set_cVe(const vpVelocityTwistMatrix &cVe_)
Definition vpServo.h:448
void setLambda(double c)
Definition vpServo.h:403
void set_eJe(const vpMatrix &eJe_)
Definition vpServo.h:506
void setServo(const vpServoType &servo_type)
Definition vpServo.cpp:210
vpColVector getError() const
Definition vpServo.h:276
vpColVector computeControlLaw()
Definition vpServo.cpp:930
@ CURRENT
Definition vpServo.h:179
void addFeature(vpBasicFeature &s, vpBasicFeature &s_star, unsigned int select=vpBasicFeature::FEATURE_ALL)
Definition vpServo.cpp:487
Class that consider the case of a translation vector.
VISP_EXPORT int wait(double t0, double t)
VISP_EXPORT double measureTimeMs()