Visual Servoing Platform version 3.6.0
servoAfma62DhalfCamVelocity.cpp
/****************************************************************************
 *
 * ViSP, open source Visual Servoing Platform software.
 * Copyright (C) 2005 - 2023 by Inria. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact Inria about acquiring a ViSP Professional
 * Edition License.
 *
 * See https://visp.inria.fr for more information.
 *
 * This software was developed at:
 * Inria Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 *
 * If you have questions regarding the use of this file, please contact
 * Inria at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Description:
 * tests the control law
 * eye-in-hand control
 * velocity computed in the camera frame
 *
 *****************************************************************************/

#include <cmath>  // std::fabs
#include <limits> // numeric_limits
#include <stdlib.h>
#include <visp3/core/vpConfig.h>
#include <visp3/core/vpDebug.h> // Debug trace
#if (defined(VISP_HAVE_AFMA6) && defined(VISP_HAVE_DC1394))

#include <visp3/core/vpDisplay.h>
#include <visp3/core/vpImage.h>
#include <visp3/core/vpImagePoint.h>
#include <visp3/gui/vpDisplayGTK.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/io/vpImageIo.h>
#include <visp3/sensor/vp1394TwoGrabber.h>

#include <visp3/core/vpCameraParameters.h>
#include <visp3/core/vpColVector.h>
#include <visp3/core/vpHomogeneousMatrix.h>
#include <visp3/core/vpLine.h>
#include <visp3/core/vpMath.h>
#include <visp3/core/vpPixelMeterConversion.h>
#include <visp3/me/vpMe.h>
#include <visp3/me/vpMeLine.h>
#include <visp3/vision/vpPose.h>
#include <visp3/visual_features/vpFeatureBuilder.h>
#include <visp3/visual_features/vpFeatureDepth.h>
#include <visp3/visual_features/vpFeatureLine.h>
#include <visp3/visual_features/vpFeaturePoint.h>
#include <visp3/visual_features/vpFeatureThetaU.h>
#include <visp3/visual_features/vpGenericFeature.h>
#include <visp3/vs/vpServo.h>

#include <visp3/robot/vpRobotAfma6.h>

// Exception
#include <visp3/core/vpException.h>
#include <visp3/vs/vpServoDisplay.h>

#include <visp3/blob/vpDot2.h>
#include <visp3/core/vpPoint.h>

int main()
{
  try {
    vpImage<unsigned char> I;

    // Firewire grabber attached to the Afma6 end-effector
    // (the video mode and framerate below are assumed values)
    vp1394TwoGrabber g;
    g.setVideoMode(vp1394TwoGrabber::vpVIDEO_MODE_640x480_MONO8);
    g.setFramerate(vp1394TwoGrabber::vpFRAMERATE_60);
    g.open(I);

    g.acquire(I);

#ifdef VISP_HAVE_X11
    vpDisplayX display(I, 100, 100, "Current image");
#elif defined(HAVE_OPENCV_HIGHGUI)
    vpDisplayOpenCV display(I, 100, 100, "Current image");
#elif defined(VISP_HAVE_GTK)
    vpDisplayGTK display(I, 100, 100, "Current image");
#endif

    // Show the first image so that the user can initialize the line trackers
    // by clicking in the window
    vpDisplay::display(I);
    vpDisplay::flush(I);

    vpServo task;

    vpRobotAfma6 robot;
    // robot.move("zero.pos") ;

    vpCameraParameters cam;
    // Update camera parameters
    robot.getCameraParameters(cam, I);
    std::cout << std::endl;
    std::cout << "-------------------------------------------------------" << std::endl;
    std::cout << " Test program for vpServo " << std::endl;
    std::cout << " Eye-in-hand task control, velocity computed in the camera frame" << std::endl;
    std::cout << " Use of the Afma6 robot " << std::endl;
    std::cout << " task : 2 1/2 D visual servoing on a square target" << std::endl;
    std::cout << "-------------------------------------------------------" << std::endl;
    std::cout << std::endl;

    const int nbline = 4;
    const int nbpoint = 4;

    vpTRACE("sets the desired position of the visual feature ");
    vpPoint pointd[nbpoint]; // position of the four corners
    vpPoint pointcd;         // position of the center of the square
    vpFeaturePoint pd;       // desired 2D point feature (center of the square)

    double L = 0.05;
    pointd[0].setWorldCoordinates(L, -L, 0);
    pointd[1].setWorldCoordinates(L, L, 0);
    pointd[2].setWorldCoordinates(-L, L, 0);
    pointd[3].setWorldCoordinates(-L, -L, 0);

    // The coordinates in the object frame of the point used as a feature, ie
    // the center of the square
    pointcd.setWorldCoordinates(0, 0, 0);

    // The desired homogeneous matrix: the square is centered 0.4 m in front
    // of the camera, rotated by 10 degrees around the optical axis
    vpHomogeneousMatrix cMod(0, 0, 0.4, 0, 0, vpMath::rad(10));

    pointd[0].project(cMod);
    pointd[1].project(cMod);
    pointd[2].project(cMod);
    pointd[3].project(cMod);

    pointcd.project(cMod);

    vpFeatureBuilder::create(pd, pointcd);

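    // Moving-edges line trackers: each call to initTracking() below asks the
    // user to click two points on the corresponding edge of the square in the
    // image window, then track() follows that edge from image to image.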
    vpTRACE("Initialization of the tracking");
    vpMeLine line[nbline];
    vpPoint point[nbpoint];
    int i;

    vpMe me;
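    // Moving-edges parameters: seek range around the previous edge position,
    // number of points sampled along each line, likelihood threshold type and
    // value (minimal contrast), and sample step between two tracked points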
    me.setRange(10);
    me.setPointsToTrack(100);
    me.setLikelihoodThresholdType(vpMe::NORMALIZED_THRESHOLD);
    me.setThreshold(15);
    me.setSampleStep(10);

    // Initialize the tracking. Define the four lines to track
    for (i = 0; i < nbline; i++) {
      line[i].setMe(&me);

      line[i].initTracking(I);
      line[i].track(I);
    }

    // Compute the position of the four corners. The goal is to
    // compute the pose
    vpImagePoint ip;
    for (i = 0; i < nbline; i++) {
      double x = 0, y = 0;

      if (!vpMeLine::intersection(line[i % nbline], line[(i + 1) % nbline], ip)) {
        return EXIT_FAILURE;
      }

      // Convert the corner position from pixels to normalized image coordinates
      vpPixelMeterConversion::convertPoint(cam, ip.get_u(), ip.get_v(), x, y);

      point[i].set_x(x);
      point[i].set_y(y);
    }

    // Compute the pose cMo
    vpPose pose;
    pose.clearPoint();
    vpHomogeneousMatrix cMo; // estimated pose of the square in the camera frame

    point[0].setWorldCoordinates(L, -L, 0);
    point[1].setWorldCoordinates(L, L, 0);
    point[2].setWorldCoordinates(-L, L, 0);
    point[3].setWorldCoordinates(-L, -L, 0);

    for (i = 0; i < nbline; i++) {
      pose.addPoint(point[i]); // add the corner to the pose computation point list
    }

    // Pose by Dementhon or Lagrange provides an initialization of the non linear
    // virtual visual-servoing pose estimation
    pose.computePose(vpPose::DEMENTHON_LAGRANGE_VIRTUAL_VS, cMo);

    vpTRACE("sets the current position of the visual feature ");

    // The first two features are the x and y coordinates of the square center
    // in the image plane
    vpPoint pointc; // The current position of the center of the square
    double xc = (point[0].get_x() + point[2].get_x()) / 2;
    double yc = (point[0].get_y() + point[2].get_y()) / 2;
    pointc.set_x(xc);
    pointc.set_y(yc);
    vpFeaturePoint p; // current 2D point feature (center of the square)
    pointc.project(cMo);
    vpFeatureBuilder::create(p, pointc);

    // The second feature is the depth of the current square center relative
    // to the depth of the desired square center.
    vpFeatureDepth logZ;
    logZ.buildFrom(pointc.get_x(), pointc.get_y(), pointc.get_Z(), log(pointc.get_Z() / pointcd.get_Z()));

    // The last three features are the theta-u rotation between the current
    // pose and the desired pose.
    vpHomogeneousMatrix cdMc; // transformation from current to desired camera frame
    cdMc = cMod * cMo.inverse();
    vpFeatureThetaU tu(vpFeatureThetaU::cdRc); // assumed cdRc parametrization
    tu.buildFrom(cdMc);

    vpTRACE("define the task");
    vpTRACE("\t we want an eye-in-hand control law");
    vpTRACE("\t robot is controlled in the camera frame");
    task.setServo(vpServo::EYEINHAND_CAMERA);
    task.setInteractionMatrixType(vpServo::CURRENT, vpServo::PSEUDO_INVERSE);

    vpTRACE("\t we want to see a point on a point..");
    std::cout << std::endl;
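    // The stacked error vector has 6 components: the (x, y) coordinates of the
    // square center, log(Z/Z*) and the 3 theta-u rotation components between
    // the current and desired camera poses, i.e. a 2 1/2 D visual servoing scheme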
    task.addFeature(p, pd);
    task.addFeature(logZ);
    task.addFeature(tu);

    vpTRACE("\t set the gain");
    task.setLambda(0.2);

    vpTRACE("Display task information ");
    task.print();

    // Switch the robot to velocity control before entering the servo loop
    robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);

    unsigned int iter = 0;
    vpTRACE("\t loop");
    vpColVector v;
    double lambda_av = 0.05;
    double alpha = 0.05;
    double beta = 3;
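    // Parameters of the adaptive gain used in the loop below:
    //   lambda = alpha * exp(-beta * ||s - s*||^2) + lambda_av
    // The gain grows from lambda_av towards alpha + lambda_av as the error
    // vanishes, which speeds up the convergence near the goal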

    for (;;) {
      std::cout << "---------------------------------------------" << iter << std::endl;

      try {
        g.acquire(I);
        vpDisplay::display(I);

        pose.clearPoint();

        // Track the lines and find the current position of the corners
        for (i = 0; i < nbline; i++) {
          line[i].track(I);

          line[i].display(I, vpColor::green);

          double x = 0, y = 0;

          if (!vpMeLine::intersection(line[i % nbline], line[(i + 1) % nbline], ip)) {
            return EXIT_FAILURE;
          }

          // Convert the corner position from pixels to normalized image coordinates
          vpPixelMeterConversion::convertPoint(cam, ip.get_u(), ip.get_v(), x, y);

          point[i].set_x(x);
          point[i].set_y(y);

          pose.addPoint(point[i]);
        }

        // Compute the pose
        pose.computePose(vpPose::VIRTUAL_VS, cMo);

        // Update the first two features x and y (position of the square
        // center)
        xc = (point[0].get_x() + point[2].get_x()) / 2;
        yc = (point[0].get_y() + point[2].get_y()) / 2;
        pointc.set_x(xc);
        pointc.set_y(yc);
        pointc.project(cMo);
        vpFeatureBuilder::create(p, pointc);
        // Display the current and the desired position of the center of the
        // square, as well as the desired position of the four corners
        p.display(cam, I, vpColor::green);
        pd.display(cam, I, vpColor::red);
        for (i = 0; i < nbpoint; i++)
          pointd[i].display(I, cam, vpColor::red);

        // Update the second feature
        logZ.buildFrom(pointc.get_x(), pointc.get_y(), pointc.get_Z(), log(pointc.get_Z() / pointcd.get_Z()));

        // Update the last three features
        cdMc = cMod * cMo.inverse();
        tu.buildFrom(cdMc);

        // Adaptive gain
        double gain;
        {
          if (std::fabs(alpha) <= std::numeric_limits<double>::epsilon()) {
            gain = lambda_av;
          }
          else {
            gain = alpha * exp(-beta * (task.getError()).sumSquare()) + lambda_av;
          }
        }

        task.setLambda(gain);

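        // Compute the camera velocity v = -lambda * L^+ (s - s*) that makes
        // the feature error decrease exponentially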
        v = task.computeControlLaw();

        vpDisplay::flush(I);
        std::cout << v.sumSquare() << std::endl;
        if (iter == 0)
          vpDisplay::getClick(I); // pause for a user click before the robot starts moving
        if (v.sumSquare() > 0.5) {
          // Safety check: if the computed velocity is too large, send a zero
          // velocity, stop the motion and wait for a click
          v = 0;
          robot.setVelocity(vpRobot::CAMERA_FRAME, v);
          robot.stopMotion();
          vpDisplay::getClick(I);
        }

        // Send the computed velocity, expressed in the camera frame, to the robot
        robot.setVelocity(vpRobot::CAMERA_FRAME, v);

      }
      catch (...) {
        // On any failure (lost tracking, pose computation error, ...) stop the robot
        v = 0;
        robot.setVelocity(vpRobot::CAMERA_FRAME, v);
        robot.stopMotion();
        exit(1);
      }

      vpTRACE("\t\t || s - s* ||^2 = %f ", (task.getError()).sumSquare());
      iter++;
    }

    vpTRACE("Display task information ");
    task.print();
    return EXIT_SUCCESS;
  }
  catch (const vpException &e) {
    std::cout << "Test failed with exception: " << e << std::endl;
    return EXIT_FAILURE;
  }
}

#else
int main()
{
  std::cout << "You do not have an afma6 robot connected to your computer..." << std::endl;
  return EXIT_SUCCESS;
}

#endif