Visual Servoing Platform version 3.6.0
Loading...
Searching...
No Matches
vpMbGenericTracker.cpp
1/****************************************************************************
2 *
3 * ViSP, open source Visual Servoing Platform software.
4 * Copyright (C) 2005 - 2023 by Inria. All rights reserved.
5 *
6 * This software is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 * See the file LICENSE.txt at the root directory of this source
11 * distribution for additional information about the GNU GPL.
12 *
13 * For using ViSP with software that can not be combined with the GNU
14 * GPL, please contact Inria about acquiring a ViSP Professional
15 * Edition License.
16 *
17 * See https://visp.inria.fr for more information.
18 *
19 * This software was developed at:
20 * Inria Rennes - Bretagne Atlantique
21 * Campus Universitaire de Beaulieu
22 * 35042 Rennes Cedex
23 * France
24 *
25 * If you have questions regarding the use of this file, please contact
26 * Inria at visp@inria.fr
27 *
28 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
29 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
30 *
31 * Description:
32 * Generic model-based tracker.
33 *
34*****************************************************************************/
35
36#include <visp3/mbt/vpMbGenericTracker.h>
37
38#include <visp3/core/vpDisplay.h>
39#include <visp3/core/vpExponentialMap.h>
40#include <visp3/core/vpTrackingException.h>
41#include <visp3/core/vpIoTools.h>
42#include <visp3/mbt/vpMbtXmlGenericParser.h>
43
44#ifdef VISP_HAVE_NLOHMANN_JSON
45#include <nlohmann/json.hpp>
46using json = nlohmann::json;
47#endif
48
50 : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
51 m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
52 m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
53{
54 m_mapOfTrackers["Camera"] = new TrackerWrapper(EDGE_TRACKER);
55
56 // Add default camera transformation matrix
58
59 // Add default ponderation between each feature type
61
62#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
64#endif
65
68}
69
/*!
  Construct a generic tracker managing \e nbCameras cameras, each one using the
  same feature combination \e trackerType.

  With one camera the tracker is named "Camera"; with several cameras they are
  named "Camera1" ... "CameraN" and the reference camera is the first one.

  \param nbCameras : Number of cameras to consider (must be > 0).
  \param trackerType : Bitmask of feature types (edge / KLT / depth) used by
  every camera.

  \exception vpTrackingException::fatalError : If \e nbCameras is 0.
*/
vpMbGenericTracker::vpMbGenericTracker(unsigned int nbCameras, int trackerType)
  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
  m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
{
  if (nbCameras == 0) {
    throw vpException(vpTrackingException::fatalError, "Cannot use no camera!");
  }
  else if (nbCameras == 1) {
    m_mapOfTrackers["Camera"] = new TrackerWrapper(trackerType);

    // Add default camera transformation matrix
    // NOTE(review): extraction gap — the statement inserting the identity
    // transformation into m_mapOfCameraTransformationMatrix was dropped here;
    // confirm against the upstream source.
  }
  else {
    for (unsigned int i = 1; i <= nbCameras; i++) {
      std::stringstream ss;
      ss << "Camera" << i;
      m_mapOfTrackers[ss.str()] = new TrackerWrapper(trackerType);

      // Add default camera transformation matrix
      // NOTE(review): extraction gap — per-camera transformation insertion
      // missing here; confirm against the upstream source.
    }

    // Set by default the reference camera to the first one
    // NOTE(review): extraction gap — assignment of m_referenceCameraName
    // missing here; confirm against the upstream source.
  }

  // Add default ponderation between each feature type
  // NOTE(review): extraction gap — initialization of m_mapOfFeatureFactors
  // (including the KLT factor in the conditional block below) was dropped.

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
#endif

}
108
/*!
  Construct a generic tracker with one camera per entry of \e trackerTypes.

  With one entry the camera is named "Camera"; with several entries they are
  named "Camera1" ... "CameraN" (camera i uses trackerTypes[i-1]) and the
  reference camera is the first one in map order.

  \param trackerTypes : One feature-type bitmask per camera (must not be empty).

  \exception vpException::badValue : If \e trackerTypes is empty.
*/
vpMbGenericTracker::vpMbGenericTracker(const std::vector<int> &trackerTypes)
  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
  m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
{
  if (trackerTypes.empty()) {
    throw vpException(vpException::badValue, "There is no camera!");
  }

  if (trackerTypes.size() == 1) {
    m_mapOfTrackers["Camera"] = new TrackerWrapper(trackerTypes[0]);

    // Add default camera transformation matrix
    // NOTE(review): extraction gap — the insertion into
    // m_mapOfCameraTransformationMatrix was dropped here; confirm upstream.
  }
  else {
    for (size_t i = 1; i <= trackerTypes.size(); i++) {
      std::stringstream ss;
      ss << "Camera" << i;
      m_mapOfTrackers[ss.str()] = new TrackerWrapper(trackerTypes[i - 1]);

      // Add default camera transformation matrix
      // NOTE(review): extraction gap — per-camera transformation insertion
      // missing here; confirm upstream.
    }

    // Set by default the reference camera to the first one
    m_referenceCameraName = m_mapOfTrackers.begin()->first;
  }

  // Add default ponderation between each feature type
  // NOTE(review): extraction gap — initialization of m_mapOfFeatureFactors
  // (including the KLT factor in the conditional block below) was dropped.

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
#endif

}
148
/*!
  Construct a generic tracker with explicit camera names: camera
  \e cameraNames[i] uses feature combination \e trackerTypes[i].
  The reference camera defaults to the first camera in map order.

  \param cameraNames : Name of each camera (must not be empty).
  \param trackerTypes : One feature-type bitmask per camera; must have the
  same size as \e cameraNames.
*/
vpMbGenericTracker::vpMbGenericTracker(const std::vector<std::string> &cameraNames,
                                       const std::vector<int> &trackerTypes)
  : m_error(), m_L(), m_mapOfCameraTransformationMatrix(), m_mapOfFeatureFactors(), m_mapOfTrackers(),
  m_percentageGdPt(0.4), m_referenceCameraName("Camera"), m_thresholdOutlier(0.5), m_w(), m_weightedError(),
  m_nb_feat_edge(0), m_nb_feat_klt(0), m_nb_feat_depthNormal(0), m_nb_feat_depthDense(0)
{
  if (cameraNames.size() != trackerTypes.size() || cameraNames.empty()) {
    // NOTE(review): extraction gap — the `throw vpException(...)` line that
    // begins this statement was dropped; only its message argument remains.
    // Confirm against the upstream source.
                      "cameraNames.size() != trackerTypes.size() || cameraNames.empty()");
  }

  for (size_t i = 0; i < cameraNames.size(); i++) {
    m_mapOfTrackers[cameraNames[i]] = new TrackerWrapper(trackerTypes[i]);

    // Add default camera transformation matrix
    // NOTE(review): extraction gap — per-camera transformation insertion
    // missing here; confirm upstream.
  }

  // Set by default the reference camera to the first one
  m_referenceCameraName = m_mapOfTrackers.begin()->first;

  // Add default ponderation between each feature type
  // NOTE(review): extraction gap — initialization of m_mapOfFeatureFactors
  // (including the KLT factor in the conditional block below) was dropped.

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
#endif

}
180
182{
183 for (std::map<std::string, TrackerWrapper *>::iterator it = m_mapOfTrackers.begin(); it != m_mapOfTrackers.end();
184 ++it) {
185 delete it->second;
186 it->second = NULL;
187 }
188}
189
208 const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam)
209{
210 double rawTotalProjectionError = 0.0;
211 unsigned int nbTotalFeaturesUsed = 0;
212
213 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
214 it != m_mapOfTrackers.end(); ++it) {
215 TrackerWrapper *tracker = it->second;
216
217 unsigned int nbFeaturesUsed = 0;
218 double curProjError = tracker->computeProjectionErrorImpl(I, cMo, cam, nbFeaturesUsed);
219
220 if (nbFeaturesUsed > 0) {
221 nbTotalFeaturesUsed += nbFeaturesUsed;
222 rawTotalProjectionError += curProjError;
223 }
224 }
225
226 if (nbTotalFeaturesUsed > 0) {
227 return vpMath::deg(rawTotalProjectionError / (double)nbTotalFeaturesUsed);
228 }
229
230 return 90.0;
231}
232
251 const vpHomogeneousMatrix &_cMo,
252 const vpCameraParameters &_cam)
253{
255 vpImageConvert::convert(I_color, I); // FS: Shoudn't we use here m_I that was converted in track() ?
256
257 return computeCurrentProjectionError(I, _cMo, _cam);
258}
259
261{
262 if (computeProjError) {
263 double rawTotalProjectionError = 0.0;
264 unsigned int nbTotalFeaturesUsed = 0;
265
266 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
267 it != m_mapOfTrackers.end(); ++it) {
268 TrackerWrapper *tracker = it->second;
269
270 double curProjError = tracker->getProjectionError();
271 unsigned int nbFeaturesUsed = tracker->nbFeaturesForProjErrorComputation;
272
273 if (nbFeaturesUsed > 0) {
274 nbTotalFeaturesUsed += nbFeaturesUsed;
275 rawTotalProjectionError += (vpMath::rad(curProjError) * nbFeaturesUsed);
276 }
277 }
278
279 if (nbTotalFeaturesUsed > 0) {
280 projectionError = vpMath::deg(rawTotalProjectionError / (double)nbTotalFeaturesUsed);
281 }
282 else {
283 projectionError = 90.0;
284 }
285 }
286 else {
287 projectionError = 90.0;
288 }
289}
290
/*!
  Run the virtual visual servoing (VVS) non-linear pose optimization over all
  cameras: stack per-tracker residuals/interaction matrices, iterate robust
  re-weighted Gauss-Newton/Levenberg-Marquardt updates until the residual norm
  stabilizes (m_stopCriteriaEpsilon) or m_maxIter is reached, then update the
  per-feature counts and the covariance matrix.

  \param mapOfImages : Map of grayscale images, one per camera name.

  \exception vpTrackingException::notEnoughPointError : If fewer than 4
  feature rows are available after initialization.
*/
void vpMbGenericTracker::computeVVS(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages)
{
  computeVVSInit(mapOfImages);

  // At least 4 rows are required to constrain the 6-dof pose update
  if (m_error.getRows() < 4) {
    throw vpTrackingException(vpTrackingException::notEnoughPointError, "Error: not enough features");
  }

  double normRes = 0;
  double normRes_1 = -1;
  unsigned int iter = 0;

  vpMatrix LTL;
  vpColVector LTR, v;
  vpColVector error_prev;

  double mu = m_initialMu;
  vpHomogeneousMatrix cMo_prev;

  bool isoJoIdentity = m_isoJoIdentity; // Backup since it can be modified if L is not full rank
  if (isoJoIdentity)
    oJo.eye();

  // Covariance
  vpColVector W_true(m_error.getRows());
  vpMatrix L_true, LVJ_true;

  // Create the map of VelocityTwistMatrices
  std::map<std::string, vpVelocityTwistMatrix> mapOfVelocityTwist;
  for (std::map<std::string, vpHomogeneousMatrix>::const_iterator it = m_mapOfCameraTransformationMatrix.begin();
       it != m_mapOfCameraTransformationMatrix.end(); ++it) {
    // NOTE(review): extraction gap — the declaration of `cVo` was dropped
    // from this loop body; confirm against the upstream source.
    cVo.buildFrom(it->second);
    mapOfVelocityTwist[it->first] = cVo;
  }

  // Per-feature-type ponderation factors applied on top of the robust weights
  double factorEdge = m_mapOfFeatureFactors[EDGE_TRACKER];
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  double factorKlt = m_mapOfFeatureFactors[KLT_TRACKER];
#endif
  double factorDepth = m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER];
  double factorDepthDense = m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER];

  m_nb_feat_edge = 0;
  m_nb_feat_klt = 0;
  // NOTE(review): extraction gap — the resets of m_nb_feat_depthNormal and
  // m_nb_feat_depthDense were presumably dropped here; confirm upstream.

  while (std::fabs(normRes_1 - normRes) > m_stopCriteriaEpsilon && (iter < m_maxIter)) {
    computeVVSInteractionMatrixAndResidu(mapOfImages, mapOfVelocityTwist);

    bool reStartFromLastIncrement = false;
    computeVVSCheckLevenbergMarquardt(iter, m_error, error_prev, cMo_prev, mu, reStartFromLastIncrement);
    if (reStartFromLastIncrement) {
      // LM step rejected: roll every sub-tracker back to the previous pose
      for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
           it != m_mapOfTrackers.end(); ++it) {
        TrackerWrapper *tracker = it->second;

        tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * cMo_prev;

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
        vpHomogeneousMatrix c_curr_tTc_curr0 =
          m_mapOfCameraTransformationMatrix[it->first] * cMo_prev * tracker->c0Mo.inverse();
        tracker->ctTc0 = c_curr_tTc_curr0;
#endif
      }
    }

    if (!reStartFromLastIncrement) {
      // NOTE(review): extraction gap — a statement (likely the robust-weight
      // computation call) was dropped here; confirm upstream.

      if (computeCovariance) {
        L_true = m_L;
        if (!isoJoIdentity) {
          // NOTE(review): extraction gap — the declaration of `cVo` was
          // dropped here; confirm upstream.
          cVo.buildFrom(m_cMo);
          LVJ_true = (m_L * (cVo * oJo));
        }
      }

      if (iter == 0) {
        // If all the 6 dof should be estimated, we check if the interaction
        // matrix is full rank. If not we remove automatically the dof that
        // cannot be estimated. This is particularly useful when considering
        // circles (rank 5) and cylinders (rank 4)
        if (isoJoIdentity) {
          cVo.buildFrom(m_cMo);

          vpMatrix K; // kernel
          unsigned int rank = (m_L * cVo).kernel(K);
          if (rank == 0) {
            throw vpException(vpException::fatalError, "Rank=0, cannot estimate the pose !");
          }

          if (rank != 6) {
            vpMatrix I; // Identity
            I.eye(6);
            oJo = I - K.AtA();
            isoJoIdentity = false;
          }
        }
      }

      // Weighting
      double num = 0;
      double den = 0;

      // Apply robust weight * per-type factor to each stacked residual row
      // and the matching interaction-matrix row, tracker by tracker.
      unsigned int start_index = 0;
      for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
           it != m_mapOfTrackers.end(); ++it) {
        TrackerWrapper *tracker = it->second;

        if (tracker->m_trackerType & EDGE_TRACKER) {
          for (unsigned int i = 0; i < tracker->m_error_edge.getRows(); i++) {
            double wi = tracker->m_w_edge[i] * tracker->m_factor[i] * factorEdge;
            W_true[start_index + i] = wi;
            m_weightedError[start_index + i] = wi * m_error[start_index + i];

            num += wi * vpMath::sqr(m_error[start_index + i]);
            den += wi;

            for (unsigned int j = 0; j < m_L.getCols(); j++) {
              m_L[start_index + i][j] *= wi;
            }
          }

          start_index += tracker->m_error_edge.getRows();
        }

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
        if (tracker->m_trackerType & KLT_TRACKER) {
          for (unsigned int i = 0; i < tracker->m_error_klt.getRows(); i++) {
            double wi = tracker->m_w_klt[i] * factorKlt;
            W_true[start_index + i] = wi;
            m_weightedError[start_index + i] = wi * m_error[start_index + i];

            num += wi * vpMath::sqr(m_error[start_index + i]);
            den += wi;

            for (unsigned int j = 0; j < m_L.getCols(); j++) {
              m_L[start_index + i][j] *= wi;
            }
          }

          start_index += tracker->m_error_klt.getRows();
        }
#endif

        if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
          for (unsigned int i = 0; i < tracker->m_error_depthNormal.getRows(); i++) {
            double wi = tracker->m_w_depthNormal[i] * factorDepth;
            W_true[start_index + i] = wi;
            m_weightedError[start_index + i] = wi * m_error[start_index + i];

            num += wi * vpMath::sqr(m_error[start_index + i]);
            den += wi;

            for (unsigned int j = 0; j < m_L.getCols(); j++) {
              m_L[start_index + i][j] *= wi;
            }
          }

          start_index += tracker->m_error_depthNormal.getRows();
        }

        if (tracker->m_trackerType & DEPTH_DENSE_TRACKER) {
          for (unsigned int i = 0; i < tracker->m_error_depthDense.getRows(); i++) {
            double wi = tracker->m_w_depthDense[i] * factorDepthDense;
            W_true[start_index + i] = wi;
            m_weightedError[start_index + i] = wi * m_error[start_index + i];

            num += wi * vpMath::sqr(m_error[start_index + i]);
            den += wi;

            for (unsigned int j = 0; j < m_L.getCols(); j++) {
              m_L[start_index + i][j] *= wi;
            }
          }

          start_index += tracker->m_error_depthDense.getRows();
        }
      }

      normRes_1 = normRes;
      normRes = sqrt(num / den);

      computeVVSPoseEstimation(isoJoIdentity, iter, m_L, LTL, m_weightedError, m_error, error_prev, LTR, mu, v);

      cMo_prev = m_cMo;

      // NOTE(review): extraction gap — the pose update applying the computed
      // velocity `v` to m_cMo was dropped here; confirm upstream.

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
      for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
           it != m_mapOfTrackers.end(); ++it) {
        TrackerWrapper *tracker = it->second;

        vpHomogeneousMatrix c_curr_tTc_curr0 =
          m_mapOfCameraTransformationMatrix[it->first] * m_cMo * tracker->c0Mo.inverse();
        tracker->ctTc0 = c_curr_tTc_curr0;
      }
#endif

      // Update cMo
      for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
           it != m_mapOfTrackers.end(); ++it) {
        TrackerWrapper *tracker = it->second;
        tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * m_cMo;
      }
    }

    iter++;
  }

  // Update features number
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;
    if (tracker->m_trackerType & EDGE_TRACKER) {
      m_nb_feat_edge += tracker->m_error_edge.size();
    }
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
    if (tracker->m_trackerType & KLT_TRACKER) {
      m_nb_feat_klt += tracker->m_error_klt.size();
    }
#endif
    if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
      m_nb_feat_depthNormal += tracker->m_error_depthNormal.size();
    }
    if (tracker->m_trackerType & DEPTH_DENSE_TRACKER) {
      m_nb_feat_depthDense += tracker->m_error_depthDense.size();
    }
  }

  computeCovarianceMatrixVVS(isoJoIdentity, W_true, cMo_prev, L_true, LVJ_true, m_error);

  // Refresh the moving-edge weights of every edge-based sub-tracker
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;

    if (tracker->m_trackerType & EDGE_TRACKER) {
      tracker->updateMovingEdgeWeights();
    }
  }
}
537
539{
540 throw vpException(vpException::fatalError, "vpMbGenericTracker::computeVVSInit() should not be called!");
541}
542
543void vpMbGenericTracker::computeVVSInit(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages)
544{
545 unsigned int nbFeatures = 0;
546
547 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
548 it != m_mapOfTrackers.end(); ++it) {
549 TrackerWrapper *tracker = it->second;
550 tracker->computeVVSInit(mapOfImages[it->first]);
551
552 nbFeatures += tracker->m_error.getRows();
553 }
554
555 m_L.resize(nbFeatures, 6, false, false);
556 m_error.resize(nbFeatures, false);
557
558 m_weightedError.resize(nbFeatures, false);
559 m_w.resize(nbFeatures, false);
560 m_w = 1;
561}
562
564{
565 throw vpException(vpException::fatalError, "vpMbGenericTracker::"
566 "computeVVSInteractionMatrixAndR"
567 "esidu() should not be called");
568}
569
571 std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
572 std::map<std::string, vpVelocityTwistMatrix> &mapOfVelocityTwist)
573{
574 unsigned int start_index = 0;
575
576 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
577 it != m_mapOfTrackers.end(); ++it) {
578 TrackerWrapper *tracker = it->second;
579
580 tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * m_cMo;
581#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
582 vpHomogeneousMatrix c_curr_tTc_curr0 =
583 m_mapOfCameraTransformationMatrix[it->first] * m_cMo * tracker->c0Mo.inverse();
584 tracker->ctTc0 = c_curr_tTc_curr0;
585#endif
586
587 tracker->computeVVSInteractionMatrixAndResidu(mapOfImages[it->first]);
588
589 m_L.insert(tracker->m_L * mapOfVelocityTwist[it->first], start_index, 0);
590 m_error.insert(start_index, tracker->m_error);
591
592 start_index += tracker->m_error.getRows();
593 }
594}
595
597{
598 unsigned int start_index = 0;
599
600 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
601 it != m_mapOfTrackers.end(); ++it) {
602 TrackerWrapper *tracker = it->second;
603 tracker->computeVVSWeights();
604
605 m_w.insert(start_index, tracker->m_w);
606 start_index += tracker->m_w.getRows();
607 }
608}
609
624 const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
625 bool displayFullModel)
626{
627 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
628 if (it != m_mapOfTrackers.end()) {
629 TrackerWrapper *tracker = it->second;
630 tracker->display(I, cMo, cam, col, thickness, displayFullModel);
631 }
632 else {
633 std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
634 }
635}
636
651 const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
652 bool displayFullModel)
653{
654 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
655 if (it != m_mapOfTrackers.end()) {
656 TrackerWrapper *tracker = it->second;
657 tracker->display(I, cMo, cam, col, thickness, displayFullModel);
658 }
659 else {
660 std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
661 }
662}
663
681 const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo,
682 const vpCameraParameters &cam1, const vpCameraParameters &cam2, const vpColor &color,
683 unsigned int thickness, bool displayFullModel)
684{
685 if (m_mapOfTrackers.size() == 2) {
686 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
687 it->second->display(I1, c1Mo, cam1, color, thickness, displayFullModel);
688 ++it;
689
690 it->second->display(I2, c2Mo, cam2, color, thickness, displayFullModel);
691 }
692 else {
693 std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
694 << std::endl;
695 }
696}
697
715 const vpHomogeneousMatrix &c2Mo, const vpCameraParameters &cam1,
716 const vpCameraParameters &cam2, const vpColor &color, unsigned int thickness,
717 bool displayFullModel)
718{
719 if (m_mapOfTrackers.size() == 2) {
720 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
721 it->second->display(I1, c1Mo, cam1, color, thickness, displayFullModel);
722 ++it;
723
724 it->second->display(I2, c2Mo, cam2, color, thickness, displayFullModel);
725 }
726 else {
727 std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
728 << std::endl;
729 }
730}
731
743void vpMbGenericTracker::display(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
744 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
745 const std::map<std::string, vpCameraParameters> &mapOfCameraParameters,
746 const vpColor &col, unsigned int thickness, bool displayFullModel)
747{
748 // Display only for the given images
749 for (std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img = mapOfImages.begin();
750 it_img != mapOfImages.end(); ++it_img) {
751 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it_img->first);
752 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(it_img->first);
753 std::map<std::string, vpCameraParameters>::const_iterator it_cam = mapOfCameraParameters.find(it_img->first);
754
755 if (it_tracker != m_mapOfTrackers.end() && it_camPose != mapOfCameraPoses.end() &&
756 it_cam != mapOfCameraParameters.end()) {
757 TrackerWrapper *tracker = it_tracker->second;
758 tracker->display(*it_img->second, it_camPose->second, it_cam->second, col, thickness, displayFullModel);
759 }
760 else {
761 std::cerr << "Missing elements for image:" << it_img->first << "!" << std::endl;
762 }
763 }
764}
765
777void vpMbGenericTracker::display(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfImages,
778 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
779 const std::map<std::string, vpCameraParameters> &mapOfCameraParameters,
780 const vpColor &col, unsigned int thickness, bool displayFullModel)
781{
782 // Display only for the given images
783 for (std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfImages.begin();
784 it_img != mapOfImages.end(); ++it_img) {
785 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it_img->first);
786 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(it_img->first);
787 std::map<std::string, vpCameraParameters>::const_iterator it_cam = mapOfCameraParameters.find(it_img->first);
788
789 if (it_tracker != m_mapOfTrackers.end() && it_camPose != mapOfCameraPoses.end() &&
790 it_cam != mapOfCameraParameters.end()) {
791 TrackerWrapper *tracker = it_tracker->second;
792 tracker->display(*it_img->second, it_camPose->second, it_cam->second, col, thickness, displayFullModel);
793 }
794 else {
795 std::cerr << "Missing elements for image:" << it_img->first << "!" << std::endl;
796 }
797 }
798}
799
805std::vector<std::string> vpMbGenericTracker::getCameraNames() const
806{
807 std::vector<std::string> cameraNames;
808
809 for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
810 it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
811 cameraNames.push_back(it_tracker->first);
812 }
813
814 return cameraNames;
815}
816
821
831{
832 if (m_mapOfTrackers.size() == 2) {
833 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
834 it->second->getCameraParameters(cam1);
835 ++it;
836
837 it->second->getCameraParameters(cam2);
838 }
839 else {
840 std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
841 << std::endl;
842 }
843}
844
/*!
  Get the camera parameters of every managed camera.

  \param mapOfCameraParameters : Output map, cleared then filled with one
  vpCameraParameters entry per camera name.
*/
void vpMbGenericTracker::getCameraParameters(std::map<std::string, vpCameraParameters> &mapOfCameraParameters) const
{
  // Clear the input map
  mapOfCameraParameters.clear();

  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    // NOTE(review): extraction gap — the declaration `vpCameraParameters cam_;`
    // was dropped from this loop body; confirm against the upstream source.
    it->second->getCameraParameters(cam_);
    mapOfCameraParameters[it->first] = cam_;
  }
}
862
869std::map<std::string, int> vpMbGenericTracker::getCameraTrackerTypes() const
870{
871 std::map<std::string, int> trackingTypes;
872
873 TrackerWrapper *traker;
874 for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
875 it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
876 traker = it_tracker->second;
877 trackingTypes[it_tracker->first] = traker->getTrackerType();
878 }
879
880 return trackingTypes;
881}
882
891void vpMbGenericTracker::getClipping(unsigned int &clippingFlag1, unsigned int &clippingFlag2) const
892{
893 if (m_mapOfTrackers.size() == 2) {
894 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
895 clippingFlag1 = it->second->getClipping();
896 ++it;
897
898 clippingFlag2 = it->second->getClipping();
899 }
900 else {
901 std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
902 << std::endl;
903 }
904}
905
911void vpMbGenericTracker::getClipping(std::map<std::string, unsigned int> &mapOfClippingFlags) const
912{
913 mapOfClippingFlags.clear();
914
915 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
916 it != m_mapOfTrackers.end(); ++it) {
917 TrackerWrapper *tracker = it->second;
918 mapOfClippingFlags[it->first] = tracker->getClipping();
919 }
920}
921
928{
929 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
930 if (it != m_mapOfTrackers.end()) {
931 return it->second->getFaces();
932 }
933
934 std::cerr << "The reference camera: " << m_referenceCameraName << " cannot be found!" << std::endl;
935 return faces;
936}
937
944{
945 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
946 if (it != m_mapOfTrackers.end()) {
947 return it->second->getFaces();
948 }
949
950 std::cerr << "The camera: " << cameraName << " cannot be found!" << std::endl;
951 return faces;
952}
953
954#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
958std::list<vpMbtDistanceCircle *> &vpMbGenericTracker::getFeaturesCircle()
959{
960 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
961 if (it != m_mapOfTrackers.end()) {
962 TrackerWrapper *tracker = it->second;
963 return tracker->getFeaturesCircle();
964 }
965 else {
966 throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
967 m_referenceCameraName.c_str());
968 }
969}
970
974std::list<vpMbtDistanceKltCylinder *> &vpMbGenericTracker::getFeaturesKltCylinder()
975{
976 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
977 if (it != m_mapOfTrackers.end()) {
978 TrackerWrapper *tracker = it->second;
979 return tracker->getFeaturesKltCylinder();
980 }
981 else {
982 throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
983 m_referenceCameraName.c_str());
984 }
985}
986
990std::list<vpMbtDistanceKltPoints *> &vpMbGenericTracker::getFeaturesKlt()
991{
992 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
993 if (it != m_mapOfTrackers.end()) {
994 TrackerWrapper *tracker = it->second;
995 return tracker->getFeaturesKlt();
996 }
997 else {
998 throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
999 m_referenceCameraName.c_str());
1000 }
1001}
1002#endif
1003
1030std::vector<std::vector<double> > vpMbGenericTracker::getFeaturesForDisplay()
1031{
1032 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1033
1034 if (it != m_mapOfTrackers.end()) {
1035 return it->second->getFeaturesForDisplay();
1036 }
1037 else {
1038 std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1039 }
1040
1041 return std::vector<std::vector<double> >();
1042}
1043
1069void vpMbGenericTracker::getFeaturesForDisplay(std::map<std::string, std::vector<std::vector<double> > > &mapOfFeatures)
1070{
1071 // Clear the input map
1072 mapOfFeatures.clear();
1073
1074 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1075 it != m_mapOfTrackers.end(); ++it) {
1076 mapOfFeatures[it->first] = it->second->getFeaturesForDisplay();
1077 }
1078}
1079
1090
1091#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
1100std::vector<vpImagePoint> vpMbGenericTracker::getKltImagePoints() const
1101{
1102 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1103 if (it != m_mapOfTrackers.end()) {
1104 TrackerWrapper *tracker = it->second;
1105 return tracker->getKltImagePoints();
1106 }
1107 else {
1108 std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1109 }
1110
1111 return std::vector<vpImagePoint>();
1112}
1113
1122std::map<int, vpImagePoint> vpMbGenericTracker::getKltImagePointsWithId() const
1123{
1124 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1125 if (it != m_mapOfTrackers.end()) {
1126 TrackerWrapper *tracker = it->second;
1127 return tracker->getKltImagePointsWithId();
1128 }
1129 else {
1130 std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1131 }
1132
1133 return std::map<int, vpImagePoint>();
1134}
1135
1142{
1143 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1144 if (it != m_mapOfTrackers.end()) {
1145 TrackerWrapper *tracker = it->second;
1146 return tracker->getKltMaskBorder();
1147 }
1148 else {
1149 std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1150 }
1151
1152 return 0;
1153}
1154
1161{
1162 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1163 if (it != m_mapOfTrackers.end()) {
1164 TrackerWrapper *tracker = it->second;
1165 return tracker->getKltNbPoints();
1166 }
1167 else {
1168 std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1169 }
1170
1171 return 0;
1172}
1173
1180{
1181 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
1182
1183 if (it_tracker != m_mapOfTrackers.end()) {
1184 TrackerWrapper *tracker;
1185 tracker = it_tracker->second;
1186 return tracker->getKltOpencv();
1187 }
1188 else {
1189 std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1190 }
1191
1192 return vpKltOpencv();
1193}
1194
1204{
1205 if (m_mapOfTrackers.size() == 2) {
1206 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1207 klt1 = it->second->getKltOpencv();
1208 ++it;
1209
1210 klt2 = it->second->getKltOpencv();
1211 }
1212 else {
1213 std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
1214 << std::endl;
1215 }
1216}
1217
1223void vpMbGenericTracker::getKltOpencv(std::map<std::string, vpKltOpencv> &mapOfKlts) const
1224{
1225 mapOfKlts.clear();
1226
1227 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1228 it != m_mapOfTrackers.end(); ++it) {
1229 TrackerWrapper *tracker = it->second;
1230 mapOfKlts[it->first] = tracker->getKltOpencv();
1231 }
1232}
1233
1239std::vector<cv::Point2f> vpMbGenericTracker::getKltPoints() const
1240{
1241 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1242 if (it != m_mapOfTrackers.end()) {
1243 TrackerWrapper *tracker = it->second;
1244 return tracker->getKltPoints();
1245 }
1246 else {
1247 std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1248 }
1249
1250 return std::vector<cv::Point2f>();
1251}
1252
1260#endif
1261
1274void vpMbGenericTracker::getLcircle(std::list<vpMbtDistanceCircle *> &circlesList, unsigned int level) const
1275{
1276 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1277 if (it != m_mapOfTrackers.end()) {
1278 it->second->getLcircle(circlesList, level);
1279 }
1280 else {
1281 std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1282 }
1283}
1284
1298void vpMbGenericTracker::getLcircle(const std::string &cameraName, std::list<vpMbtDistanceCircle *> &circlesList,
1299 unsigned int level) const
1300{
1301 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1302 if (it != m_mapOfTrackers.end()) {
1303 it->second->getLcircle(circlesList, level);
1304 }
1305 else {
1306 std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1307 }
1308}
1309
1322void vpMbGenericTracker::getLcylinder(std::list<vpMbtDistanceCylinder *> &cylindersList, unsigned int level) const
1323{
1324 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1325 if (it != m_mapOfTrackers.end()) {
1326 it->second->getLcylinder(cylindersList, level);
1327 }
1328 else {
1329 std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1330 }
1331}
1332
1346void vpMbGenericTracker::getLcylinder(const std::string &cameraName, std::list<vpMbtDistanceCylinder *> &cylindersList,
1347 unsigned int level) const
1348{
1349 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1350 if (it != m_mapOfTrackers.end()) {
1351 it->second->getLcylinder(cylindersList, level);
1352 }
1353 else {
1354 std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1355 }
1356}
1357
1370void vpMbGenericTracker::getLline(std::list<vpMbtDistanceLine *> &linesList, unsigned int level) const
1371{
1372 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1373
1374 if (it != m_mapOfTrackers.end()) {
1375 it->second->getLline(linesList, level);
1376 }
1377 else {
1378 std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1379 }
1380}
1381
1395void vpMbGenericTracker::getLline(const std::string &cameraName, std::list<vpMbtDistanceLine *> &linesList,
1396 unsigned int level) const
1397{
1398 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1399 if (it != m_mapOfTrackers.end()) {
1400 it->second->getLline(linesList, level);
1401 }
1402 else {
1403 std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1404 }
1405}
1406
1436std::vector<std::vector<double> > vpMbGenericTracker::getModelForDisplay(unsigned int width, unsigned int height,
1437 const vpHomogeneousMatrix &cMo,
1438 const vpCameraParameters &cam,
1439 bool displayFullModel)
1440{
1441 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1442
1443 if (it != m_mapOfTrackers.end()) {
1444 return it->second->getModelForDisplay(width, height, cMo, cam, displayFullModel);
1445 }
1446 else {
1447 std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1448 }
1449
1450 return std::vector<std::vector<double> >();
1451}
1452
1478void vpMbGenericTracker::getModelForDisplay(std::map<std::string, std::vector<std::vector<double> > > &mapOfModels,
1479 const std::map<std::string, unsigned int> &mapOfwidths,
1480 const std::map<std::string, unsigned int> &mapOfheights,
1481 const std::map<std::string, vpHomogeneousMatrix> &mapOfcMos,
1482 const std::map<std::string, vpCameraParameters> &mapOfCams,
1483 bool displayFullModel)
1484{
1485 // Clear the input map
1486 mapOfModels.clear();
1487
1488 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1489 it != m_mapOfTrackers.end(); ++it) {
1490 std::map<std::string, unsigned int>::const_iterator findWidth = mapOfwidths.find(it->first);
1491 std::map<std::string, unsigned int>::const_iterator findHeight = mapOfheights.find(it->first);
1492 std::map<std::string, vpHomogeneousMatrix>::const_iterator findcMo = mapOfcMos.find(it->first);
1493 std::map<std::string, vpCameraParameters>::const_iterator findCam = mapOfCams.find(it->first);
1494
1495 if (findWidth != mapOfwidths.end() && findHeight != mapOfheights.end() && findcMo != mapOfcMos.end() &&
1496 findCam != mapOfCams.end()) {
1497 mapOfModels[it->first] = it->second->getModelForDisplay(findWidth->second, findHeight->second, findcMo->second,
1498 findCam->second, displayFullModel);
1499 }
1500 }
1501}
1502
// getMovingEdge(): returns the moving-edge settings of the reference camera
// tracker, or a default-constructed vpMe when the reference camera cannot be
// found.
{
  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);

  if (it != m_mapOfTrackers.end()) {
    return it->second->getMovingEdge();
  }
  else {
    std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
  }

  // Fallback: default moving-edge settings.
  return vpMe();
}
1521
// getMovingEdge(me1, me2): stereo variant. Fills me1/me2 with the moving-edge
// settings of the first and second camera (std::map iteration order, i.e.
// sorted by camera name). Requires exactly two camera trackers.
{
  if (m_mapOfTrackers.size() == 2) {
    std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    it->second->getMovingEdge(me1);
    ++it;

    it->second->getMovingEdge(me2);
  }
  else {
    std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
              << std::endl;
  }
}
1544
1550void vpMbGenericTracker::getMovingEdge(std::map<std::string, vpMe> &mapOfMovingEdges) const
1551{
1552 mapOfMovingEdges.clear();
1553
1554 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1555 it != m_mapOfTrackers.end(); ++it) {
1556 TrackerWrapper *tracker = it->second;
1557 mapOfMovingEdges[it->first] = tracker->getMovingEdge();
1558 }
1559}
1560
1578unsigned int vpMbGenericTracker::getNbPoints(unsigned int level) const
1579{
1580 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1581
1582 unsigned int nbGoodPoints = 0;
1583 if (it != m_mapOfTrackers.end()) {
1584
1585 nbGoodPoints = it->second->getNbPoints(level);
1586 }
1587 else {
1588 std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1589 }
1590
1591 return nbGoodPoints;
1592}
1593
1608void vpMbGenericTracker::getNbPoints(std::map<std::string, unsigned int> &mapOfNbPoints, unsigned int level) const
1609{
1610 mapOfNbPoints.clear();
1611
1612 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1613 it != m_mapOfTrackers.end(); ++it) {
1614 TrackerWrapper *tracker = it->second;
1615 mapOfNbPoints[it->first] = tracker->getNbPoints(level);
1616 }
1617}
1618
// getNbPolygon(): returns the number of polygons of the reference camera
// tracker, or 0 when the reference camera cannot be found.
{
  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
  if (it != m_mapOfTrackers.end()) {
    return it->second->getNbPolygon();
  }

  // Reached only when the reference camera is unknown.
  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
  return 0;
}
1634
1640void vpMbGenericTracker::getNbPolygon(std::map<std::string, unsigned int> &mapOfNbPolygons) const
1641{
1642 mapOfNbPolygons.clear();
1643
1644 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1645 it != m_mapOfTrackers.end(); ++it) {
1646 TrackerWrapper *tracker = it->second;
1647 mapOfNbPolygons[it->first] = tracker->getNbPolygon();
1648 }
1649}
1650
// getPolygon(index): returns the polygon of the given index from the
// reference camera tracker, or NULL when the reference camera cannot be
// found.
{
  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
  if (it != m_mapOfTrackers.end()) {
    return it->second->getPolygon(index);
  }

  // Reached only when the reference camera is unknown.
  std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
  return NULL;
}
1671
1683vpMbtPolygon *vpMbGenericTracker::getPolygon(const std::string &cameraName, unsigned int index)
1684{
1685 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1686 if (it != m_mapOfTrackers.end()) {
1687 return it->second->getPolygon(index);
1688 }
1689
1690 std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1691 return NULL;
1692}
1693
1709std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > >
1710vpMbGenericTracker::getPolygonFaces(bool orderPolygons, bool useVisibility, bool clipPolygon)
1711{
1712 std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > polygonFaces;
1713
1714 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1715 if (it != m_mapOfTrackers.end()) {
1716 TrackerWrapper *tracker = it->second;
1717 polygonFaces = tracker->getPolygonFaces(orderPolygons, useVisibility, clipPolygon);
1718 }
1719 else {
1720 std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1721 }
1722
1723 return polygonFaces;
1724}
1725
1743void vpMbGenericTracker::getPolygonFaces(std::map<std::string, std::vector<vpPolygon> > &mapOfPolygons,
1744 std::map<std::string, std::vector<std::vector<vpPoint> > > &mapOfPoints,
1745 bool orderPolygons, bool useVisibility, bool clipPolygon)
1746{
1747 mapOfPolygons.clear();
1748 mapOfPoints.clear();
1749
1750 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1751 it != m_mapOfTrackers.end(); ++it) {
1752 TrackerWrapper *tracker = it->second;
1753 std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > polygonFaces =
1754 tracker->getPolygonFaces(orderPolygons, useVisibility, clipPolygon);
1755
1756 mapOfPolygons[it->first] = polygonFaces.first;
1757 mapOfPoints[it->first] = polygonFaces.second;
1758 }
1759}
1760
1762
// getPose(c1Mo, c2Mo): stereo variant. Fills c1Mo/c2Mo with the poses of the
// first and second camera (std::map iteration order, i.e. sorted by camera
// name). Requires exactly two camera trackers.
{
  if (m_mapOfTrackers.size() == 2) {
    std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    it->second->getPose(c1Mo);
    ++it;

    it->second->getPose(c2Mo);
  }
  else {
    std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
              << std::endl;
  }
}
1785
1791void vpMbGenericTracker::getPose(std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses) const
1792{
1793 // Clear the map
1794 mapOfCameraPoses.clear();
1795
1796 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1797 it != m_mapOfTrackers.end(); ++it) {
1798 TrackerWrapper *tracker = it->second;
1799 tracker->getPose(mapOfCameraPoses[it->first]);
1800 }
1801}
1802
1807
// getTrackerType(): returns the tracker type of the reference camera
// tracker. Unlike the other getters, a missing reference camera throws
// instead of printing an error.
{
  std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
  if (it != m_mapOfTrackers.end()) {
    TrackerWrapper *tracker = it->second;
    return tracker->getTrackerType();
  }
  else {
    throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
                      m_referenceCameraName.c_str());
  }
}
1823
// init(I): propagate the reference pose m_cMo to every camera tracker using
// the per-camera transformation matrices, then (re-)initialize each tracker.
{
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;
    // Pose of this camera = (this camera w.r.t. reference camera) * (reference pose).
    tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * m_cMo;
    tracker->init(I);
  }
}
1833
/*!
  Model-loading hook that must never be called directly on
  vpMbGenericTracker: it always throws.

  \exception vpException::fatalError : Always thrown.
*/
void vpMbGenericTracker::initCircle(const vpPoint & /*p1*/, const vpPoint & /*p2*/, const vpPoint & /*p3*/,
                                    double /*radius*/, int /*idFace*/, const std::string & /*name*/)
{
  throw vpException(vpException::fatalError, "vpMbGenericTracker::initCircle() should not be called!");
}
1839
1840#ifdef VISP_HAVE_MODULE_GUI
1841
                                   const std::string &initFile1, const std::string &initFile2, bool displayHelp,
                                   const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
{
  // Stereo init-by-click: requires exactly two camera trackers. Each tracker
  // runs its own initClick() on its image/init file, then the pose of the
  // reference camera tracker becomes the global reference pose m_cMo.
  if (m_mapOfTrackers.size() == 2) {
    std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->initClick(I1, initFile1, displayHelp, T1);

    ++it;

    tracker = it->second;
    tracker->initClick(I2, initFile2, displayHelp, T2);

    if (it != m_mapOfTrackers.end()) {
      tracker = it->second;

      // Set the reference cMo
      tracker->getPose(m_cMo);
    }
  }
  else {
    "Cannot initClick()! Require two cameras but there are %d cameras!", m_mapOfTrackers.size());
  }
}
1911
                                   const std::string &initFile1, const std::string &initFile2, bool displayHelp,
                                   const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
{
  // Color-image variant of the stereo init-by-click: requires exactly two
  // camera trackers; the pose of the reference camera tracker becomes the
  // global reference pose m_cMo.
  if (m_mapOfTrackers.size() == 2) {
    std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->initClick(I_color1, initFile1, displayHelp, T1);

    ++it;

    tracker = it->second;
    tracker->initClick(I_color2, initFile2, displayHelp, T2);

    if (it != m_mapOfTrackers.end()) {
      tracker = it->second;

      // Set the reference cMo
      tracker->getPose(m_cMo);
    }
  }
  else {
    "Cannot initClick()! Require two cameras but there are %d cameras!", m_mapOfTrackers.size());
  }
}
1981
/*!
  Initialize the tracking by clicking the init points, per camera. The
  reference camera must have both an image and an init file; cameras that
  have both are initialized the same way, and cameras missing one of them get
  their pose derived from the reference pose through the known camera
  transformation matrices.

  \param mapOfImages : Map of grayscale images, indexed by camera name.
  \param mapOfInitFiles : Map of init-file paths, indexed by camera name.
  \param displayHelp : Forwarded to each tracker's initClick().
  \param mapOfT : Optional per-camera transformations forwarded to
  initClick().

  \exception vpTrackingException::initializationError : If the reference
  camera has no image or no init file, or if a remaining camera has neither
  an init file nor a camera transformation matrix.
*/
void vpMbGenericTracker::initClick(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
                                   const std::map<std::string, std::string> &mapOfInitFiles, bool displayHelp,
                                   const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
{
  // First, initialize the reference camera: its pose becomes m_cMo.
  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
      mapOfImages.find(m_referenceCameraName);
  std::map<std::string, std::string>::const_iterator it_initFile = mapOfInitFiles.find(m_referenceCameraName);

  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initFile != mapOfInitFiles.end()) {
    TrackerWrapper *tracker = it_tracker->second;
    std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
    if (it_T != mapOfT.end())
      tracker->initClick(*it_img->second, it_initFile->second, displayHelp, it_T->second);
    else
      tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
    tracker->getPose(m_cMo);
  }
  else {
    throw vpException(vpTrackingException::initializationError, "Cannot initClick for the reference camera!");
  }

  // Vector of missing initFile for cameras
  std::vector<std::string> vectorOfMissingCameraPoses;

  // Set pose for the specified cameras
  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
    if (it_tracker->first != m_referenceCameraName) {
      it_img = mapOfImages.find(it_tracker->first);
      it_initFile = mapOfInitFiles.find(it_tracker->first);

      if (it_img != mapOfImages.end() && it_initFile != mapOfInitFiles.end()) {
        // InitClick for the current camera
        TrackerWrapper *tracker = it_tracker->second;
        tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
      }
      else {
        vectorOfMissingCameraPoses.push_back(it_tracker->first);
      }
    }
  }

  // Init for cameras that do not have an initFile
  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
       it != vectorOfMissingCameraPoses.end(); ++it) {
    it_img = mapOfImages.find(*it);
    // Lookup of this camera's transformation w.r.t. the reference camera.
    std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =

    if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
      // Derive this camera's pose from the reference pose.
      vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
      m_mapOfTrackers[*it]->m_cMo = cCurrentMo;
      m_mapOfTrackers[*it]->init(*it_img->second);
    }
    else {
      "Missing image or missing camera transformation "
      "matrix! Cannot set the pose for camera: %s!",
      it->c_str());
    }
  }
}
2086
/*!
  Color-image variant of initClick() with per-camera maps. The reference
  camera must have both an image and an init file; cameras missing one of
  them get their pose derived from the reference pose through the known
  camera transformation matrices (their color image is converted to grayscale
  before init()).

  \param mapOfColorImages : Map of color images, indexed by camera name.
  \param mapOfInitFiles : Map of init-file paths, indexed by camera name.
  \param displayHelp : Forwarded to each tracker's initClick().
  \param mapOfT : Optional per-camera transformations forwarded to
  initClick().

  \exception vpTrackingException::initializationError : If the reference
  camera has no image or no init file, or if a remaining camera has neither
  an init file nor a camera transformation matrix.
*/
void vpMbGenericTracker::initClick(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
                                   const std::map<std::string, std::string> &mapOfInitFiles, bool displayHelp,
                                   const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
{
  // First, initialize the reference camera: its pose becomes m_cMo.
  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
  std::map<std::string, std::string>::const_iterator it_initFile = mapOfInitFiles.find(m_referenceCameraName);

  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_initFile != mapOfInitFiles.end()) {
    TrackerWrapper *tracker = it_tracker->second;
    std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
    if (it_T != mapOfT.end())
      tracker->initClick(*it_img->second, it_initFile->second, displayHelp, it_T->second);
    else
      tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
    tracker->getPose(m_cMo);
  }
  else {
    throw vpException(vpTrackingException::initializationError, "Cannot initClick for the reference camera!");
  }

  // Vector of missing initFile for cameras
  std::vector<std::string> vectorOfMissingCameraPoses;

  // Set pose for the specified cameras
  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
    if (it_tracker->first != m_referenceCameraName) {
      it_img = mapOfColorImages.find(it_tracker->first);
      it_initFile = mapOfInitFiles.find(it_tracker->first);

      if (it_img != mapOfColorImages.end() && it_initFile != mapOfInitFiles.end()) {
        // InitClick for the current camera
        TrackerWrapper *tracker = it_tracker->second;
        tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
      }
      else {
        vectorOfMissingCameraPoses.push_back(it_tracker->first);
      }
    }
  }

  // Init for cameras that do not have an initFile
  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
       it != vectorOfMissingCameraPoses.end(); ++it) {
    it_img = mapOfColorImages.find(*it);
    // Lookup of this camera's transformation w.r.t. the reference camera.
    std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =

    if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
      // Derive this camera's pose from the reference pose, convert the color
      // image to the tracker's grayscale buffer, then initialize.
      vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
      m_mapOfTrackers[*it]->m_cMo = cCurrentMo;
      vpImageConvert::convert(*it_img->second, m_mapOfTrackers[*it]->m_I);
      m_mapOfTrackers[*it]->init(m_mapOfTrackers[*it]->m_I);
    }
    else {
      "Missing image or missing camera transformation "
      "matrix! Cannot set the pose for camera: %s!",
      it->c_str());
    }
  }
}
2191#endif
2192
/*!
  Model-loading hook that must never be called directly on
  vpMbGenericTracker: it always throws.

  \exception vpException::fatalError : Always thrown.
*/
void vpMbGenericTracker::initCylinder(const vpPoint & /*p1*/, const vpPoint & /*p2*/, const double /*radius*/,
                                      const int /*idFace*/, const std::string & /*name*/)
{
  throw vpException(vpException::fatalError, "vpMbGenericTracker::initCylinder() should not be called!");
}
2198
// initFaceFromCorners(): must never be called directly on
// vpMbGenericTracker; it always throws vpException::fatalError.
{
  throw vpException(vpException::fatalError, "vpMbGenericTracker::initFaceFromCorners() should not be called!");
}
2203
// initFaceFromLines(): must never be called directly on vpMbGenericTracker;
// it always throws vpException::fatalError.
{
  throw vpException(vpException::fatalError, "vpMbGenericTracker::initFaceFromLines() should not be called!");
}
2208
                                        const std::string &initFile1, const std::string &initFile2)
{
  // Stereo init-from-points: requires exactly two camera trackers. Each
  // tracker is initialized from its own points file; the reference camera
  // tracker then provides the global pose m_cMo and camera parameters m_cam.
  if (m_mapOfTrackers.size() == 2) {
    std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->initFromPoints(I1, initFile1);

    ++it;

    tracker = it->second;
    tracker->initFromPoints(I2, initFile2);

    if (it != m_mapOfTrackers.end()) {
      tracker = it->second;

      // Set the reference cMo
      tracker->getPose(m_cMo);

      // Set the reference camera parameters
      tracker->getCameraParameters(m_cam);
    }
  }
  else {
    "Cannot initFromPoints()! Require two cameras but "
    "there are %d cameras!",
    m_mapOfTrackers.size());
  }
}
2269
                                        const std::string &initFile1, const std::string &initFile2)
{
  // Color-image variant of the stereo init-from-points: requires exactly two
  // camera trackers; the reference camera tracker provides the global pose
  // m_cMo and camera parameters m_cam.
  if (m_mapOfTrackers.size() == 2) {
    std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->initFromPoints(I_color1, initFile1);

    ++it;

    tracker = it->second;
    tracker->initFromPoints(I_color2, initFile2);

    if (it != m_mapOfTrackers.end()) {
      tracker = it->second;

      // Set the reference cMo
      tracker->getPose(m_cMo);

      // Set the reference camera parameters
      tracker->getCameraParameters(m_cam);
    }
  }
  else {
    "Cannot initFromPoints()! Require two cameras but "
    "there are %d cameras!",
    m_mapOfTrackers.size());
  }
}
2330
/*!
  Initialize the tracking from 2D/3D point correspondences, per camera. The
  reference camera must have both an image and a points file; cameras missing
  one of them get their pose derived from the reference pose through the
  known camera transformation matrices.

  \param mapOfImages : Map of grayscale images, indexed by camera name.
  \param mapOfInitPoints : Map of points-file paths, indexed by camera name.

  \exception vpTrackingException::initializationError : If the reference
  camera has no image or no points file, or if a remaining camera has neither
  a points file nor a camera transformation matrix.
*/
void vpMbGenericTracker::initFromPoints(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
                                        const std::map<std::string, std::string> &mapOfInitPoints)
{
  // Set the reference cMo
  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
      mapOfImages.find(m_referenceCameraName);
  std::map<std::string, std::string>::const_iterator it_initPoints = mapOfInitPoints.find(m_referenceCameraName);

  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initPoints != mapOfInitPoints.end()) {
    TrackerWrapper *tracker = it_tracker->second;
    tracker->initFromPoints(*it_img->second, it_initPoints->second);
    tracker->getPose(m_cMo);
  }
  else {
    throw vpException(vpTrackingException::initializationError, "Cannot initFromPoints() for the reference camera!");
  }

  // Vector of missing initPoints for cameras
  std::vector<std::string> vectorOfMissingCameraPoints;

  // Set pose for the specified cameras
  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
    it_img = mapOfImages.find(it_tracker->first);
    it_initPoints = mapOfInitPoints.find(it_tracker->first);

    if (it_img != mapOfImages.end() && it_initPoints != mapOfInitPoints.end()) {
      // Set pose
      it_tracker->second->initFromPoints(*it_img->second, it_initPoints->second);
    }
    else {
      vectorOfMissingCameraPoints.push_back(it_tracker->first);
    }
  }

  // Cameras without a points file: derive their pose from the reference pose.
  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoints.begin();
       it != vectorOfMissingCameraPoints.end(); ++it) {
    it_img = mapOfImages.find(*it);
    // Lookup of this camera's transformation w.r.t. the reference camera.
    std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =

    if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
      vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
      m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
    }
    else {
      "Missing image or missing camera transformation "
      "matrix! Cannot init the pose for camera: %s!",
      it->c_str());
    }
  }
}
2384
/*!
  Color-image variant of initFromPoints() with per-camera maps. The reference
  camera must have both an image and a points file; cameras missing one of
  them get their pose derived from the reference pose through the known
  camera transformation matrices.

  \param mapOfColorImages : Map of color images, indexed by camera name.
  \param mapOfInitPoints : Map of points-file paths, indexed by camera name.

  \exception vpTrackingException::initializationError : If the reference
  camera has no image or no points file, or if a remaining camera has neither
  a points file nor a camera transformation matrix.
*/
void vpMbGenericTracker::initFromPoints(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
                                        const std::map<std::string, std::string> &mapOfInitPoints)
{
  // Set the reference cMo
  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
  std::map<std::string, std::string>::const_iterator it_initPoints = mapOfInitPoints.find(m_referenceCameraName);

  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() &&
      it_initPoints != mapOfInitPoints.end()) {
    TrackerWrapper *tracker = it_tracker->second;
    tracker->initFromPoints(*it_img->second, it_initPoints->second);
    tracker->getPose(m_cMo);
  }
  else {
    throw vpException(vpTrackingException::initializationError, "Cannot initFromPoints() for the reference camera!");
  }

  // Vector of missing initPoints for cameras
  std::vector<std::string> vectorOfMissingCameraPoints;

  // Set pose for the specified cameras
  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
    it_img = mapOfColorImages.find(it_tracker->first);
    it_initPoints = mapOfInitPoints.find(it_tracker->first);

    if (it_img != mapOfColorImages.end() && it_initPoints != mapOfInitPoints.end()) {
      // Set pose
      it_tracker->second->initFromPoints(*it_img->second, it_initPoints->second);
    }
    else {
      vectorOfMissingCameraPoints.push_back(it_tracker->first);
    }
  }

  // Cameras without a points file: derive their pose from the reference pose.
  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoints.begin();
       it != vectorOfMissingCameraPoints.end(); ++it) {
    it_img = mapOfColorImages.find(*it);
    // Lookup of this camera's transformation w.r.t. the reference camera.
    std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =

    if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
      vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
      m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
    }
    else {
      "Missing image or missing camera transformation "
      "matrix! Cannot init the pose for camera: %s!",
      it->c_str());
    }
  }
}
2438
2443
                                      const std::string &initFile1, const std::string &initFile2)
{
  // Stereo init-from-pose-file: requires exactly two camera trackers. Each
  // tracker reads its own pose file; the reference camera tracker then
  // provides the global pose m_cMo and camera parameters m_cam.
  if (m_mapOfTrackers.size() == 2) {
    std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->initFromPose(I1, initFile1);

    ++it;

    tracker = it->second;
    tracker->initFromPose(I2, initFile2);

    if (it != m_mapOfTrackers.end()) {
      tracker = it->second;

      // Set the reference cMo
      tracker->getPose(m_cMo);

      // Set the reference camera parameters
      tracker->getCameraParameters(m_cam);
    }
  }
  else {
    "Cannot initFromPose()! Require two cameras but there "
    "are %d cameras!",
    m_mapOfTrackers.size());
  }
}
2486
                                      const std::string &initFile1, const std::string &initFile2)
{
  // Color-image variant of the stereo init-from-pose-file: requires exactly
  // two camera trackers; the reference camera tracker provides the global
  // pose m_cMo and camera parameters m_cam.
  if (m_mapOfTrackers.size() == 2) {
    std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
    TrackerWrapper *tracker = it->second;
    tracker->initFromPose(I_color1, initFile1);

    ++it;

    tracker = it->second;
    tracker->initFromPose(I_color2, initFile2);

    if (it != m_mapOfTrackers.end()) {
      tracker = it->second;

      // Set the reference cMo
      tracker->getPose(m_cMo);

      // Set the reference camera parameters
      tracker->getCameraParameters(m_cam);
    }
  }
  else {
    "Cannot initFromPose()! Require two cameras but there "
    "are %d cameras!",
    m_mapOfTrackers.size());
  }
}
2529
/*!
  Initialize the tracking from pose files, per camera. The reference camera
  must have both an image and a pose file; cameras missing one of them get
  their pose derived from the reference pose through the known camera
  transformation matrices.

  \param mapOfImages : Map of grayscale images, indexed by camera name.
  \param mapOfInitPoses : Map of pose-file paths, indexed by camera name.

  \exception vpTrackingException::initializationError : If the reference
  camera has no image or no pose file, or if a remaining camera has neither a
  pose file nor a camera transformation matrix.
*/
void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
                                      const std::map<std::string, std::string> &mapOfInitPoses)
{
  // Set the reference cMo
  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
  std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
      mapOfImages.find(m_referenceCameraName);
  std::map<std::string, std::string>::const_iterator it_initPose = mapOfInitPoses.find(m_referenceCameraName);

  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initPose != mapOfInitPoses.end()) {
    TrackerWrapper *tracker = it_tracker->second;
    tracker->initFromPose(*it_img->second, it_initPose->second);
    tracker->getPose(m_cMo);
  }
  else {
    throw vpException(vpTrackingException::initializationError, "Cannot initFromPose() for the reference camera!");
  }

  // Vector of missing pose matrices for cameras
  std::vector<std::string> vectorOfMissingCameraPoses;

  // Set pose for the specified cameras
  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
    it_img = mapOfImages.find(it_tracker->first);
    it_initPose = mapOfInitPoses.find(it_tracker->first);

    if (it_img != mapOfImages.end() && it_initPose != mapOfInitPoses.end()) {
      // Set pose
      it_tracker->second->initFromPose(*it_img->second, it_initPose->second);
    }
    else {
      vectorOfMissingCameraPoses.push_back(it_tracker->first);
    }
  }

  // Cameras without a pose file: derive their pose from the reference pose.
  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
       it != vectorOfMissingCameraPoses.end(); ++it) {
    it_img = mapOfImages.find(*it);
    // Lookup of this camera's transformation w.r.t. the reference camera.
    std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =

    if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
      vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
      m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
    }
    else {
      "Missing image or missing camera transformation "
      "matrix! Cannot init the pose for camera: %s!",
      it->c_str());
    }
  }
}
2597
/*!
  Color-image variant of initFromPose() with per-camera maps. The reference
  camera must have both an image and a pose file; cameras missing one of them
  get their pose derived from the reference pose through the known camera
  transformation matrices.

  \param mapOfColorImages : Map of color images, indexed by camera name.
  \param mapOfInitPoses : Map of pose-file paths, indexed by camera name.

  \exception vpTrackingException::initializationError : If the reference
  camera has no image or no pose file, or if a remaining camera has neither a
  pose file nor a camera transformation matrix.
*/
void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
                                      const std::map<std::string, std::string> &mapOfInitPoses)
{
  // Set the reference cMo
  std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
  std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
  std::map<std::string, std::string>::const_iterator it_initPose = mapOfInitPoses.find(m_referenceCameraName);

  if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_initPose != mapOfInitPoses.end()) {
    TrackerWrapper *tracker = it_tracker->second;
    tracker->initFromPose(*it_img->second, it_initPose->second);
    tracker->getPose(m_cMo);
  }
  else {
    throw vpException(vpTrackingException::initializationError, "Cannot initFromPose() for the reference camera!");
  }

  // Vector of missing pose matrices for cameras
  std::vector<std::string> vectorOfMissingCameraPoses;

  // Set pose for the specified cameras
  for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
    it_img = mapOfColorImages.find(it_tracker->first);
    it_initPose = mapOfInitPoses.find(it_tracker->first);

    if (it_img != mapOfColorImages.end() && it_initPose != mapOfInitPoses.end()) {
      // Set pose
      it_tracker->second->initFromPose(*it_img->second, it_initPose->second);
    }
    else {
      vectorOfMissingCameraPoses.push_back(it_tracker->first);
    }
  }

  // Cameras without a pose file: derive their pose from the reference pose.
  for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
       it != vectorOfMissingCameraPoses.end(); ++it) {
    it_img = mapOfColorImages.find(*it);
    // Lookup of this camera's transformation w.r.t. the reference camera.
    std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =

    if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
      vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
      m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
    }
    else {
      "Missing image or missing camera transformation "
      "matrix! Cannot init the pose for camera: %s!",
      it->c_str());
    }
  }
}
2664
2676 const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
2677{
2678 if (m_mapOfTrackers.size() == 2) {
2679 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2680 it->second->initFromPose(I1, c1Mo);
2681
2682 ++it;
2683
2684 it->second->initFromPose(I2, c2Mo);
2685
2686 m_cMo = c1Mo;
2687 }
2688 else {
2690 "This method requires 2 cameras but there are %d cameras!", m_mapOfTrackers.size());
2691 }
2692}
2693
2705 const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
2706{
2707 if (m_mapOfTrackers.size() == 2) {
2708 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2709 it->second->initFromPose(I_color1, c1Mo);
2710
2711 ++it;
2712
2713 it->second->initFromPose(I_color2, c2Mo);
2714
2715 m_cMo = c1Mo;
2716 }
2717 else {
2719 "This method requires 2 cameras but there are %d cameras!", m_mapOfTrackers.size());
2720 }
2721}
2722
2736void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2737 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
2738{
2739 // Set the reference cMo
2740 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2741 std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2742 mapOfImages.find(m_referenceCameraName);
2743 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
2744
2745 if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
2746 TrackerWrapper *tracker = it_tracker->second;
2747 tracker->initFromPose(*it_img->second, it_camPose->second);
2748 tracker->getPose(m_cMo);
2749 }
2750 else {
2751 throw vpException(vpTrackingException::initializationError, "Cannot set pose for the reference camera!");
2752 }
2753
2754 // Vector of missing pose matrices for cameras
2755 std::vector<std::string> vectorOfMissingCameraPoses;
2756
2757 // Set pose for the specified cameras
2758 for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2759 it_img = mapOfImages.find(it_tracker->first);
2760 it_camPose = mapOfCameraPoses.find(it_tracker->first);
2761
2762 if (it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
2763 // Set pose
2764 it_tracker->second->initFromPose(*it_img->second, it_camPose->second);
2765 }
2766 else {
2767 vectorOfMissingCameraPoses.push_back(it_tracker->first);
2768 }
2769 }
2770
2771 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2772 it != vectorOfMissingCameraPoses.end(); ++it) {
2773 it_img = mapOfImages.find(*it);
2774 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2776
2777 if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2778 vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2779 m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2780 }
2781 else {
2783 "Missing image or missing camera transformation "
2784 "matrix! Cannot set the pose for camera: %s!",
2785 it->c_str());
2786 }
2787 }
2788}
2789
2803void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2804 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
2805{
2806 // Set the reference cMo
2807 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2808 std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
2809 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
2810
2811 if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
2812 TrackerWrapper *tracker = it_tracker->second;
2813 tracker->initFromPose(*it_img->second, it_camPose->second);
2814 tracker->getPose(m_cMo);
2815 }
2816 else {
2817 throw vpException(vpTrackingException::initializationError, "Cannot set pose for the reference camera!");
2818 }
2819
2820 // Vector of missing pose matrices for cameras
2821 std::vector<std::string> vectorOfMissingCameraPoses;
2822
2823 // Set pose for the specified cameras
2824 for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2825 it_img = mapOfColorImages.find(it_tracker->first);
2826 it_camPose = mapOfCameraPoses.find(it_tracker->first);
2827
2828 if (it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
2829 // Set pose
2830 it_tracker->second->initFromPose(*it_img->second, it_camPose->second);
2831 }
2832 else {
2833 vectorOfMissingCameraPoses.push_back(it_tracker->first);
2834 }
2835 }
2836
2837 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2838 it != vectorOfMissingCameraPoses.end(); ++it) {
2839 it_img = mapOfColorImages.find(*it);
2840 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2842
2843 if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2844 vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2845 m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2846 }
2847 else {
2849 "Missing image or missing camera transformation "
2850 "matrix! Cannot set the pose for camera: %s!",
2851 it->c_str());
2852 }
2853 }
2854}
2855
2867void vpMbGenericTracker::loadConfigFile(const std::string &configFile, bool verbose)
2868{
2869 const std::string extension = vpIoTools::getFileExtension(configFile);
2870 if (extension == ".xml") {
2871 loadConfigFileXML(configFile, verbose);
2872 }
2873#ifdef VISP_HAVE_NLOHMANN_JSON
2874 else if (extension == ".json") {
2875 loadConfigFileJSON(configFile, verbose);
2876 }
2877#endif
2878 else {
2879 throw vpException(vpException::ioError, "MBT config parsing: File format " + extension + "for file " + configFile + " is not supported.");
2880 }
2881}
2882
2894void vpMbGenericTracker::loadConfigFileXML(const std::string &configFile, bool verbose)
2895{
2896 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2897 it != m_mapOfTrackers.end(); ++it) {
2898 TrackerWrapper *tracker = it->second;
2899 tracker->loadConfigFile(configFile, verbose);
2900 }
2901
2903 throw vpException(vpException::fatalError, "Cannot find the reference camera: %s!", m_referenceCameraName.c_str());
2904 }
2905
2906 m_mapOfTrackers[m_referenceCameraName]->getCameraParameters(m_cam);
2907 this->angleAppears = m_mapOfTrackers[m_referenceCameraName]->getAngleAppear();
2908 this->angleDisappears = m_mapOfTrackers[m_referenceCameraName]->getAngleDisappear();
2909 this->clippingFlag = m_mapOfTrackers[m_referenceCameraName]->getClipping();
2910}
2911
2912#ifdef VISP_HAVE_NLOHMANN_JSON
2921void vpMbGenericTracker::loadConfigFileJSON(const std::string &settingsFile, bool verbose)
2922{
2923 //Read file
2924 std::ifstream jsonFile(settingsFile);
2925 if (!jsonFile.good()) {
2926 throw vpException(vpException::ioError, "Could not read from settings file " + settingsFile + " to initialise the vpMbGenericTracker");
2927 }
2928 json settings;
2929 try {
2930 settings = json::parse(jsonFile);
2931 }
2932 catch (json::parse_error &e) {
2933 std::stringstream msg;
2934 msg << "Could not parse JSON file : \n";
2935
2936 msg << e.what() << std::endl;
2937 msg << "Byte position of error: " << e.byte;
2938 throw vpException(vpException::ioError, msg.str());
2939 }
2940 jsonFile.close();
2941
2942 if (!settings.contains("version")) {
2943 throw vpException(vpException::notInitialized, "JSON configuration does not contain versioning information");
2944 }
2945 else if (settings["version"].get<std::string>() != MBT_JSON_SETTINGS_VERSION) {
2946 throw vpException(vpException::badValue, "Trying to load an old configuration file");
2947 }
2948
2949 //Load Basic settings
2950 settings.at("referenceCameraName").get_to(m_referenceCameraName);
2951 json trackersJson;
2952 trackersJson = settings.at("trackers");
2953
2954 //Find camera that are already present in the tracker but not in the config file: they will be removed
2955 std::vector<std::string> unusedCameraNames = getCameraNames();
2956
2957 bool refCameraFound = false;
2958 //Foreach camera
2959 for (const auto &it : trackersJson.items()) {
2960 const std::string cameraName = it.key();
2961 const json trackerJson = it.value();
2962 refCameraFound = refCameraFound || cameraName == m_referenceCameraName;
2963
2964 //Load transformation between current camera and reference camera, if it exists
2965 if (trackerJson.contains("camTref")) {
2966 m_mapOfCameraTransformationMatrix[cameraName] = trackerJson["camTref"].get<vpHomogeneousMatrix>();
2967 }
2968 else if (cameraName != m_referenceCameraName) { // No transformation to reference and its not the reference itself
2969 throw vpException(vpException::notInitialized, "Camera " + cameraName + " has no transformation to the reference camera");
2970 }
2971 if (verbose) {
2972 std::cout << "Loading tracker " << cameraName << std::endl << " with settings: " << std::endl << trackerJson.dump(2);
2973 }
2974 if (m_mapOfTrackers.count(cameraName)) {
2975 if (verbose) {
2976 std::cout << "Updating an already existing tracker with JSON configuration." << std::endl;
2977 }
2978 from_json(trackerJson, *(m_mapOfTrackers[cameraName]));
2979 }
2980 else {
2981 if (verbose) {
2982 std::cout << "Creating a new tracker from JSON configuration." << std::endl;
2983 }
2984 TrackerWrapper *tw = new TrackerWrapper(); //vpMBTracker is responsible for deleting trackers
2985 *tw = trackerJson;
2986 m_mapOfTrackers[cameraName] = tw;
2987 }
2988 const auto unusedCamIt = std::remove(unusedCameraNames.begin(), unusedCameraNames.end(), cameraName); // Mark this camera name as used
2989 unusedCameraNames.erase(unusedCamIt, unusedCameraNames.end());
2990 }
2991 if (!refCameraFound) {
2992 throw vpException(vpException::badValue, "Reference camera not found in trackers");
2993 }
2994
2995 // All camerasthat were defined in the tracker but not in the config file are removed
2996 for (const std::string &oldCameraName : unusedCameraNames) {
2997 m_mapOfCameraTransformationMatrix.erase(oldCameraName);
2998 TrackerWrapper *tw = m_mapOfTrackers[oldCameraName];
2999 m_mapOfTrackers.erase(oldCameraName);
3000 delete tw;
3001 }
3002
3003 const TrackerWrapper *refTracker = m_mapOfTrackers[m_referenceCameraName];
3004 refTracker->getCameraParameters(m_cam);
3005 this->angleAppears = refTracker->getAngleAppear();
3006 this->angleDisappears = refTracker->getAngleDisappear();
3007 this->clippingFlag = refTracker->getClipping();
3008 this->distNearClip = refTracker->getNearClippingDistance();
3009 this->distFarClip = refTracker->getFarClippingDistance();
3010
3011 // These settings can be set in each tracker or globally. Global value overrides local ones.
3012 if (settings.contains("display")) {
3013 const json displayJson = settings["display"];
3014 setDisplayFeatures(displayJson.value("features", displayFeatures));
3015 setProjectionErrorDisplay(displayJson.value("projectionError", m_projectionErrorDisplay));
3016 }
3017 if (settings.contains("visibilityTest")) {
3018 const json visJson = settings["visibilityTest"];
3019 setOgreVisibilityTest(visJson.value("ogre", useOgre));
3020 setScanLineVisibilityTest(visJson.value("scanline", useScanLine));
3021 }
3022 //If a 3D model is defined, load it
3023 if (settings.contains("model")) {
3024 loadModel(settings.at("model").get<std::string>(), verbose);
3025 }
3026}
3027
3035void vpMbGenericTracker::saveConfigFile(const std::string &settingsFile) const
3036{
3037 json j;
3038 j["referenceCameraName"] = m_referenceCameraName;
3039 j["version"] = MBT_JSON_SETTINGS_VERSION;
3040 // j["thresholdOutlier"] = m_thresholdOutlier;
3041 json trackers;
3042 for (const auto &kv : m_mapOfTrackers) {
3043 trackers[kv.first] = *(kv.second);
3044 const auto itTransformation = m_mapOfCameraTransformationMatrix.find(kv.first);
3045 if (itTransformation != m_mapOfCameraTransformationMatrix.end()) {
3046 trackers[kv.first]["camTref"] = itTransformation->second;
3047 }
3048 }
3049 j["trackers"] = trackers;
3050 std::ofstream f(settingsFile);
3051 if (f.good()) {
3052 const unsigned indentLevel = 4;
3053 f << j.dump(indentLevel);
3054 f.close();
3055 }
3056 else {
3057 throw vpException(vpException::ioError, "Could not save tracker configuration to JSON file: " + settingsFile);
3058 }
3059}
3060
3061#endif
3062
3077void vpMbGenericTracker::loadConfigFile(const std::string &configFile1, const std::string &configFile2, bool verbose)
3078{
3079 if (m_mapOfTrackers.size() != 2) {
3080 throw vpException(vpException::fatalError, "The tracker is not set in a stereo configuration!");
3081 }
3082
3083 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3084 TrackerWrapper *tracker = it_tracker->second;
3085 tracker->loadConfigFile(configFile1, verbose);
3086
3087 ++it_tracker;
3088 tracker = it_tracker->second;
3089 tracker->loadConfigFile(configFile2, verbose);
3090
3092 throw vpException(vpException::fatalError, "Cannot find the reference camera: %s!", m_referenceCameraName.c_str());
3093 }
3094
3095 m_mapOfTrackers[m_referenceCameraName]->getCameraParameters(m_cam);
3096 this->angleAppears = m_mapOfTrackers[m_referenceCameraName]->getAngleAppear();
3097 this->angleDisappears = m_mapOfTrackers[m_referenceCameraName]->getAngleDisappear();
3098 this->clippingFlag = m_mapOfTrackers[m_referenceCameraName]->getClipping();
3099}
3100
3114void vpMbGenericTracker::loadConfigFile(const std::map<std::string, std::string> &mapOfConfigFiles, bool verbose)
3115{
3116 for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3117 it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
3118 TrackerWrapper *tracker = it_tracker->second;
3119
3120 std::map<std::string, std::string>::const_iterator it_config = mapOfConfigFiles.find(it_tracker->first);
3121 if (it_config != mapOfConfigFiles.end()) {
3122 tracker->loadConfigFile(it_config->second, verbose);
3123 }
3124 else {
3125 throw vpException(vpTrackingException::initializationError, "Missing configuration file for camera: %s!",
3126 it_tracker->first.c_str());
3127 }
3128 }
3129
3130 // Set the reference camera parameters
3131 std::map<std::string, TrackerWrapper *>::iterator it = m_mapOfTrackers.find(m_referenceCameraName);
3132 if (it != m_mapOfTrackers.end()) {
3133 TrackerWrapper *tracker = it->second;
3134 tracker->getCameraParameters(m_cam);
3135
3136 // Set clipping
3137 this->clippingFlag = tracker->getClipping();
3138 this->angleAppears = tracker->getAngleAppear();
3139 this->angleDisappears = tracker->getAngleDisappear();
3140 }
3141 else {
3142 throw vpException(vpTrackingException::initializationError, "The reference camera: %s does not exist!",
3143 m_referenceCameraName.c_str());
3144 }
3145}
3146
3165void vpMbGenericTracker::loadModel(const std::string &modelFile, bool verbose, const vpHomogeneousMatrix &T)
3166{
3167 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3168 it != m_mapOfTrackers.end(); ++it) {
3169 TrackerWrapper *tracker = it->second;
3170 tracker->loadModel(modelFile, verbose, T);
3171 }
3172}
3173
3196void vpMbGenericTracker::loadModel(const std::string &modelFile1, const std::string &modelFile2, bool verbose,
3197 const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
3198{
3199 if (m_mapOfTrackers.size() != 2) {
3200 throw vpException(vpException::fatalError, "The tracker is not set in a stereo configuration!");
3201 }
3202
3203 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3204 TrackerWrapper *tracker = it_tracker->second;
3205 tracker->loadModel(modelFile1, verbose, T1);
3206
3207 ++it_tracker;
3208 tracker = it_tracker->second;
3209 tracker->loadModel(modelFile2, verbose, T2);
3210}
3211
3231void vpMbGenericTracker::loadModel(const std::map<std::string, std::string> &mapOfModelFiles, bool verbose,
3232 const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
3233{
3234 for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3235 it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
3236 std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(it_tracker->first);
3237
3238 if (it_model != mapOfModelFiles.end()) {
3239 TrackerWrapper *tracker = it_tracker->second;
3240 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
3241
3242 if (it_T != mapOfT.end())
3243 tracker->loadModel(it_model->second, verbose, it_T->second);
3244 else
3245 tracker->loadModel(it_model->second, verbose);
3246 }
3247 else {
3248 throw vpException(vpTrackingException::initializationError, "Cannot load model for camera: %s",
3249 it_tracker->first.c_str());
3250 }
3251 }
3252}
3253
#ifdef VISP_HAVE_PCL
/*!
  Forward to each camera tracker its grayscale image and PCL point cloud for the
  pre-tracking step.
*/
void vpMbGenericTracker::preTracking(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
                                     std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
{
  for (const auto &nameAndTracker : m_mapOfTrackers) {
    const std::string &cameraName = nameAndTracker.first;
    nameAndTracker.second->preTracking(mapOfImages[cameraName], mapOfPointClouds[cameraName]);
  }
}
#endif
3265
3266void vpMbGenericTracker::preTracking(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3267 std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
3268 std::map<std::string, unsigned int> &mapOfPointCloudWidths,
3269 std::map<std::string, unsigned int> &mapOfPointCloudHeights)
3270{
3271 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3272 it != m_mapOfTrackers.end(); ++it) {
3273 TrackerWrapper *tracker = it->second;
3274 tracker->preTracking(mapOfImages[it->first], mapOfPointClouds[it->first], mapOfPointCloudWidths[it->first],
3275 mapOfPointCloudHeights[it->first]);
3276 }
3277}
3278
3290void vpMbGenericTracker::reInitModel(const vpImage<unsigned char> &I, const std::string &cad_name,
3291 const vpHomogeneousMatrix &cMo, bool verbose, const vpHomogeneousMatrix &T)
3292{
3293 if (m_mapOfTrackers.size() != 1) {
3294 throw vpException(vpTrackingException::fatalError, "This method requires exactly one camera, there are %d cameras!",
3295 m_mapOfTrackers.size());
3296 }
3297
3298 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3299 if (it_tracker != m_mapOfTrackers.end()) {
3300 TrackerWrapper *tracker = it_tracker->second;
3301 tracker->reInitModel(I, cad_name, cMo, verbose, T);
3302
3303 // Set reference pose
3304 tracker->getPose(m_cMo);
3305 }
3306 else {
3307 throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() the reference camera!");
3308 }
3309
3310 modelInitialised = true;
3311}
3312
3324void vpMbGenericTracker::reInitModel(const vpImage<vpRGBa> &I_color, const std::string &cad_name,
3325 const vpHomogeneousMatrix &cMo, bool verbose, const vpHomogeneousMatrix &T)
3326{
3327 if (m_mapOfTrackers.size() != 1) {
3328 throw vpException(vpTrackingException::fatalError, "This method requires exactly one camera, there are %d cameras!",
3329 m_mapOfTrackers.size());
3330 }
3331
3332 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3333 if (it_tracker != m_mapOfTrackers.end()) {
3334 TrackerWrapper *tracker = it_tracker->second;
3335 tracker->reInitModel(I_color, cad_name, cMo, verbose, T);
3336
3337 // Set reference pose
3338 tracker->getPose(m_cMo);
3339 }
3340 else {
3341 throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() the reference camera!");
3342 }
3343
3344 modelInitialised = true;
3345}
3346
3368 const std::string &cad_name1, const std::string &cad_name2,
3369 const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo, bool verbose,
3370 const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
3371{
3372 if (m_mapOfTrackers.size() == 2) {
3373 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3374
3375 it_tracker->second->reInitModel(I1, cad_name1, c1Mo, verbose, T1);
3376
3377 ++it_tracker;
3378
3379 it_tracker->second->reInitModel(I2, cad_name2, c2Mo, verbose, T2);
3380
3381 it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3382 if (it_tracker != m_mapOfTrackers.end()) {
3383 // Set reference pose
3384 it_tracker->second->getPose(m_cMo);
3385 }
3386 }
3387 else {
3388 throw vpException(vpTrackingException::fatalError, "This method requires exactly two cameras!");
3389 }
3390
3391 modelInitialised = true;
3392}
3393
3415 const std::string &cad_name1, const std::string &cad_name2,
3416 const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo, bool verbose,
3417 const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
3418{
3419 if (m_mapOfTrackers.size() == 2) {
3420 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3421
3422 it_tracker->second->reInitModel(I_color1, cad_name1, c1Mo, verbose, T1);
3423
3424 ++it_tracker;
3425
3426 it_tracker->second->reInitModel(I_color2, cad_name2, c2Mo, verbose, T2);
3427
3428 it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3429 if (it_tracker != m_mapOfTrackers.end()) {
3430 // Set reference pose
3431 it_tracker->second->getPose(m_cMo);
3432 }
3433 }
3434 else {
3435 throw vpException(vpTrackingException::fatalError, "This method requires exactly two cameras!");
3436 }
3437
3438 modelInitialised = true;
3439}
3440
3455void vpMbGenericTracker::reInitModel(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3456 const std::map<std::string, std::string> &mapOfModelFiles,
3457 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses, bool verbose,
3458 const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
3459{
3460 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3461 std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
3462 mapOfImages.find(m_referenceCameraName);
3463 std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(m_referenceCameraName);
3464 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
3465
3466 if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() &&
3467 it_camPose != mapOfCameraPoses.end()) {
3468 TrackerWrapper *tracker = it_tracker->second;
3469 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
3470 if (it_T != mapOfT.end())
3471 tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose, it_T->second);
3472 else
3473 tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3474
3475 // Set reference pose
3476 tracker->getPose(m_cMo);
3477 }
3478 else {
3479 throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() for reference camera!");
3480 }
3481
3482 std::vector<std::string> vectorOfMissingCameras;
3483 for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
3484 if (it_tracker->first != m_referenceCameraName) {
3485 it_img = mapOfImages.find(it_tracker->first);
3486 it_model = mapOfModelFiles.find(it_tracker->first);
3487 it_camPose = mapOfCameraPoses.find(it_tracker->first);
3488
3489 if (it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() && it_camPose != mapOfCameraPoses.end()) {
3490 TrackerWrapper *tracker = it_tracker->second;
3491 tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3492 }
3493 else {
3494 vectorOfMissingCameras.push_back(it_tracker->first);
3495 }
3496 }
3497 }
3498
3499 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameras.begin(); it != vectorOfMissingCameras.end();
3500 ++it) {
3501 it_img = mapOfImages.find(*it);
3502 it_model = mapOfModelFiles.find(*it);
3503 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
3505
3506 if (it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() &&
3507 it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
3508 vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
3509 m_mapOfTrackers[*it]->reInitModel(*it_img->second, it_model->second, cCurrentMo, verbose);
3510 }
3511 }
3512
3513 modelInitialised = true;
3514}
3515
3530void vpMbGenericTracker::reInitModel(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
3531 const std::map<std::string, std::string> &mapOfModelFiles,
3532 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses, bool verbose,
3533 const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
3534{
3535 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3536 std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
3537 std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(m_referenceCameraName);
3538 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
3539
3540 if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
3541 it_camPose != mapOfCameraPoses.end()) {
3542 TrackerWrapper *tracker = it_tracker->second;
3543 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
3544 if (it_T != mapOfT.end())
3545 tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose, it_T->second);
3546 else
3547 tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3548
3549 // Set reference pose
3550 tracker->getPose(m_cMo);
3551 }
3552 else {
3553 throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() for reference camera!");
3554 }
3555
3556 std::vector<std::string> vectorOfMissingCameras;
3557 for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
3558 if (it_tracker->first != m_referenceCameraName) {
3559 it_img = mapOfColorImages.find(it_tracker->first);
3560 it_model = mapOfModelFiles.find(it_tracker->first);
3561 it_camPose = mapOfCameraPoses.find(it_tracker->first);
3562
3563 if (it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
3564 it_camPose != mapOfCameraPoses.end()) {
3565 TrackerWrapper *tracker = it_tracker->second;
3566 tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3567 }
3568 else {
3569 vectorOfMissingCameras.push_back(it_tracker->first);
3570 }
3571 }
3572 }
3573
3574 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameras.begin(); it != vectorOfMissingCameras.end();
3575 ++it) {
3576 it_img = mapOfColorImages.find(*it);
3577 it_model = mapOfModelFiles.find(*it);
3578 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
3580
3581 if (it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
3582 it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
3583 vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
3584 m_mapOfTrackers[*it]->reInitModel(*it_img->second, it_model->second, cCurrentMo, verbose);
3585 }
3586 }
3587
3588 modelInitialised = true;
3589}
3590
3596{
3597 m_cMo.eye();
3598
3599 useScanLine = false;
3600
3601#ifdef VISP_HAVE_OGRE
3602 useOgre = false;
3603#endif
3604
3605 m_computeInteraction = true;
3606 m_lambda = 1.0;
3607
3611 distNearClip = 0.001;
3612 distFarClip = 100;
3613
3615 m_maxIter = 30;
3616 m_stopCriteriaEpsilon = 1e-8;
3617 m_initialMu = 0.01;
3618
3619 // Only for Edge
3620 m_percentageGdPt = 0.4;
3621
3622 // Only for KLT
3623 m_thresholdOutlier = 0.5;
3624
3625 // Reset default ponderation between each feature type
3627
3628#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
3630#endif
3631
3634
3635 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3636 it != m_mapOfTrackers.end(); ++it) {
3637 TrackerWrapper *tracker = it->second;
3638 tracker->resetTracker();
3639 }
3640}
3641
3652{
3654
3655 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3656 it != m_mapOfTrackers.end(); ++it) {
3657 TrackerWrapper *tracker = it->second;
3658 tracker->setAngleAppear(a);
3659 }
3660}
3661
3674void vpMbGenericTracker::setAngleAppear(const double &a1, const double &a2)
3675{
3676 if (m_mapOfTrackers.size() == 2) {
3677 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3678 it->second->setAngleAppear(a1);
3679
3680 ++it;
3681 it->second->setAngleAppear(a2);
3682
3684 if (it != m_mapOfTrackers.end()) {
3685 angleAppears = it->second->getAngleAppear();
3686 }
3687 else {
3688 std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3689 }
3690 }
3691 else {
3692 throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3693 m_mapOfTrackers.size());
3694 }
3695}
3696
3706void vpMbGenericTracker::setAngleAppear(const std::map<std::string, double> &mapOfAngles)
3707{
3708 for (std::map<std::string, double>::const_iterator it = mapOfAngles.begin(); it != mapOfAngles.end(); ++it) {
3709 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3710
3711 if (it_tracker != m_mapOfTrackers.end()) {
3712 TrackerWrapper *tracker = it_tracker->second;
3713 tracker->setAngleAppear(it->second);
3714
3715 if (it->first == m_referenceCameraName) {
3716 angleAppears = it->second;
3717 }
3718 }
3719 }
3720}
3721
3732{
3734
3735 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3736 it != m_mapOfTrackers.end(); ++it) {
3737 TrackerWrapper *tracker = it->second;
3738 tracker->setAngleDisappear(a);
3739 }
3740}
3741
3754void vpMbGenericTracker::setAngleDisappear(const double &a1, const double &a2)
3755{
3756 if (m_mapOfTrackers.size() == 2) {
3757 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3758 it->second->setAngleDisappear(a1);
3759
3760 ++it;
3761 it->second->setAngleDisappear(a2);
3762
3764 if (it != m_mapOfTrackers.end()) {
3765 angleDisappears = it->second->getAngleDisappear();
3766 }
3767 else {
3768 std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3769 }
3770 }
3771 else {
3772 throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3773 m_mapOfTrackers.size());
3774 }
3775}
3776
3786void vpMbGenericTracker::setAngleDisappear(const std::map<std::string, double> &mapOfAngles)
3787{
3788 for (std::map<std::string, double>::const_iterator it = mapOfAngles.begin(); it != mapOfAngles.end(); ++it) {
3789 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3790
3791 if (it_tracker != m_mapOfTrackers.end()) {
3792 TrackerWrapper *tracker = it_tracker->second;
3793 tracker->setAngleDisappear(it->second);
3794
3795 if (it->first == m_referenceCameraName) {
3796 angleDisappears = it->second;
3797 }
3798 }
3799 }
3800}
3801
3808{
3810
3811 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3812 it != m_mapOfTrackers.end(); ++it) {
3813 TrackerWrapper *tracker = it->second;
3814 tracker->setCameraParameters(camera);
3815 }
3816}
3817
3827{
3828 if (m_mapOfTrackers.size() == 2) {
3829 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3830 it->second->setCameraParameters(camera1);
3831
3832 ++it;
3833 it->second->setCameraParameters(camera2);
3834
3836 if (it != m_mapOfTrackers.end()) {
3837 it->second->getCameraParameters(m_cam);
3838 }
3839 else {
3840 std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3841 }
3842 }
3843 else {
3844 throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3845 m_mapOfTrackers.size());
3846 }
3847}
3848
3857void vpMbGenericTracker::setCameraParameters(const std::map<std::string, vpCameraParameters> &mapOfCameraParameters)
3858{
3859 for (std::map<std::string, vpCameraParameters>::const_iterator it = mapOfCameraParameters.begin();
3860 it != mapOfCameraParameters.end(); ++it) {
3861 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3862
3863 if (it_tracker != m_mapOfTrackers.end()) {
3864 TrackerWrapper *tracker = it_tracker->second;
3865 tracker->setCameraParameters(it->second);
3866
3867 if (it->first == m_referenceCameraName) {
3868 m_cam = it->second;
3869 }
3870 }
3871 }
3872}
3873
3882void vpMbGenericTracker::setCameraTransformationMatrix(const std::string &cameraName,
3883 const vpHomogeneousMatrix &cameraTransformationMatrix)
3884{
3885 std::map<std::string, vpHomogeneousMatrix>::iterator it = m_mapOfCameraTransformationMatrix.find(cameraName);
3886
3887 if (it != m_mapOfCameraTransformationMatrix.end()) {
3888 it->second = cameraTransformationMatrix;
3889 }
3890 else {
3891 throw vpException(vpTrackingException::fatalError, "Cannot find camera: %s!", cameraName.c_str());
3892 }
3893}
3894
3903 const std::map<std::string, vpHomogeneousMatrix> &mapOfTransformationMatrix)
3904{
3905 for (std::map<std::string, vpHomogeneousMatrix>::const_iterator it = mapOfTransformationMatrix.begin();
3906 it != mapOfTransformationMatrix.end(); ++it) {
3907 std::map<std::string, vpHomogeneousMatrix>::iterator it_camTrans =
3908 m_mapOfCameraTransformationMatrix.find(it->first);
3909
3910 if (it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
3911 it_camTrans->second = it->second;
3912 }
3913 }
3914}
3915
3925void vpMbGenericTracker::setClipping(const unsigned int &flags)
3926{
3928
3929 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3930 it != m_mapOfTrackers.end(); ++it) {
3931 TrackerWrapper *tracker = it->second;
3932 tracker->setClipping(flags);
3933 }
3934}
3935
3946void vpMbGenericTracker::setClipping(const unsigned int &flags1, const unsigned int &flags2)
3947{
3948 if (m_mapOfTrackers.size() == 2) {
3949 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3950 it->second->setClipping(flags1);
3951
3952 ++it;
3953 it->second->setClipping(flags2);
3954
3956 if (it != m_mapOfTrackers.end()) {
3957 clippingFlag = it->second->getClipping();
3958 }
3959 else {
3960 std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3961 }
3962 }
3963 else {
3964 std::stringstream ss;
3965 ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
3967 }
3968}
3969
3977void vpMbGenericTracker::setClipping(const std::map<std::string, unsigned int> &mapOfClippingFlags)
3978{
3979 for (std::map<std::string, unsigned int>::const_iterator it = mapOfClippingFlags.begin();
3980 it != mapOfClippingFlags.end(); ++it) {
3981 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3982
3983 if (it_tracker != m_mapOfTrackers.end()) {
3984 TrackerWrapper *tracker = it_tracker->second;
3985 tracker->setClipping(it->second);
3986
3987 if (it->first == m_referenceCameraName) {
3988 clippingFlag = it->second;
3989 }
3990 }
3991 }
3992}
3993
4004{
4005 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4006 it != m_mapOfTrackers.end(); ++it) {
4007 TrackerWrapper *tracker = it->second;
4008 tracker->setDepthDenseFilteringMaxDistance(maxDistance);
4009 }
4010}
4011
4021{
4022 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4023 it != m_mapOfTrackers.end(); ++it) {
4024 TrackerWrapper *tracker = it->second;
4025 tracker->setDepthDenseFilteringMethod(method);
4026 }
4027}
4028
4039{
4040 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4041 it != m_mapOfTrackers.end(); ++it) {
4042 TrackerWrapper *tracker = it->second;
4043 tracker->setDepthDenseFilteringMinDistance(minDistance);
4044 }
4045}
4046
4057{
4058 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4059 it != m_mapOfTrackers.end(); ++it) {
4060 TrackerWrapper *tracker = it->second;
4061 tracker->setDepthDenseFilteringOccupancyRatio(occupancyRatio);
4062 }
4063}
4064
4073void vpMbGenericTracker::setDepthDenseSamplingStep(unsigned int stepX, unsigned int stepY)
4074{
4075 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4076 it != m_mapOfTrackers.end(); ++it) {
4077 TrackerWrapper *tracker = it->second;
4078 tracker->setDepthDenseSamplingStep(stepX, stepY);
4079 }
4080}
4081
4090{
4091 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4092 it != m_mapOfTrackers.end(); ++it) {
4093 TrackerWrapper *tracker = it->second;
4094 tracker->setDepthNormalFaceCentroidMethod(method);
4095 }
4096}
4097
4107{
4108 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4109 it != m_mapOfTrackers.end(); ++it) {
4110 TrackerWrapper *tracker = it->second;
4111 tracker->setDepthNormalFeatureEstimationMethod(method);
4112 }
4113}
4114
4123{
4124 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4125 it != m_mapOfTrackers.end(); ++it) {
4126 TrackerWrapper *tracker = it->second;
4127 tracker->setDepthNormalPclPlaneEstimationMethod(method);
4128 }
4129}
4130
4139{
4140 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4141 it != m_mapOfTrackers.end(); ++it) {
4142 TrackerWrapper *tracker = it->second;
4143 tracker->setDepthNormalPclPlaneEstimationRansacMaxIter(maxIter);
4144 }
4145}
4146
4155{
4156 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4157 it != m_mapOfTrackers.end(); ++it) {
4158 TrackerWrapper *tracker = it->second;
4159 tracker->setDepthNormalPclPlaneEstimationRansacThreshold(thresold);
4160 }
4161}
4162
4171void vpMbGenericTracker::setDepthNormalSamplingStep(unsigned int stepX, unsigned int stepY)
4172{
4173 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4174 it != m_mapOfTrackers.end(); ++it) {
4175 TrackerWrapper *tracker = it->second;
4176 tracker->setDepthNormalSamplingStep(stepX, stepY);
4177 }
4178}
4179
4199{
4201
4202 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4203 it != m_mapOfTrackers.end(); ++it) {
4204 TrackerWrapper *tracker = it->second;
4205 tracker->setDisplayFeatures(displayF);
4206 }
4207}
4208
4217{
4219
4220 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4221 it != m_mapOfTrackers.end(); ++it) {
4222 TrackerWrapper *tracker = it->second;
4223 tracker->setFarClippingDistance(dist);
4224 }
4225}
4226
4235void vpMbGenericTracker::setFarClippingDistance(const double &dist1, const double &dist2)
4236{
4237 if (m_mapOfTrackers.size() == 2) {
4238 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4239 it->second->setFarClippingDistance(dist1);
4240
4241 ++it;
4242 it->second->setFarClippingDistance(dist2);
4243
4245 if (it != m_mapOfTrackers.end()) {
4246 distFarClip = it->second->getFarClippingDistance();
4247 }
4248 else {
4249 std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
4250 }
4251 }
4252 else {
4253 throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4254 m_mapOfTrackers.size());
4255 }
4256}
4257
4263void vpMbGenericTracker::setFarClippingDistance(const std::map<std::string, double> &mapOfClippingDists)
4264{
4265 for (std::map<std::string, double>::const_iterator it = mapOfClippingDists.begin(); it != mapOfClippingDists.end();
4266 ++it) {
4267 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4268
4269 if (it_tracker != m_mapOfTrackers.end()) {
4270 TrackerWrapper *tracker = it_tracker->second;
4271 tracker->setFarClippingDistance(it->second);
4272
4273 if (it->first == m_referenceCameraName) {
4274 distFarClip = it->second;
4275 }
4276 }
4277 }
4278}
4279
4286void vpMbGenericTracker::setFeatureFactors(const std::map<vpTrackerType, double> &mapOfFeatureFactors)
4287{
4288 for (std::map<vpTrackerType, double>::iterator it = m_mapOfFeatureFactors.begin(); it != m_mapOfFeatureFactors.end();
4289 ++it) {
4290 std::map<vpTrackerType, double>::const_iterator it_factor = mapOfFeatureFactors.find(it->first);
4291 if (it_factor != mapOfFeatureFactors.end()) {
4292 it->second = it_factor->second;
4293 }
4294 }
4295}
4296
4313{
4314 m_percentageGdPt = threshold;
4315
4316 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4317 it != m_mapOfTrackers.end(); ++it) {
4318 TrackerWrapper *tracker = it->second;
4319 tracker->setGoodMovingEdgesRatioThreshold(threshold);
4320 }
4321}
4322
4323#ifdef VISP_HAVE_OGRE
4336{
4337 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4338 it != m_mapOfTrackers.end(); ++it) {
4339 TrackerWrapper *tracker = it->second;
4340 tracker->setGoodNbRayCastingAttemptsRatio(ratio);
4341 }
4342}
4343
4356{
4357 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4358 it != m_mapOfTrackers.end(); ++it) {
4359 TrackerWrapper *tracker = it->second;
4360 tracker->setNbRayCastingAttemptsForVisibility(attempts);
4361 }
4362}
4363#endif
4364
4365#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
4374{
4375 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4376 it != m_mapOfTrackers.end(); ++it) {
4377 TrackerWrapper *tracker = it->second;
4378 tracker->setKltOpencv(t);
4379 }
4380}
4381
4391{
4392 if (m_mapOfTrackers.size() == 2) {
4393 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4394 it->second->setKltOpencv(t1);
4395
4396 ++it;
4397 it->second->setKltOpencv(t2);
4398 }
4399 else {
4400 throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4401 m_mapOfTrackers.size());
4402 }
4403}
4404
4410void vpMbGenericTracker::setKltOpencv(const std::map<std::string, vpKltOpencv> &mapOfKlts)
4411{
4412 for (std::map<std::string, vpKltOpencv>::const_iterator it = mapOfKlts.begin(); it != mapOfKlts.end(); ++it) {
4413 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4414
4415 if (it_tracker != m_mapOfTrackers.end()) {
4416 TrackerWrapper *tracker = it_tracker->second;
4417 tracker->setKltOpencv(it->second);
4418 }
4419 }
4420}
4421
4430{
4431 m_thresholdOutlier = th;
4432
4433 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4434 it != m_mapOfTrackers.end(); ++it) {
4435 TrackerWrapper *tracker = it->second;
4436 tracker->setKltThresholdAcceptation(th);
4437 }
4438}
4439#endif
4440
4453void vpMbGenericTracker::setLod(bool useLod, const std::string &name)
4454{
4455 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4456 it != m_mapOfTrackers.end(); ++it) {
4457 TrackerWrapper *tracker = it->second;
4458 tracker->setLod(useLod, name);
4459 }
4460}
4461
4462#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
4470void vpMbGenericTracker::setKltMaskBorder(const unsigned int &e)
4471{
4472 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4473 it != m_mapOfTrackers.end(); ++it) {
4474 TrackerWrapper *tracker = it->second;
4475 tracker->setKltMaskBorder(e);
4476 }
4477}
4478
4487void vpMbGenericTracker::setKltMaskBorder(const unsigned int &e1, const unsigned int &e2)
4488{
4489 if (m_mapOfTrackers.size() == 2) {
4490 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4491 it->second->setKltMaskBorder(e1);
4492
4493 ++it;
4494
4495 it->second->setKltMaskBorder(e2);
4496 }
4497 else {
4498 throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4499 m_mapOfTrackers.size());
4500 }
4501}
4502
4508void vpMbGenericTracker::setKltMaskBorder(const std::map<std::string, unsigned int> &mapOfErosions)
4509{
4510 for (std::map<std::string, unsigned int>::const_iterator it = mapOfErosions.begin(); it != mapOfErosions.end();
4511 ++it) {
4512 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4513
4514 if (it_tracker != m_mapOfTrackers.end()) {
4515 TrackerWrapper *tracker = it_tracker->second;
4516 tracker->setKltMaskBorder(it->second);
4517 }
4518 }
4519}
4520#endif
4521
4528{
4530
4531 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4532 it != m_mapOfTrackers.end(); ++it) {
4533 TrackerWrapper *tracker = it->second;
4534 tracker->setMask(mask);
4535 }
4536}
4537
4549void vpMbGenericTracker::setMinLineLengthThresh(double minLineLengthThresh, const std::string &name)
4550{
4551 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4552 it != m_mapOfTrackers.end(); ++it) {
4553 TrackerWrapper *tracker = it->second;
4554 tracker->setMinLineLengthThresh(minLineLengthThresh, name);
4555 }
4556}
4557
4568void vpMbGenericTracker::setMinPolygonAreaThresh(double minPolygonAreaThresh, const std::string &name)
4569{
4570 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4571 it != m_mapOfTrackers.end(); ++it) {
4572 TrackerWrapper *tracker = it->second;
4573 tracker->setMinPolygonAreaThresh(minPolygonAreaThresh, name);
4574 }
4575}
4576
4585{
4586 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4587 it != m_mapOfTrackers.end(); ++it) {
4588 TrackerWrapper *tracker = it->second;
4589 tracker->setMovingEdge(me);
4590 }
4591}
4592
4602void vpMbGenericTracker::setMovingEdge(const vpMe &me1, const vpMe &me2)
4603{
4604 if (m_mapOfTrackers.size() == 2) {
4605 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4606 it->second->setMovingEdge(me1);
4607
4608 ++it;
4609
4610 it->second->setMovingEdge(me2);
4611 }
4612 else {
4613 throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4614 m_mapOfTrackers.size());
4615 }
4616}
4617
4623void vpMbGenericTracker::setMovingEdge(const std::map<std::string, vpMe> &mapOfMe)
4624{
4625 for (std::map<std::string, vpMe>::const_iterator it = mapOfMe.begin(); it != mapOfMe.end(); ++it) {
4626 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4627
4628 if (it_tracker != m_mapOfTrackers.end()) {
4629 TrackerWrapper *tracker = it_tracker->second;
4630 tracker->setMovingEdge(it->second);
4631 }
4632 }
4633}
4634
4643{
4645
4646 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4647 it != m_mapOfTrackers.end(); ++it) {
4648 TrackerWrapper *tracker = it->second;
4649 tracker->setNearClippingDistance(dist);
4650 }
4651}
4652
4661void vpMbGenericTracker::setNearClippingDistance(const double &dist1, const double &dist2)
4662{
4663 if (m_mapOfTrackers.size() == 2) {
4664 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4665 it->second->setNearClippingDistance(dist1);
4666
4667 ++it;
4668
4669 it->second->setNearClippingDistance(dist2);
4670
4672 if (it != m_mapOfTrackers.end()) {
4673 distNearClip = it->second->getNearClippingDistance();
4674 }
4675 else {
4676 std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
4677 }
4678 }
4679 else {
4680 throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4681 m_mapOfTrackers.size());
4682 }
4683}
4684
4690void vpMbGenericTracker::setNearClippingDistance(const std::map<std::string, double> &mapOfDists)
4691{
4692 for (std::map<std::string, double>::const_iterator it = mapOfDists.begin(); it != mapOfDists.end(); ++it) {
4693 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4694
4695 if (it_tracker != m_mapOfTrackers.end()) {
4696 TrackerWrapper *tracker = it_tracker->second;
4697 tracker->setNearClippingDistance(it->second);
4698
4699 if (it->first == m_referenceCameraName) {
4700 distNearClip = it->second;
4701 }
4702 }
4703 }
4704}
4705
4719{
4720 vpMbTracker::setOgreShowConfigDialog(showConfigDialog);
4721
4722 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4723 it != m_mapOfTrackers.end(); ++it) {
4724 TrackerWrapper *tracker = it->second;
4725 tracker->setOgreShowConfigDialog(showConfigDialog);
4726 }
4727}
4728
4740{
4742
4743 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4744 it != m_mapOfTrackers.end(); ++it) {
4745 TrackerWrapper *tracker = it->second;
4746 tracker->setOgreVisibilityTest(v);
4747 }
4748
4749#ifdef VISP_HAVE_OGRE
4750 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4751 it != m_mapOfTrackers.end(); ++it) {
4752 TrackerWrapper *tracker = it->second;
4753 tracker->faces.getOgreContext()->setWindowName("Multi Generic MBT (" + it->first + ")");
4754 }
4755#endif
4756}
4757
4766{
4768
4769 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4770 it != m_mapOfTrackers.end(); ++it) {
4771 TrackerWrapper *tracker = it->second;
4772 tracker->setOptimizationMethod(opt);
4773 }
4774}
4775
4789{
4790 if (m_mapOfTrackers.size() > 1) {
4791 throw vpException(vpTrackingException::initializationError, "The function setPose() requires the generic tracker "
4792 "to be configured with only one camera!");
4793 }
4794
4795 m_cMo = cdMo;
4796
4797 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
4798 if (it != m_mapOfTrackers.end()) {
4799 TrackerWrapper *tracker = it->second;
4800 tracker->setPose(I, cdMo);
4801 }
4802 else {
4803 throw vpException(vpTrackingException::initializationError, "The reference camera: %s does not exist!",
4804 m_referenceCameraName.c_str());
4805 }
4806}
4807
4821{
4822 if (m_mapOfTrackers.size() > 1) {
4823 throw vpException(vpTrackingException::initializationError, "The function setPose() requires the generic tracker "
4824 "to be configured with only one camera!");
4825 }
4826
4827 m_cMo = cdMo;
4828
4829 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
4830 if (it != m_mapOfTrackers.end()) {
4831 TrackerWrapper *tracker = it->second;
4832 vpImageConvert::convert(I_color, m_I);
4833 tracker->setPose(m_I, cdMo);
4834 }
4835 else {
4836 throw vpException(vpTrackingException::initializationError, "The reference camera: %s does not exist!",
4837 m_referenceCameraName.c_str());
4838 }
4839}
4840
4853 const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
4854{
4855 if (m_mapOfTrackers.size() == 2) {
4856 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4857 it->second->setPose(I1, c1Mo);
4858
4859 ++it;
4860
4861 it->second->setPose(I2, c2Mo);
4862
4864 if (it != m_mapOfTrackers.end()) {
4865 // Set reference pose
4866 it->second->getPose(m_cMo);
4867 }
4868 else {
4869 throw vpException(vpTrackingException::fatalError, "The reference camera: %s does not exist!",
4870 m_referenceCameraName.c_str());
4871 }
4872 }
4873 else {
4874 throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4875 m_mapOfTrackers.size());
4876 }
4877}
4878
4891 const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
4892{
4893 if (m_mapOfTrackers.size() == 2) {
4894 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4895 it->second->setPose(I_color1, c1Mo);
4896
4897 ++it;
4898
4899 it->second->setPose(I_color2, c2Mo);
4900
4902 if (it != m_mapOfTrackers.end()) {
4903 // Set reference pose
4904 it->second->getPose(m_cMo);
4905 }
4906 else {
4907 throw vpException(vpTrackingException::fatalError, "The reference camera: %s does not exist!",
4908 m_referenceCameraName.c_str());
4909 }
4910 }
4911 else {
4912 throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4913 m_mapOfTrackers.size());
4914 }
4915}
4916
4932void vpMbGenericTracker::setPose(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
4933 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
4934{
4935 // Set the reference cMo
4936 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
4937 std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
4938 mapOfImages.find(m_referenceCameraName);
4939 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
4940
4941 if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
4942 TrackerWrapper *tracker = it_tracker->second;
4943 tracker->setPose(*it_img->second, it_camPose->second);
4944 tracker->getPose(m_cMo);
4945 }
4946 else {
4947 throw vpException(vpTrackingException::fatalError, "Cannot set pose for the reference camera!");
4948 }
4949
4950 // Vector of missing pose matrices for cameras
4951 std::vector<std::string> vectorOfMissingCameraPoses;
4952
4953 // Set pose for the specified cameras
4954 for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
4955 if (it_tracker->first != m_referenceCameraName) {
4956 it_img = mapOfImages.find(it_tracker->first);
4957 it_camPose = mapOfCameraPoses.find(it_tracker->first);
4958
4959 if (it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
4960 // Set pose
4961 TrackerWrapper *tracker = it_tracker->second;
4962 tracker->setPose(*it_img->second, it_camPose->second);
4963 }
4964 else {
4965 vectorOfMissingCameraPoses.push_back(it_tracker->first);
4966 }
4967 }
4968 }
4969
4970 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
4971 it != vectorOfMissingCameraPoses.end(); ++it) {
4972 it_img = mapOfImages.find(*it);
4973 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
4975
4976 if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
4977 vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
4978 m_mapOfTrackers[*it]->setPose(*it_img->second, cCurrentMo);
4979 }
4980 else {
4982 "Missing image or missing camera transformation "
4983 "matrix! Cannot set pose for camera: %s!",
4984 it->c_str());
4985 }
4986 }
4987}
4988
5004void vpMbGenericTracker::setPose(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
5005 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
5006{
5007 // Set the reference cMo
5008 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
5009 std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
5010 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
5011
5012 if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
5013 TrackerWrapper *tracker = it_tracker->second;
5014 tracker->setPose(*it_img->second, it_camPose->second);
5015 tracker->getPose(m_cMo);
5016 }
5017 else {
5018 throw vpException(vpTrackingException::fatalError, "Cannot set pose for the reference camera!");
5019 }
5020
5021 // Vector of missing pose matrices for cameras
5022 std::vector<std::string> vectorOfMissingCameraPoses;
5023
5024 // Set pose for the specified cameras
5025 for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
5026 if (it_tracker->first != m_referenceCameraName) {
5027 it_img = mapOfColorImages.find(it_tracker->first);
5028 it_camPose = mapOfCameraPoses.find(it_tracker->first);
5029
5030 if (it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
5031 // Set pose
5032 TrackerWrapper *tracker = it_tracker->second;
5033 tracker->setPose(*it_img->second, it_camPose->second);
5034 }
5035 else {
5036 vectorOfMissingCameraPoses.push_back(it_tracker->first);
5037 }
5038 }
5039 }
5040
5041 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
5042 it != vectorOfMissingCameraPoses.end(); ++it) {
5043 it_img = mapOfColorImages.find(*it);
5044 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
5046
5047 if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
5048 vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
5049 m_mapOfTrackers[*it]->setPose(*it_img->second, cCurrentMo);
5050 }
5051 else {
5053 "Missing image or missing camera transformation "
5054 "matrix! Cannot set pose for camera: %s!",
5055 it->c_str());
5056 }
5057 }
5058}
5059
5075{
5077
5078 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5079 it != m_mapOfTrackers.end(); ++it) {
5080 TrackerWrapper *tracker = it->second;
5081 tracker->setProjectionErrorComputation(flag);
5082 }
5083}
5084
5089{
5091
5092 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5093 it != m_mapOfTrackers.end(); ++it) {
5094 TrackerWrapper *tracker = it->second;
5095 tracker->setProjectionErrorDisplay(display);
5096 }
5097}
5098
5103{
5105
5106 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5107 it != m_mapOfTrackers.end(); ++it) {
5108 TrackerWrapper *tracker = it->second;
5109 tracker->setProjectionErrorDisplayArrowLength(length);
5110 }
5111}
5112
5114{
5116
5117 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5118 it != m_mapOfTrackers.end(); ++it) {
5119 TrackerWrapper *tracker = it->second;
5120 tracker->setProjectionErrorDisplayArrowThickness(thickness);
5121 }
5122}
5123
5129void vpMbGenericTracker::setReferenceCameraName(const std::string &referenceCameraName)
5130{
5131 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(referenceCameraName);
5132 if (it != m_mapOfTrackers.end()) {
5133 m_referenceCameraName = referenceCameraName;
5134 }
5135 else {
5136 std::cerr << "The reference camera: " << referenceCameraName << " does not exist!";
5137 }
5138}
5139
5141{
5143
5144 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5145 it != m_mapOfTrackers.end(); ++it) {
5146 TrackerWrapper *tracker = it->second;
5147 tracker->setScanLineVisibilityTest(v);
5148 }
5149}
5150
5163{
5164 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5165 it != m_mapOfTrackers.end(); ++it) {
5166 TrackerWrapper *tracker = it->second;
5167 tracker->setTrackerType(type);
5168 }
5169}
5170
5180void vpMbGenericTracker::setTrackerType(const std::map<std::string, int> &mapOfTrackerTypes)
5181{
5182 for (std::map<std::string, int>::const_iterator it = mapOfTrackerTypes.begin(); it != mapOfTrackerTypes.end(); ++it) {
5183 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
5184 if (it_tracker != m_mapOfTrackers.end()) {
5185 TrackerWrapper *tracker = it_tracker->second;
5186 tracker->setTrackerType(it->second);
5187 }
5188 }
5189}
5190
5200void vpMbGenericTracker::setUseDepthDenseTracking(const std::string &name, const bool &useDepthDenseTracking)
5201{
5202 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5203 it != m_mapOfTrackers.end(); ++it) {
5204 TrackerWrapper *tracker = it->second;
5205 tracker->setUseDepthDenseTracking(name, useDepthDenseTracking);
5206 }
5207}
5208
5218void vpMbGenericTracker::setUseDepthNormalTracking(const std::string &name, const bool &useDepthNormalTracking)
5219{
5220 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5221 it != m_mapOfTrackers.end(); ++it) {
5222 TrackerWrapper *tracker = it->second;
5223 tracker->setUseDepthNormalTracking(name, useDepthNormalTracking);
5224 }
5225}
5226
5236void vpMbGenericTracker::setUseEdgeTracking(const std::string &name, const bool &useEdgeTracking)
5237{
5238 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5239 it != m_mapOfTrackers.end(); ++it) {
5240 TrackerWrapper *tracker = it->second;
5241 tracker->setUseEdgeTracking(name, useEdgeTracking);
5242 }
5243}
5244
5245#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5255void vpMbGenericTracker::setUseKltTracking(const std::string &name, const bool &useKltTracking)
5256{
5257 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5258 it != m_mapOfTrackers.end(); ++it) {
5259 TrackerWrapper *tracker = it->second;
5260 tracker->setUseKltTracking(name, useKltTracking);
5261 }
5262}
5263#endif
5264
5266{
5267 // Test tracking fails only if all testTracking have failed
5268 bool isOneTestTrackingOk = false;
5269 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5270 it != m_mapOfTrackers.end(); ++it) {
5271 TrackerWrapper *tracker = it->second;
5272 try {
5273 tracker->testTracking();
5274 isOneTestTrackingOk = true;
5275 }
5276 catch (...) {
5277 }
5278 }
5279
5280 if (!isOneTestTrackingOk) {
5281 std::ostringstream oss;
5282 oss << "Not enough moving edges to track the object. Try to reduce the "
5283 "threshold="
5284 << m_percentageGdPt << " using setGoodMovingEdgesRatioThreshold()";
5286 }
5287}
5288
5299{
5300 std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5301 mapOfImages[m_referenceCameraName] = &I;
5302
5303 std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5304 std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5305
5306 track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5307}
5308
5319{
5320 std::map<std::string, const vpImage<vpRGBa> *> mapOfColorImages;
5321 mapOfColorImages[m_referenceCameraName] = &I_color;
5322
5323 std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5324 std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5325
5326 track(mapOfColorImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5327}
5328
5340{
5341 if (m_mapOfTrackers.size() == 2) {
5342 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5343 std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5344 mapOfImages[it->first] = &I1;
5345 ++it;
5346
5347 mapOfImages[it->first] = &I2;
5348
5349 std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5350 std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5351
5352 track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5353 }
5354 else {
5355 std::stringstream ss;
5356 ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
5357 throw vpException(vpTrackingException::fatalError, ss.str().c_str());
5358 }
5359}
5360
5371void vpMbGenericTracker::track(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &_colorI2)
5372{
5373 if (m_mapOfTrackers.size() == 2) {
5374 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5375 std::map<std::string, const vpImage<vpRGBa> *> mapOfImages;
5376 mapOfImages[it->first] = &I_color1;
5377 ++it;
5378
5379 mapOfImages[it->first] = &_colorI2;
5380
5381 std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5382 std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5383
5384 track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5385 }
5386 else {
5387 std::stringstream ss;
5388 ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
5389 throw vpException(vpTrackingException::fatalError, ss.str().c_str());
5390 }
5391}
5392
5400void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages)
5401{
5402 std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5403 std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5404
5405 track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5406}
5407
5415void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages)
5416{
5417 std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5418 std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5419
5420 track(mapOfColorImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5421}
5422
5423#ifdef VISP_HAVE_PCL
/*!
  Track using grayscale images and PCL point clouds for every camera.

  \param mapOfImages : Map of camera name to grayscale image (needed by
  edge/KLT features).
  \param mapOfPointClouds : Map of camera name to PCL point cloud smart
  pointer (needed by depth features).
*/
void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
                               std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
{
  // 1) Validate that each camera has the inputs its feature types require.
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;

    // NOTE(review): the tail of this mask (depth bits and ")) == 0) {") is
    // elided in this doxygen-rendered view -- confirm against upstream.
    if ((tracker->m_trackerType & (EDGE_TRACKER |
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
                                   KLT_TRACKER |
#endif
      throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
    }

    // Image-based features require a non-NULL image. Note that operator[]
    // default-inserts a NULL entry when the camera name is absent.
    if (tracker->m_trackerType & (EDGE_TRACKER
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
                                  | KLT_TRACKER
#endif
                                  ) &&
        mapOfImages[it->first] == NULL) {
      throw vpException(vpException::fatalError, "Image pointer is NULL!");
    }

    // Depth-based features require a valid point-cloud smart pointer.
    if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
        !mapOfPointClouds[it->first]) { // mapOfPointClouds[it->first] == nullptr
      throw vpException(vpException::fatalError, "Pointcloud smart pointer is NULL!");
    }
  }

  // 2) Per-camera pre-tracking step.
  preTracking(mapOfImages, mapOfPointClouds);

  // 3) Pose estimation by virtual visual servoing; on failure the covariance
  // is invalidated and the original exception is propagated.
  try {
    computeVVS(mapOfImages);
  }
  catch (...) {
    covarianceMatrix = -1;
    throw; // throw the original exception
  }

  testTracking();

  // 4) Per-camera post-tracking and caching of feature display primitives.
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;

    if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
      tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
    }

    tracker->postTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);

    if (displayFeatures) {
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
      if (tracker->m_trackerType & KLT_TRACKER) {
        tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
      }
#endif

      if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
        tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
      }
    }
  }

}
5499
5508void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
5509 std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
5510{
5511 std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5512 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5513 it != m_mapOfTrackers.end(); ++it) {
5514 TrackerWrapper *tracker = it->second;
5515
5516 if ((tracker->m_trackerType & (EDGE_TRACKER |
5517#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5518 KLT_TRACKER |
5519#endif
5521 throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5522 }
5523
5524 if (tracker->m_trackerType & (EDGE_TRACKER
5525#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5526 | KLT_TRACKER
5527#endif
5528 ) &&
5529 mapOfImages[it->first] == NULL) {
5530 throw vpException(vpException::fatalError, "Image pointer is NULL!");
5531 }
5532 else if (tracker->m_trackerType & (EDGE_TRACKER
5533#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5534 | KLT_TRACKER
5535#endif
5536 ) &&
5537 mapOfImages[it->first] != NULL) {
5538 vpImageConvert::convert(*mapOfColorImages[it->first], tracker->m_I);
5539 mapOfImages[it->first] = &tracker->m_I; // update grayscale image buffer
5540 }
5541
5542 if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5543 !mapOfPointClouds[it->first]) { // mapOfPointClouds[it->first] == nullptr
5544 throw vpException(vpException::fatalError, "Pointcloud smart pointer is NULL!");
5545 }
5546 }
5547
5548 preTracking(mapOfImages, mapOfPointClouds);
5549
5550 try {
5551 computeVVS(mapOfImages);
5552 }
5553 catch (...) {
5554 covarianceMatrix = -1;
5555 throw; // throw the original exception
5556 }
5557
5558 testTracking();
5559
5560 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5561 it != m_mapOfTrackers.end(); ++it) {
5562 TrackerWrapper *tracker = it->second;
5563
5564 if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5565 tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5566 }
5567
5568 tracker->postTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
5569
5570 if (displayFeatures) {
5571#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5572 if (tracker->m_trackerType & KLT_TRACKER) {
5573 tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5574 }
5575#endif
5576
5577 if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5578 tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5579 }
5580 }
5581 }
5582
5584}
5585#endif
5586
/*!
  Track using grayscale images and raw point clouds (vectors of vpColVector
  3D points) for every camera.

  \param mapOfImages : Map of camera name to grayscale image (needed by
  edge/KLT features).
  \param mapOfPointClouds : Map of camera name to point cloud (needed by
  depth features).
  \param mapOfPointCloudWidths : Map of organized point-cloud widths.
  \param mapOfPointCloudHeights : Map of organized point-cloud heights.
*/
void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
                               std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
                               std::map<std::string, unsigned int> &mapOfPointCloudWidths,
                               std::map<std::string, unsigned int> &mapOfPointCloudHeights)
{
  // 1) Validate that each camera has the inputs its feature types require.
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;

    // NOTE(review): the tail of this mask (depth bits and ")) == 0) {") is
    // elided in this doxygen-rendered view -- confirm against upstream.
    if ((tracker->m_trackerType & (EDGE_TRACKER |
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
                                   KLT_TRACKER |
#endif
      throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
    }

    // Image-based features require a non-NULL image.
    if (tracker->m_trackerType & (EDGE_TRACKER
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
                                  | KLT_TRACKER
#endif
                                  ) &&
        mapOfImages[it->first] == NULL) {
      throw vpException(vpException::fatalError, "Image pointer is NULL!");
    }

    // Depth-based features require a non-NULL point cloud.
    if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
        (mapOfPointClouds[it->first] == NULL)) {
      throw vpException(vpException::fatalError, "Pointcloud is NULL!");
    }
  }

  // 2) Per-camera pre-tracking step.
  preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);

  // 3) Pose estimation by virtual visual servoing; on failure the covariance
  // is invalidated and the original exception is propagated.
  try {
    computeVVS(mapOfImages);
  }
  catch (...) {
    covarianceMatrix = -1;
    throw; // throw the original exception
  }

  testTracking();

  // 4) Per-camera post-tracking and caching of feature display primitives.
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;

    if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
      tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
    }

    tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);

    if (displayFeatures) {
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
      if (tracker->m_trackerType & KLT_TRACKER) {
        tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
      }
#endif

      if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
        tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
      }
    }
  }

}
5666
/*!
  Track using color images (converted internally to grayscale) and raw point
  clouds (vectors of vpColVector 3D points) for every camera.

  \param mapOfColorImages : Map of camera name to color image (needed by
  edge/KLT features; converted into the wrapper's grayscale buffer).
  \param mapOfPointClouds : Map of camera name to point cloud (needed by
  depth features).
  \param mapOfPointCloudWidths : Map of organized point-cloud widths.
  \param mapOfPointCloudHeights : Map of organized point-cloud heights.
*/
void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
                               std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
                               std::map<std::string, unsigned int> &mapOfPointCloudWidths,
                               std::map<std::string, unsigned int> &mapOfPointCloudHeights)
{
  std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;

    // NOTE(review): the tail of this mask (depth bits and ")) == 0) {") is
    // elided in this doxygen-rendered view -- confirm against upstream.
    if ((tracker->m_trackerType & (EDGE_TRACKER |
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
                                   KLT_TRACKER |
#endif
      throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
    }

    // Image-based features require a non-NULL color image; when present it is
    // converted and the grayscale buffer registered for the VVS step.
    if (tracker->m_trackerType & (EDGE_TRACKER
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
                                  | KLT_TRACKER
#endif
                                  ) &&
        mapOfColorImages[it->first] == NULL) {
      throw vpException(vpException::fatalError, "Image pointer is NULL!");
    }
    else if (tracker->m_trackerType & (EDGE_TRACKER
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
                                       | KLT_TRACKER
#endif
                                       ) &&
             mapOfColorImages[it->first] != NULL) {
      vpImageConvert::convert(*mapOfColorImages[it->first], tracker->m_I);
      mapOfImages[it->first] = &tracker->m_I; // update grayscale image buffer
    }

    // Depth-based features require a non-NULL point cloud.
    if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
        (mapOfPointClouds[it->first] == NULL)) {
      throw vpException(vpException::fatalError, "Pointcloud is NULL!");
    }
  }

  // Per-camera pre-tracking step on the converted grayscale images.
  preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);

  // Pose estimation by virtual visual servoing; on failure the covariance is
  // invalidated and the original exception is propagated.
  try {
    computeVVS(mapOfImages);
  }
  catch (...) {
    covarianceMatrix = -1;
    throw; // throw the original exception
  }

  testTracking();

  // Per-camera post-tracking and caching of feature display primitives.
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;

    if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
      tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
    }

    tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);

    if (displayFeatures) {
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
      if (tracker->m_trackerType & KLT_TRACKER) {
        tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
      }
#endif

      if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
        tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
      }
    }
  }

}
5756
5758vpMbGenericTracker::TrackerWrapper::TrackerWrapper()
5759 : m_error(), m_L(), m_trackerType(EDGE_TRACKER), m_w(), m_weightedError()
5760{
5761 m_lambda = 1.0;
5762 m_maxIter = 30;
5763
5764#ifdef VISP_HAVE_OGRE
5765 faces.getOgreContext()->setWindowName("MBT TrackerWrapper");
5766
5767 m_projectionErrorFaces.getOgreContext()->setWindowName("MBT TrackerWrapper (projection error)");
5768#endif
5769}
5770
5771vpMbGenericTracker::TrackerWrapper::TrackerWrapper(int trackerType)
5772 : m_error(), m_L(), m_trackerType(trackerType), m_w(), m_weightedError()
5773{
5774 if ((m_trackerType & (EDGE_TRACKER |
5775#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5776 KLT_TRACKER |
5777#endif
5779 throw vpException(vpTrackingException::badValue, "Bad value for tracker type: %d!", m_trackerType);
5780 }
5781
5782 m_lambda = 1.0;
5783 m_maxIter = 30;
5784
5785#ifdef VISP_HAVE_OGRE
5786 faces.getOgreContext()->setWindowName("MBT TrackerWrapper");
5787
5788 m_projectionErrorFaces.getOgreContext()->setWindowName("MBT TrackerWrapper (projection error)");
5789#endif
5790}
5791
// Trivial destructor: nothing to release explicitly.
vpMbGenericTracker::TrackerWrapper::~TrackerWrapper() { }
5793
// Implemented only for debugging purposes: use TrackerWrapper as a standalone tracker
// Virtual-visual-servoing pose estimation for a single wrapper: iteratively
// minimizes the stacked weighted residual (edge / KLT / depth normal / depth
// dense) by updating m_cMo with the exponential map of the velocity twist.
// NOTE(review): the function signature line is elided in this doxygen-rendered view.
{
  computeVVSInit(ptr_I);

  // At least 4 residual rows are required before attempting the minimization.
  if (m_error.getRows() < 4) {
    throw vpTrackingException(vpTrackingException::notEnoughPointError, "Error: not enough features");
  }

  double normRes = 0;
  double normRes_1 = -1;
  unsigned int iter = 0;

  // Per-feature-type scale factors applied on top of the robust weights
  // (all neutral, i.e. 1.0, in this standalone implementation).
  double factorEdge = 1.0;
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  double factorKlt = 1.0;
#endif
  double factorDepth = 1.0;
  double factorDepthDense = 1.0;

  vpMatrix LTL;
  vpColVector LTR, v;
  vpColVector error_prev;

  double mu = m_initialMu;
  vpHomogeneousMatrix cMo_prev;
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  vpHomogeneousMatrix ctTc0_Prev; // Only for KLT
#endif
  bool isoJoIdentity = m_isoJoIdentity; // Backup since it can be modified if L is not full rank
  if (isoJoIdentity)
    oJo.eye();

  // Covariance
  vpColVector W_true(m_error.getRows());
  vpMatrix L_true, LVJ_true;

  unsigned int nb_edge_features = m_error_edge.getRows();
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  unsigned int nb_klt_features = m_error_klt.getRows();
#endif
  unsigned int nb_depth_features = m_error_depthNormal.getRows();
  unsigned int nb_depth_dense_features = m_error_depthDense.getRows();

  // Iterate until the residual norm stagnates or the iteration budget is spent.
  while (std::fabs(normRes_1 - normRes) > m_stopCriteriaEpsilon && (iter < m_maxIter)) {
    computeVVSInteractionMatrixAndResidu(ptr_I);

    bool reStartFromLastIncrement = false;
    computeVVSCheckLevenbergMarquardt(iter, m_error, error_prev, cMo_prev, mu, reStartFromLastIncrement);

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
    // When the LM check rejects the last increment, also roll back the KLT
    // incremental transformation.
    if (reStartFromLastIncrement) {
      if (m_trackerType & KLT_TRACKER) {
        ctTc0 = ctTc0_Prev;
      }
    }
#endif

    if (!reStartFromLastIncrement) {
      computeVVSWeights();

      if (computeCovariance) {
        L_true = m_L;
        if (!isoJoIdentity) {
          // NOTE(review): the declaration of cVo (velocity twist matrix)
          // appears elided in this doxygen-rendered view.
          cVo.buildFrom(m_cMo);
          LVJ_true = (m_L * cVo * oJo);
        }
      }

      if (iter == 0) {
        // If all the 6 dof should be estimated, we check if the interaction
        // matrix is full rank. If not we remove automatically the dof that
        // cannot be estimated. This is particularly useful when considering
        // circles (rank 5) and cylinders (rank 4)
        if (isoJoIdentity) {
          cVo.buildFrom(m_cMo);

          vpMatrix K; // kernel
          unsigned int rank = (m_L * cVo).kernel(K);
          if (rank == 0) {
            throw vpException(vpException::fatalError, "Rank=0, cannot estimate the pose !");
          }

          if (rank != 6) {
            vpMatrix I; // Identity
            I.eye(6);
            oJo = I - K.AtA();

            isoJoIdentity = false;
          }
        }
      }

      // Weighting
      double num = 0;
      double den = 0;

      // Apply the robust weights block by block, in the same stacking order
      // as m_L / m_error, while accumulating the weighted residual norm.
      unsigned int start_index = 0;
      if (m_trackerType & EDGE_TRACKER) {
        for (unsigned int i = 0; i < nb_edge_features; i++) {
          double wi = m_w_edge[i] * m_factor[i] * factorEdge;
          W_true[i] = wi;
          m_weightedError[i] = wi * m_error[i];

          num += wi * vpMath::sqr(m_error[i]);
          den += wi;

          for (unsigned int j = 0; j < m_L.getCols(); j++) {
            m_L[i][j] *= wi;
          }
        }

        start_index += nb_edge_features;
      }

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
      if (m_trackerType & KLT_TRACKER) {
        for (unsigned int i = 0; i < nb_klt_features; i++) {
          double wi = m_w_klt[i] * factorKlt;
          W_true[start_index + i] = wi;
          m_weightedError[start_index + i] = wi * m_error_klt[i];

          num += wi * vpMath::sqr(m_error[start_index + i]);
          den += wi;

          for (unsigned int j = 0; j < m_L.getCols(); j++) {
            m_L[start_index + i][j] *= wi;
          }
        }

        start_index += nb_klt_features;
      }
#endif

      if (m_trackerType & DEPTH_NORMAL_TRACKER) {
        for (unsigned int i = 0; i < nb_depth_features; i++) {
          double wi = m_w_depthNormal[i] * factorDepth;
          m_w[start_index + i] = m_w_depthNormal[i];
          m_weightedError[start_index + i] = wi * m_error[start_index + i];

          num += wi * vpMath::sqr(m_error[start_index + i]);
          den += wi;

          for (unsigned int j = 0; j < m_L.getCols(); j++) {
            m_L[start_index + i][j] *= wi;
          }
        }

        start_index += nb_depth_features;
      }

      if (m_trackerType & DEPTH_DENSE_TRACKER) {
        for (unsigned int i = 0; i < nb_depth_dense_features; i++) {
          double wi = m_w_depthDense[i] * factorDepthDense;
          m_w[start_index + i] = m_w_depthDense[i];
          m_weightedError[start_index + i] = wi * m_error[start_index + i];

          num += wi * vpMath::sqr(m_error[start_index + i]);
          den += wi;

          for (unsigned int j = 0; j < m_L.getCols(); j++) {
            m_L[start_index + i][j] *= wi;
          }
        }

        // start_index += nb_depth_dense_features;
      }

      // Solve for the velocity twist v and update mu (LM damping).
      computeVVSPoseEstimation(isoJoIdentity, iter, m_L, LTL, m_weightedError, m_error, error_prev, LTR, mu, v);

      cMo_prev = m_cMo;
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
      if (m_trackerType & KLT_TRACKER) {
        ctTc0_Prev = ctTc0;
      }
#endif

      // Apply the pose increment via the exponential map.
      m_cMo = vpExponentialMap::direct(v).inverse() * m_cMo;

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
      if (m_trackerType & KLT_TRACKER) {
        ctTc0 = vpExponentialMap::direct(v).inverse() * ctTc0;
      }
#endif
      normRes_1 = normRes;

      normRes = sqrt(num / den);
    }

    iter++;
  }

  computeCovarianceMatrixVVS(isoJoIdentity, W_true, cMo_prev, L_true, LVJ_true, m_error);

  if (m_trackerType & EDGE_TRACKER) {
    // NOTE(review): the edge-specific post-VVS statement appears elided in
    // this doxygen-rendered view.
  }
}
5994
5995void vpMbGenericTracker::TrackerWrapper::computeVVSInit()
5996{
5997 throw vpException(vpException::fatalError, "vpMbGenericTracker::"
5998 "TrackerWrapper::computeVVSInit("
5999 ") should not be called!");
6000}
6001
// Size the stacked VVS containers (m_L, m_error, m_weightedError, m_w) from
// the enabled feature types, clearing the buffers of disabled types so stale
// data cannot leak into the stacked system.
void vpMbGenericTracker::TrackerWrapper::computeVVSInit(const vpImage<unsigned char> *const ptr_I)
{
  initMbtTracking(ptr_I);

  unsigned int nbFeatures = 0;

  if (m_trackerType & EDGE_TRACKER) {
    // NOTE(review): an edge-specific init call may be elided here in this
    // doxygen-rendered view -- confirm against upstream.
    nbFeatures += m_error_edge.getRows();
  }
  else {
    m_error_edge.clear();
    m_weightedError_edge.clear();
    m_L_edge.clear();
    m_w_edge.clear();
  }

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  if (m_trackerType & KLT_TRACKER) {
    // NOTE(review): the KLT-specific init call appears elided in this view.
    nbFeatures += m_error_klt.getRows();
  }
  else {
    m_error_klt.clear();
    m_weightedError_klt.clear();
    m_L_klt.clear();
    m_w_klt.clear();
  }
#endif

  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
    // NOTE(review): the depth-normal init call appears elided in this view.
    nbFeatures += m_error_depthNormal.getRows();
  }
  else {
    m_error_depthNormal.clear();
    m_weightedError_depthNormal.clear();
    m_L_depthNormal.clear();
    m_w_depthNormal.clear();
  }

  if (m_trackerType & DEPTH_DENSE_TRACKER) {
    // NOTE(review): the depth-dense init call appears elided in this view.
    nbFeatures += m_error_depthDense.getRows();
  }
  else {
    m_error_depthDense.clear();
    m_weightedError_depthDense.clear();
    m_L_depthDense.clear();
    m_w_depthDense.clear();
  }

  // Allocate the stacked containers; weights default to 1 (no rejection yet).
  m_L.resize(nbFeatures, 6, false, false);
  m_error.resize(nbFeatures, false);

  m_weightedError.resize(nbFeatures, false);
  m_w.resize(nbFeatures, false);
  m_w = 1;
}
6060
6061void vpMbGenericTracker::TrackerWrapper::computeVVSInteractionMatrixAndResidu()
6062{
6063 throw vpException(vpException::fatalError, "vpMbGenericTracker::"
6064 "TrackerWrapper::"
6065 "computeVVSInteractionMatrixAndR"
6066 "esidu() should not be called!");
6067}
6068
// Compute the per-feature-type interaction matrices and residuals, then stack
// them into the global m_L / m_error in a fixed order (edge, KLT, depth
// normal, depth dense) matching computeVVS().
void vpMbGenericTracker::TrackerWrapper::computeVVSInteractionMatrixAndResidu(const vpImage<unsigned char> *const ptr_I)
{
  // NOTE(review): the per-type computation calls inside the four blocks below
  // appear elided in this doxygen-rendered view -- confirm against upstream.
  if (m_trackerType & EDGE_TRACKER) {
  }

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  if (m_trackerType & KLT_TRACKER) {
  }
#endif

  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
  }

  if (m_trackerType & DEPTH_DENSE_TRACKER) {
  }

  // Stack each enabled feature block at its running row offset.
  unsigned int start_index = 0;
  if (m_trackerType & EDGE_TRACKER) {
    m_L.insert(m_L_edge, start_index, 0);
    m_error.insert(start_index, m_error_edge);

    start_index += m_error_edge.getRows();
  }

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  if (m_trackerType & KLT_TRACKER) {
    m_L.insert(m_L_klt, start_index, 0);
    m_error.insert(start_index, m_error_klt);

    start_index += m_error_klt.getRows();
  }
#endif

  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
    m_L.insert(m_L_depthNormal, start_index, 0);
    m_error.insert(start_index, m_error_depthNormal);

    start_index += m_error_depthNormal.getRows();
  }

  if (m_trackerType & DEPTH_DENSE_TRACKER) {
    m_L.insert(m_L_depthDense, start_index, 0);
    m_error.insert(start_index, m_error_depthDense);

    // start_index += m_error_depthDense.getRows();
  }
}
6120
// Update the robust (M-estimator) weights per feature type and stack them
// into the global m_w vector, in the same row order as m_L / m_error.
void vpMbGenericTracker::TrackerWrapper::computeVVSWeights()
{
  unsigned int start_index = 0;

  if (m_trackerType & EDGE_TRACKER) {
    // NOTE(review): the edge weight-computation call appears elided in this
    // doxygen-rendered view -- confirm against upstream.
    m_w.insert(start_index, m_w_edge);

    start_index += m_w_edge.getRows();
  }

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  if (m_trackerType & KLT_TRACKER) {
    vpMbTracker::computeVVSWeights(m_robust_klt, m_error_klt, m_w_klt);
    m_w.insert(start_index, m_w_klt);

    start_index += m_w_klt.getRows();
  }
#endif

  if (m_trackerType & DEPTH_NORMAL_TRACKER) {
    // Depth-normal weights are only recomputed when robust estimation is
    // enabled for that feature type.
    if (m_depthNormalUseRobust) {
      vpMbTracker::computeVVSWeights(m_robust_depthNormal, m_error_depthNormal, m_w_depthNormal);
      m_w.insert(start_index, m_w_depthNormal);
    }

    start_index += m_w_depthNormal.getRows();
  }

  if (m_trackerType & DEPTH_DENSE_TRACKER) {
    // NOTE(review): the depth-dense weight-computation call appears elided in
    // this doxygen-rendered view -- confirm against upstream.
    m_w.insert(start_index, m_w_depthDense);

    // start_index += m_w_depthDense.getRows();
  }
}
6157
// Display the tracked features (when displayFeatures is set) and the model
// primitives for this wrapper on a grayscale image.
// features[i][0] encodes the feature kind: 0 = moving-edge site (drawn per
// vpMeSite state), 1 = point with an id label (presumably KLT -- confirm),
// 2 = centroid/extremity arrow (depth normal).
void vpMbGenericTracker::TrackerWrapper::display(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cMo,
                                                 const vpCameraParameters &cam, const vpColor &col,
                                                 unsigned int thickness, bool displayFullModel)
{
  if (displayFeatures) {
    std::vector<std::vector<double> > features = getFeaturesForDisplay();
    for (size_t i = 0; i < features.size(); i++) {
      if (vpMath::equal(features[i][0], 0)) {
        vpImagePoint ip(features[i][1], features[i][2]);
        int state = static_cast<int>(features[i][3]);

        // NOTE(review): several case labels and the display calls of this
        // switch appear elided in this doxygen-rendered view.
        switch (state) {
          break;

        case vpMeSite::CONTRAST:
          break;

          break;

          break;

        case vpMeSite::TOO_NEAR:
          break;

        default:
        }
      }
      else if (vpMath::equal(features[i][0], 1)) {
        vpImagePoint ip1(features[i][1], features[i][2]);

        vpImagePoint ip2(features[i][3], features[i][4]);
        double id = features[i][5];
        std::stringstream ss;
        ss << id;
        vpDisplay::displayText(I, ip2, ss.str(), vpColor::red);
      }
      else if (vpMath::equal(features[i][0], 2)) {
        vpImagePoint im_centroid(features[i][1], features[i][2]);
        vpImagePoint im_extremity(features[i][3], features[i][4]);
        bool desired = vpMath::equal(features[i][0], 2);
        vpDisplay::displayArrow(I, im_centroid, im_extremity, desired ? vpColor::blue : vpColor::red, 4, 2, thickness);
      }
    }
  }

  // Model primitives: [0]==0 line segment, [0]==1 ellipse (normalized moments).
  std::vector<std::vector<double> > models =
    getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);
  for (size_t i = 0; i < models.size(); i++) {
    if (vpMath::equal(models[i][0], 0)) {
      vpImagePoint ip1(models[i][1], models[i][2]);
      vpImagePoint ip2(models[i][3], models[i][4]);
      vpDisplay::displayLine(I, ip1, ip2, col, thickness);
    }
    else if (vpMath::equal(models[i][0], 1)) {
      vpImagePoint center(models[i][1], models[i][2]);
      double n20 = models[i][3];
      double n11 = models[i][4];
      double n02 = models[i][5];
      vpDisplay::displayEllipse(I, center, n20, n11, n02, true, col, thickness);
    }
  }

#ifdef VISP_HAVE_OGRE
  // Ogre overlay is only meaningful for image-based feature types.
  if ((m_trackerType & EDGE_TRACKER)
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
      || (m_trackerType & KLT_TRACKER)
#endif
      ) {
    if (useOgre)
      faces.displayOgre(cMo);
  }
#endif
}
6241
// Color-image variant of display(): same feature/model rendering as the
// grayscale overload above. features[i][0]: 0 = moving-edge site, 1 = point
// with id label (presumably KLT -- confirm), 2 = depth-normal arrow.
void vpMbGenericTracker::TrackerWrapper::display(const vpImage<vpRGBa> &I, const vpHomogeneousMatrix &cMo,
                                                 const vpCameraParameters &cam, const vpColor &col,
                                                 unsigned int thickness, bool displayFullModel)
{
  if (displayFeatures) {
    std::vector<std::vector<double> > features = getFeaturesForDisplay();
    for (size_t i = 0; i < features.size(); i++) {
      if (vpMath::equal(features[i][0], 0)) {
        vpImagePoint ip(features[i][1], features[i][2]);
        int state = static_cast<int>(features[i][3]);

        // NOTE(review): several case labels and the display calls of this
        // switch appear elided in this doxygen-rendered view.
        switch (state) {
          break;

        case vpMeSite::CONTRAST:
          break;

          break;

          break;

        case vpMeSite::TOO_NEAR:
          break;

        default:
        }
      }
      else if (vpMath::equal(features[i][0], 1)) {
        vpImagePoint ip1(features[i][1], features[i][2]);

        vpImagePoint ip2(features[i][3], features[i][4]);
        double id = features[i][5];
        std::stringstream ss;
        ss << id;
        vpDisplay::displayText(I, ip2, ss.str(), vpColor::red);
      }
      else if (vpMath::equal(features[i][0], 2)) {
        vpImagePoint im_centroid(features[i][1], features[i][2]);
        vpImagePoint im_extremity(features[i][3], features[i][4]);
        bool desired = vpMath::equal(features[i][0], 2);
        vpDisplay::displayArrow(I, im_centroid, im_extremity, desired ? vpColor::blue : vpColor::red, 4, 2, thickness);
      }
    }
  }

  // Model primitives: [0]==0 line segment, [0]==1 ellipse (normalized moments).
  std::vector<std::vector<double> > models =
    getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);
  for (size_t i = 0; i < models.size(); i++) {
    if (vpMath::equal(models[i][0], 0)) {
      vpImagePoint ip1(models[i][1], models[i][2]);
      vpImagePoint ip2(models[i][3], models[i][4]);
      vpDisplay::displayLine(I, ip1, ip2, col, thickness);
    }
    else if (vpMath::equal(models[i][0], 1)) {
      vpImagePoint center(models[i][1], models[i][2]);
      double n20 = models[i][3];
      double n11 = models[i][4];
      double n02 = models[i][5];
      vpDisplay::displayEllipse(I, center, n20, n11, n02, true, col, thickness);
    }
  }

#ifdef VISP_HAVE_OGRE
  // Ogre overlay is only meaningful for image-based feature types.
  if ((m_trackerType & EDGE_TRACKER)
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
      || (m_trackerType & KLT_TRACKER)
#endif
      ) {
    if (useOgre)
      faces.displayOgre(cMo);
  }
#endif
}
6325
6326std::vector<std::vector<double> > vpMbGenericTracker::TrackerWrapper::getFeaturesForDisplay()
6327{
6328 std::vector<std::vector<double> > features;
6329
6330 if (m_trackerType & EDGE_TRACKER) {
6331 // m_featuresToBeDisplayedEdge updated after computeVVS()
6332 features.insert(features.end(), m_featuresToBeDisplayedEdge.begin(), m_featuresToBeDisplayedEdge.end());
6333 }
6334
6335#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6336 if (m_trackerType & KLT_TRACKER) {
6337 // m_featuresToBeDisplayedKlt updated after postTracking()
6338 features.insert(features.end(), m_featuresToBeDisplayedKlt.begin(), m_featuresToBeDisplayedKlt.end());
6339 }
6340#endif
6341
6342 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6343 // m_featuresToBeDisplayedDepthNormal updated after postTracking()
6344 features.insert(features.end(), m_featuresToBeDisplayedDepthNormal.begin(),
6345 m_featuresToBeDisplayedDepthNormal.end());
6346 }
6347
6348 return features;
6349}
6350
6351std::vector<std::vector<double> > vpMbGenericTracker::TrackerWrapper::getModelForDisplay(unsigned int width,
6352 unsigned int height,
6353 const vpHomogeneousMatrix &cMo,
6354 const vpCameraParameters &cam,
6355 bool displayFullModel)
6356{
6357 std::vector<std::vector<double> > models;
6358
6359 // Do not add multiple times the same models
6360 if (m_trackerType == EDGE_TRACKER) {
6361 models = vpMbEdgeTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6362 }
6363#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6364 else if (m_trackerType == KLT_TRACKER) {
6365 models = vpMbKltTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6366 }
6367#endif
6368 else if (m_trackerType == DEPTH_NORMAL_TRACKER) {
6369 models = vpMbDepthNormalTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6370 }
6371 else if (m_trackerType == DEPTH_DENSE_TRACKER) {
6372 models = vpMbDepthDenseTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6373 }
6374 else {
6375 // Edge and KLT trackers use the same primitives
6376 if (m_trackerType & EDGE_TRACKER) {
6377 std::vector<std::vector<double> > edgeModels =
6378 vpMbEdgeTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6379 models.insert(models.end(), edgeModels.begin(), edgeModels.end());
6380 }
6381
6382 // Depth dense and depth normal trackers use the same primitives
6383 if (m_trackerType & DEPTH_DENSE_TRACKER) {
6384 std::vector<std::vector<double> > depthDenseModels =
6385 vpMbDepthDenseTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6386 models.insert(models.end(), depthDenseModels.begin(), depthDenseModels.end());
6387 }
6388 }
6389
6390 return models;
6391}
6392
// Initialize the wrapped tracker from a grayscale image: recompute the camera
// FOV if needed, update face visibility (Ogre or geometric test), refresh the
// scan-line rendering, then let each enabled modality recompute its visibility.
// NOTE(review): this text comes from a documentation extraction; Doxygen lines
// 6432, 6436 and 6441 are missing below — recover them from the original
// vpMbGenericTracker.cpp before editing this function.
6393void vpMbGenericTracker::TrackerWrapper::init(const vpImage<unsigned char> &I)
6394{
6395 if (!modelInitialised) {
6396 throw vpException(vpException::fatalError, "model not initialized");
6397 }
6398
 // FOV computation is only needed when scan lines or FOV clipping are active.
6399 if (useScanLine || clippingFlag > 3)
6400 m_cam.computeFov(I.getWidth(), I.getHeight());
6401
6402 bool reInitialisation = false;
6403 if (!useOgre) {
6404 faces.setVisible(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
6405 }
6406 else {
6407#ifdef VISP_HAVE_OGRE
6408 if (!faces.isOgreInitialised()) {
6409 faces.setBackgroundSizeOgre(I.getHeight(), I.getWidth());
6410
6411 faces.setOgreShowConfigDialog(ogreShowConfigDialog);
6412 faces.initOgre(m_cam);
6413 // Turn off Ogre config dialog display for the next call to this
6414 // function since settings are saved in the ogre.cfg file and used
6415 // during the next call
6416 ogreShowConfigDialog = false;
6417 }
6418
6419 faces.setVisibleOgre(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
6420#else
 // Ogre requested but not built in: fall back to the geometric visibility test.
6421 faces.setVisible(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
6422#endif
6423 }
6424
6425 if (useScanLine) {
6426 faces.computeClippedPolygons(m_cMo, m_cam);
6427 faces.computeScanLineRender(m_cam, I.getWidth(), I.getHeight());
6428 }
6429
6430#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
 // NOTE(review): the statement executed by this condition (line 6432,
 // presumably the KLT initialization call) is missing from this listing.
6431 if (m_trackerType & KLT_TRACKER)
6433#endif
6434
6435 if (m_trackerType & EDGE_TRACKER) {
 // NOTE(review): lines 6436 and 6441 (edge-tracker initialization calls)
 // are missing from this listing.
6437
6438 bool a = false;
6439 vpMbEdgeTracker::visibleFace(I, m_cMo, a); // should be useless, but keep it for nbvisiblepolygone
6440
6442 }
6443
6444 if (m_trackerType & DEPTH_NORMAL_TRACKER)
6445 vpMbDepthNormalTracker::computeVisibility(I.getWidth(), I.getHeight()); // vpMbDepthNormalTracker::init(I);
6446
6447 if (m_trackerType & DEPTH_DENSE_TRACKER)
6448 vpMbDepthDenseTracker::computeVisibility(I.getWidth(), I.getHeight()); // vpMbDepthDenseTracker::init(I);
6449}
6450
6451void vpMbGenericTracker::TrackerWrapper::initCircle(const vpPoint &p1, const vpPoint &p2, const vpPoint &p3,
6452 double radius, int idFace, const std::string &name)
6453{
6454 if (m_trackerType & EDGE_TRACKER)
6455 vpMbEdgeTracker::initCircle(p1, p2, p3, radius, idFace, name);
6456
6457#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6458 if (m_trackerType & KLT_TRACKER)
6459 vpMbKltTracker::initCircle(p1, p2, p3, radius, idFace, name);
6460#endif
6461}
6462
6463void vpMbGenericTracker::TrackerWrapper::initCylinder(const vpPoint &p1, const vpPoint &p2, double radius, int idFace,
6464 const std::string &name)
6465{
6466 if (m_trackerType & EDGE_TRACKER)
6467 vpMbEdgeTracker::initCylinder(p1, p2, radius, idFace, name);
6468
6469#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6470 if (m_trackerType & KLT_TRACKER)
6471 vpMbKltTracker::initCylinder(p1, p2, radius, idFace, name);
6472#endif
6473}
6474
// Dispatch a polygon defined by its corners to every enabled modality.
// NOTE(review): the per-modality delegation calls (Doxygen lines 6478, 6482,
// 6486 and 6489) were lost in extraction — each condition below is missing its
// statement. Recover the bodies from the original vpMbGenericTracker.cpp.
6475void vpMbGenericTracker::TrackerWrapper::initFaceFromCorners(vpMbtPolygon &polygon)
6476{
6477 if (m_trackerType & EDGE_TRACKER)
6479
6480#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6481 if (m_trackerType & KLT_TRACKER)
6483#endif
6484
6485 if (m_trackerType & DEPTH_NORMAL_TRACKER)
6487
6488 if (m_trackerType & DEPTH_DENSE_TRACKER)
6490}
6491
// Dispatch a polygon defined by its lines to every enabled modality.
// NOTE(review): the per-modality delegation calls (Doxygen lines 6495, 6499,
// 6503 and 6506) were lost in extraction — each condition below is missing its
// statement. Recover the bodies from the original vpMbGenericTracker.cpp.
6492void vpMbGenericTracker::TrackerWrapper::initFaceFromLines(vpMbtPolygon &polygon)
6493{
6494 if (m_trackerType & EDGE_TRACKER)
6496
6497#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6498 if (m_trackerType & KLT_TRACKER)
6500#endif
6501
6502 if (m_trackerType & DEPTH_NORMAL_TRACKER)
6504
6505 if (m_trackerType & DEPTH_DENSE_TRACKER)
6507}
6508
// NOTE(review): the function signature (Doxygen line 6509) and the body of the
// EDGE_TRACKER branch (lines 6512-6513) were lost in extraction; this fragment
// cannot be identified from the visible text alone. Recover it from the
// original vpMbGenericTracker.cpp.
6510{
6511 if (m_trackerType & EDGE_TRACKER) {
6514 }
6515}
6516
// Load the tracker configuration from an XML file: seed the parser with the
// current settings (so unspecified entries keep their values), parse the file,
// then push the parsed values back into each modality.
// Throws vpException::ioError when the file cannot be parsed.
// NOTE(review): Doxygen lines 6522 (apparently the declaration of the parser
// object `xmlp`), 6603 and 6621 are missing from this extracted listing —
// confirm against the original vpMbGenericTracker.cpp.
6517void vpMbGenericTracker::TrackerWrapper::loadConfigFile(const std::string &configFile, bool verbose)
6518{
6519 // Load projection error config
6520 vpMbTracker::loadConfigFile(configFile, verbose);
6521
 // Seed the parser with the current state so missing XML entries are no-ops.
6523 xmlp.setVerbose(verbose);
6524 xmlp.setCameraParameters(m_cam);
6525 xmlp.setAngleAppear(vpMath::deg(angleAppears));
6526 xmlp.setAngleDisappear(vpMath::deg(angleDisappears));
6527
6528 // Edge
6529 xmlp.setEdgeMe(me);
6530
6531 // KLT
6532 xmlp.setKltMaxFeatures(10000);
6533 xmlp.setKltWindowSize(5);
6534 xmlp.setKltQuality(0.01);
6535 xmlp.setKltMinDistance(5);
6536 xmlp.setKltHarrisParam(0.01);
6537 xmlp.setKltBlockSize(3);
6538 xmlp.setKltPyramidLevels(3);
6539#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6540 xmlp.setKltMaskBorder(maskBorder);
6541#endif
6542
6543 // Depth normal
6544 xmlp.setDepthNormalFeatureEstimationMethod(m_depthNormalFeatureEstimationMethod);
6545 xmlp.setDepthNormalPclPlaneEstimationMethod(m_depthNormalPclPlaneEstimationMethod);
6546 xmlp.setDepthNormalPclPlaneEstimationRansacMaxIter(m_depthNormalPclPlaneEstimationRansacMaxIter);
6547 xmlp.setDepthNormalPclPlaneEstimationRansacThreshold(m_depthNormalPclPlaneEstimationRansacThreshold);
6548 xmlp.setDepthNormalSamplingStepX(m_depthNormalSamplingStepX);
6549 xmlp.setDepthNormalSamplingStepY(m_depthNormalSamplingStepY);
6550
6551 // Depth dense
6552 xmlp.setDepthDenseSamplingStepX(m_depthDenseSamplingStepX);
6553 xmlp.setDepthDenseSamplingStepY(m_depthDenseSamplingStepY);
6554
6555 try {
6556 if (verbose) {
6557 std::cout << " *********** Parsing XML for";
6558 }
6559
 // Build the list of enabled modality names for the verbose banner.
6560 std::vector<std::string> tracker_names;
6561 if (m_trackerType & EDGE_TRACKER)
6562 tracker_names.push_back("Edge");
6563#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6564 if (m_trackerType & KLT_TRACKER)
6565 tracker_names.push_back("Klt");
6566#endif
6567 if (m_trackerType & DEPTH_NORMAL_TRACKER)
6568 tracker_names.push_back("Depth Normal");
6569 if (m_trackerType & DEPTH_DENSE_TRACKER)
6570 tracker_names.push_back("Depth Dense");
6571
6572 if (verbose) {
6573 for (size_t i = 0; i < tracker_names.size(); i++) {
6574 std::cout << " " << tracker_names[i];
6575 if (i == tracker_names.size() - 1) {
6576 std::cout << " ";
6577 }
6578 }
6579
6580 std::cout << "Model-Based Tracker ************ " << std::endl;
6581 }
6582
6583 xmlp.parse(configFile);
6584 }
6585 catch (...) {
6586 throw vpException(vpException::ioError, "Can't open XML file \"%s\"\n ", configFile.c_str());
6587 }
6588
 // Push the parsed values back into the tracker state.
6589 vpCameraParameters camera;
6590 xmlp.getCameraParameters(camera);
6591 setCameraParameters(camera);
6592
6593 angleAppears = vpMath::rad(xmlp.getAngleAppear());
6594 angleDisappears = vpMath::rad(xmlp.getAngleDisappear());
6595
6596 if (xmlp.hasNearClippingDistance())
6597 setNearClippingDistance(xmlp.getNearClippingDistance());
6598
6599 if (xmlp.hasFarClippingDistance())
6600 setFarClippingDistance(xmlp.getFarClippingDistance());
6601
 // NOTE(review): line 6603 (the statement enabling FOV clipping) is missing
 // from this listing.
6602 if (xmlp.getFovClipping()) {
6604 }
6605
6606 useLodGeneral = xmlp.getLodState();
6607 minLineLengthThresholdGeneral = xmlp.getLodMinLineLengthThreshold();
6608 minPolygonAreaThresholdGeneral = xmlp.getLodMinPolygonAreaThreshold();
6609
 // Apply LOD settings immediately only when a model is already loaded.
6610 applyLodSettingInConfig = false;
6611 if (this->getNbPolygon() > 0) {
6612 applyLodSettingInConfig = true;
6613 setLod(useLodGeneral);
6614 setMinLineLengthThresh(minLineLengthThresholdGeneral);
6615 setMinPolygonAreaThresh(minPolygonAreaThresholdGeneral);
6616 }
6617
6618 // Edge
6619 vpMe meParser;
6620 xmlp.getEdgeMe(meParser);
 // NOTE(review): line 6621 (presumably applying meParser to the edge tracker)
 // is missing from this listing.
6622
6623 // KLT
6624#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6625 tracker.setMaxFeatures((int)xmlp.getKltMaxFeatures());
6626 tracker.setWindowSize((int)xmlp.getKltWindowSize());
6627 tracker.setQuality(xmlp.getKltQuality());
6628 tracker.setMinDistance(xmlp.getKltMinDistance());
6629 tracker.setHarrisFreeParameter(xmlp.getKltHarrisParam());
6630 tracker.setBlockSize((int)xmlp.getKltBlockSize());
6631 tracker.setPyramidLevels((int)xmlp.getKltPyramidLevels());
6632 maskBorder = xmlp.getKltMaskBorder();
6633
6634 // if(useScanLine)
6635 faces.getMbScanLineRenderer().setMaskBorder(maskBorder);
6636#endif
6637
6638 // Depth normal
6639 setDepthNormalFeatureEstimationMethod(xmlp.getDepthNormalFeatureEstimationMethod());
6640 setDepthNormalPclPlaneEstimationMethod(xmlp.getDepthNormalPclPlaneEstimationMethod());
6641 setDepthNormalPclPlaneEstimationRansacMaxIter(xmlp.getDepthNormalPclPlaneEstimationRansacMaxIter());
6642 setDepthNormalPclPlaneEstimationRansacThreshold(xmlp.getDepthNormalPclPlaneEstimationRansacThreshold());
6643 setDepthNormalSamplingStep(xmlp.getDepthNormalSamplingStepX(), xmlp.getDepthNormalSamplingStepY());
6644
6645 // Depth dense
6646 setDepthDenseSamplingStep(xmlp.getDepthDenseSamplingStepX(), xmlp.getDepthDenseSamplingStepY());
6647}
6648
6649#ifdef VISP_HAVE_PCL
// Post-tracking step (PCL variant): re-initialize KLT if needed, refresh face
// visibility and scan lines from the image, recompute depth visibilities from
// the point-cloud dimensions, then rebuild the moving edges.
// NOTE(review): Doxygen line 6650 (the first line of this function's
// signature — presumably `postTracking(const vpImage<unsigned char> *const
// ptr_I,`) plus lines 6683 and 6690 are missing from this extracted listing.
6651 const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
6652{
6653#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6654 // KLT
6655 if (m_trackerType & KLT_TRACKER) {
6656 if (vpMbKltTracker::postTracking(*ptr_I, m_w_klt)) {
6657 vpMbKltTracker::reinit(*ptr_I);
6658 }
6659 }
6660#endif
6661
6662 // Looking for new visible face
6663 if (m_trackerType & EDGE_TRACKER) {
6664 bool newvisibleface = false;
6665 vpMbEdgeTracker::visibleFace(*ptr_I, m_cMo, newvisibleface);
6666
6667 if (useScanLine) {
6668 faces.computeClippedPolygons(m_cMo, m_cam);
6669 faces.computeScanLineRender(m_cam, ptr_I->getWidth(), ptr_I->getHeight());
6670 }
6671 }
6672
6673 // Depth normal
6674 if (m_trackerType & DEPTH_NORMAL_TRACKER)
6675 vpMbDepthNormalTracker::computeVisibility(point_cloud->width, point_cloud->height);
6676
6677 // Depth dense
6678 if (m_trackerType & DEPTH_DENSE_TRACKER)
6679 vpMbDepthDenseTracker::computeVisibility(point_cloud->width, point_cloud->height);
6680
6681 // Edge
6682 if (m_trackerType & EDGE_TRACKER) {
 // NOTE(review): line 6683 is missing from this listing.
6684
6685 vpMbEdgeTracker::initMovingEdge(*ptr_I, m_cMo);
6686 // Reinit the moving edge for the lines which need it.
6687 vpMbEdgeTracker::reinitMovingEdge(*ptr_I, m_cMo);
6688
 // NOTE(review): line 6690 (presumably the projection-error computation)
 // is missing from this listing.
6689 if (computeProjError) {
6691 }
6692 }
6693}
6694
// Pre-tracking step (PCL variant): run each enabled modality's low-level
// tracking/segmentation, converting any failure into a logged rethrow.
// NOTE(review): Doxygen line 6695 (the first line of this function's
// signature) and the tracked-call lines 6700, 6711, 6722 and 6732 are missing
// from this extracted listing — each try block below has lost its statement.
6696 const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
6697{
6698 if (m_trackerType & EDGE_TRACKER) {
6699 try {
6701 }
6702 catch (...) {
6703 std::cerr << "Error in moving edge tracking" << std::endl;
6704 throw;
6705 }
6706 }
6707
6708#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6709 if (m_trackerType & KLT_TRACKER) {
6710 try {
6712 }
6713 catch (const vpException &e) {
6714 std::cerr << "Error in KLT tracking: " << e.what() << std::endl;
6715 throw;
6716 }
6717 }
6718#endif
6719
6720 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6721 try {
6723 }
6724 catch (...) {
6725 std::cerr << "Error in Depth normal tracking" << std::endl;
6726 throw;
6727 }
6728 }
6729
6730 if (m_trackerType & DEPTH_DENSE_TRACKER) {
6731 try {
6733 }
6734 catch (...) {
6735 std::cerr << "Error in Depth dense tracking" << std::endl;
6736 throw;
6737 }
6738 }
6739}
6740#endif
6741
// Post-tracking step (matrix/point-cloud-dimension variant): mirrors the PCL
// overload but takes the point-cloud width/height directly.
// NOTE(review): Doxygen line 6742 (the first line of this function's
// signature) plus lines 6776 and 6783 are missing from this extracted listing.
6743 const unsigned int pointcloud_width,
6744 const unsigned int pointcloud_height)
6745{
6746#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6747 // KLT
6748 if (m_trackerType & KLT_TRACKER) {
6749 if (vpMbKltTracker::postTracking(*ptr_I, m_w_klt)) {
6750 vpMbKltTracker::reinit(*ptr_I);
6751 }
6752 }
6753#endif
6754
6755 // Looking for new visible face
6756 if (m_trackerType & EDGE_TRACKER) {
6757 bool newvisibleface = false;
6758 vpMbEdgeTracker::visibleFace(*ptr_I, m_cMo, newvisibleface);
6759
6760 if (useScanLine) {
6761 faces.computeClippedPolygons(m_cMo, m_cam);
6762 faces.computeScanLineRender(m_cam, ptr_I->getWidth(), ptr_I->getHeight());
6763 }
6764 }
6765
6766 // Depth normal
6767 if (m_trackerType & DEPTH_NORMAL_TRACKER)
6768 vpMbDepthNormalTracker::computeVisibility(pointcloud_width, pointcloud_height);
6769
6770 // Depth dense
6771 if (m_trackerType & DEPTH_DENSE_TRACKER)
6772 vpMbDepthDenseTracker::computeVisibility(pointcloud_width, pointcloud_height);
6773
6774 // Edge
6775 if (m_trackerType & EDGE_TRACKER) {
 // NOTE(review): line 6776 is missing from this listing.
6777
6778 vpMbEdgeTracker::initMovingEdge(*ptr_I, m_cMo);
6779 // Reinit the moving edge for the lines which need it.
6780 vpMbEdgeTracker::reinitMovingEdge(*ptr_I, m_cMo);
6781
 // NOTE(review): line 6783 (presumably the projection-error computation)
 // is missing from this listing.
6782 if (computeProjError) {
6784 }
6785 }
6786}
6787
// Pre-tracking step (vector-of-vpColVector point-cloud variant): run each
// enabled modality's low-level tracking/segmentation with logged rethrow.
// NOTE(review): Doxygen line 6788 (the first line of this function's
// signature) and the tracked-call lines 6795 and 6806 are missing from this
// extracted listing — the first two try blocks below have lost their
// statements.
6789 const std::vector<vpColVector> *const point_cloud,
6790 const unsigned int pointcloud_width,
6791 const unsigned int pointcloud_height)
6792{
6793 if (m_trackerType & EDGE_TRACKER) {
6794 try {
6796 }
6797 catch (...) {
6798 std::cerr << "Error in moving edge tracking" << std::endl;
6799 throw;
6800 }
6801 }
6802
6803#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6804 if (m_trackerType & KLT_TRACKER) {
6805 try {
6807 }
6808 catch (const vpException &e) {
6809 std::cerr << "Error in KLT tracking: " << e.what() << std::endl;
6810 throw;
6811 }
6812 }
6813#endif
6814
6815 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6816 try {
6817 vpMbDepthNormalTracker::segmentPointCloud(*point_cloud, pointcloud_width, pointcloud_height);
6818 }
6819 catch (...) {
6820 std::cerr << "Error in Depth tracking" << std::endl;
6821 throw;
6822 }
6823 }
6824
6825 if (m_trackerType & DEPTH_DENSE_TRACKER) {
6826 try {
6827 vpMbDepthDenseTracker::segmentPointCloud(*point_cloud, pointcloud_width, pointcloud_height);
6828 }
6829 catch (...) {
6830 std::cerr << "Error in Depth dense tracking" << std::endl;
6831 throw;
6832 }
6833 }
6834}
6835
// Reset every modality's internal model data structures, reload the CAD model
// and re-initialize from the given pose using whichever image pointer is
// non-NULL (grayscale preferred).
// NOTE(review): Doxygen line 6836 (the first line of this function's
// signature) and lines 6844-6846 (apparently the declarations of the loop
// cursors `l`, `cy`, `ci` used below) are missing from this extracted listing.
6837 const vpImage<vpRGBa> *const I_color, const std::string &cad_name,
6838 const vpHomogeneousMatrix &cMo, bool verbose,
6839 const vpHomogeneousMatrix &T)
6840{
6841 m_cMo.eye();
6842
6843 // Edge
6847
 // Delete every edge primitive (lines, cylinders, circles) at each scale.
6848 for (unsigned int i = 0; i < scales.size(); i++) {
6849 if (scales[i]) {
6850 for (std::list<vpMbtDistanceLine *>::const_iterator it = lines[i].begin(); it != lines[i].end(); ++it) {
6851 l = *it;
6852 if (l != NULL)
6853 delete l;
6854 l = NULL;
6855 }
6856
6857 for (std::list<vpMbtDistanceCylinder *>::const_iterator it = cylinders[i].begin(); it != cylinders[i].end();
6858 ++it) {
6859 cy = *it;
6860 if (cy != NULL)
6861 delete cy;
6862 cy = NULL;
6863 }
6864
6865 for (std::list<vpMbtDistanceCircle *>::const_iterator it = circles[i].begin(); it != circles[i].end(); ++it) {
6866 ci = *it;
6867 if (ci != NULL)
6868 delete ci;
6869 ci = NULL;
6870 }
6871
6872 lines[i].clear();
6873 cylinders[i].clear();
6874 circles[i].clear();
6875 }
6876 }
6877
 // Reset the edge primitive counters.
6878 nline = 0;
6879 ncylinder = 0;
6880 ncircle = 0;
6881 nbvisiblepolygone = 0;
6882
6883 // KLT
6884#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6885 // delete the Klt Polygon features
6886 for (std::list<vpMbtDistanceKltPoints *>::const_iterator it = kltPolygons.begin(); it != kltPolygons.end(); ++it) {
6887 vpMbtDistanceKltPoints *kltpoly = *it;
6888 if (kltpoly != NULL) {
6889 delete kltpoly;
6890 }
6891 kltpoly = NULL;
6892 }
6893 kltPolygons.clear();
6894
6895 for (std::list<vpMbtDistanceKltCylinder *>::const_iterator it = kltCylinders.begin(); it != kltCylinders.end();
6896 ++it) {
6897 vpMbtDistanceKltCylinder *kltPolyCylinder = *it;
6898 if (kltPolyCylinder != NULL) {
6899 delete kltPolyCylinder;
6900 }
6901 kltPolyCylinder = NULL;
6902 }
6903 kltCylinders.clear();
6904
6905 // delete the structures used to display circles
6906 for (std::list<vpMbtDistanceCircle *>::const_iterator it = circles_disp.begin(); it != circles_disp.end(); ++it) {
6907 ci = *it;
6908 if (ci != NULL) {
6909 delete ci;
6910 }
6911 ci = NULL;
6912 }
6913 circles_disp.clear();
6914
6915 firstInitialisation = true;
6916
6917#endif
6918
6919 // Depth normal
6920 for (size_t i = 0; i < m_depthNormalFaces.size(); i++) {
6921 delete m_depthNormalFaces[i];
6922 m_depthNormalFaces[i] = NULL;
6923 }
6924 m_depthNormalFaces.clear();
6925
6926 // Depth dense
6927 for (size_t i = 0; i < m_depthDenseFaces.size(); i++) {
6928 delete m_depthDenseFaces[i];
6929 m_depthDenseFaces[i] = NULL;
6930 }
6931 m_depthDenseFaces.clear();
6932
6933 faces.reset();
6934
 // Reload the CAD model and re-initialize from the provided pose, using
 // whichever image pointer the caller supplied.
6935 loadModel(cad_name, verbose, T);
6936 if (I) {
6937 initFromPose(*I, cMo);
6938 }
6939 else {
6940 initFromPose(*I_color, cMo);
6941 }
6942}
6943
6944void vpMbGenericTracker::TrackerWrapper::reInitModel(const vpImage<unsigned char> &I, const std::string &cad_name,
6945 const vpHomogeneousMatrix &cMo, bool verbose,
6946 const vpHomogeneousMatrix &T)
6947{
6948 reInitModel(&I, NULL, cad_name, cMo, verbose, T);
6949}
6950
6951void vpMbGenericTracker::TrackerWrapper::reInitModel(const vpImage<vpRGBa> &I_color, const std::string &cad_name,
6952 const vpHomogeneousMatrix &cMo, bool verbose,
6953 const vpHomogeneousMatrix &T)
6954{
6955 reInitModel(NULL, &I_color, cad_name, cMo, verbose, T);
6956}
6957
// Reset the wrapped tracker.
// NOTE(review): Doxygen lines 6960, 6962, 6964 and 6965 (presumably the
// per-base-class resetTracker() calls) are missing from this extracted
// listing — recover them from the original vpMbGenericTracker.cpp.
6958void vpMbGenericTracker::TrackerWrapper::resetTracker()
6959{
6961#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6963#endif
6966}
6967
// Store the intrinsic camera parameters and propagate them to the modalities.
// NOTE(review): Doxygen lines 6972, 6974, 6976 and 6977 (presumably the
// per-base-class setCameraParameters() calls) are missing from this extracted
// listing — recover them from the original vpMbGenericTracker.cpp.
6968void vpMbGenericTracker::TrackerWrapper::setCameraParameters(const vpCameraParameters &cam)
6969{
6970 m_cam = cam;
6971
6973#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6975#endif
6978}
6979
6980void vpMbGenericTracker::TrackerWrapper::setClipping(const unsigned int &flags) { vpMbEdgeTracker::setClipping(flags); }
6981
// Set the far clipping distance.
// NOTE(review): Doxygen line 6984 (the delegation call forming this
// function's body) is missing from this extracted listing.
6982void vpMbGenericTracker::TrackerWrapper::setFarClippingDistance(const double &dist)
6983{
6985}
6986
// Set the near clipping distance.
// NOTE(review): Doxygen line 6989 (the delegation call forming this
// function's body) is missing from this extracted listing.
6987void vpMbGenericTracker::TrackerWrapper::setNearClippingDistance(const double &dist)
6988{
6990}
6991
// Enable/disable the Ogre-based visibility test and, when Ogre is available,
// label the Ogre window with this wrapper's name.
// NOTE(review): Doxygen line 6994 (presumably the base-class
// setOgreVisibilityTest(v) call) is missing from this extracted listing.
6992void vpMbGenericTracker::TrackerWrapper::setOgreVisibilityTest(const bool &v)
6993{
6995#ifdef VISP_HAVE_OGRE
6996 faces.getOgreContext()->setWindowName("TrackerWrapper");
6997#endif
6998}
6999
// Force the tracker pose to cdMo using whichever image is provided (grayscale
// preferred; a color image is converted to m_I first). When KLT is enabled the
// KLT-specific setPose path is used, otherwise a plain re-init is performed.
// NOTE(review): Doxygen line 7027 (the statement inside the EDGE_TRACKER
// branch) is missing from this extracted listing.
7000void vpMbGenericTracker::TrackerWrapper::setPose(const vpImage<unsigned char> *const I,
7001 const vpImage<vpRGBa> *const I_color, const vpHomogeneousMatrix &cdMo)
7002{
7003 bool performKltSetPose = false;
7004 if (I_color) {
 // Keep a grayscale copy so the rest of the pipeline works on one image type.
7005 vpImageConvert::convert(*I_color, m_I);
7006 }
7007
7008#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7009 if (m_trackerType & KLT_TRACKER) {
7010 performKltSetPose = true;
7011
7012 if (useScanLine || clippingFlag > 3) {
7013 m_cam.computeFov(I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
7014 }
7015
7016 vpMbKltTracker::setPose(I ? *I : m_I, cdMo);
7017 }
7018#endif
7019
7020 if (!performKltSetPose) {
 // No KLT path: simply adopt the pose and fully re-initialize.
7021 m_cMo = cdMo;
7022 init(I ? *I : m_I);
7023 return;
7024 }
7025
7026 if (m_trackerType & EDGE_TRACKER) {
 // NOTE(review): line 7027 is missing from this listing.
7028 }
7029
7030 if (useScanLine) {
7031 faces.computeClippedPolygons(m_cMo, m_cam);
7032 faces.computeScanLineRender(m_cam, I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
7033 }
7034
7035#if 0
7036 if (m_trackerType & EDGE_TRACKER) {
7037 initPyramid(I, Ipyramid);
7038
7039 unsigned int i = (unsigned int)scales.size();
7040 do {
7041 i--;
7042 if (scales[i]) {
7043 downScale(i);
7044 vpMbEdgeTracker::initMovingEdge(*Ipyramid[i], cMo);
7045 upScale(i);
7046 }
7047 } while (i != 0);
7048
7049 cleanPyramid(Ipyramid);
7050 }
7051#else
7052 if (m_trackerType & EDGE_TRACKER) {
7053 vpMbEdgeTracker::initMovingEdge(I ? *I : m_I, m_cMo);
7054 }
7055#endif
7056
7057 // Depth normal
7058 vpMbDepthNormalTracker::computeVisibility(I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
7059
7060 // Depth dense
7061 vpMbDepthDenseTracker::computeVisibility(I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
7062}
7063
7064void vpMbGenericTracker::TrackerWrapper::setPose(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cdMo)
7065{
7066 setPose(&I, NULL, cdMo);
7067}
7068
7069void vpMbGenericTracker::TrackerWrapper::setPose(const vpImage<vpRGBa> &I_color, const vpHomogeneousMatrix &cdMo)
7070{
7071 setPose(NULL, &I_color, cdMo);
7072}
7073
// Enable/disable the projection-error computation.
// NOTE(review): Doxygen line 7076 (the delegation call forming this
// function's body) is missing from this extracted listing.
7074void vpMbGenericTracker::TrackerWrapper::setProjectionErrorComputation(const bool &flag)
7075{
7077}
7078
// Enable/disable the scan-line visibility test.
// NOTE(review): Doxygen lines 7081, 7083, 7085 and 7086 (presumably the
// per-base-class setScanLineVisibilityTest() calls) are missing from this
// extracted listing — recover them from the original vpMbGenericTracker.cpp.
7079void vpMbGenericTracker::TrackerWrapper::setScanLineVisibilityTest(const bool &v)
7080{
7082#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7084#endif
7087}
7088
7089void vpMbGenericTracker::TrackerWrapper::setTrackerType(int type)
7090{
7091 if ((type & (EDGE_TRACKER |
7092#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7093 KLT_TRACKER |
7094#endif
7095 DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
7096 throw vpException(vpTrackingException::badValue, "bad value for tracker type: !", type);
7097 }
7098
7099 m_trackerType = type;
7100}
7101
// Run the edge tracker's tracking-quality test when the edge modality is on.
// NOTE(review): Doxygen line 7105 (the statement inside the branch,
// presumably vpMbEdgeTracker::testTracking()) is missing from this extracted
// listing.
7102void vpMbGenericTracker::TrackerWrapper::testTracking()
7103{
7104 if (m_trackerType & EDGE_TRACKER) {
7106 }
7107}
7108
7109void vpMbGenericTracker::TrackerWrapper::track(const vpImage<unsigned char> &
7110#if defined(VISP_HAVE_PCL) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
7111 I
7112#endif
7113)
7114{
7115 if ((m_trackerType & (EDGE_TRACKER
7116#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7117 | KLT_TRACKER
7118#endif
7119 )) == 0) {
7120 std::cerr << "Bad tracker type: " << m_trackerType << std::endl;
7121 return;
7122 }
7123
7124#if defined(VISP_HAVE_PCL) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
7125 track(&I, nullptr);
7126#endif
7127}
7128
// Deliberately empty overload: color-only tracking is not exposed through the
// public API; this entry point exists only for debugging purposes.
7129void vpMbGenericTracker::TrackerWrapper::track(const vpImage<vpRGBa> &)
7130{
7131 // not exposed to the public API, only for debug
7132}
7133
#ifdef VISP_HAVE_PCL
/*!
 * Main tracking entry point (PCL variant): validate the inputs required by
 * the enabled modalities, then run preTracking -> computeVVS -> testTracking
 * (edge-only) -> postTracking. On failure the previous pose is restored and
 * the exception is rethrown.
 */
void vpMbGenericTracker::TrackerWrapper::track(const vpImage<unsigned char> *const ptr_I,
                                               const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
{
  // Nothing to do when no supported modality is enabled.
  if ((m_trackerType & (EDGE_TRACKER |
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
                        KLT_TRACKER |
#endif
                        DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {
    std::cerr << "Bad tracker type: " << m_trackerType << std::endl;
    return;
  }

  // Image-based modalities require a valid image pointer.
  if (m_trackerType & (EDGE_TRACKER
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
                       | KLT_TRACKER
#endif
                       ) &&
      ptr_I == NULL) {
    throw vpException(vpException::fatalError, "Image pointer is NULL!");
  }

  // Depth-based modalities require a valid point cloud.
  if (m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) && !point_cloud) { // point_cloud == nullptr
    throw vpException(vpException::fatalError, "Pointcloud smart pointer is NULL!");
  }

  // Keep the current pose so it can be restored if tracking fails.
  const vpHomogeneousMatrix previousPose = m_cMo;
  try {
    preTracking(ptr_I, point_cloud);

    try {
      computeVVS(ptr_I);
    }
    catch (...) {
      // Flag the covariance as invalid before propagating the failure.
      covarianceMatrix = -1;
      throw; // throw the original exception
    }

    if (m_trackerType == EDGE_TRACKER)
      testTracking();

    postTracking(ptr_I, point_cloud);
  }
  catch (const vpException &e) {
    std::cerr << "Exception: " << e.what() << std::endl;
    m_cMo = previousPose;
    throw; // rethrowing the original exception
  }
}
#endif
void setWindowName(const Ogre::String &n)
Definition vpAROgre.h:269
unsigned int getCols() const
Definition vpArray2D.h:280
void resize(unsigned int nrows, unsigned int ncols, bool flagNullify=true, bool recopy_=true)
Definition vpArray2D.h:305
unsigned int size() const
Return the number of elements of the 2D array.
Definition vpArray2D.h:292
unsigned int getRows() const
Definition vpArray2D.h:290
Generic class defining intrinsic camera parameters.
Implementation of column vector and the associated operations.
void insert(unsigned int i, const vpColVector &v)
void resize(unsigned int i, bool flagNullify=true)
Class to define RGB colors available for display functionalities.
Definition vpColor.h:152
static const vpColor red
Definition vpColor.h:211
static const vpColor cyan
Definition vpColor.h:220
static const vpColor blue
Definition vpColor.h:217
static const vpColor purple
Definition vpColor.h:222
static const vpColor yellow
Definition vpColor.h:219
static const vpColor green
Definition vpColor.h:214
static void displayLine(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color, unsigned int thickness=1, bool segment=true)
static void displayEllipse(const vpImage< unsigned char > &I, const vpImagePoint &center, const double &coef1, const double &coef2, const double &coef3, bool use_normalized_centered_moments, const vpColor &color, unsigned int thickness=1, bool display_center=false, bool display_arc=false)
static void displayCross(const vpImage< unsigned char > &I, const vpImagePoint &ip, unsigned int size, const vpColor &color, unsigned int thickness=1)
static void displayArrow(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color=vpColor::white, unsigned int w=4, unsigned int h=2, unsigned int thickness=1)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
error that can be emitted by ViSP classes.
Definition vpException.h:59
@ ioError
I/O error.
Definition vpException.h:79
@ badValue
Used to indicate that a value is not in the allowed range.
Definition vpException.h:85
@ notInitialized
Used to indicate that a parameter is not initialized.
Definition vpException.h:86
@ fatalError
Fatal error.
Definition vpException.h:84
const char * what() const
static vpHomogeneousMatrix direct(const vpColVector &v)
Implementation of an homogeneous matrix and operations on such kind of matrices.
vpHomogeneousMatrix inverse() const
vp_deprecated void init()
static void convert(const vpImage< unsigned char > &src, vpImage< vpRGBa > &dest)
Class that defines a 2D point in an image. This class is useful for image processing and stores only ...
Definition of the vpImage class member functions.
Definition vpImage.h:135
unsigned int getWidth() const
Definition vpImage.h:242
unsigned int getHeight() const
Definition vpImage.h:184
static std::string getFileExtension(const std::string &pathname, bool checkFile=false)
Wrapper for the KLT (Kanade-Lucas-Tomasi) feature tracker implemented in OpenCV. Thus to enable this ...
Definition vpKltOpencv.h:73
static double rad(double deg)
Definition vpMath.h:116
static double sqr(double x)
Definition vpMath.h:124
static bool equal(double x, double y, double threshold=0.001)
Definition vpMath.h:369
static double deg(double rad)
Definition vpMath.h:106
Implementation of a matrix and operations on matrices.
Definition vpMatrix.h:152
void eye()
Definition vpMatrix.cpp:446
vpMatrix AtA() const
Definition vpMatrix.cpp:625
void insert(const vpMatrix &A, unsigned int r, unsigned int c)
void segmentPointCloud(const pcl::PointCloud< pcl::PointXYZ >::ConstPtr &point_cloud)
virtual std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false)
void computeVisibility(unsigned int width, unsigned int height)
virtual void computeVVSInteractionMatrixAndResidu()
virtual void setScanLineVisibilityTest(const bool &v)
virtual void initFaceFromCorners(vpMbtPolygon &polygon)
virtual void initFaceFromLines(vpMbtPolygon &polygon)
virtual void setCameraParameters(const vpCameraParameters &camera)
void segmentPointCloud(const pcl::PointCloud< pcl::PointXYZ >::ConstPtr &point_cloud)
virtual void initFaceFromLines(vpMbtPolygon &polygon)
void reInitModel(const vpImage< unsigned char > &I, const std::string &cad_name, const vpHomogeneousMatrix &cMo, bool verbose=false)
virtual void setScanLineVisibilityTest(const bool &v)
virtual void setCameraParameters(const vpCameraParameters &camera)
virtual void computeVVSInteractionMatrixAndResidu()
virtual void initFaceFromCorners(vpMbtPolygon &polygon)
virtual std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false)
void computeVisibility(unsigned int width, unsigned int height)
virtual void setCameraParameters(const vpCameraParameters &cam)
virtual void setNearClippingDistance(const double &dist)
virtual void setScanLineVisibilityTest(const bool &v)
virtual void initFaceFromLines(vpMbtPolygon &polygon)
virtual void computeVVSInit()
virtual void setFarClippingDistance(const double &dist)
void computeProjectionError(const vpImage< unsigned char > &_I)
virtual void computeVVSInteractionMatrixAndResidu()
virtual void computeVVSWeights()
virtual void setClipping(const unsigned int &flags)
void initMovingEdge(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &_cMo)
void trackMovingEdge(const vpImage< unsigned char > &I)
virtual void initCircle(const vpPoint &p1, const vpPoint &p2, const vpPoint &p3, double radius, int idFace=0, const std::string &name="")
virtual void initCylinder(const vpPoint &p1, const vpPoint &p2, double radius, int idFace=0, const std::string &name="")
void computeVVSFirstPhaseFactor(const vpImage< unsigned char > &I, unsigned int lvl=0)
void updateMovingEdge(const vpImage< unsigned char > &I)
unsigned int initMbtTracking(unsigned int &nberrors_lines, unsigned int &nberrors_cylinders, unsigned int &nberrors_circles)
virtual std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false)
void reinitMovingEdge(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &_cMo)
void setMovingEdge(const vpMe &me)
virtual void initFaceFromCorners(vpMbtPolygon &polygon)
void visibleFace(const vpImage< unsigned char > &_I, const vpHomogeneousMatrix &_cMo, bool &newvisibleline)
virtual void testTracking()
virtual void setUseKltTracking(const std::string &name, const bool &useKltTracking)
virtual void setCameraParameters(const vpCameraParameters &camera)
virtual std::vector< std::vector< double > > getFeaturesForDisplay()
virtual void setLod(bool useLod, const std::string &name="")
virtual void setDisplayFeatures(bool displayF)
virtual double getGoodMovingEdgesRatioThreshold() const
virtual int getTrackerType() const
std::map< std::string, TrackerWrapper * > m_mapOfTrackers
virtual void setKltMaskBorder(const unsigned int &e)
virtual void setProjectionErrorComputation(const bool &flag)
unsigned int m_nb_feat_edge
Number of moving-edges features.
virtual void setKltThresholdAcceptation(double th)
std::map< std::string, vpHomogeneousMatrix > m_mapOfCameraTransformationMatrix
virtual double getKltThresholdAcceptation() const
virtual void getLcircle(std::list< vpMbtDistanceCircle * > &circlesList, unsigned int level=0) const
virtual void setDepthDenseFilteringMaxDistance(double maxDistance)
virtual std::list< vpMbtDistanceKltCylinder * > & getFeaturesKltCylinder()
virtual void setOgreShowConfigDialog(bool showConfigDialog)
virtual vpHomogeneousMatrix getPose() const
virtual std::vector< cv::Point2f > getKltPoints() const
virtual void setGoodNbRayCastingAttemptsRatio(const double &ratio)
virtual std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false)
virtual void setGoodMovingEdgesRatioThreshold(double threshold)
virtual void setAngleAppear(const double &a)
virtual void setDepthDenseFilteringMinDistance(double minDistance)
virtual void setOptimizationMethod(const vpMbtOptimizationMethod &opt)
virtual void setUseEdgeTracking(const std::string &name, const bool &useEdgeTracking)
virtual void computeVVSInteractionMatrixAndResidu()
virtual void setDepthNormalSamplingStep(unsigned int stepX, unsigned int stepY)
vpColVector m_w
Robust weights.
virtual void saveConfigFile(const std::string &settingsFile) const
virtual std::string getReferenceCameraName() const
virtual void setMinLineLengthThresh(double minLineLengthThresh, const std::string &name="")
virtual std::map< std::string, int > getCameraTrackerTypes() const
vpMbGenericTracker()
json namespace shortcut
virtual void setNearClippingDistance(const double &dist)
virtual void initFromPose(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMo)
virtual void loadConfigFileJSON(const std::string &configFile, bool verbose=true)
virtual void getCameraParameters(vpCameraParameters &camera) const
unsigned int m_nb_feat_depthDense
Number of depth dense features.
virtual void initFaceFromCorners(vpMbtPolygon &polygon)
virtual void setAngleDisappear(const double &a)
virtual void setDepthNormalPclPlaneEstimationMethod(int method)
virtual void computeProjectionError()
virtual void reInitModel(const vpImage< unsigned char > &I, const std::string &cad_name, const vpHomogeneousMatrix &cMo, bool verbose=false, const vpHomogeneousMatrix &T=vpHomogeneousMatrix())
virtual void setUseDepthDenseTracking(const std::string &name, const bool &useDepthDenseTracking)
virtual void setMovingEdge(const vpMe &me)
virtual void setScanLineVisibilityTest(const bool &v)
virtual void setDepthNormalPclPlaneEstimationRansacMaxIter(int maxIter)
virtual void setDepthNormalPclPlaneEstimationRansacThreshold(double threshold)
virtual void setFeatureFactors(const std::map< vpTrackerType, double > &mapOfFeatureFactors)
virtual void setProjectionErrorDisplayArrowThickness(unsigned int thickness)
virtual void getLcylinder(std::list< vpMbtDistanceCylinder * > &cylindersList, unsigned int level=0) const
virtual std::vector< vpImagePoint > getKltImagePoints() const
virtual void setDepthDenseSamplingStep(unsigned int stepX, unsigned int stepY)
virtual std::vector< std::string > getCameraNames() const
virtual void setDepthDenseFilteringMethod(int method)
vpColVector m_weightedError
Weighted error.
virtual std::list< vpMbtDistanceKltPoints * > & getFeaturesKlt()
vpMatrix m_L
Interaction matrix.
virtual void init(const vpImage< unsigned char > &I)
virtual void setKltOpencv(const vpKltOpencv &t)
virtual void setDepthNormalFaceCentroidMethod(const vpMbtFaceDepthNormal::vpFaceCentroidType &method)
virtual std::pair< std::vector< vpPolygon >, std::vector< std::vector< vpPoint > > > getPolygonFaces(bool orderPolygons=true, bool useVisibility=true, bool clipPolygon=false)
virtual void computeVVSWeights()
virtual void loadConfigFileXML(const std::string &configFile, bool verbose=true)
virtual vpMbHiddenFaces< vpMbtPolygon > & getFaces()
virtual void setNbRayCastingAttemptsForVisibility(const unsigned int &attempts)
virtual void computeVVSInit()
virtual void setDepthDenseFilteringOccupancyRatio(double occupancyRatio)
virtual unsigned int getKltMaskBorder() const
virtual unsigned int getNbPolygon() const
vpColVector m_error
(s - s*)
virtual void setReferenceCameraName(const std::string &referenceCameraName)
virtual void setProjectionErrorDisplay(bool display)
std::map< vpTrackerType, double > m_mapOfFeatureFactors
Ponderation between each feature type in the VVS stage.
virtual void computeVVS(std::map< std::string, const vpImage< unsigned char > * > &mapOfImages)
virtual std::list< vpMbtDistanceCircle * > & getFeaturesCircle()
virtual void initCylinder(const vpPoint &p1, const vpPoint &p2, double radius, int idFace=0, const std::string &name="")
virtual void getLline(std::list< vpMbtDistanceLine * > &linesList, unsigned int level=0) const
virtual void setTrackerType(int type)
unsigned int m_nb_feat_klt
Number of klt features.
virtual void initFromPoints(const vpImage< unsigned char > &I1, const vpImage< unsigned char > &I2, const std::string &initFile1, const std::string &initFile2)
virtual unsigned int getNbPoints(unsigned int level=0) const
virtual void setProjectionErrorDisplayArrowLength(unsigned int length)
virtual vpKltOpencv getKltOpencv() const
virtual void initFaceFromLines(vpMbtPolygon &polygon)
virtual vpMbtPolygon * getPolygon(unsigned int index)
virtual int getKltNbPoints() const
unsigned int m_nb_feat_depthNormal
Number of depth normal features.
virtual void initClick(const vpImage< unsigned char > &I1, const vpImage< unsigned char > &I2, const std::string &initFile1, const std::string &initFile2, bool displayHelp=false, const vpHomogeneousMatrix &T1=vpHomogeneousMatrix(), const vpHomogeneousMatrix &T2=vpHomogeneousMatrix())
virtual vpMe getMovingEdge() const
virtual void setFarClippingDistance(const double &dist)
virtual void initCircle(const vpPoint &p1, const vpPoint &p2, const vpPoint &p3, double radius, int idFace=0, const std::string &name="")
virtual void preTracking(std::map< std::string, const vpImage< unsigned char > * > &mapOfImages, std::map< std::string, pcl::PointCloud< pcl::PointXYZ >::ConstPtr > &mapOfPointClouds)
virtual double computeCurrentProjectionError(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &_cMo, const vpCameraParameters &_cam)
virtual void setClipping(const unsigned int &flags)
virtual void loadConfigFile(const std::string &configFile, bool verbose=true)
virtual void setCameraTransformationMatrix(const std::string &cameraName, const vpHomogeneousMatrix &cameraTransformationMatrix)
virtual void setUseDepthNormalTracking(const std::string &name, const bool &useDepthNormalTracking)
virtual void setPose(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cdMo)
virtual void setOgreVisibilityTest(const bool &v)
virtual void loadModel(const std::string &modelFile, bool verbose=false, const vpHomogeneousMatrix &T=vpHomogeneousMatrix())
std::string m_referenceCameraName
Name of the reference camera.
virtual void setMask(const vpImage< bool > &mask)
virtual void setMinPolygonAreaThresh(double minPolygonAreaThresh, const std::string &name="")
virtual void setDepthNormalFeatureEstimationMethod(const vpMbtFaceDepthNormal::vpFeatureEstimationType &method)
virtual void display(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, const vpColor &col, unsigned int thickness=1, bool displayFullModel=false)
friend void from_json(const nlohmann::json &j, TrackerWrapper &t)
Load configuration settings from a JSON object for a tracker wrapper.
virtual void track(const vpImage< unsigned char > &I)
virtual unsigned int getClipping() const
virtual std::map< int, vpImagePoint > getKltImagePointsWithId() const
Implementation of the polygons management for the model-based trackers.
vpAROgre * getOgreContext()
virtual void setScanLineVisibilityTest(const bool &v)
virtual std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false)
virtual void setPose(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cdMo)
virtual void initCylinder(const vpPoint &, const vpPoint &, double, int, const std::string &name="")
void preTracking(const vpImage< unsigned char > &I)
virtual void initCircle(const vpPoint &, const vpPoint &, const vpPoint &, double, int, const std::string &name="")
bool postTracking(const vpImage< unsigned char > &I, vpColVector &w)
virtual void computeVVSInteractionMatrixAndResidu()
virtual void reinit(const vpImage< unsigned char > &I)
virtual void initFaceFromCorners(vpMbtPolygon &polygon)
void setCameraParameters(const vpCameraParameters &cam)
virtual void computeVVSInit()
virtual void initFaceFromLines(vpMbtPolygon &polygon)
virtual void setProjectionErrorDisplayArrowLength(unsigned int length)
double m_lambda
Gain of the virtual visual servoing stage.
bool modelInitialised
virtual void computeVVSPoseEstimation(const bool isoJoIdentity, unsigned int iter, vpMatrix &L, vpMatrix &LTL, vpColVector &R, const vpColVector &error, vpColVector &error_prev, vpColVector &LTR, double &mu, vpColVector &v, const vpColVector *const w=NULL, vpColVector *const m_w_prev=NULL)
bool m_projectionErrorDisplay
Display gradient and model orientation for projection error computation.
virtual void setOgreShowConfigDialog(bool showConfigDialog)
virtual void setMask(const vpImage< bool > &mask)
virtual void getCameraParameters(vpCameraParameters &cam) const
vpImage< unsigned char > m_I
Grayscale image buffer, used when passing color images.
virtual void computeVVSCheckLevenbergMarquardt(unsigned int iter, vpColVector &error, const vpColVector &m_error_prev, const vpHomogeneousMatrix &cMoPrev, double &mu, bool &reStartFromLastIncrement, vpColVector *const w=NULL, const vpColVector *const m_w_prev=NULL)
virtual void setDisplayFeatures(bool displayF)
virtual vpHomogeneousMatrix getPose() const
bool m_computeInteraction
vpMatrix oJo
The Degrees of Freedom to estimate.
vpMatrix covarianceMatrix
Covariance matrix.
double m_initialMu
Initial Mu for Levenberg Marquardt optimization loop.
bool computeProjError
vpHomogeneousMatrix m_cMo
The current pose.
vpCameraParameters m_cam
The camera parameters.
double projectionError
double m_stopCriteriaEpsilon
Epsilon threshold to stop the VVS optimization loop.
bool useOgre
Use Ogre3d for visibility tests.
virtual void computeVVSWeights(vpRobust &robust, const vpColVector &error, vpColVector &w)
vpMbHiddenFaces< vpMbtPolygon > faces
Set of faces describing the object.
virtual void setCameraParameters(const vpCameraParameters &cam)
virtual void setAngleDisappear(const double &a)
virtual void setScanLineVisibilityTest(const bool &v)
virtual void setOgreVisibilityTest(const bool &v)
virtual void computeCovarianceMatrixVVS(const bool isoJoIdentity, const vpColVector &w_true, const vpHomogeneousMatrix &cMoPrev, const vpMatrix &L_true, const vpMatrix &LVJ_true, const vpColVector &error)
vpMbtOptimizationMethod m_optimizationMethod
Optimization method used.
bool displayFeatures
If true, the features are displayed.
virtual void setProjectionErrorDisplay(bool display)
double angleDisappears
Angle used to detect a face disappearance.
virtual void setNearClippingDistance(const double &dist)
virtual void setFarClippingDistance(const double &dist)
double distFarClip
Distance for near clipping.
bool m_isoJoIdentity
Boolean to know if oJo is identity (for fast computation)
bool useScanLine
Use Scanline for visibility tests.
virtual void setProjectionErrorComputation(const bool &flag)
virtual void setClipping(const unsigned int &flags)
double angleAppears
Angle used to detect a face appearance.
virtual void setOptimizationMethod(const vpMbtOptimizationMethod &opt)
virtual void initFromPose(const vpImage< unsigned char > &I, const std::string &initFile)
virtual void setProjectionErrorDisplayArrowThickness(unsigned int thickness)
virtual void setAngleAppear(const double &a)
bool computeCovariance
Flag used to specify if the covariance matrix has to be computed or not.
double distNearClip
Distance for near clipping.
unsigned int m_maxIter
Maximum number of iterations of the virtual visual servoing stage.
unsigned int clippingFlag
Flags specifying which clipping to used.
vpMbHiddenFaces< vpMbtPolygon > m_projectionErrorFaces
Set of faces describing the object, used for projection error.
virtual void loadConfigFile(const std::string &configFile, bool verbose=true)
Manage a circle used in the model-based tracker.
Manage a cylinder used in the model-based tracker.
Implementation of a polygon of the model containing points of interest. It is used by the model-based...
Implementation of a polygon of the model containing points of interest. It is used by the model-based...
Manage the line of a polygon used in the model-based tracker.
Implementation of a polygon of the model used by the model-based tracker.
Parse an Xml file to extract configuration parameters of a mbtConfig object.
@ TOO_NEAR
Point removed because too near image borders.
Definition vpMeSite.h:90
@ THRESHOLD
Point removed due to a threshold problem.
Definition vpMeSite.h:88
@ CONTRAST
Point removed due to a contrast problem.
Definition vpMeSite.h:84
@ M_ESTIMATOR
Point removed during virtual visual-servoing because considered as an outlier.
Definition vpMeSite.h:89
@ NO_SUPPRESSION
Point used by the tracker.
Definition vpMeSite.h:83
Definition vpMe.h:122
Class that defines a 3D point in the object frame and allows forward projection of a 3D point in the ...
Definition vpPoint.h:77
Error that can be emitted by the vpTracker class and its derivatives.
@ notEnoughPointError
Not enough point to track.
@ initializationError
Tracker initialization error.
@ fatalError
Tracker fatal error.
vpVelocityTwistMatrix buildFrom(const vpTranslationVector &t, const vpRotationMatrix &R)