View Javadoc
1   /*
2    * Copyright (C) 2017 Alberto Irurueta Carro (alberto@irurueta.com)
3    *
4    * Licensed under the Apache License, Version 2.0 (the "License");
5    * you may not use this file except in compliance with the License.
6    * You may obtain a copy of the License at
7    *
8    *         http://www.apache.org/licenses/LICENSE-2.0
9    *
10   * Unless required by applicable law or agreed to in writing, software
11   * distributed under the License is distributed on an "AS IS" BASIS,
12   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13   * See the License for the specific language governing permissions and
14   * limitations under the License.
15   */
16  
17  package com.irurueta.ar.sfm;
18  
import com.irurueta.algebra.Matrix;
import com.irurueta.ar.calibration.estimators.KruppaDualImageOfAbsoluteConicEstimator;
import com.irurueta.ar.calibration.estimators.LMSEImageOfAbsoluteConicEstimator;
import com.irurueta.ar.epipolar.Corrector;
import com.irurueta.ar.epipolar.EpipolarException;
import com.irurueta.ar.epipolar.EssentialMatrix;
import com.irurueta.ar.epipolar.FundamentalMatrix;
import com.irurueta.ar.epipolar.estimators.EightPointsFundamentalMatrixEstimator;
import com.irurueta.ar.epipolar.estimators.FundamentalMatrixEstimatorMethod;
import com.irurueta.ar.epipolar.estimators.FundamentalMatrixRobustEstimator;
import com.irurueta.ar.epipolar.estimators.LMedSFundamentalMatrixRobustEstimator;
import com.irurueta.ar.epipolar.estimators.MSACFundamentalMatrixRobustEstimator;
import com.irurueta.ar.epipolar.estimators.PROMedSFundamentalMatrixRobustEstimator;
import com.irurueta.ar.epipolar.estimators.PROSACFundamentalMatrixRobustEstimator;
import com.irurueta.ar.epipolar.estimators.RANSACFundamentalMatrixRobustEstimator;
import com.irurueta.ar.epipolar.estimators.SevenPointsFundamentalMatrixEstimator;
import com.irurueta.geometry.PinholeCamera;
import com.irurueta.geometry.PinholeCameraIntrinsicParameters;
import com.irurueta.geometry.Point2D;
import com.irurueta.geometry.Point3D;
import com.irurueta.geometry.Transformation2D;
import com.irurueta.geometry.estimators.*;
import com.irurueta.numerical.robust.RobustEstimatorMethod;

import java.util.ArrayList;
import java.util.BitSet;
import java.util.List;
import java.util.Objects;
46  
47  /**
48   * Base class in charge of estimating cameras and 3D reconstructed points from sparse
49   * image point correspondences for multiple views.
50   *
51   * @param <C> type of configuration.
52   * @param <R> type of re-constructor.
53   * @param <L> type of listener.
54   */
55  public abstract class BaseSparseReconstructor<C extends BaseSparseReconstructorConfiguration<C>,
56          R extends BaseSparseReconstructor<C, R, L>, L extends BaseSparseReconstructorListener<R>> {
57  
58      /**
59       * Minimum required number of views.
60       */
61      public static final int MIN_NUMBER_OF_VIEWS = 2;
62  
63      /**
64       * Default scale.
65       */
66      protected static final double DEFAULT_SCALE = 1.0;
67  
68      /**
69       * Current estimated camera in a metric stratum (i.e. up to scale).
70       */
71      protected EstimatedCamera currentMetricEstimatedCamera;
72  
73      /**
74       * Previous estimated camera in a metric stratum (i.e. up to scale).
75       */
76      protected EstimatedCamera previousMetricEstimatedCamera;
77  
78      /**
79       * Reconstructed 3D points which still remain active to match next view in a metric stratum (i.e. up
80       * to scale).
81       */
82      protected List<ReconstructedPoint3D> activeMetricReconstructedPoints;
83  
84      /**
85       * Current estimated scale. This will typically converge to a constant value as more views are
86       * processed.
87       * The smaller the variance of estimated scale, the more accurate the scale will be.
88       */
89      protected double currentScale = DEFAULT_SCALE;
90  
91      /**
92       * Current estimated camera in euclidean stratum (i.e. with actual scale).
93       */
94      protected EstimatedCamera currentEuclideanEstimatedCamera;
95  
96      /**
97       * Previous estimated camera in Euclidean stratum (i.e. with actual scale).
98       */
99      protected EstimatedCamera previousEuclideanEstimatedCamera;
100 
101     /**
102      * Reconstructed 3D points which still remain active to match next view in Euclidean stratum (i.e.
103      * with actual scale).
104      */
105     protected List<ReconstructedPoint3D> activeEuclideanReconstructedPoints;
106 
107     /**
108      * Configuration for this re-constructor.
109      */
110     protected C configuration;
111 
112     /**
113      * Listener in charge of handling events such as when reconstruction starts, ends,
114      * when certain data is needed or when estimation of data has been computed.
115      */
116     protected L listener;
117 
118     /**
119      * Indicates whether reconstruction has failed or not.
120      */
121     protected volatile boolean failed;
122 
123     /**
124      * Indicates whether reconstruction is running or not.
125      */
126     protected volatile boolean running;
127 
128     /**
129      * Current estimated fundamental matrix.
130      */
131     private EstimatedFundamentalMatrix currentEstimatedFundamentalMatrix;
132 
133     /**
134      * Indicates whether reconstruction has been cancelled or not.
135      */
136     private volatile boolean cancelled;
137 
138     /**
139      * Counter of number of processed views.
140      */
141     private int viewCount;
142 
143     /**
144      * Indicates whether reconstruction has finished or not.
145      */
146     private boolean finished = false;
147 
148     /**
149      * All samples (tracked and non-tracked) on previous view.
150      */
151     private List<Sample2D> allPreviousViewSamples;
152 
153     /**
154      * Tracked samples on previous view.
155      */
156     private List<Sample2D> previousViewTrackedSamples;
157 
158     /**
159      * Tracked samples on last processed view (i.e. current view).
160      */
161     private List<Sample2D> currentViewTrackedSamples;
162 
163     /**
     * New samples on last processed view (i.e. current view).
165      */
166     private List<Sample2D> currentViewNewlySpawnedSamples;
167 
168     /**
169      * Active matches between current and previous views.
170      */
171     private final List<MatchedSamples> matches = new ArrayList<>();
172 
173     /**
174      * ID of previous view.
175      */
176     private int previousViewId;
177 
178     /**
179      * ID of current view.
180      */
181     private int currentViewId;
182 
183     /**
184      * Constructor.
185      *
186      * @param configuration configuration for this re-constructor.
187      * @param listener      listener in charge of handling events.
188      * @throws NullPointerException if listener or configuration is not provided.
189      */
190     protected BaseSparseReconstructor(final C configuration, final L listener) {
191         if (configuration == null || listener == null) {
192             throw new NullPointerException();
193         }
194         this.configuration = configuration;
195         this.listener = listener;
196     }
197 
198     /**
199      * Gets configuration for this re-constructor.
200      *
201      * @return configuration for this re-constructor.
202      */
203     public C getConfiguration() {
204         return configuration;
205     }
206 
207     /**
208      * Gets listener in charge of handling events such as when reconstruction starts,
209      * ends, when certain data is needed or when estimation of data has been computed.
210      *
211      * @return listener in charge of handling events.
212      */
213     public L getListener() {
214         return listener;
215     }
216 
217     /**
218      * Indicates whether reconstruction is running or not.
219      *
220      * @return true if reconstruction is running, false if reconstruction has stopped
221      * for any reason.
222      */
223     public boolean isRunning() {
224         return running;
225     }
226 
227     /**
228      * Indicates whether reconstruction has been cancelled or not.
229      *
230      * @return true if reconstruction has been cancelled, false otherwise.
231      */
232     public boolean isCancelled() {
233         return cancelled;
234     }
235 
236     /**
237      * Indicates whether reconstruction has failed or not.
238      *
239      * @return true if reconstruction has failed, false otherwise.
240      */
241     public boolean hasFailed() {
242         return failed;
243     }
244 
245     /**
246      * Indicates whether the reconstruction has finished.
247      *
248      * @return true if reconstruction has finished, false otherwise.
249      */
250     public boolean isFinished() {
251         return finished;
252     }
253 
254     /**
255      * Gets counter of number of processed views.
256      *
257      * @return counter of number of processed views.
258      */
259     public int getViewCount() {
260         return viewCount;
261     }
262 
263     /**
264      * Gets estimated fundamental matrix for current view.
265      * This fundamental matrix relates current view with the previously processed one.
266      *
267      * @return current estimated fundamental matrix.
268      */
269     public EstimatedFundamentalMatrix getCurrentEstimatedFundamentalMatrix() {
270         return currentEstimatedFundamentalMatrix;
271     }
272 
273     /**
274      * Gets estimated metric camera for current view (i.e. up to scale).
275      *
276      * @return current estimated metric camera.
277      */
278     public EstimatedCamera getCurrentMetricEstimatedCamera() {
279         return currentMetricEstimatedCamera;
280     }
281 
282     /**
283      * Gets estimated camera for previous view (i.e. up to scale).
284      *
285      * @return previous estimated metric camera.
286      */
287     public EstimatedCamera getPreviousMetricEstimatedCamera() {
288         return previousMetricEstimatedCamera;
289     }
290 
291     /**
292      * Gets estimated euclidean camera for current view (i.e. with actual scale).
293      *
294      * @return current estimated euclidean camera.
295      */
296     public EstimatedCamera getCurrentEuclideanEstimatedCamera() {
297         return currentEuclideanEstimatedCamera;
298     }
299 
300     /**
301      * Gets estimated Euclidean camera for previous view (i.e. with actual scale).
302      *
303      * @return previous estimated euclidean camera.
304      */
305     public EstimatedCamera getPreviousEuclideanEstimatedCamera() {
306         return previousEuclideanEstimatedCamera;
307     }
308 
309     /**
310      * Gets metric reconstructed 3D points (i.e. up to scale) which still remain active to match next view.
311      *
312      * @return active metric reconstructed 3D points.
313      */
314     public List<ReconstructedPoint3D> getActiveMetricReconstructedPoints() {
315         return activeMetricReconstructedPoints;
316     }
317 
318     /**
319      * Gets Euclidean reconstructed 3D points (i.e. with actual scale) which still remain active to match
320      * next view.
321      *
322      * @return active euclidean reconstructed 3D points.
323      */
324     public List<ReconstructedPoint3D> getActiveEuclideanReconstructedPoints() {
325         return activeEuclideanReconstructedPoints;
326     }
327 
328     /**
329      * Gets current estimated scale. This will typically converge to a constant value as more views are
330      * processed.
331      * The smaller the variance of estimated scale, the more accurate the scale will be.
332      *
333      * @return current estimated scale.
334      */
335     public double getCurrentScale() {
336         return currentScale;
337     }
338 
339     /**
340      * Gets tracked samples on previous view.
341      *
342      * @return tracked samples on previous view.
343      */
344     public List<Sample2D> getPreviousViewTrackedSamples() {
345         return previousViewTrackedSamples;
346     }
347 
348     /**
349      * Gets tracked samples (from previous view) on current view.
350      *
351      * @return tracked samples on current view
352      */
353     public List<Sample2D> getCurrentViewTrackedSamples() {
354         return currentViewTrackedSamples;
355     }
356 
357     /**
358      * Gets new samples (not tracked) on current view.
359      *
360      * @return new samples on current view.
361      */
362     public List<Sample2D> getCurrentViewNewlySpawnedSamples() {
363         return currentViewNewlySpawnedSamples;
364     }
365 
    /**
     * Process one view of all the available data during the reconstruction.
     * This method can be called multiple times instead of {@link #start()} to build the
     * reconstruction step by step, one view at a time.
     * This method is useful when data is gathered on real time from a camera and the
     * number of views is unknown.
     * Listener callbacks are issued in a fixed order per call: onStart (first call only),
     * hasMoreViewsAvailable, onRequestSamples, then the per-view processing callbacks,
     * and finally onCancel if cancellation was requested.
     *
     * @return true if more views can be processed, false when reconstruction has finished.
     */
    public boolean processOneView() {
        if (viewCount == 0) {
            if (running) {
                // already started
                return true;
            }

            // first call: wipe any stale state from a previous run before starting
            reset();
            running = true;

            //noinspection unchecked
            listener.onStart((R) this);
        }

        // the listener decides when input is exhausted; when it is, finish cleanly
        //noinspection unchecked
        if (!listener.hasMoreViewsAvailable((R) this)) {
            //noinspection unchecked
            listener.onFinish((R) this);
            running = false;
            finished = true;
            return false;
        }

        // fresh sample lists are handed to the listener to be filled for this view
        previousViewTrackedSamples = new ArrayList<>();
        currentViewTrackedSamples = new ArrayList<>();
        currentViewNewlySpawnedSamples = new ArrayList<>();
        //noinspection unchecked
        listener.onRequestSamples((R) this, previousViewId, viewCount, previousViewTrackedSamples,
                currentViewTrackedSamples, currentViewNewlySpawnedSamples);

        boolean processed;
        if (isFirstView()) {
            currentEstimatedFundamentalMatrix = null;
            // for first view we simply keep samples (if enough are provided)
            processed = processFirstView();
        } else {

            if (isSecondView()) {
                // for second view, check that we have enough samples
                processed = processSecondView();
            } else {
                processed = processAdditionalView();
            }
        }

        // only advance the view counter when the view was accepted; a rejected view
        // will be retried with the next batch of samples under the same view count
        if (processed) {
            viewCount++;
        }

        if (cancelled) {
            //noinspection unchecked
            listener.onCancel((R) this);
        }

        return !finished;
    }
431 
432     /**
433      * Indicates whether current view is the first view.
434      *
435      * @return true if current view is the first view, false otherwise.
436      */
437     public boolean isFirstView() {
438         return viewCount == 0 && (previousViewTrackedSamples == null || previousViewTrackedSamples.isEmpty());
439     }
440 
441     /**
442      * Indicates whether current view is the second view.
443      *
444      * @return true if current view is the second view, false otherwise.
445      */
446     public boolean isSecondView() {
447         return !isFirstView() && currentEstimatedFundamentalMatrix == null;
448     }
449 
450     /**
451      * Indicates whether current view is an additional view.
452      *
453      * @return true if current view is an additional view, false otherwise.
454      */
455     public boolean isAdditionalView() {
456         return !isFirstView() && !isSecondView();
457     }
458 
459     /**
460      * Starts reconstruction of all available data to reconstruct the whole scene.
461      * If reconstruction has already started and is running, calling this method has
462      * no effect.
463      * This method is useful when all data is available before starting the reconstruction.
464      */
465     public void start() {
466         while (processOneView()) {
467             if (cancelled) {
468                 break;
469             }
470         }
471     }
472 
473     /**
474      * Cancels reconstruction.
475      * If reconstruction has already been cancelled, calling this method has no effect.
476      */
477     public void cancel() {
478         if (cancelled) {
479             // already cancelled
480             return;
481         }
482 
483         cancelled = true;
484     }
485 
486     /**
487      * Resets this instance so that a reconstruction can be started from the beginning without cancelling
488      * current one.
489      */
490     public void reset() {
491         if (previousViewTrackedSamples != null) {
492             previousViewTrackedSamples.clear();
493         }
494         if (currentViewTrackedSamples != null) {
495             currentViewTrackedSamples.clear();
496         }
497         if (currentViewNewlySpawnedSamples != null) {
498             currentViewNewlySpawnedSamples.clear();
499         }
500         matches.clear();
501 
502         cancelled = failed = false;
503         viewCount = 0;
504         running = false;
505 
506         currentEstimatedFundamentalMatrix = null;
507         currentMetricEstimatedCamera = previousMetricEstimatedCamera = null;
508         activeMetricReconstructedPoints = null;
509         currentScale = DEFAULT_SCALE;
510         currentEuclideanEstimatedCamera = previousEuclideanEstimatedCamera = null;
511         activeEuclideanReconstructedPoints = null;
512 
513         previousViewId = 0;
514         currentViewId = 0;
515 
516         finished = false;
517     }
518 
    /**
     * Called when processing one frame is successfully finished. This can be done to estimate scale on
     * those implementations where scale can be measured or is already known.
     * Implementations are expected to produce the Euclidean counterparts of the metric
     * results (cameras, points and scale) before the corresponding listener callbacks
     * are issued by the calling code.
     *
     * @param isInitialPairOfViews true if initial pair of views is being processed, false otherwise.
     * @return true if post-processing succeeded, false otherwise.
     */
    protected abstract boolean postProcessOne(final boolean isInitialPairOfViews);
527 
528     /**
529      * Processes data for first view.
530      *
531      * @return true if view was successfully processed, false otherwise.
532      */
533     private boolean processFirstView() {
534         if (hasEnoughSamplesForFundamentalMatrixEstimation(currentViewTrackedSamples)) {
535             //noinspection unchecked
536             listener.onSamplesAccepted((R) this, viewCount, previousViewTrackedSamples, currentViewTrackedSamples);
537             if (allPreviousViewSamples == null) {
538                 allPreviousViewSamples = new ArrayList<>();
539             } else {
540                 allPreviousViewSamples.clear();
541             }
542 
543             allPreviousViewSamples.addAll(currentViewTrackedSamples);
544             allPreviousViewSamples.addAll(currentViewNewlySpawnedSamples);
545 
546             previousViewTrackedSamples = currentViewTrackedSamples;
547             previousViewId = viewCount;
548             return true;
549         } else {
550             //noinspection unchecked
551             listener.onSamplesRejected((R) this, viewCount, previousViewTrackedSamples, currentViewTrackedSamples);
552             return false;
553         }
554     }
555 
    /**
     * Processes data for second view.
     * Requests matches from the listener, attempts to estimate the fundamental matrix
     * (general scene first, then planar scene, depending on configuration), and on
     * success estimates the initial pair of cameras and 3D points followed by
     * post-processing into the Euclidean stratum.
     *
     * @return true if view was successfully processed, false otherwise.
     */
    private boolean processSecondView() {
        if (hasEnoughSamplesForFundamentalMatrixEstimation(currentViewTrackedSamples)) {

            // find matches
            matches.clear();

            // matching is up to listener implementation
            //noinspection unchecked
            listener.onRequestMatches((R) this, allPreviousViewSamples, previousViewTrackedSamples,
                    currentViewTrackedSamples, previousViewId, viewCount, matches);

            if (hasEnoughMatchesForFundamentalMatrixEstimation(matches)) {
                // if enough matches are retrieved, attempt to compute
                // fundamental matrix
                // NOTE: short-circuit order matters — the general-scene estimation is
                // tried first and the planar fallback only runs when it fails or is
                // disallowed by configuration
                if ((configuration.isGeneralSceneAllowed()
                        && estimateFundamentalMatrix(matches, previousViewId, viewCount, true))
                        || (configuration.isPlanarSceneAllowed()
                        && estimatePlanarFundamentalMatrix(matches, previousViewId, viewCount, true))) {
                    // fundamental matrix could be estimated
                    //noinspection unchecked
                    listener.onSamplesAccepted((R) this, viewCount, previousViewTrackedSamples,
                            currentViewTrackedSamples);

                    // replace the full sample set of the previous view with this view's samples
                    allPreviousViewSamples.clear();
                    allPreviousViewSamples.addAll(currentViewTrackedSamples);
                    allPreviousViewSamples.addAll(currentViewNewlySpawnedSamples);

                    previousViewTrackedSamples = currentViewTrackedSamples;
                    previousViewId = currentViewId;
                    currentViewId = viewCount;

                    //noinspection unchecked
                    listener.onFundamentalMatrixEstimated((R) this, currentEstimatedFundamentalMatrix);

                    if (estimateInitialCamerasAndPoints()) {
                        // cameras and points have been estimated
                        //noinspection unchecked
                        listener.onMetricCameraEstimated((R) this, previousViewId, currentViewId,
                                previousMetricEstimatedCamera, currentMetricEstimatedCamera);
                        //noinspection unchecked
                        listener.onMetricReconstructedPointsEstimated((R) this, matches,
                                activeMetricReconstructedPoints);

                        if (!postProcessOne(true)) {
                            // something failed
                            failed = true;
                            //noinspection unchecked
                            listener.onFail((R) this);
                            return false;
                        } else {
                            // post-processing succeeded
                            //noinspection unchecked
                            listener.onEuclideanCameraEstimated((R) this, previousViewId, currentViewId, currentScale,
                                    previousEuclideanEstimatedCamera, currentEuclideanEstimatedCamera);
                            //noinspection unchecked
                            listener.onEuclideanReconstructedPointsEstimated((R) this, currentScale,
                                    activeEuclideanReconstructedPoints);
                            return true;
                        }
                    } else {
                        // initial cameras failed
                        failed = true;
                        //noinspection unchecked
                        listener.onFail((R) this);
                        return false;
                    }
                } else {
                    // estimation of fundamental matrix failed
                    //noinspection unchecked
                    listener.onSamplesRejected((R) this, viewCount, previousViewTrackedSamples,
                            currentViewTrackedSamples);
                    return false;
                }
            }
        }

        // not enough samples or matches: reject the view so it can be retried
        //noinspection unchecked
        listener.onSamplesRejected((R) this, viewCount, previousViewTrackedSamples, currentViewTrackedSamples);
        return false;
    }
641 
642     /**
643      * Processes data for one additional view.
644      *
645      * @return true if view was successfully processed, false otherwise.
646      */
647     private boolean processAdditionalView() {
648         // find matches
649         matches.clear();
650 
651         //noinspection unchecked
652         listener.onRequestMatches((R) this, allPreviousViewSamples, previousViewTrackedSamples,
653                 currentViewTrackedSamples, currentViewId, viewCount, matches);
654 
655         final var points3D = new ArrayList<Point3D>();
656         final var points2D = new ArrayList<Point2D>();
657         final var qualityScores = setUpCameraEstimatorMatches(points3D, points2D);
658         var samplesRejected = false;
659 
660         if (hasEnoughSamplesForCameraEstimation(points3D, points2D) && hasEnoughMatchesForCameraEstimation(matches)) {
661             // enough matches available.
662             PinholeCamera currentCamera = null;
663             Matrix currentCameraCovariance = null;
664             if (configuration.getUseEPnPForAdditionalCamerasEstimation()) {
665                 // use EPnP for additional cameras' estimation.
666                 // EPnP requires knowledge of camera intrinsics
667 
668                 PinholeCameraIntrinsicParameters intrinsicParameters = null;
669                 if ((configuration.getUseDAQForAdditionalCamerasIntrinsics()
670                         || configuration.getUseDIACForAdditionalCamerasIntrinsics())
671                         && hasEnoughMatchesForFundamentalMatrixEstimation(matches)) {
672 
673                     // compute fundamental matrix to estimate intrinsics
674                     if ((configuration.isGeneralSceneAllowed()
675                             && estimateFundamentalMatrix(matches, currentViewId, viewCount, false))
676                             || (configuration.isPlanarSceneAllowed()
677                             && estimatePlanarFundamentalMatrix(matches, currentViewId, viewCount,
678                             false))) {
679                         // fundamental matrix could be estimated
680                         //noinspection unchecked
681                         listener.onFundamentalMatrixEstimated((R) this, currentEstimatedFundamentalMatrix);
682 
683                         // use fundamental matrix to estimate intrinsics using DIAC or DAQ
684                         if (configuration.getUseDIACForAdditionalCamerasIntrinsics()) {
685                             intrinsicParameters = estimateIntrinsicsDIAC();
686                         } else if (configuration.getUseDAQForAdditionalCamerasIntrinsics()) {
687                             intrinsicParameters = estimateIntrinsicsDAQ();
688                         }
689 
690                     } else {
691                         // fundamental matrix estimation failed
692 
693                         //noinspection unchecked
694                         listener.onSamplesRejected((R) this, viewCount, previousViewTrackedSamples,
695                                 currentViewTrackedSamples);
696                         return false;
697                     }
698 
699                 } else if (configuration.getAdditionalCamerasIntrinsics() != null) {
700                     // use configuration provided intrinsics
701                     intrinsicParameters = configuration.getAdditionalCamerasIntrinsics();
702 
703                     if (intrinsicParameters == null) {
704                         // something failed or bad configuration
705                         failed = true;
706                         //noinspection unchecked
707                         listener.onFail((R) this);
708                         return false;
709                     }
710                 }
711 
712                 try {
713                     if (intrinsicParameters != null) {
714                         // use EPnP for additional cameras estimation
715                         final var cameraEstimator = EPnPPointCorrespondencePinholeCameraRobustEstimator.create(
716                                 intrinsicParameters, points3D, points2D, qualityScores,
717                                 configuration.getAdditionalCamerasRobustEstimationMethod());
718                         cameraEstimator.setPlanarConfigurationAllowed(
719                                 configuration.getAdditionalCamerasAllowPlanarConfiguration());
720                         cameraEstimator.setNullspaceDimension2Allowed(
721                                 configuration.getAdditionalCamerasAllowNullspaceDimension2());
722                         cameraEstimator.setNullspaceDimension3Allowed(
723                                 configuration.getAdditionalCamerasAllowNullspaceDimension3());
724                         cameraEstimator.setPlanarThreshold(configuration.getAdditionalCamerasPlanarThreshold());
725                         cameraEstimator.setResultRefined(configuration.areAdditionalCamerasRefined());
726                         cameraEstimator.setCovarianceKept(configuration.isAdditionalCamerasCovarianceKept());
727                         cameraEstimator.setFastRefinementUsed(configuration.getAdditionalCamerasUseFastRefinement());
728                         cameraEstimator.setConfidence(configuration.getAdditionalCamerasConfidence());
729                         cameraEstimator.setMaxIterations(configuration.getAdditionalCamerasMaxIterations());
730 
731                         switch (configuration.getAdditionalCamerasRobustEstimationMethod()) {
732                             case LMEDS:
733                                 ((LMedSEPnPPointCorrespondencePinholeCameraRobustEstimator) cameraEstimator)
734                                         .setStopThreshold(configuration.getAdditionalCamerasThreshold());
735                                 break;
736                             case MSAC:
737                                 ((MSACEPnPPointCorrespondencePinholeCameraRobustEstimator) cameraEstimator)
738                                         .setThreshold(configuration.getAdditionalCamerasThreshold());
739                                 break;
740                             case PROMEDS:
741                                 ((PROMedSEPnPPointCorrespondencePinholeCameraRobustEstimator) cameraEstimator)
742                                         .setStopThreshold(configuration.getAdditionalCamerasThreshold());
743                                 break;
744                             case PROSAC:
745                                 var prosacCameraEstimator =
746                                         (PROSACEPnPPointCorrespondencePinholeCameraRobustEstimator) cameraEstimator;
747                                 prosacCameraEstimator.setThreshold(configuration.getAdditionalCamerasThreshold());
748                                 prosacCameraEstimator.setComputeAndKeepInliersEnabled(
749                                         configuration.getAdditionalCamerasComputeAndKeepInliers());
750                                 prosacCameraEstimator.setComputeAndKeepResidualsEnabled(
751                                         configuration.getAdditionalCamerasComputeAndKeepResiduals());
752                                 break;
753                             case RANSAC:
754                                 var ransacCameraEstimator =
755                                         (RANSACEPnPPointCorrespondencePinholeCameraRobustEstimator) cameraEstimator;
756                                 ransacCameraEstimator.setThreshold(configuration.getAdditionalCamerasThreshold());
757                                 ransacCameraEstimator.setComputeAndKeepInliersEnabled(
758                                         configuration.getAdditionalCamerasComputeAndKeepInliers());
759                                 ransacCameraEstimator.setComputeAndKeepResidualsEnabled(
760                                         configuration.getAdditionalCamerasComputeAndKeepResiduals());
761                                 break;
762                             default:
763                                 break;
764                         }
765 
766                         cameraEstimator.setSuggestSkewnessValueEnabled(
767                                 configuration.isAdditionalCamerasSuggestSkewnessValueEnabled());
768                         cameraEstimator.setSuggestedSkewnessValue(
769                                 configuration.getAdditionalCamerasSuggestedSkewnessValue());
770 
771                         cameraEstimator.setSuggestHorizontalFocalLengthEnabled(
772                                 configuration.isAdditionalCamerasSuggestHorizontalFocalLengthEnabled());
773                         cameraEstimator.setSuggestedHorizontalFocalLengthValue(
774                                 configuration.getAdditionalCamerasSuggestedHorizontalFocalLengthValue());
775 
776                         cameraEstimator.setSuggestVerticalFocalLengthEnabled(
777                                 configuration.isAdditionalCamerasSuggestVerticalFocalLengthEnabled());
778                         cameraEstimator.setSuggestedVerticalFocalLengthValue(
779                                 configuration.getAdditionalCamerasSuggestedVerticalFocalLengthValue());
780 
781                         cameraEstimator.setSuggestAspectRatioEnabled(
782                                 configuration.isAdditionalCamerasSuggestAspectRatioEnabled());
783                         cameraEstimator.setSuggestedAspectRatioValue(
784                                 configuration.getAdditionalCamerasSuggestedAspectRatioValue());
785 
786                         cameraEstimator.setSuggestPrincipalPointEnabled(
787                                 configuration.isAdditionalCamerasSuggestPrincipalPointEnabled());
788                         cameraEstimator.setSuggestedPrincipalPointValue(
789                                 configuration.getAdditionalCamerasSuggestedPrincipalPointValue());
790 
791                         currentCamera = cameraEstimator.estimate();
792                         currentCameraCovariance = cameraEstimator.getCovariance();
793 
794                         //noinspection unchecked
795                         listener.onSamplesAccepted((R) this, viewCount, previousViewTrackedSamples,
796                                 currentViewTrackedSamples);
797 
798                         allPreviousViewSamples.clear();
799                         allPreviousViewSamples.addAll(currentViewTrackedSamples);
800                         allPreviousViewSamples.addAll(currentViewNewlySpawnedSamples);
801 
802                         previousViewTrackedSamples = currentViewTrackedSamples;
803                         previousViewId = currentViewId;
804                         currentViewId = viewCount;
805                     }
806 
807                 } catch (final Exception e) {
808                     // camera estimation failed
809                     samplesRejected = true;
810                 }
811 
812             } else if (configuration.getUseUPnPForAdditionalCamerasEstimation()) {
813 
814                 try {
815                     // use UPnP for additional cameras estimation
816                     final var cameraEstimator = UPnPPointCorrespondencePinholeCameraRobustEstimator.create(points3D,
817                             points2D, qualityScores, configuration.getAdditionalCamerasRobustEstimationMethod());
818                     cameraEstimator.setPlanarConfigurationAllowed(
819                             configuration.getAdditionalCamerasAllowPlanarConfiguration());
820                     cameraEstimator.setNullspaceDimension2Allowed(
821                             configuration.getAdditionalCamerasAllowNullspaceDimension2());
822                     cameraEstimator.setPlanarThreshold(configuration.getAdditionalCamerasPlanarThreshold());
823                     cameraEstimator.setResultRefined(configuration.areAdditionalCamerasRefined());
824                     cameraEstimator.setCovarianceKept(configuration.isAdditionalCamerasCovarianceKept());
825                     cameraEstimator.setFastRefinementUsed(configuration.getAdditionalCamerasUseFastRefinement());
826                     cameraEstimator.setConfidence(configuration.getAdditionalCamerasConfidence());
827                     cameraEstimator.setMaxIterations(configuration.getAdditionalCamerasMaxIterations());
828 
829                     switch (configuration.getAdditionalCamerasRobustEstimationMethod()) {
830                         case LMEDS:
831                             ((LMedSUPnPPointCorrespondencePinholeCameraRobustEstimator) cameraEstimator)
832                                     .setStopThreshold(configuration.getAdditionalCamerasThreshold());
833                             break;
834                         case MSAC:
835                             ((MSACUPnPPointCorrespondencePinholeCameraRobustEstimator) cameraEstimator)
836                                     .setThreshold(configuration.getAdditionalCamerasThreshold());
837                             break;
838                         case PROMEDS:
839                             ((PROMedSUPnPPointCorrespondencePinholeCameraRobustEstimator) cameraEstimator)
840                                     .setStopThreshold(configuration.getAdditionalCamerasThreshold());
841                             break;
842                         case PROSAC:
843                             var prosacCameraEstimator =
844                                     (PROSACUPnPPointCorrespondencePinholeCameraRobustEstimator) cameraEstimator;
845                             prosacCameraEstimator.setThreshold(configuration.getAdditionalCamerasThreshold());
846                             prosacCameraEstimator.setComputeAndKeepInliersEnabled(
847                                     configuration.getAdditionalCamerasComputeAndKeepInliers());
848                             prosacCameraEstimator.setComputeAndKeepResidualsEnabled(
849                                     configuration.getAdditionalCamerasComputeAndKeepResiduals());
850                             break;
851                         case RANSAC:
852                             var ransacCameraEstimator =
853                                     (RANSACUPnPPointCorrespondencePinholeCameraRobustEstimator) cameraEstimator;
854                             ransacCameraEstimator.setThreshold(configuration.getAdditionalCamerasThreshold());
855                             ransacCameraEstimator.setComputeAndKeepInliersEnabled(
856                                     configuration.getAdditionalCamerasComputeAndKeepInliers());
857                             ransacCameraEstimator.setComputeAndKeepResidualsEnabled(
858                                     configuration.getAdditionalCamerasComputeAndKeepResiduals());
859                             break;
860                         default:
861                             break;
862                     }
863 
864                     cameraEstimator.setSkewness(configuration.getAdditionalCamerasSkewness());
865                     cameraEstimator.setHorizontalPrincipalPoint(
866                             configuration.getAdditionalCamerasHorizontalPrincipalPoint());
867                     cameraEstimator.setVerticalPrincipalPoint(
868                             configuration.getAdditionalCamerasVerticalPrincipalPoint());
869 
870                     cameraEstimator.setSuggestSkewnessValueEnabled(
871                             configuration.isAdditionalCamerasSuggestSkewnessValueEnabled());
872                     cameraEstimator.setSuggestedSkewnessValue(
873                             configuration.getAdditionalCamerasSuggestedSkewnessValue());
874 
875                     cameraEstimator.setSuggestHorizontalFocalLengthEnabled(
876                             configuration.isAdditionalCamerasSuggestHorizontalFocalLengthEnabled());
877                     cameraEstimator.setSuggestedHorizontalFocalLengthValue(
878                             configuration.getAdditionalCamerasSuggestedHorizontalFocalLengthValue());
879 
880                     cameraEstimator.setSuggestVerticalFocalLengthEnabled(
881                             configuration.isAdditionalCamerasSuggestVerticalFocalLengthEnabled());
882                     cameraEstimator.setSuggestedVerticalFocalLengthValue(
883                             configuration.getAdditionalCamerasSuggestedVerticalFocalLengthValue());
884 
885                     cameraEstimator.setSuggestAspectRatioEnabled(
886                             configuration.isAdditionalCamerasSuggestAspectRatioEnabled());
887                     cameraEstimator.setSuggestedAspectRatioValue(
888                             configuration.getAdditionalCamerasSuggestedAspectRatioValue());
889 
890                     cameraEstimator.setSuggestPrincipalPointEnabled(
891                             configuration.isAdditionalCamerasSuggestPrincipalPointEnabled());
892                     cameraEstimator.setSuggestedPrincipalPointValue(
893                             configuration.getAdditionalCamerasSuggestedPrincipalPointValue());
894 
895                     currentCamera = cameraEstimator.estimate();
896                     currentCameraCovariance = cameraEstimator.getCovariance();
897 
898                     //noinspection unchecked
899                     listener.onSamplesAccepted((R) this, viewCount, previousViewTrackedSamples,
900                             currentViewTrackedSamples);
901 
902                     allPreviousViewSamples.clear();
903                     allPreviousViewSamples.addAll(currentViewTrackedSamples);
904                     allPreviousViewSamples.addAll(currentViewNewlySpawnedSamples);
905 
906                     previousViewTrackedSamples = currentViewTrackedSamples;
907                     previousViewId = currentViewId;
908                     currentViewId = viewCount;
909 
910                 } catch (final Exception e) {
911                     // camera estimation failed
912                     samplesRejected = true;
913                 }
914 
915             } else {
916 
917                 try {
918                     // use DLT for additional cameras estimation
919                     final var cameraEstimator = DLTPointCorrespondencePinholeCameraRobustEstimator.create(points3D,
920                             points2D, qualityScores, configuration.getAdditionalCamerasRobustEstimationMethod());
921                     cameraEstimator.setResultRefined(configuration.areAdditionalCamerasRefined());
922                     cameraEstimator.setCovarianceKept(configuration.isAdditionalCamerasCovarianceKept());
923                     cameraEstimator.setFastRefinementUsed(configuration.getAdditionalCamerasUseFastRefinement());
924                     cameraEstimator.setConfidence(configuration.getAdditionalCamerasConfidence());
925                     cameraEstimator.setMaxIterations(configuration.getAdditionalCamerasMaxIterations());
926 
927                     switch (configuration.getAdditionalCamerasRobustEstimationMethod()) {
928                         case LMEDS:
929                             ((LMedSDLTPointCorrespondencePinholeCameraRobustEstimator) cameraEstimator)
930                                     .setStopThreshold(configuration.getAdditionalCamerasThreshold());
931                             break;
932                         case MSAC:
933                             ((MSACDLTPointCorrespondencePinholeCameraRobustEstimator) cameraEstimator)
934                                     .setThreshold(configuration.getAdditionalCamerasThreshold());
935                             break;
936                         case PROMEDS:
937                             ((PROMedSDLTPointCorrespondencePinholeCameraRobustEstimator) cameraEstimator)
938                                     .setStopThreshold(configuration.getAdditionalCamerasThreshold());
939                             break;
940                         case PROSAC:
941                             var prosacCameraEstimator =
942                                     (PROSACDLTPointCorrespondencePinholeCameraRobustEstimator) cameraEstimator;
943                             prosacCameraEstimator.setThreshold(configuration.getAdditionalCamerasThreshold());
944                             prosacCameraEstimator.setComputeAndKeepInliersEnabled(
945                                     configuration.getAdditionalCamerasComputeAndKeepInliers());
946                             prosacCameraEstimator.setComputeAndKeepResidualsEnabled(
947                                     configuration.getAdditionalCamerasComputeAndKeepResiduals());
948                             break;
949                         case RANSAC:
950                             var ransacCameraEstimator =
951                                     (RANSACDLTPointCorrespondencePinholeCameraRobustEstimator) cameraEstimator;
952                             ransacCameraEstimator.setThreshold(configuration.getAdditionalCamerasThreshold());
953                             ransacCameraEstimator.setComputeAndKeepInliersEnabled(
954                                     configuration.getAdditionalCamerasComputeAndKeepInliers());
955                             ransacCameraEstimator.setComputeAndKeepResidualsEnabled(
956                                     configuration.getAdditionalCamerasComputeAndKeepResiduals());
957                             break;
958                         default:
959                             break;
960                     }
961 
962                     cameraEstimator.setSuggestSkewnessValueEnabled(
963                             configuration.isAdditionalCamerasSuggestSkewnessValueEnabled());
964                     cameraEstimator.setSuggestedSkewnessValue(
965                             configuration.getAdditionalCamerasSuggestedSkewnessValue());
966 
967                     cameraEstimator.setSuggestHorizontalFocalLengthEnabled(
968                             configuration.isAdditionalCamerasSuggestHorizontalFocalLengthEnabled());
969                     cameraEstimator.setSuggestedHorizontalFocalLengthValue(
970                             configuration.getAdditionalCamerasSuggestedHorizontalFocalLengthValue());
971 
972                     cameraEstimator.setSuggestVerticalFocalLengthEnabled(
973                             configuration.isAdditionalCamerasSuggestVerticalFocalLengthEnabled());
974                     cameraEstimator.setSuggestedVerticalFocalLengthValue(
975                             configuration.getAdditionalCamerasSuggestedVerticalFocalLengthValue());
976 
977                     cameraEstimator.setSuggestAspectRatioEnabled(
978                             configuration.isAdditionalCamerasSuggestAspectRatioEnabled());
979                     cameraEstimator.setSuggestedAspectRatioValue(
980                             configuration.getAdditionalCamerasSuggestedAspectRatioValue());
981 
982                     cameraEstimator.setSuggestPrincipalPointEnabled(
983                             configuration.isAdditionalCamerasSuggestPrincipalPointEnabled());
984                     cameraEstimator.setSuggestedPrincipalPointValue(
985                             configuration.getAdditionalCamerasSuggestedPrincipalPointValue());
986 
987                     currentCamera = cameraEstimator.estimate();
988                     currentCameraCovariance = cameraEstimator.getCovariance();
989 
990                     //noinspection unchecked
991                     listener.onSamplesAccepted((R) this, viewCount, previousViewTrackedSamples,
992                             currentViewTrackedSamples);
993 
994                     allPreviousViewSamples.clear();
995                     allPreviousViewSamples.addAll(currentViewTrackedSamples);
996                     allPreviousViewSamples.addAll(currentViewNewlySpawnedSamples);
997 
998                     previousViewTrackedSamples = currentViewTrackedSamples;
999                     previousViewId = currentViewId;
1000                     currentViewId = viewCount;
1001 
1002                 } catch (final Exception e) {
1003                     // camera estimation failed
1004                     samplesRejected = true;
1005                 }
1006             }
1007 
1008             if (!samplesRejected) {
1009                 previousMetricEstimatedCamera = currentMetricEstimatedCamera;
1010 
1011                 currentMetricEstimatedCamera = new EstimatedCamera();
1012                 currentMetricEstimatedCamera.setCamera(currentCamera);
1013                 currentMetricEstimatedCamera.setViewId(currentViewId);
1014                 currentMetricEstimatedCamera.setCovariance(currentCameraCovariance);
1015 
1016                 // notify camera estimation
1017                 //noinspection unchecked
1018                 listener.onMetricCameraEstimated((R) this, previousViewId, currentViewId, previousMetricEstimatedCamera,
1019                         currentMetricEstimatedCamera);
1020 
1021                 // reconstruct all matches and refine existing reconstructed points
1022                 reconstructAndRefineMatches();
1023 
1024                 // notify reconstruction update
1025                 //noinspection unchecked
1026                 listener.onMetricReconstructedPointsEstimated((R) this, matches, activeMetricReconstructedPoints);
1027 
1028                 if (!postProcessOne(false)) {
1029                     // something failed
1030                     failed = true;
1031                     //noinspection unchecked
1032                     listener.onFail((R) this);
1033                     return false;
1034                 } else {
1035                     // post-processing succeeded
1036                     //noinspection unchecked
1037                     listener.onEuclideanCameraEstimated((R) this, previousViewId, currentViewId, currentScale,
1038                             previousEuclideanEstimatedCamera, currentEuclideanEstimatedCamera);
1039                     //noinspection unchecked
1040                     listener.onEuclideanReconstructedPointsEstimated((R) this, currentScale,
1041                             activeEuclideanReconstructedPoints);
1042                     return true;
1043                 }
1044             }
1045         }
1046 
1047         //noinspection unchecked
1048         listener.onSamplesRejected((R) this, viewCount, previousViewTrackedSamples, currentViewTrackedSamples);
1049         return false;
1050     }
1051 
1052     /**
1053      * Reconstructs new 3D points or refines existing ones taking into account existing matches and estimated cameras
1054      */
1055     private void reconstructAndRefineMatches() {
1056         if (matches.isEmpty()) {
1057             return;
1058         }
1059 
1060         try {
1061             RobustSinglePoint3DTriangulator robustTriangulator = null;
1062             SinglePoint3DTriangulator triangulator = null;
1063             var qualityScoresRequired = false;
1064             if (configuration.getAdditionalCamerasRobustEstimationMethod() != null) {
1065                 robustTriangulator = RobustSinglePoint3DTriangulator.create(
1066                         configuration.getAdditionalCamerasRobustEstimationMethod());
1067                 robustTriangulator.setConfidence(configuration.getPointTriangulatorConfidence());
1068                 robustTriangulator.setMaxIterations(configuration.getPointTriangulatorMaxIterations());
1069 
1070                 var threshold = configuration.getPointTriangulatorThreshold();
1071                 switch (configuration.getAdditionalCamerasRobustEstimationMethod()) {
1072                     case LMEDS:
1073                         ((LMedSRobustSinglePoint3DTriangulator) robustTriangulator).setStopThreshold(threshold);
1074                         break;
1075                     case MSAC:
1076                         ((MSACRobustSinglePoint3DTriangulator) robustTriangulator).setThreshold(threshold);
1077                         break;
1078                     case PROMEDS:
1079                         ((PROMedSRobustSinglePoint3DTriangulator) robustTriangulator).setStopThreshold(threshold);
1080                         qualityScoresRequired = true;
1081                         break;
1082                     case PROSAC:
1083                         ((PROSACRobustSinglePoint3DTriangulator) robustTriangulator).setThreshold(threshold);
1084                         qualityScoresRequired = true;
1085                         break;
1086                     case RANSAC:
1087                         ((RANSACRobustSinglePoint3DTriangulator) robustTriangulator).setThreshold(threshold);
1088                         break;
1089                     default:
1090                         break;
1091                 }
1092 
1093             } else {
1094                 if (configuration.isHomogeneousPointTriangulatorUsed()) {
1095                     triangulator = SinglePoint3DTriangulator.create(
1096                             Point3DTriangulatorType.LMSE_HOMOGENEOUS_TRIANGULATOR);
1097                 } else {
1098                     triangulator = SinglePoint3DTriangulator.create(
1099                             Point3DTriangulatorType.LMSE_INHOMOGENEOUS_TRIANGULATOR);
1100                 }
1101             }
1102 
1103             activeMetricReconstructedPoints = new ArrayList<>();
1104             ReconstructedPoint3D reconstructedPoint;
1105             var matchPos = 0;
1106             for (final var match : matches) {
1107                 final var samples = match.getSamples();
1108                 final var estimatedCameras = match.getCameras();
1109 
1110                 // estimated cameras does not yet contain last estimated camera
1111                 if (samples.length != estimatedCameras.length + 1) {
1112                     continue;
1113                 }
1114 
1115                 final var points = new ArrayList<Point2D>();
1116                 final var cameras = new ArrayList<PinholeCamera>();
1117                 final var validSamples = new BitSet(samples.length);
1118                 PinholeCamera camera = null;
1119                 Point2D point2D;
1120                 var numValid = 0;
1121                 final var samplesLength = samples.length;
1122                 final var samplesLengthMinusOne = samplesLength - 1;
1123                 boolean isLast;
1124                 for (var i = 0; i < samples.length; i++) {
1125                     isLast = (i == samplesLengthMinusOne);
1126                     point2D = samples[i].getPoint();
1127 
1128                     if (!isLast) {
1129                         camera = estimatedCameras[i].getCamera();
1130                     }
1131 
1132                     if (point2D == null || (camera == null && !isLast)) {
1133                         validSamples.clear(i);
1134                     } else {
1135                         validSamples.set(i);
1136 
1137                         points.add(point2D);
1138                         if (!isLast) {
1139                             cameras.add(camera);
1140                         }
1141 
1142                         numValid++;
1143                     }
1144                 }
1145 
1146                 // also add current camera which is not yet available on estimated cameras array
1147                 cameras.add(currentMetricEstimatedCamera.getCamera());
1148 
1149                 if (points.size() < SinglePoint3DTriangulator.MIN_REQUIRED_VIEWS || points.size() != cameras.size()) {
1150                     // point cannot be triangulated
1151                     continue;
1152                 }
1153 
1154                 Point3D point3D;
1155                 if (robustTriangulator != null) {
1156                     robustTriangulator.setPointsAndCameras(points, cameras);
1157                     if (qualityScoresRequired) {
1158                         // copy quality scores
1159                         final var qualityScores = new double[numValid];
1160                         var j = 0;
1161                         for (var i = 0; i < samples.length; i++) {
1162                             if (validSamples.get(i)) {
1163                                 qualityScores[j] = samples[i].getQualityScore();
1164                                 j++;
1165                             }
1166                         }
1167                         robustTriangulator.setQualityScores(qualityScores);
1168                     }
1169 
1170                     point3D = robustTriangulator.triangulate();
1171 
1172                 } else if (triangulator != null) {
1173                     triangulator.setPointsAndCameras(points, cameras);
1174                     point3D = triangulator.triangulate();
1175 
1176                 } else {
1177                     continue;
1178                 }
1179 
1180                 // save triangulated point
1181                 reconstructedPoint = new ReconstructedPoint3D();
1182                 reconstructedPoint.setPoint(point3D);
1183                 reconstructedPoint.setInlier(true);
1184                 reconstructedPoint.setId(String.valueOf(matchPos));
1185                 match.setReconstructedPoint(reconstructedPoint);
1186 
1187                 activeMetricReconstructedPoints.add(reconstructedPoint);
1188 
1189                 matchPos++;
1190             }
1191         } catch (final Exception e) {
1192             // something failed
1193             failed = true;
1194             //noinspection all
1195             listener.onFail((R) this);
1196         }
1197     }
1198 
1199     /**
1200      * Setups current matched 3D/2D points to estimate a pinhole camera.
1201      *
1202      * @param points3D 3D matched points.
1203      * @param points2D 2D matched points.
1204      * @return quality scores for matched points.
1205      */
1206     private double[] setUpCameraEstimatorMatches(final List<Point3D> points3D, final List<Point2D> points2D) {
1207         if (matches.isEmpty()) {
1208             return null;
1209         }
1210 
1211         points3D.clear();
1212         points2D.clear();
1213 
1214         final var qualityScoresRequired =
1215                 configuration.getAdditionalCamerasRobustEstimationMethod() == RobustEstimatorMethod.PROSAC
1216                         || configuration.getAdditionalCamerasRobustEstimationMethod() == RobustEstimatorMethod.PROMEDS;
1217 
1218 
1219         int[] positions = null;
1220         if (qualityScoresRequired) {
1221             positions = new int[matches.size()];
1222         }
1223 
1224         var numMatches = 0;
1225         var i = 0;
1226         for (final var match : matches) {
1227             final var samples = match.getSamples();
1228             final var viewIds = match.getViewIds();
1229             final var pos = getPositionForViewId(viewIds, viewCount);
1230             if (pos < 0) {
1231                 continue;
1232             }
1233             if (positions != null) {
1234                 positions[i] = pos;
1235             }
1236 
1237             final var sample = samples[pos];
1238             final var reconstructedPoint3D = match.getReconstructedPoint();
1239 
1240             if (sample == null || sample.getPoint() == null || reconstructedPoint3D == null
1241                     || reconstructedPoint3D.getPoint() == null) {
1242                 if (positions != null) {
1243                     positions[i] = -1;
1244                 }
1245             } else {
1246                 points2D.add(sample.getPoint());
1247                 points3D.add(reconstructedPoint3D.getPoint());
1248                 numMatches++;
1249             }
1250 
1251             i++;
1252         }
1253 
1254         // pick quality scores
1255         double[] qualityScores = null;
1256         if (qualityScoresRequired && numMatches > 0) {
1257             qualityScores = new double[numMatches];
1258             var j = 0;
1259             for (i = 0; i < positions.length; i++) {
1260                 if (positions[i] < 0) {
1261                     continue;
1262                 }
1263 
1264                 qualityScores[j] = matches.get(i).getQualityScore();
1265                 j++;
1266             }
1267         }
1268 
1269         return qualityScores;
1270     }
1271 
1272     /**
1273      * Estimates additional camera intrinsics using DIAC (Dual Image of Absolute Conic) method.
1274      *
1275      * @return additional camera intrinsics or null if something fails.
1276      */
1277     private PinholeCameraIntrinsicParameters estimateIntrinsicsDIAC() {
1278         final var fundamentalMatrix = currentEstimatedFundamentalMatrix.getFundamentalMatrix();
1279 
1280         try {
1281             final var diacEstimator = new KruppaDualImageOfAbsoluteConicEstimator(fundamentalMatrix);
1282             diacEstimator.setPrincipalPointX(configuration.getAdditionalCamerasHorizontalPrincipalPoint());
1283             diacEstimator.setPrincipalPointY(configuration.getAdditionalCamerasVerticalPrincipalPoint());
1284             diacEstimator.setFocalDistanceAspectRatioKnown(true);
1285             diacEstimator.setFocalDistanceAspectRatio(configuration.getAdditionalCamerasAspectRatio());
1286 
1287             final var diac = diacEstimator.estimate();
1288             return diac.getIntrinsicParameters();
1289 
1290         } catch (final Exception e) {
1291             return null;
1292         }
1293     }
1294 
1295     /**
1296      * Estimates additional cameras intrinsics using DAQ (Dual Absolute Quadric) method.
1297      *
1298      * @return additional camera intrinsics or null if something fails.
1299      */
1300     private PinholeCameraIntrinsicParameters estimateIntrinsicsDAQ() {
1301         try {
1302             final var fundamentalMatrix = currentEstimatedFundamentalMatrix.getFundamentalMatrix();
1303             fundamentalMatrix.normalize();
1304 
1305             final var estimator = new DualAbsoluteQuadricInitialCamerasEstimator(fundamentalMatrix);
1306             estimator.setAspectRatio(configuration.getInitialCamerasAspectRatio());
1307             estimator.estimate();
1308 
1309             final var camera = estimator.getEstimatedLeftCamera();
1310             camera.decompose();
1311             return camera.getIntrinsicParameters();
1312 
1313         } catch (final Exception e) {
1314             return null;
1315         }
1316     }
1317 
1318     /**
1319      * Indicates whether there are enough matched points to estimate an additional camera.
1320      *
1321      * @param points3D 3D matched points to check.
1322      * @param points2D 2D matched points to check.
1323      * @return true if there are enough matched points, false otherwise.
1324      */
1325     private boolean hasEnoughSamplesForCameraEstimation(final List<Point3D> points3D, final List<Point2D> points2D) {
1326         return points3D != null && points2D != null && points3D.size() == points2D.size()
1327                 && hasEnoughSamplesOrMatchesForCameraEstimation(points3D.size());
1328     }
1329 
1330     /**
1331      * Indicates whether there are enough matches to estimate an additional camera.
1332      *
1333      * @param matches matches to check.
1334      * @return true if there are enough matches, false otherwise.
1335      */
1336     private boolean hasEnoughMatchesForCameraEstimation(final List<MatchedSamples> matches) {
1337         return hasEnoughSamplesOrMatchesForCameraEstimation(matches != null ? matches.size() : 0);
1338     }
1339 
1340     /**
1341      * Indicates whether there are enough matches or samples to estimate an additional
1342      * camera.
1343      *
1344      * @param count number of matches or samples.
1345      * @return true if there are enough matches or samples, false otherwise.
1346      */
1347     private boolean hasEnoughSamplesOrMatchesForCameraEstimation(final int count) {
1348         if (configuration.getUseDAQForAdditionalCamerasIntrinsics()
1349                 || configuration.getUseDIACForAdditionalCamerasIntrinsics()) {
1350             // when DAQ or DIAC is required for additional cameras, fundamental matrix
1351             // also needs to be computed, which requires 7 or 8 matches.
1352             return hasEnoughSamplesOrMatchesForFundamentalMatrixEstimation(count);
1353         } else {
1354             // EPnP, UPnP or DLT is used for additional cameras estimation without fundamental
1355             // matrix. Only 6 matches are required
1356             return count >= PointCorrespondencePinholeCameraRobustEstimator.MIN_NUMBER_OF_POINT_CORRESPONDENCES;
1357         }
1358     }
1359 
1360     /**
1361      * Indicates whether there are enough samples to estimate a fundamental matrix.
1362      *
1363      * @param samples samples to check.
1364      * @return true if there are enough samples, false otherwise.
1365      */
1366     private boolean hasEnoughSamplesForFundamentalMatrixEstimation(final List<Sample2D> samples) {
1367         return hasEnoughSamplesOrMatchesForFundamentalMatrixEstimation(samples != null ? samples.size() : 0);
1368     }
1369 
1370     /**
1371      * Indicates whether there are enough matches to estimate a fundamental matrix.
1372      *
1373      * @param matches matches to check.
1374      * @return true if there are enough matches, false otherwise.
1375      */
1376     private boolean hasEnoughMatchesForFundamentalMatrixEstimation(final List<MatchedSamples> matches) {
1377         return hasEnoughSamplesOrMatchesForFundamentalMatrixEstimation(matches != null ? matches.size() : 0);
1378     }
1379 
1380     /**
1381      * Indicates whether there are enough matches or samples to estimate a fundamental
1382      * matrix.
1383      *
1384      * @param count number of matches or samples.
1385      * @return true if there are enough matches or samples, false otherwise.
1386      */
1387     private boolean hasEnoughSamplesOrMatchesForFundamentalMatrixEstimation(final int count) {
1388         if (configuration.isGeneralSceneAllowed()) {
1389             if (configuration.getNonRobustFundamentalMatrixEstimatorMethod()
1390                     == FundamentalMatrixEstimatorMethod.EIGHT_POINTS_ALGORITHM) {
1391                 return count >= EightPointsFundamentalMatrixEstimator.MIN_REQUIRED_POINTS;
1392             } else if (configuration.getNonRobustFundamentalMatrixEstimatorMethod()
1393                     == FundamentalMatrixEstimatorMethod.SEVEN_POINTS_ALGORITHM) {
1394                 return count >= SevenPointsFundamentalMatrixEstimator.MIN_REQUIRED_POINTS;
1395             }
1396         } else if (configuration.isPlanarSceneAllowed()) {
1397             return count >= ProjectiveTransformation2DRobustEstimator.MINIMUM_SIZE;
1398         }
1399         return false;
1400     }
1401 
1402     /**
1403      * Estimates fundamental matrix for provided matches, when 3D points lay in a general
1404      * non-degenerate 3D configuration.
1405      *
1406      * @param matches              pairs of matches to find fundamental matrix.
1407      * @param viewId1              id of first view being related by estimated fundamental matrix.
1408      * @param viewId2              id of second view being related by estimated fundamental matrix.
1409      * @param isInitialPairOfViews true if fundamental matrix needs to be estimated for the initial
1410      *                             pair of views, false otherwise.
1411      * @return true if estimation succeeded, false otherwise.
1412      */
1413     private boolean estimateFundamentalMatrix(
1414             final List<MatchedSamples> matches, final int viewId1, final int viewId2,
1415             final boolean isInitialPairOfViews) {
1416         if (matches == null) {
1417             return false;
1418         }
1419 
1420         final var count = matches.size();
1421         final var leftSamples = new ArrayList<Sample2D>(count);
1422         final var rightSamples = new ArrayList<Sample2D>(count);
1423         final var leftPoints = new ArrayList<Point2D>(count);
1424         final var rightPoints = new ArrayList<Point2D>(count);
1425         final var qualityScores = new double[count];
1426         final double principalPointX;
1427         final double principalPointY;
1428         if (isInitialPairOfViews) {
1429             if (configuration.getInitialCamerasEstimatorMethod() == InitialCamerasEstimatorMethod.DUAL_ABSOLUTE_QUADRIC
1430                     || configuration.getInitialCamerasEstimatorMethod()
1431                     == InitialCamerasEstimatorMethod.DUAL_ABSOLUTE_QUADRIC_AND_ESSENTIAL_MATRIX) {
1432                 principalPointX = configuration.getPrincipalPointX();
1433                 principalPointY = configuration.getPrincipalPointY();
1434             } else {
1435                 principalPointX = principalPointY = 0.0;
1436             }
1437         } else {
1438             if (configuration.getUseDIACForAdditionalCamerasIntrinsics()
1439                     || configuration.getUseDAQForAdditionalCamerasIntrinsics()) {
1440                 principalPointX = configuration.getAdditionalCamerasHorizontalPrincipalPoint();
1441                 principalPointY = configuration.getAdditionalCamerasVerticalPrincipalPoint();
1442             } else {
1443                 principalPointX = principalPointY = 0.0;
1444             }
1445         }
1446 
1447         var i = 0;
1448         for (final var match : matches) {
1449             final var samples = match.getSamples();
1450             if (samples.length < MIN_NUMBER_OF_VIEWS) {
1451                 return false;
1452             }
1453 
1454             final var viewIds = match.getViewIds();
1455             final var pos1 = getPositionForViewId(viewIds, viewId1);
1456             if (pos1 < 0) {
1457                 return false;
1458             }
1459 
1460             final var pos2 = getPositionForViewId(viewIds, viewId2);
1461             if (pos2 < 0) {
1462                 return false;
1463             }
1464 
1465             final var leftSample = samples[pos1];
1466             final var rightSample = samples[pos2];
1467             final var p1 = leftSample.getPoint();
1468             final var p2 = rightSample.getPoint();
1469 
1470             leftSamples.add(leftSample);
1471             rightSamples.add(rightSample);
1472 
1473             final var leftPoint = Point2D.create();
1474             leftPoint.setInhomogeneousCoordinates(p1.getInhomX() - principalPointX,
1475                     p1.getInhomY() - principalPointY);
1476             leftPoints.add(leftPoint);
1477 
1478             final var rightPoint = Point2D.create();
1479             rightPoint.setInhomogeneousCoordinates(p2.getInhomX() - principalPointX,
1480                     p2.getInhomY() - principalPointY);
1481             rightPoints.add(rightPoint);
1482 
1483             qualityScores[i] = match.getQualityScore();
1484             i++;
1485         }
1486 
1487         try {
1488             final var estimator = FundamentalMatrixRobustEstimator.create(leftPoints, rightPoints, qualityScores,
1489                     configuration.getRobustFundamentalMatrixEstimatorMethod());
1490             estimator.setNonRobustFundamentalMatrixEstimatorMethod(
1491                     configuration.getNonRobustFundamentalMatrixEstimatorMethod());
1492             estimator.setResultRefined(configuration.isFundamentalMatrixRefined());
1493             estimator.setCovarianceKept(configuration.isFundamentalMatrixCovarianceKept());
1494             estimator.setConfidence(configuration.getFundamentalMatrixConfidence());
1495             estimator.setMaxIterations(configuration.getFundamentalMatrixMaxIterations());
1496 
1497             switch (configuration.getRobustFundamentalMatrixEstimatorMethod()) {
1498                 case LMEDS:
1499                     ((LMedSFundamentalMatrixRobustEstimator) estimator)
1500                             .setStopThreshold(configuration.getFundamentalMatrixThreshold());
1501                     break;
1502                 case MSAC:
1503                     ((MSACFundamentalMatrixRobustEstimator) estimator)
1504                             .setThreshold(configuration.getFundamentalMatrixThreshold());
1505                     break;
1506                 case PROMEDS:
1507                     ((PROMedSFundamentalMatrixRobustEstimator) estimator)
1508                             .setStopThreshold(configuration.getFundamentalMatrixThreshold());
1509                     break;
1510                 case PROSAC:
1511                     var prosacEstimator = (PROSACFundamentalMatrixRobustEstimator) estimator;
1512                     prosacEstimator.setThreshold(configuration.getFundamentalMatrixThreshold());
1513                     prosacEstimator.setComputeAndKeepInliersEnabled(
1514                             configuration.getFundamentalMatrixComputeAndKeepInliers());
1515                     prosacEstimator.setComputeAndKeepResidualsEnabled(
1516                             configuration.getFundamentalMatrixComputeAndKeepResiduals());
1517                     break;
1518                 case RANSAC:
1519                     var ransacEstimator = (RANSACFundamentalMatrixRobustEstimator) estimator;
1520                     ransacEstimator.setThreshold(configuration.getFundamentalMatrixThreshold());
1521                     ransacEstimator.setComputeAndKeepInliersEnabled(
1522                             configuration.getFundamentalMatrixComputeAndKeepInliers());
1523                     ransacEstimator.setComputeAndKeepResidualsEnabled(
1524                             configuration.getFundamentalMatrixComputeAndKeepResiduals());
1525                     break;
1526                 default:
1527                     break;
1528             }
1529 
1530             final var fundamentalMatrix = estimator.estimate();
1531 
1532             currentEstimatedFundamentalMatrix = new EstimatedFundamentalMatrix();
1533             currentEstimatedFundamentalMatrix.setFundamentalMatrix(fundamentalMatrix);
1534             currentEstimatedFundamentalMatrix.setViewId1(viewId1);
1535             currentEstimatedFundamentalMatrix.setViewId2(viewId2);
1536             currentEstimatedFundamentalMatrix.setCovariance(estimator.getCovariance());
1537 
1538             // determine quality score and inliers
1539             final var inliersData = estimator.getInliersData();
1540             if (inliersData != null) {
1541                 final var numInliers = inliersData.getNumInliers();
1542                 final var inliers = inliersData.getInliers();
1543                 final var length = inliers.length();
1544                 var fundamentalMatrixQualityScore = 0.0;
1545                 for (i = 0; i < length; i++) {
1546                     if (inliers.get(i)) {
1547                         // inlier
1548                         fundamentalMatrixQualityScore += qualityScores[i] / numInliers;
1549                     }
1550                 }
1551                 currentEstimatedFundamentalMatrix.setQualityScore(fundamentalMatrixQualityScore);
1552                 currentEstimatedFundamentalMatrix.setInliers(inliers);
1553             }
1554 
1555             // store left/right samples
1556             currentEstimatedFundamentalMatrix.setLeftSamples(leftSamples);
1557             currentEstimatedFundamentalMatrix.setRightSamples(rightSamples);
1558 
1559             return true;
1560         } catch (final Exception e) {
1561             return false;
1562         }
1563     }
1564 
1565     /**
1566      * Estimates fundamental matrix for provided matches, when 3D points lay in a planar 3D scene.
1567      *
1568      * @param matches              pairs of matches to find fundamental matrix.
1569      * @param viewId1              id of first view being related by estimated fundamental matrix.
1570      * @param viewId2              id of second view being related by estimated fundamental matrix.
1571      * @param isInitialPairOfViews true if fundamental matrix needs to be estimated for the initial
1572      *                             pair of views, false otherwise.
1573      * @return true if estimation succeeded, false otherwise.
1574      */
1575     private boolean estimatePlanarFundamentalMatrix(
1576             final List<MatchedSamples> matches, final int viewId1, final int viewId2,
1577             final boolean isInitialPairOfViews) {
1578         if (matches == null) {
1579             return false;
1580         }
1581 
1582         final var count = matches.size();
1583         final var leftSamples = new ArrayList<Sample2D>(count);
1584         final var rightSamples = new ArrayList<Sample2D>(count);
1585         final var leftPoints = new ArrayList<Point2D>(count);
1586         final var rightPoints = new ArrayList<Point2D>(count);
1587         final var qualityScores = new double[count];
1588         double principalPointX;
1589         double principalPointY;
1590         if (isInitialPairOfViews) {
1591             if (configuration.getInitialCamerasEstimatorMethod() == InitialCamerasEstimatorMethod.DUAL_ABSOLUTE_QUADRIC
1592                     || configuration.getInitialCamerasEstimatorMethod()
1593                     == InitialCamerasEstimatorMethod.DUAL_ABSOLUTE_QUADRIC_AND_ESSENTIAL_MATRIX) {
1594                 principalPointX = configuration.getPrincipalPointX();
1595                 principalPointY = configuration.getPrincipalPointY();
1596             } else {
1597                 principalPointX = principalPointY = 0.0;
1598             }
1599         } else {
1600             if (configuration.getUseDIACForAdditionalCamerasIntrinsics()
1601                     || configuration.getUseDAQForAdditionalCamerasIntrinsics()) {
1602                 principalPointX = configuration.getAdditionalCamerasHorizontalPrincipalPoint();
1603                 principalPointY = configuration.getAdditionalCamerasVerticalPrincipalPoint();
1604             } else {
1605                 principalPointX = principalPointY = 0.0;
1606             }
1607         }
1608 
1609         var i = 0;
1610         for (final var match : matches) {
1611             final var samples = match.getSamples();
1612             if (samples.length < MIN_NUMBER_OF_VIEWS) {
1613                 return false;
1614             }
1615 
1616             final var viewIds = match.getViewIds();
1617             final var pos1 = getPositionForViewId(viewIds, viewId1);
1618             if (pos1 < 0) {
1619                 return false;
1620             }
1621 
1622             final var pos2 = getPositionForViewId(viewIds, viewId2);
1623             if (pos2 < 0) {
1624                 return false;
1625             }
1626 
1627             final var leftSample = samples[pos1];
1628             final var rightSample = samples[pos2];
1629             final var p1 = leftSample.getPoint();
1630             final var p2 = rightSample.getPoint();
1631 
1632             leftSamples.add(leftSample);
1633             rightSamples.add(rightSample);
1634 
1635             final var leftPoint = Point2D.create();
1636             leftPoint.setInhomogeneousCoordinates(p1.getInhomX() - principalPointX,
1637                     p1.getInhomY() - principalPointY);
1638             leftPoints.add(leftPoint);
1639 
1640             final var rightPoint = Point2D.create();
1641             rightPoint.setInhomogeneousCoordinates(p2.getInhomX() - principalPointX,
1642                     p2.getInhomY() - principalPointY);
1643             rightPoints.add(rightPoint);
1644 
1645             qualityScores[i] = match.getQualityScore();
1646             i++;
1647         }
1648 
1649         try {
1650             final var homographyEstimator = PointCorrespondenceProjectiveTransformation2DRobustEstimator.create(
1651                     configuration.getRobustPlanarHomographyEstimatorMethod());
1652             homographyEstimator.setResultRefined(configuration.isPlanarHomographyRefined());
1653             homographyEstimator.setCovarianceKept(configuration.isPlanarHomographyCovarianceKept());
1654             homographyEstimator.setConfidence(configuration.getPlanarHomographyConfidence());
1655             homographyEstimator.setMaxIterations(configuration.getPlanarHomographyMaxIterations());
1656 
1657             switch (configuration.getRobustPlanarHomographyEstimatorMethod()) {
1658                 case LMEDS:
1659                     ((LMedSPointCorrespondenceProjectiveTransformation2DRobustEstimator) homographyEstimator)
1660                             .setStopThreshold(configuration.getPlanarHomographyThreshold());
1661                     break;
1662                 case MSAC:
1663                     ((MSACPointCorrespondenceProjectiveTransformation2DRobustEstimator) homographyEstimator)
1664                             .setThreshold(configuration.getPlanarHomographyThreshold());
1665                     break;
1666                 case PROMEDS:
1667                     ((PROMedSPointCorrespondenceProjectiveTransformation2DRobustEstimator) homographyEstimator)
1668                             .setStopThreshold(configuration.getPlanarHomographyThreshold());
1669                     break;
1670                 case PROSAC:
1671                     final var prosacHomographyEstimator =
1672                             (PROSACPointCorrespondenceProjectiveTransformation2DRobustEstimator) homographyEstimator;
1673 
1674                     prosacHomographyEstimator.setThreshold(configuration.getPlanarHomographyThreshold());
1675                     prosacHomographyEstimator.setComputeAndKeepInliersEnabled(
1676                             configuration.getPlanarHomographyComputeAndKeepInliers());
1677                     prosacHomographyEstimator.setComputeAndKeepResidualsEnabled(
1678                             configuration.getPlanarHomographyComputeAndKeepResiduals());
1679                     break;
1680                 case RANSAC:
1681                     final var ransacHomographyEstimator =
1682                             (RANSACPointCorrespondenceProjectiveTransformation2DRobustEstimator) homographyEstimator;
1683 
1684                     ransacHomographyEstimator.setThreshold(configuration.getPlanarHomographyThreshold());
1685                     ransacHomographyEstimator.setComputeAndKeepInliersEnabled(
1686                             configuration.getPlanarHomographyComputeAndKeepInliers());
1687                     ransacHomographyEstimator.setComputeAndKeepResidualsEnabled(
1688                             configuration.getPlanarHomographyComputeAndKeepResiduals());
1689                     break;
1690                 default:
1691                     break;
1692             }
1693 
1694             final var fundamentalMatrixEstimator = new PlanarBestFundamentalMatrixEstimatorAndReconstructor();
1695             fundamentalMatrixEstimator.setHomographyEstimator(homographyEstimator);
1696             fundamentalMatrixEstimator.setLeftAndRightPoints(leftPoints, rightPoints);
1697             fundamentalMatrixEstimator.setQualityScores(qualityScores);
1698 
1699             var intrinsic1 = configuration.getInitialIntrinsic1();
1700             var intrinsic2 = configuration.getInitialIntrinsic1();
1701             if (intrinsic1 == null && intrinsic2 == null) {
1702                 // estimate homography
1703                 final var homography = homographyEstimator.estimate();
1704 
1705                 // estimate intrinsic parameters using the Image of Absolute
1706                 // Conic (IAC)
1707                 final var homographies = new ArrayList<Transformation2D>();
1708                 homographies.add(homography);
1709 
1710                 final var iacEstimator = new LMSEImageOfAbsoluteConicEstimator(homographies);
1711                 final var iac = iacEstimator.estimate();
1712 
1713                 intrinsic1 = intrinsic2 = iac.getIntrinsicParameters();
1714 
1715             } else if (intrinsic1 == null) { //&& intrinsic2 != null
1716                 intrinsic1 = intrinsic2;
1717             } else if (intrinsic2 == null) { //&& intrinsic1 != null
1718                 intrinsic2 = intrinsic1;
1719             }
1720             fundamentalMatrixEstimator.setLeftIntrinsics(intrinsic1);
1721             fundamentalMatrixEstimator.setRightIntrinsics(intrinsic2);
1722 
1723             fundamentalMatrixEstimator.estimateAndReconstruct();
1724 
1725             final var fundamentalMatrix = fundamentalMatrixEstimator.getFundamentalMatrix();
1726 
1727             currentEstimatedFundamentalMatrix = new EstimatedFundamentalMatrix();
1728             currentEstimatedFundamentalMatrix.setFundamentalMatrix(fundamentalMatrix);
1729             currentEstimatedFundamentalMatrix.setViewId1(viewId1);
1730             currentEstimatedFundamentalMatrix.setViewId2(viewId2);
1731 
1732             // determine quality score and inliers
1733             final var inliersData = homographyEstimator.getInliersData();
1734             if (inliersData != null) {
1735                 final var numInliers = inliersData.getNumInliers();
1736                 final var inliers = inliersData.getInliers();
1737                 final var length = inliers.length();
1738                 var fundamentalMatrixQualityScore = 0.0;
1739                 for (i = 0; i < length; i++) {
1740                     if (inliers.get(i)) {
1741                         // inlier
1742                         fundamentalMatrixQualityScore += qualityScores[i] / numInliers;
1743                     }
1744                 }
1745                 currentEstimatedFundamentalMatrix.setQualityScore(fundamentalMatrixQualityScore);
1746                 currentEstimatedFundamentalMatrix.setInliers(inliers);
1747             }
1748 
1749             // store left/right samples
1750             currentEstimatedFundamentalMatrix.setLeftSamples(leftSamples);
1751             currentEstimatedFundamentalMatrix.setRightSamples(rightSamples);
1752 
1753             return true;
1754         } catch (final Exception e) {
1755             return false;
1756         }
1757     }
1758 
1759     /**
1760      * Gets position of a view id within provided array of view id's.
1761      *
1762      * @param viewIds array of view IDs where search is done.
1763      * @param viewId  view id to be searched.
1764      * @return position where view id is found or -1 if not found.
1765      */
1766     private int getPositionForViewId(final int[] viewIds, final int viewId) {
1767         final var length = viewIds.length;
1768         for (var i = 0; i < length; i++) {
1769             if (viewIds[i] == viewId) {
1770                 return i;
1771             }
1772         }
1773         return -1;
1774     }
1775 
1776     /**
1777      * Estimates initial cameras and reconstructed points.
1778      *
1779      * @return true if cameras and points could be estimated, false if something
1780      * failed.
1781      */
1782     private boolean estimateInitialCamerasAndPoints() {
1783         return switch (configuration.getInitialCamerasEstimatorMethod()) {
1784             case ESSENTIAL_MATRIX -> estimateInitialCamerasAndPointsEssential();
1785             case DUAL_IMAGE_OF_ABSOLUTE_CONIC -> estimateInitialCamerasAndPointsDIAC();
1786             case DUAL_ABSOLUTE_QUADRIC -> estimateInitialCamerasAndPointsDAQ();
1787             default -> estimateInitialCamerasAndPointsDAQAndEssential();
1788         };
1789     }
1790 
1791     /**
1792      * Estimates initial cameras and reconstructed points using the Dual
1793      * Absolute Quadric to estimate intrinsic parameters and then use those
1794      * intrinsic parameters with the essential matrix.
1795      *
1796      * @return true if cameras and points could be estimated, false if something
1797      * failed.
1798      */
1799     private boolean estimateInitialCamerasAndPointsDAQAndEssential() {
1800         try {
1801             final var fundamentalMatrix = currentEstimatedFundamentalMatrix.getFundamentalMatrix();
1802 
1803             final var estimator = new DualAbsoluteQuadricInitialCamerasEstimator(fundamentalMatrix);
1804             estimator.setAspectRatio(configuration.getInitialCamerasAspectRatio());
1805             estimator.estimate();
1806 
1807             final var camera1 = estimator.getEstimatedLeftCamera();
1808             final var camera2 = estimator.getEstimatedRightCamera();
1809 
1810             camera1.decompose();
1811             camera2.decompose();
1812 
1813             final var intrinsicZeroPrincipalPoint1 = camera1.getIntrinsicParameters();
1814             final var intrinsicZeroPrincipalPoint2 = camera2.getIntrinsicParameters();
1815 
1816             final double principalPointX = configuration.getPrincipalPointX();
1817             final double principalPointY = configuration.getPrincipalPointY();
1818 
1819             final var intrinsic1 = new PinholeCameraIntrinsicParameters(intrinsicZeroPrincipalPoint1);
1820             intrinsic1.setHorizontalPrincipalPoint(intrinsic1.getHorizontalPrincipalPoint() + principalPointX);
1821             intrinsic1.setVerticalPrincipalPoint(intrinsic1.getVerticalPrincipalPoint() + principalPointY);
1822 
1823             final var intrinsic2 = new PinholeCameraIntrinsicParameters(intrinsicZeroPrincipalPoint2);
1824             intrinsic2.setHorizontalPrincipalPoint(intrinsic2.getHorizontalPrincipalPoint() + principalPointX);
1825             intrinsic2.setVerticalPrincipalPoint(intrinsic2.getVerticalPrincipalPoint() + principalPointY);
1826 
1827             // fix fundamental matrix to account for principal point different
1828             // from zero
1829             fixFundamentalMatrix(fundamentalMatrix, intrinsicZeroPrincipalPoint1, intrinsicZeroPrincipalPoint2,
1830                     intrinsic1, intrinsic2);
1831 
1832             return estimateInitialCamerasAndPointsEssential(intrinsic1, intrinsic2);
1833         } catch (final Exception e) {
1834             return false;
1835         }
1836     }
1837 
1838     /**
1839      * Estimates initial cameras and reconstructed points using the Dual
1840      * Absolute Quadric.
1841      *
1842      * @return true if cameras and points could be estimated, false if something
1843      * failed.
1844      */
1845     private boolean estimateInitialCamerasAndPointsDAQ() {
1846         try {
1847             final var fundamentalMatrix = currentEstimatedFundamentalMatrix.getFundamentalMatrix();
1848             fundamentalMatrix.normalize();
1849 
1850             final var estimator = new DualAbsoluteQuadricInitialCamerasEstimator(fundamentalMatrix);
1851             estimator.setAspectRatio(configuration.getInitialCamerasAspectRatio());
1852             estimator.estimate();
1853 
1854             final var camera1 = estimator.getEstimatedLeftCamera();
1855             final var camera2 = estimator.getEstimatedRightCamera();
1856 
1857             camera1.decompose();
1858             camera2.decompose();
1859 
1860             final var intrinsicZeroPrincipalPoint1 = camera1.getIntrinsicParameters();
1861             final var intrinsicZeroPrincipalPoint2 = camera2.getIntrinsicParameters();
1862 
1863             final var principalPointX = configuration.getPrincipalPointX();
1864             final var principalPointY = configuration.getPrincipalPointY();
1865 
1866             final var intrinsic1 = new PinholeCameraIntrinsicParameters(intrinsicZeroPrincipalPoint1);
1867             intrinsic1.setHorizontalPrincipalPoint(intrinsic1.getHorizontalPrincipalPoint() + principalPointX);
1868             intrinsic1.setVerticalPrincipalPoint(intrinsic1.getVerticalPrincipalPoint() + principalPointY);
1869             camera1.setIntrinsicParameters(intrinsic1);
1870 
1871             final var intrinsic2 = new PinholeCameraIntrinsicParameters(intrinsicZeroPrincipalPoint2);
1872             intrinsic2.setHorizontalPrincipalPoint(intrinsic2.getHorizontalPrincipalPoint() + principalPointX);
1873             intrinsic2.setVerticalPrincipalPoint(intrinsic2.getVerticalPrincipalPoint() + principalPointY);
1874             camera2.setIntrinsicParameters(intrinsic2);
1875 
1876             previousMetricEstimatedCamera = new EstimatedCamera();
1877             previousMetricEstimatedCamera.setCamera(camera1);
1878             previousMetricEstimatedCamera.setViewId(previousViewId);
1879 
1880             currentMetricEstimatedCamera = new EstimatedCamera();
1881             currentMetricEstimatedCamera.setCamera(camera2);
1882             currentMetricEstimatedCamera.setViewId(currentViewId);
1883 
1884             // fix fundamental matrix to account for principal point different
1885             // from zero
1886             fixFundamentalMatrix(fundamentalMatrix, intrinsicZeroPrincipalPoint1, intrinsicZeroPrincipalPoint2,
1887                     intrinsic1, intrinsic2);
1888 
1889             // triangulate points
1890             Corrector corrector = null;
1891             if (configuration.getInitialCamerasCorrectorType() != null) {
1892                 corrector = Corrector.create(fundamentalMatrix, configuration.getInitialCamerasCorrectorType());
1893             }
1894 
1895             // use all points used for fundamental matrix estimation
1896             final var samples1 = currentEstimatedFundamentalMatrix.getLeftSamples();
1897             final var samples2 = currentEstimatedFundamentalMatrix.getRightSamples();
1898 
1899             final var points1 = new ArrayList<Point2D>();
1900             final var points2 = new ArrayList<Point2D>();
1901             final var length = samples1.size();
1902             for (var i = 0; i < length; i++) {
1903                 final var sample1 = samples1.get(i);
1904                 final var sample2 = samples2.get(i);
1905 
1906                 final var point1 = sample1.getPoint();
1907                 final var point2 = sample2.getPoint();
1908 
1909                 points1.add(point1);
1910                 points2.add(point2);
1911             }
1912 
1913             // correct points if needed
1914             final List<Point2D> correctedPoints1;
1915             final List<Point2D> correctedPoints2;
1916             if (corrector != null) {
1917                 corrector.setLeftAndRightPoints(points1, points2);
1918                 corrector.correct();
1919 
1920                 correctedPoints1 = corrector.getLeftCorrectedPoints();
1921                 correctedPoints2 = corrector.getRightCorrectedPoints();
1922             } else {
1923                 correctedPoints1 = points1;
1924                 correctedPoints2 = points2;
1925             }
1926 
1927             // triangulate points
1928             final SinglePoint3DTriangulator triangulator;
1929             if (configuration.getDaqUseHomogeneousPointTriangulator()) {
1930                 triangulator = SinglePoint3DTriangulator.create(Point3DTriangulatorType.LMSE_HOMOGENEOUS_TRIANGULATOR);
1931             } else {
1932                 triangulator = SinglePoint3DTriangulator.create(
1933                         Point3DTriangulatorType.LMSE_INHOMOGENEOUS_TRIANGULATOR);
1934             }
1935 
1936             final var cameras = new ArrayList<PinholeCamera>();
1937             cameras.add(camera1);
1938             cameras.add(camera2);
1939 
1940             activeMetricReconstructedPoints = new ArrayList<>();
1941             final var points = new ArrayList<Point2D>();
1942             final var numPoints = correctedPoints1.size();
1943 
1944             Point3D triangulatedPoint;
1945             ReconstructedPoint3D reconstructedPoint;
1946             for (var i = 0; i < numPoints; i++) {
1947                 points.clear();
1948                 points.add(correctedPoints1.get(i));
1949                 points.add(correctedPoints2.get(i));
1950 
1951                 triangulator.setPointsAndCameras(points, cameras);
1952                 triangulatedPoint = triangulator.triangulate();
1953 
1954                 reconstructedPoint = new ReconstructedPoint3D();
1955                 reconstructedPoint.setPoint(triangulatedPoint);
1956 
1957                 // only points reconstructed in front of both cameras are
1958                 // considered valid
1959                 final var front1 = camera1.isPointInFrontOfCamera(triangulatedPoint);
1960                 final var front2 = camera2.isPointInFrontOfCamera(triangulatedPoint);
1961                 final var inlier = front1 && front2;
1962                 reconstructedPoint.setInlier(inlier);
1963 
1964                 activeMetricReconstructedPoints.add(reconstructedPoint);
1965 
1966                 if (inlier) {
1967                     matches.get(i).setReconstructedPoint(reconstructedPoint);
1968                 }
1969             }
1970 
1971             return true;
1972         } catch (final Exception e) {
1973             return false;
1974         }
1975     }
1976 
1977     /**
1978      * Estimates initial cameras and reconstructed points using Dual Image of
1979      * Absolute Conic.
1980      *
1981      * @return true if cameras and points could be estimated, false if something
1982      * failed.
1983      */
1984     private boolean estimateInitialCamerasAndPointsDIAC() {
1985         final var fundamentalMatrix = currentEstimatedFundamentalMatrix.getFundamentalMatrix();
1986 
1987         // use inlier points used for fundamental matrix estimation
1988         final var samples1 = currentEstimatedFundamentalMatrix.getLeftSamples();
1989         final var samples2 = currentEstimatedFundamentalMatrix.getRightSamples();
1990 
1991         final var points1 = new ArrayList<Point2D>();
1992         final var points2 = new ArrayList<Point2D>();
1993         final var length = samples1.size();
1994         for (var i = 0; i < length; i++) {
1995             final var sample1 = samples1.get(i);
1996             final var sample2 = samples2.get(i);
1997 
1998             final var point1 = sample1.getPoint();
1999             final var point2 = sample2.getPoint();
2000 
2001             points1.add(point1);
2002             points2.add(point2);
2003         }
2004 
2005         try {
2006             final var estimator = new DualImageOfAbsoluteConicInitialCamerasEstimator(fundamentalMatrix, points1,
2007                     points2);
2008             estimator.setPrincipalPoint(configuration.getPrincipalPointX(), configuration.getPrincipalPointY());
2009             estimator.setAspectRatio(configuration.getInitialCamerasAspectRatio());
2010             estimator.setCorrectorType(configuration.getInitialCamerasCorrectorType());
2011             estimator.setPointsTriangulated(true);
2012             estimator.setValidTriangulatedPointsMarked(configuration.getInitialCamerasMarkValidTriangulatedPoints());
2013 
2014             estimator.estimate();
2015 
2016             // store cameras
2017             final var camera1 = estimator.getEstimatedLeftCamera();
2018             final var camera2 = estimator.getEstimatedRightCamera();
2019 
2020             previousMetricEstimatedCamera = new EstimatedCamera();
2021             previousMetricEstimatedCamera.setCamera(camera1);
2022 
2023             currentMetricEstimatedCamera = new EstimatedCamera();
2024             currentMetricEstimatedCamera.setCamera(camera2);
2025 
2026             // store points
2027             final var triangulatedPoints = estimator.getTriangulatedPoints();
2028             final var validTriangulatedPoints = estimator.getValidTriangulatedPoints();
2029 
2030             activeMetricReconstructedPoints = new ArrayList<>();
2031             final var triangulatedPointsSize = triangulatedPoints.size();
2032             for (var i = 0; i < triangulatedPointsSize; i++) {
2033                 final var reconstructedPoint = new ReconstructedPoint3D();
2034                 reconstructedPoint.setPoint(triangulatedPoints.get(i));
2035                 reconstructedPoint.setInlier(validTriangulatedPoints.get(i));
2036                 activeMetricReconstructedPoints.add(reconstructedPoint);
2037 
2038                 if (validTriangulatedPoints.get(i)) {
2039                     matches.get(i).setReconstructedPoint(reconstructedPoint);
2040                 }
2041             }
2042 
2043             return true;
2044         } catch (final Exception e) {
2045             return false;
2046         }
2047     }
2048 
2049     /**
2050      * Estimates initial cameras and reconstructed points using the essential
2051      * matrix and provided intrinsic parameters that must have been set during
2052      * offline calibration.
2053      *
2054      * @return true if cameras and points could be estimated, false if something
2055      * failed.
2056      */
2057     private boolean estimateInitialCamerasAndPointsEssential() {
2058         final var intrinsic1 = configuration.getInitialIntrinsic1();
2059         final var intrinsic2 = configuration.getInitialIntrinsic2();
2060         return estimateInitialCamerasAndPointsEssential(intrinsic1, intrinsic2);
2061     }
2062 
2063     /**
2064      * Estimates initial cameras and reconstructed points using the essential
2065      * matrix and provided intrinsic parameters that must have been set during
2066      * offline calibration.
2067      *
2068      * @param intrinsic1 intrinsic parameters of 1st camera.
2069      * @param intrinsic2 intrinsic parameters of 2nd camera.
2070      * @return true if cameras and points could be estimated, false if something
2071      * failed.
2072      */
2073     private boolean estimateInitialCamerasAndPointsEssential(
2074             final PinholeCameraIntrinsicParameters intrinsic1, final PinholeCameraIntrinsicParameters intrinsic2) {
2075         final var fundamentalMatrix = currentEstimatedFundamentalMatrix.getFundamentalMatrix();
2076 
2077         // use all points used for fundamental matrix estimation
2078         final var samples1 = currentEstimatedFundamentalMatrix.getLeftSamples();
2079         final var samples2 = currentEstimatedFundamentalMatrix.getRightSamples();
2080 
2081         final var points1 = new ArrayList<Point2D>();
2082         final var points2 = new ArrayList<Point2D>();
2083         final var length = samples1.size();
2084         for (var i = 0; i < length; i++) {
2085             final var sample1 = samples1.get(i);
2086             final var sample2 = samples2.get(i);
2087 
2088             final var point1 = sample1.getPoint();
2089             final var point2 = sample2.getPoint();
2090 
2091             points1.add(point1);
2092             points2.add(point2);
2093         }
2094 
2095         try {
2096             final var estimator = new EssentialMatrixInitialCamerasEstimator(fundamentalMatrix, intrinsic1, intrinsic2,
2097                     points1, points2);
2098 
2099             estimator.setCorrectorType(configuration.getInitialCamerasCorrectorType());
2100             estimator.setPointsTriangulated(true);
2101             estimator.setValidTriangulatedPointsMarked(configuration.getInitialCamerasMarkValidTriangulatedPoints());
2102 
2103             estimator.estimate();
2104 
2105             // store cameras
2106             final var camera1 = estimator.getEstimatedLeftCamera();
2107             final var camera2 = estimator.getEstimatedRightCamera();
2108 
2109             previousMetricEstimatedCamera = new EstimatedCamera();
2110             previousMetricEstimatedCamera.setCamera(camera1);
2111             previousMetricEstimatedCamera.setViewId(previousViewId);
2112 
2113             currentMetricEstimatedCamera = new EstimatedCamera();
2114             currentMetricEstimatedCamera.setCamera(camera2);
2115             currentMetricEstimatedCamera.setViewId(currentViewId);
2116 
2117             // store points
2118             final var triangulatedPoints = estimator.getTriangulatedPoints();
2119             final var validTriangulatedPoints = estimator.getValidTriangulatedPoints();
2120 
2121             activeMetricReconstructedPoints = new ArrayList<>();
2122             final var triangulatedPointsSize = triangulatedPoints.size();
2123             final var matchesSize = matches.size();
2124             var j = 0;
2125             for (var i = 0; i < triangulatedPointsSize && j < matchesSize; i++, j++) {
2126                 if (!validTriangulatedPoints.get(i)) {
2127                     continue;
2128                 }
2129 
2130                 final var reconstructedPoint = new ReconstructedPoint3D();
2131                 reconstructedPoint.setPoint(triangulatedPoints.get(i));
2132                 reconstructedPoint.setInlier(validTriangulatedPoints.get(i));
2133                 activeMetricReconstructedPoints.add(reconstructedPoint);
2134 
2135                 matches.get(j).setReconstructedPoint(reconstructedPoint);
2136             }
2137 
2138             return true;
2139         } catch (final Exception e) {
2140             return false;
2141         }
2142     }
2143 
2144     /**
2145      * Fixes fundamental matrix to account for principal point different from
2146      * zero when using DAQ estimation.
2147      *
2148      * @param fundamentalMatrix            fundamental matrix to be fixed.
2149      * @param intrinsicZeroPrincipalPoint1 intrinsic parameters of camera 1
2150      *                                     assuming zero principal point.
2151      * @param intrinsicZeroPrincipalPoint2 intrinsic parameters of camera 2
2152      *                                     assuming zero principal point.
2153      * @param intrinsicPrincipalPoint1     intrinsic parameters of camera 1 using
2154      *                                     proper principal point.
2155      * @param intrinsicPrincipalPoint2     intrinsic parameters of camera 2 using
2156      *                                     proper principal point.
2157      * @throws EpipolarException if something fails.
2158      * @throws NotReadyException never happens.
2159      */
2160     private void fixFundamentalMatrix(
2161             final FundamentalMatrix fundamentalMatrix,
2162             final PinholeCameraIntrinsicParameters intrinsicZeroPrincipalPoint1,
2163             final PinholeCameraIntrinsicParameters intrinsicZeroPrincipalPoint2,
2164             final PinholeCameraIntrinsicParameters intrinsicPrincipalPoint1,
2165             final PinholeCameraIntrinsicParameters intrinsicPrincipalPoint2)
2166             throws EpipolarException, NotReadyException {
2167 
2168         // first compute essential matrix as E = K2a'F*K1a
2169         final var essential = new EssentialMatrix(fundamentalMatrix, intrinsicZeroPrincipalPoint1,
2170                 intrinsicZeroPrincipalPoint2);
2171         final var fixedFundamentalMatrix = essential.toFundamentalMatrix(intrinsicPrincipalPoint1,
2172                 intrinsicPrincipalPoint2);
2173         fixedFundamentalMatrix.normalize();
2174         currentEstimatedFundamentalMatrix.setFundamentalMatrix(fixedFundamentalMatrix);
2175         currentEstimatedFundamentalMatrix.setCovariance(null);
2176     }
2177 }