View Javadoc
1   /*
2    * Copyright (C) 2017 Alberto Irurueta Carro (alberto@irurueta.com)
3    *
4    * Licensed under the Apache License, Version 2.0 (the "License");
5    * you may not use this file except in compliance with the License.
6    * You may obtain a copy of the License at
7    *
8    *         http://www.apache.org/licenses/LICENSE-2.0
9    *
10   * Unless required by applicable law or agreed to in writing, software
11   * distributed under the License is distributed on an "AS IS" BASIS,
12   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13   * See the License for the specific language governing permissions and
14   * limitations under the License.
15   */
16  
17  package com.irurueta.ar.sfm;
18  
19  import com.irurueta.algebra.AlgebraException;
20  import com.irurueta.ar.calibration.estimators.LMSEImageOfAbsoluteConicEstimator;
21  import com.irurueta.ar.epipolar.Corrector;
22  import com.irurueta.ar.epipolar.EpipolarException;
23  import com.irurueta.ar.epipolar.EssentialMatrix;
24  import com.irurueta.ar.epipolar.FundamentalMatrix;
25  import com.irurueta.ar.epipolar.estimators.EightPointsFundamentalMatrixEstimator;
26  import com.irurueta.ar.epipolar.estimators.FundamentalMatrixEstimatorMethod;
27  import com.irurueta.ar.epipolar.estimators.FundamentalMatrixRobustEstimator;
28  import com.irurueta.ar.epipolar.estimators.LMedSFundamentalMatrixRobustEstimator;
29  import com.irurueta.ar.epipolar.estimators.MSACFundamentalMatrixRobustEstimator;
30  import com.irurueta.ar.epipolar.estimators.PROMedSFundamentalMatrixRobustEstimator;
31  import com.irurueta.ar.epipolar.estimators.PROSACFundamentalMatrixRobustEstimator;
32  import com.irurueta.ar.epipolar.estimators.RANSACFundamentalMatrixRobustEstimator;
33  import com.irurueta.ar.epipolar.estimators.SevenPointsFundamentalMatrixEstimator;
34  import com.irurueta.geometry.*;
35  import com.irurueta.geometry.estimators.LMedSPointCorrespondenceProjectiveTransformation2DRobustEstimator;
36  import com.irurueta.geometry.estimators.MSACPointCorrespondenceProjectiveTransformation2DRobustEstimator;
37  import com.irurueta.geometry.estimators.NotReadyException;
38  import com.irurueta.geometry.estimators.PROMedSPointCorrespondenceProjectiveTransformation2DRobustEstimator;
39  import com.irurueta.geometry.estimators.PROSACPointCorrespondenceProjectiveTransformation2DRobustEstimator;
40  import com.irurueta.geometry.estimators.PointCorrespondenceProjectiveTransformation2DRobustEstimator;
41  import com.irurueta.geometry.estimators.ProjectiveTransformation2DRobustEstimator;
42  import com.irurueta.geometry.estimators.RANSACPointCorrespondenceProjectiveTransformation2DRobustEstimator;
43  
44  import java.util.ArrayList;
45  import java.util.List;
46  
47  /**
48   * Base class in charge of estimating cameras and 3D reconstructed points from sparse
49   * image point correspondences in pairs of views.
50   * Views are processed in pairs so that fundamental matrix is estimated and pairs of
51   * cameras and reconstructed points are computed.
 * Because view pairs are processed separately, the scale of each view pair is
 * estimated individually, hence the scale will need to be consolidated into a
 * single consistent scale as additional view pairs are processed.
55   * @param <C> type of configuration.
56   * @param <R> type of re-constructor.
57   * @param <L> type of listener.
58   */
59  @SuppressWarnings("DuplicatedCode")
60  public abstract class BasePairedViewsSparseReconstructor<
61          C extends BasePairedViewsSparseReconstructorConfiguration<C>,
62          R extends BasePairedViewsSparseReconstructor<C, R, L>,
63          L extends BasePairedViewsSparseReconstructorListener<R>> {
64  
65      /**
66       * Minimum required number of views.
67       */
68      public static final int MIN_NUMBER_OF_VIEWS = 2;
69  
70      /**
71       * Default scale.
72       */
73      protected static final double DEFAULT_SCALE = 1.0;
74  
75      /**
76       * Current estimated camera in a metric stratum (i.e. up to scale).
77       */
78      protected EstimatedCamera currentMetricEstimatedCamera;
79  
80      /**
81       * Previous estimated camera in a metric stratum (i.e. up to scale).
82       */
83      protected EstimatedCamera previousMetricEstimatedCamera;
84  
85      /**
86       * Reconstructed 3D points for current pair of views in a metric stratum (i.e. up to scale).
87       */
88      protected List<ReconstructedPoint3D> metricReconstructedPoints;
89  
90      /**
91       * Transformation to set reference frame on estimated pair of Euclidean cameras.
92       * This is used when estimating a new pair of Euclidean cameras to transform such pair to
93       * the location and rotation of the last estimated Euclidean camera so that the first camera
94       * of the pair is not referred to the world origin.
95       */
96      protected MetricTransformation3D referenceEuclideanTransformation;
97  
98      /**
99       * Current estimated scale. This will typically converge to a constant value as more views are
100      * processed.
101      * The smaller the variance of estimated scale, the more accurate the scale will be.
102      */
103     protected double currentScale = DEFAULT_SCALE;
104 
105     /**
106      * Current estimated camera in euclidean stratum (i.e. with actual scale).
107      */
108     protected EstimatedCamera currentEuclideanEstimatedCamera;
109 
110     /**
111      * Previous estimated camera in Euclidean stratum (i.e. with actual scale).
112      */
113     protected EstimatedCamera previousEuclideanEstimatedCamera;
114 
115     /**
116      * Reconstructed 3D points for current pair of views in Euclidean stratum (i.e. with actual
117      * scale).
118      */
119     protected List<ReconstructedPoint3D> euclideanReconstructedPoints;
120 
121     /**
122      * Configuration for this re-constructor.
123      */
124     protected C configuration;
125 
126     /**
127      * Listener in charge of handling events such as when reconstruction starts,
128      * ends, when certain data is needed or when estimation of data has been
129      * computed.
130      */
131     protected L listener;
132 
133     /**
134      * Indicates whether reconstruction has failed or not.
135      */
136     protected volatile boolean failed;
137 
138     /**
139      * Indicates whether reconstruction is running or not.
140      */
141     protected volatile boolean running;
142 
143     /**
144      * ID of previous view.
145      */
146     protected int previousViewId = 0;
147 
148     /**
149      * ID of current view.
150      */
151     protected int currentViewId;
152 
153     /**
154      * Center of current Euclidean camera on last view pair.
155      */
156     protected Point3D lastEuclideanCameraCenter = new InhomogeneousPoint3D();
157 
158     /**
159      * Rotation of current Euclidean camera on last view pair.
160      */
161     protected Rotation3D lastEuclideanCameraRotation;
162 
163     /**
164      * Center of current metric camera on last view pair.
165      */
166     private Point3D lastMetricCameraCenter;
167 
168     /**
169      * Rotation of current metric camera on last view pair.
170      */
171     private Rotation3D mLastMetricCameraRotation;
172 
173     /**
174      * Inverse metric camera rotation. This is reused for memory efficiency.
175      */
176     private Rotation3D invMetricCameraRotation;
177 
178     /**
179      * Current estimated fundamental matrix.
180      */
181     private EstimatedFundamentalMatrix currentEstimatedFundamentalMatrix;
182 
183     /**
184      * Indicates whether reconstruction has been cancelled or not.
185      */
186     private volatile boolean cancelled;
187 
188     /**
189      * Counter of number of processed views.
190      */
191     private int viewCount;
192 
193     /**
194      * Indicates whether reconstruction has finished or not.
195      */
196     private boolean finished = false;
197 
198     /**
199      * Samples on previous view.
200      */
201     private List<Sample2D> previousViewSamples;
202 
203     /**
204      * Samples on last processed view (i.e. current view).
205      */
206     private List<Sample2D> currentViewSamples;
207 
208     /**
209      * Matches between first and current view.
210      * Views are always processed in pairs.
211      */
212     private final List<MatchedSamples> matches = new ArrayList<>();
213 
214     /**
215      * Transformation to set reference frame on estimated pair of metric cameras.
216      * This is used when estimating a new pair of metric cameras to transform such pair to
217      * the location and rotation of last estimated metric camera so that the first camera of
218      * the pair is not referred to the world origin.
219      */
220     private EuclideanTransformation3D referenceMetricTransformation;
221 
222     /**
223      * Constructor.
224      *
225      * @param configuration configuration for this re-constructor.
226      * @param listener      listener in charge of handling events.
227      * @throws NullPointerException if listener or configuration is not
228      *                              provided.
229      */
230     protected BasePairedViewsSparseReconstructor(final C configuration, final L listener) {
231         if (configuration == null || listener == null) {
232             throw new NullPointerException();
233         }
234         this.configuration = configuration;
235         this.listener = listener;
236     }
237 
238     /**
239      * Gets configuration for this re-constructor.
240      *
241      * @return configuration for this reconstructor.
242      */
243     public C getConfiguration() {
244         return configuration;
245     }
246 
247     /**
248      * Gets listener in charge of handling events such as when reconstruction
249      * starts, ends, when certain data is needed or when estimation of data has
250      * been computed.
251      *
252      * @return listener in charge of handling events.
253      */
254     public L getListener() {
255         return listener;
256     }
257 
258     /**
259      * Indicates whether reconstruction is running or not.
260      *
261      * @return true if reconstruction is running, false if reconstruction has
262      * stopped for any reason.
263      */
264     public boolean isRunning() {
265         return running;
266     }
267 
268     /**
269      * Indicates whether reconstruction has been cancelled or not.
270      *
271      * @return true if reconstruction has been cancelled, false otherwise.
272      */
273     public boolean isCancelled() {
274         return cancelled;
275     }
276 
277     /**
278      * Indicates whether reconstruction has failed or not.
279      *
280      * @return true if reconstruction has failed, false otherwise.
281      */
282     public boolean hasFailed() {
283         return failed;
284     }
285 
286     /**
287      * Indicates whether the reconstruction has finished.
288      *
289      * @return true if reconstruction has finished, false otherwise.
290      */
291     public boolean isFinished() {
292         return finished;
293     }
294 
295     /**
296      * Gets counter of number of processed views.
297      *
298      * @return counter of number of processed views.
299      */
300     public int getViewCount() {
301         return viewCount;
302     }
303 
304     /**
305      * Gets estimated fundamental matrix for current view.
306      * This fundamental matrix relates current view with the previously processed one.
307      *
308      * @return current estimated fundamental matrix.
309      */
310     public EstimatedFundamentalMatrix getCurrentEstimatedFundamentalMatrix() {
311         return currentEstimatedFundamentalMatrix;
312     }
313 
314     /**
315      * Gets estimated euclidean camera for current view (i.e. with actual scale).
316      *
317      * @return current estimated euclidean camera.
318      */
319     public EstimatedCamera getCurrentEuclideanEstimatedCamera() {
320         return currentEuclideanEstimatedCamera;
321     }
322 
323     /**
324      * Gets estimated Euclidean camera for previous view (i.e. with actual scale).
325      *
326      * @return previous estimated euclidean camera.
327      */
328     public EstimatedCamera getPreviousEuclideanEstimatedCamera() {
329         return previousEuclideanEstimatedCamera;
330     }
331 
332     /**
333      * Gets Euclidean reconstructed 3D points (i.e. with actual scale) for current
334      * pair of views.
335      *
336      * @return active euclidean reconstructed 3D points.
337      */
338     public List<ReconstructedPoint3D> getEuclideanReconstructedPoints() {
339         return euclideanReconstructedPoints;
340     }
341 
342     /**
343      * Gets current estimated scale. This will typically converge to a constant value as more views are
344      * processed.
345      * The smaller the variance of estimated scale, the more accurate the scale will be.
346      *
347      * @return current estimated scale.
348      */
349     public double getCurrentScale() {
350         return currentScale;
351     }
352 
353     /**
354      * Gets samples on previous view.
355      *
356      * @return samples on previous view.
357      */
358     public List<Sample2D> getPreviousViewSamples() {
359         return previousViewSamples;
360     }
361 
362     /**
363      * Gets samples on current view.
364      *
365      * @return samples on current view.
366      */
367     public List<Sample2D> getCurrentViewSamples() {
368         return currentViewSamples;
369     }
370 
371     /**
372      * Process one view-pair of all the available data during the reconstruction.
373      * This method can be called multiple times instead of {@link #start()} to build the
374      * reconstruction step by step, one view pair at a time.
375      * This method is useful when data is gathered on real time from a camera and the
376      * number of views is unknown.
377      *
378      * @return true if more views can be processed, false when reconstruction has finished.
379      */
380     public boolean processOneViewPair() {
381         if (viewCount == 0 && !running) {
382 
383             reset();
384             running = true;
385 
386             //noinspection unchecked
387             listener.onStart((R) this);
388         }
389 
390         //noinspection unchecked
391         if (!listener.hasMoreViewsAvailable((R) this)) {
392             //noinspection unchecked
393             listener.onFinish((R) this);
394             running = false;
395             finished = true;
396             return false;
397         }
398 
399         previousViewSamples = new ArrayList<>();
400         currentViewSamples = new ArrayList<>();
401         //noinspection unchecked
402         listener.onRequestSamplesForCurrentViewPair((R) this, viewCount, viewCount + 1,
403                 previousViewSamples, currentViewSamples);
404 
405         final boolean processed;
406         currentEstimatedFundamentalMatrix = null;
407         if (isFirstViewPair()) {
408             // for first view we simply keep samples (if enough are provided)
409             processed = processFirstViewPair();
410         } else {
411             processed = processAdditionalViewPair();
412         }
413 
414         if (processed) {
415             viewCount += 2;
416         }
417 
418         if (cancelled) {
419             //noinspection unchecked
420             listener.onCancel((R) this);
421         }
422 
423         return !finished;
424     }
425 
426     /**
427      * Indicates whether current view pair is the first one.
428      *
429      * @return true if current view pair is the first one, false otherwise.
430      */
431     public boolean isFirstViewPair() {
432         return viewCount == 0;
433     }
434 
435     /**
436      * Indicates whether current view pair is an additional one.
437      *
438      * @return true if current view pair is an additional one, false otherwise.
439      */
440     public boolean isAdditionalViewPair() {
441         return !isFirstViewPair();
442     }
443 
444     /**
445      * Starts reconstruction of all available data to reconstruct the whole scene.
446      * If reconstruction has already started and is running, calling this method
447      * has no effect.
448      * This method is useful when all data is available before starting the reconstruction.
449      */
450     public void start() {
451         if (running) {
452             // already started
453             return;
454         }
455 
456         while (processOneViewPair()) {
457             if (cancelled) {
458                 break;
459             }
460         }
461     }
462 
463     /**
464      * Cancels reconstruction.
465      * If reconstruction has already been cancelled, calling this method has no effect.
466      */
467     public void cancel() {
468         if (cancelled) {
469             // already cancelled
470             return;
471         }
472 
473         cancelled = true;
474     }
475 
476     /**
477      * Resets this instance so that a reconstruction can be started from the beginning without cancelling
478      * current one.
479      */
480     public void reset() {
481         if (previousViewSamples != null) {
482             previousViewSamples.clear();
483         }
484         if (currentViewSamples != null) {
485             currentViewSamples.clear();
486         }
487 
488         matches.clear();
489 
490         cancelled = failed = false;
491         viewCount = 0;
492         running = false;
493 
494         currentEstimatedFundamentalMatrix = null;
495         currentMetricEstimatedCamera = previousMetricEstimatedCamera = null;
496         metricReconstructedPoints = null;
497         currentScale = DEFAULT_SCALE;
498         currentEuclideanEstimatedCamera = previousEuclideanEstimatedCamera = null;
499         euclideanReconstructedPoints = null;
500 
501         previousViewId = 0;
502         currentViewId = 0;
503 
504         finished = false;
505     }
506 
507     /**
508      * Gets estimated metric camera for current view (i.e. up to scale).
509      *
510      * @return current estimated metric camera.
511      */
512     protected EstimatedCamera getCurrentMetricEstimatedCamera() {
513         return currentMetricEstimatedCamera;
514     }
515 
516     /**
517      * Gets estimated camera for previous view (i.e. up to scale).
518      *
519      * @return previous estimated metric camera.
520      */
521     protected EstimatedCamera getPreviousMetricEstimatedCamera() {
522         return previousMetricEstimatedCamera;
523     }
524 
525     /**
526      * Gets metric reconstructed 3D points (i.e. up to scale) for current pair of views.
527      *
528      * @return active metric reconstructed 3D points.
529      */
530     protected List<ReconstructedPoint3D> getMetricReconstructedPoints() {
531         return metricReconstructedPoints;
532     }
533 
534     /**
535      * Transforms cameras on current pair of views so that they are referred to
536      * last kept location and rotation and upgrades cameras from metric stratum to
537      * Euclidean stratum.
538      *
539      * @param isInitialPairOfViews   true if initial pair of views is being processed, false otherwise.
540      * @param hasAbsoluteOrientation true if absolute orientation is required, false otherwise.
541      * @return true if cameras were successfully transformed.
542      */
543     protected boolean transformPairOfCamerasAndPoints(
544             final boolean isInitialPairOfViews, final boolean hasAbsoluteOrientation) {
545         if (isInitialPairOfViews) {
546             // initial pair does not need transformation
547             return true;
548         }
549 
550         if (previousMetricEstimatedCamera == null || currentMetricEstimatedCamera == null) {
551             return false;
552         }
553 
554         final var previousMetricCamera = previousMetricEstimatedCamera.getCamera();
555         final var currentMetricCamera = currentMetricEstimatedCamera.getCamera();
556         if (previousMetricCamera == null || currentMetricCamera == null) {
557             return false;
558         }
559 
560         if (invMetricCameraRotation == null) {
561             invMetricCameraRotation = mLastMetricCameraRotation.inverseRotationAndReturnNew();
562         } else {
563             mLastMetricCameraRotation.inverseRotation(invMetricCameraRotation);
564         }
565 
566         if (referenceMetricTransformation == null) {
567             referenceMetricTransformation = new EuclideanTransformation3D(invMetricCameraRotation);
568         } else {
569             referenceMetricTransformation.setRotation(invMetricCameraRotation);
570         }
571         referenceMetricTransformation.setTranslation(lastMetricCameraCenter);
572 
573         try {
574             referenceMetricTransformation.transform(previousMetricCamera);
575             referenceMetricTransformation.transform(currentMetricCamera);
576 
577             Point3D p;
578             for (final var metricReconstructedPoint : metricReconstructedPoints) {
579                 p = metricReconstructedPoint.getPoint();
580                 referenceMetricTransformation.transform(p, p);
581             }
582             return true;
583         } catch (final AlgebraException e) {
584             return false;
585         }
586     }
587 
588     /**
589      * Processes data for the first view pair.
590      *
591      * @return true if view pair was successfully processed, false otherwise.
592      */
593     private boolean processFirstViewPair() {
594         return processViewPair(true);
595     }
596 
597     /**
598      * Processes data for an additional view pair.
599      *
600      * @return true if view pair was successfully processed, false otherwise.
601      */
602     private boolean processAdditionalViewPair() {
603         return processViewPair(false);
604     }
605 
606     /**
607      * Processed data for a view pair.
608      *
609      * @param isInitialPairOfViews true if initial pair of views is being processed,
610      *                             false otherwise.
611      * @return true if view pair was successfully processed, false otherwise.
612      */
613     private boolean processViewPair(final boolean isInitialPairOfViews) {
614         // for second view, check that we have enough samples
615         if (hasEnoughSamples(currentViewSamples)) {
616 
617             // find matches
618             matches.clear();
619             var viewId1 = viewCount;
620             var viewId2 = viewCount + 1;
621             //noinspection unchecked
622             listener.onRequestMatches((R) this, viewId1, viewId2, previousViewSamples, currentViewSamples, matches);
623 
624             if (hasEnoughMatches(matches)) {
625                 // if enough matches are retrieved, attempt to compute
626                 // fundamental matrix
627                 if ((configuration.isGeneralSceneAllowed() && estimateFundamentalMatrix(matches, viewId1, viewId2))
628                         || (configuration.isPlanarSceneAllowed()
629                         && estimatePlanarFundamentalMatrix(matches, viewId1, viewId2))) {
630                     // fundamental matrix could be estimated
631                     // noinspection unchecked
632                     listener.onSamplesAccepted((R) this, viewId1, viewId2, previousViewSamples, currentViewSamples);
633                     previousViewId = viewId1;
634                     currentViewId = viewId2;
635 
636                     //noinspection unchecked
637                     listener.onFundamentalMatrixEstimated((R) this, viewId1, viewId2,
638                             currentEstimatedFundamentalMatrix);
639 
640                     if (estimatePairOfCamerasAndPoints(isInitialPairOfViews)) {
641                         //noinspection unchecked
642                         listener.onEuclideanCameraPairEstimated((R) this, previousViewId, currentViewId, currentScale,
643                                 previousEuclideanEstimatedCamera, currentEuclideanEstimatedCamera);
644                         //noinspection unchecked
645                         listener.onEuclideanReconstructedPointsEstimated((R) this, previousViewId, currentViewId,
646                                 currentScale, euclideanReconstructedPoints);
647                         return true;
648                     } else {
649                         // pair of cameras estimation failed
650                         failed = true;
651                         //noinspection unchecked
652                         listener.onFail((R) this);
653                         return false;
654                     }
655                 } else {
656                     // estimation of fundamental matrix failed
657                     //noinspection unchecked
658                     listener.onSamplesRejected((R) this, previousViewId, currentViewId, previousViewSamples,
659                             currentViewSamples);
660                     return false;
661                 }
662             }
663         }
664 
665         //noinspection unchecked
666         listener.onSamplesRejected((R) this, previousViewId, currentViewId, previousViewSamples, currentViewSamples);
667         return false;
668     }
669 
670     /**
671      * Indicates whether implementations of a re-constructor uses absolute orientation or
672      * not.
673      *
674      * @return true if absolute orientation is used, false, otherwise.
675      */
676     protected abstract boolean hasAbsoluteOrientation();
677 
678     /**
679      * Indicates whether there are enough samples to estimate a fundamental
680      * matrix.
681      *
682      * @param samples samples to check.
683      * @return true if there are enough samples, false otherwise.
684      */
685     private boolean hasEnoughSamples(final List<Sample2D> samples) {
686         return hasEnoughSamplesOrMatches(samples != null ? samples.size() : 0);
687     }
688 
689     /**
690      * Indicates whether there are enough matches to estimate a fundamental
691      * matrix.
692      *
693      * @param matches matches to check.
694      * @return true if there are enough matches, false otherwise.
695      */
696     private boolean hasEnoughMatches(final List<MatchedSamples> matches) {
697         return hasEnoughSamplesOrMatches(matches != null ? matches.size() : 0);
698     }
699 
700     /**
701      * Indicates whether there are enough matches or samples to estimate a
702      * fundamental matrix.
703      *
704      * @param count number of matches or samples.
705      * @return true if there are enough matches or samples, false otherwise.
706      */
707     private boolean hasEnoughSamplesOrMatches(final int count) {
708         if (configuration.isGeneralSceneAllowed()) {
709             if (configuration.getNonRobustFundamentalMatrixEstimatorMethod()
710                     == FundamentalMatrixEstimatorMethod.EIGHT_POINTS_ALGORITHM) {
711                 return count >= EightPointsFundamentalMatrixEstimator.MIN_REQUIRED_POINTS;
712             } else if (configuration.getNonRobustFundamentalMatrixEstimatorMethod()
713                     == FundamentalMatrixEstimatorMethod.SEVEN_POINTS_ALGORITHM) {
714                 return count >= SevenPointsFundamentalMatrixEstimator.MIN_REQUIRED_POINTS;
715             }
716         } else if (configuration.isPlanarSceneAllowed()) {
717             return count >= ProjectiveTransformation2DRobustEstimator.MINIMUM_SIZE;
718         }
719         return false;
720     }
721 
722     /**
723      * Estimates fundamental matrix for provided matches, when 3D points lay in
724      * a general non-degenerate 3D configuration.
725      *
726      * @param matches pairs of matches to find fundamental matrix.
727      * @param viewId1 id of first view.
728      * @param viewId2 id of second view.
729      * @return true if estimation succeeded, false otherwise.
730      */
731     private boolean estimateFundamentalMatrix(final List<MatchedSamples> matches, final int viewId1,
732                                               final int viewId2) {
733         if (matches == null) {
734             return false;
735         }
736 
737         final var count = matches.size();
738         final var leftSamples = new ArrayList<Sample2D>(count);
739         final var rightSamples = new ArrayList<Sample2D>(count);
740         final var leftPoints = new ArrayList<Point2D>(count);
741         final var rightPoints = new ArrayList<Point2D>(count);
742         final var qualityScores = new double[count];
743         double principalPointX;
744         double principalPointY;
745         if (configuration.getPairedCamerasEstimatorMethod() == InitialCamerasEstimatorMethod.DUAL_ABSOLUTE_QUADRIC
746                 || configuration.getPairedCamerasEstimatorMethod()
747                 == InitialCamerasEstimatorMethod.DUAL_ABSOLUTE_QUADRIC_AND_ESSENTIAL_MATRIX) {
748             principalPointX = configuration.getPrincipalPointX();
749             principalPointY = configuration.getPrincipalPointY();
750         } else {
751             principalPointX = principalPointY = 0.0;
752         }
753 
754         var i = 0;
755         for (final var match : matches) {
756             final var samples = match.getSamples();
757             if (samples.length != MIN_NUMBER_OF_VIEWS) {
758                 return false;
759             }
760 
761             leftSamples.add(samples[0]);
762             rightSamples.add(samples[1]);
763 
764             final var leftPoint = Point2D.create();
765             leftPoint.setInhomogeneousCoordinates(
766                     samples[0].getPoint().getInhomX() - principalPointX,
767                     samples[0].getPoint().getInhomY() - principalPointY);
768             leftPoints.add(leftPoint);
769 
770             final var rightPoint = Point2D.create();
771             rightPoint.setInhomogeneousCoordinates(
772                     samples[1].getPoint().getInhomX() - principalPointX,
773                     samples[1].getPoint().getInhomY() - principalPointY);
774             rightPoints.add(rightPoint);
775 
776             qualityScores[i] = match.getQualityScore();
777             i++;
778         }
779 
780         try {
781             final var estimator = FundamentalMatrixRobustEstimator.create(leftPoints, rightPoints, qualityScores,
782                     configuration.getRobustFundamentalMatrixEstimatorMethod());
783             estimator.setNonRobustFundamentalMatrixEstimatorMethod(
784                     configuration.getNonRobustFundamentalMatrixEstimatorMethod());
785             estimator.setResultRefined(configuration.isFundamentalMatrixRefined());
786             estimator.setCovarianceKept(configuration.isFundamentalMatrixCovarianceKept());
787             estimator.setConfidence(configuration.getFundamentalMatrixConfidence());
788             estimator.setMaxIterations(configuration.getFundamentalMatrixMaxIterations());
789 
790             switch (configuration.getRobustFundamentalMatrixEstimatorMethod()) {
791                 case LMEDS:
792                     ((LMedSFundamentalMatrixRobustEstimator) estimator).setStopThreshold(
793                             configuration.getFundamentalMatrixThreshold());
794                     break;
795                 case MSAC:
796                     ((MSACFundamentalMatrixRobustEstimator) estimator).setThreshold(
797                             configuration.getFundamentalMatrixThreshold());
798                     break;
799                 case PROMEDS:
800                     ((PROMedSFundamentalMatrixRobustEstimator) estimator).setStopThreshold(
801                             configuration.getFundamentalMatrixThreshold());
802                     break;
803                 case PROSAC:
804                     final PROSACFundamentalMatrixRobustEstimator prosacEstimator =
805                             (PROSACFundamentalMatrixRobustEstimator) estimator;
806                     prosacEstimator.setThreshold(configuration.getFundamentalMatrixThreshold());
807                     prosacEstimator.setComputeAndKeepInliersEnabled(
808                             configuration.getFundamentalMatrixComputeAndKeepInliers());
809                     prosacEstimator.setComputeAndKeepResidualsEnabled(
810                             configuration.getFundamentalMatrixComputeAndKeepResiduals());
811                     break;
812                 case RANSAC:
813                     final RANSACFundamentalMatrixRobustEstimator ransacEstimator =
814                             (RANSACFundamentalMatrixRobustEstimator) estimator;
815                     ransacEstimator.setThreshold(configuration.getFundamentalMatrixThreshold());
816                     ransacEstimator.setComputeAndKeepInliersEnabled(
817                             configuration.getFundamentalMatrixComputeAndKeepInliers());
818                     ransacEstimator.setComputeAndKeepResidualsEnabled(
819                             configuration.getFundamentalMatrixComputeAndKeepResiduals());
820                     break;
821                 default:
822                     break;
823             }
824 
825 
826             final var fundamentalMatrix = estimator.estimate();
827 
828             currentEstimatedFundamentalMatrix = new EstimatedFundamentalMatrix();
829             currentEstimatedFundamentalMatrix.setFundamentalMatrix(fundamentalMatrix);
830             currentEstimatedFundamentalMatrix.setViewId1(viewId1);
831             currentEstimatedFundamentalMatrix.setViewId2(viewId2);
832             currentEstimatedFundamentalMatrix.setCovariance(estimator.getCovariance());
833 
834             // determine quality score and inliers
835             final var inliersData = estimator.getInliersData();
836             if (inliersData != null) {
837                 final var numInliers = inliersData.getNumInliers();
838                 final var inliers = inliersData.getInliers();
839                 final var length = inliers.length();
840                 var fundamentalMatrixQualityScore = 0.0;
841                 for (i = 0; i < length; i++) {
842                     if (inliers.get(i)) {
843                         // inlier
844                         fundamentalMatrixQualityScore += qualityScores[i] / numInliers;
845                     }
846                 }
847                 currentEstimatedFundamentalMatrix.setQualityScore(fundamentalMatrixQualityScore);
848                 currentEstimatedFundamentalMatrix.setInliers(inliers);
849             }
850 
851             // store left/right samples
852             currentEstimatedFundamentalMatrix.setLeftSamples(leftSamples);
853             currentEstimatedFundamentalMatrix.setRightSamples(rightSamples);
854 
855             return true;
856         } catch (final Exception e) {
857             return false;
858         }
859     }
860 
    /**
     * Estimates fundamental matrix for provided matches, when 3D points lay in
     * a planar 3D scene.
     * The fundamental matrix is derived from a robustly estimated planar
     * homography relating both views, plus intrinsic parameters that are either
     * provided by the listener or self-calibrated from the homography.
     *
     * @param matches pairs of matches to find fundamental matrix.
     * @param viewId1 id of first view.
     * @param viewId2 id of second view.
     * @return true if estimation succeeded, false otherwise.
     */
    private boolean estimatePlanarFundamentalMatrix(final List<MatchedSamples> matches, final int viewId1,
                                                    final int viewId2) {
        if (matches == null) {
            return false;
        }

        final var count = matches.size();
        final var leftSamples = new ArrayList<Sample2D>();
        final var rightSamples = new ArrayList<Sample2D>();
        final var leftPoints = new ArrayList<Point2D>();
        final var rightPoints = new ArrayList<Point2D>();
        final var qualityScores = new double[count];
        final double principalPointX;
        final double principalPointY;
        // DAQ-based camera estimation methods work with coordinates expressed
        // relative to the configured principal point; for other methods no
        // offset is applied
        if (configuration.getPairedCamerasEstimatorMethod() == InitialCamerasEstimatorMethod.DUAL_ABSOLUTE_QUADRIC
                || configuration.getPairedCamerasEstimatorMethod()
                == InitialCamerasEstimatorMethod.DUAL_ABSOLUTE_QUADRIC_AND_ESSENTIAL_MATRIX) {
            principalPointX = configuration.getPrincipalPointX();
            principalPointY = configuration.getPrincipalPointY();
        } else {
            principalPointX = principalPointY = 0.0;
        }

        var i = 0;
        for (final var match : matches) {
            final var samples = match.getSamples();
            // each match must contain exactly one sample per view of the pair
            if (samples.length != MIN_NUMBER_OF_VIEWS) {
                return false;
            }

            leftSamples.add(samples[0]);
            rightSamples.add(samples[1]);

            // subtract principal point offset from each sample coordinate
            final var leftPoint = Point2D.create();
            leftPoint.setInhomogeneousCoordinates(
                    samples[0].getPoint().getInhomX() - principalPointX,
                    samples[0].getPoint().getInhomY() - principalPointY);
            leftPoints.add(leftPoint);

            final var rightPoint = Point2D.create();
            rightPoint.setInhomogeneousCoordinates(
                    samples[1].getPoint().getInhomX() - principalPointX,
                    samples[1].getPoint().getInhomY() - principalPointY);
            rightPoints.add(rightPoint);

            qualityScores[i] = match.getQualityScore();
            i++;
        }

        try {
            // configure robust planar homography estimator from configuration
            final var homographyEstimator = PointCorrespondenceProjectiveTransformation2DRobustEstimator.create(
                    configuration.getRobustPlanarHomographyEstimatorMethod());
            homographyEstimator.setResultRefined(configuration.isPlanarHomographyRefined());
            homographyEstimator.setCovarianceKept(configuration.isPlanarHomographyCovarianceKept());
            homographyEstimator.setConfidence(configuration.getPlanarHomographyConfidence());
            homographyEstimator.setMaxIterations(configuration.getPlanarHomographyMaxIterations());

            // thresholds (and inlier/residual bookkeeping for PROSAC/RANSAC) are
            // only exposed on the concrete estimator subclasses, hence the casts
            switch (configuration.getRobustPlanarHomographyEstimatorMethod()) {
                case LMEDS:
                    ((LMedSPointCorrespondenceProjectiveTransformation2DRobustEstimator) homographyEstimator)
                            .setStopThreshold(configuration.getPlanarHomographyThreshold());
                    break;
                case MSAC:
                    ((MSACPointCorrespondenceProjectiveTransformation2DRobustEstimator) homographyEstimator)
                            .setThreshold(configuration.getPlanarHomographyThreshold());
                    break;
                case PROMEDS:
                    ((PROMedSPointCorrespondenceProjectiveTransformation2DRobustEstimator) homographyEstimator)
                            .setStopThreshold(configuration.getPlanarHomographyThreshold());
                    break;
                case PROSAC:
                    PROSACPointCorrespondenceProjectiveTransformation2DRobustEstimator prosacHomographyEstimator =
                            (PROSACPointCorrespondenceProjectiveTransformation2DRobustEstimator) homographyEstimator;

                    prosacHomographyEstimator.setThreshold(configuration.getPlanarHomographyThreshold());
                    prosacHomographyEstimator.setComputeAndKeepInliersEnabled(
                            configuration.getPlanarHomographyComputeAndKeepInliers());
                    prosacHomographyEstimator.setComputeAndKeepResidualsEnabled(
                            configuration.getPlanarHomographyComputeAndKeepResiduals());
                    break;
                case RANSAC:
                    RANSACPointCorrespondenceProjectiveTransformation2DRobustEstimator ransacHomographyEstimator =
                            (RANSACPointCorrespondenceProjectiveTransformation2DRobustEstimator) homographyEstimator;

                    ransacHomographyEstimator.setThreshold(configuration.getPlanarHomographyThreshold());
                    ransacHomographyEstimator.setComputeAndKeepInliersEnabled(
                            configuration.getPlanarHomographyComputeAndKeepInliers());
                    ransacHomographyEstimator.setComputeAndKeepResidualsEnabled(
                            configuration.getPlanarHomographyComputeAndKeepResiduals());
                    break;
                default:
                    break;
            }

            final PlanarBestFundamentalMatrixEstimatorAndReconstructor fundamentalMatrixEstimator =
                    new PlanarBestFundamentalMatrixEstimatorAndReconstructor();
            fundamentalMatrixEstimator.setHomographyEstimator(homographyEstimator);
            fundamentalMatrixEstimator.setLeftAndRightPoints(leftPoints, rightPoints);
            fundamentalMatrixEstimator.setQualityScores(qualityScores);

            // obtain intrinsic parameters for both views, either from the
            // listener or (if the listener provides none) self-calibrated from
            // the homography using the Image of Absolute Conic (IAC)
            PinholeCameraIntrinsicParameters intrinsic1 = null;
            PinholeCameraIntrinsicParameters intrinsic2 = null;
            if (configuration.areIntrinsicParametersKnown()) {
                //noinspection unchecked
                intrinsic1 = listener.onIntrinsicParametersRequested((R) this, viewId1);
                //noinspection unchecked
                intrinsic2 = listener.onIntrinsicParametersRequested((R) this, viewId2);
            }
            if (intrinsic1 == null && intrinsic2 == null) {
                // estimate homography
                final var homography = homographyEstimator.estimate();

                // estimate intrinsic parameters using the Image of Absolute
                // Conic (IAC)
                final var homographies = new ArrayList<Transformation2D>();
                homographies.add(homography);

                final var iacEstimator = new LMSEImageOfAbsoluteConicEstimator(homographies);
                final var iac = iacEstimator.estimate();

                // same intrinsics are assumed for both views in this case
                intrinsic1 = intrinsic2 = iac.getIntrinsicParameters();

            } else if (intrinsic1 == null) { // && intrinsic2 != null
                // reuse the known intrinsics for the missing view
                intrinsic1 = intrinsic2;
            } else if (intrinsic2 == null) { // && intrinsic1 != null
                intrinsic2 = intrinsic1;
            }
            fundamentalMatrixEstimator.setLeftIntrinsics(intrinsic1);
            fundamentalMatrixEstimator.setRightIntrinsics(intrinsic2);

            fundamentalMatrixEstimator.estimateAndReconstruct();

            final var fundamentalMatrix = fundamentalMatrixEstimator.getFundamentalMatrix();

            // NOTE(review): unlike the general (non-planar) estimation path, no
            // covariance is stored on the estimated fundamental matrix here
            currentEstimatedFundamentalMatrix = new EstimatedFundamentalMatrix();
            currentEstimatedFundamentalMatrix.setFundamentalMatrix(fundamentalMatrix);
            currentEstimatedFundamentalMatrix.setViewId1(viewId1);
            currentEstimatedFundamentalMatrix.setViewId2(viewId2);

            // determine quality score and inliers
            // (quality is the average score of the homography inliers)
            final var inliersData = homographyEstimator.getInliersData();
            if (inliersData != null) {
                final var numInliers = inliersData.getNumInliers();
                final var inliers = inliersData.getInliers();
                final var length = inliers.length();
                var fundamentalMatrixQualityScore = 0.0;
                for (i = 0; i < length; i++) {
                    if (inliers.get(i)) {
                        // inlier
                        fundamentalMatrixQualityScore += qualityScores[i] / numInliers;
                    }
                }
                currentEstimatedFundamentalMatrix.setQualityScore(
                        fundamentalMatrixQualityScore);
                currentEstimatedFundamentalMatrix.setInliers(inliers);
            }

            // store left/right samples
            currentEstimatedFundamentalMatrix.setLeftSamples(leftSamples);
            currentEstimatedFundamentalMatrix.setRightSamples(rightSamples);

            return true;
        } catch (final Exception e) {
            // any failure during estimation is reported to the caller as false
            return false;
        }
    }
1036 
1037     /**
1038      * Estimates a pair of cameras and reconstructed points.
1039      *
1040      * @param isInitialPairOfViews true if initial pair of views is being processed,
1041      *                             false otherwise.
1042      * @return true if cameras and points could be estimated, false if something
1043      * failed.
1044      */
1045     private boolean estimatePairOfCamerasAndPoints(final boolean isInitialPairOfViews) {
1046         return switch (configuration.getPairedCamerasEstimatorMethod()) {
1047             case ESSENTIAL_MATRIX -> estimateInitialCamerasAndPointsEssential(isInitialPairOfViews);
1048             case DUAL_IMAGE_OF_ABSOLUTE_CONIC -> estimateInitialCamerasAndPointsDIAC(isInitialPairOfViews);
1049             case DUAL_ABSOLUTE_QUADRIC -> estimateInitialCamerasAndPointsDAQ(isInitialPairOfViews);
1050             default -> estimateInitialCamerasAndPointsDAQAndEssential(isInitialPairOfViews);
1051         };
1052     }
1053 
    /**
     * Estimates initial cameras and reconstructed points using the Dual
     * Absolute Quadric to estimate intrinsic parameters and then use those
     * intrinsic parameters with the essential matrix.
     *
     * @param isInitialPairOfViews true if initial pair of views is being processed,
     *                             false otherwise.
     * @return true if cameras and points could be estimated, false if something
     * failed.
     */
    private boolean estimateInitialCamerasAndPointsDAQAndEssential(final boolean isInitialPairOfViews) {
        // for non-initial view, keep last center and rotation
        if (!isInitialPairOfViews && keepLastCenterAndRotation()) {
            return false;
        }

        try {
            final var fundamentalMatrix = currentEstimatedFundamentalMatrix.getFundamentalMatrix();
            // NOTE(review): unlike estimateInitialCamerasAndPointsDAQ, the
            // fundamental matrix is not normalized here before estimation —
            // confirm whether the DAQ estimator normalizes internally

            final var estimator = new DualAbsoluteQuadricInitialCamerasEstimator(fundamentalMatrix);
            estimator.setAspectRatio(configuration.getPairedCamerasAspectRatio());
            estimator.estimate();

            final var camera1 = estimator.getEstimatedLeftCamera();
            final var camera2 = estimator.getEstimatedRightCamera();

            // decompose to obtain intrinsics (estimated with a zero principal
            // point by the DAQ estimator)
            camera1.decompose();
            camera2.decompose();

            final var intrinsicZeroPrincipalPoint1 = camera1.getIntrinsicParameters();
            final var intrinsicZeroPrincipalPoint2 = camera2.getIntrinsicParameters();

            final var principalPointX = configuration.getPrincipalPointX();
            final var principalPointY = configuration.getPrincipalPointY();

            // shift estimated principal points by the configured offsets
            final var intrinsic1 = new PinholeCameraIntrinsicParameters(intrinsicZeroPrincipalPoint1);
            intrinsic1.setHorizontalPrincipalPoint(intrinsic1.getHorizontalPrincipalPoint() + principalPointX);
            intrinsic1.setVerticalPrincipalPoint(intrinsic1.getVerticalPrincipalPoint() + principalPointY);

            final var intrinsic2 = new PinholeCameraIntrinsicParameters(intrinsicZeroPrincipalPoint2);
            intrinsic2.setHorizontalPrincipalPoint(intrinsic2.getHorizontalPrincipalPoint() + principalPointX);
            intrinsic2.setVerticalPrincipalPoint(intrinsic2.getVerticalPrincipalPoint() + principalPointY);

            // fix fundamental matrix to account for principal point different
            // from zero
            fixFundamentalMatrix(fundamentalMatrix, intrinsicZeroPrincipalPoint1, intrinsicZeroPrincipalPoint2,
                    intrinsic1, intrinsic2);

            return estimateInitialCamerasAndPointsEssential(intrinsic1, intrinsic2)
                    && transformPairOfCamerasAndPoints(isInitialPairOfViews, hasAbsoluteOrientation());
        } catch (final Exception e) {
            // estimation failures are reported as false rather than propagated
            return false;
        }
    }
1108 
    /**
     * Estimates initial cameras and reconstructed points using the Dual
     * Absolute Quadric.
     *
     * @param isInitialPairOfViews true if initial pair of views is being processed,
     *                             false otherwise.
     * @return true if cameras and points could be estimated, false if something
     * failed.
     */
    private boolean estimateInitialCamerasAndPointsDAQ(final boolean isInitialPairOfViews) {
        // for non-initial view, keep last center and rotation
        if (!isInitialPairOfViews && keepLastCenterAndRotation()) {
            return false;
        }

        try {
            final var fundamentalMatrix = currentEstimatedFundamentalMatrix.getFundamentalMatrix();
            fundamentalMatrix.normalize();

            // estimate both cameras from the fundamental matrix using the Dual
            // Absolute Quadric (intrinsics estimated assuming zero principal point)
            final var estimator = new DualAbsoluteQuadricInitialCamerasEstimator(fundamentalMatrix);
            estimator.setAspectRatio(configuration.getPairedCamerasAspectRatio());
            estimator.estimate();

            final var camera1 = estimator.getEstimatedLeftCamera();
            final var camera2 = estimator.getEstimatedRightCamera();

            camera1.decompose();
            camera2.decompose();

            final var intrinsicZeroPrincipalPoint1 = camera1.getIntrinsicParameters();
            final var intrinsicZeroPrincipalPoint2 = camera2.getIntrinsicParameters();

            final var principalPointX = configuration.getPrincipalPointX();
            final var principalPointY = configuration.getPrincipalPointY();

            // shift principal points by configured offsets and update both cameras
            final var intrinsic1 = new PinholeCameraIntrinsicParameters(intrinsicZeroPrincipalPoint1);
            intrinsic1.setHorizontalPrincipalPoint(intrinsic1.getHorizontalPrincipalPoint() + principalPointX);
            intrinsic1.setVerticalPrincipalPoint(intrinsic1.getVerticalPrincipalPoint() + principalPointY);
            camera1.setIntrinsicParameters(intrinsic1);

            final var intrinsic2 = new PinholeCameraIntrinsicParameters(intrinsicZeroPrincipalPoint2);
            intrinsic2.setHorizontalPrincipalPoint(intrinsic2.getHorizontalPrincipalPoint() + principalPointX);
            intrinsic2.setVerticalPrincipalPoint(intrinsic2.getVerticalPrincipalPoint() + principalPointY);
            camera2.setIntrinsicParameters(intrinsic2);

            previousMetricEstimatedCamera = new EstimatedCamera();
            previousMetricEstimatedCamera.setCamera(camera1);

            currentMetricEstimatedCamera = new EstimatedCamera();
            currentMetricEstimatedCamera.setCamera(camera2);

            // fix fundamental matrix to account for principal point different
            // from zero
            fixFundamentalMatrix(fundamentalMatrix, intrinsicZeroPrincipalPoint1, intrinsicZeroPrincipalPoint2,
                    intrinsic1, intrinsic2);

            // triangulate points
            Corrector corrector = null;
            if (configuration.getPairedCamerasCorrectorType() != null) {
                corrector = Corrector.create(fundamentalMatrix, configuration.getPairedCamerasCorrectorType());
            }

            // use all points used for fundamental matrix estimation
            final var samples1 = currentEstimatedFundamentalMatrix.getLeftSamples();
            final var samples2 = currentEstimatedFundamentalMatrix.getRightSamples();

            final var points1 = new ArrayList<Point2D>();
            final var points2 = new ArrayList<Point2D>();
            final var length = samples1.size();
            for (var i = 0; i < length; i++) {
                final var sample1 = samples1.get(i);
                final var sample2 = samples2.get(i);

                final var point1 = sample1.getPoint();
                final var point2 = sample2.getPoint();

                points1.add(point1);
                points2.add(point2);
            }

            // correct points if needed
            // (when no corrector is configured, the raw points are used as-is)
            final List<Point2D> correctedPoints1;
            final List<Point2D> correctedPoints2;
            if (corrector != null) {
                corrector.setLeftAndRightPoints(points1, points2);
                corrector.correct();

                correctedPoints1 = corrector.getLeftCorrectedPoints();
                correctedPoints2 = corrector.getRightCorrectedPoints();
            } else {
                correctedPoints1 = points1;
                correctedPoints2 = points2;
            }

            // triangulate points
            // (triangulator type is chosen according to configuration)
            final SinglePoint3DTriangulator triangulator;
            if (configuration.getDaqUseHomogeneousPointTriangulator()) {
                triangulator = SinglePoint3DTriangulator.create(Point3DTriangulatorType.LMSE_HOMOGENEOUS_TRIANGULATOR);
            } else {
                triangulator = SinglePoint3DTriangulator.create(
                        Point3DTriangulatorType.LMSE_INHOMOGENEOUS_TRIANGULATOR);
            }

            final var cameras = new ArrayList<PinholeCamera>();
            cameras.add(camera1);
            cameras.add(camera2);

            metricReconstructedPoints = new ArrayList<>();
            final var points = new ArrayList<Point2D>();
            final var numPoints = correctedPoints1.size();
            Point3D triangulatedPoint;
            ReconstructedPoint3D reconstructedPoint;
            for (var i = 0; i < numPoints; i++) {
                // "points" list is reused across iterations to avoid reallocations
                points.clear();
                points.add(correctedPoints1.get(i));
                points.add(correctedPoints2.get(i));

                triangulator.setPointsAndCameras(points, cameras);
                triangulatedPoint = triangulator.triangulate();

                reconstructedPoint = new ReconstructedPoint3D();
                reconstructedPoint.setPoint(triangulatedPoint);

                // only points reconstructed in front of both cameras are
                // considered valid
                final var front1 = camera1.isPointInFrontOfCamera(triangulatedPoint);
                final var front2 = camera2.isPointInFrontOfCamera(triangulatedPoint);
                reconstructedPoint.setInlier(front1 && front2);

                metricReconstructedPoints.add(reconstructedPoint);
            }

            return transformPairOfCamerasAndPoints(isInitialPairOfViews, hasAbsoluteOrientation());
        } catch (final Exception e) {
            // any failure during estimation or triangulation yields false
            return false;
        }
    }
1246 
1247     /**
1248      * Estimates initial cameras and reconstructed points using Dual Image of
1249      * Absolute Conic.
1250      *
1251      * @param isInitialPairOfViews true if initial pair of views is being processed,
1252      *                             false otherwise.
1253      * @return true if cameras and points could be estimated, false if something
1254      * failed.
1255      */
1256     private boolean estimateInitialCamerasAndPointsDIAC(final boolean isInitialPairOfViews) {
1257         // for non-initial view, keep last center and rotation
1258         if (!isInitialPairOfViews && keepLastCenterAndRotation()) {
1259             return false;
1260         }
1261 
1262         final var fundamentalMatrix = currentEstimatedFundamentalMatrix.getFundamentalMatrix();
1263 
1264         // use inlier points used for fundamental matrix estimation
1265         final var samples1 = currentEstimatedFundamentalMatrix.getLeftSamples();
1266         final var samples2 = currentEstimatedFundamentalMatrix.getRightSamples();
1267 
1268         final var points1 = new ArrayList<Point2D>();
1269         final var points2 = new ArrayList<Point2D>();
1270         final var length = samples1.size();
1271         for (var i = 0; i < length; i++) {
1272             final var sample1 = samples1.get(i);
1273             final var sample2 = samples2.get(i);
1274 
1275             final var point1 = sample1.getPoint();
1276             final var point2 = sample2.getPoint();
1277 
1278             points1.add(point1);
1279             points2.add(point2);
1280         }
1281 
1282         try {
1283             final var estimator = new DualImageOfAbsoluteConicInitialCamerasEstimator(fundamentalMatrix, points1,
1284                     points2);
1285             estimator.setPrincipalPoint(configuration.getPrincipalPointX(), configuration.getPrincipalPointY());
1286             estimator.setAspectRatio(configuration.getPairedCamerasAspectRatio());
1287             estimator.setCorrectorType(configuration.getPairedCamerasCorrectorType());
1288             estimator.setPointsTriangulated(true);
1289             estimator.setValidTriangulatedPointsMarked(configuration.getPairedCamerasMarkValidTriangulatedPoints());
1290 
1291             estimator.estimate();
1292 
1293             // store cameras
1294             final var camera1 = estimator.getEstimatedLeftCamera();
1295             final var camera2 = estimator.getEstimatedRightCamera();
1296 
1297             previousMetricEstimatedCamera = new EstimatedCamera();
1298             previousMetricEstimatedCamera.setCamera(camera1);
1299 
1300             currentMetricEstimatedCamera = new EstimatedCamera();
1301             currentMetricEstimatedCamera.setCamera(camera2);
1302 
1303             // store points
1304             final var triangulatedPoints = estimator.getTriangulatedPoints();
1305             final var validTriangulatedPoints = estimator.getValidTriangulatedPoints();
1306 
1307             metricReconstructedPoints = new ArrayList<>();
1308             final var size = triangulatedPoints.size();
1309             for (var i = 0; i < size; i++) {
1310                 final var reconstructedPoint = new ReconstructedPoint3D();
1311                 reconstructedPoint.setPoint(triangulatedPoints.get(i));
1312                 reconstructedPoint.setInlier(validTriangulatedPoints.get(i));
1313                 metricReconstructedPoints.add(reconstructedPoint);
1314             }
1315 
1316             return transformPairOfCamerasAndPoints(isInitialPairOfViews, hasAbsoluteOrientation());
1317         } catch (final Exception e) {
1318             return false;
1319         }
1320     }
1321 
1322     /**
1323      * Estimates initial cameras and reconstructed points using the essential
1324      * matrix and provided intrinsic parameters that must have been set during
1325      * offline calibration.
1326      *
1327      * @param isInitialPairOfViews true if initial pair of views is being processed,
1328      *                             false otherwise.
1329      * @return true if cameras and points could be estimated, false if something
1330      * failed.
1331      */
1332     private boolean estimateInitialCamerasAndPointsEssential(final boolean isInitialPairOfViews) {
1333         // for non-initial view, keep last center and rotation
1334         if (!isInitialPairOfViews && keepLastCenterAndRotation()) {
1335             return false;
1336         }
1337 
1338         PinholeCameraIntrinsicParameters intrinsic1 = null;
1339         PinholeCameraIntrinsicParameters intrinsic2 = null;
1340         if (configuration.areIntrinsicParametersKnown()) {
1341             //noinspection unchecked
1342             intrinsic1 = listener.onIntrinsicParametersRequested((R) this, previousViewId);
1343             //noinspection unchecked
1344             intrinsic2 = listener.onIntrinsicParametersRequested((R) this, currentViewId);
1345         }
1346 
1347         if (intrinsic1 != null && intrinsic2 != null) {
1348             return estimateInitialCamerasAndPointsEssential(intrinsic1, intrinsic2)
1349                     && transformPairOfCamerasAndPoints(isInitialPairOfViews, hasAbsoluteOrientation());
1350         } else {
1351             // missing intrinsic parameters
1352 
1353             failed = true;
1354             //noinspection unchecked
1355             listener.onFail((R) this);
1356             return false;
1357         }
1358     }
1359 
    /**
     * Estimates initial cameras and reconstructed points using the essential
     * matrix and provided intrinsic parameters that must have been set during
     * offline calibration.
     *
     * @param intrinsic1 intrinsic parameters of 1st camera.
     * @param intrinsic2 intrinsic parameters of 2nd camera.
     * @return true if cameras and points could be estimated, false if something
     * failed.
     */
    private boolean estimateInitialCamerasAndPointsEssential(
            final PinholeCameraIntrinsicParameters intrinsic1, final PinholeCameraIntrinsicParameters intrinsic2) {
        final var fundamentalMatrix = currentEstimatedFundamentalMatrix.getFundamentalMatrix();

        // use all points used for fundamental matrix estimation
        final var samples1 = currentEstimatedFundamentalMatrix.getLeftSamples();
        final var samples2 = currentEstimatedFundamentalMatrix.getRightSamples();

        final var points1 = new ArrayList<Point2D>();
        final var points2 = new ArrayList<Point2D>();
        final var length = samples1.size();
        for (var i = 0; i < length; i++) {
            final var sample1 = samples1.get(i);
            final var sample2 = samples2.get(i);

            final var point1 = sample1.getPoint();
            final var point2 = sample2.getPoint();

            points1.add(point1);
            points2.add(point2);
        }

        try {
            // estimate both cameras from the essential matrix implied by the
            // fundamental matrix and the provided intrinsics
            final var estimator = new EssentialMatrixInitialCamerasEstimator(fundamentalMatrix, intrinsic1, intrinsic2,
                    points1, points2);

            estimator.setCorrectorType(configuration.getPairedCamerasCorrectorType());
            estimator.setPointsTriangulated(true);
            estimator.setValidTriangulatedPointsMarked(configuration.getPairedCamerasMarkValidTriangulatedPoints());

            estimator.estimate();

            // store cameras
            final var camera1 = estimator.getEstimatedLeftCamera();
            final var camera2 = estimator.getEstimatedRightCamera();

            previousMetricEstimatedCamera = new EstimatedCamera();
            previousMetricEstimatedCamera.setCamera(camera1);

            currentMetricEstimatedCamera = new EstimatedCamera();
            currentMetricEstimatedCamera.setCamera(camera2);

            // store points (marking which triangulated points are valid)
            final var triangulatedPoints = estimator.getTriangulatedPoints();
            final var validTriangulatedPoints = estimator.getValidTriangulatedPoints();

            metricReconstructedPoints = new ArrayList<>();
            final var size = triangulatedPoints.size();
            for (var i = 0; i < size; i++) {
                final var reconstructedPoint = new ReconstructedPoint3D();
                reconstructedPoint.setPoint(triangulatedPoints.get(i));
                reconstructedPoint.setInlier(validTriangulatedPoints.get(i));
                metricReconstructedPoints.add(reconstructedPoint);
            }

            return true;
        } catch (final Exception e) {
            // any failure during estimation is reported as false
            return false;
        }
    }
1430 
1431 
1432     /**
1433      * Keeps center and rotation of last camera (current camera on previous view pair).
1434      *
1435      * @return false if camera and rotation were successfully kept, true otherwise.
1436      */
1437     private boolean keepLastCenterAndRotation() {
1438         // keep last metric center and rotation
1439         if (currentMetricEstimatedCamera == null || currentEuclideanEstimatedCamera == null) {
1440             return true;
1441         }
1442 
1443         final var metricCamera = currentMetricEstimatedCamera.getCamera();
1444         if (metricCamera == null) {
1445             return true;
1446         }
1447 
1448         try {
1449             // decompose camera if needed
1450             if (!metricCamera.isCameraCenterAvailable() || !metricCamera.isCameraRotationAvailable()) {
1451                 metricCamera.decompose();
1452             }
1453 
1454             lastMetricCameraCenter = metricCamera.getCameraCenter();
1455             mLastMetricCameraRotation = metricCamera.getCameraRotation();
1456 
1457         } catch (final GeometryException e) {
1458             return true;
1459         }
1460 
1461         // keep last Euclidean center and rotation
1462         final var euclideanCamera = currentEuclideanEstimatedCamera.getCamera();
1463         if (euclideanCamera == null) {
1464             return true;
1465         }
1466 
1467         try {
1468             // decompose camera if needed
1469             if (!euclideanCamera.isCameraCenterAvailable() || !euclideanCamera.isCameraRotationAvailable()) {
1470                 euclideanCamera.decompose();
1471             }
1472 
1473             lastEuclideanCameraCenter = euclideanCamera.getCameraCenter();
1474             lastEuclideanCameraRotation = euclideanCamera.getCameraRotation();
1475 
1476             return false;
1477         } catch (final GeometryException e) {
1478             return true;
1479         }
1480     }
1481 
1482     /**
1483      * Fixes fundamental matrix to account for principal point different from
1484      * zero when using DAQ estimation.
1485      *
1486      * @param fundamentalMatrix            fundamental matrix to be fixed.
1487      * @param intrinsicZeroPrincipalPoint1 intrinsic parameters of camera 1
1488      *                                     assuming zero principal point.
1489      * @param intrinsicZeroPrincipalPoint2 intrinsic parameters of camera 2
1490      *                                     assuming zero principal point.
1491      * @param intrinsicPrincipalPoint1     intrinsic parameters of camera 1 using
1492      *                                     proper principal point.
1493      * @param intrinsicPrincipalPoint2     intrinsic parameters of camera 2 using
1494      *                                     proper principal point.
1495      * @throws EpipolarException if something fails.
1496      * @throws NotReadyException never happens.
1497      */
1498     private void fixFundamentalMatrix(final FundamentalMatrix fundamentalMatrix,
1499                                       final PinholeCameraIntrinsicParameters intrinsicZeroPrincipalPoint1,
1500                                       final PinholeCameraIntrinsicParameters intrinsicZeroPrincipalPoint2,
1501                                       final PinholeCameraIntrinsicParameters intrinsicPrincipalPoint1,
1502                                       final PinholeCameraIntrinsicParameters intrinsicPrincipalPoint2)
1503             throws EpipolarException, NotReadyException {
1504 
1505         // first compute essential matrix as E = K2a'F*K1a
1506         final var essential = new EssentialMatrix(fundamentalMatrix, intrinsicZeroPrincipalPoint1,
1507                 intrinsicZeroPrincipalPoint2);
1508         final var fixedFundamentalMatrix = essential.toFundamentalMatrix(intrinsicPrincipalPoint1,
1509                 intrinsicPrincipalPoint2);
1510         fixedFundamentalMatrix.normalize();
1511         currentEstimatedFundamentalMatrix.setFundamentalMatrix(fixedFundamentalMatrix);
1512         currentEstimatedFundamentalMatrix.setCovariance(null);
1513     }
1514 }