/*
 * Copyright (C) 2017 Alberto Irurueta Carro (alberto@irurueta.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.irurueta.ar.sfm;

import com.irurueta.ar.slam.AbsoluteOrientationBaseSlamEstimator;
import com.irurueta.ar.slam.BaseCalibrationData;
import com.irurueta.geometry.MetricTransformation3D;
import com.irurueta.geometry.Point3D;
import com.irurueta.geometry.Rotation3D;

import java.util.ArrayList;

/**
 * Base class in charge of estimating cameras and 3D reconstructed points from sparse
 * image point correspondences in multiple views, and also in charge of estimating the
 * overall scene scale and absolute orientation by means of SLAM (Simultaneous
 * Localization And Mapping) using data obtained from sensors such as accelerometers or
 * gyroscopes.
 * NOTE: absolute orientation SLAM estimators are not very accurate when estimating the
 * orientation state; for that reason the initial orientation is taken into account.
 *
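 * <p>
 * A minimal usage sketch. The concrete subclass, configuration and listener shown below
 * are only assumptions made for illustration;
 * {@link #updateOrientationSample(long, Rotation3D)} is the only method defined by this
 * class:
 * <pre>{@code
 * // assumed concrete re-constructor, configuration and listener
 * var reconstructor = new AbsoluteOrientationSlamSparseReconstructor(configuration, listener);
 * // feed absolute orientation samples obtained from a sensor (timestamp in nanoseconds)
 * reconstructor.updateOrientationSample(timestampNanos, orientation);
 * }</pre>
 *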
 * @param <D> type defining calibration data.
 * @param <C> type of configuration.
 * @param <R> type of re-constructor.
 * @param <L> type of listener.
 * @param <S> type of SLAM estimator.
 */
public abstract class BaseAbsoluteOrientationSlamSparseReconstructor<
        D extends BaseCalibrationData,
        C extends BaseSlamSparseReconstructorConfiguration<D, C>,
        R extends BaseSlamSparseReconstructor<D, C, R, L, S>,
        L extends BaseSlamSparseReconstructorListener<R>,
        S extends AbsoluteOrientationBaseSlamEstimator<D>> extends
        BaseSlamSparseReconstructor<D, C, R, L, S> {

    /**
     * First sample of orientation received.
     */
    private Rotation3D firstOrientation;

    /**
     * Inverse of first orientation.
     */
    private Rotation3D invFirstOrientation;

    /**
     * Constructor.
     *
     * @param configuration configuration for this re-constructor.
     * @param listener      listener in charge of handling events.
     * @throws NullPointerException if listener or configuration is not
     *                              provided.
     */
    protected BaseAbsoluteOrientationSlamSparseReconstructor(final C configuration, final L listener) {
        super(configuration, listener);
    }

    /**
     * Provides a new orientation sample to update SLAM estimator.
     * If re-constructor is not running, calling this method has no effect.
     *
     * @param timestamp   timestamp of orientation sample since epoch time and
     *                    expressed in nanoseconds.
     * @param orientation new orientation.
     */
    public void updateOrientationSample(final long timestamp, final Rotation3D orientation) {
        if (slamEstimator != null) {
            slamEstimator.updateOrientationSample(timestamp, orientation);
        }
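        // keep a copy of the first sampled orientation and its inverse; the inverse is later
        // used to rotate the metric reconstruction into the absolute (sensor) reference frame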
        if (firstOrientation == null) {
            // make a copy of orientation
            firstOrientation = orientation.toQuaternion();
            invFirstOrientation = firstOrientation.inverseRotationAndReturnNew();
        }
    }

    /**
     * Updates scene scale and orientation using SLAM data.
     *
     * @param isInitialPairOfViews true if initial pair of views is being processed, false otherwise.
     * @return true if scale was successfully updated, false otherwise.
     */
    @SuppressWarnings("DuplicatedCode")
    protected boolean updateScaleAndOrientation(final boolean isInitialPairOfViews) {

        try {
            final var metricCamera1 = previousMetricEstimatedCamera.getCamera();
            final var metricCamera2 = currentMetricEstimatedCamera.getCamera();

            double slamPosX;
            double slamPosY;
            double slamPosZ;
            double scale;
            if (isInitialPairOfViews) {
                // obtain baseline (camera separation) from SLAM estimator data
                slamPosX = slamEstimator.getStatePositionX();
                slamPosY = slamEstimator.getStatePositionY();
                slamPosZ = slamEstimator.getStatePositionZ();

                slamPosition.setInhomogeneousCoordinates(slamPosX, slamPosY, slamPosZ);

                metricCamera1.decompose(false, true);
                metricCamera2.decompose(false, true);

                final var center1 = metricCamera1.getCameraCenter();
                final var center2 = metricCamera2.getCameraCenter();

                final var baseline = center1.distanceTo(slamPosition);
                final var estimatedBaseline = center1.distanceTo(center2);

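                // the metric reconstruction is only defined up to scale; the ratio between the
                // camera separation obtained from the SLAM state and the separation of the
                // metric camera centers recovers the real scale of the scene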
                scale = currentScale = baseline / estimatedBaseline;
            } else {
                scale = currentScale;
            }

            // R1' = R1*Rdiff
            // Rdiff = R1^T*R1'

            // where R1' is the desired orientation (obtained by sampling a
            // sensor)
            // and R1 is always the identity for the 1st camera.
            // Hence R1' = Rdiff

            // t1' is the desired translation which is zero for the 1st
            // camera.

            // We want: P1' = K*[R1' t1'] = K*[R1' 0]
            // And we have P1 = K[I 0]

            // We need a transformation T so that:
            // P1' = P1*T^-1 = K[I 0][R1' 0]
            //                       [0   1]
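            // (cameras are transformed with T^-1 because reconstructed points are transformed
            // as X' = T*X, which leaves the projected image points x = P*X unchanged)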

            // Hence: T^-1 = [R1' 0]
            //               [0   1]

            // or T = [R1'^T 0]
            //        [0     1]

            // because we are also applying a transformation of scale s,
            // the combination of both transformations is
            // T = [s*R1'^T 0]
            //     [0       1]

            final var scaleAndOrientationTransformation = new MetricTransformation3D(scale);
            scaleAndOrientationTransformation.setRotation(invFirstOrientation);

            // update scale and orientation of cameras
            final var euclideanCamera1 = scaleAndOrientationTransformation.transformAndReturnNew(metricCamera1);
            final var euclideanCamera2 = scaleAndOrientationTransformation.transformAndReturnNew(metricCamera2);

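            // feed the estimated Euclidean position of the 2nd camera back into the SLAM
            // estimator so its state is corrected with the configured position covariance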
            euclideanCamera2.decompose(false, true);
            slamEstimator.correctWithPositionMeasure(euclideanCamera2.getCameraCenter(),
                    configuration.getCameraPositionCovariance());

            if (!isInitialPairOfViews) {
                slamPosX = slamEstimator.getStatePositionX();
                slamPosY = slamEstimator.getStatePositionY();
                slamPosZ = slamEstimator.getStatePositionZ();
                slamPosition.setInhomogeneousCoordinates(slamPosX, slamPosY, slamPosZ);

                // adjust scale of current camera
                final var euclideanCenter2 = euclideanCamera2.getCameraCenter();

                final var euclideanPosX = euclideanCenter2.getInhomX();
                final var euclideanPosY = euclideanCenter2.getInhomY();
                final var euclideanPosZ = euclideanCenter2.getInhomZ();

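                // the per-axis ratio between the Euclidean camera center and the corrected SLAM
                // position measures the remaining scale drift; the three axes are averaged to
                // refine the current scale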
                final var scaleVariationX = euclideanPosX / slamPosX;
                final var scaleVariationY = euclideanPosY / slamPosY;
                final var scaleVariationZ = euclideanPosZ / slamPosZ;

                final var scaleVariation = (scaleVariationX + scaleVariationY + scaleVariationZ) / 3.0;
                scale *= scaleVariation;
                currentScale = scale;
                scaleAndOrientationTransformation.setScale(currentScale);

                // update camera
                scaleAndOrientationTransformation.transform(metricCamera2, euclideanCamera2);
            }
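            // covariances are expressed in squared units, so they are rescaled by the square
            // of the applied scale factor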
            final var sqrScale = scale * scale;

            previousEuclideanEstimatedCamera = new EstimatedCamera();
            previousEuclideanEstimatedCamera.setCamera(euclideanCamera1);
            previousEuclideanEstimatedCamera.setViewId(previousMetricEstimatedCamera.getViewId());
            previousEuclideanEstimatedCamera.setQualityScore(previousMetricEstimatedCamera.getQualityScore());
            if (previousMetricEstimatedCamera.getCovariance() != null) {
                previousEuclideanEstimatedCamera.setCovariance(previousMetricEstimatedCamera.getCovariance()
                        .multiplyByScalarAndReturnNew(sqrScale));
            }

            currentEuclideanEstimatedCamera = new EstimatedCamera();
            currentEuclideanEstimatedCamera.setCamera(euclideanCamera2);
            currentEuclideanEstimatedCamera.setViewId(currentMetricEstimatedCamera.getViewId());
            currentEuclideanEstimatedCamera.setQualityScore(currentMetricEstimatedCamera.getQualityScore());
            if (currentMetricEstimatedCamera.getCovariance() != null) {
                currentEuclideanEstimatedCamera.setCovariance(currentMetricEstimatedCamera.getCovariance()
                        .multiplyByScalarAndReturnNew(sqrScale));
            }

            // update scale and orientation of reconstructed points
            final var numPoints = activeMetricReconstructedPoints.size();
            final var metricReconstructedPoints3D = new ArrayList<Point3D>();
            for (final var reconstructedPoint : activeMetricReconstructedPoints) {
                metricReconstructedPoints3D.add(reconstructedPoint.getPoint());
            }

            final var euclideanReconstructedPoints3D = scaleAndOrientationTransformation.transformPointsAndReturnNew(
                    metricReconstructedPoints3D);

            // set scaled points into result
            activeEuclideanReconstructedPoints = new ArrayList<>();
            ReconstructedPoint3D euclideanPoint;
            ReconstructedPoint3D metricPoint;
            for (var i = 0; i < numPoints; i++) {
                metricPoint = activeMetricReconstructedPoints.get(i);

                euclideanPoint = new ReconstructedPoint3D();
                euclideanPoint.setId(metricPoint.getId());
                euclideanPoint.setPoint(euclideanReconstructedPoints3D.get(i));
                euclideanPoint.setInlier(metricPoint.isInlier());
                euclideanPoint.setQualityScore(metricPoint.getQualityScore());
                if (metricPoint.getCovariance() != null) {
                    euclideanPoint.setCovariance(metricPoint.getCovariance().multiplyByScalarAndReturnNew(sqrScale));
                }
                euclideanPoint.setColorData(metricPoint.getColorData());

                activeEuclideanReconstructedPoints.add(euclideanPoint);
            }

            return true;

        } catch (final Exception e) {
            failed = true;
            //noinspection unchecked
            listener.onFail((R) this);

            return false;
        }
    }
}