# CPD Results
The following document contains the results of PMD's CPD 7.14.0.
## Duplications
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/KruppaDualImageOfAbsoluteConicEstimator.java | 482 |
com/irurueta/ar/calibration/estimators/KruppaDualImageOfAbsoluteConicEstimator.java | 838 |
```java
private void estimateUnknownAspectRatio(final DualImageOfAbsoluteConic result)
        throws KruppaDualImageOfAbsoluteConicEstimatorException {
    try {
        final var x0 = principalPointX;
        final var y0 = principalPointY;

        // SVD decompose fundamental matrix
        fundamentalMatrix.normalize();
        final var decomposer = new SingularValueDecomposer(fundamentalMatrix.getInternalMatrix());
        decomposer.decompose();

        final var sigmas = decomposer.getSingularValues();
        final var u = decomposer.getU();
        final var v = decomposer.getV();

        final var sigma1 = sigmas[0];
        final var sigma2 = sigmas[1];

        // Column u1
        final var u11 = u.getElementAt(0, 0);
        final var u21 = u.getElementAt(1, 0);
        final var u31 = u.getElementAt(2, 0);

        // Column u2
        final var u12 = u.getElementAt(0, 1);
        final var u22 = u.getElementAt(1, 1);
        final var u32 = u.getElementAt(2, 1);

        // Column v1
        final var v11 = v.getElementAt(0, 0);
        final var v21 = v.getElementAt(1, 0);
        final var v31 = v.getElementAt(2, 0);

        // Column v2
        final var v12 = v.getElementAt(0, 1);
        final var v22 = v.getElementAt(1, 1);
        final var v32 = v.getElementAt(2, 1);

        // build Kruppa equations
        final var polyA = u12 * u11;
        final var polyB = u22 * u21;
        final var polyC = Math.pow(x0, 2.0) * u12 * u11 + x0 * y0 * u22 * u11 + x0 * u32 * u11
                + x0 * y0 * u12 * u21 + Math.pow(y0, 2.0) * u22 * u21 + y0 * u32 * u21
                + x0 * u12 * u31 + y0 * u22 * u31 + u32 * u31;
        final var polyD = Math.pow(sigma2, 2.0) * v12 * v12;
        final var polyE = Math.pow(sigma2, 2.0) * v22 * v22;
        final var polyF = Math.pow(sigma2 * x0, 2.0) * v12 * v12
                + Math.pow(sigma2, 2.0) * x0 * y0 * v22 * v12
                + Math.pow(sigma2, 2.0) * x0 * v32 * v12
                + Math.pow(sigma2, 2.0) * x0 * y0 * v12 * v22
                + Math.pow(sigma2 * y0, 2.0) * v22 * v22
                + Math.pow(sigma2, 2.0) * y0 * v32 * v22
                + Math.pow(sigma2, 2.0) * x0 * v12 * v32
                + Math.pow(sigma2, 2.0) * y0 * v22 * v32
                + Math.pow(sigma2, 2.0) * v32 * v32;
        final var polyG = u11 * u11;
        final var polyH = u21 * u21;
        final var polyI = Math.pow(x0, 2.0) * u11 * u11 + x0 * y0 * u21 * u11 + x0 * u31 * u11
                + x0 * y0 * u11 * u21 + Math.pow(y0, 2.0) * u21 * u21 + y0 * u31 * u21
                + x0 * u11 * u31 + y0 * u21 * u31 + u31 * u31;
        final var polyJ = sigma1 * sigma2 * v12 * v11;
        final var polyK = sigma1 * sigma2 * v22 * v21;
        final var polyL = sigma1 * sigma2 * Math.pow(x0, 2.0) * v12 * v11
                + sigma1 * sigma2 * x0 * y0 * v22 * v11
                + sigma1 * sigma2 * x0 * v32 * v11
                + sigma1 * sigma2 * x0 * y0 * v12 * v21
                + sigma1 * sigma2 * Math.pow(y0, 2.0) * v22 * v21
                + sigma1 * sigma2 * y0 * v32 * v21
                + sigma1 * sigma2 * x0 * v12 * v31
                + sigma1 * sigma2 * y0 * v22 * v31
                + sigma1 * sigma2 * v32 * v31;
        final var polyM = Math.pow(sigma1, 2.0) * v11 * v11;
        final var polyN = Math.pow(sigma1, 2.0) * v21 * v21;
        final var polyO = Math.pow(sigma1 * x0, 2.0) * v11 * v11
                + Math.pow(sigma1, 2.0) * x0 * y0 * v21 * v11
                + Math.pow(sigma1, 2.0) * x0 * v31 * v11
                + Math.pow(sigma1, 2.0) * x0 * y0 * v11 * v21
                + Math.pow(sigma1 * y0, 2.0) * v21 * v21
                + Math.pow(sigma1, 2.0) * y0 * v31 * v21
                + Math.pow(sigma1, 2.0) * x0 * v11 * v31
                + Math.pow(sigma1, 2.0) * y0 * v21 * v31
                + Math.pow(sigma1, 2.0) * v31 * v31;
        final var polyP = u12 * u12;
        final var polyQ = u22 * u22;
        final var polyR = Math.pow(x0, 2.0) * u12 * u12 + x0 * y0 * u22 * u12 + x0 * u32 * u12
                + x0 * y0 * u12 * u22 + Math.pow(y0, 2.0) * u22 * u22 + y0 * u32 * u22
                + x0 * u12 * u32 + y0 * u22 * u32 + u32 * u32;

        final var tmp = (polyP * polyJ + polyA * polyM) / (polyG * polyM - polyP * polyD);
```
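Each of the long coefficient sums above factors as a product of inner products with the homogeneous principal point (x0, y0, 1); for example, polyC equals (x0·u12 + y0·u22 + u32)·(x0·u11 + y0·u21 + u31), and polyF equals sigma2²·(x0·v12 + y0·v22 + v32)². A small helper along these lines (hypothetical, not part of the library) could remove most of the duplicated coefficient setup flagged by both occurrences:

```java
// Hypothetical helper, not library code: inner product of a matrix column
// (c1, c2, c3) with the homogeneous principal point (x0, y0, 1).
private static double principalDot(final double c1, final double c2, final double c3,
                                   final double x0, final double y0) {
    return x0 * c1 + y0 * c2 + c3;
}

// e.g. polyC == principalDot(u12, u22, u32, x0, y0) * principalDot(u11, u21, u31, x0, y0)
// and  polyF == Math.pow(sigma2 * principalDot(v12, v22, v32, x0, y0), 2.0)
```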
File | Line |
---|---|
com/irurueta/ar/sfm/BaseSparseReconstructorConfiguration.java | 835 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructorConfiguration.java | 410 |
```java
protected BaseSparseReconstructorConfiguration() {
}

/**
 * Gets method to use for non-robust fundamental matrix estimation.
 * This is only used when general scenes are allowed.
 *
 * @return method to use for non-robust fundamental matrix estimation.
 */
public FundamentalMatrixEstimatorMethod getNonRobustFundamentalMatrixEstimatorMethod() {
    return mNonRobustFundamentalMatrixEstimatorMethod;
}

/**
 * Sets method to use for non-robust fundamental matrix estimation.
 * This is only used when general scenes are allowed.
 *
 * @param method method to use for non-robust fundamental matrix estimation.
 * @return this instance so that method can be easily chained.
 */
public T setNonRobustFundamentalMatrixEstimatorMethod(final FundamentalMatrixEstimatorMethod method) {
    mNonRobustFundamentalMatrixEstimatorMethod = method;
    //noinspection unchecked
    return (T) this;
}

/**
 * Gets method to use for robust fundamental matrix estimation.
 * This is only used when general scenes are allowed.
 *
 * @return method to use for robust fundamental matrix estimation.
 */
public RobustEstimatorMethod getRobustFundamentalMatrixEstimatorMethod() {
    return mRobustFundamentalMatrixEstimatorMethod;
}

/**
 * Sets method to use for robust fundamental matrix estimation.
 * This is only used when general scenes are allowed.
 *
 * @param method method to use for robust fundamental matrix estimation.
 * @return this instance so that method can be easily chained.
 */
public T setRobustFundamentalMatrixEstimatorMethod(final RobustEstimatorMethod method) {
    mRobustFundamentalMatrixEstimatorMethod = method;
    //noinspection unchecked
    return (T) this;
}

/**
 * Indicates whether estimated fundamental matrix is refined among all found inliers.
 * This is only used when general scenes are allowed.
 *
 * @return true if fundamental matrix is refined, false otherwise.
 */
public boolean isFundamentalMatrixRefined() {
    return refineFundamentalMatrix;
}

/**
 * Specifies whether estimated fundamental matrix is refined among all found inliers.
 * This is only used when general scenes are allowed.
 *
 * @param refineFundamentalMatrix true if fundamental matrix is refined, false otherwise.
 * @return this instance so that method can be easily chained.
 */
public T setFundamentalMatrixRefined(final boolean refineFundamentalMatrix) {
    this.refineFundamentalMatrix = refineFundamentalMatrix;
    //noinspection unchecked
    return (T) this;
}

/**
 * Indicates whether covariance of estimated fundamental matrix is kept after the estimation.
 * This is only used when general scenes are allowed.
 *
 * @return true if covariance is kept, false otherwise.
 */
public boolean isFundamentalMatrixCovarianceKept() {
    return keepFundamentalMatrixCovariance;
}

/**
 * Specifies whether covariance of estimated fundamental matrix is kept after the estimation.
 * This is only used when general scenes are allowed.
 *
 * @param keepFundamentalMatrixCovariance true if covariance is kept, false otherwise.
 * @return this instance so that method can be easily chained.
 */
public T setFundamentalMatrixCovarianceKept(final boolean keepFundamentalMatrixCovariance) {
    this.keepFundamentalMatrixCovariance = keepFundamentalMatrixCovariance;
    //noinspection unchecked
    return (T) this;
}

/**
 * Gets confidence of robustly estimated fundamental matrix.
 * This is only used when general scenes are allowed.
 *
 * @return confidence of robustly estimated fundamental matrix.
 */
public double getFundamentalMatrixConfidence() {
    return fundamentalMatrixConfidence;
}

/**
 * Sets confidence of robustly estimated fundamental matrix.
 * This is only used when general scenes are allowed.
 *
 * @param fundamentalMatrixConfidence confidence of robustly estimated fundamental matrix.
 * @return this instance so that method can be easily chained.
 */
public T setFundamentalMatrixConfidence(final double fundamentalMatrixConfidence) {
    this.fundamentalMatrixConfidence = fundamentalMatrixConfidence;
    //noinspection unchecked
    return (T) this;
}

/**
 * Gets maximum number of iterations to robustly estimate fundamental matrix.
 * This is only used when general scenes are allowed.
 *
 * @return maximum number of iterations to robustly estimate fundamental matrix.
 */
public int getFundamentalMatrixMaxIterations() {
    return fundamentalMatrixMaxIterations;
}

/**
 * Sets maximum number of iterations to robustly estimate fundamental matrix.
 * This is only used when general scenes are allowed.
 *
 * @param fundamentalMatrixMaxIterations maximum number of iterations to robustly
 * estimate fundamental matrix.
 * @return this instance so that method can be easily chained.
 */
public T setFundamentalMatrixMaxIterations(final int fundamentalMatrixMaxIterations) {
    this.fundamentalMatrixMaxIterations = fundamentalMatrixMaxIterations;
    //noinspection unchecked
    return (T) this;
}

/**
 * Gets threshold to determine whether samples for robust fundamental matrix estimation
 * are inliers or not.
 * This is only used when general scenes are allowed.
 *
 * @return threshold to determine whether samples for robust fundamental matrix
 * estimation are inliers or not.
 */
public double getFundamentalMatrixThreshold() {
    return fundamentalMatrixThreshold;
}

/**
 * Sets threshold to determine whether samples for robust fundamental matrix estimation
 * are inliers or not.
 * This is only used when general scenes are allowed.
 *
 * @param fundamentalMatrixThreshold threshold to determine whether samples for robust
 * fundamental matrix estimation are inliers or not.
 * @return this instance so that method can be easily chained.
 */
public T setFundamentalMatrixThreshold(final double fundamentalMatrixThreshold) {
    this.fundamentalMatrixThreshold = fundamentalMatrixThreshold;
    //noinspection unchecked
    return (T) this;
}

/**
 * Indicates whether inliers must be kept during robust fundamental matrix estimation.
 * This is only used when general scenes are allowed.
 *
 * @return true if inliers must be kept during robust fundamental matrix estimation,
 * false otherwise.
 */
public boolean getFundamentalMatrixComputeAndKeepInliers() {
    return fundamentalMatrixComputeAndKeepInliers;
}

/**
 * Specifies whether inliers must be kept during robust fundamental matrix estimation.
 * This is only used when general scenes are allowed.
 *
 * @param fundamentalMatrixComputeAndKeepInliers true if inliers must be kept during
 * robust fundamental matrix estimation, false otherwise.
 * @return this instance so that method can be easily chained.
 */
public T setFundamentalMatrixComputeAndKeepInliers(final boolean fundamentalMatrixComputeAndKeepInliers) {
    this.fundamentalMatrixComputeAndKeepInliers = fundamentalMatrixComputeAndKeepInliers;
    //noinspection unchecked
    return (T) this;
}

/**
 * Indicates whether residuals must be computed and kept during robust fundamental
 * matrix estimation.
 * This is only used when general scenes are allowed.
 *
 * @return true if residuals must be computed and kept, false otherwise.
 */
public boolean getFundamentalMatrixComputeAndKeepResiduals() {
    return fundamentalMatrixComputeAndKeepResiduals;
}

/**
 * Specifies whether residuals must be computed and kept during robust fundamental
 * matrix estimation.
 * This is only used when general scenes are allowed.
 *
 * @param fundamentalMatrixComputeAndKeepResiduals true if residuals must be computed
 * and kept, false otherwise.
 * @return this instance so that method can be easily chained.
 */
public T setFundamentalMatrixComputeAndKeepResiduals(final boolean fundamentalMatrixComputeAndKeepResiduals) {
    this.fundamentalMatrixComputeAndKeepResiduals = fundamentalMatrixComputeAndKeepResiduals;
    //noinspection unchecked
    return (T) this;
}

/**
 * Gets method to use for initial cameras' estimation.
 *
 * @return method to use for initial cameras' estimation.
 */
public InitialCamerasEstimatorMethod getInitialCamerasEstimatorMethod() {
    return initialCamerasEstimatorMethod;
}

/**
 * Sets method to use for initial cameras' estimation.
 *
 * @param method method to use for initial cameras' estimation.
 * @return this instance so that method can be easily chained.
 */
public T setInitialCamerasEstimatorMethod(final InitialCamerasEstimatorMethod method) {
    initialCamerasEstimatorMethod = method;
    //noinspection unchecked
    return (T) this;
}

/**
 * Indicates whether a homogeneous point triangulator is used for point triangulation
 * when Dual Absolute Quadric (DAQ) camera initialization is used.
 *
 * @return true if homogeneous point triangulator is used, false if an inhomogeneous
 * point triangulator is used instead.
 */
public boolean getDaqUseHomogeneousPointTriangulator() {
    return daqUseHomogeneousPointTriangulator;
}

/**
 * Specifies whether a homogeneous point triangulator is used for point triangulation
 * when Dual Absolute Quadric (DAQ) camera initialization is used.
 *
 * @param daqUseHomogeneousPointTriangulator true if homogeneous point triangulator is
 * used, false if inhomogeneous point triangulator is used instead.
 * @return this instance so that method can be easily chained.
 */
public T setDaqUseHomogeneousPointTriangulator(final boolean daqUseHomogeneousPointTriangulator) {
    this.daqUseHomogeneousPointTriangulator = daqUseHomogeneousPointTriangulator;
    //noinspection unchecked
    return (T) this;
}

/**
 * Gets aspect ratio for initial cameras estimation using DAQ or DIAC methods.
 *
 * @return aspect ratio for initial cameras using DAQ or DIAC methods.
 */
public double getInitialCamerasAspectRatio() {
    return initialCamerasAspectRatio;
}

/**
 * Sets aspect ratio for initial cameras using DAQ or DIAC methods.
 *
 * @param initialCamerasAspectRatio aspect ratio for initial cameras using DAQ or DIAC
 * methods.
 * @return this instance so that method can be easily chained.
 */
public T setInitialCamerasAspectRatio(final double initialCamerasAspectRatio) {
    this.initialCamerasAspectRatio = initialCamerasAspectRatio;
    //noinspection unchecked
    return (T) this;
}

/**
 * Gets horizontal principal point value to use for initial cameras estimation using
 * DIAC or DAQ methods.
 *
 * @return horizontal principal point value to use for initial cameras estimation using
 * DIAC or DAQ methods.
 */
public double getPrincipalPointX() {
    return principalPointX;
}

/**
 * Sets horizontal principal point value to use for initial cameras estimation using
 * DIAC or DAQ methods.
 *
 * @param principalPointX horizontal principal point value to use for initial cameras
 * estimation using DIAC or DAQ methods.
 * @return this instance so that method can be easily chained.
 */
public T setPrincipalPointX(final double principalPointX) {
    this.principalPointX = principalPointX;
    //noinspection unchecked
    return (T) this;
}

/**
 * Gets vertical principal point value to use for initial cameras estimation using
 * DIAC or DAQ methods.
 *
 * @return vertical principal point value to use for initial cameras estimation using
 * DIAC or DAQ methods.
 */
public double getPrincipalPointY() {
    return principalPointY;
}

/**
 * Sets vertical principal point value to use for initial cameras estimation using
 * DIAC or DAQ methods.
 *
 * @param principalPointY vertical principal point value to use for initial cameras
 * estimation using DIAC or DAQ methods.
 * @return this instance so that method can be easily chained.
 */
public T setPrincipalPointY(final double principalPointY) {
    this.principalPointY = principalPointY;
    //noinspection unchecked
    return (T) this;
}

/**
 * Gets corrector type to use for point triangulation when initial cameras are being
 * estimated using either DIAC or essential matrix methods, or null if no corrector
 * is used.
 *
 * @return corrector type to use for point triangulation when initial cameras are being
 * estimated using either DIAC or essential matrix methods, or null if no corrector
 * is used.
 */
public CorrectorType getInitialCamerasCorrectorType() {
    return initialCamerasCorrectorType;
}

/**
 * Sets corrector type to use for point triangulation when initial cameras are being
 * estimated using either DIAC or essential matrix methods, or null if no corrector
 * is used.
 *
 * @param type corrector type to use for point triangulation when initial cameras are
 * being estimated using either DIAC or essential matrix methods, or null if no
 * corrector is used.
 * @return this instance so that method can be easily chained.
 */
public T setInitialCamerasCorrectorType(final CorrectorType type) {
    initialCamerasCorrectorType = type;
    //noinspection unchecked
    return (T) this;
}

/**
 * Gets value indicating whether valid triangulated points are marked during initial
 * cameras estimation using either DIAC or essential matrix methods.
 *
 * @return value indicating whether valid triangulated points are marked during initial
 * cameras estimation using either DIAC or essential matrix methods.
 */
public boolean getInitialCamerasMarkValidTriangulatedPoints() {
    return initialCamerasMarkValidTriangulatedPoints;
}

/**
 * Sets value indicating whether valid triangulated points are marked during initial
 * cameras estimation using either DIAC or essential matrix methods.
 *
 * @param initialCamerasMarkValidTriangulatedPoints value indicating whether valid
 * triangulated points are marked during initial cameras estimation using either DIAC
 * or essential matrix methods.
 * @return this instance so that method can be easily chained.
 */
public T setInitialCamerasMarkValidTriangulatedPoints(final boolean initialCamerasMarkValidTriangulatedPoints) {
    this.initialCamerasMarkValidTriangulatedPoints = initialCamerasMarkValidTriangulatedPoints;
    //noinspection unchecked
    return (T) this;
}

/**
 * Gets intrinsic parameters of first camera estimated using the essential matrix method.
 *
 * @return parameters of first camera estimated using the essential matrix method.
 */
public PinholeCameraIntrinsicParameters getInitialIntrinsic1() {
    return initialIntrinsic1;
}

/**
 * Sets intrinsic parameters of first camera estimated using the essential matrix method.
 *
 * @param initialIntrinsic1 parameters of first camera estimated using the essential
 * matrix method.
 * @return this instance so that method can be easily chained.
 */
public T setInitialIntrinsic1(final PinholeCameraIntrinsicParameters initialIntrinsic1) {
    this.initialIntrinsic1 = initialIntrinsic1;
    //noinspection unchecked
    return (T) this;
}

/**
 * Gets intrinsic parameters of second camera estimated using the essential matrix method.
 *
 * @return parameters of second camera estimated using the essential matrix method.
 */
public PinholeCameraIntrinsicParameters getInitialIntrinsic2() {
    return initialIntrinsic2;
}

/**
 * Sets intrinsic parameters of second camera estimated using the essential matrix method.
 *
 * @param initialIntrinsic2 parameters of second camera estimated using the essential
 * matrix method.
 * @return this instance so that method can be easily chained.
 */
public T setInitialIntrinsic2(final PinholeCameraIntrinsicParameters initialIntrinsic2) {
    this.initialIntrinsic2 = initialIntrinsic2;
    //noinspection unchecked
    return (T) this;
}

/**
 * Indicates whether a general scene (points lying in a general 3D position) is allowed.
 * When true, an initial geometry estimation is attempted for general points.
 *
 * @return true if general scene is allowed, false otherwise.
 */
public boolean isGeneralSceneAllowed() {
    return allowGeneralScene;
}

/**
 * Specifies whether a general scene (points lying in a general 3D position) is allowed.
 * When true, an initial geometry estimation is attempted for general points.
 *
 * @param allowGeneralScene true if general scene is allowed, false otherwise.
 * @return this instance so that method can be easily chained.
 */
public T setGeneralSceneAllowed(final boolean allowGeneralScene) {
    this.allowGeneralScene = allowGeneralScene;
    //noinspection unchecked
    return (T) this;
}

/**
 * Indicates whether a planar scene (points lying in a 3D plane) is allowed or not.
 * When true, an initial geometry estimation is attempted for planar points.
 *
 * @return true if planar scene is allowed, false otherwise.
 */
public boolean isPlanarSceneAllowed() {
    return allowPlanarScene;
}

/**
 * Specifies whether a planar scene (points lying in a 3D plane) is allowed or not.
 * When true, an initial geometry estimation is attempted for planar points.
 *
 * @param allowPlanarScene true if planar scene is allowed, false otherwise.
 * @return this instance so that method can be easily chained.
 */
public T setPlanarSceneAllowed(final boolean allowPlanarScene) {
    this.allowPlanarScene = allowPlanarScene;
    //noinspection unchecked
    return (T) this;
}

/**
 * Gets robust method to use for planar homography estimation.
 * This is only used when planar scenes are allowed.
 *
 * @return robust method to use for planar homography estimation.
 */
public RobustEstimatorMethod getRobustPlanarHomographyEstimatorMethod() {
    return robustPlanarHomographyEstimatorMethod;
}

/**
 * Sets robust method to use for planar homography estimation.
 * This is only used when planar scenes are allowed.
 *
 * @param robustPlanarHomographyEstimatorMethod robust method to use for planar
 * homography estimation.
 * @return this instance so that method can be easily chained.
 */
public T setRobustPlanarHomographyEstimatorMethod(
        final RobustEstimatorMethod robustPlanarHomographyEstimatorMethod) {
    this.robustPlanarHomographyEstimatorMethod = robustPlanarHomographyEstimatorMethod;
    //noinspection unchecked
    return (T) this;
}

/**
 * Indicates whether planar homography is refined using all found inliers or not.
 * This is only used when planar scenes are allowed.
 *
 * @return true if planar homography is refined, false otherwise.
 */
public boolean isPlanarHomographyRefined() {
    return refinePlanarHomography;
}

/**
 * Specifies whether planar homography is refined using all found inliers or not.
 * This is only used when planar scenes are allowed.
 *
 * @param refinePlanarHomography true if planar homography must be refined, false
 * otherwise.
 * @return this instance so that method can be easily chained.
 */
public T setPlanarHomographyRefined(final boolean refinePlanarHomography) {
    this.refinePlanarHomography = refinePlanarHomography;
    //noinspection unchecked
    return (T) this;
}

/**
 * Indicates whether planar homography covariance is kept after estimation.
 * This is only used when planar scenes are allowed.
 *
 * @return true if planar homography covariance is kept, false otherwise.
 */
public boolean isPlanarHomographyCovarianceKept() {
    return keepPlanarHomographyCovariance;
}

/**
 * Specifies whether planar homography covariance is kept after estimation.
 * This is only used when planar scenes are allowed.
 *
 * @param keepPlanarHomographyCovariance true if planar homography covariance is kept,
 * false otherwise.
 * @return this instance so that method can be easily chained.
 */
public T setPlanarHomographyCovarianceKept(final boolean keepPlanarHomographyCovariance) {
    this.keepPlanarHomographyCovariance = keepPlanarHomographyCovariance;
    //noinspection unchecked
    return (T) this;
}

/**
 * Gets confidence of robustly estimated planar homography. By default, this is 99%.
 * This is only used when planar scenes are allowed.
 *
 * @return confidence of robustly estimated planar homography.
 */
public double getPlanarHomographyConfidence() {
    return planarHomographyConfidence;
}

/**
 * Sets confidence of robustly estimated planar homography. By default, this is 99%.
 * This is only used when planar scenes are allowed.
 *
 * @param planarHomographyConfidence confidence of robustly estimated planar homography.
 * @return this instance so that method can be easily chained.
 */
public T setPlanarHomographyConfidence(final double planarHomographyConfidence) {
    this.planarHomographyConfidence = planarHomographyConfidence;
    //noinspection unchecked
    return (T) this;
}

/**
 * Gets maximum number of iterations to make while robustly estimating planar
 * homography. By default, this is 5000.
 * This is only used when planar scenes are allowed.
 *
 * @return maximum number of iterations to make while robustly estimating planar
 * homography.
 */
public int getPlanarHomographyMaxIterations() {
    return planarHomographyMaxIterations;
}

/**
 * Sets maximum number of iterations to make while robustly estimating planar
 * homography. By default, this is 5000.
 * This is only used when planar scenes are allowed.
 *
 * @param planarHomographyMaxIterations maximum number of iterations to make while
 * robustly estimating planar homography.
 * @return this instance so that method can be easily chained.
 */
public T setPlanarHomographyMaxIterations(final int planarHomographyMaxIterations) {
    this.planarHomographyMaxIterations = planarHomographyMaxIterations;
    //noinspection unchecked
    return (T) this;
}

/**
 * Gets threshold to determine whether samples for robust projective 2D transformation
 * estimation are inliers or not.
 * This is only used when planar scenes are allowed.
 *
 * @return threshold to robustly estimate projective 2D transformation.
 */
public double getPlanarHomographyThreshold() {
    return planarHomographyThreshold;
}

/**
 * Sets threshold to determine whether samples for robust projective 2D transformation
 * estimation are inliers or not.
 * This is only used when planar scenes are allowed.
 *
 * @param planarHomographyThreshold threshold to robustly estimate projective 2D
 * transformation.
 * @return this instance so that method can be easily chained.
 */
public T setPlanarHomographyThreshold(final double planarHomographyThreshold) {
    this.planarHomographyThreshold = planarHomographyThreshold;
    //noinspection unchecked
    return (T) this;
}

/**
 * Gets value indicating that inlier data is kept after robust planar homography
 * estimation.
 * This is only used when planar scenes are allowed.
 *
 * @return true if inlier data is kept, false otherwise.
 */
public boolean getPlanarHomographyComputeAndKeepInliers() {
    return planarHomographyComputeAndKeepInliers;
}

/**
 * Specifies whether inlier data is kept after robust planar homography estimation.
 * This is only used when planar scenes are allowed.
 *
 * @param planarHomographyComputeAndKeepInliers true if inlier data is kept, false
 * otherwise.
 * @return this instance so that method can be easily chained.
 */
public T setPlanarHomographyComputeAndKeepInliers(final boolean planarHomographyComputeAndKeepInliers) {
    this.planarHomographyComputeAndKeepInliers = planarHomographyComputeAndKeepInliers;
    //noinspection unchecked
    return (T) this;
}

/**
 * Gets value indicating that residual data is kept after robust planar homography
 * estimation.
 * This is only used when planar scenes are allowed.
 *
 * @return true if residual data is kept, false otherwise.
 */
public boolean getPlanarHomographyComputeAndKeepResiduals() {
    return planarHomographyComputeAndKeepResiduals;
}

/**
 * Sets value indicating that residual data is kept after robust planar homography
 * estimation.
 * This is only used when planar scenes are allowed.
 *
 * @param planarHomographyComputeAndKeepResiduals true if residual data is kept, false
 * otherwise.
 * @return this instance so that method can be easily chained.
 */
public T setPlanarHomographyComputeAndKeepResiduals(final boolean planarHomographyComputeAndKeepResiduals) {
    this.planarHomographyComputeAndKeepResiduals = planarHomographyComputeAndKeepResiduals;
    //noinspection unchecked
    return (T) this;
}
```
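All of the duplicated accessors in both configuration classes follow the same self-typed fluent-setter pattern, so in principle they could move to a common ancestor. A minimal sketch of the pattern (class name is illustrative, not library API):

```java
// Illustrative sketch of the self-typed fluent configuration pattern the two
// classes duplicate; the class name is hypothetical.
public abstract class FluentConfiguration<T extends FluentConfiguration<T>> {

    // default confidence of 99%, as described in the javadoc above
    private double planarHomographyConfidence = 0.99;

    public double getPlanarHomographyConfidence() {
        return planarHomographyConfidence;
    }

    public T setPlanarHomographyConfidence(final double confidence) {
        this.planarHomographyConfidence = confidence;
        // safe as long as each subclass declares itself as T
        //noinspection unchecked
        return (T) this;
    }
}
```

A subclass declares itself as `class MyConfig extends FluentConfiguration<MyConfig>`, which is what lets every inherited setter return the concrete type for chaining.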
File | Line |
---|---|
com/irurueta/ar/slam/BaseSlamCalibrator.java | 379 |
com/irurueta/ar/slam/BaseSlamEstimator.java | 644 |
```java
}

/**
 * Indicates whether accumulation of samples is enabled or not.
 *
 * @return true if accumulation of samples is enabled, false otherwise.
 */
public boolean isAccumulationEnabled() {
    return accumulationEnabled;
}

/**
 * Specifies whether accumulation of samples is enabled or not.
 *
 * @param accumulationEnabled true if accumulation of samples is enabled, false
 * otherwise.
 */
public void setAccumulationEnabled(final boolean accumulationEnabled) {
    this.accumulationEnabled = accumulationEnabled;
}

/**
 * Gets timestamp expressed in nanoseconds since the epoch time of the last
 * accelerometer sample, or -1 if no sample has been set yet.
 *
 * @return timestamp expressed in nanoseconds since the epoch time of the last
 * accelerometer sample, or -1.
 */
public long getAccelerometerTimestampNanos() {
    return accelerometerTimestampNanos;
}

/**
 * Gets timestamp expressed in nanoseconds since the epoch time of the last gyroscope
 * sample, or -1 if no sample has been set yet.
 *
 * @return timestamp expressed in nanoseconds since the epoch time of the last
 * gyroscope sample, or -1.
 */
public long getGyroscopeTimestampNanos() {
    return gyroscopeTimestampNanos;
}

/**
 * Gets number of accelerometer samples accumulated since last full sample.
 *
 * @return number of accelerometer samples accumulated since last full sample.
 */
public int getAccumulatedAccelerometerSamples() {
    return accumulatedAccelerometerSamples;
}

/**
 * Gets number of gyroscope samples accumulated since last full sample.
 *
 * @return number of gyroscope samples accumulated since last full sample.
 */
public int getAccumulatedGyroscopeSamples() {
    return accumulatedGyroscopeSamples;
}

/**
 * Indicates whether the accelerometer sample has been received since the last full
 * sample (accelerometer + gyroscope).
 *
 * @return true if accelerometer sample has been received, false otherwise.
 */
public boolean isAccelerometerSampleReceived() {
    return accumulatedAccelerometerSamples > 0;
}

/**
 * Indicates whether the gyroscope sample has been received since the last full sample
 * (accelerometer + gyroscope).
 *
 * @return true if gyroscope sample has been received, false otherwise.
 */
public boolean isGyroscopeSampleReceived() {
    return accumulatedGyroscopeSamples > 0;
}

/**
 * Indicates whether a full sample (accelerometer + gyroscope) has been received or not.
 *
 * @return true if full sample has been received, false otherwise.
 */
public boolean isFullSampleAvailable() {
    return isAccelerometerSampleReceived() && isGyroscopeSampleReceived();
}

/**
 * Gets average acceleration along x-axis accumulated since last full sample.
 * Expressed in meters per squared second (m/s^2).
 *
 * @return average acceleration along x-axis accumulated since last full sample.
 */
public double getAccumulatedAccelerationSampleX() {
    return accumulatedAccelerationSampleX;
}

/**
 * Gets average acceleration along y-axis accumulated since last full sample.
 * Expressed in meters per squared second (m/s^2).
 *
 * @return average acceleration along y-axis accumulated since last full sample.
 */
public double getAccumulatedAccelerationSampleY() {
    return accumulatedAccelerationSampleY;
}

/**
 * Gets average acceleration along z-axis accumulated since last full sample.
 * Expressed in meters per squared second (m/s^2).
 *
 * @return average acceleration along z-axis accumulated since last full sample.
 */
public double getAccumulatedAccelerationSampleZ() {
    return accumulatedAccelerationSampleZ;
}

/**
 * Gets average acceleration along x,y,z axes accumulated since last full sample.
 * Expressed in meters per squared second (m/s^2).
 *
 * @return average acceleration along x,y,z axes expressed in meters per squared
 * second (m/s^2).
 */
public double[] getAccumulatedAccelerationSample() {
    return new double[]{
            accumulatedAccelerationSampleX,
            accumulatedAccelerationSampleY,
            accumulatedAccelerationSampleZ
    };
}

/**
 * Gets average acceleration along x,y,z axes accumulated since last full sample.
 * Expressed in meters per squared second (m/s^2).
 *
 * @param result array where average acceleration along x,y,z axes will be stored.
 * @throws IllegalArgumentException if provided array does not have length 3.
 */
public void getAccumulatedAccelerationSample(final double[] result) {
    if (result.length != N_COMPONENTS_3D) {
        throw new IllegalArgumentException("result must have length 3");
    }
    result[0] = accumulatedAccelerationSampleX;
    result[1] = accumulatedAccelerationSampleY;
    result[2] = accumulatedAccelerationSampleZ;
}

/**
 * Gets average angular speed along x-axis accumulated since last full sample.
 * Expressed in radians per second (rad/s).
 *
 * @return average angular speed along x-axis expressed in radians per second (rad/s).
 */
public double getAccumulatedAngularSpeedSampleX() {
    return accumulatedAngularSpeedSampleX;
}

/**
 * Gets average angular speed along y-axis accumulated since last full sample.
 * Expressed in radians per second (rad/s).
 *
 * @return average angular speed along y-axis expressed in radians per second (rad/s).
 */
public double getAccumulatedAngularSpeedSampleY() {
    return accumulatedAngularSpeedSampleY;
}

/**
 * Gets average angular speed along z-axis accumulated since last full sample.
 * Expressed in radians per second (rad/s).
 *
 * @return average angular speed along z-axis expressed in radians per second (rad/s).
 */
public double getAccumulatedAngularSpeedSampleZ() {
    return accumulatedAngularSpeedSampleZ;
}

/**
 * Gets average angular speed along x,y,z axes accumulated since last full sample.
 * Expressed in radians per second (rad/s).
 *
 * @return average angular speed along x,y,z axes expressed in radians per second.
 */
public double[] getAccumulatedAngularSpeedSample() {
    return new double[]{
            accumulatedAngularSpeedSampleX,
            accumulatedAngularSpeedSampleY,
            accumulatedAngularSpeedSampleZ
    };
}

/**
 * Gets average angular speed along x,y,z axes accumulated since last full sample.
 * Expressed in radians per second (rad/s).
 *
 * @param result array where average angular speed along x,y,z axes will be stored.
 * @throws IllegalArgumentException if provided array does not have length 3.
 */
public void getAccumulatedAngularSpeedSample(final double[] result) {
    if (result.length != N_COMPONENTS_3D) {
        throw new IllegalArgumentException("result must have length 3");
    }
    result[0] = accumulatedAngularSpeedSampleX;
    result[1] = accumulatedAngularSpeedSampleY;
    result[2] = accumulatedAngularSpeedSampleZ;
}

/**
 * Provides a new accelerometer sample.
 * If accumulation is enabled, samples are averaged until a full sample is received.
 * When a full sample (accelerometer + gyroscope) is received, internal state gets
 * also updated.
 *
 * @param timestamp timestamp of accelerometer sample since epoch time and expressed
 * in nanoseconds.
 * @param accelerationX linear acceleration along x-axis expressed in meters per
 * squared second (m/s^2).
 * @param accelerationY linear acceleration along y-axis expressed in meters per
 * squared second (m/s^2).
 * @param accelerationZ linear acceleration along z-axis expressed in meters per
 * squared second (m/s^2).
 */
public void updateAccelerometerSample(
        final long timestamp, final float accelerationX, final float accelerationY,
        final float accelerationZ) {
    if (!isFullSampleAvailable()) {
        accelerometerTimestampNanos = timestamp;
        if (isAccumulationEnabled() && isAccelerometerSampleReceived()) {
            // accumulation enabled
            final var nextSamples = accumulatedAccelerometerSamples + 1;
            accumulatedAccelerationSampleX = (accumulatedAccelerationSampleX
                    * accumulatedAccelerometerSamples + accelerationX) / nextSamples;
            accumulatedAccelerationSampleY = (accumulatedAccelerationSampleY
                    * accumulatedAccelerometerSamples + accelerationY) / nextSamples;
            accumulatedAccelerationSampleZ = (accumulatedAccelerationSampleZ
                    * accumulatedAccelerometerSamples + accelerationZ) / nextSamples;
            accumulatedAccelerometerSamples = nextSamples;
        } else {
            // accumulation disabled
            accumulatedAccelerationSampleX = accelerationX;
            accumulatedAccelerationSampleY = accelerationY;
            accumulatedAccelerationSampleZ = accelerationZ;
            accumulatedAccelerometerSamples++;
        }
        notifyFullSampleAndResetSampleReceive();
    }
}

/**
 * Provides a new accelerometer sample.
 * If accumulation is enabled, samples are averaged until a full sample is received.
 * When a full sample (accelerometer + gyroscope) is received, internal state gets
 * also updated.
 *
 * @param timestamp timestamp of accelerometer sample since epoch time and expressed
 * in nanoseconds.
 * @param data array containing x,y,z components of linear acceleration expressed in
 * meters per squared second (m/s^2).
 * @throws IllegalArgumentException if provided array does not have length 3.
 */
public void updateAccelerometerSample(final long timestamp, final float[] data) {
    if (data.length != N_COMPONENTS_3D) {
        throw new IllegalArgumentException("acceleration must have length 3");
    }
    updateAccelerometerSample(timestamp, data[0], data[1], data[2]);
}

/**
 * Provides a new gyroscope sample.
 * If accumulation is enabled, samples are averaged until a full sample is received.
 * When a full sample (accelerometer + gyroscope) is received, internal state gets
 * also updated.
 *
 * @param timestamp timestamp of gyroscope sample since epoch time and expressed in
 * nanoseconds.
 * @param angularSpeedX angular speed of rotation along x-axis expressed in radians
 * per second (rad/s).
 * @param angularSpeedY angular speed of rotation along y-axis expressed in radians
 * per second (rad/s).
 * @param angularSpeedZ angular speed of rotation along z-axis expressed in radians
 * per second (rad/s).
 */
public void updateGyroscopeSample(
        final long timestamp, final float angularSpeedX, final float angularSpeedY,
        final float angularSpeedZ) {
    if (!isFullSampleAvailable()) {
        gyroscopeTimestampNanos = timestamp;
        if (isAccumulationEnabled() && isGyroscopeSampleReceived()) {
            // accumulation enabled
            final var nextSamples = accumulatedGyroscopeSamples + 1;
            accumulatedAngularSpeedSampleX = (accumulatedAngularSpeedSampleX
                    * accumulatedGyroscopeSamples + angularSpeedX) / nextSamples;
            accumulatedAngularSpeedSampleY = (accumulatedAngularSpeedSampleY
                    * accumulatedGyroscopeSamples + angularSpeedY) / nextSamples;
            accumulatedAngularSpeedSampleZ = (accumulatedAngularSpeedSampleZ
                    * accumulatedGyroscopeSamples + angularSpeedZ) / nextSamples;
            accumulatedGyroscopeSamples = nextSamples;
        } else {
            // accumulation disabled
            accumulatedAngularSpeedSampleX = angularSpeedX;
            accumulatedAngularSpeedSampleY = angularSpeedY;
            accumulatedAngularSpeedSampleZ = angularSpeedZ;
            accumulatedGyroscopeSamples++;
        }
        notifyFullSampleAndResetSampleReceive();
    }
}

/**
 * Provides a new gyroscope sample.
 * If accumulation is enabled, samples are averaged until a full sample is received.
 * When a full sample (accelerometer + gyroscope) is received, internal state gets
 * also updated.
 *
 * @param timestamp timestamp of gyroscope sample since epoch time and expressed in
 * nanoseconds.
 * @param data angular speed of rotation along x,y,z axes expressed in radians per
 * second (rad/s).
 * @throws IllegalArgumentException if provided array does not have length 3.
 */
public void updateGyroscopeSample(final long timestamp, final float[] data) {
    if (data.length != N_COMPONENTS_3D) {
        throw new IllegalArgumentException("angular speed must have length 3");
    }
    updateGyroscopeSample(timestamp, data[0], data[1], data[2]);
}

/**
 * Gets most recent timestamp of received partial samples (accelerometer or gyroscope).
 *
 * @return most recent timestamp of received partial sample.
 */
public long getMostRecentTimestampNanos() {
    return Math.max(accelerometerTimestampNanos, gyroscopeTimestampNanos);
}

/**
 * Gets listener in charge of handling events raised by instances of this class.
 *
 * @return listener in charge of handling events raised by instances of this class.
 */
public BaseSlamCalibratorListener<D> getListener() {
```
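The accumulation branches duplicated in both classes implement a running (incremental) mean, mean(n+1) = (n · mean(n) + x) / (n + 1), once per axis for each sensor. A shared helper (hypothetical, not library code) would collapse the six nearly identical update expressions:

```java
// Hypothetical helper, not library code: incremental (running) mean update,
// equivalent to the per-axis expressions in updateAccelerometerSample and
// updateGyroscopeSample above.
private static double accumulate(final double mean, final int count, final double sample) {
    return (mean * count + sample) / (count + 1);
}

// e.g. accumulatedAccelerationSampleX =
//         accumulate(accumulatedAccelerationSampleX, accumulatedAccelerometerSamples, accelerationX);
```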
File | Line |
---|---|
com/irurueta/ar/epipolar/estimators/EightPointsFundamentalMatrixEstimator.java | 192 |
com/irurueta/ar/epipolar/estimators/SevenPointsFundamentalMatrixEstimator.java | 238 |
```java
if (listener != null) {
    listener.onEstimateStart(this);
}

final var nPoints = leftPoints.size();

try {
    ProjectiveTransformation2D leftNormalization = null;
    ProjectiveTransformation2D rightNormalization = null;
    final List<Point2D> leftPoints;
    final List<Point2D> rightPoints;
    if (normalizePoints) {
        // normalize points on left view
        final var normalizer = new Point2DNormalizer(this.leftPoints);
        normalizer.compute();
        leftNormalization = normalizer.getTransformation();

        // normalize points on right view
        normalizer.setPoints(this.rightPoints);
        normalizer.compute();
        rightNormalization = normalizer.getTransformation();

        // normalize to increase accuracy
        leftNormalization.normalize();
        rightNormalization.normalize();

        leftPoints = leftNormalization.transformPointsAndReturnNew(this.leftPoints);
        rightPoints = rightNormalization.transformPointsAndReturnNew(this.rightPoints);
    } else {
        leftPoints = this.leftPoints;
        rightPoints = this.rightPoints;
    }

    final Matrix a;
    if (isLMSESolutionAllowed()) {
        a = new Matrix(nPoints, 9);
    } else {
        a = new Matrix(MIN_REQUIRED_POINTS, 9);
    }

    Point2D leftPoint;
    Point2D rightPoint;
    double homLeftX;
    double homLeftY;
    double homLeftW;
    double homRightX;
    double homRightY;
    double homRightW;
    double value0;
    double value1;
    double value2;
    double value3;
    double value4;
    double value5;
    double value6;
    double value7;
    double value8;
    double rowNorm;
    for (var i = 0; i < nPoints; i++) {
        leftPoint = leftPoints.get(i);
        rightPoint = rightPoints.get(i);

        // normalize points to increase accuracy
        leftPoint.normalize();
        rightPoint.normalize();

        homLeftX = leftPoint.getHomX();
        homLeftY = leftPoint.getHomY();
        homLeftW = leftPoint.getHomW();
        homRightX = rightPoint.getHomX();
        homRightY = rightPoint.getHomY();
        homRightW = rightPoint.getHomW();

        // set a row values
        value0 = homLeftX * homRightX;
        value1 = homLeftY * homRightX;
        value2 = homLeftW * homRightX;
        value3 = homLeftX * homRightY;
        value4 = homLeftY * homRightY;
        value5 = homLeftW * homRightY;
        value6 = homLeftX * homRightW;
        value7 = homLeftY * homRightW;
        value8 = homLeftW * homRightW;

        // normalize row to increase accuracy
        rowNorm = Math.sqrt(Math.pow(value0, 2.0) + Math.pow(value1, 2.0)
                + Math.pow(value2, 2.0) + Math.pow(value3, 2.0) + Math.pow(value4, 2.0)
                + Math.pow(value5, 2.0) + Math.pow(value6, 2.0) + Math.pow(value7, 2.0)
                + Math.pow(value8, 2.0));

        a.setElementAt(i, 0, value0 / rowNorm);
        a.setElementAt(i, 1, value1 / rowNorm);
        a.setElementAt(i, 2, value2 / rowNorm);
        a.setElementAt(i, 3, value3 / rowNorm);
        a.setElementAt(i, 4, value4 / rowNorm);
        a.setElementAt(i, 5, value5 / rowNorm);
        a.setElementAt(i, 6, value6 / rowNorm);
        a.setElementAt(i, 7, value7 / rowNorm);
        a.setElementAt(i, 8, value8 / rowNorm);

        if (!isLMSESolutionAllowed() && i == (MIN_REQUIRED_POINTS - 1)) {
            break;
        }
    }

    final var decomposer = new SingularValueDecomposer(a);
    decomposer.decompose();

    // if nullity of provided a matrix is not of dimension 1 (number of
    // dimensions of null-space), then epipolar geometry is degenerate
    // because there is more than one possible solution (up to scale).
    // This is typically due to co-linearities or co-planarities on
    // projected 2D points. In this case we throw an exception
    if (decomposer.getNullity() > 1) {
```
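Each row built in the loop above encodes the epipolar constraint x'ᵀFx = 0 for one correspondence, with F flattened row-major: the nine values are exactly the Kronecker product x' ⊗ x of the homogeneous right and left points. A standalone sketch of that row construction (not library code):

```java
// Standalone sketch: one design-matrix row for the epipolar constraint
// x'^T F x = 0, i.e. the Kronecker product of right (x') and left (x)
// homogeneous coordinates; equivalent to value0..value8 above.
static double[] epipolarRow(final double[] left, final double[] right) {
    final var row = new double[9];
    var k = 0;
    for (final var r : right) {     // homRightX, homRightY, homRightW
        for (final var l : left) {  // homLeftX, homLeftY, homLeftW
            row[k++] = r * l;
        }
    }
    return row;
}
```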
File | Line |
---|---|
com/irurueta/ar/sfm/LMSEHomogeneousSinglePoint3DTriangulator.java | 197 |
com/irurueta/ar/sfm/WeightedHomogeneousSinglePoint3DTriangulator.java | 347 |
```java
camera.fixCameraSign();

final var homX = point.getHomX();
final var homY = point.getHomY();
final var homW = point.getHomW();

// pick rows of camera corresponding to different planes
// (we do not normalize planes, as it would introduce errors)

// 1st camera row (p1T)
camera.verticalAxisPlane(verticalAxisPlane);
// 2nd camera row (p2T)
camera.horizontalAxisPlane(horizontalAxisPlane);
// 3rd camera row (p3T)
camera.principalPlane(principalPlane);

// 1st equation
a.setElementAt(row, 0, homX * principalPlane.getA() - homW * verticalAxisPlane.getA());
a.setElementAt(row, 1, homX * principalPlane.getB() - homW * verticalAxisPlane.getB());
a.setElementAt(row, 2, homX * principalPlane.getC() - homW * verticalAxisPlane.getC());
a.setElementAt(row, 3, homX * principalPlane.getD() - homW * verticalAxisPlane.getD());

// normalize row (equation) to increase accuracy
rowNorm = Math.sqrt(Math.pow(a.getElementAt(row, 0), 2.0)
        + Math.pow(a.getElementAt(row, 1), 2.0)
        + Math.pow(a.getElementAt(row, 2), 2.0)
        + Math.pow(a.getElementAt(row, 3), 2.0));

a.setElementAt(row, 0, a.getElementAt(row, 0) / rowNorm);
a.setElementAt(row, 1, a.getElementAt(row, 1) / rowNorm);
a.setElementAt(row, 2, a.getElementAt(row, 2) / rowNorm);
a.setElementAt(row, 3, a.getElementAt(row, 3) / rowNorm);

// 2nd equation
row++;

a.setElementAt(row, 0, homY * principalPlane.getA() - homW * horizontalAxisPlane.getA());
a.setElementAt(row, 1, homY * principalPlane.getB() - homW * horizontalAxisPlane.getB());
a.setElementAt(row, 2, homY * principalPlane.getC() - homW * horizontalAxisPlane.getC());
a.setElementAt(row, 3, homY * principalPlane.getD() - homW * horizontalAxisPlane.getD());

// normalize row (equation) to increase accuracy
rowNorm = Math.sqrt(Math.pow(a.getElementAt(row, 0), 2.0)
        + Math.pow(a.getElementAt(row, 1), 2.0)
        + Math.pow(a.getElementAt(row, 2), 2.0)
        + Math.pow(a.getElementAt(row, 3), 2.0));

a.setElementAt(row, 0, a.getElementAt(row, 0) / rowNorm);
a.setElementAt(row, 1, a.getElementAt(row, 1) / rowNorm);
a.setElementAt(row, 2, a.getElementAt(row, 2) / rowNorm);
a.setElementAt(row, 3, a.getElementAt(row, 3) / rowNorm);
```
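Both triangulators repeat the same unit-norm scaling after each equation; the pattern (which also covers the inhomogeneous pair reported in the next entry, where the right-hand side b[row] is divided by the same norm) could be captured by a helper along these lines (hypothetical, not library code):

```java
// Sketch of a hypothetical shared helper: scales row `row` of design matrix `a`
// to unit Euclidean norm and returns the norm so callers can also scale a
// right-hand side entry. Mirrors the repeated normalization above.
private static double normalizeRow(final Matrix a, final int row, final int columns) {
    var sum = 0.0;
    for (var j = 0; j < columns; j++) {
        sum += Math.pow(a.getElementAt(row, j), 2.0);
    }
    final var rowNorm = Math.sqrt(sum);
    for (var j = 0; j < columns; j++) {
        a.setElementAt(row, j, a.getElementAt(row, j) / rowNorm);
    }
    return rowNorm;
}
```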
File | Line |
---|---|
com/irurueta/ar/sfm/LMSEInhomogeneousSinglePoint3DTriangulator.java | 192 |
com/irurueta/ar/sfm/WeightedInhomogeneousSinglePoint3DTriangulator.java | 345 |
```java
for (var i = 0; i < numViews; i++) {
    point = points2D.get(i);
    camera = cameras.get(i);

    // to increase accuracy
    point.normalize();
    camera.normalize();

    final var homX = point.getHomX();
    final var homY = point.getHomY();
    final var homW = point.getHomW();

    // pick rows of camera corresponding to different planes
    // (we do not normalize planes, as it would introduce errors)

    // 1st camera row (p1T)
    camera.verticalAxisPlane(verticalAxisPlane);
    // 2nd camera row (p2T)
    camera.horizontalAxisPlane(horizontalAxisPlane);
    // 3rd camera row (p3T)
    camera.principalPlane(principalPlane);

    // 1st equation
    a.setElementAt(row, 0, homX * principalPlane.getA() - homW * verticalAxisPlane.getA());
    a.setElementAt(row, 1, homX * principalPlane.getB() - homW * verticalAxisPlane.getB());
    a.setElementAt(row, 2, homX * principalPlane.getC() - homW * verticalAxisPlane.getC());
    b[row] = homW * verticalAxisPlane.getD() - homX * principalPlane.getD();

    // normalize equation to increase accuracy
    rowNorm = Math.sqrt(Math.pow(a.getElementAt(row, 0), 2.0)
            + Math.pow(a.getElementAt(row, 1), 2.0)
            + Math.pow(a.getElementAt(row, 2), 2.0));

    a.setElementAt(row, 0, a.getElementAt(row, 0) / rowNorm);
    a.setElementAt(row, 1, a.getElementAt(row, 1) / rowNorm);
    a.setElementAt(row, 2, a.getElementAt(row, 2) / rowNorm);
    b[row] /= rowNorm;

    // 2nd equation
    row++;

    a.setElementAt(row, 0, homY * principalPlane.getA() - homW * horizontalAxisPlane.getA());
    a.setElementAt(row, 1, homY * principalPlane.getB() - homW * horizontalAxisPlane.getB());
    a.setElementAt(row, 2, homY * principalPlane.getC() - homW * horizontalAxisPlane.getC());
    b[row] = homW * horizontalAxisPlane.getD() - homY * principalPlane.getD();

    // normalize equation to increase accuracy
    rowNorm = Math.sqrt(Math.pow(a.getElementAt(row, 0), 2.0)
            + Math.pow(a.getElementAt(row, 1), 2.0)
            + Math.pow(a.getElementAt(row, 2), 2.0));

    a.setElementAt(row, 0, a.getElementAt(row, 0) / rowNorm);
    a.setElementAt(row, 1, a.getElementAt(row, 1) / rowNorm);
    a.setElementAt(row, 2, a.getElementAt(row, 2) / rowNorm);
    b[row] /= rowNorm;
```
File | Line |
---|---|
com/irurueta/ar/calibration/AlternatingCameraCalibrator.java | 228 |
com/irurueta/ar/calibration/ErrorOptimizationCameraCalibrator.java | 247 |
```java
}

/**
 * Returns robust estimator method to be used for radial distortion estimation.
 *
 * @return robust estimator method to be used for radial distortion estimation.
 */
public RobustEstimatorMethod getDistortionMethod() {
    return distortionMethod;
}

/**
 * Sets robust estimator method to be used for radial distortion estimation.
 *
 * @param distortionMethod robust estimator method to be used for radial distortion
 * estimation.
 * @throws LockedException if this instance is locked.
 */
public void setDistortionMethod(final RobustEstimatorMethod distortionMethod) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    internalSetDistortionMethod(distortionMethod);
}

/**
 * Returns radial distortion estimator, which can be retrieved in case some additional
 * parameter needs to be adjusted.
 * It is discouraged to directly access the distortion estimator during camera
 * calibration, as it might interfere with the results.
 *
 * @return radial distortion estimator.
 */
public RadialDistortionRobustEstimator getDistortionEstimator() {
    return distortionEstimator;
}

/**
 * Returns threshold to robustly estimate radial distortion.
 * Usually the default value is good enough for most situations, but this setting can
 * be changed for finer adjustments.
 *
 * @return threshold to robustly estimate radial distortion.
 */
public double getDistortionEstimatorThreshold() {
    return switch (distortionEstimator.getMethod()) {
        case LMEDS -> ((LMedSRadialDistortionRobustEstimator) distortionEstimator).getStopThreshold();
        case MSAC -> ((MSACRadialDistortionRobustEstimator) distortionEstimator).getThreshold();
        case PROSAC -> ((PROSACRadialDistortionRobustEstimator) distortionEstimator).getThreshold();
        case PROMEDS -> ((PROMedSRadialDistortionRobustEstimator) distortionEstimator).getStopThreshold();
        default -> ((RANSACRadialDistortionRobustEstimator) distortionEstimator).getThreshold();
    };
}

/**
 * Sets threshold to robustly estimate radial distortion.
 * Usually the default value is good enough for most situations, but this setting can
 * be changed for finer adjustments.
 *
 * @param distortionEstimatorThreshold threshold to robustly estimate radial distortion.
 * @throws LockedException if this instance is locked.
 * @throws IllegalArgumentException if provided value is zero or negative.
 */
public void setDistortionEstimatorThreshold(final double distortionEstimatorThreshold) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    switch (distortionEstimator.getMethod()) {
        case LMEDS:
            ((LMedSRadialDistortionRobustEstimator) distortionEstimator)
                    .setStopThreshold(distortionEstimatorThreshold);
            break;
        case MSAC:
            ((MSACRadialDistortionRobustEstimator) distortionEstimator)
                    .setThreshold(distortionEstimatorThreshold);
            break;
        case PROSAC:
            ((PROSACRadialDistortionRobustEstimator) distortionEstimator)
                    .setThreshold(distortionEstimatorThreshold);
            break;
        case PROMEDS:
            ((PROMedSRadialDistortionRobustEstimator) distortionEstimator)
                    .setStopThreshold(distortionEstimatorThreshold);
            break;
        case RANSAC:
        default:
            ((RANSACRadialDistortionRobustEstimator) distortionEstimator)
                    .setThreshold(distortionEstimatorThreshold);
            break;
    }
}

/**
 * Returns confidence to robustly estimate radial distortion.
 * Usually the default value is good enough for most situations, but this setting can
 * be changed for finer adjustments.
 * Confidence is expressed as a value between 0.0 (0%) and 1.0 (100%). The amount of
 * confidence indicates the probability that the estimated radial distortion is correct
 * (i.e. no outliers were used for the estimation, because they were successfully
 * discarded).
 * Typically, this value will be close to 1.0, but not exactly 1.0, because a 100%
 * confidence would require an infinite number of iterations.
 *
 * @return confidence to robustly estimate radial distortion.
 */
public double getDistortionEstimatorConfidence() {
    return distortionEstimator.getConfidence();
}

/**
 * Sets confidence to robustly estimate radial distortion.
 * Usually the default value is good enough for most situations, but this setting can
 * be changed for finer adjustments.
 * Confidence is expressed as a value between 0.0 (0%) and 1.0 (100%). The amount of
 * confidence indicates the probability that the estimated radial distortion is correct
 * (i.e. no outliers were used for the estimation, because they were successfully
 * discarded).
 * Typically, this value will be close to 1.0, but not exactly 1.0, because a 100%
 * confidence would require an infinite number of iterations.
 *
 * @param distortionEstimatorConfidence confidence to robustly estimate radial
 * distortion.
 * @throws LockedException if this instance is locked.
 * @throws IllegalArgumentException if provided value is not between 0.0 and 1.0.
 */
public void setDistortionEstimatorConfidence(final double distortionEstimatorConfidence) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    distortionEstimator.setConfidence(distortionEstimatorConfidence);
}

/**
 * Returns the maximum number of iterations to be done when estimating the radial
 * distortion.
 * If the maximum allowed number of iterations is reached, resulting estimation might
 * not have desired confidence.
 * Usually the default value is good enough for most situations, but this setting can
 * be changed for finer adjustments.
 *
 * @return maximum number of iterations to be done when estimating the radial
 * distortion.
 */
public int getDistortionEstimatorMaxIterations() {
    return distortionEstimator.getMaxIterations();
}

/**
 * Sets the maximum number of iterations to be done when estimating the radial
 * distortion.
 * If the maximum allowed number of iterations is reached, resulting estimation might
 * not have desired confidence.
 * Usually the default value is good enough for most situations, but this setting can
 * be changed for finer adjustments.
 *
 * @param distortionEstimatorMaxIterations maximum number of iterations to be done
 * when estimating radial distortion.
 * @throws LockedException if this instance is locked.
 * @throws IllegalArgumentException if provided value is negative or zero.
 */
public void setDistortionEstimatorMaxIterations(final int distortionEstimatorMaxIterations) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    distortionEstimator.setMaxIterations(distortionEstimatorMaxIterations);
}

/**
 * Starts the calibration process.
 * Depending on the settings the following will be estimated: intrinsic pinhole camera
 * parameters, radial distortion of lens, camera pose (rotation and translation) for
 * each sample, and the associated homography of sampled points with respect to the
 * ideal pattern samples.
 *
 * @throws CalibrationException if calibration fails for some reason.
 * @throws LockedException if this instance is locked because calibration is already
 * in progress.
 * @throws NotReadyException if this instance does not have enough data to start
 * camera calibration.
 */
@Override
public void calibrate() throws CalibrationException, LockedException, NotReadyException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (!isReady()) {
        throw new NotReadyException();
    }

    locked = true;

    homographyQualityScoresRequired = (distortionEstimator.getMethod() == RobustEstimatorMethod.PROSAC
            || distortionEstimator.getMethod() == RobustEstimatorMethod.PROMEDS);

    if (listener != null) {
        listener.onCalibrateStart(this);
    }

    reset();

    radialDistortionProgress = iterProgress = previousNotifiedProgress = 0.0f;
```
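The javadoc above notes that 100% confidence would require an infinite number of iterations. That follows from the standard robust-estimation bound: for confidence p, inlier ratio w and minimal sample size s, the required number of iterations is n = log(1 − p) / log(1 − wˢ), which diverges as p → 1. A worked sketch (illustration only; the library computes this internally):

```java
// Standard RANSAC-style iteration bound: n = log(1 - p) / log(1 - w^s).
// Sketch only, not library code.
static int requiredIterations(final double confidence, final double inlierRatio, final int sampleSize) {
    return (int) Math.ceil(Math.log(1.0 - confidence)
            / Math.log(1.0 - Math.pow(inlierRatio, sampleSize)));
}

// e.g. requiredIterations(0.99, 0.5, 4) == 72, while confidence 1.0 would diverge.
```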
File | Line |
---|---|
com/irurueta/ar/slam/ConstantVelocityModelStatePredictor.java | 234 |
com/irurueta/ar/slam/StatePredictor.java | 277 |
} if (jacobianU != null) { jacobianU.initialize(0.0); for (int i = 7, j = 0; i < STATE_COMPONENTS; i++, j++) { jacobianU.setElementAt(i, j, 1.0); } } } catch (final WrongSizeException ignore) { // never thrown } } /** * Updates the system model (position, orientation, linear velocity and * angular velocity) assuming a constant velocity model (without * acceleration). * * @param x initial system state containing: position-x, position-y, * position-z, quaternion-a, quaternion-b, quaternion-c, quaternion-d, * linear-velocity-x, linear-velocity-y, linear-velocity-z, * angular-velocity-x, angular-velocity-y, angular-velocity-z. Must have * length 13. * @param u linear and angular velocity perturbations or controls: * linear-velocity-change-x, linear-velocity-change-y, * linear-velocity-change-z, angular-velocity-change-x, * angular-velocity-change-y, angular-velocity-change-z. Must have length 6. * @param dt time interval to compute prediction expressed in seconds. * @param result instance where updated system model will be stored. Must * have length 13. * @throws IllegalArgumentException if system state array or control array * or result do not have proper size. * @see <a href="https://github.com/joansola/slamtb">constVel.m at https://github.com/joansola/slamtb</a> */ public static void predict(final double[] x, final double[] u, final double dt, final double[] result) { predict(x, u, dt, result, null, null); } /** * Updates the system model (position, orientation, linear velocity and * angular velocity) assuming a constant velocity model (without * acceleration). * * @param x initial system state containing: position-x, position-y, * position-z, quaternion-a, quaternion-b, quaternion-c, quaternion-d, * linear-velocity-x, linear-velocity-y, linear-velocity-z, * angular-velocity-x, angular-velocity-y, angular-velocity-z. Must have * length 13. * @param u linear and angular velocity perturbations or controls: * linear-velocity-change-x, linear-velocity-change-y, * linear-velocity-change-z, angular-velocity-change-x, * angular-velocity-change-y, angular-velocity-change-z. Must have length 6. * @param dt time interval to compute prediction expressed in seconds. * @param jacobianX jacobian wrt system state. Must be 13x13. * @param jacobianU jacobian wrt control. Must be 13x6. * @return instance where updated system model will be stored. * @throws IllegalArgumentException if system state array, control array or * jacobians do not have proper size. * @see <a href="https://github.com/joansola/slamtb">constVel.m at https://github.com/joansola/slamtb</a> */ public static double[] predict(final double[] x, final double[] u, final double dt, final Matrix jacobianX, final Matrix jacobianU) { final var result = new double[STATE_COMPONENTS]; predict(x, u, dt, result, jacobianX, jacobianU); return result; } /** * Updates the system model (position, orientation, linear velocity and * angular velocity) assuming a constant velocity model (without * acceleration). * * @param x initial system state containing: position-x, position-y, * position-z, quaternion-a, quaternion-b, quaternion-c, quaternion-d, * linear-velocity-x, linear-velocity-y, linear-velocity-z, * angular-velocity-x, angular-velocity-y, angular-velocity-z. Must have * length 13. * @param u linear and angular velocity perturbations or controls: * linear-velocity-change-x, linear-velocity-change-y, * linear-velocity-change-z, angular-velocity-change-x, * angular-velocity-change-y, angular-velocity-change-z. Must have length 6. 
* @param dt time interval to compute prediction expressed in seconds. * @return a new instance containing the updated system state. * @throws IllegalArgumentException if system state array, control array or * jacobians do not have proper size. * @see <a href="https://github.com/joansola/slamtb">constVel.m at https://github.com/joansola/slamtb</a> */ public static double[] predict(final double[] x, final double[] u, final double dt) { final var result = new double[STATE_COMPONENTS]; predict(x, u, dt, result); return result; } /** * Updates the system model (position, orientation, linear velocity and * angular velocity) assuming a constant velocity model (without * acceleration) when no velocity control signal is present. * * @param x initial system state containing: position-x, position-y, * position-z, quaternion-a, quaternion-b, quaternion-c, quaternion-d, * linear-velocity-x, linear-velocity-y, linear-velocity-z, * angular-velocity-x, angular-velocity-y, angular-velocity-z. Must have * length 13. * @param u linear and angular velocity perturbations or controls and * position perturbations or controls: position-change-x, position-change-y, * position-change-z, linear-velocity-change-x, linear-velocity-change-y, * linear-velocity-change-z, angular-velocity-change-x, * angular-velocity-change-y, angular-velocity-change-z. Must have length 9. * @param dt time interval to compute prediction expressed in seconds. * @param result instance where updated system model will be stored. Must * have length 13. * @param jacobianX jacobian wrt system state. Must be 13x13. * @param jacobianU jacobian wrt control. Must be 13x9. * @throws IllegalArgumentException if system state array, control array, * result or jacobians do not have proper size. */ public static void predictWithPositionAdjustment( final double[] x, final double[] u, final double dt, final double[] result, final Matrix jacobianX, final Matrix jacobianU) { if (x.length != STATE_WITH_POSITION_ADJUSTMENT_COMPONENTS) { // x must have length 13 throw new IllegalArgumentException(); } if (u.length != CONTROL_WITH_POSITION_ADJUSTMENT_COMPONENTS) { // u must have length 9 throw new IllegalArgumentException(); } if (result.length != STATE_WITH_POSITION_ADJUSTMENT_COMPONENTS) { // result must have length 13 throw new IllegalArgumentException(); } if (jacobianX != null && (jacobianX.getRows() != STATE_WITH_POSITION_ADJUSTMENT_COMPONENTS || jacobianX.getColumns() != STATE_WITH_POSITION_ADJUSTMENT_COMPONENTS)) { // jacobian wrt x must be 13x13 throw new IllegalArgumentException(); } if (jacobianU != null && (jacobianU.getRows() != STATE_WITH_POSITION_ADJUSTMENT_COMPONENTS || jacobianU.getColumns() != CONTROL_WITH_POSITION_ADJUSTMENT_COMPONENTS)) { // jacobian wrt u must be 13x9 throw new IllegalArgumentException(); } // position final var r = new InhomogeneousPoint3D(x[0], x[1], x[2]); // orientation var q = new Quaternion(x[3], x[4], x[5], x[6]); // linear velocity var vx = x[7]; var vy = x[8]; var vz = x[9]; // angular velocity var wx = x[10]; |
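The overloads quoted above differ only in whether the caller supplies the result array and the jacobians. As a quick illustration of the constant-velocity entry point, here is a minimal sketch with made-up initial values; `StatePredictor` is assumed to be imported from `com.irurueta.ar.slam`:

```java
// system state: position (0-2), quaternion (3-6), linear velocity (7-9),
// angular velocity (10-12); must have length 13
final double[] x = new double[13];
x[3] = 1.0;  // identity orientation quaternion
x[7] = 0.5;  // moving at 0.5 m/s along x (hypothetical value)

// no linear or angular velocity perturbations; must have length 6
final double[] u = new double[6];

// predict the state 20 ms ahead; a new length-13 array is returned
final double[] predicted = StatePredictor.predict(x, u, 0.02);
```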
File | Line |
---|---|
com/irurueta/ar/slam/StatePredictor.java | 780 |
com/irurueta/ar/slam/StatePredictor.java | 1082 |
PositionPredictor.predict(r, vx, vy, vz, ax, ay, az, dt, r, rr, rv, ra); // update orientation Matrix qq = null; Matrix qdq = null; Matrix qw = null; if (jacobianX != null) { qq = new Matrix(Quaternion.N_PARAMS, Quaternion.N_PARAMS); qw = new Matrix(Quaternion.N_PARAMS, ANGULAR_SPEED_COMPONENTS); } if (jacobianU != null) { qdq = new Matrix(Quaternion.N_PARAMS, Quaternion.N_PARAMS); } q = QuaternionPredictor.predictWithRotationAdjustment(q, dq, wx, wy, wz, dt, qq, qdq, qw); // set updated linear velocity vx = v[0]; vy = v[1]; vz = v[2]; // apply control signals vx += uvx; vy += uvy; vz += uvz; ax += uax; ay += uay; az += uaz; wx += uwx; wy += uwy; wz += uwz; // set new state result[0] = r.getInhomX(); result[1] = r.getInhomY(); result[2] = r.getInhomZ(); result[3] = q.getA(); result[4] = q.getB(); result[5] = q.getC(); result[6] = q.getD(); result[7] = vx; result[8] = vy; result[9] = vz; result[10] = ax; result[11] = ay; result[12] = az; result[13] = wx; result[14] = wy; result[15] = wz; // jacobians if (jacobianX != null) { // [Rr 0 Rv Ra 0 ] // [0 Qq 0 0 Qw ] // [0 0 Vv Va 0 ] // [0 0 0 eye 0 ] // [0 0 0 0 eye] jacobianX.initialize(0.0); jacobianX.setSubmatrix(0, 0, 2, 2, rr); jacobianX.setSubmatrix(3, 3, 6, 6, qq); jacobianX.setSubmatrix(0, 7, 2, 9, rv); jacobianX.setSubmatrix(7, 7, 9, 9, vv); jacobianX.setSubmatrix(0, 10, 2, 12, ra); jacobianX.setSubmatrix(7, 10, 9, 12, va); jacobianX.setSubmatrix(3, 13, 6, 15, qw); for (var i = 10; i < STATE_WITH_ROTATION_ADJUSTMENT_COMPONENTS; i++) { |
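Both duplicated regions are cut off at the loop that completes the jacobian. Going by the block layout in the comment above (position rows 0-2, quaternion rows 3-6, velocity rows 7-9, acceleration rows 10-12, angular rate rows 13-15), the remaining rows are presumably the identity blocks for acceleration and angular rate, since both pass through the constant-acceleration model unchanged. A sketch of the likely loop body, under that assumption:

```java
// acceleration and angular rate are carried over unchanged, so their
// diagonal jacobian entries are 1 (rows/columns 10 through 15)
for (var i = 10; i < STATE_WITH_ROTATION_ADJUSTMENT_COMPONENTS; i++) {
    jacobianX.setElementAt(i, i, 1.0);
}
```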
File | Line |
---|---|
com/irurueta/ar/slam/AbsoluteOrientationBaseSlamCalibrator.java | 63 |
com/irurueta/ar/slam/AbsoluteOrientationBaseSlamEstimator.java | 64 |
accumulatedOrientation = new Quaternion(); } /** * Gets timestamp expressed in nanoseconds since the epoch time of the last * orientation sample, or -1 if no sample has been set yet. * * @return timestamp expressed in nanoseconds since the epoch time of the * last orientation sample, or -1. */ public long getOrientationTimestampNanos() { return orientationTimestampNanos; } /** * Gets average orientation accumulated since last full sample. * * @return orientation accumulated since last full sample. */ public Rotation3D getAccumulatedOrientation() { return accumulatedOrientation.toQuaternion(); } /** * Gets average orientation accumulated since last full sample. * * @param result instance where orientation accumulated since last full * sample will be stored. */ public void getAccumulatedOrientation(final Rotation3D result) { result.fromRotation(accumulatedOrientation); } /** * Gets number of orientation samples accumulated since last full sample. * * @return number of orientation samples accumulated since last full sample. */ public int getAccumulatedOrientationSamples() { return accumulatedOrientationSamples; } /** * Indicates whether the orientation sample has been received since last * full sample (accelerometer + gyroscope + orientation). * * @return true if orientation sample has been received, false otherwise. */ public boolean isOrientationSampleReceived() { return accumulatedOrientationSamples > 0; } /** * Indicates whether a full sample (accelerometer + gyroscope + * orientation) has been received or not. * * @return true if full sample has been received, false otherwise. */ @Override public boolean isFullSampleAvailable() { return super.isFullSampleAvailable() && isOrientationSampleReceived(); } /** * Provides a new orientation sample. * If accumulation is enabled, samples are averaged until a full sample is * received. * When a full sample (accelerometer + gyroscope + orientation) is * received, internal state also gets updated. * * @param timestamp timestamp of orientation sample since epoch time and * expressed in nanoseconds. * @param orientation new orientation. 
*/ @SuppressWarnings("DuplicatedCode") public void updateOrientationSample(final long timestamp, final Rotation3D orientation) { if (!isFullSampleAvailable()) { orientationTimestampNanos = timestamp; if (isAccumulationEnabled() && isOrientationSampleReceived()) { // accumulation enabled final var nextSamples = accumulatedOrientationSamples + 1; var accumA = accumulatedOrientation.getA(); var accumB = accumulatedOrientation.getB(); var accumC = accumulatedOrientation.getC(); var accumD = accumulatedOrientation.getD(); if (tempQ == null) { tempQ = new Quaternion(); } tempQ.fromRotation(orientation); tempQ.normalize(); final var a = tempQ.getA(); final var b = tempQ.getB(); final var c = tempQ.getC(); final var d = tempQ.getD(); accumA = (accumA * accumulatedOrientationSamples + a) / nextSamples; accumB = (accumB * accumulatedOrientationSamples + b) / nextSamples; accumC = (accumC * accumulatedOrientationSamples + c) / nextSamples; accumD = (accumD * accumulatedOrientationSamples + d) / nextSamples; accumulatedOrientation.setA(accumA); accumulatedOrientation.setB(accumB); accumulatedOrientation.setC(accumC); accumulatedOrientation.setD(accumD); accumulatedOrientationSamples = nextSamples; } else { // accumulation disabled accumulatedOrientation.fromRotation(orientation); accumulatedOrientationSamples++; } notifyFullSampleAndResetSampleReceive(); } } /** * Gets most recent timestamp of received partial samples (accelerometer, * gyroscope or orientation). * * @return most recent timestamp of received partial sample. */ @Override public long getMostRecentTimestampNanos() { final var mostRecent = super.getMostRecentTimestampNanos(); return Math.max(mostRecent, orientationTimestampNanos); } /** * Notifies that a full sample has been received and resets flags indicating * whether partial samples have been received. */ @Override protected void notifyFullSampleAndResetSampleReceive() { if (isFullSampleAvailable()) { processFullSample(); accumulatedAccelerometerSamples = accumulatedGyroscopeSamples = accumulatedOrientationSamples = 0; } } } |
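The accumulation branch keeps a component-wise running mean of the received quaternions. A minimal self-contained sketch of the same update rule, using plain arrays and made-up sample values:

```java
double[] accum = {1.0, 0.0, 0.0, 0.0}; // current mean (a, b, c, d)
int samples = 3;                       // quaternions averaged so far

final double[] sample = {0.9998, 0.02, 0.0, 0.0}; // new normalized quaternion
final int next = samples + 1;
for (int k = 0; k < 4; k++) {
    // incremental mean, as in updateOrientationSample above
    accum[k] = (accum[k] * samples + sample[k]) / next;
}
samples = next;
```

Note that a component-wise mean is only a reasonable orientation average when the accumulated quaternions are close to one another, and the result generally needs renormalization before being used as a rotation.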
File | Line |
---|---|
com/irurueta/ar/slam/StatePredictor.java | 201 |
com/irurueta/ar/slam/StatePredictor.java | 488 |
PositionPredictor.predict(r, vx, vy, vz, ax, ay, az, dt, r, rr, rv, ra); // update orientation Matrix qq = null; Matrix qw = null; if (jacobianX != null) { qq = new Matrix(Quaternion.N_PARAMS, Quaternion.N_PARAMS); qw = new Matrix(Quaternion.N_PARAMS, ANGULAR_SPEED_COMPONENTS); } q = QuaternionPredictor.predict(q, wx, wy, wz, dt, true, qq, qw); // set updated linear velocity vx = v[0]; vy = v[1]; vz = v[2]; // apply control signals vx += uvx; vy += uvy; vz += uvz; ax += uax; ay += uay; az += uaz; wx += uwx; wy += uwy; wz += uwz; // set new state result[0] = r.getInhomX(); result[1] = r.getInhomY(); result[2] = r.getInhomZ(); result[3] = q.getA(); result[4] = q.getB(); result[5] = q.getC(); result[6] = q.getD(); result[7] = vx; result[8] = vy; result[9] = vz; result[10] = ax; result[11] = ay; result[12] = az; result[13] = wx; result[14] = wy; result[15] = wz; // jacobians if (jacobianX != null) { // [Rr 0 Rv Ra 0 ] // [0 Qq 0 0 Qw ] // [0 0 Vv Va 0 ] // [0 0 0 eye 0 ] // [0 0 0 0 eye] jacobianX.initialize(0.0); jacobianX.setSubmatrix(0, 0, 2, 2, rr); jacobianX.setSubmatrix(3, 3, 6, 6, qq); jacobianX.setSubmatrix(0, 7, 2, 9, rv); jacobianX.setSubmatrix(7, 7, 9, 9, vv); jacobianX.setSubmatrix(0, 10, 2, 12, ra); jacobianX.setSubmatrix(7, 10, 9, 12, va); jacobianX.setSubmatrix(3, 13, 6, 15, qw); for (var i = 10; i < STATE_COMPONENTS; i++) { |
File | Line |
---|---|
com/irurueta/ar/epipolar/estimators/AffineFundamentalMatrixEstimator.java | 84 |
com/irurueta/ar/epipolar/estimators/EightPointsFundamentalMatrixEstimator.java | 86 |
public AffineFundamentalMatrixEstimator(final List<Point2D> leftPoints, final List<Point2D> rightPoints) { super(leftPoints, rightPoints); allowLMSESolution = DEFAULT_ALLOW_LMSE_SOLUTION; normalizePoints = DEFAULT_NORMALIZE_POINT_CORRESPONDENCES; } /** * Returns boolean indicating whether an LMSE (Least Mean Square Error) * solution is allowed or not. When an LMSE solution is allowed, more than 4 * matched points can be used for fundamental matrix estimation. If LMSE * solution is not allowed then only the first 4 matched points will be * taken into account. * * @return true if an LMSE solution is allowed, false otherwise. */ public boolean isLMSESolutionAllowed() { return allowLMSESolution; } /** * Sets boolean indicating whether an LMSE (Least Mean Square Error) * solution is allowed or not. When an LMSE solution is allowed, more than 4 * matched points can be used for fundamental matrix estimation. If LMSE * solution is not allowed then only the first 4 matched points will be * taken into account. * * @param allowed true if an LMSE solution is allowed, false otherwise. * @throws LockedException if this instance is locked because an estimation * is in progress. */ public void setLMSESolutionAllowed(final boolean allowed) throws LockedException { if (isLocked()) { throw new LockedException(); } allowLMSESolution = allowed; } /** * Indicates whether provided matched 2D points must be normalized to * increase the accuracy of the estimation. * * @return true if points must be normalized, false otherwise. */ public boolean arePointsNormalized() { return normalizePoints; } /** * Sets boolean indicating whether provided matched 2D points must be * normalized to increase the accuracy of the estimation. * * @param normalizePoints true if points must be normalized, false * otherwise. * @throws LockedException if this instance is locked because an estimation * is in progress. */ public void setPointsNormalized(final boolean normalizePoints) throws LockedException { if (isLocked()) { throw new LockedException(); } this.normalizePoints = normalizePoints; } /** * Returns boolean indicating whether estimator is ready to start the * fundamental matrix estimation. * This is true when the required minimum number of matched points is * provided to obtain a solution and both left and right views have the * same number of matched points. * * @return true if estimator is ready to start the fundamental matrix * estimation, false otherwise. */ @Override public boolean isReady() { return leftPoints != null && rightPoints != null && leftPoints.size() == rightPoints.size() && leftPoints.size() >= MIN_REQUIRED_POINTS; } /** * Estimates a fundamental matrix using provided lists of matched points on * left and right views. * * @return a fundamental matrix. * @throws LockedException if estimator is locked doing an estimation. * @throws NotReadyException if estimator is not ready because required * input points have not already been provided. * @throws FundamentalMatrixEstimatorException if configuration of provided * 2D points is degenerate and fundamental matrix * estimation fails. 
*/ @SuppressWarnings("DuplicatedCode") @Override public FundamentalMatrix estimate() throws LockedException, NotReadyException, FundamentalMatrixEstimatorException { if (isLocked()) { throw new LockedException(); } if (!isReady()) { throw new NotReadyException(); } locked = true; if (listener != null) { listener.onEstimateStart(this); } final var nPoints = leftPoints.size(); try { ProjectiveTransformation2D leftNormalization = null; ProjectiveTransformation2D rightNormalization = null; final List<Point2D> leftPoints; final List<Point2D> rightPoints; if (normalizePoints) { // normalize points on left view final var normalizer = new Point2DNormalizer(this.leftPoints); normalizer.compute(); leftNormalization = normalizer.getTransformation(); // normalize points on right view normalizer.setPoints(this.rightPoints); normalizer.compute(); rightNormalization = normalizer.getTransformation(); // normalize to increase accuracy leftNormalization.normalize(); rightNormalization.normalize(); leftPoints = leftNormalization.transformPointsAndReturnNew(this.leftPoints); rightPoints = rightNormalization.transformPointsAndReturnNew(this.rightPoints); } else { leftPoints = this.leftPoints; rightPoints = this.rightPoints; } final Matrix a; if (isLMSESolutionAllowed()) { a = new Matrix(nPoints, 5); |
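Putting the accessors and `estimate()` together, a minimal usage sketch of the estimator quoted above; the four correspondences are hypothetical values, and `InhomogeneousPoint2D` is assumed to be available from the geometry package:

```java
final List<Point2D> left = new ArrayList<>();
final List<Point2D> right = new ArrayList<>();
// four hypothetical correspondences, the minimum for the affine case
final double[][] matches = {
        {10, 20, 12, 21}, {50, 80, 53, 82}, {200, 40, 204, 41}, {120, 160, 123, 163}};
for (final double[] m : matches) {
    left.add(new InhomogeneousPoint2D(m[0], m[1]));
    right.add(new InhomogeneousPoint2D(m[2], m[3]));
}

final var estimator = new AffineFundamentalMatrixEstimator(left, right);
try {
    estimator.setPointsNormalized(true); // normalize to increase accuracy, as above
    if (estimator.isReady()) {
        final FundamentalMatrix fm = estimator.estimate();
    }
} catch (final Exception e) {
    // locked estimator or degenerate point configuration
}
```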
File | Line |
---|---|
com/irurueta/ar/slam/AbsoluteOrientationConstantVelocityModelSlamEstimator.java | 164 |
com/irurueta/ar/slam/AbsoluteOrientationSlamEstimator.java | 180 |
com/irurueta/ar/slam/ConstantVelocityModelSlamEstimator.java | 141 |
com/irurueta/ar/slam/SlamEstimator.java | 160 |
public AbsoluteOrientationConstantVelocityModelSlamEstimator() { super(); x = new double[STATE_LENGTH]; // initial value of quaternion. x[3] = 1.0; u = new double[CONTROL_LENGTH]; try { jacobianPredictionX = new Matrix(STATE_LENGTH, STATE_LENGTH); jacobianPredictionU = new Matrix(STATE_LENGTH, CONTROL_LENGTH); control = new Matrix(CONTROL_LENGTH, 1); measurement = new Matrix(MEASUREMENT_LENGTH, 1); measurementMatrix = Matrix.identity(MEASUREMENT_LENGTH, STATE_LENGTH); } catch (final WrongSizeException ignore) { // never thrown } try { kalmanFilter = new KalmanFilter(STATE_LENGTH, MEASUREMENT_LENGTH, CONTROL_LENGTH); // setup matrix relating position measures with internal state. kalmanFilter.setMeasurementMatrix(measurementMatrix); } catch (final SignalProcessingException ignore) { // never thrown } try { // set initial Kalman filter state (state pre and post must be two // different instances!) kalmanFilter.getStatePre().fromArray(x); kalmanFilter.getStatePost().fromArray(x); } catch (final WrongSizeException ignore) { // never thrown } } /** * Gets covariance matrix of state variables (position, orientation, linear velocity and * angular speed). * Diagonal elements of matrix returned by this method are in the following order: * position-x, position-y, position-z, quaternion-a, quaternion-b, quaternion-c, * quaternion-d, linear-velocity-x, linear-velocity-y, linear-velocity-z, * angular-velocity-x, angular-velocity-y, angular-velocity-z. * Off-diagonal elements correspond to cross-correlation values of diagonal ones. * * @return covariance matrix of state variables. */ @Override public Matrix getStateCovariance() { return kalmanFilter.getErrorCovPre(); } /** * Updates covariance matrix of position measures. * If null is provided, covariance matrix is not updated. * * @param positionCovariance new position covariance determining position * accuracy or null if last available covariance does not need to be * updated. * @throws IllegalArgumentException if provided covariance matrix is not * 3x3. */ @Override public void setPositionCovarianceMatrix(final Matrix positionCovariance) { if (positionCovariance != null) { kalmanFilter.setMeasurementNoiseCov(positionCovariance); } } /** * Gets current covariance matrix of position measures determining current * accuracy of provided position measures. * * @return covariance matrix of position measures. */ @Override public Matrix getPositionCovarianceMatrix() { return kalmanFilter.getMeasurementNoiseCov(); } /** * Corrects system state with provided position measure using current * position accuracy. * * @param positionX new position along x-axis expressed in meters (m). * @param positionY new position along y-axis expressed in meters (m). * @param positionZ new position along z-axis expressed in meters (m). 
*/ @Override public void correctWithPositionMeasure(final double positionX, final double positionY, final double positionZ) { if (!predictionAvailable) { return; } if (listener != null) { listener.onCorrectWithPositionMeasure(this); } try { measurement.setElementAtIndex(0, positionX); measurement.setElementAtIndex(1, positionY); measurement.setElementAtIndex(2, positionZ); updateCorrectedState(kalmanFilter.correct(measurement)); // copy corrected state to predicted state kalmanFilter.getStatePost().copyTo(kalmanFilter.getStatePre()); kalmanFilter.getErrorCovPost().copyTo(kalmanFilter.getErrorCovPre()); } catch (final SignalProcessingException e) { error = true; } if (listener != null) { listener.onCorrectedWithPositionMeasure(this); } } /** * Creates an instance of a calibrator to be used with this SLAM estimator. * * @return a calibrator. */ public static AbsoluteOrientationConstantVelocityModelSlamCalibrator createCalibrator() { |
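A short sketch of how the correction path above might be exercised; the covariance value is a made-up accuracy figure, and the no-argument constructor is the one shown in the fragment:

```java
final var estimator = new AbsoluteOrientationConstantVelocityModelSlamEstimator();

// position fixes assumed accurate to about 1 cm (hypothetical figure)
final var positionCov = Matrix.identity(3, 3);
for (int i = 0; i < 3; i++) {
    positionCov.setElementAt(i, i, 1e-4); // variance = (0.01 m)^2
}
estimator.setPositionCovarianceMatrix(positionCov);

// fuse a new position measure; silently ignored until a first prediction exists
estimator.correctWithPositionMeasure(0.1, 0.0, -0.2);
```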
File | Line |
---|---|
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 1427 |
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 1589 |
final double principalPointY; if (isInitialPairOfViews) { if (configuration.getInitialCamerasEstimatorMethod() == InitialCamerasEstimatorMethod.DUAL_ABSOLUTE_QUADRIC || configuration.getInitialCamerasEstimatorMethod() == InitialCamerasEstimatorMethod.DUAL_ABSOLUTE_QUADRIC_AND_ESSENTIAL_MATRIX) { principalPointX = configuration.getPrincipalPointX(); principalPointY = configuration.getPrincipalPointY(); } else { principalPointX = principalPointY = 0.0; } } else { if (configuration.getUseDIACForAdditionalCamerasIntrinsics() || configuration.getUseDAQForAdditionalCamerasIntrinsics()) { principalPointX = configuration.getAdditionalCamerasHorizontalPrincipalPoint(); principalPointY = configuration.getAdditionalCamerasVerticalPrincipalPoint(); } else { principalPointX = principalPointY = 0.0; } } var i = 0; for (final var match : matches) { final var samples = match.getSamples(); if (samples.length < MIN_NUMBER_OF_VIEWS) { return false; } final var viewIds = match.getViewIds(); final var pos1 = getPositionForViewId(viewIds, viewId1); if (pos1 < 0) { return false; } final var pos2 = getPositionForViewId(viewIds, viewId2); if (pos2 < 0) { return false; } final var leftSample = samples[pos1]; final var rightSample = samples[pos2]; final var p1 = leftSample.getPoint(); final var p2 = rightSample.getPoint(); leftSamples.add(leftSample); rightSamples.add(rightSample); final var leftPoint = Point2D.create(); leftPoint.setInhomogeneousCoordinates(p1.getInhomX() - principalPointX, p1.getInhomY() - principalPointY); leftPoints.add(leftPoint); final var rightPoint = Point2D.create(); rightPoint.setInhomogeneousCoordinates(p2.getInhomX() - principalPointX, p2.getInhomY() - principalPointY); rightPoints.add(rightPoint); qualityScores[i] = match.getQualityScore(); i++; } try { final var estimator = FundamentalMatrixRobustEstimator.create(leftPoints, rightPoints, qualityScores, |
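Each matched sample is re-expressed relative to the principal point before the fundamental matrix is robustly estimated. The same transformation in isolation, as a hypothetical helper:

```java
// shift a measured sample so its coordinates are relative to the principal point,
// as done in the loop above
static Point2D centerOnPrincipalPoint(final Point2D p,
                                      final double principalPointX,
                                      final double principalPointY) {
    final var centered = Point2D.create();
    centered.setInhomogeneousCoordinates(
            p.getInhomX() - principalPointX,
            p.getInhomY() - principalPointY);
    return centered;
}
```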
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructorConfiguration.java | 841 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructorConfiguration.java | 869 |
return (T) this; } /** * Indicates whether a general scene (points laying in a general 3D position) is * allowed. * When true, an initial geometry estimation is attempted for general points. * * @return true if general scene is allowed, false otherwise. */ public boolean isGeneralSceneAllowed() { return allowGeneralScene; } /** * Specifies whether a general scene (points laying in a general 3D position) is * allowed. * When true, an initial geometry estimation is attempted for general points. * * @param allowGeneralScene true if general scene is allowed, false otherwise. * @return this instance so that method can be easily chained. */ public T setGeneralSceneAllowed(final boolean allowGeneralScene) { this.allowGeneralScene = allowGeneralScene; //noinspection unchecked return (T) this; } /** * Indicates whether a planar scene (points laying in a 3D plane) is allowed or * not. * When true, an initial geometry estimation is attempted for planar points. * * @return true if planar scene is allowed, false otherwise. */ public boolean isPlanarSceneAllowed() { return allowPlanarScene; } /** * specifies whether a planar scene (points laying in a 3D plane) is allowed or * not. * When true, an initial geometry estimation is attempted for planar points. * * @param allowPlanarScene true if planar scene is allowed, false otherwise. * @return this instance so that method can be easily chained. */ public T setPlanarSceneAllowed(final boolean allowPlanarScene) { this.allowPlanarScene = allowPlanarScene; //noinspection unchecked return (T) this; } /** * Gets robust method to use for planar homography estimation. * This is only used when planar scenes are allowed. * * @return robust method to use for planar homography estimation. */ public RobustEstimatorMethod getRobustPlanarHomographyEstimatorMethod() { return robustPlanarHomographyEstimatorMethod; } /** * Sets robust method to use for planar homography estimation. * This is only used when planar scenes are allowed. * * @param robustPlanarHomographyEstimatorMethod robust method to use for planar * homography estimation. * @return this instance so that method can be easily chained. */ public T setRobustPlanarHomographyEstimatorMethod( final RobustEstimatorMethod robustPlanarHomographyEstimatorMethod) { this.robustPlanarHomographyEstimatorMethod = robustPlanarHomographyEstimatorMethod; //noinspection unchecked return (T) this; } /** * Indicates whether planar homography is refined using all found inliers or not. * This is only used when planar scenes are allowed. * * @return true if planar homography is refined, false otherwise. */ public boolean isPlanarHomographyRefined() { return refinePlanarHomography; } /** * Specifies whether planar homography is refined using all found inliers or not. * This is only used when planar scenes are allowed. * * @param refinePlanarHomography true if planar homography must be refined, false * otherwise. * @return this instance so that method can be easily chained. */ public T setPlanarHomographyRefined(final boolean refinePlanarHomography) { this.refinePlanarHomography = refinePlanarHomography; //noinspection unchecked return (T) this; } /** * Indicates whether planar homography covariance is kept after estimation. * This is only used when planar scenes are allowed. * * @return true if planar homography covariance is kept, false otherwise. */ public boolean isPlanarHomographyCovarianceKept() { return keepPlanarHomographyCovariance; } /** * Specifies whether planar homography covariance is kept after estimation. 
* This is only used when planar scenes are allowed. * * @param keepPlanarHomographyCovariance true if planar homography covariance is * kept, false otherwise. * @return this instance so that method can be easily chained. */ public T setPlanarHomographyCovarianceKept(final boolean keepPlanarHomographyCovariance) { this.keepPlanarHomographyCovariance = keepPlanarHomographyCovariance; //noinspection unchecked return (T) this; } /** * Gets confidence of robustly estimated planar homography. By default, this is 99%. * This is only used when planar scenes are allowed. * * @return confidence of robustly estimated planar homography. */ public double getPlanarHomographyConfidence() { return planarHomographyConfidence; } /** * Sets confidence of robustly estimated planar homography. By default, this is 99%. * This is only used when planar scenes are allowed. * * @param planarHomographyConfidence confidence of robustly estimated planar * homography. * @return this instance so that method can be easily chained. */ public T setPlanarHomographyConfidence(final double planarHomographyConfidence) { this.planarHomographyConfidence = planarHomographyConfidence; //noinspection unchecked return (T) this; } /** * Gets maximum number of iterations to make while robustly estimating planar * homography. By default, this is 5000. * This is only used when planar scenes are allowed. * * @return maximum number of iterations to make while robustly estimating planar * homography. */ public int getPlanarHomographyMaxIterations() { return planarHomographyMaxIterations; } /** * Sets maximum number of iterations to make while robustly estimating planar * homography. By default, this is 5000. * This is only used when planar scenes are allowed. * * @param planarHomographyMaxIterations maximum number of iterations to make while * robustly estimating planar homography. * @return this instance so that method can be easily chained. */ public T setPlanarHomographyMaxIterations(final int planarHomographyMaxIterations) { this.planarHomographyMaxIterations = planarHomographyMaxIterations; //noinspection unchecked return (T) this; } /** * Gets threshold to determine whether samples for robust projective 2D * transformation estimation are inliers or not. * This is only used when planar scenes are allowed. * * @return threshold to robustly estimate projective 2D transformation. */ public double getPlanarHomographyThreshold() { return planarHomographyThreshold; } /** * Sets threshold to determine whether samples for robust projective 2D * transformation estimation are inliers or not. * This is only used when planar scenes are allowed. * * @param planarHomographyThreshold threshold to robustly estimate projective 2D * transformation. * @return this instance so that method can be easily chained. */ public T setPlanarHomographyThreshold(final double planarHomographyThreshold) { this.planarHomographyThreshold = planarHomographyThreshold; //noinspection unchecked return (T) this; } /** * Gets value indicating that inlier data is kept after robust planar homography * estimation. * This is only used when planar scenes are allowed. * * @return true if inlier data is kept, false otherwise. */ public boolean getPlanarHomographyComputeAndKeepInliers() { return planarHomographyComputeAndKeepInliers; } /** * Specifies whether inlier data is kept after robust planar homography estimation. * This is only used when planar scenes are allowed. * * @param planarHomographyComputeAndKeepInliers true if inlier data is kept, false * otherwise. 
* @return this instance so that method can be easily chained. */ public T setPlanarHomographyComputeAndKeepInliers(final boolean planarHomographyComputeAndKeepInliers) { this.planarHomographyComputeAndKeepInliers = planarHomographyComputeAndKeepInliers; //noinspection unchecked return (T) this; } /** * Gets value indicating that residual data is kept after robust planar homography * estimation. * This is only used when planar scenes are allowed. * * @return true if residual data is kept, false otherwise. */ public boolean getPlanarHomographyComputeAndKeepResiduals() { return planarHomographyComputeAndKeepResiduals; } /** * Sets value indicating that residual data is kept after robust planar homography * estimation. * This is only used when planar scenes are allowed. * * @param planarHomographyComputeAndKeepResiduals true if residual data is kept, false * otherwise. * @return this instance so that method can be easily chained. */ public T setPlanarHomographyComputeAndKeepResiduals(final boolean planarHomographyComputeAndKeepResiduals) { this.planarHomographyComputeAndKeepResiduals = planarHomographyComputeAndKeepResiduals; //noinspection unchecked return (T) this; } } |
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructorConfiguration.java | 841 |
com/irurueta/ar/sfm/BaseSparseReconstructorConfiguration.java | 1277 |
return (T) this; } /** * Indicates whether a general scene (points laying in a general 3D position) is * allowed. * When true, an initial geometry estimation is attempted for general points. * * @return true if general scene is allowed, false otherwise. */ public boolean isGeneralSceneAllowed() { return allowGeneralScene; } /** * Specifies whether a general scene (points laying in a general 3D position) is * allowed. * When true, an initial geometry estimation is attempted for general points. * * @param allowGeneralScene true if general scene is allowed, false otherwise. * @return this instance so that method can be easily chained. */ public T setGeneralSceneAllowed(final boolean allowGeneralScene) { this.allowGeneralScene = allowGeneralScene; //noinspection unchecked return (T) this; } /** * Indicates whether a planar scene (points laying in a 3D plane) is allowed or * not. * When true, an initial geometry estimation is attempted for planar points. * * @return true if planar scene is allowed, false otherwise. */ public boolean isPlanarSceneAllowed() { return allowPlanarScene; } /** * specifies whether a planar scene (points laying in a 3D plane) is allowed or * not. * When true, an initial geometry estimation is attempted for planar points. * * @param allowPlanarScene true if planar scene is allowed, false otherwise. * @return this instance so that method can be easily chained. */ public T setPlanarSceneAllowed(final boolean allowPlanarScene) { this.allowPlanarScene = allowPlanarScene; //noinspection unchecked return (T) this; } /** * Gets robust method to use for planar homography estimation. * This is only used when planar scenes are allowed. * * @return robust method to use for planar homography estimation. */ public RobustEstimatorMethod getRobustPlanarHomographyEstimatorMethod() { return robustPlanarHomographyEstimatorMethod; } /** * Sets robust method to use for planar homography estimation. * This is only used when planar scenes are allowed. * * @param robustPlanarHomographyEstimatorMethod robust method to use for planar * homography estimation. * @return this instance so that method can be easily chained. */ public T setRobustPlanarHomographyEstimatorMethod( final RobustEstimatorMethod robustPlanarHomographyEstimatorMethod) { this.robustPlanarHomographyEstimatorMethod = robustPlanarHomographyEstimatorMethod; //noinspection unchecked return (T) this; } /** * Indicates whether planar homography is refined using all found inliers or not. * This is only used when planar scenes are allowed. * * @return true if planar homography is refined, false otherwise. */ public boolean isPlanarHomographyRefined() { return refinePlanarHomography; } /** * Specifies whether planar homography is refined using all found inliers or not. * This is only used when planar scenes are allowed. * * @param refinePlanarHomography true if planar homography must be refined, false * otherwise. * @return this instance so that method can be easily chained. */ public T setPlanarHomographyRefined(final boolean refinePlanarHomography) { this.refinePlanarHomography = refinePlanarHomography; //noinspection unchecked return (T) this; } /** * Indicates whether planar homography covariance is kept after estimation. * This is only used when planar scenes are allowed. * * @return true if planar homography covariance is kept, false otherwise. */ public boolean isPlanarHomographyCovarianceKept() { return keepPlanarHomographyCovariance; } /** * Specifies whether planar homography covariance is kept after estimation. 
* This is only used when planar scenes are allowed. * * @param keepPlanarHomographyCovariance true if planar homography covariance is * kept, false otherwise. * @return this instance so that method can be easily chained. */ public T setPlanarHomographyCovarianceKept(final boolean keepPlanarHomographyCovariance) { this.keepPlanarHomographyCovariance = keepPlanarHomographyCovariance; //noinspection unchecked return (T) this; } /** * Gets confidence of robustly estimated planar homography. By default, this is 99%. * This is only used when planar scenes are allowed. * * @return confidence of robustly estimated planar homography. */ public double getPlanarHomographyConfidence() { return planarHomographyConfidence; } /** * Sets confidence of robustly estimated planar homography. By default, this is 99%. * This is only used when planar scenes are allowed. * * @param planarHomographyConfidence confidence of robustly estimated planar * homography. * @return this instance so that method can be easily chained. */ public T setPlanarHomographyConfidence(final double planarHomographyConfidence) { this.planarHomographyConfidence = planarHomographyConfidence; //noinspection unchecked return (T) this; } /** * Gets maximum number of iterations to make while robustly estimating planar * homography. By default, this is 5000. * This is only used when planar scenes are allowed. * * @return maximum number of iterations to make while robustly estimating planar * homography. */ public int getPlanarHomographyMaxIterations() { return planarHomographyMaxIterations; } /** * Sets maximum number of iterations to make while robustly estimating planar * homography. By default, this is 5000. * This is only used when planar scenes are allowed. * * @param planarHomographyMaxIterations maximum number of iterations to make while * robustly estimating planar homography. * @return this instance so that method can be easily chained. */ public T setPlanarHomographyMaxIterations(final int planarHomographyMaxIterations) { this.planarHomographyMaxIterations = planarHomographyMaxIterations; //noinspection unchecked return (T) this; } /** * Gets threshold to determine whether samples for robust projective 2D * transformation estimation are inliers or not. * This is only used when planar scenes are allowed. * * @return threshold to robustly estimate projective 2D transformation. */ public double getPlanarHomographyThreshold() { return planarHomographyThreshold; } /** * Sets threshold to determine whether samples for robust projective 2D * transformation estimation are inliers or not. * This is only used when planar scenes are allowed. * * @param planarHomographyThreshold threshold to robustly estimate projective 2D * transformation. * @return this instance so that method can be easily chained. */ public T setPlanarHomographyThreshold(final double planarHomographyThreshold) { this.planarHomographyThreshold = planarHomographyThreshold; //noinspection unchecked return (T) this; } /** * Gets value indicating that inlier data is kept after robust planar homography * estimation. * This is only used when planar scenes are allowed. * * @return true if inlier data is kept, false otherwise. */ public boolean getPlanarHomographyComputeAndKeepInliers() { return planarHomographyComputeAndKeepInliers; } /** * Specifies whether inlier data is kept after robust planar homography estimation. * This is only used when planar scenes are allowed. * * @param planarHomographyComputeAndKeepInliers true if inlier data is kept, false * otherwise. 
* @return this instance so that method can be easily chained. */ public T setPlanarHomographyComputeAndKeepInliers(final boolean planarHomographyComputeAndKeepInliers) { this.planarHomographyComputeAndKeepInliers = planarHomographyComputeAndKeepInliers; //noinspection unchecked return (T) this; } /** * Gets value indicating that residual data is kept after robust planar homography * estimation. * This is only used when planar scenes are allowed. * * @return true if residual data is kept, false otherwise. */ public boolean getPlanarHomographyComputeAndKeepResiduals() { return planarHomographyComputeAndKeepResiduals; } /** * Sets value indicating that residual data is kept after robust planar homography * estimation. * This is only used when planar scenes are allowed. * * @param planarHomographyComputeAndKeepResiduals true if residual data is kept, false * otherwise. * @return this instance so that method can be easily chained. */ public T setPlanarHomographyComputeAndKeepResiduals(final boolean planarHomographyComputeAndKeepResiduals) { this.planarHomographyComputeAndKeepResiduals = planarHomographyComputeAndKeepResiduals; //noinspection unchecked return (T) this; } |
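Because every setter returns the parameterized type `T`, the planar-scene options above can be configured fluently. A sketch against a hypothetical concrete configuration instance; the threshold value is illustrative only:

```java
// `configuration` is a hypothetical instance of a concrete configuration subclass
configuration
        .setGeneralSceneAllowed(true)
        .setPlanarSceneAllowed(true)
        .setRobustPlanarHomographyEstimatorMethod(RobustEstimatorMethod.PROMEDS)
        .setPlanarHomographyRefined(true)
        .setPlanarHomographyConfidence(0.99)     // documented default confidence
        .setPlanarHomographyMaxIterations(5000)  // documented default iteration cap
        .setPlanarHomographyThreshold(1e-3);     // hypothetical inlier threshold
```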
File | Line |
---|---|
com/irurueta/ar/slam/StatePredictor.java | 210 |
com/irurueta/ar/slam/StatePredictor.java | 793 |
q = QuaternionPredictor.predict(q, wx, wy, wz, dt, true, qq, qw); // set updated linear velocity vx = v[0]; vy = v[1]; vz = v[2]; // apply control signals vx += uvx; vy += uvy; vz += uvz; ax += uax; ay += uay; az += uaz; wx += uwx; wy += uwy; wz += uwz; // set new state result[0] = r.getInhomX(); result[1] = r.getInhomY(); result[2] = r.getInhomZ(); result[3] = q.getA(); result[4] = q.getB(); result[5] = q.getC(); result[6] = q.getD(); result[7] = vx; result[8] = vy; result[9] = vz; result[10] = ax; result[11] = ay; result[12] = az; result[13] = wx; result[14] = wy; result[15] = wz; // jacobians if (jacobianX != null) { // [Rr 0 Rv Ra 0 ] // [0 Qq 0 0 Qw ] // [0 0 Vv Va 0 ] // [0 0 0 eye 0 ] // [0 0 0 0 eye] jacobianX.initialize(0.0); jacobianX.setSubmatrix(0, 0, 2, 2, rr); jacobianX.setSubmatrix(3, 3, 6, 6, qq); jacobianX.setSubmatrix(0, 7, 2, 9, rv); jacobianX.setSubmatrix(7, 7, 9, 9, vv); jacobianX.setSubmatrix(0, 10, 2, 12, ra); jacobianX.setSubmatrix(7, 10, 9, 12, va); jacobianX.setSubmatrix(3, 13, 6, 15, qw); for (var i = 10; i < STATE_COMPONENTS; i++) { |
File | Line |
---|---|
com/irurueta/ar/slam/StatePredictor.java | 497 |
com/irurueta/ar/slam/StatePredictor.java | 793 |
q = QuaternionPredictor.predict(q, wx, wy, wz, dt, true, qq, qw); // set updated linear velocity vx = v[0]; vy = v[1]; vz = v[2]; // apply control signals vx += uvx; vy += uvy; vz += uvz; ax += uax; ay += uay; az += uaz; wx += uwx; wy += uwy; wz += uwz; // set new state result[0] = r.getInhomX(); result[1] = r.getInhomY(); result[2] = r.getInhomZ(); result[3] = q.getA(); result[4] = q.getB(); result[5] = q.getC(); result[6] = q.getD(); result[7] = vx; result[8] = vy; result[9] = vz; result[10] = ax; result[11] = ay; result[12] = az; result[13] = wx; result[14] = wy; result[15] = wz; // jacobians if (jacobianX != null) { // [Rr 0 Rv Ra 0 ] // [0 Qq 0 0 Qw ] // [0 0 Vv Va 0 ] // [0 0 0 eye 0 ] // [0 0 0 0 eye] jacobianX.initialize(0.0); jacobianX.setSubmatrix(0, 0, 2, 2, rr); jacobianX.setSubmatrix(3, 3, 6, 6, qq); jacobianX.setSubmatrix(0, 7, 2, 9, rv); jacobianX.setSubmatrix(7, 7, 9, 9, vv); jacobianX.setSubmatrix(0, 10, 2, 12, ra); jacobianX.setSubmatrix(7, 10, 9, 12, va); jacobianX.setSubmatrix(3, 13, 6, 15, qw); for (var i = 10; i < STATE_WITH_POSITION_ADJUSTMENT_COMPONENTS; i++) { |
File | Line |
---|---|
com/irurueta/ar/slam/StatePredictor.java | 210 |
com/irurueta/ar/slam/StatePredictor.java | 1095 |
q = QuaternionPredictor.predict(q, wx, wy, wz, dt, true, qq, qw); // set updated linear velocity vx = v[0]; vy = v[1]; vz = v[2]; // apply control signals vx += uvx; vy += uvy; vz += uvz; ax += uax; ay += uay; az += uaz; wx += uwx; wy += uwy; wz += uwz; // set new state result[0] = r.getInhomX(); result[1] = r.getInhomY(); result[2] = r.getInhomZ(); result[3] = q.getA(); result[4] = q.getB(); result[5] = q.getC(); result[6] = q.getD(); result[7] = vx; result[8] = vy; result[9] = vz; result[10] = ax; result[11] = ay; result[12] = az; result[13] = wx; result[14] = wy; result[15] = wz; // jacobians if (jacobianX != null) { // [Rr 0 Rv Ra 0 ] // [0 Qq 0 0 Qw ] // [0 0 Vv Va 0 ] // [0 0 0 eye 0 ] // [0 0 0 0 eye] jacobianX.initialize(0.0); jacobianX.setSubmatrix(0, 0, 2, 2, rr); jacobianX.setSubmatrix(3, 3, 6, 6, qq); jacobianX.setSubmatrix(0, 7, 2, 9, rv); jacobianX.setSubmatrix(7, 7, 9, 9, vv); jacobianX.setSubmatrix(0, 10, 2, 12, ra); jacobianX.setSubmatrix(7, 10, 9, 12, va); jacobianX.setSubmatrix(3, 13, 6, 15, qw); for (var i = 10; i < STATE_COMPONENTS; i++) { |
File | Line |
---|---|
com/irurueta/ar/slam/StatePredictor.java | 497 |
com/irurueta/ar/slam/StatePredictor.java | 1095 |
q = QuaternionPredictor.predict(q, wx, wy, wz, dt, true, qq, qw); // set updated linear velocity vx = v[0]; vy = v[1]; vz = v[2]; // apply control signals vx += uvx; vy += uvy; vz += uvz; ax += uax; ay += uay; az += uaz; wx += uwx; wy += uwy; wz += uwz; // set new state result[0] = r.getInhomX(); result[1] = r.getInhomY(); result[2] = r.getInhomZ(); result[3] = q.getA(); result[4] = q.getB(); result[5] = q.getC(); result[6] = q.getD(); result[7] = vx; result[8] = vy; result[9] = vz; result[10] = ax; result[11] = ay; result[12] = az; result[13] = wx; result[14] = wy; result[15] = wz; // jacobians if (jacobianX != null) { // [Rr 0 Rv Ra 0 ] // [0 Qq 0 0 Qw ] // [0 0 Vv Va 0 ] // [0 0 0 eye 0 ] // [0 0 0 0 eye] jacobianX.initialize(0.0); jacobianX.setSubmatrix(0, 0, 2, 2, rr); jacobianX.setSubmatrix(3, 3, 6, 6, qq); jacobianX.setSubmatrix(0, 7, 2, 9, rv); jacobianX.setSubmatrix(7, 7, 9, 9, vv); jacobianX.setSubmatrix(0, 10, 2, 12, ra); jacobianX.setSubmatrix(7, 10, 9, 12, va); jacobianX.setSubmatrix(3, 13, 6, 15, qw); for (var i = 10; i < STATE_WITH_POSITION_ADJUSTMENT_COMPONENTS; i++) { |
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/MSACRadialDistortionRobustEstimator.java | 211 |
com/irurueta/ar/calibration/estimators/PROSACRadialDistortionRobustEstimator.java | 373 |
final var innerEstimator = new MSACRobustEstimator<RadialDistortion>(new MSACRobustEstimatorListener<>() { // point to be reused when computing residuals private final Point2D testPoint = Point2D.create(CoordinatesType.INHOMOGENEOUS_COORDINATES); // non-robust radial distortion estimator private final LMSERadialDistortionEstimator radialDistortionEstimator = new LMSERadialDistortionEstimator(); // subset of distorted (i.e. measured) points private final List<Point2D> subsetDistorted = new ArrayList<>(); // subset of undistorted (i.e. ideal) points private final List<Point2D> subsetUndistorted = new ArrayList<>(); @Override public double getThreshold() { return threshold; } @Override public int getTotalSamples() { return distortedPoints.size(); } @Override public int getSubsetSize() { return RadialDistortionRobustEstimator.MIN_NUMBER_OF_POINTS; } @Override public void estimatePreliminarSolutions( final int[] samplesIndices, final List<RadialDistortion> solutions) { subsetDistorted.clear(); subsetDistorted.add(distortedPoints.get(samplesIndices[0])); subsetDistorted.add(distortedPoints.get(samplesIndices[1])); subsetUndistorted.clear(); subsetUndistorted.add(undistortedPoints.get(samplesIndices[0])); subsetUndistorted.add(undistortedPoints.get(samplesIndices[1])); try { radialDistortionEstimator.setPoints(subsetDistorted, subsetUndistorted); final var distortion = radialDistortionEstimator.estimate(); solutions.add(distortion); } catch (final Exception e) { // if anything fails, no solution is added } } @Override public double computeResidual(final RadialDistortion currentEstimation, final int i) { final var distortedPoint = distortedPoints.get(i); final var undistortedPoint = undistortedPoints.get(i); currentEstimation.distort(undistortedPoint, testPoint); return testPoint.distanceTo(distortedPoint); } @Override public boolean isReady() { return MSACRadialDistortionRobustEstimator.this.isReady(); |
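The residual that drives the MSAC/PROSAC inlier classification is the distance between the measured (distorted) point and the ideal point distorted with the candidate model. The same computation in isolation, as a hypothetical helper:

```java
// residual used by the robust radial-distortion estimators above
static double residual(final RadialDistortion candidate,
                       final Point2D undistorted, final Point2D measured) {
    final var test = Point2D.create(CoordinatesType.INHOMOGENEOUS_COORDINATES);
    candidate.distort(undistorted, test); // apply candidate distortion model
    return test.distanceTo(measured);
}
```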
File | Line |
---|---|
com/irurueta/ar/sfm/MSACRobustSinglePoint3DTriangulator.java | 176 |
com/irurueta/ar/sfm/PROSACRobustSinglePoint3DTriangulator.java | 292 |
com/irurueta/ar/sfm/RANSACRobustSinglePoint3DTriangulator.java | 176 |
final var innerEstimator = new MSACRobustEstimator<Point3D>(new MSACRobustEstimatorListener<>() { // point to be reused when computing residuals private final Point2D testPoint = Point2D.create(CoordinatesType.HOMOGENEOUS_COORDINATES); // non-robust 3D point triangulator private final SinglePoint3DTriangulator triangulator = SinglePoint3DTriangulator.create( useHomogeneousSolution ? Point3DTriangulatorType.LMSE_HOMOGENEOUS_TRIANGULATOR : Point3DTriangulatorType.LMSE_INHOMOGENEOUS_TRIANGULATOR); // subset of 2D points private final List<Point2D> subsetPoints = new ArrayList<>(); // subset of cameras private final List<PinholeCamera> subsetCameras = new ArrayList<>(); @Override public double getThreshold() { return threshold; } @Override public int getTotalSamples() { return points2D.size(); } @Override public int getSubsetSize() { return MIN_REQUIRED_VIEWS; } @Override public void estimatePreliminarSolutions(final int[] samplesIndices, final List<Point3D> solutions) { subsetPoints.clear(); subsetPoints.add(points2D.get(samplesIndices[0])); subsetPoints.add(points2D.get(samplesIndices[1])); subsetCameras.clear(); subsetCameras.add(cameras.get(samplesIndices[0])); subsetCameras.add(cameras.get(samplesIndices[1])); try { triangulator.setPointsAndCameras(subsetPoints, subsetCameras); final var triangulated = triangulator.triangulate(); solutions.add(triangulated); } catch (final Exception e) { // if anything fails, no solution is added } } @Override public double computeResidual(final Point3D currentEstimation, final int i) { final var point2D = points2D.get(i); final var camera = cameras.get(i); // project estimated point with camera camera.project(currentEstimation, testPoint); // return distance of projected point with respect to the original one // as a residual return testPoint.distanceTo(point2D); } @Override public boolean isReady() { return MSACRobustSinglePoint3DTriangulator.this.isReady(); |
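For the triangulators the residual is a reprojection distance: a candidate 3D point is an inlier in a given view when that distance falls under the threshold. A sketch of the test in isolation, again as a hypothetical helper:

```java
// MSAC/RANSAC inlier test for a candidate triangulated point in one view
static boolean isInlier(final PinholeCamera camera, final Point3D candidate,
                        final Point2D measured, final double threshold) {
    final var projected = Point2D.create(CoordinatesType.HOMOGENEOUS_COORDINATES);
    camera.project(candidate, projected); // reproject candidate into the view
    return projected.distanceTo(measured) <= threshold;
}
```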
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 247 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 389 |
homography.asMatrix(h); if (t == null) { t = new ProjectiveTransformation2D(h); } else { t.setT(h); } // normalize t.normalize(); // obtain elements of projective transformation matrix // there is no need to retrieve internal matrix h, as we already // hold a reference h11 = h.getElementAt(0, 0); h12 = h.getElementAt(0, 1); h21 = h.getElementAt(1, 0); h22 = h.getElementAt(1, 1); h31 = h.getElementAt(2, 0); h32 = h.getElementAt(2, 1); // fill first equation a.setElementAt(counter, 0, h11 * h12); a.setElementAt(counter, 1, h11 * h22 + h21 * h12); a.setElementAt(counter, 2, h21 * h22); a.setElementAt(counter, 3, h11 * h32 + h31 * h12); a.setElementAt(counter, 4, h21 * h32 + h31 * h22); a.setElementAt(counter, 5, h31 * h32); // normalize row rowNorm = Math.sqrt(Math.pow(a.getElementAt(counter, 0), 2.0) + Math.pow(a.getElementAt(counter, 1), 2.0) + Math.pow(a.getElementAt(counter, 2), 2.0) + Math.pow(a.getElementAt(counter, 3), 2.0) + Math.pow(a.getElementAt(counter, 4), 2.0) + Math.pow(a.getElementAt(counter, 5), 2.0)); |
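The six coefficients written into each row are the expansion of the standard Zhang-style calibration constraint relating the image of the absolute conic ω (a symmetric 3x3 conic with six distinct entries) to the first two homography columns h₁ = (h11, h21, h31) and h₂ = (h12, h22, h32):

```latex
h_1^{\top}\,\omega\,h_2
  = \omega_{11} h_{11} h_{12}
  + \omega_{12}\,(h_{11} h_{22} + h_{21} h_{12})
  + \omega_{22} h_{21} h_{22}
  + \omega_{13}\,(h_{11} h_{32} + h_{31} h_{12})
  + \omega_{23}\,(h_{21} h_{32} + h_{31} h_{22})
  + \omega_{33} h_{31} h_{32}
  = 0
```

The `rowNorm` computation that follows divides each equation by the Euclidean norm of its six coefficients so that all constraints carry comparable numerical weight.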
File | Line |
---|---|
com/irurueta/ar/slam/ConstantVelocityModelStatePredictor.java | 477 |
com/irurueta/ar/slam/StatePredictor.java | 564 |
} if (jacobianU != null) { jacobianU.initialize(0.0); // variation of position for (var i = 0; i < Point3D.POINT3D_INHOMOGENEOUS_COORDINATES_LENGTH; i++) { jacobianU.setElementAt(i, i, 1.0); } // variation of linear and angular speed for (int i = 7, j = Point3D.POINT3D_INHOMOGENEOUS_COORDINATES_LENGTH; i < STATE_WITH_POSITION_ADJUSTMENT_COMPONENTS; i++, j++) { jacobianU.setElementAt(i, j, 1.0); } } } catch (final WrongSizeException ignore) { // never thrown } } /** * Updates the system model (position, orientation, linear velocity and * angular velocity) assuming a constant velocity model (without * acceleration) when no velocity control signal is present. * * @param x initial system state containing: position-x, position-y, * position-z, quaternion-a, quaternion-b, quaternion-c, quaternion-d, * linear-velocity-x, linear-velocity-y, linear-velocity-z, * angular-velocity-x, angular-velocity-y, angular-velocity-z. Must have * length 13. * @param u linear and angular velocity perturbations or controls and * position perturbations or controls: position-change-x, position-change-y, * position-change-z, linear-velocity-change-x, linear-velocity-change-y, * linear-velocity-change-z, angular-velocity-change-x, * angular-velocity-change-y, angular-velocity-change-z. Must have length 9. * @param dt time interval to compute prediction expressed in seconds. * @param result instance where updated system model will be stored. Must * have length 13. * @throws IllegalArgumentException if system state array, control array * or result array do not have proper size. */ public static void predictWithPositionAdjustment( final double[] x, final double[] u, final double dt, final double[] result) { predictWithPositionAdjustment(x, u, dt, result, null, null); } /** * Updates the system model (position, orientation, linear velocity and * angular velocity) assuming a constant velocity model (without * acceleration) when no velocity control signal is present. * * @param x initial system state containing: position-x, position-y, * position-z, quaternion-a, quaternion-b, quaternion-c, quaternion-d, * linear-velocity-x, linear-velocity-y, linear-velocity-z, * angular-velocity-x, angular-velocity-y, angular-velocity-z. Must have * length 13. * @param u linear and angular velocity perturbations or controls and * position perturbations or controls: position-change-x, position-change-y, * position-change-z, linear-velocity-change-x, linear-velocity-change-y, * linear-velocity-change-z, angular-velocity-change-x, * angular-velocity-change-y, angular-velocity-change-z. Must have length 9. * @param dt time interval to compute prediction expressed in seconds. * @param jacobianX jacobian wrt system state. Must be 13x13. * @param jacobianU jacobian wrt control. Must be 13x9. * @return a new instance containing updated system model. * @throws IllegalArgumentException if system state array, control array * or jacobians do not have proper size. */ public static double[] predictWithPositionAdjustment( final double[] x, final double[] u, final double dt, final Matrix jacobianX, final Matrix jacobianU) { final var result = new double[STATE_WITH_POSITION_ADJUSTMENT_COMPONENTS]; predictWithPositionAdjustment(x, u, dt, result, jacobianX, jacobianU); return result; } /** * Updates the system model (position, orientation, linear velocity and * angular velocity) assuming a constant velocity model (without * acceleration) when no velocity control signal is present. 
* * @param x initial system state containing: position-x, position-y, * position-z, quaternion-a, quaternion-b, quaternion-c, quaternion-d, * linear-velocity-x, linear-velocity-y, linear-velocity-z, * angular-velocity-x, angular-velocity-y, angular-velocity-z. Must have * length 13. * @param u linear and angular velocity perturbations or controls and * position perturbations or controls: position-change-x, position-change-y, * position-change-z, linear-velocity-change-x, linear-velocity-change-y, * linear-velocity-change-z, angular-velocity-change-x, * angular-velocity-change-y, angular-velocity-change-z. Must have length 9. * @param dt time interval to compute prediction expressed in seconds. * @return a new instance containing updated system model. * @throws IllegalArgumentException if system state or control array do not * have proper size. */ public static double[] predictWithPositionAdjustment(final double[] x, final double[] u, final double dt) { final var result = new double[STATE_WITH_POSITION_ADJUSTMENT_COMPONENTS]; predictWithPositionAdjustment(x, u, dt, result); return result; } /** * Updates the system model (position, orientation, linear velocity and * angular velocity) assuming a constant velocity model (without * acceleration) when no velocity control signal is present. * * @param x initial system state containing: position-x, position-y, * position-z, quaternion-a, quaternion-b, quaternion-c, quaternion-d, * linear-velocity-x, linear-velocity-y, linear-velocity-z, * angular-velocity-x, angular-velocity-y, angular-velocity-z. Must have * length 13. * @param u linear and angular velocity perturbations or controls, and * rotation perturbations or controls: quaternion-change-a, * quaternion-change-b, quaternion-change-c, quaternion-change-d, * linear-velocity-change-x, linear-velocity-change-y, * linear-velocity-change-z, angular-velocity-change-x, * angular-velocity-change-y, angular-velocity-change-z. Must have length * 10. * @param dt time interval to compute prediction expressed in seconds. * @param result instance where updated system model will be stored. Must * have length 13. * @param jacobianX jacobian wrt system state. Must be 13x13. * @param jacobianU jacobian wrt control. Must be 13x10. * @throws IllegalArgumentException if system state array, control array, * result or jacobians do not have proper size. */ public static void predictWithRotationAdjustment( final double[] x, final double[] u, final double dt, final double[] result, final Matrix jacobianX, final Matrix jacobianU) { if (x.length != STATE_WITH_ROTATION_ADJUSTMENT_COMPONENTS) { throw new IllegalArgumentException("x must have length 13"); |
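A sketch of invoking the rotation-adjustment variant quoted above, assuming the `ConstantVelocityModelStatePredictor` version with its 13-component state. The control values are hypothetical, the quaternion part of `u` is set to identity so no extra rotation is injected, and passing null jacobians is assumed to be allowed by analogy with the position-adjustment variant:

```java
final double[] x = new double[13];
x[3] = 1.0; // identity orientation quaternion

// control: quaternion change (a, b, c, d) followed by linear and angular
// velocity perturbations; must have length 10
final double[] u = new double[10];
u[0] = 1.0; // identity quaternion change

final double[] result = new double[13];
ConstantVelocityModelStatePredictor.predictWithRotationAdjustment(
        x, u, 0.02, result, null, null); // jacobians assumed optional
```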
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 757 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 500 |
if (samples.length != MIN_NUMBER_OF_VIEWS) { return false; } leftSamples.add(samples[0]); rightSamples.add(samples[1]); final var leftPoint = Point2D.create(); leftPoint.setInhomogeneousCoordinates( samples[0].getPoint().getInhomX() - principalPointX, samples[0].getPoint().getInhomY() - principalPointY); leftPoints.add(leftPoint); final var rightPoint = Point2D.create(); rightPoint.setInhomogeneousCoordinates( samples[1].getPoint().getInhomX() - principalPointX, samples[1].getPoint().getInhomY() - principalPointY); rightPoints.add(rightPoint); qualityScores[i] = match.getQualityScore(); i++; } try { final var estimator = FundamentalMatrixRobustEstimator.create(leftPoints, rightPoints, qualityScores, configuration.getRobustFundamentalMatrixEstimatorMethod()); estimator.setNonRobustFundamentalMatrixEstimatorMethod( configuration.getNonRobustFundamentalMatrixEstimatorMethod()); estimator.setResultRefined(configuration.isFundamentalMatrixRefined()); estimator.setCovarianceKept(configuration.isFundamentalMatrixCovarianceKept()); estimator.setConfidence(configuration.getFundamentalMatrixConfidence()); estimator.setMaxIterations(configuration.getFundamentalMatrixMaxIterations()); switch (configuration.getRobustFundamentalMatrixEstimatorMethod()) { case LMEDS: ((LMedSFundamentalMatrixRobustEstimator) estimator).setStopThreshold( configuration.getFundamentalMatrixThreshold()); break; case MSAC: ((MSACFundamentalMatrixRobustEstimator) estimator).setThreshold( configuration.getFundamentalMatrixThreshold()); break; case PROMEDS: ((PROMedSFundamentalMatrixRobustEstimator) estimator).setStopThreshold( configuration.getFundamentalMatrixThreshold()); break; case PROSAC: |
File | Line |
---|---|
com/irurueta/ar/slam/ConstantVelocityModelStatePredictor.java | 666 |
com/irurueta/ar/slam/ConstantVelocityModelStatePredictor.java | 924 |
PositionPredictor.predict(r, vx, vy, vz, dt, r, rr, rv, null); // update orientation Matrix qq = null; Matrix qdq = null; Matrix qw = null; if (jacobianX != null) { qq = new Matrix(Quaternion.N_PARAMS, Quaternion.N_PARAMS); qw = new Matrix(Quaternion.N_PARAMS, ANGULAR_SPEED_COMPONENTS); } if (jacobianU != null) { qdq = new Matrix(Quaternion.N_PARAMS, Quaternion.N_PARAMS); } q = QuaternionPredictor.predictWithRotationAdjustment(q, dq, wx, wy, wz, dt, qq, qdq, qw); // apply control signals vx += uvx; vy += uvy; vz += uvz; wx += uwx; wy += uwy; wz += uwz; // set new state result[0] = r.getInhomX(); result[1] = r.getInhomY(); result[2] = r.getInhomZ(); result[3] = q.getA(); result[4] = q.getB(); result[5] = q.getC(); result[6] = q.getD(); result[7] = vx; result[8] = vy; result[9] = vz; result[10] = wx; result[11] = wy; result[12] = wz; // jacobians if (jacobianX != null) { // [Rr 0 Rv 0 ] // [0 Qq 0 Qw ] // [0 0 eye 0 ] // [0 0 0 eye] jacobianX.initialize(0.0); jacobianX.setSubmatrix(0, 0, 2, 2, rr); jacobianX.setSubmatrix(3, 3, 6, 6, qq); jacobianX.setSubmatrix(0, 7, 2, 9, rv); for (var i = 7; i < STATE_WITH_ROTATION_ADJUSTMENT_COMPONENTS; i++) { |
File | Line |
---|---|
com/irurueta/ar/sfm/DualImageOfAbsoluteConicInitialCamerasEstimator.java | 398 |
com/irurueta/ar/sfm/EssentialMatrixInitialCamerasEstimator.java | 535 |
} /** * Gets matched 2D points on left view. * * @return matched 2D points on left view. */ public List<Point2D> getLeftPoints() { return leftPoints; } /** * Sets matched 2D points on left view. * * @param leftPoints matched 2D points on left view. * @throws LockedException if estimator is locked. */ public void setLeftPoints(final List<Point2D> leftPoints) throws LockedException { if (isLocked()) { throw new LockedException(); } this.leftPoints = leftPoints; } /** * Gets matched 2D points on right view. * * @return matched 2D points on right view. */ public List<Point2D> getRightPoints() { return rightPoints; } /** * Sets matched 2D points on right view. * * @param rightPoints matched 2D points on right view. * @throws LockedException if estimator is locked. */ public void setRightPoints(final List<Point2D> rightPoints) throws LockedException { if (isLocked()) { throw new LockedException(); } this.rightPoints = rightPoints; } /** * Sets matched 2D points on left and right views. * * @param leftPoints matched 2D points on left view. * @param rightPoints matched 2D points on right view. * @throws LockedException if estimator is locked. * @throws IllegalArgumentException if provided lists don't have the same * size. */ public void setLeftAndRightPoints(final List<Point2D> leftPoints, final List<Point2D> rightPoints) throws LockedException { if (isLocked()) { throw new LockedException(); } internalSetLeftAndRightPoints(leftPoints, rightPoints); } /** * Gets type of corrector to use to triangulate matched points or null if * no corrector needs to be used. * * @return type of corrector to use. */ public CorrectorType getCorrectorType() { return correctorType; } /** * Sets type of corrector to use to triangulate matched points or null if * no corrector needs to be used. * * @param correctorType type of corrector to use. * @throws LockedException if estimator is locked. */ public void setCorrectorType(final CorrectorType correctorType) throws LockedException { if (isLocked()) { throw new LockedException(); } this.correctorType = correctorType; } /** * Indicates whether matched 2D points need to be triangulated or not. * * @return true if 2D points need to be triangulated, false otherwise. */ public boolean arePointsTriangulated() { return triangulatePoints; } /** * Specifies whether matched 2D points need to be triangulated or not. * * @param triangulatePoints true if 2D points need to be triangulated, false * otherwise. * @throws LockedException if estimator is locked. */ public void setPointsTriangulated(final boolean triangulatePoints) throws LockedException { if (isLocked()) { throw new LockedException(); } this.triangulatePoints = triangulatePoints; } /** * Indicates which triangulated points are marked as valid (lie in front * of both of the estimated cameras) and which ones aren't. * * @return true to mark valid and invalid triangulated points, false * otherwise. */ public boolean areValidTriangulatedPointsMarked() { return markValidTriangulatedPoints; } /** * Specifies whether triangulated points are marked as valid (lie in front * of both of the estimated cameras) and which ones aren't. * * @param markValidTriangulatedPoints true to mark valid and invalid * triangulated points, false otherwise. * @throws LockedException if estimator is locked. 
*/ public void setValidTriangulatedPointsMarked(final boolean markValidTriangulatedPoints) throws LockedException { if (isLocked()) { throw new LockedException(); } this.markValidTriangulatedPoints = markValidTriangulatedPoints; } /** * Gets triangulated points, if available. * * @return triangulated points or null. */ public List<Point3D> getTriangulatedPoints() { return triangulatedPoints; } /** * Gets bitset indicating which of the triangulated points are valid and * which ones aren't. * * @return bitset indicating validity of triangulated points or null if not * available. */ public BitSet getValidTriangulatedPoints() { return validTriangulatedPoints; } /** * Generates a pair of metric cameras (up to an arbitrary scale) by * estimating the intrinsic parameters of the views by solving the Kruppa * equations to obtain the Dual Image of Absolute Conic (DIAC). * The estimated intrinsic parameters can later be used to find the * essential matrix (assuming that both views have the same intrinsic * parameters), and the essential matrix along with provided matched 2D * points can be used to determine the best pair of camera pose and * translation that yields the largest number of triangulated points lying * in front of both of the estimated cameras. * This method uses default corrector type, does not keep triangulated * points or valid triangulated points, and uses default aspect ratio (1.0). * * @param fundamentalMatrix fundamental matrix relating both left and right * views. * @param principalPointX horizontal coordinate of principal point. This * value should be the coordinates of the center of an image assuming that * the coordinates start on the top-left or bottom-left corner. Using a * value close to zero will produce inaccurate results. * @param principalPointY vertical coordinate of principal point. This * value should be the coordinates of the center of an image assuming that * the coordinates start on the top-left or bottom-left corner. Using a * value close to zero will produce inaccurate results. * @param leftPoints points on left view matched with points on right view, * so they can be triangulated using estimated cameras. Both lists of points * must have the same size. * @param rightPoints points on right view matched with points on left view, * so they can be triangulated using estimated cameras. Both lists of points * must have the same size. * @param leftCamera instance where estimated left camera will be stored. * @param rightCamera instance where estimated right camera will be stored. * @return number of valid triangulated points which lie in front of the two * estimated cameras. * @throws InitialCamerasEstimationFailedException if estimation of cameras * fails for some reason, typically due to numerical * instabilities. * @throws IllegalArgumentException if provided lists of left and right * points don't have the same size. */ public static int generateInitialMetricCamerasUsingDIAC( |
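The javadoc above ends at the signature of `generateInitialMetricCamerasUsingDIAC`, which the duplication cuts off. Below is a sketch of how it would be invoked, with the parameter order taken from the `@param` tags; the portion of the signature beyond what is quoted, and the package names, are assumptions.

```java
import com.irurueta.ar.epipolar.FundamentalMatrix;
import com.irurueta.ar.sfm.DualImageOfAbsoluteConicInitialCamerasEstimator;
import com.irurueta.geometry.PinholeCamera;
import com.irurueta.geometry.Point2D;

import java.util.List;

final class DiacCamerasHelper {
    static int estimateCameras(final FundamentalMatrix fundamentalMatrix,
                               final List<Point2D> leftPoints,
                               final List<Point2D> rightPoints) throws Exception {
        // principal point at the image center, e.g. for a 640x480 image
        final double principalPointX = 320.0;
        final double principalPointY = 240.0;
        final var leftCamera = new PinholeCamera();
        final var rightCamera = new PinholeCamera();
        // returns the number of triangulated points lying in front of
        // both estimated cameras
        return DualImageOfAbsoluteConicInitialCamerasEstimator
                .generateInitialMetricCamerasUsingDIAC(fundamentalMatrix,
                        principalPointX, principalPointY,
                        leftPoints, rightPoints, leftCamera, rightCamera);
    }
}
```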
File | Line |
---|---|
com/irurueta/ar/slam/AbsoluteOrientationConstantVelocityModelSlamEstimator.java | 401 |
com/irurueta/ar/slam/AbsoluteOrientationSlamEstimator.java | 425 |
lastAngularSpeedX = stateAngularSpeedX; lastAngularSpeedY = stateAngularSpeedY; lastAngularSpeedZ = stateAngularSpeedZ; lastTimestampNanos = timestamp; if (listener != null) { listener.onFullSampleProcessed(this); } } /** * Resets position, linear velocity, linear acceleration, orientation and * angular speed to provided values. * This method implementation also resets Kalman filter state. * * @param statePositionX position along x-axis expressed in meters (m). * @param statePositionY position along y-axis expressed in meters (m). * @param statePositionZ position along z-axis expressed in meters (m). * @param stateVelocityX linear velocity along x-axis expressed in meters * per second (m/s). * @param stateVelocityY linear velocity along y-axis expressed in meters * per second (m/s). * @param stateVelocityZ linear velocity along z-axis expressed in meters * per second (m/s). * @param stateAccelerationX linear acceleration along x-axis expressed in * meters per squared second (m/s^2). * @param stateAccelerationY linear acceleration along y-axis expressed in * meters per squared second (m/s^2). * @param stateAccelerationZ linear acceleration along z-axis expressed in * meters per squared second (m/s^2). * @param stateQuaternionA A value of orientation quaternion. * @param stateQuaternionB B value of orientation quaternion. * @param stateQuaternionC C value of orientation quaternion. * @param stateQuaternionD D value of orientation quaternion. * @param stateAngularSpeedX angular speed along x-axis expressed in radians * per second (rad/s). * @param stateAngularSpeedY angular speed along y-axis expressed in radians * per second (rad/s). * @param stateAngularSpeedZ angular speed along z-axis expressed in radians * per second (rad/s). */ @Override protected void reset( final double statePositionX, final double statePositionY, final double statePositionZ, final double stateVelocityX, final double stateVelocityY, final double stateVelocityZ, final double stateAccelerationX, final double stateAccelerationY, final double stateAccelerationZ, final double stateQuaternionA, final double stateQuaternionB, final double stateQuaternionC, final double stateQuaternionD, final double stateAngularSpeedX, final double stateAngularSpeedY, final double stateAngularSpeedZ) { super.reset(statePositionX, statePositionY, statePositionZ, stateVelocityX, stateVelocityY, stateVelocityZ, stateAccelerationX, stateAccelerationY, stateAccelerationZ, stateQuaternionA, stateQuaternionB, stateQuaternionC, stateQuaternionD, stateAngularSpeedX, stateAngularSpeedY, stateAngularSpeedZ); //noinspection ConstantValue if (stateOrientation != null) { stateOrientation.setA(stateQuaternionA); stateOrientation.setB(stateQuaternionB); stateOrientation.setC(stateQuaternionC); stateOrientation.setD(stateQuaternionD); } //noinspection ConstantValue if (lastOrientation != null) { lastOrientation.setA(1.0); lastOrientation.setB(0.0); lastOrientation.setC(0.0); lastOrientation.setD(0.0); } if (x != null) { // position x[0] = statePositionX; x[1] = statePositionY; x[2] = statePositionZ; // quaternion x[3] = stateQuaternionA; x[4] = stateQuaternionB; x[5] = stateQuaternionC; x[6] = stateQuaternionD; // velocity x[7] = stateVelocityX; x[8] = stateVelocityY; x[9] = stateVelocityZ; // angular speed x[10] = stateAngularSpeedX; |
File | Line |
---|---|
com/irurueta/ar/slam/ConstantVelocityModelStatePredictor.java | 726 |
com/irurueta/ar/slam/StatePredictor.java | 861 |
} if (jacobianU != null) { jacobianU.initialize(0.0); // variation of rotation jacobianU.setSubmatrix(3, 0, 6, 3, qdq); // variation of linear and angular speed for (int i = 7, j = Quaternion.N_PARAMS; i < STATE_WITH_ROTATION_ADJUSTMENT_COMPONENTS; i++, j++) { jacobianU.setElementAt(i, j, 1.0); } } } catch (final WrongSizeException ignore) { // never thrown } } /** * Updates the system model (position, orientation, linear velocity and * angular velocity) assuming a constant velocity model (without * acceleration) when no velocity control signal is present. * * @param x initial system state containing: position-x, position-y, * position-z, quaternion-a, quaternion-b, quaternion-c, quaternion-d, * linear-velocity-x, linear-velocity-y, linear-velocity-z, * angular-velocity-x, angular-velocity-y, angular-velocity-z. Must have * length 13. * @param u linear and angular velocity perturbations or controls, and * rotation perturbations or controls: quaternion-change-a, * quaternion-change-b, quaternion-change-c, quaternion-change-d, * linear-velocity-change-x, linear-velocity-change-y, * linear-velocity-change-z, angular-velocity-change-x, * angular-velocity-change-y, angular-velocity-change-z. Must have length * 10. * @param dt time interval to compute prediction expressed in seconds. * @param result instance where updated system model will be stored. Must * have length 13. * @throws IllegalArgumentException if system state array, control array or * result do not have proper length. */ public static void predictWithRotationAdjustment( final double[] x, final double[] u, final double dt, final double[] result) { predictWithRotationAdjustment(x, u, dt, result, null, null); } /** * Updates the system model (position, orientation, linear velocity and * angular velocity) assuming a constant velocity model (without * acceleration) when no velocity control signal is present. * * @param x initial system state containing: position-x, position-y, * position-z, quaternion-a, quaternion-b, quaternion-c, quaternion-d, * linear-velocity-x, linear-velocity-y, linear-velocity-z, * angular-velocity-x, angular-velocity-y, angular-velocity-z. Must have * length 13. * @param u linear and angular velocity perturbations or controls, and * rotation perturbations or controls: quaternion-change-a, * quaternion-change-b, quaternion-change-c, quaternion-change-d, * linear-velocity-change-x, linear-velocity-change-y, * linear-velocity-change-z, angular-velocity-change-x, * angular-velocity-change-y, angular-velocity-change-z. Must have length * 10. * @param dt time interval to compute prediction expressed in seconds. * @param jacobianX jacobian wrt system state. Must be 13x13. * @param jacobianU jacobian wrt control. Must be 13x10. * @return a new array containing updated system model. * @throws IllegalArgumentException if system state array, control array * or jacobians do not have proper size. */ public static double[] predictWithRotationAdjustment( final double[] x, final double[] u, final double dt, final Matrix jacobianX, final Matrix jacobianU) { final var result = new double[STATE_WITH_ROTATION_ADJUSTMENT_COMPONENTS]; predictWithRotationAdjustment(x, u, dt, result, jacobianX, jacobianU); return result; } /** * Updates the system model (position, orientation, linear velocity and * angular velocity) assuming a constant velocity model (without * acceleration) when no velocity control signal is present. 
* * @param x initial system state containing: position-x, position-y, * position-z, quaternion-a, quaternion-b, quaternion-c, quaternion-d, * linear-velocity-x, linear-velocity-y, linear-velocity-z, * angular-velocity-x, angular-velocity-y, angular-velocity-z. Must have * length 13. * @param u linear and angular velocity perturbations or controls, and * rotation perturbations or controls: quaternion-change-a, * quaternion-change-b, quaternion-change-c, quaternion-change-d, * linear-velocity-change-x, linear-velocity-change-y, * linear-velocity-change-z, angular-velocity-change-x, * angular-velocity-change-y, angular-velocity-change-z. Must have length * 10. * @param dt time interval to compute prediction expressed in seconds. * @return a new array containing updated system model. * @throws IllegalArgumentException if system state array or control array * do not have proper size. */ public static double[] predictWithRotationAdjustment(final double[] x, final double[] u, final double dt) { final var result = new double[STATE_WITH_ROTATION_ADJUSTMENT_COMPONENTS]; predictWithRotationAdjustment(x, u, dt, result); return result; } /** * Updates the system model (position, orientation, linear velocity and * angular velocity) assuming a constant velocity model (without * acceleration) when no velocity control signal is present. * * @param x initial system state containing: position-x, position-y, * position-z, quaternion-a, quaternion-b, quaternion-c, quaternion-d, * linear-velocity-x, linear-velocity-y, linear-velocity-z, * angular-velocity-x, angular-velocity-y, angular-velocity-z. Must have * length 13. * @param u linear and angular velocity perturbations or controls, position * perturbations or controls and rotation perturbation or control: * position-change-x, position-change-y, position-change-z, * quaternion-change-a, quaternion-change-b, quaternion-change-c, * quaternion-change-d, linear-velocity-change-x, linear-velocity-change-y, * linear-velocity-change-z, angular-velocity-change-x, * angular-velocity-change-y, angular-velocity-change-z. Must have length * 13. * @param dt time interval to compute prediction expressed in seconds. * @param result instance where updated system model will be stored. Must * have length 13. * @param jacobianX jacobian wrt system state. Must be 13x13. * @param jacobianU jacobian wrt control. Must be 13x13. * @throws IllegalArgumentException if system state array, control array, * result or jacobians do not have proper size. */ public static void predictWithPositionAndRotationAdjustment( final double[] x, final double[] u, final double dt, final double[] result, final Matrix jacobianX, final Matrix jacobianU) { if (x.length != STATE_WITH_POSITION_AND_ROTATION_ADJUSTMENT_COMPONENTS) { throw new IllegalArgumentException("x must have length 13"); |
File | Line |
---|---|
com/irurueta/ar/sfm/WeightedHomogeneousSinglePoint3DTriangulator.java | 161 |
com/irurueta/ar/sfm/WeightedInhomogeneousSinglePoint3DTriangulator.java | 161 |
public WeightedHomogeneousSinglePoint3DTriangulator( final List<Point2D> points2D, final List<PinholeCamera> cameras, final double[] weights, final SinglePoint3DTriangulatorListener listener) { this(listener); internalSetPointsCamerasAndWeights(points2D, cameras, weights); } /** * Returns weights assigned to each view. * The larger a weight is the more reliable a view is considered and * equations to triangulate a 3D point will take precedence over other * views when estimating an averaged solution. * * @return weights assigned to each view. */ public double[] getWeights() { return weights; } /** * Sets list of matched 2D points for each view and their corresponding * cameras used to project them along with their weights. * * @param points2D list of matched 2D points on each view. Each point in the * list is assumed to be projected by the corresponding camera in the list. * @param cameras cameras for each view where 2D points are represented. * @param weights weights assigned to each view. * @throws LockedException if this instance is locked. * @throws IllegalArgumentException if provided lists don't have the same * length or their length is less than 2 views, which is the minimum * required to compute triangulation. */ public void setPointsCamerasAndWeights( final List<Point2D> points2D, final List<PinholeCamera> cameras, final double[] weights) throws LockedException { if (isLocked()) { throw new LockedException(); } internalSetPointsCamerasAndWeights(points2D, cameras, weights); } /** * Indicates whether this instance is ready to start the triangulation. * An instance is ready when both lists of 2D points and cameras are * provided, both lists have the same length, at least data for 2 views * is provided and the corresponding weights are also provided. * * @return true if this instance is ready, false otherwise. */ @Override public boolean isReady() { return areValidPointsCamerasAndWeights(points2D, cameras, weights); } /** * Indicates whether provided points, cameras and weights are valid to start * the triangulation. * In order to triangulate points, at least two cameras and their * corresponding 2 matched 2D points are required along with weights for * each view. * If more views are provided, an averaged solution can be found. * * @param points2D list of matched points on each view. * @param cameras cameras for each view where 2D points are represented. * @param weights weights assigned to each view. * @return true if data is enough to start triangulation, false otherwise. */ public static boolean areValidPointsCamerasAndWeights( final List<Point2D> points2D, final List<PinholeCamera> cameras, final double[] weights) { return areValidPointsAndCameras(points2D, cameras) && weights != null && weights.length == points2D.size(); } /** * Returns maximum number of correspondences to be weighted and taken into * account. * * @return maximum number of correspondences to be weighted. */ public int getMaxCorrespondences() { return maxCorrespondences; } /** * Sets maximum number of correspondences to be weighted and taken into * account. * * @param maxCorrespondences maximum number of correspondences to be * weighted. * @throws IllegalArgumentException if provided value is less than the * minimum required number of views, which is 2. * @throws LockedException if this instance is locked. 
*/ public void setMaxCorrespondences(final int maxCorrespondences) throws LockedException { if (isLocked()) { throw new LockedException(); } if (maxCorrespondences < MIN_REQUIRED_VIEWS) { throw new IllegalArgumentException(); } this.maxCorrespondences = maxCorrespondences; } /** * Indicates if weights are sorted so that largest weighted * correspondences are used first. * * @return true if weights are sorted, false otherwise. */ public boolean isSortWeightsEnabled() { return sortWeights; } /** * Specifies whether weights are sorted so that largest weighted * correspondences are used first. * * @param sortWeights true if weights are sorted, false otherwise. * @throws LockedException if this instance is locked. */ public void setSortWeightsEnabled(final boolean sortWeights) throws LockedException { if (isLocked()) { throw new LockedException(); } this.sortWeights = sortWeights; } /** * Returns type of triangulator. * * @return type of triangulator. */ @Override public Point3DTriangulatorType getType() { return Point3DTriangulatorType.WEIGHTED_HOMOGENEOUS_TRIANGULATOR; |
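A usage sketch of the weighted triangulator API shown above. The `triangulate()` call is an assumption about the base `SinglePoint3DTriangulator` class (the fragment does not show it); the factory, setter and readiness calls all appear in the duplicated fragments of this report.

```java
import com.irurueta.ar.sfm.Point3DTriangulatorType;
import com.irurueta.ar.sfm.SinglePoint3DTriangulator;
import com.irurueta.ar.sfm.WeightedHomogeneousSinglePoint3DTriangulator;
import com.irurueta.geometry.PinholeCamera;
import com.irurueta.geometry.Point2D;
import com.irurueta.geometry.Point3D;

import java.util.Arrays;
import java.util.List;

final class WeightedTriangulationHelper {
    static Point3D triangulate(final List<Point2D> points2D,
                               final List<PinholeCamera> cameras) throws Exception {
        // equal weights here; a larger weight makes its view's equations
        // dominate the averaged solution
        final var weights = new double[points2D.size()];
        Arrays.fill(weights, 1.0);

        final var triangulator = (WeightedHomogeneousSinglePoint3DTriangulator)
                SinglePoint3DTriangulator.create(
                        Point3DTriangulatorType.WEIGHTED_HOMOGENEOUS_TRIANGULATOR);
        triangulator.setPointsCamerasAndWeights(points2D, cameras, weights);
        return triangulator.isReady() ? triangulator.triangulate() : null;
    }
}
```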
File | Line |
---|---|
com/irurueta/ar/sfm/BaseSlamPairedViewsSparseReconstructor.java | 405 |
com/irurueta/ar/sfm/BaseSlamSparseReconstructor.java | 352 |
final var positionZ = lastPosZ + slamEstimator.getStatePositionZ(); final var velocityX = slamEstimator.getStateVelocityX(); final var velocityY = slamEstimator.getStateVelocityY(); final var velocityZ = slamEstimator.getStateVelocityZ(); final var accelerationX = slamEstimator.getStateAccelerationX(); final var accelerationY = slamEstimator.getStateAccelerationY(); final var accelerationZ = slamEstimator.getStateAccelerationZ(); final var quaternionA = slamEstimator.getStateQuaternionA(); final var quaternionB = slamEstimator.getStateQuaternionB(); final var quaternionC = slamEstimator.getStateQuaternionC(); final var quaternionD = slamEstimator.getStateQuaternionD(); final var angularSpeedX = slamEstimator.getStateAngularSpeedX(); final var angularSpeedY = slamEstimator.getStateAngularSpeedY(); final var angularSpeedZ = slamEstimator.getStateAngularSpeedZ(); //noinspection unchecked listener.onSlamDataAvailable((R) this, positionX, positionY, positionZ, velocityX, velocityY, velocityZ, accelerationX, accelerationY, accelerationZ, quaternionA, quaternionB, quaternionC, quaternionD, angularSpeedX, angularSpeedY, angularSpeedZ, slamEstimator.getStateCovariance()); } /** * Notifies estimated camera by means of SLAM if notification is enabled at * configuration time and intrinsics are already available. */ private void notifySlamCameraIfNeeded() { if (!configuration.isNotifyEstimatedSlamCameraEnabled()) { return; } // try with current camera var camera = currentEuclideanEstimatedCamera != null ? currentEuclideanEstimatedCamera.getCamera() : null; if (camera == null) { // if not available try with previous camera camera = previousEuclideanEstimatedCamera != null ? previousEuclideanEstimatedCamera.getCamera() : null; } try { PinholeCameraIntrinsicParameters intrinsicParameters = null; if (camera != null) { if (!camera.areIntrinsicParametersAvailable()) { // decompose camera to obtain intrinsic parameters camera.decompose(); } intrinsicParameters = camera.getIntrinsicParameters(); } else if (configuration.areIntrinsicParametersKnown()) { |
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 896 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 637 |
if (samples.length != MIN_NUMBER_OF_VIEWS) { return false; } leftSamples.add(samples[0]); rightSamples.add(samples[1]); final var leftPoint = Point2D.create(); leftPoint.setInhomogeneousCoordinates( samples[0].getPoint().getInhomX() - principalPointX, samples[0].getPoint().getInhomY() - principalPointY); leftPoints.add(leftPoint); final var rightPoint = Point2D.create(); rightPoint.setInhomogeneousCoordinates( samples[1].getPoint().getInhomX() - principalPointX, samples[1].getPoint().getInhomY() - principalPointY); rightPoints.add(rightPoint); qualityScores[i] = match.getQualityScore(); i++; } try { final var homographyEstimator = PointCorrespondenceProjectiveTransformation2DRobustEstimator.create( configuration.getRobustPlanarHomographyEstimatorMethod()); homographyEstimator.setResultRefined(configuration.isPlanarHomographyRefined()); homographyEstimator.setCovarianceKept(configuration.isPlanarHomographyCovarianceKept()); homographyEstimator.setConfidence(configuration.getPlanarHomographyConfidence()); homographyEstimator.setMaxIterations(configuration.getPlanarHomographyMaxIterations()); switch (configuration.getRobustPlanarHomographyEstimatorMethod()) { case LMEDS: ((LMedSPointCorrespondenceProjectiveTransformation2DRobustEstimator) homographyEstimator) .setStopThreshold(configuration.getPlanarHomographyThreshold()); break; case MSAC: ((MSACPointCorrespondenceProjectiveTransformation2DRobustEstimator) homographyEstimator) .setThreshold(configuration.getPlanarHomographyThreshold()); break; case PROMEDS: ((PROMedSPointCorrespondenceProjectiveTransformation2DRobustEstimator) homographyEstimator) .setStopThreshold(configuration.getPlanarHomographyThreshold()); break; case PROSAC: |
File | Line |
---|---|
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 1480 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 516 |
p2.getInhomY() - principalPointY); rightPoints.add(rightPoint); qualityScores[i] = match.getQualityScore(); i++; } try { final var estimator = FundamentalMatrixRobustEstimator.create(leftPoints, rightPoints, qualityScores, configuration.getRobustFundamentalMatrixEstimatorMethod()); estimator.setNonRobustFundamentalMatrixEstimatorMethod( configuration.getNonRobustFundamentalMatrixEstimatorMethod()); estimator.setResultRefined(configuration.isFundamentalMatrixRefined()); estimator.setCovarianceKept(configuration.isFundamentalMatrixCovarianceKept()); estimator.setConfidence(configuration.getFundamentalMatrixConfidence()); estimator.setMaxIterations(configuration.getFundamentalMatrixMaxIterations()); switch (configuration.getRobustFundamentalMatrixEstimatorMethod()) { case LMEDS: ((LMedSFundamentalMatrixRobustEstimator) estimator) .setStopThreshold(configuration.getFundamentalMatrixThreshold()); break; case MSAC: ((MSACFundamentalMatrixRobustEstimator) estimator) .setThreshold(configuration.getFundamentalMatrixThreshold()); break; case PROMEDS: ((PROMedSFundamentalMatrixRobustEstimator) estimator) .setStopThreshold(configuration.getFundamentalMatrixThreshold()); break; case PROSAC: var prosacEstimator = (PROSACFundamentalMatrixRobustEstimator) estimator; prosacEstimator.setThreshold(configuration.getFundamentalMatrixThreshold()); prosacEstimator.setComputeAndKeepInliersEnabled( configuration.getFundamentalMatrixComputeAndKeepInliers()); prosacEstimator.setComputeAndKeepResidualsEnabled( configuration.getFundamentalMatrixComputeAndKeepResiduals()); break; case RANSAC: var ransacEstimator = (RANSACFundamentalMatrixRobustEstimator) estimator; ransacEstimator.setThreshold(configuration.getFundamentalMatrixThreshold()); ransacEstimator.setComputeAndKeepInliersEnabled( configuration.getFundamentalMatrixComputeAndKeepInliers()); ransacEstimator.setComputeAndKeepResidualsEnabled( configuration.getFundamentalMatrixComputeAndKeepResiduals()); break; default: break; } final var fundamentalMatrix = estimator.estimate(); |
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 978 |
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 1701 |
if (intrinsic1 == null && intrinsic2 == null) { // estimate homography final var homography = homographyEstimator.estimate(); // estimate intrinsic parameters using the Image of Absolute // Conic (IAC) final var homographies = new ArrayList<Transformation2D>(); homographies.add(homography); final var iacEstimator = new LMSEImageOfAbsoluteConicEstimator(homographies); final var iac = iacEstimator.estimate(); intrinsic1 = intrinsic2 = iac.getIntrinsicParameters(); } else if (intrinsic1 == null) { // && intrinsic2 != null intrinsic1 = intrinsic2; } else if (intrinsic2 == null) { // && intrinsic1 != null intrinsic2 = intrinsic1; } fundamentalMatrixEstimator.setLeftIntrinsics(intrinsic1); fundamentalMatrixEstimator.setRightIntrinsics(intrinsic2); fundamentalMatrixEstimator.estimateAndReconstruct(); final var fundamentalMatrix = fundamentalMatrixEstimator.getFundamentalMatrix(); currentEstimatedFundamentalMatrix = new EstimatedFundamentalMatrix(); currentEstimatedFundamentalMatrix.setFundamentalMatrix(fundamentalMatrix); currentEstimatedFundamentalMatrix.setViewId1(viewId1); currentEstimatedFundamentalMatrix.setViewId2(viewId2); // determine quality score and inliers final var inliersData = homographyEstimator.getInliersData(); if (inliersData != null) { final var numInliers = inliersData.getNumInliers(); final var inliers = inliersData.getInliers(); final var length = inliers.length(); var fundamentalMatrixQualityScore = 0.0; for (i = 0; i < length; i++) { if (inliers.get(i)) { // inlier fundamentalMatrixQualityScore += qualityScores[i] / numInliers; } } currentEstimatedFundamentalMatrix.setQualityScore( fundamentalMatrixQualityScore); currentEstimatedFundamentalMatrix.setInliers(inliers); } // store left/right samples currentEstimatedFundamentalMatrix.setLeftSamples(leftSamples); currentEstimatedFundamentalMatrix.setRightSamples(rightSamples); return true; } catch (final Exception e) { return false; } } /** * Estimates a pair of cameras and reconstructed points. * * @param isInitialPairOfViews true if initial pair of views is being processed, * false otherwise. * @return true if cameras and points could be estimated, false if something * failed. */ private boolean estimatePairOfCamerasAndPoints(final boolean isInitialPairOfViews) { |
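The fragment above falls back to estimating shared intrinsics from a single homography via the Image of Absolute Conic (IAC) when neither view's intrinsics are known. This sketch isolates that step using only the calls visible in the fragment; the package names are assumed from the file paths in this report.

```java
import com.irurueta.ar.calibration.estimators.LMSEImageOfAbsoluteConicEstimator;
import com.irurueta.geometry.PinholeCameraIntrinsicParameters;
import com.irurueta.geometry.Transformation2D;

import java.util.ArrayList;

final class IacIntrinsicsHelper {
    static PinholeCameraIntrinsicParameters fromHomography(
            final Transformation2D homography) throws Exception {
        // a single homography constrains the IAC enough for this estimator
        final var homographies = new ArrayList<Transformation2D>();
        homographies.add(homography);
        final var iacEstimator = new LMSEImageOfAbsoluteConicEstimator(homographies);
        final var iac = iacEstimator.estimate();
        // both views are assumed to share these intrinsic parameters
        return iac.getIntrinsicParameters();
    }
}
```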
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEDualAbsoluteQuadricEstimator.java | 244 |
com/irurueta/ar/calibration/estimators/LMSEDualAbsoluteQuadricEstimator.java | 335 |
a = new Matrix(singularityEnforced ? 8 : 12, BaseQuadric.N_PARAMS); } Matrix cameraMatrix; double p11; double p12; double p13; double p14; double p21; double p22; double p23; double p24; double p31; double p32; double p33; double p34; var eqCounter = 0; final var minReqEqs = getMinRequiredEquations(); for (final var camera : cameras) { // normalize cameras to increase accuracy camera.normalize(); cameraMatrix = camera.getInternalMatrix(); p11 = cameraMatrix.getElementAt(0, 0); p21 = cameraMatrix.getElementAt(1, 0); p31 = cameraMatrix.getElementAt(2, 0); p12 = cameraMatrix.getElementAt(0, 1); p22 = cameraMatrix.getElementAt(1, 1); p32 = cameraMatrix.getElementAt(2, 1); p13 = cameraMatrix.getElementAt(0, 2); p23 = cameraMatrix.getElementAt(1, 2); p33 = cameraMatrix.getElementAt(2, 2); p14 = cameraMatrix.getElementAt(0, 3); p24 = cameraMatrix.getElementAt(1, 3); p34 = cameraMatrix.getElementAt(2, 3); // 1st row fill2ndRowAnd1stRowEquation(p11, p21, p12, p22, p13, p23, p14, p24, a, eqCounter); eqCounter++; // 2nd row fill3rdRowAnd1stRowEquation(p11, p31, p12, p32, p13, p33, p14, p34, a, eqCounter); eqCounter++; // 3rd row fill3rdRowAnd2ndRowEquation(p21, p31, p22, p32, p23, p33, p24, p34, a, eqCounter); eqCounter++; |
File | Line |
---|---|
com/irurueta/ar/sfm/BaseSlamSparseReconstructor.java | 329 |
com/irurueta/ar/sfm/BaseSlamTwoViewsSparseReconstructor.java | 251 |
activeEuclideanReconstructedPoints.add(euclideanPoint); } return true; } catch (final Exception e) { failed = true; //noinspection unchecked listener.onFail((R) this); return false; } } /** * Notifies SLAM state if notification is enabled at configuration time. */ private void notifySlamStateIfNeeded() { if (!configuration.isNotifyAvailableSlamDataEnabled()) { return; } final var positionX = slamEstimator.getStatePositionX(); final var positionY = slamEstimator.getStatePositionY(); final var positionZ = slamEstimator.getStatePositionZ(); final var velocityX = slamEstimator.getStateVelocityX(); final var velocityY = slamEstimator.getStateVelocityY(); final var velocityZ = slamEstimator.getStateVelocityZ(); final var accelerationX = slamEstimator.getStateAccelerationX(); final var accelerationY = slamEstimator.getStateAccelerationY(); final var accelerationZ = slamEstimator.getStateAccelerationZ(); final var quaternionA = slamEstimator.getStateQuaternionA(); final var quaternionB = slamEstimator.getStateQuaternionB(); final var quaternionC = slamEstimator.getStateQuaternionC(); final var quaternionD = slamEstimator.getStateQuaternionD(); final var angularSpeedX = slamEstimator.getStateAngularSpeedX(); final var angularSpeedY = slamEstimator.getStateAngularSpeedY(); final var angularSpeedZ = slamEstimator.getStateAngularSpeedZ(); //noinspection unchecked listener.onSlamDataAvailable((R) this, positionX, positionY, positionZ, velocityX, velocityY, velocityZ, accelerationX, accelerationY, accelerationZ, quaternionA, quaternionB, quaternionC, quaternionD, angularSpeedX, angularSpeedY, angularSpeedZ, slamEstimator.getStateCovariance()); } /** * Notifies estimated camera by means of SLAM if notification is enabled at * configuration time and intrinsics are already available. */ private void notifySlamCameraIfNeeded() { if (!configuration.isNotifyEstimatedSlamCameraEnabled()) { return; } |
File | Line |
---|---|
com/irurueta/ar/slam/AbsoluteOrientationSlamEstimator.java | 575 |
com/irurueta/ar/slam/SlamEstimator.java | 523 |
stateVelocityX = x[7]; x[7] = state.getElementAtIndex(7); stateVelocityY = x[8]; x[8] = state.getElementAtIndex(8); stateVelocityZ = x[9]; x[9] = state.getElementAtIndex(9); // linear acceleration stateAccelerationX = x[10]; x[10] = state.getElementAtIndex(10); stateAccelerationY = x[11]; x[11] = state.getElementAtIndex(11); stateAccelerationZ = x[12]; x[12] = state.getElementAtIndex(12); // angular velocity stateAngularSpeedX = x[13]; x[13] = state.getElementAtIndex(13); stateAngularSpeedY = x[14]; x[14] = state.getElementAtIndex(14); stateAngularSpeedZ = x[15]; x[15] = state.getElementAtIndex(15); } /** * Updates state data of the device by using state matrix obtained from * Kalman filter after correction. * * @param state state matrix obtained from Kalman filter. */ private void updateCorrectedState(final Matrix state) { // position statePositionX = x[0] = state.getElementAtIndex(0); statePositionY = x[1] = state.getElementAtIndex(1); statePositionZ = x[2] = state.getElementAtIndex(2); // quaternion stateQuaternionA = x[3] = state.getElementAtIndex(3); stateQuaternionB = x[4] = state.getElementAtIndex(4); stateQuaternionC = x[5] = state.getElementAtIndex(5); stateQuaternionD = x[6] = state.getElementAtIndex(6); |
File | Line |
---|---|
com/irurueta/ar/slam/ConstantVelocityModelStatePredictor.java | 178 |
com/irurueta/ar/slam/ConstantVelocityModelStatePredictor.java | 421 |
PositionPredictor.predict(r, vx, vy, vz, dt, r, rr, rv, null); // update orientation Matrix qq = null; Matrix qw = null; if (jacobianX != null) { qq = new Matrix(Quaternion.N_PARAMS, Quaternion.N_PARAMS); qw = new Matrix(Quaternion.N_PARAMS, ANGULAR_SPEED_COMPONENTS); } q = QuaternionPredictor.predict(q, wx, wy, wz, dt, true, qq, qw); // apply control signals vx += uvx; vy += uvy; vz += uvz; wx += uwx; wy += uwy; wz += uwz; // set new state result[0] = r.getInhomX(); result[1] = r.getInhomY(); result[2] = r.getInhomZ(); result[3] = q.getA(); result[4] = q.getB(); result[5] = q.getC(); result[6] = q.getD(); result[7] = vx; result[8] = vy; result[9] = vz; result[10] = wx; result[11] = wy; result[12] = wz; // jacobians if (jacobianX != null) { // [Rr 0 Rv 0 ] // [0 Qq 0 Qw ] // [0 0 eye 0 ] // [0 0 0 eye] jacobianX.initialize(0.0); jacobianX.setSubmatrix(0, 0, 2, 2, rr); jacobianX.setSubmatrix(3, 3, 6, 6, qq); jacobianX.setSubmatrix(0, 7, 2, 9, rv); for (int i = 7; i < STATE_COMPONENTS; i++) { |
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 396 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 539 |
homography.asMatrix(h); if (t == null) { t = new ProjectiveTransformation2D(h); } else { t.setT(h); } // normalize t.normalize(); // obtain elements of projective transformation matrix // there is no need to retrieve internal matrix h, as we already // hold a reference h11 = h.getElementAt(0, 0); h12 = h.getElementAt(0, 1); h21 = h.getElementAt(1, 0); h22 = h.getElementAt(1, 1); h31 = h.getElementAt(2, 0); h32 = h.getElementAt(2, 1); // fill first equation a.setElementAt(counter, 0, h11 * h12); a.setElementAt(counter, 1, h21 * h22); a.setElementAt(counter, 2, h11 * h32 + h31 * h12); a.setElementAt(counter, 3, h21 * h32 + h31 * h22); a.setElementAt(counter, 4, h31 * h32); // normalize row rowNorm = Math.sqrt(Math.pow(a.getElementAt(counter, 0), 2.0) + Math.pow(a.getElementAt(counter, 1), 2.0) + Math.pow(a.getElementAt(counter, 2), 2.0) + Math.pow(a.getElementAt(counter, 3), 2.0) + Math.pow(a.getElementAt(counter, 4), 2.0)); |
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 301 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 438 |
a.setElementAt(counter, 0, Math.pow(h11, 2.0) - Math.pow(h12, 2.0)); a.setElementAt(counter, 1, 2.0 * (h11 * h21 - h12 * h22)); a.setElementAt(counter, 2, Math.pow(h21, 2.0) - Math.pow(h22, 2.0)); a.setElementAt(counter, 3, 2.0 * (h11 * h31 - h12 * h32)); a.setElementAt(counter, 4, 2.0 * (h21 * h31 - h22 * h32)); a.setElementAt(counter, 5, Math.pow(h31, 2.0) - Math.pow(h32, 2.0)); // normalize row rowNorm = Math.sqrt(Math.pow(a.getElementAt(counter, 0), 2.0) + Math.pow(a.getElementAt(counter, 1), 2.0) + Math.pow(a.getElementAt(counter, 2), 2.0) + Math.pow(a.getElementAt(counter, 3), 2.0) + Math.pow(a.getElementAt(counter, 4), 2.0) + Math.pow(a.getElementAt(counter, 5), 2.0)); |
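The two duplicated row-filling blocks (this entry and the previous one) encode the standard constraints that each homography H = [h1 h2 h3] imposes on the image of the absolute conic, whose symmetric entries (B11, B12, B22, B13, B23, B33) are the unknowns being solved for:

```latex
h_1^{\top}\,\omega\,h_2 = 0,
\qquad
h_1^{\top}\,\omega\,h_1 - h_2^{\top}\,\omega\,h_2 = 0
```

The previous entry fills the first (orthogonality) equation from products such as h11*h12, while the block above fills the second (equal-norm) equation from differences of squares such as h11^2 - h12^2; each row is then normalized to unit norm for numerical stability.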
File | Line |
---|---|
com/irurueta/ar/epipolar/estimators/AffineFundamentalMatrixEstimator.java | 307 |
com/irurueta/ar/epipolar/estimators/EightPointsFundamentalMatrixEstimator.java | 330 |
fundMatrix.setElementAt(2, 2, v.getElementAt(4, 4)); if (normalizePoints && leftNormalization != null) { // denormalize fundMatrix final var transposedRightTransformationMatrix = rightNormalization.asMatrix().transposeAndReturnNew(); final var leftTransformationMatrix = leftNormalization.asMatrix(); // compute fundMatrix = transposedRightTransformationMatrix * // fundMatrix * leftTransformationMatrix fundMatrix.multiply(leftTransformationMatrix); transposedRightTransformationMatrix.multiply(fundMatrix); fundMatrix = transposedRightTransformationMatrix; // normalize by Frobenius norm to increase accuracy after point // de-normalization final var norm = Utils.normF(fundMatrix); fundMatrix.multiplyByScalar(1.0 / norm); } // enforce rank 2 decomposer.setInputMatrix(fundMatrix); decomposer.decompose(); // if rank is not already correct, then we enforce it final var rank = decomposer.getRank(); if (rank > FundamentalMatrix.FUNDAMENTAL_MATRIX_RANK) { // rank needs to be reduced final var u = decomposer.getU(); final var w = decomposer.getW(); v = decomposer.getV(); // transpose V v.transpose(); final var transV = v; // set last singular value to zero to enforce rank 2 w.setElementAt(2, 2, 0.0); // compute fundMatrix = U * W * V' w.multiply(transV); u.multiply(w); fundMatrix = u; } else if (rank < FundamentalMatrix.FUNDAMENTAL_MATRIX_RANK) { // rank is 1, which is lower than required fundamental matrix // rank (rank 2) throw new FundamentalMatrixEstimatorException(); } final var result = new FundamentalMatrix(fundMatrix); if (listener != null) { listener.onEstimateEnd(this, result); } return result; } catch (final InvalidFundamentalMatrixException | AlgebraException | NormalizerException e) { throw new FundamentalMatrixEstimatorException(e); } finally { locked = false; } } /** * Returns method of non-robust fundamental matrix estimator. * * @return method of fundamental matrix estimator. */ @Override public FundamentalMatrixEstimatorMethod getMethod() { return FundamentalMatrixEstimatorMethod.AFFINE_ALGORITHM; |
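The rank-2 enforcement in the fragment above is the usual SVD truncation: after decomposing the estimated matrix, the smallest singular value is zeroed and the factors are recomposed, which yields the closest rank-2 matrix in Frobenius norm:

```latex
F = U\,\mathrm{diag}(\sigma_1, \sigma_2, \sigma_3)\,V^{\top}
\;\longrightarrow\;
\hat{F} = U\,\mathrm{diag}(\sigma_1, \sigma_2, 0)\,V^{\top}
```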
File | Line |
---|---|
com/irurueta/ar/slam/ConstantVelocityModelStatePredictor.java | 984 |
com/irurueta/ar/slam/StatePredictor.java | 1162 |
} if (jacobianU != null) { jacobianU.initialize(0.0); // variation of position for (var i = 0; i < Point3D.POINT3D_INHOMOGENEOUS_COORDINATES_LENGTH; i++) { jacobianU.setElementAt(i, i, 1.0); } // variation of rotation jacobianU.setSubmatrix(3, 3, 6, 6, qdq); // variation of linear and angular speed for (var i = 7; i < STATE_WITH_POSITION_AND_ROTATION_ADJUSTMENT_COMPONENTS; i++) { jacobianU.setElementAt(i, i, 1.0); } } } catch (final WrongSizeException ignore) { // never thrown } } /** * Updates the system model (position, orientation, linear velocity and * angular velocity) assuming a constant velocity model (without * acceleration) when no velocity control signal is present. * * @param x initial system state containing: position-x, position-y, * position-z, quaternion-a, quaternion-b, quaternion-c, quaternion-d, * linear-velocity-x, linear-velocity-y, linear-velocity-z, * angular-velocity-x, angular-velocity-y, angular-velocity-z. Must have * length 13. * @param u linear and angular velocity perturbations or controls, position * perturbations or controls and rotation perturbation or control: * position-change-x, position-change-y, position-change-z, * quaternion-change-a, quaternion-change-b, quaternion-change-c, * quaternion-change-d, linear-velocity-change-x, linear-velocity-change-y, * linear-velocity-change-z, angular-velocity-change-x, * angular-velocity-change-y, angular-velocity-change-z. Must have length * 13. * @param dt time interval to compute prediction expressed in seconds. * @param result instance where updated system model will be stored. Must * have length 13. * @throws IllegalArgumentException if system state array, control array or * result do not have proper size. */ public static void predictWithPositionAndRotationAdjustment( final double[] x, final double[] u, final double dt, final double[] result) { predictWithPositionAndRotationAdjustment(x, u, dt, result, null, null); } /** * Updates the system model (position, orientation, linear velocity and * angular velocity) assuming a constant velocity model (without * acceleration) when no velocity control signal is present. * * @param x initial system state containing: position-x, position-y, * position-z, quaternion-a, quaternion-b, quaternion-c, quaternion-d, * linear-velocity-x, linear-velocity-y, linear-velocity-z, * angular-velocity-x, angular-velocity-y, angular-velocity-z. Must have * length 13. * @param u linear and angular velocity perturbations or controls, position * perturbations or controls and rotation perturbation or control: * position-change-x, position-change-y, position-change-z, * quaternion-change-a, quaternion-change-b, quaternion-change-c, * quaternion-change-d, linear-velocity-change-x, linear-velocity-change-y, * linear-velocity-change-z, angular-velocity-change-x, * angular-velocity-change-y, angular-velocity-change-z. Must have length * 13. * @param dt time interval to compute prediction expressed in seconds. * @param jacobianX jacobian wrt system state. Must be 13x13. * @param jacobianU jacobian wrt control. Must be 13x13. * @return a new array containing updated system model. * @throws IllegalArgumentException if system state array, control array * or jacobians do not have proper size. 
*/ public static double[] predictWithPositionAndRotationAdjustment( final double[] x, final double[] u, final double dt, final Matrix jacobianX, final Matrix jacobianU) { final var result = new double[STATE_WITH_POSITION_AND_ROTATION_ADJUSTMENT_COMPONENTS]; predictWithPositionAndRotationAdjustment(x, u, dt, result, jacobianX, jacobianU); return result; } /** * Updates the system model (position, orientation, linear velocity and * angular velocity) assuming a constant velocity model (without * acceleration) when no velocity control signal is present. * * @param x initial system state containing: position-x, position-y, * position-z, quaternion-a, quaternion-b, quaternion-c, quaternion-d, * linear-velocity-x, linear-velocity-y, linear-velocity-z, * angular-velocity-x, angular-velocity-y, angular-velocity-z. Must have * length 13. * @param u linear and angular velocity perturbations or controls, position * perturbations or controls and rotation perturbation or control: * position-change-x, position-change-y, position-change-z, * quaternion-change-a, quaternion-change-b, quaternion-change-c, * quaternion-change-d, linear-velocity-change-x, linear-velocity-change-y, * linear-velocity-change-z, angular-velocity-change-x, * angular-velocity-change-y, angular-velocity-change-z. Must have length * 13. * @param dt time interval to compute prediction expressed in seconds. * @return a new array (of length 13) containing updated system model. * @throws IllegalArgumentException if system state array or control array * do not have proper size. */ public static double[] predictWithPositionAndRotationAdjustment( final double[] x, final double[] u, final double dt) { final var result = new double[STATE_WITH_POSITION_AND_ROTATION_ADJUSTMENT_COMPONENTS]; predictWithPositionAndRotationAdjustment(x, u, dt, result); return result; } } |
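A sketch of the position-and-rotation-adjustment prediction whose overloads close the fragment above. As the enumerated control components and the 13x13 control jacobian imply, both the state and the control have length 13 here (note the javadoc's "length 12" was corrected accordingly); the identity quaternion change sits at control indices 3..6.

```java
import com.irurueta.ar.slam.ConstantVelocityModelStatePredictor;

public class PositionRotationAdjustmentExample {
    public static void main(final String[] args) {
        // state: position (3) + quaternion (4) + velocities (6) = 13
        final double[] x = new double[13];
        x[3] = 1.0; // identity orientation quaternion (a, b, c, d)

        // control: position change (3) + quaternion change (4)
        // + linear velocity change (3) + angular velocity change (3) = 13
        final double[] u = new double[13];
        u[3] = 1.0; // identity quaternion change -> no rotation adjustment

        final double[] predicted = ConstantVelocityModelStatePredictor
                .predictWithPositionAndRotationAdjustment(x, u, 0.02);
        System.out.println("predicted state length: " + predicted.length);
    }
}
```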
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 676 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 419 |
protected abstract boolean hasAbsoluteOrientation(); /** * Indicates whether there are enough samples to estimate a fundamental * matrix. * * @param samples samples to check. * @return true if there are enough samples, false otherwise. */ private boolean hasEnoughSamples(final List<Sample2D> samples) { return hasEnoughSamplesOrMatches(samples != null ? samples.size() : 0); } /** * Indicates whether there are enough matches to estimate a fundamental * matrix. * * @param matches matches to check. * @return true if there are enough matches, false otherwise. */ private boolean hasEnoughMatches(final List<MatchedSamples> matches) { return hasEnoughSamplesOrMatches(matches != null ? matches.size() : 0); } /** * Indicates whether there are enough matches or samples to estimate a * fundamental matrix. * * @param count number of matches or samples. * @return true if there are enough matches or samples, false otherwise. */ private boolean hasEnoughSamplesOrMatches(final int count) { if (configuration.isGeneralSceneAllowed()) { if (configuration.getNonRobustFundamentalMatrixEstimatorMethod() == FundamentalMatrixEstimatorMethod.EIGHT_POINTS_ALGORITHM) { return count >= EightPointsFundamentalMatrixEstimator.MIN_REQUIRED_POINTS; } else if (configuration.getNonRobustFundamentalMatrixEstimatorMethod() == FundamentalMatrixEstimatorMethod.SEVEN_POINTS_ALGORITHM) { return count >= SevenPointsFundamentalMatrixEstimator.MIN_REQUIRED_POINTS; } } else if (configuration.isPlanarSceneAllowed()) { return count >= ProjectiveTransformation2DRobustEstimator.MINIMUM_SIZE; } return false; } /** * Estimates fundamental matrix for provided matches, when 3D points lie in * a general non-degenerate 3D configuration. * * @param matches pairs of matches to find fundamental matrix. * @param viewId1 id of first view. * @param viewId2 id of second view. * @return true if estimation succeeded, false otherwise. */ private boolean estimateFundamentalMatrix(final List<MatchedSamples> matches, final int viewId1, final int viewId2) { if (matches == null) { return false; } final var count = matches.size(); final var leftSamples = new ArrayList<Sample2D>(count); final var rightSamples = new ArrayList<Sample2D>(count); final var leftPoints = new ArrayList<Point2D>(count); final var rightPoints = new ArrayList<Point2D>(count); final var qualityScores = new double[count]; |
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 1168 |
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 1892 |
corrector = Corrector.create(fundamentalMatrix, configuration.getPairedCamerasCorrectorType()); } // use all points used for fundamental matrix estimation final var samples1 = currentEstimatedFundamentalMatrix.getLeftSamples(); final var samples2 = currentEstimatedFundamentalMatrix.getRightSamples(); final var points1 = new ArrayList<Point2D>(); final var points2 = new ArrayList<Point2D>(); final var length = samples1.size(); for (var i = 0; i < length; i++) { final var sample1 = samples1.get(i); final var sample2 = samples2.get(i); final var point1 = sample1.getPoint(); final var point2 = sample2.getPoint(); points1.add(point1); points2.add(point2); } // correct points if needed final List<Point2D> correctedPoints1; final List<Point2D> correctedPoints2; if (corrector != null) { corrector.setLeftAndRightPoints(points1, points2); corrector.correct(); correctedPoints1 = corrector.getLeftCorrectedPoints(); correctedPoints2 = corrector.getRightCorrectedPoints(); } else { correctedPoints1 = points1; correctedPoints2 = points2; } // triangulate points final SinglePoint3DTriangulator triangulator; if (configuration.getDaqUseHomogeneousPointTriangulator()) { triangulator = SinglePoint3DTriangulator.create(Point3DTriangulatorType.LMSE_HOMOGENEOUS_TRIANGULATOR); } else { triangulator = SinglePoint3DTriangulator.create( Point3DTriangulatorType.LMSE_INHOMOGENEOUS_TRIANGULATOR); } final var cameras = new ArrayList<PinholeCamera>(); cameras.add(camera1); cameras.add(camera2); |
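The fragment above corrects matched points towards epipolar consistency before triangulating them. Below is a sketch of that flow for a single match; `setPointsAndCameras` and `triangulate` are assumptions about the `SinglePoint3DTriangulator` API (the fragment is cut off before the triangulation call), while the corrector calls and the factory appear in the fragment.

```java
import com.irurueta.ar.epipolar.Corrector;
import com.irurueta.ar.epipolar.CorrectorType;
import com.irurueta.ar.epipolar.FundamentalMatrix;
import com.irurueta.ar.sfm.Point3DTriangulatorType;
import com.irurueta.ar.sfm.SinglePoint3DTriangulator;
import com.irurueta.geometry.PinholeCamera;
import com.irurueta.geometry.Point2D;
import com.irurueta.geometry.Point3D;

import java.util.ArrayList;
import java.util.List;

final class CorrectAndTriangulateHelper {
    static Point3D run(final FundamentalMatrix fundamentalMatrix,
                       final List<Point2D> points1, final List<Point2D> points2,
                       final PinholeCamera camera1, final PinholeCamera camera2,
                       final CorrectorType correctorType) throws Exception {
        // shift matched points towards epipolar consistency
        final var corrector = Corrector.create(fundamentalMatrix, correctorType);
        corrector.setLeftAndRightPoints(points1, points2);
        corrector.correct();

        final var cameras = new ArrayList<PinholeCamera>();
        cameras.add(camera1);
        cameras.add(camera2);

        final var triangulator = SinglePoint3DTriangulator.create(
                Point3DTriangulatorType.LMSE_HOMOGENEOUS_TRIANGULATOR);
        // triangulate the first corrected correspondence as an example
        final var views = new ArrayList<Point2D>();
        views.add(corrector.getLeftCorrectedPoints().get(0));
        views.add(corrector.getRightCorrectedPoints().get(0));
        triangulator.setPointsAndCameras(views, cameras);
        return triangulator.triangulate();
    }
}
```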
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 591 |
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 866 |
a.setElementAt(counter, 2, Math.pow(h21, 2.0) - Math.pow(h22, 2.0)); a.setElementAt(counter, 3, Math.pow(h31, 2.0) - Math.pow(h32, 2.0)); // normalize row rowNorm = Math.sqrt(Math.pow(a.getElementAt(counter, 0), 2.0) + Math.pow(a.getElementAt(counter, 1), 2.0) + Math.pow(a.getElementAt(counter, 2), 2.0) + Math.pow(a.getElementAt(counter, 3), 2.0)); a.setElementAt(counter, 0, a.getElementAt(counter, 0) / rowNorm); a.setElementAt(counter, 1, a.getElementAt(counter, 1) / rowNorm); a.setElementAt(counter, 2, a.getElementAt(counter, 2) / rowNorm); a.setElementAt(counter, 3, a.getElementAt(counter, 3) / rowNorm); counter++; } final var decomposer = new SingularValueDecomposer(a); decomposer.decompose(); if (decomposer.getNullity() > 1) { // homographies constitute a degenerate camera movement. // A linear combination of possible IACs exists (i.e. the solution is // not unique up to scale) throw new ImageOfAbsoluteConicEstimatorException(); } final var v = decomposer.getV(); // use last column of V as IAC vector // the last column of V contains IAC matrix (B), which is symmetric // and positive definite, ordered as follows: B11, B12, B22, B13, // B23, B33 final var b11 = v.getElementAt(0, 3); final var b12 = v.getElementAt(1, 3); |
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/WeightedDualAbsoluteQuadricEstimator.java | 415 |
com/irurueta/ar/calibration/estimators/WeightedDualAbsoluteQuadricEstimator.java | 528 |
final var transRow = new Matrix(BaseQuadric.N_PARAMS, 4); final var tmp = new Matrix(BaseQuadric.N_PARAMS, BaseQuadric.N_PARAMS); Matrix cameraMatrix; double p11; double p12; double p13; double p14; double p21; double p22; double p23; double p24; double p31; double p32; double p33; double p34; int eqCounter; var cameraCounter = 0; double weight; var previousNorm = 1.0; for (final var camera : cameras) { if (selected[cameraCounter]) { eqCounter = 0; // normalize cameras to increase accuracy camera.normalize(); cameraMatrix = camera.getInternalMatrix(); p11 = cameraMatrix.getElementAt(0, 0); p21 = cameraMatrix.getElementAt(1, 0); p31 = cameraMatrix.getElementAt(2, 0); p12 = cameraMatrix.getElementAt(0, 1); p22 = cameraMatrix.getElementAt(1, 1); p32 = cameraMatrix.getElementAt(2, 1); p13 = cameraMatrix.getElementAt(0, 2); p23 = cameraMatrix.getElementAt(1, 2); p33 = cameraMatrix.getElementAt(2, 2); p14 = cameraMatrix.getElementAt(0, 3); p24 = cameraMatrix.getElementAt(1, 3); p34 = cameraMatrix.getElementAt(2, 3); weight = weights[cameraCounter]; // 1st row fill2ndRowAnd1stRowEquation(p11, p21, p12, p22, p13, p23, p14, p24, row, eqCounter); |
File | Line |
---|---|
com/irurueta/ar/slam/AbsoluteOrientationSlamEstimator.java | 496 |
com/irurueta/ar/slam/SlamEstimator.java | 450 |
if (x != null) { // position x[0] = statePositionX; x[1] = statePositionY; x[2] = statePositionZ; // quaternion x[3] = stateQuaternionA; x[4] = stateQuaternionB; x[5] = stateQuaternionC; x[6] = stateQuaternionD; // velocity x[7] = stateVelocityX; x[8] = stateVelocityY; x[9] = stateVelocityZ; // linear acceleration x[10] = stateAccelerationX; x[11] = stateAccelerationY; x[12] = stateAccelerationZ; // angular speed x[13] = stateAngularSpeedX; x[14] = stateAngularSpeedY; x[15] = stateAngularSpeedZ; try { // set initial Kalman filter state (state pre and post must be two // different instances!) kalmanFilter.getStatePre().fromArray(x); kalmanFilter.getStatePost().fromArray(x); } catch (final WrongSizeException ignore) { // never thrown } } error = false; lastTimestampNanos = -1; predictionAvailable = false; } /** * Updates state data of the device by using state matrix obtained after * prediction from Kalman filter. * To ensure that state follows proper values (especially on quaternions), * we keep x values, which have been predicted using the state predictor, * which uses analytical values. * We then update x using the latest Kalman filter state for the next * iteration of the state predictor. * * @param state state matrix obtained from Kalman filter. */ private void updatePredictedState(final Matrix state) { // position statePositionX = x[0]; x[0] = state.getElementAtIndex(0); statePositionY = x[1]; x[1] = state.getElementAtIndex(1); statePositionZ = x[2]; x[2] = state.getElementAtIndex(2); // quaternion state predictor is more reliable than Kalman filter, for // that reason we ignore predicted quaternion values on Kalman filter and // simply keep predicted ones. Besides, typically gyroscope samples are // much more reliable than accelerometer ones. For that reason state // elements corresponding to quaternion (3 to 6) are not copied into the x // array. stateQuaternionA = x[3]; stateQuaternionB = x[4]; stateQuaternionC = x[5]; stateQuaternionD = x[6]; |
File | Line |
---|---|
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 484 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 621 |
final var rightPoints = new ArrayList<Point2D>(count); final var qualityScores = new double[count]; final double principalPointX; final double principalPointY; if (configuration.getInitialCamerasEstimatorMethod() == InitialCamerasEstimatorMethod.DUAL_ABSOLUTE_QUADRIC || configuration.getInitialCamerasEstimatorMethod() == InitialCamerasEstimatorMethod.DUAL_ABSOLUTE_QUADRIC_AND_ESSENTIAL_MATRIX) { principalPointX = configuration.getPrincipalPointX(); principalPointY = configuration.getPrincipalPointY(); } else { principalPointX = principalPointY = 0.0; } var i = 0; for (final var match : matches) { final var samples = match.getSamples(); if (samples.length != NUMBER_OF_VIEWS) { return false; } leftSamples.add(samples[0]); rightSamples.add(samples[1]); final var leftPoint = Point2D.create(); leftPoint.setInhomogeneousCoordinates( samples[0].getPoint().getInhomX() - principalPointX, samples[0].getPoint().getInhomY() - principalPointY); leftPoints.add(leftPoint); final var rightPoint = Point2D.create(); rightPoint.setInhomogeneousCoordinates( samples[1].getPoint().getInhomX() - principalPointX, samples[1].getPoint().getInhomY() - principalPointY); rightPoints.add(rightPoint); qualityScores[i] = match.getQualityScore(); i++; } try { final var estimator = FundamentalMatrixRobustEstimator.create(leftPoints, rightPoints, qualityScores, |
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 815 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 960 |
homography.asMatrix(h); if (t == null) { t = new ProjectiveTransformation2D(h); } else { t.setT(h); } // normalize t.normalize(); // obtain elements of projective transformation matrix // there is no need to retrieve internal matrix h, as we already // hold a reference h11 = h.getElementAt(0, 0); h12 = h.getElementAt(0, 1); h21 = h.getElementAt(1, 0); h22 = h.getElementAt(1, 1); h31 = h.getElementAt(2, 0); h32 = h.getElementAt(2, 1); // fill first equation a.setElementAt(counter, 0, h11 * h12 + h21 * h22 / sqrAspectRatio); a.setElementAt(counter, 1, h11 * h32 + h31 * h12); a.setElementAt(counter, 2, h21 * h32 + h31 * h22); a.setElementAt(counter, 3, h31 * h32); // normalize row rowNorm = Math.sqrt(Math.pow(a.getElementAt(counter, 0), 2.0) + Math.pow(a.getElementAt(counter, 1), 2.0) + Math.pow(a.getElementAt(counter, 2), 2.0) + Math.pow(a.getElementAt(counter, 3), 2.0)); |
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 813 |
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 1519 |
```java
        final RANSACFundamentalMatrixRobustEstimator ransacEstimator =
                (RANSACFundamentalMatrixRobustEstimator) estimator;
        ransacEstimator.setThreshold(configuration.getFundamentalMatrixThreshold());
        ransacEstimator.setComputeAndKeepInliersEnabled(
                configuration.getFundamentalMatrixComputeAndKeepInliers());
        ransacEstimator.setComputeAndKeepResidualsEnabled(
                configuration.getFundamentalMatrixComputeAndKeepResiduals());
        break;
    default:
        break;
}

final var fundamentalMatrix = estimator.estimate();

currentEstimatedFundamentalMatrix = new EstimatedFundamentalMatrix();
currentEstimatedFundamentalMatrix.setFundamentalMatrix(fundamentalMatrix);
currentEstimatedFundamentalMatrix.setViewId1(viewId1);
currentEstimatedFundamentalMatrix.setViewId2(viewId2);
currentEstimatedFundamentalMatrix.setCovariance(estimator.getCovariance());

// determine quality score and inliers
final var inliersData = estimator.getInliersData();
if (inliersData != null) {
    final var numInliers = inliersData.getNumInliers();
    final var inliers = inliersData.getInliers();
    final var length = inliers.length();
    var fundamentalMatrixQualityScore = 0.0;
    for (i = 0; i < length; i++) {
        if (inliers.get(i)) {
            // inlier
            fundamentalMatrixQualityScore += qualityScores[i] / numInliers;
        }
    }
    currentEstimatedFundamentalMatrix.setQualityScore(fundamentalMatrixQualityScore);
    currentEstimatedFundamentalMatrix.setInliers(inliers);
}

// store left/right samples
currentEstimatedFundamentalMatrix.setLeftSamples(leftSamples);
currentEstimatedFundamentalMatrix.setRightSamples(rightSamples);

return true;
} catch (final Exception e) {
    return false;
}
}

/**
 * Estimates fundamental matrix for provided matches, when 3D points lie in
 * a planar 3D scene.
 *
 * @param matches pairs of matches to find fundamental matrix.
 * @param viewId1 id of first view.
 * @param viewId2 id of second view.
 * @return true if estimation succeeded, false otherwise.
 */
private boolean estimatePlanarFundamentalMatrix(final List<MatchedSamples> matches, final int viewId1,
        final int viewId2) {
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 275 |
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 306 |
```java
a.setElementAt(counter, 5, h31 * h32);

// normalize row
rowNorm = Math.sqrt(Math.pow(a.getElementAt(counter, 0), 2.0)
        + Math.pow(a.getElementAt(counter, 1), 2.0)
        + Math.pow(a.getElementAt(counter, 2), 2.0)
        + Math.pow(a.getElementAt(counter, 3), 2.0)
        + Math.pow(a.getElementAt(counter, 4), 2.0)
        + Math.pow(a.getElementAt(counter, 5), 2.0));

a.setElementAt(counter, 0, a.getElementAt(counter, 0) / rowNorm);
a.setElementAt(counter, 1, a.getElementAt(counter, 1) / rowNorm);
a.setElementAt(counter, 2, a.getElementAt(counter, 2) / rowNorm);
a.setElementAt(counter, 3, a.getElementAt(counter, 3) / rowNorm);
a.setElementAt(counter, 4, a.getElementAt(counter, 4) / rowNorm);
a.setElementAt(counter, 5, a.getElementAt(counter, 5) / rowNorm);
counter++;
```
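This row-normalization block recurs across the IAC estimators with only the column count changing. A self-contained sketch of the same logic on a plain array (hypothetical; the estimators would use their `Matrix` type instead of `double[][]`):

```java
// Hypothetical demo: divide a row by its Euclidean norm, as in the blocks above.
public final class RowNormalizeDemo {
    static void normalizeRow(final double[][] a, final int row) {
        var sum = 0.0;
        for (final var v : a[row]) {
            sum += v * v;
        }
        final var rowNorm = Math.sqrt(sum);
        for (var j = 0; j < a[row].length; j++) {
            a[row][j] /= rowNorm;
        }
    }

    public static void main(final String[] args) {
        final var a = new double[][]{{3.0, 4.0, 0.0, 0.0, 0.0, 0.0}};
        normalizeRow(a, 0);
        System.out.println(java.util.Arrays.toString(a[0])); // [0.6, 0.8, 0.0, ...]
    }
}
```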
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/MSACRadialDistortionRobustEstimator.java | 228 |
com/irurueta/ar/calibration/estimators/PROMedSRadialDistortionRobustEstimator.java | 429 |
com/irurueta/ar/calibration/estimators/PROSACRadialDistortionRobustEstimator.java | 390 |
```java
}

@Override
public int getTotalSamples() {
    return distortedPoints.size();
}

@Override
public int getSubsetSize() {
    return RadialDistortionRobustEstimator.MIN_NUMBER_OF_POINTS;
}

@Override
public void estimatePreliminarSolutions(
        final int[] samplesIndices, final List<RadialDistortion> solutions) {
    subsetDistorted.clear();
    subsetDistorted.add(distortedPoints.get(samplesIndices[0]));
    subsetDistorted.add(distortedPoints.get(samplesIndices[1]));

    subsetUndistorted.clear();
    subsetUndistorted.add(undistortedPoints.get(samplesIndices[0]));
    subsetUndistorted.add(undistortedPoints.get(samplesIndices[1]));

    try {
        radialDistortionEstimator.setPoints(distortedPoints, undistortedPoints);
        radialDistortionEstimator.setPoints(subsetDistorted, subsetUndistorted);

        final var distortion = radialDistortionEstimator.estimate();
        solutions.add(distortion);
    } catch (final Exception e) {
        // if anything fails, no solution is added
    }
}

@Override
public double computeResidual(final RadialDistortion currentEstimation, final int i) {
    final var distortedPoint = distortedPoints.get(i);
    final var undistortedPoint = undistortedPoints.get(i);

    currentEstimation.distort(undistortedPoint, testPoint);

    return testPoint.distanceTo(distortedPoint);
}

@Override
public boolean isReady() {
    return MSACRadialDistortionRobustEstimator.this.isReady();
```
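This listener body repeats across the MSAC, LMedS, PROSAC and PROMedS radial distortion estimators (see the following entries). Note also that the first `setPoints(distortedPoints, undistortedPoints)` call appears redundant, since it is immediately overwritten by the subset call. A sketch of how the shared logic could be hoisted into the common base class (hypothetical method, not part of `RadialDistortionRobustEstimator`; field names as in the fragment above):

```java
// Hypothetical base-class method collapsing the duplicated subset estimation.
protected void estimateFromSubset(final int[] samplesIndices, final List<RadialDistortion> solutions) {
    subsetDistorted.clear();
    subsetUndistorted.clear();
    for (final var index : samplesIndices) {
        subsetDistorted.add(distortedPoints.get(index));
        subsetUndistorted.add(undistortedPoints.get(index));
    }

    try {
        // only the subset is needed; the redundant full-list call is dropped
        radialDistortionEstimator.setPoints(subsetDistorted, subsetUndistorted);
        solutions.add(radialDistortionEstimator.estimate());
    } catch (final Exception e) {
        // if anything fails, no solution is added
    }
}
```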
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 541 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 685 |
```java
homography.asMatrix(h);
if (t == null) {
    t = new ProjectiveTransformation2D(h);
} else {
    t.setT(h);
}

// normalize
t.normalize();

// obtain elements of projective transformation matrix
// there is no need to retrieve internal matrix h, as we already
// hold a reference
h11 = h.getElementAt(0, 0);
h12 = h.getElementAt(0, 1);
h21 = h.getElementAt(1, 0);
h22 = h.getElementAt(1, 1);
h31 = h.getElementAt(2, 0);
h32 = h.getElementAt(2, 1);

// fill first equation
a.setElementAt(counter, 0, h11 * h12);
a.setElementAt(counter, 1, h11 * h22 + h21 * h12);
a.setElementAt(counter, 2, h21 * h22);
a.setElementAt(counter, 3, h31 * h32);

// normalize row
rowNorm = Math.sqrt(Math.pow(a.getElementAt(counter, 0), 2.0)
        + Math.pow(a.getElementAt(counter, 1), 2.0)
        + Math.pow(a.getElementAt(counter, 2), 2.0)
        + Math.pow(a.getElementAt(counter, 3), 2.0));
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMedSRadialDistortionRobustEstimator.java | 266 |
com/irurueta/ar/calibration/estimators/MSACRadialDistortionRobustEstimator.java | 230 |
com/irurueta/ar/calibration/estimators/PROMedSRadialDistortionRobustEstimator.java | 431 |
com/irurueta/ar/calibration/estimators/PROSACRadialDistortionRobustEstimator.java | 392 |
```java
@Override
public int getTotalSamples() {
    return distortedPoints.size();
}

@Override
public int getSubsetSize() {
    return RadialDistortionRobustEstimator.MIN_NUMBER_OF_POINTS;
}

@Override
public void estimatePreliminarSolutions(
        final int[] samplesIndices, final List<RadialDistortion> solutions) {
    subsetDistorted.clear();
    subsetDistorted.add(distortedPoints.get(samplesIndices[0]));
    subsetDistorted.add(distortedPoints.get(samplesIndices[1]));

    subsetUndistorted.clear();
    subsetUndistorted.add(undistortedPoints.get(samplesIndices[0]));
    subsetUndistorted.add(undistortedPoints.get(samplesIndices[1]));

    try {
        radialDistortionEstimator.setPoints(distortedPoints, undistortedPoints);
        radialDistortionEstimator.setPoints(subsetDistorted, subsetUndistorted);

        final var distortion = radialDistortionEstimator.estimate();
        solutions.add(distortion);
    } catch (final Exception e) {
        // if anything fails, no solution is added
    }
}

@Override
public double computeResidual(final RadialDistortion currentEstimation, final int i) {
    final var distortedPoint = distortedPoints.get(i);
    final var undistortedPoint = undistortedPoints.get(i);

    currentEstimation.distort(undistortedPoint, testPoint);

    return testPoint.distanceTo(distortedPoint);
}

@Override
public boolean isReady() {
    return LMedSRadialDistortionRobustEstimator.this.isReady();
```
File | Line |
---|---|
com/irurueta/ar/slam/ConstantVelocityModelStatePredictor.java | 91 |
com/irurueta/ar/slam/StatePredictor.java | 95 |
```java
private ConstantVelocityModelStatePredictor() {
}

/**
 * Updates the system model (position, orientation, linear velocity and
 * angular velocity) assuming a constant velocity model (without
 * acceleration) when no velocity control signal is present.
 *
 * @param x initial system state containing: position-x, position-y,
 * position-z, quaternion-a, quaternion-b, quaternion-c, quaternion-d,
 * linear-velocity-x, linear-velocity-y, linear-velocity-z,
 * angular-velocity-x, angular-velocity-y, angular-velocity-z. Must have
 * length 13.
 * @param u linear and angular velocity perturbations or controls:
 * linear-velocity-change-x, linear-velocity-change-y,
 * linear-velocity-change-z, angular-velocity-change-x,
 * angular-velocity-change-y, angular-velocity-change-z. Must have length 6.
 * @param dt time interval to compute prediction expressed in seconds.
 * @param result instance where updated system model will be stored. Must
 * have length 13.
 * @param jacobianX jacobian wrt system state. Must be 13x13.
 * @param jacobianU jacobian wrt control. Must be 13x6.
 * @throws IllegalArgumentException if system state array, control array,
 * result or jacobians do not have proper size.
 * @see <a href="https://github.com/joansola/slamtb">constVel.m at https://github.com/joansola/slamtb</a>
 */
public static void predict(
        final double[] x, final double[] u, final double dt, final double[] result, final Matrix jacobianX,
        final Matrix jacobianU) {
    if (x.length != STATE_COMPONENTS) {
        // x must have length 13
        throw new IllegalArgumentException();
    }
    if (u.length != CONTROL_COMPONENTS) {
        // u must have length 6
        throw new IllegalArgumentException();
    }
    if (result.length != STATE_COMPONENTS) {
        // result must have length 13
        throw new IllegalArgumentException();
    }
    if (jacobianX != null && (jacobianX.getRows() != STATE_COMPONENTS
            || jacobianX.getColumns() != STATE_COMPONENTS)) {
        // jacobian wrt x must be 13x13
        throw new IllegalArgumentException();
    }
    if (jacobianU != null && (jacobianU.getRows() != STATE_COMPONENTS
            || jacobianU.getColumns() != CONTROL_COMPONENTS)) {
        // jacobian wrt u must be 13x6
        throw new IllegalArgumentException();
    }

    // position
    final var r = new InhomogeneousPoint3D(x[0], x[1], x[2]);

    // orientation
    var q = new Quaternion(x[3], x[4], x[5], x[6]);

    // linear velocity
    var vx = x[7];
    var vy = x[8];
    var vz = x[9];

    // angular velocity
    var wx = x[10];
```
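A hypothetical usage sketch based only on the signature and guards shown above: a unit quaternion at the origin moving at 1 m/s along x, predicted 0.1 s ahead. Passing null jacobians skips their computation, as the null checks allow. The expected output is an assumption from the constant velocity model, not a verified library result.

```java
import com.irurueta.ar.slam.ConstantVelocityModelStatePredictor;

public final class PredictDemo {
    public static void main(final String[] args) {
        final var x = new double[13];
        x[3] = 1.0;     // quaternion-a (identity orientation)
        x[7] = 1.0;     // linear-velocity-x = 1 m/s

        final var u = new double[6];        // no velocity perturbations
        final var result = new double[13];

        ConstantVelocityModelStatePredictor.predict(x, u, 0.1, result, null, null);

        // under a constant velocity model, position-x should be about 0.1 m
        System.out.println("predicted position-x = " + result[0]);
    }
}
```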
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 730 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 1006 |
```java
a.setElementAt(counter, 2, Math.pow(h21, 2.0) - Math.pow(h22, 2.0));
a.setElementAt(counter, 3, Math.pow(h31, 2.0) - Math.pow(h32, 2.0));

// normalize row
rowNorm = Math.sqrt(Math.pow(a.getElementAt(counter, 0), 2.0)
        + Math.pow(a.getElementAt(counter, 1), 2.0)
        + Math.pow(a.getElementAt(counter, 2), 2.0)
        + Math.pow(a.getElementAt(counter, 3), 2.0));

factor = weight / rowNorm;

a.setElementAt(counter, 0, a.getElementAt(counter, 0) * factor);
a.setElementAt(counter, 1, a.getElementAt(counter, 1) * factor);
a.setElementAt(counter, 2, a.getElementAt(counter, 2) * factor);
a.setElementAt(counter, 3, a.getElementAt(counter, 3) * factor);
counter++;
}
index++;
}

final var decomposer = new SingularValueDecomposer(a);
decomposer.decompose();

if (decomposer.getNullity() > 1) {
    // homographies constitute a degenerate camera movement.
    // A linear combination of possible IAC's exist (i.e. solution is
    // not unique up to scale)
    throw new ImageOfAbsoluteConicEstimatorException();
}
```
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 744 |
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 883 |
```java
double principalPointY;
if (configuration.getPairedCamerasEstimatorMethod() == InitialCamerasEstimatorMethod.DUAL_ABSOLUTE_QUADRIC
        || configuration.getPairedCamerasEstimatorMethod()
        == InitialCamerasEstimatorMethod.DUAL_ABSOLUTE_QUADRIC_AND_ESSENTIAL_MATRIX) {
    principalPointX = configuration.getPrincipalPointX();
    principalPointY = configuration.getPrincipalPointY();
} else {
    principalPointX = principalPointY = 0.0;
}

var i = 0;
for (final var match : matches) {
    final var samples = match.getSamples();
    if (samples.length != MIN_NUMBER_OF_VIEWS) {
        return false;
    }

    leftSamples.add(samples[0]);
    rightSamples.add(samples[1]);

    final var leftPoint = Point2D.create();
    leftPoint.setInhomogeneousCoordinates(
            samples[0].getPoint().getInhomX() - principalPointX,
            samples[0].getPoint().getInhomY() - principalPointY);
    leftPoints.add(leftPoint);

    final var rightPoint = Point2D.create();
    rightPoint.setInhomogeneousCoordinates(
            samples[1].getPoint().getInhomX() - principalPointX,
            samples[1].getPoint().getInhomY() - principalPointY);
    rightPoints.add(rightPoint);

    qualityScores[i] = match.getQualityScore();
    i++;
}

try {
    final var estimator = FundamentalMatrixRobustEstimator.create(leftPoints, rightPoints, qualityScores,
```
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructorConfiguration.java | 452 |
com/irurueta/ar/sfm/BaseSparseReconstructorConfiguration.java | 879 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructorConfiguration.java | 454 |
```java
robustFundamentalMatrixEstimatorMethod = method;
//noinspection all
return (T) this;
}

/**
 * Indicates whether estimated fundamental matrix is refined among all found inliers.
 * This is only used when general scenes are allowed.
 *
 * @return true if fundamental matrix is refined, false otherwise.
 */
public boolean isFundamentalMatrixRefined() {
    return refineFundamentalMatrix;
}

/**
 * Specifies whether estimated fundamental matrix is refined among all found inliers.
 * This is only used when general scenes are allowed.
 *
 * @param refineFundamentalMatrix true if fundamental matrix is refined, false otherwise.
 * @return this instance so that method can be easily chained.
 */
public T setFundamentalMatrixRefined(final boolean refineFundamentalMatrix) {
    this.refineFundamentalMatrix = refineFundamentalMatrix;
    //noinspection unchecked
    return (T) this;
}

/**
 * Indicates whether covariance of estimated fundamental matrix is kept after the
 * estimation.
 * This is only used when general scenes are allowed.
 *
 * @return true if covariance is kept, false otherwise.
 */
public boolean isFundamentalMatrixCovarianceKept() {
    return keepFundamentalMatrixCovariance;
}

/**
 * Specifies whether covariance of estimated fundamental matrix is kept after the
 * estimation.
 * This is only used when general scenes are allowed.
 *
 * @param keepFundamentalMatrixCovariance true if covariance is kept, false
 *                                        otherwise.
 * @return this instance so that method can be easily chained.
 */
public T setFundamentalMatrixCovarianceKept(final boolean keepFundamentalMatrixCovariance) {
    this.keepFundamentalMatrixCovariance = keepFundamentalMatrixCovariance;
    //noinspection unchecked
    return (T) this;
}

/**
 * Gets confidence of robustly estimated fundamental matrix.
 * This is only used when general scenes are allowed.
 *
 * @return confidence of robustly estimated fundamental matrix.
 */
public double getFundamentalMatrixConfidence() {
    return fundamentalMatrixConfidence;
}

/**
 * Sets confidence of robustly estimated fundamental matrix.
 * This is only used when general scenes are allowed.
 *
 * @param fundamentalMatrixConfidence confidence of robustly estimated fundamental
 *                                    matrix.
 * @return this instance so that method can be easily chained.
 */
public T setFundamentalMatrixConfidence(final double fundamentalMatrixConfidence) {
    this.fundamentalMatrixConfidence = fundamentalMatrixConfidence;
    //noinspection unchecked
    return (T) this;
}

/**
 * Gets maximum number of iterations to robustly estimate fundamental matrix.
 * This is only used when general scenes are allowed.
 *
 * @return maximum number of iterations to robustly estimate fundamental matrix.
 */
public int getFundamentalMatrixMaxIterations() {
    return fundamentalMatrixMaxIterations;
}

/**
 * Sets maximum number of iterations to robustly estimate fundamental matrix.
 * This is only used when general scenes are allowed.
 *
 * @param fundamentalMatrixMaxIterations maximum number of iterations to robustly
 *                                       estimate fundamental matrix.
 * @return this instance so that method can be easily chained.
 */
public T setFundamentalMatrixMaxIterations(final int fundamentalMatrixMaxIterations) {
    this.fundamentalMatrixMaxIterations = fundamentalMatrixMaxIterations;
    //noinspection unchecked
    return (T) this;
}

/**
 * Gets threshold to determine whether samples for robust fundamental matrix
 * estimation are inliers or not.
 * This is only used when general scenes are allowed.
 *
 * @return threshold to determine whether samples for robust fundamental matrix
 * estimation are inliers or not.
 */
public double getFundamentalMatrixThreshold() {
    return fundamentalMatrixThreshold;
}

/**
 * Sets threshold to determine whether samples for robust fundamental matrix
 * estimation are inliers or not.
 * This is only used when general scenes are allowed.
 *
 * @param fundamentalMatrixThreshold threshold to determine whether samples for
 *                                   robust fundamental matrix estimation are inliers
 *                                   or not.
 * @return this instance so that method can be easily chained.
 */
public T setFundamentalMatrixThreshold(final double fundamentalMatrixThreshold) {
    this.fundamentalMatrixThreshold = fundamentalMatrixThreshold;
    //noinspection unchecked
    return (T) this;
}

/**
 * Indicates whether inliers must be kept during robust fundamental matrix
 * estimation.
 * This is only used when general scenes are allowed.
 *
 * @return true if inliers must be kept during robust fundamental matrix estimation,
 * false otherwise.
 */
public boolean getFundamentalMatrixComputeAndKeepInliers() {
    return fundamentalMatrixComputeAndKeepInliers;
}

/**
 * Specifies whether inliers must be kept during robust fundamental matrix
 * estimation.
 * This is only used when general scenes are allowed.
 *
 * @param fundamentalMatrixComputeAndKeepInliers true if inliers must be kept
 *                                               during robust fundamental matrix
 *                                               estimation, false otherwise.
 * @return this instance so that method can be easily chained.
 */
public T setFundamentalMatrixComputeAndKeepInliers(final boolean fundamentalMatrixComputeAndKeepInliers) {
    this.fundamentalMatrixComputeAndKeepInliers = fundamentalMatrixComputeAndKeepInliers;
    //noinspection unchecked
    return (T) this;
}

/**
 * Indicates whether residuals must be computed and kept during robust fundamental
 * matrix estimation.
 * This is only used when general scenes are allowed.
 *
 * @return true if residuals must be computed and kept, false otherwise.
 */
public boolean getFundamentalMatrixComputeAndKeepResiduals() {
    return fundamentalMatrixComputeAndKeepResiduals;
}

/**
 * Specifies whether residuals must be computed and kept during robust fundamental
 * matrix estimation.
 * This is only used when general scenes are allowed.
 *
 * @param fundamentalMatrixComputeAndKeepResiduals true if residuals must be computed
 *                                                 and kept, false otherwise.
 * @return this instance so that method can be easily chained.
 */
public T setFundamentalMatrixComputeAndKeepResiduals(final boolean fundamentalMatrixComputeAndKeepResiduals) {
    this.fundamentalMatrixComputeAndKeepResiduals = fundamentalMatrixComputeAndKeepResiduals;
    //noinspection unchecked
    return (T) this;
}

/**
 * Gets method to use for paired cameras' estimation.
 *
 * @return method to use for paired cameras' estimation.
 */
public InitialCamerasEstimatorMethod getPairedCamerasEstimatorMethod() {
```
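These setters all return `(T) this` so calls can be chained on the concrete configuration subclass. A minimal self-contained sketch of that self-typed (recursive generic) pattern, with hypothetical classes rather than the library's:

```java
// Hypothetical classes illustrating the self-typed setter pattern used above.
abstract class BaseConfig<T extends BaseConfig<T>> {
    double threshold = 1e-6;

    @SuppressWarnings("unchecked")
    public T setThreshold(final double threshold) {
        this.threshold = threshold;
        return (T) this; // same trick as the (T) this returns above
    }
}

final class PairedConfig extends BaseConfig<PairedConfig> {
    boolean refine = true;

    public PairedConfig setRefine(final boolean refine) {
        this.refine = refine;
        return this;
    }

    public static void main(final String[] args) {
        // chaining keeps the concrete type throughout
        final PairedConfig cfg = new PairedConfig().setThreshold(1e-4).setRefine(false);
        System.out.println(cfg.threshold + ", " + cfg.refine);
    }
}
```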
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/WeightedDualAbsoluteQuadricEstimator.java | 415 |
com/irurueta/ar/calibration/estimators/WeightedDualAbsoluteQuadricEstimator.java | 636 |
```java
final var transRow = new Matrix(BaseQuadric.N_PARAMS, 4);
final var tmp = new Matrix(BaseQuadric.N_PARAMS, BaseQuadric.N_PARAMS);

Matrix cameraMatrix;
double p11;
double p12;
double p13;
double p14;
double p21;
double p22;
double p23;
double p24;
double p31;
double p32;
double p33;
double p34;
int eqCounter;
var cameraCounter = 0;
double weight;
var previousNorm = 1.0;
for (final var camera : cameras) {
    if (selected[cameraCounter]) {
        eqCounter = 0;

        // normalize cameras to increase accuracy
        camera.normalize();

        cameraMatrix = camera.getInternalMatrix();

        p11 = cameraMatrix.getElementAt(0, 0);
        p21 = cameraMatrix.getElementAt(1, 0);
        p31 = cameraMatrix.getElementAt(2, 0);
        p12 = cameraMatrix.getElementAt(0, 1);
        p22 = cameraMatrix.getElementAt(1, 1);
        p32 = cameraMatrix.getElementAt(2, 1);
        p13 = cameraMatrix.getElementAt(0, 2);
        p23 = cameraMatrix.getElementAt(1, 2);
        p33 = cameraMatrix.getElementAt(2, 2);
        p14 = cameraMatrix.getElementAt(0, 3);
        p24 = cameraMatrix.getElementAt(1, 3);
        p34 = cameraMatrix.getElementAt(2, 3);

        weight = weights[cameraCounter];
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 368 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 664 |
```java
final var a = new Matrix(2 * nHomographies, 6);

var index = 0;
var counter = 0;
ProjectiveTransformation2D t = null;
final var h = new Matrix(ProjectiveTransformation2D.HOM_COORDS, ProjectiveTransformation2D.HOM_COORDS);
// elements ij of homography (last column is not required)
double h11;
double h12;
double h21;
double h22;
double h31;
double h32;
double rowNorm;
double weight;
double factor;

for (final var homography : homographies) {
    if (selected[index]) {
        weight = weights[index];

        // convert homography into projective so it can be normalized
        homography.asMatrix(h);
        if (t == null) {
            t = new ProjectiveTransformation2D(h);
        } else {
            t.setT(h);
        }

        // normalize
        t.normalize();

        // obtain elements of projective transformation matrix
        // there is no need to retrieve internal matrix h, as we already
        // hold a reference
        h11 = h.getElementAt(0, 0);
        h12 = h.getElementAt(0, 1);
        h21 = h.getElementAt(1, 0);
        h22 = h.getElementAt(1, 1);
        h31 = h.getElementAt(2, 0);
        h32 = h.getElementAt(2, 1);

        // fill first equation
        a.setElementAt(counter, 0, h11 * h12);
        a.setElementAt(counter, 1, h11 * h22 + h21 * h12);
        a.setElementAt(counter, 2, h21 * h22);
        a.setElementAt(counter, 3, h11 * h32 + h31 * h12);
```
File | Line |
---|---|
com/irurueta/ar/slam/AbsoluteOrientationConstantVelocityModelSlamEstimator.java | 544 |
com/irurueta/ar/slam/ConstantVelocityModelSlamEstimator.java | 492 |
```java
stateVelocityX = x[7];
x[7] = state.getElementAtIndex(7);
stateVelocityY = x[8];
x[8] = state.getElementAtIndex(8);
stateVelocityZ = x[9];
x[9] = state.getElementAtIndex(9);

// linear acceleration
stateAccelerationX = accumulatedAccelerationSampleX;
stateAccelerationY = accumulatedAccelerationSampleY;
stateAccelerationZ = accumulatedAccelerationSampleZ;

// angular velocity
stateAngularSpeedX = x[10];
x[10] = state.getElementAtIndex(10);
stateAngularSpeedY = x[11];
x[11] = state.getElementAtIndex(11);
stateAngularSpeedZ = x[12];
x[12] = state.getElementAtIndex(12);
}

/**
 * Updates state data of the device by using state matrix obtained from
 * Kalman filter after correction.
 *
 * @param state state matrix obtained from Kalman filter.
 */
private void updateCorrectedState(final Matrix state) {
    // position
    statePositionX = x[0] = state.getElementAtIndex(0);
    statePositionY = x[1] = state.getElementAtIndex(1);
    statePositionZ = x[2] = state.getElementAtIndex(2);

    // quaternion
    stateQuaternionA = x[3] = state.getElementAtIndex(3);
    stateQuaternionB = x[4] = state.getElementAtIndex(4);
    stateQuaternionC = x[5] = state.getElementAtIndex(5);
    stateQuaternionD = x[6] = state.getElementAtIndex(6);
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/DualAbsoluteQuadricRobustEstimator.java | 435 |
com/irurueta/ar/calibration/estimators/ImageOfAbsoluteConicRobustEstimator.java | 395 |
```java
public void setListener(final DualAbsoluteQuadricRobustEstimatorListener listener) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    this.listener = listener;
}

/**
 * Indicates whether listener has been provided and is available for
 * retrieval.
 *
 * @return true if available, false otherwise.
 */
public boolean isListenerAvailable() {
    return listener != null;
}

/**
 * Indicates whether this instance is locked.
 *
 * @return true if this estimator is busy estimating the Dual Absolute
 * Quadric, false otherwise.
 */
public boolean isLocked() {
    return locked;
}

/**
 * Returns amount of progress variation before notifying a progress change
 * during estimation.
 *
 * @return amount of progress variation before notifying a progress change
 * during estimation.
 */
public float getProgressDelta() {
    return progressDelta;
}

/**
 * Sets amount of progress variation before notifying a progress change
 * during estimation.
 *
 * @param progressDelta amount of progress variation before notifying a
 * progress change during estimation.
 * @throws IllegalArgumentException if progress delta is less than zero or
 * greater than 1.
 * @throws LockedException if this estimator is locked because an estimation
 * is being computed.
 */
public void setProgressDelta(final float progressDelta) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (progressDelta < MIN_PROGRESS_DELTA || progressDelta > MAX_PROGRESS_DELTA) {
        throw new IllegalArgumentException();
    }
    this.progressDelta = progressDelta;
}

/**
 * Returns amount of confidence expressed as a value between 0.0 and 1.0
 * (which is equivalent to 100%). The amount of confidence indicates the
 * probability that the estimated result is correct. Usually this value will
 * be close to 1.0, but not exactly 1.0.
 *
 * @return amount of confidence as a value between 0.0 and 1.0.
 */
public double getConfidence() {
    return confidence;
}

/**
 * Sets amount of confidence expressed as a value between 0.0 and 1.0 (which
 * is equivalent to 100%). The amount of confidence indicates the
 * probability that the estimated result is correct. Usually this value will
 * be close to 1.0, but not exactly 1.0.
 *
 * @param confidence confidence to be set as a value between 0.0 and 1.0.
 * @throws IllegalArgumentException if provided value is not between 0.0 and
 * 1.0.
 * @throws LockedException if this estimator is locked because an estimation
 * is being computed.
 */
public void setConfidence(final double confidence) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (confidence < MIN_CONFIDENCE || confidence > MAX_CONFIDENCE) {
        throw new IllegalArgumentException();
    }
    this.confidence = confidence;
}

/**
 * Returns maximum allowed number of iterations. If maximum allowed number
 * of iterations is achieved without converging to a result when calling
 * estimate(), a RobustEstimatorException will be raised.
 *
 * @return maximum allowed number of iterations.
 */
public int getMaxIterations() {
    return maxIterations;
}

/**
 * Sets maximum allowed number of iterations. When the maximum number of
 * iterations is exceeded, result will not be available, however an
 * approximate result will be available for retrieval.
 *
 * @param maxIterations maximum allowed number of iterations to be set.
 * @throws IllegalArgumentException if provided value is less than 1.
 * @throws LockedException if this estimator is locked because an estimation
 * is being computed.
 */
public void setMaxIterations(final int maxIterations) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (maxIterations < MIN_ITERATIONS) {
        throw new IllegalArgumentException();
    }
    this.maxIterations = maxIterations;
}

/**
 * Obtains the list of cameras used to estimate the Dual Absolute Quadric
 * (DAQ).
 *
 * @return list of cameras to estimate the DAQ.
 */
public List<PinholeCamera> getCameras() {
```
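The same range checks recur across the robust estimators sharing this duplicated block. A self-contained sketch of a tiny validator that could centralize them (hypothetical class, not part of the library):

```java
// Hypothetical helper: centralizes the repeated range checks
// (progress delta in [0, 1], confidence in [0, 1], and so on).
public final class EstimatorGuards {

    private EstimatorGuards() {
    }

    public static double requireInRange(final double value, final double min, final double max) {
        if (value < min || value > max) {
            throw new IllegalArgumentException(value + " not in [" + min + ", " + max + "]");
        }
        return value;
    }

    public static void main(final String[] args) {
        // valid: returns the value unchanged
        System.out.println("confidence = " + requireInRange(0.99, 0.0, 1.0));

        // invalid: requireInRange(1.5, 0.0, 1.0) would throw IllegalArgumentException
    }
}
```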
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/DualAbsoluteQuadricRobustEstimator.java | 435 |
com/irurueta/ar/calibration/estimators/RadialDistortionRobustEstimator.java | 296 |
com/irurueta/ar/epipolar/estimators/FundamentalMatrixRobustEstimator.java | 339 |
```java
public void setListener(final DualAbsoluteQuadricRobustEstimatorListener listener) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    this.listener = listener;
}

/**
 * Indicates whether listener has been provided and is available for
 * retrieval.
 *
 * @return true if available, false otherwise.
 */
public boolean isListenerAvailable() {
    return listener != null;
}

/**
 * Indicates whether this instance is locked.
 *
 * @return true if this estimator is busy estimating the Dual Absolute
 * Quadric, false otherwise.
 */
public boolean isLocked() {
    return locked;
}

/**
 * Returns amount of progress variation before notifying a progress change
 * during estimation.
 *
 * @return amount of progress variation before notifying a progress change
 * during estimation.
 */
public float getProgressDelta() {
    return progressDelta;
}

/**
 * Sets amount of progress variation before notifying a progress change
 * during estimation.
 *
 * @param progressDelta amount of progress variation before notifying a
 * progress change during estimation.
 * @throws IllegalArgumentException if progress delta is less than zero or
 * greater than 1.
 * @throws LockedException if this estimator is locked because an estimation
 * is being computed.
 */
public void setProgressDelta(final float progressDelta) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (progressDelta < MIN_PROGRESS_DELTA || progressDelta > MAX_PROGRESS_DELTA) {
        throw new IllegalArgumentException();
    }
    this.progressDelta = progressDelta;
}

/**
 * Returns amount of confidence expressed as a value between 0.0 and 1.0
 * (which is equivalent to 100%). The amount of confidence indicates the
 * probability that the estimated result is correct. Usually this value will
 * be close to 1.0, but not exactly 1.0.
 *
 * @return amount of confidence as a value between 0.0 and 1.0.
 */
public double getConfidence() {
    return confidence;
}

/**
 * Sets amount of confidence expressed as a value between 0.0 and 1.0 (which
 * is equivalent to 100%). The amount of confidence indicates the
 * probability that the estimated result is correct. Usually this value will
 * be close to 1.0, but not exactly 1.0.
 *
 * @param confidence confidence to be set as a value between 0.0 and 1.0.
 * @throws IllegalArgumentException if provided value is not between 0.0 and
 * 1.0.
 * @throws LockedException if this estimator is locked because an estimation
 * is being computed.
 */
public void setConfidence(final double confidence) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (confidence < MIN_CONFIDENCE || confidence > MAX_CONFIDENCE) {
        throw new IllegalArgumentException();
    }
    this.confidence = confidence;
}

/**
 * Returns maximum allowed number of iterations. If maximum allowed number
 * of iterations is achieved without converging to a result when calling
 * estimate(), a RobustEstimatorException will be raised.
 *
 * @return maximum allowed number of iterations.
 */
public int getMaxIterations() {
    return maxIterations;
}

/**
 * Sets maximum allowed number of iterations. When the maximum number of
 * iterations is exceeded, result will not be available, however an
 * approximate result will be available for retrieval.
 *
 * @param maxIterations maximum allowed number of iterations to be set.
 * @throws IllegalArgumentException if provided value is less than 1.
 * @throws LockedException if this estimator is locked because an estimation
 * is being computed.
 */
public void setMaxIterations(final int maxIterations) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (maxIterations < MIN_ITERATIONS) {
        throw new IllegalArgumentException();
    }
    this.maxIterations = maxIterations;
}

/**
 * Obtains the list of cameras used to estimate the Dual Absolute Quadric
 * (DAQ).
 *
 * @return list of cameras to estimate the DAQ.
 */
public List<PinholeCamera> getCameras() {
```
File | Line |
---|---|
com/irurueta/ar/sfm/BaseSparseReconstructorConfiguration.java | 37 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructorConfiguration.java | 34 |
```java
public abstract class BaseSparseReconstructorConfiguration<T extends BaseSparseReconstructorConfiguration<T>>
        implements Serializable {

    /**
     * Default robust fundamental matrix estimator method.
     * This is only used when general scenes are allowed.
     */
    public static final RobustEstimatorMethod DEFAULT_ROBUST_FUNDAMENTAL_MATRIX_ESTIMATOR_METHOD =
            RobustEstimatorMethod.PROSAC;

    /**
     * Default non-robust fundamental matrix estimator method used internally within a robust estimator.
     * This is only used when general scenes are allowed.
     */
    public static final FundamentalMatrixEstimatorMethod DEFAULT_NON_ROBUST_FUNDAMENTAL_MATRIX_ESTIMATOR_METHOD =
            FundamentalMatrixEstimatorMethod.SEVEN_POINTS_ALGORITHM;

    /**
     * Indicates that estimated fundamental matrix is refined by default using all found inliers.
     * This is only used when general scenes are allowed.
     */
    public static final boolean DEFAULT_REFINE_FUNDAMENTAL_MATRIX = true;

    /**
     * Indicates that fundamental matrix covariance is kept by default after the estimation.
     * This is only used when general scenes are allowed.
     */
    public static final boolean DEFAULT_KEEP_FUNDAMENTAL_MATRIX_COVARIANCE = false;

    /**
     * Default confidence of robustly estimated fundamental matrix. By default, this is 99%.
     * This is only used when general scenes are allowed.
     */
    public static final double DEFAULT_FUNDAMENTAL_MATRIX_CONFIDENCE =
            FundamentalMatrixRobustEstimator.DEFAULT_CONFIDENCE;

    /**
     * Default maximum number of iterations to make while robustly estimating fundamental matrix.
     * By default, this is 5000 iterations. This is only used when general scenes are allowed.
     */
    public static final int DEFAULT_FUNDAMENTAL_MATRIX_MAX_ITERATIONS =
            FundamentalMatrixRobustEstimator.DEFAULT_MAX_ITERATIONS;

    /**
     * Default threshold to determine whether samples for robust fundamental matrix estimation are
     * inliers or not.
     * This is only used when general scenes are allowed.
     */
    public static final double DEFAULT_FUNDAMENTAL_MATRIX_THRESHOLD =
            PROSACFundamentalMatrixRobustEstimator.DEFAULT_THRESHOLD;

    /**
     * Default value indicating that inlier data is kept after robust fundamental matrix estimation.
     * This is only used when general scenes are allowed.
     */
    public static final boolean DEFAULT_FUNDAMENTAL_MATRIX_COMPUTE_AND_KEEP_INLIERS = true;

    /**
     * Default value indicating that residual data is kept after robust fundamental matrix estimation.
     * This is only used when general scenes are allowed.
     */
    public static final boolean DEFAULT_FUNDAMENTAL_MATRIX_COMPUTE_AND_KEEP_RESIDUALS = true;

    /**
     * Default method to use for initial cameras' estimation.
     */
    public static final InitialCamerasEstimatorMethod DEFAULT_INITIAL_CAMERAS_ESTIMATOR_METHOD =
            InitialCamerasEstimatorMethod.DUAL_ABSOLUTE_QUADRIC_AND_ESSENTIAL_MATRIX;

    /**
     * Indicates whether an homogeneous point triangulator is used for point triangulation when Dual
     * Absolute Quadric (DAQ) camera initialization is used.
     */
    public static final boolean DEFAULT_DAQ_USE_HOMOGENEOUS_POINT_TRIANGULATOR = true;

    /**
     * Default aspect ratio for initial cameras.
     */
    public static final double DEFAULT_INITIAL_CAMERAS_ASPECT_RATIO = 1.0;

    /**
     * Default horizontal principal point value to use for initial cameras estimation using
     * Dual Image of Absolute Conic (DIAC) or Dual Absolute Quadric (DAQ) methods.
     */
    public static final double DEFAULT_INITIAL_CAMERAS_PRINCIPAL_POINT_X = 0.0;

    /**
     * Default vertical principal point value to use for initial cameras estimation using
     * Dual Image of Absolute Conic (DIAC) or Dual Absolute Quadric (DAQ) methods.
     */
    public static final double DEFAULT_INITIAL_CAMERAS_PRINCIPAL_POINT_Y = 0.0;

    /**
     * Default corrector type to use for point triangulation when initial cameras are
     * being estimated using either Dual Image of Absolute Conic (DIAC), Dual Absolute Quadric
     * (DAQ) or essential matrix methods.
     */
    public static final CorrectorType DEFAULT_INITIAL_CAMERAS_CORRECTOR_TYPE = CorrectorType.SAMPSON_CORRECTOR;

    /**
     * Default value indicating whether valid triangulated points are marked during initial
     * cameras estimation using either Dual Image of Absolute Conic (DIAC) or essential matrix
     * methods.
     */
    public static final boolean DEFAULT_INITIAL_CAMERAS_MARK_VALID_TRIANGULATED_POINTS = true;

    /**
     * Indicates whether a general (points are laying in a general 3D position) scene is
     * allowed.
     * When true, an initial geometry estimation is attempted for general points.
     */
    public static final boolean DEFAULT_ALLOW_GENERAL_SCENE = true;

    /**
     * Indicates whether a planar (points laying in a 3D plane) scene is allowed.
     * When true, an initial geometry estimation is attempted for planar points.
     */
    public static final boolean DEFAULT_ALLOW_PLANAR_SCENE = true;

    /**
     * Default robust planar homography estimator method.
     * This is only used when planar scenes are allowed.
     */
    public static final RobustEstimatorMethod DEFAULT_ROBUST_PLANAR_HOMOGRAPHY_ESTIMATOR_METHOD =
            RobustEstimatorMethod.PROMEDS;

    /**
     * Indicates that planar homography is refined by default using all found inliers.
     * This is only used when planar scenes are allowed.
     */
    public static final boolean DEFAULT_REFINE_PLANAR_HOMOGRAPHY = true;

    /**
     * Indicates that planar homography covariance is kept by default after estimation.
     * This is only used when planar scenes are allowed.
     */
    public static final boolean DEFAULT_KEEP_PLANAR_HOMOGRAPHY_COVARIANCE = false;

    /**
     * Default confidence of robustly estimated planar homography. By default, this is 99%.
     * This is only used when planar scenes are allowed.
     */
    public static final double DEFAULT_PLANAR_HOMOGRAPHY_CONFIDENCE =
            ProjectiveTransformation2DRobustEstimator.DEFAULT_CONFIDENCE;

    /**
     * Default maximum number of iterations to make while robustly estimating planar
     * homography. By default, this is 5000 iterations.
     * This is only used when planar scenes are allowed.
     */
    public static final int DEFAULT_PLANAR_HOMOGRAPHY_MAX_ITERATIONS =
            ProjectiveTransformation2DRobustEstimator.DEFAULT_MAX_ITERATIONS;

    /**
     * Default threshold to determine whether samples for robust projective 2D transformation
     * estimation are inliers or not.
     * This is only used when planar scenes are allowed.
     */
    public static final double DEFAULT_PLANAR_HOMOGRAPHY_THRESHOLD = 1e-3;

    /**
     * Default value indicating that inlier data is kept after robust planar homography estimation.
     * This is only used when planar scenes are allowed.
     */
    public static final boolean DEFAULT_PLANAR_HOMOGRAPHY_COMPUTE_AND_KEEP_INLIERS = true;

    /**
     * Default value indicating that residual data is kept after robust planar homography
     * estimation.
     * This is only used when planar scenes are allowed.
     */
    public static final boolean DEFAULT_PLANAR_HOMOGRAPHY_COMPUTE_AND_KEEP_RESIDUALS = true;
```
File | Line |
---|---|
com/irurueta/ar/sfm/MSACRobustSinglePoint3DTriangulator.java | 195 |
com/irurueta/ar/sfm/PROMedSRobustSinglePoint3DTriangulator.java | 350 |
com/irurueta/ar/sfm/PROSACRobustSinglePoint3DTriangulator.java | 312 |
com/irurueta/ar/sfm/RANSACRobustSinglePoint3DTriangulator.java | 195 |
```java
}

@Override
public int getTotalSamples() {
    return points2D.size();
}

@Override
public int getSubsetSize() {
    return MIN_REQUIRED_VIEWS;
}

@Override
public void estimatePreliminarSolutions(final int[] samplesIndices, final List<Point3D> solutions) {
    subsetPoints.clear();
    subsetPoints.add(points2D.get(samplesIndices[0]));
    subsetPoints.add(points2D.get(samplesIndices[1]));

    subsetCameras.clear();
    subsetCameras.add(cameras.get(samplesIndices[0]));
    subsetCameras.add(cameras.get(samplesIndices[1]));

    try {
        triangulator.setPointsAndCameras(subsetPoints, subsetCameras);
        final var triangulated = triangulator.triangulate();
        solutions.add(triangulated);
    } catch (final Exception e) {
        // if anything fails, no solution is added
    }
}

@Override
public double computeResidual(final Point3D currentEstimation, final int i) {
    final var point2D = points2D.get(i);
    final var camera = cameras.get(i);

    // project estimated point with camera
    camera.project(currentEstimation, testPoint);

    // return distance of projected point with respect to the original one
    // as a residual
    return testPoint.distanceTo(point2D);
}

@Override
public boolean isReady() {
    return MSACRobustSinglePoint3DTriangulator.this.isReady();
```
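The residual computed above is a reprojection error: the candidate 3D point is projected through the view's camera and compared with the observed 2D point. A self-contained numeric sketch using a trivial pinhole model with unit focal length (hypothetical values; not the library's projection code):

```java
// Hypothetical demo of a reprojection residual.
public final class ReprojectionResidualDemo {
    public static void main(final String[] args) {
        final double[] point3D = {0.2, -0.1, 2.0};  // candidate triangulation
        final double[] observed = {0.12, -0.03};    // measured image point

        // pinhole projection for f = 1: (x/z, y/z)
        final var projX = point3D[0] / point3D[2];
        final var projY = point3D[1] / point3D[2];

        final var residual = Math.hypot(projX - observed[0], projY - observed[1]);
        System.out.println("reprojection error = " + residual);
    }
}
```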
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 436 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 580 |
```java
a.setElementAt(counter, 4, a.getElementAt(counter, 4) / rowNorm);
counter++;

// fill second equation
a.setElementAt(counter, 0, Math.pow(h11, 2.0) - Math.pow(h12, 2.0));
a.setElementAt(counter, 1, Math.pow(h21, 2.0) - Math.pow(h22, 2.0));
a.setElementAt(counter, 2, 2.0 * (h11 * h31 - h12 * h32));
a.setElementAt(counter, 3, 2.0 * (h21 * h31 - h22 * h32));
a.setElementAt(counter, 4, Math.pow(h31, 2.0) - Math.pow(h32, 2.0));

// normalize row
rowNorm = Math.sqrt(Math.pow(a.getElementAt(counter, 0), 2.0)
        + Math.pow(a.getElementAt(counter, 1), 2.0)
        + Math.pow(a.getElementAt(counter, 2), 2.0)
        + Math.pow(a.getElementAt(counter, 3), 2.0)
        + Math.pow(a.getElementAt(counter, 4), 2.0));
```
File | Line |
---|---|
com/irurueta/ar/sfm/LMedSRobustSinglePoint3DTriangulator.java | 231 |
com/irurueta/ar/sfm/MSACRobustSinglePoint3DTriangulator.java | 197 |
com/irurueta/ar/sfm/PROMedSRobustSinglePoint3DTriangulator.java | 352 |
com/irurueta/ar/sfm/PROSACRobustSinglePoint3DTriangulator.java | 314 |
com/irurueta/ar/sfm/RANSACRobustSinglePoint3DTriangulator.java | 197 |
```java
@Override
public int getTotalSamples() {
    return points2D.size();
}

@Override
public int getSubsetSize() {
    return MIN_REQUIRED_VIEWS;
}

@Override
public void estimatePreliminarSolutions(final int[] samplesIndices, final List<Point3D> solutions) {
    subsetPoints.clear();
    subsetPoints.add(points2D.get(samplesIndices[0]));
    subsetPoints.add(points2D.get(samplesIndices[1]));

    subsetCameras.clear();
    subsetCameras.add(cameras.get(samplesIndices[0]));
    subsetCameras.add(cameras.get(samplesIndices[1]));

    try {
        triangulator.setPointsAndCameras(subsetPoints, subsetCameras);
        final var triangulated = triangulator.triangulate();
        solutions.add(triangulated);
    } catch (final Exception e) {
        // if anything fails, no solution is added
    }
}

@Override
public double computeResidual(final Point3D currentEstimation, final int i) {
    final var point2D = points2D.get(i);
    final var camera = cameras.get(i);

    // project estimated point with camera
    camera.project(currentEstimation, testPoint);

    // return distance of projected point with respect to the original one
    // as a residual
    return testPoint.distanceTo(point2D);
}

@Override
public boolean isReady() {
    return LMedSRobustSinglePoint3DTriangulator.this.isReady();
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 938 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 1083 |
```java
final var a = new Matrix(2 * nHomographies, 4);
final var sqrAspectRatio = Math.pow(focalDistanceAspectRatio, 2.0);

var index = 0;
var counter = 0;
ProjectiveTransformation2D t = null;
final var h = new Matrix(ProjectiveTransformation2D.HOM_COORDS, ProjectiveTransformation2D.HOM_COORDS);
// elements ij of homography (last column is not required)
double h11;
double h12;
double h21;
double h22;
double h31;
double h32;
double rowNorm;
double weight;
double factor;

for (final var homography : homographies) {
    if (selected[index]) {
        weight = weights[index];

        // convert homography into projective so it can be normalized
        homography.asMatrix(h);
        if (t == null) {
            t = new ProjectiveTransformation2D(h);
        } else {
            t.setT(h);
        }

        // normalize
        t.normalize();

        // obtain elements of projective transformation matrix
        // there is no need to retrieve internal matrix h, as we already
        // hold a reference
        h11 = h.getElementAt(0, 0);
        h12 = h.getElementAt(0, 1);
        h21 = h.getElementAt(1, 0);
        h22 = h.getElementAt(1, 1);
        h31 = h.getElementAt(2, 0);
        h32 = h.getElementAt(2, 1);

        // fill first equation
        a.setElementAt(counter, 0, h11 * h12 + h21 * h22 / sqrAspectRatio);
        a.setElementAt(counter, 1, h11 * h32 + h31 * h12);
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSERadialDistortionEstimator.java | 282 |
com/irurueta/ar/calibration/estimators/WeightedRadialDistortionEstimator.java | 416 |
```java
uDenormHomX = undistorted.getHomX();
uDenormHomY = undistorted.getHomY();
uDenormHomW = undistorted.getHomW();

uDenormInhomX = uDenormHomX / uDenormHomW;
uDenormInhomY = uDenormHomY / uDenormHomW;

// multiply intrinsic parameters by undistorted point
uNormHomX = kInv.getElementAt(0, 0) * uDenormHomX + kInv.getElementAt(0, 1) * uDenormHomY
        + kInv.getElementAt(0, 2) * uDenormHomW;
uNormHomY = kInv.getElementAt(1, 0) * uDenormHomX + kInv.getElementAt(1, 1) * uDenormHomY
        + kInv.getElementAt(1, 2) * uDenormHomW;
uNormHomW = kInv.getElementAt(2, 0) * uDenormHomX + kInv.getElementAt(2, 1) * uDenormHomY
        + kInv.getElementAt(2, 2) * uDenormHomW;

uNormInhomX = uNormHomX / uNormHomW;
uNormInhomY = uNormHomY / uNormHomW;

r2 = uNormInhomX * uNormInhomX + uNormInhomY * uNormInhomY;

dInhomX = distorted.getInhomX();
dInhomY = distorted.getInhomY();

a = 1.0;
rowNormX = rowNormY = 0.0;
for (var i = 0; i < numKParams; i++) {
    a *= r2;

    // x and y coordinates generate linearly dependent equations, for
    // that reason we need more than one point

    // x coordinates
    value = (uDenormInhomX - centerX) * a;
```
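The loop above accumulates successive powers of r² for each distortion coefficient, which matches the usual polynomial radial model d = u · (1 + k₁r² + k₂r⁴ + …). A self-contained sketch with illustrative values (hypothetical, not the library code):

```java
// Hypothetical demo of the polynomial radial distortion model.
public final class RadialModelDemo {
    public static void main(final String[] args) {
        final double[] k = {0.1, -0.01};    // k1, k2
        final double ux = 0.5;
        final double uy = 0.25;             // normalized undistorted point

        final var r2 = ux * ux + uy * uy;
        var a = 1.0;
        var factor = 1.0;
        for (final var ki : k) {
            a *= r2;                        // r^2, then r^4, ...
            factor += ki * a;
        }

        System.out.println("distorted = (" + (ux * factor) + ", " + (uy * factor) + ")");
    }
}
```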
File | Line |
---|---|
com/irurueta/ar/slam/ConstantVelocityModelSlamEstimator.java | 371 |
com/irurueta/ar/slam/SlamEstimator.java | 398 |
```java
lastAngularSpeedZ = stateAngularSpeedZ;
lastTimestampNanos = timestamp;

if (listener != null) {
    listener.onFullSampleProcessed(this);
}
}

/**
 * Resets position, linear velocity, linear acceleration, orientation and
 * angular speed to provided values.
 * This method implementation also resets Kalman filter state.
 *
 * @param statePositionX position along x-axis expressed in meters (m).
 * @param statePositionY position along y-axis expressed in meters (m).
 * @param statePositionZ position along z-axis expressed in meters (m).
 * @param stateVelocityX linear velocity along x-axis expressed in meters
 * per second (m/s).
 * @param stateVelocityY linear velocity along y-axis expressed in meters
 * per second (m/s).
 * @param stateVelocityZ linear velocity along z-axis expressed in meters
 * per second (m/s).
 * @param stateAccelerationX linear acceleration along x-axis expressed in
 * meters per squared second (m/s^2).
 * @param stateAccelerationY linear acceleration along y-axis expressed in
 * meters per squared second (m/s^2).
 * @param stateAccelerationZ linear acceleration along z-axis expressed in
 * meters per squared second (m/s^2).
 * @param stateQuaternionA A value of orientation quaternion.
 * @param stateQuaternionB B value of orientation quaternion.
 * @param stateQuaternionC C value of orientation quaternion.
 * @param stateQuaternionD D value of orientation quaternion.
 * @param stateAngularSpeedX angular speed along x-axis expressed in radians
 * per second (rad/s).
 * @param stateAngularSpeedY angular speed along y-axis expressed in radians
 * per second (rad/s).
 * @param stateAngularSpeedZ angular speed along z-axis expressed in radians
 * per second (rad/s).
 */
@Override
protected void reset(
        final double statePositionX, final double statePositionY, final double statePositionZ,
        final double stateVelocityX, final double stateVelocityY, final double stateVelocityZ,
        final double stateAccelerationX, final double stateAccelerationY, final double stateAccelerationZ,
        final double stateQuaternionA, final double stateQuaternionB, final double stateQuaternionC,
        final double stateQuaternionD, final double stateAngularSpeedX, final double stateAngularSpeedY,
        final double stateAngularSpeedZ) {
    super.reset(statePositionX, statePositionY, statePositionZ, stateVelocityX, stateVelocityY, stateVelocityZ,
            stateAccelerationX, stateAccelerationY, stateAccelerationZ, stateQuaternionA, stateQuaternionB,
            stateQuaternionC, stateQuaternionD, stateAngularSpeedX, stateAngularSpeedY, stateAngularSpeedZ);

    if (x != null) {
        // position
        x[0] = statePositionX;
        x[1] = statePositionY;
        x[2] = statePositionZ;

        // quaternion
        x[3] = stateQuaternionA;
        x[4] = stateQuaternionB;
        x[5] = stateQuaternionC;
        x[6] = stateQuaternionD;

        // velocity
        x[7] = stateVelocityX;
        x[8] = stateVelocityY;
        x[9] = stateVelocityZ;

        // angular speed
        x[10] = stateAngularSpeedX;
```
File | Line |
---|---|
com/irurueta/ar/epipolar/estimators/MSACFundamentalMatrixRobustEstimator.java | 221 |
com/irurueta/ar/epipolar/estimators/PROSACFundamentalMatrixRobustEstimator.java | 502 |
```java
final var innerEstimator = new MSACRobustEstimator<FundamentalMatrix>(new MSACRobustEstimatorListener<>() {

    // subset of left points
    private final List<Point2D> subsetLeftPoints = new ArrayList<>();

    // subset of right points
    private final List<Point2D> subsetRightPoints = new ArrayList<>();

    @Override
    public double getThreshold() {
        return threshold;
    }

    @Override
    public int getTotalSamples() {
        return leftPoints.size();
    }

    @Override
    public int getSubsetSize() {
        return getMinRequiredPoints();
    }

    @Override
    public void estimatePreliminarSolutions(
            final int[] samplesIndices, final List<FundamentalMatrix> solutions) {
        subsetLeftPoints.clear();
        subsetRightPoints.clear();
        for (final var samplesIndex : samplesIndices) {
            subsetLeftPoints.add(leftPoints.get(samplesIndex));
            subsetRightPoints.add(rightPoints.get(samplesIndex));
        }
        nonRobustEstimate(solutions, subsetLeftPoints, subsetRightPoints);
    }

    @Override
    public double computeResidual(final FundamentalMatrix currentEstimation, final int i) {
        final var leftPoint = leftPoints.get(i);
        final var rightPoint = rightPoints.get(i);
        return residual(currentEstimation, leftPoint, rightPoint);
    }

    @Override
    public boolean isReady() {
        return MSACFundamentalMatrixRobustEstimator.this.isReady();
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 710 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 985 |
```java
a.setElementAt(counter, 2, h21 * h22);
a.setElementAt(counter, 3, h31 * h32);

// normalize row
rowNorm = Math.sqrt(Math.pow(a.getElementAt(counter, 0), 2.0)
        + Math.pow(a.getElementAt(counter, 1), 2.0)
        + Math.pow(a.getElementAt(counter, 2), 2.0)
        + Math.pow(a.getElementAt(counter, 3), 2.0));

factor = weight / rowNorm;

a.setElementAt(counter, 0, a.getElementAt(counter, 0) * factor);
a.setElementAt(counter, 1, a.getElementAt(counter, 1) * factor);
a.setElementAt(counter, 2, a.getElementAt(counter, 2) * factor);
a.setElementAt(counter, 3, a.getElementAt(counter, 3) * factor);
counter++;

// fill second equation
a.setElementAt(counter, 0, Math.pow(h11, 2.0) - Math.pow(h12, 2.0));
```
File | Line |
---|---|
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 755 |
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 951 |
```java
                (RANSACEPnPPointCorrespondencePinholeCameraRobustEstimator) cameraEstimator;
        ransacCameraEstimator.setThreshold(configuration.getAdditionalCamerasThreshold());
        ransacCameraEstimator.setComputeAndKeepInliersEnabled(
                configuration.getAdditionalCamerasComputeAndKeepInliers());
        ransacCameraEstimator.setComputeAndKeepResidualsEnabled(
                configuration.getAdditionalCamerasComputeAndKeepResiduals());
        break;
    default:
        break;
}

cameraEstimator.setSuggestSkewnessValueEnabled(
        configuration.isAdditionalCamerasSuggestSkewnessValueEnabled());
cameraEstimator.setSuggestedSkewnessValue(
        configuration.getAdditionalCamerasSuggestedSkewnessValue());

cameraEstimator.setSuggestHorizontalFocalLengthEnabled(
        configuration.isAdditionalCamerasSuggestHorizontalFocalLengthEnabled());
cameraEstimator.setSuggestedHorizontalFocalLengthValue(
        configuration.getAdditionalCamerasSuggestedHorizontalFocalLengthValue());

cameraEstimator.setSuggestVerticalFocalLengthEnabled(
        configuration.isAdditionalCamerasSuggestVerticalFocalLengthEnabled());
cameraEstimator.setSuggestedVerticalFocalLengthValue(
        configuration.getAdditionalCamerasSuggestedVerticalFocalLengthValue());

cameraEstimator.setSuggestAspectRatioEnabled(
        configuration.isAdditionalCamerasSuggestAspectRatioEnabled());
cameraEstimator.setSuggestedAspectRatioValue(
        configuration.getAdditionalCamerasSuggestedAspectRatioValue());

cameraEstimator.setSuggestPrincipalPointEnabled(
        configuration.isAdditionalCamerasSuggestPrincipalPointEnabled());
cameraEstimator.setSuggestedPrincipalPointValue(
        configuration.getAdditionalCamerasSuggestedPrincipalPointValue());

currentCamera = cameraEstimator.estimate();
currentCameraCovariance = cameraEstimator.getCovariance();

//noinspection unchecked
listener.onSamplesAccepted((R) this, viewCount, previousViewTrackedSamples, currentViewTrackedSamples);

allPreviousViewSamples.clear();
allPreviousViewSamples.addAll(currentViewTrackedSamples);
allPreviousViewSamples.addAll(currentViewNewlySpawnedSamples);

previousViewTrackedSamples = currentViewTrackedSamples;
previousViewId = currentViewId;
currentViewId = viewCount;
}
```
File | Line |
---|---|
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 1642 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 653 |
```java
        p2.getInhomY() - principalPointY);
rightPoints.add(rightPoint);

qualityScores[i] = match.getQualityScore();
i++;
}

try {
    final var homographyEstimator = PointCorrespondenceProjectiveTransformation2DRobustEstimator.create(
            configuration.getRobustPlanarHomographyEstimatorMethod());
    homographyEstimator.setResultRefined(configuration.isPlanarHomographyRefined());
    homographyEstimator.setCovarianceKept(configuration.isPlanarHomographyCovarianceKept());
    homographyEstimator.setConfidence(configuration.getPlanarHomographyConfidence());
    homographyEstimator.setMaxIterations(configuration.getPlanarHomographyMaxIterations());

    switch (configuration.getRobustPlanarHomographyEstimatorMethod()) {
        case LMEDS:
            ((LMedSPointCorrespondenceProjectiveTransformation2DRobustEstimator) homographyEstimator)
                    .setStopThreshold(configuration.getPlanarHomographyThreshold());
            break;
        case MSAC:
            ((MSACPointCorrespondenceProjectiveTransformation2DRobustEstimator) homographyEstimator)
                    .setThreshold(configuration.getPlanarHomographyThreshold());
            break;
        case PROMEDS:
            ((PROMedSPointCorrespondenceProjectiveTransformation2DRobustEstimator) homographyEstimator)
                    .setStopThreshold(configuration.getPlanarHomographyThreshold());
            break;
        case PROSAC:
            final var prosacHomographyEstimator =
                    (PROSACPointCorrespondenceProjectiveTransformation2DRobustEstimator) homographyEstimator;
            prosacHomographyEstimator.setThreshold(configuration.getPlanarHomographyThreshold());
            prosacHomographyEstimator.setComputeAndKeepInliersEnabled(
                    configuration.getPlanarHomographyComputeAndKeepInliers());
            prosacHomographyEstimator.setComputeAndKeepResidualsEnabled(
                    configuration.getPlanarHomographyComputeAndKeepResiduals());
            break;
        case RANSAC:
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 231 |
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 525 |
```java
    a = new Matrix(MIN_REQUIRED_EQUATIONS, 6);
}

var counter = 0;
ProjectiveTransformation2D t = null;
final var h = new Matrix(ProjectiveTransformation2D.HOM_COORDS, ProjectiveTransformation2D.HOM_COORDS);
// elements ij of homography (last column is not required)
double h11;
double h12;
double h21;
double h22;
double h31;
double h32;
double rowNorm;

for (final var homography : homographies) {
    // convert homography into projective so it can be normalized
    homography.asMatrix(h);
    if (t == null) {
        t = new ProjectiveTransformation2D(h);
    } else {
        t.setT(h);
    }

    // normalize
    t.normalize();

    // obtain elements of projective transformation matrix
    // there is no need to retrieve internal matrix h, as we already
    // hold a reference
    h11 = h.getElementAt(0, 0);
    h12 = h.getElementAt(0, 1);
    h21 = h.getElementAt(1, 0);
    h22 = h.getElementAt(1, 1);
    h31 = h.getElementAt(2, 0);
    h32 = h.getElementAt(2, 1);

    // fill first equation
    a.setElementAt(counter, 0, h11 * h12);
    a.setElementAt(counter, 1, h11 * h22 + h21 * h12);
    a.setElementAt(counter, 2, h21 * h22);
    a.setElementAt(counter, 3, h11 * h32 + h31 * h12);
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMedSRadialDistortionRobustEstimator.java | 273 |
com/irurueta/ar/calibration/estimators/RANSACRadialDistortionRobustEstimator.java | 238 |
```java
    return RadialDistortionRobustEstimator.MIN_NUMBER_OF_POINTS;
}

@Override
public void estimatePreliminarSolutions(
        final int[] samplesIndices, final List<RadialDistortion> solutions) {
    subsetDistorted.clear();
    subsetDistorted.add(distortedPoints.get(samplesIndices[0]));
    subsetDistorted.add(distortedPoints.get(samplesIndices[1]));

    subsetUndistorted.clear();
    subsetUndistorted.add(undistortedPoints.get(samplesIndices[0]));
    subsetUndistorted.add(undistortedPoints.get(samplesIndices[1]));

    try {
        radialDistortionEstimator.setPoints(distortedPoints, undistortedPoints);
        radialDistortionEstimator.setPoints(subsetDistorted, subsetUndistorted);

        final var distortion = radialDistortionEstimator.estimate();
        solutions.add(distortion);
    } catch (final Exception e) {
        // if anything fails, no solution is added
    }
}

@Override
public double computeResidual(final RadialDistortion currentEstimation, final int i) {
    final var distortedPoint = distortedPoints.get(i);
    final var undistortedPoint = undistortedPoints.get(i);

    currentEstimation.distort(undistortedPoint, testPoint);

    return testPoint.distanceTo(distortedPoint);
}

@Override
public boolean isReady() {
    return LMedSRadialDistortionRobustEstimator.this.isReady();
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/MSACRadialDistortionRobustEstimator.java | 237 |
com/irurueta/ar/calibration/estimators/RANSACRadialDistortionRobustEstimator.java | 238 |
    return RadialDistortionRobustEstimator.MIN_NUMBER_OF_POINTS;
}

@Override
public void estimatePreliminarSolutions(
        final int[] samplesIndices, final List<RadialDistortion> solutions) {
    subsetDistorted.clear();
    subsetDistorted.add(distortedPoints.get(samplesIndices[0]));
    subsetDistorted.add(distortedPoints.get(samplesIndices[1]));
    subsetUndistorted.clear();
    subsetUndistorted.add(undistortedPoints.get(samplesIndices[0]));
    subsetUndistorted.add(undistortedPoints.get(samplesIndices[1]));
    try {
        radialDistortionEstimator.setPoints(distortedPoints, undistortedPoints);
        radialDistortionEstimator.setPoints(subsetDistorted, subsetUndistorted);
        final var distortion = radialDistortionEstimator.estimate();
        solutions.add(distortion);
    } catch (final Exception e) {
        // if anything fails, no solution is added
    }
}

@Override
public double computeResidual(final RadialDistortion currentEstimation, final int i) {
    final var distortedPoint = distortedPoints.get(i);
    final var undistortedPoint = undistortedPoints.get(i);
    currentEstimation.distort(undistortedPoint, testPoint);
    return testPoint.distanceTo(distortedPoint);
}

@Override
public boolean isReady() {
    return MSACRadialDistortionRobustEstimator.this.isReady();
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/PROMedSRadialDistortionRobustEstimator.java | 438 |
com/irurueta/ar/calibration/estimators/RANSACRadialDistortionRobustEstimator.java | 238 |
    return RadialDistortionRobustEstimator.MIN_NUMBER_OF_POINTS;
}

@Override
public void estimatePreliminarSolutions(
        final int[] samplesIndices, final List<RadialDistortion> solutions) {
    subsetDistorted.clear();
    subsetDistorted.add(distortedPoints.get(samplesIndices[0]));
    subsetDistorted.add(distortedPoints.get(samplesIndices[1]));
    subsetUndistorted.clear();
    subsetUndistorted.add(undistortedPoints.get(samplesIndices[0]));
    subsetUndistorted.add(undistortedPoints.get(samplesIndices[1]));
    try {
        radialDistortionEstimator.setPoints(distortedPoints, undistortedPoints);
        radialDistortionEstimator.setPoints(subsetDistorted, subsetUndistorted);
        final var distortion = radialDistortionEstimator.estimate();
        solutions.add(distortion);
    } catch (final Exception e) {
        // if anything fails, no solution is added
    }
}

@Override
public double computeResidual(final RadialDistortion currentEstimation, final int i) {
    final var distortedPoint = distortedPoints.get(i);
    final var undistortedPoint = undistortedPoints.get(i);
    currentEstimation.distort(undistortedPoint, testPoint);
    return testPoint.distanceTo(distortedPoint);
}

@Override
public boolean isReady() {
    return PROMedSRadialDistortionRobustEstimator.this.isReady();
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/PROSACRadialDistortionRobustEstimator.java | 399 |
com/irurueta/ar/calibration/estimators/RANSACRadialDistortionRobustEstimator.java | 238 |
    return RadialDistortionRobustEstimator.MIN_NUMBER_OF_POINTS;
}

@Override
public void estimatePreliminarSolutions(
        final int[] samplesIndices, final List<RadialDistortion> solutions) {
    subsetDistorted.clear();
    subsetDistorted.add(distortedPoints.get(samplesIndices[0]));
    subsetDistorted.add(distortedPoints.get(samplesIndices[1]));
    subsetUndistorted.clear();
    subsetUndistorted.add(undistortedPoints.get(samplesIndices[0]));
    subsetUndistorted.add(undistortedPoints.get(samplesIndices[1]));
    try {
        radialDistortionEstimator.setPoints(distortedPoints, undistortedPoints);
        radialDistortionEstimator.setPoints(subsetDistorted, subsetUndistorted);
        final var distortion = radialDistortionEstimator.estimate();
        solutions.add(distortion);
    } catch (final Exception e) {
        // if anything fails, no solution is added
    }
}

@Override
public double computeResidual(final RadialDistortion currentEstimation, final int i) {
    final var distortedPoint = distortedPoints.get(i);
    final var undistortedPoint = undistortedPoints.get(i);
    currentEstimation.distort(undistortedPoint, testPoint);
    return testPoint.distanceTo(distortedPoint);
}

@Override
public boolean isReady() {
    return PROSACRadialDistortionRobustEstimator.this.isReady();
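The four entries above are identical except for the outer class named in `isReady()`, and the excerpts show that every field involved already lives in the shared base class `RadialDistortionRobustEstimator`. A sketch of hoisting the invariant logic there (hoisting is a suggestion, not the library's current layout; note also that the first `setPoints` call with the full lists is immediately overwritten by the subset call and looks redundant):

```java
// Sketch: members that could live once in the abstract base class
// RadialDistortionRobustEstimator, which (per the excerpts) already owns
// distortedPoints, undistortedPoints, subsetDistorted, subsetUndistorted,
// radialDistortionEstimator and testPoint.
protected void estimateSubsetSolutions(final int[] samplesIndices,
                                       final List<RadialDistortion> solutions) {
    subsetDistorted.clear();
    subsetDistorted.add(distortedPoints.get(samplesIndices[0]));
    subsetDistorted.add(distortedPoints.get(samplesIndices[1]));
    subsetUndistorted.clear();
    subsetUndistorted.add(undistortedPoints.get(samplesIndices[0]));
    subsetUndistorted.add(undistortedPoints.get(samplesIndices[1]));
    try {
        // only the subset call is effective; the excerpts' preceding call with
        // the full lists is overwritten immediately
        radialDistortionEstimator.setPoints(subsetDistorted, subsetUndistorted);
        solutions.add(radialDistortionEstimator.estimate());
    } catch (final Exception e) {
        // if anything fails, no solution is added
    }
}

protected double residualFor(final RadialDistortion currentEstimation, final int i) {
    currentEstimation.distort(undistortedPoints.get(i), testPoint);
    return testPoint.distanceTo(distortedPoints.get(i));
}
```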
File | Line |
---|---|
com/irurueta/ar/slam/ConstantVelocityModelStatePredictor.java | 187 |
com/irurueta/ar/slam/ConstantVelocityModelStatePredictor.java | 937 |
q = QuaternionPredictor.predict(q, wx, wy, wz, dt, true, qq, qw);

// apply control signals
vx += uvx;
vy += uvy;
vz += uvz;
wx += uwx;
wy += uwy;
wz += uwz;

// set new state
result[0] = r.getInhomX();
result[1] = r.getInhomY();
result[2] = r.getInhomZ();
result[3] = q.getA();
result[4] = q.getB();
result[5] = q.getC();
result[6] = q.getD();
result[7] = vx;
result[8] = vy;
result[9] = vz;
result[10] = wx;
result[11] = wy;
result[12] = wz;

// jacobians
if (jacobianX != null) {
    // [Rr 0  Rv  0  ]
    // [0  Qq 0   Qw ]
    // [0  0  eye 0  ]
    // [0  0  0   eye]
    jacobianX.initialize(0.0);
    jacobianX.setSubmatrix(0, 0, 2, 2, rr);
    jacobianX.setSubmatrix(3, 3, 6, 6, qq);
    jacobianX.setSubmatrix(0, 7, 2, 9, rv);
    for (int i = 7; i < STATE_COMPONENTS; i++) {
File | Line |
---|---|
com/irurueta/ar/slam/ConstantVelocityModelStatePredictor.java | 430 |
com/irurueta/ar/slam/ConstantVelocityModelStatePredictor.java | 937 |
q = QuaternionPredictor.predict(q, wx, wy, wz, dt, true, qq, qw);

// apply control signals
vx += uvx;
vy += uvy;
vz += uvz;
wx += uwx;
wy += uwy;
wz += uwz;

// set new state
result[0] = r.getInhomX();
result[1] = r.getInhomY();
result[2] = r.getInhomZ();
result[3] = q.getA();
result[4] = q.getB();
result[5] = q.getC();
result[6] = q.getD();
result[7] = vx;
result[8] = vy;
result[9] = vz;
result[10] = wx;
result[11] = wy;
result[12] = wz;

// jacobians
if (jacobianX != null) {
    // [Rr 0  Rv  0  ]
    // [0  Qq 0   Qw ]
    // [0  0  eye 0  ]
    // [0  0  0   eye]
    jacobianX.initialize(0.0);
    jacobianX.setSubmatrix(0, 0, 2, 2, rr);
    jacobianX.setSubmatrix(3, 3, 6, 6, qq);
    jacobianX.setSubmatrix(0, 7, 2, 9, rv);
    for (int i = 7; i < STATE_WITH_POSITION_ADJUSTMENT_COMPONENTS; i++) {
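This pair (and the 187/679 and 430/679 pairs later in this report) repeats the same copy of position, quaternion, velocity and angular speed into a 13-component state array; only the loop bound differs. A sketch of a shared writer, with the index layout taken verbatim from the excerpts (`Point3D` and `Quaternion` are the library types used there; the helper itself is hypothetical):

```java
import com.irurueta.geometry.Point3D;
import com.irurueta.geometry.Quaternion;

final class StateWriter {
    private StateWriter() {
    }

    /**
     * Writes position, orientation, linear velocity and angular speed into the
     * 13-component state layout used by both duplicated fragments.
     */
    static void writeState(final double[] result, final Point3D r, final Quaternion q,
                           final double vx, final double vy, final double vz,
                           final double wx, final double wy, final double wz) {
        result[0] = r.getInhomX();
        result[1] = r.getInhomY();
        result[2] = r.getInhomZ();
        result[3] = q.getA();
        result[4] = q.getB();
        result[5] = q.getC();
        result[6] = q.getD();
        result[7] = vx;
        result[8] = vy;
        result[9] = vz;
        result[10] = wx;
        result[11] = wy;
        result[12] = wz;
    }
}
```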
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 566 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 589 |
a.setElementAt(counter, 4, h31 * h32);

// normalize row
rowNorm = Math.sqrt(Math.pow(a.getElementAt(counter, 0), 2.0)
        + Math.pow(a.getElementAt(counter, 1), 2.0)
        + Math.pow(a.getElementAt(counter, 2), 2.0)
        + Math.pow(a.getElementAt(counter, 3), 2.0)
        + Math.pow(a.getElementAt(counter, 4), 2.0));
factor = weight / rowNorm;
a.setElementAt(counter, 0, a.getElementAt(counter, 0) * factor);
a.setElementAt(counter, 1, a.getElementAt(counter, 1) * factor);
a.setElementAt(counter, 2, a.getElementAt(counter, 2) * factor);
a.setElementAt(counter, 3, a.getElementAt(counter, 3) * factor);
a.setElementAt(counter, 4, a.getElementAt(counter, 4) * factor);
counter++;
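This weighted row normalization recurs at lines 566 and 589 of the same file, and the LMSE estimator repeats the unweighted version (see the 423/445 LMSEImageOfAbsoluteConicEstimator entry below). A sketch of one shared scaler, assuming only the `Matrix` accessors visible in the excerpts; passing `weight = 1.0` reproduces the plain normalization:

```java
import com.irurueta.algebra.Matrix;

final class RowScaler {
    private RowScaler() {
    }

    /**
     * Rescales the first numCols entries of the given row to unit norm, then
     * applies the provided weight. The weighted estimator would pass its
     * homography weight; the LMSE estimator would pass 1.0.
     */
    static void scaleRow(final Matrix a, final int row, final int numCols, final double weight) {
        var sqrNorm = 0.0;
        for (var col = 0; col < numCols; col++) {
            final var value = a.getElementAt(row, col);
            sqrNorm += value * value;
        }
        final var factor = weight / Math.sqrt(sqrNorm);
        for (var col = 0; col < numCols; col++) {
            a.setElementAt(row, col, a.getElementAt(row, col) * factor);
        }
    }
}
```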
File | Line |
---|---|
com/irurueta/ar/sfm/BaseSlamTwoViewsSparseReconstructor.java | 218 |
com/irurueta/ar/sfm/KnownBaselineTwoViewsSparseReconstructor.java | 71 |
final var camera1 = estimatedCamera1.getCamera();
final var camera2 = estimatedCamera2.getCamera();
camera1.decompose();
camera2.decompose();

final var center1 = camera1.getCameraCenter();
final var center2 = camera2.getCameraCenter();

final var estimatedBaseline = center1.distanceTo(center2);
final var scale = baseline / estimatedBaseline;
final var scaleTransformation = new MetricTransformation3D(scale);

// update scale of cameras
scaleTransformation.transform(camera1);
scaleTransformation.transform(camera2);

estimatedCamera1.setCamera(camera1);
estimatedCamera2.setCamera(camera2);

// update scale of reconstructed points
final var numPoints = reconstructedPoints.size();
final var reconstructedPoints3D = new ArrayList<Point3D>();
for (final var reconstructedPoint : reconstructedPoints) {
    reconstructedPoints3D.add(reconstructedPoint.getPoint());
}
scaleTransformation.transformAndOverwritePoints(reconstructedPoints3D);

// set scaled points into result
for (var i = 0; i < numPoints; i++) {
    reconstructedPoints.get(i).setPoint(reconstructedPoints3D.get(i));
}
return true;
} catch (final Exception e) {
    failed = true;
    //noinspection unchecked
    listener.onFail((R) this);
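Both reconstructors derive the metric-to-Euclidean scale the same way: the ratio between the known baseline and the distance separating the two estimated camera centers. A sketch of extracting just that step (only `Point3D.distanceTo` and the `MetricTransformation3D` scale constructor from the excerpt are assumed; the helper is hypothetical):

```java
import com.irurueta.geometry.MetricTransformation3D;
import com.irurueta.geometry.Point3D;

final class BaselineScale {
    private BaselineScale() {
    }

    /**
     * Builds the scaling transformation used by both excerpts from a known
     * baseline and the two decomposed camera centers.
     */
    static MetricTransformation3D fromKnownBaseline(
            final Point3D center1, final Point3D center2, final double baseline) {
        final var estimatedBaseline = center1.distanceTo(center2);
        return new MetricTransformation3D(baseline / estimatedBaseline);
    }
}
```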
File | Line |
---|---|
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 1989 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 974 |
final var samples2 = currentEstimatedFundamentalMatrix.getRightSamples();

final var points1 = new ArrayList<Point2D>();
final var points2 = new ArrayList<Point2D>();
final var length = samples1.size();
for (var i = 0; i < length; i++) {
    final var sample1 = samples1.get(i);
    final var sample2 = samples2.get(i);

    final var point1 = sample1.getPoint();
    final var point2 = sample2.getPoint();

    points1.add(point1);
    points2.add(point2);
}

try {
    final var estimator = new DualImageOfAbsoluteConicInitialCamerasEstimator(fundamentalMatrix, points1, points2);
    estimator.setPrincipalPoint(configuration.getPrincipalPointX(), configuration.getPrincipalPointY());
    estimator.setAspectRatio(configuration.getInitialCamerasAspectRatio());
    estimator.setCorrectorType(configuration.getInitialCamerasCorrectorType());
    estimator.setPointsTriangulated(true);
    estimator.setValidTriangulatedPointsMarked(configuration.getInitialCamerasMarkValidTriangulatedPoints());

    estimator.estimate();

    // store cameras
    final var camera1 = estimator.getEstimatedLeftCamera();
    final var camera2 = estimator.getEstimatedRightCamera();
File | Line |
---|---|
com/irurueta/ar/sfm/BaseSlamPairedViewsSparseReconstructor.java | 405 |
com/irurueta/ar/sfm/BaseSlamTwoViewsSparseReconstructor.java | 274 |
final var positionZ = lastPosZ + slamEstimator.getStatePositionZ();

final var velocityX = slamEstimator.getStateVelocityX();
final var velocityY = slamEstimator.getStateVelocityY();
final var velocityZ = slamEstimator.getStateVelocityZ();

final var accelerationX = slamEstimator.getStateAccelerationX();
final var accelerationY = slamEstimator.getStateAccelerationY();
final var accelerationZ = slamEstimator.getStateAccelerationZ();

final var quaternionA = slamEstimator.getStateQuaternionA();
final var quaternionB = slamEstimator.getStateQuaternionB();
final var quaternionC = slamEstimator.getStateQuaternionC();
final var quaternionD = slamEstimator.getStateQuaternionD();

final var angularSpeedX = slamEstimator.getStateAngularSpeedX();
final var angularSpeedY = slamEstimator.getStateAngularSpeedY();
final var angularSpeedZ = slamEstimator.getStateAngularSpeedZ();

//noinspection unchecked
listener.onSlamDataAvailable((R) this, positionX, positionY, positionZ,
        velocityX, velocityY, velocityZ, accelerationX, accelerationY, accelerationZ,
        quaternionA, quaternionB, quaternionC, quaternionD,
        angularSpeedX, angularSpeedY, angularSpeedZ, slamEstimator.getStateCovariance());
}

/**
 * Notifies estimated camera by means of SLAM if notification is enabled at
 * configuration time and intrinsics are already available.
 */
private void notifySlamCameraIfNeeded() {
    if (!configuration.isNotifyEstimatedSlamCameraEnabled()) {
        return;
    }
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 681 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 826 |
homography.asMatrix(h);
if (t == null) {
    t = new ProjectiveTransformation2D(h);
} else {
    t.setT(h);
}
// normalize
t.normalize();

// obtain elements of projective transformation matrix
// there is no need to retrieve internal matrix h, as we already
// hold a reference
h11 = h.getElementAt(0, 0);
h12 = h.getElementAt(0, 1);
h21 = h.getElementAt(1, 0);
h22 = h.getElementAt(1, 1);
h31 = h.getElementAt(2, 0);
h32 = h.getElementAt(2, 1);

// fill first equation
a.setElementAt(counter, 0, h11 * h12);
a.setElementAt(counter, 1, h21 * h22);
a.setElementAt(counter, 2, h31 * h32);

// normalize row
rowNorm = Math.sqrt(Math.pow(a.getElementAt(counter, 0), 2.0)
        + Math.pow(a.getElementAt(counter, 1), 2.0)
        + Math.pow(a.getElementAt(counter, 2), 2.0));
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEDualAbsoluteQuadricEstimator.java | 244 |
com/irurueta/ar/calibration/estimators/LMSEDualAbsoluteQuadricEstimator.java | 422 |
    a = new Matrix(singularityEnforced ? 8 : 12, BaseQuadric.N_PARAMS);
}

Matrix cameraMatrix;
double p11;
double p12;
double p13;
double p14;
double p21;
double p22;
double p23;
double p24;
double p31;
double p32;
double p33;
double p34;
var eqCounter = 0;
final var minReqEqs = getMinRequiredEquations();
for (final var camera : cameras) {
    // normalize cameras to increase accuracy
    camera.normalize();

    cameraMatrix = camera.getInternalMatrix();
    p11 = cameraMatrix.getElementAt(0, 0);
    p21 = cameraMatrix.getElementAt(1, 0);
    p31 = cameraMatrix.getElementAt(2, 0);
    p12 = cameraMatrix.getElementAt(0, 1);
    p22 = cameraMatrix.getElementAt(1, 1);
    p32 = cameraMatrix.getElementAt(2, 1);
    p13 = cameraMatrix.getElementAt(0, 2);
    p23 = cameraMatrix.getElementAt(1, 2);
    p33 = cameraMatrix.getElementAt(2, 2);
    p14 = cameraMatrix.getElementAt(0, 3);
    p24 = cameraMatrix.getElementAt(1, 3);
    p34 = cameraMatrix.getElementAt(2, 3);
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 423 |
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 445 |
a.setElementAt(counter, 4, h31 * h32);

// normalize row
rowNorm = Math.sqrt(Math.pow(a.getElementAt(counter, 0), 2.0)
        + Math.pow(a.getElementAt(counter, 1), 2.0)
        + Math.pow(a.getElementAt(counter, 2), 2.0)
        + Math.pow(a.getElementAt(counter, 3), 2.0)
        + Math.pow(a.getElementAt(counter, 4), 2.0));
a.setElementAt(counter, 0, a.getElementAt(counter, 0) / rowNorm);
a.setElementAt(counter, 1, a.getElementAt(counter, 1) / rowNorm);
a.setElementAt(counter, 2, a.getElementAt(counter, 2) / rowNorm);
a.setElementAt(counter, 3, a.getElementAt(counter, 3) / rowNorm);
a.setElementAt(counter, 4, a.getElementAt(counter, 4) / rowNorm);
counter++;
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 863 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 1003 |
a.setElementAt(counter, 0, Math.pow(h11, 2.0) - Math.pow(h12, 2.0)
        + (Math.pow(h21, 2.0) - Math.pow(h22, 2.0)) / sqrAspectRatio);
a.setElementAt(counter, 1, 2.0 * (h11 * h31 - h12 * h32));
a.setElementAt(counter, 2, 2.0 * (h21 * h31 - h22 * h32));
a.setElementAt(counter, 3, Math.pow(h31, 2.0) - Math.pow(h32, 2.0));

// normalize row
rowNorm = Math.sqrt(Math.pow(a.getElementAt(counter, 0), 2.0)
        + Math.pow(a.getElementAt(counter, 1), 2.0)
        + Math.pow(a.getElementAt(counter, 2), 2.0)
        + Math.pow(a.getElementAt(counter, 3), 2.0));
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 368 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 518 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 664 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 805 |
final var a = new Matrix(2 * nHomographies, 6);
var index = 0;
var counter = 0;
ProjectiveTransformation2D t = null;
final var h = new Matrix(ProjectiveTransformation2D.HOM_COORDS, ProjectiveTransformation2D.HOM_COORDS);
// elements ij of homography (last column is not required)
double h11;
double h12;
double h21;
double h22;
double h31;
double h32;
double rowNorm;
double weight;
double factor;
for (final var homography : homographies) {
    if (selected[index]) {
        weight = weights[index];

        // convert homography into projective so it can be normalized
        homography.asMatrix(h);
        if (t == null) {
            t = new ProjectiveTransformation2D(h);
        } else {
            t.setT(h);
        }
        // normalize
        t.normalize();

        // obtain elements of projective transformation matrix
        // there is no need to retrieve internal matrix h, as we already
        // hold a reference
        h11 = h.getElementAt(0, 0);
        h12 = h.getElementAt(0, 1);
        h21 = h.getElementAt(1, 0);
        h22 = h.getElementAt(1, 1);
        h31 = h.getElementAt(2, 0);
        h32 = h.getElementAt(2, 1);

        // fill first equation
        a.setElementAt(counter, 0, h11 * h12);
        a.setElementAt(counter, 1, h11 * h22 + h21 * h12);
File | Line |
---|---|
com/irurueta/ar/slam/ConstantVelocityModelStatePredictor.java | 187 |
com/irurueta/ar/slam/ConstantVelocityModelStatePredictor.java | 679 |
q = QuaternionPredictor.predict(q, wx, wy, wz, dt, true, qq, qw);

// apply control signals
vx += uvx;
vy += uvy;
vz += uvz;
wx += uwx;
wy += uwy;
wz += uwz;

// set new state
result[0] = r.getInhomX();
result[1] = r.getInhomY();
result[2] = r.getInhomZ();
result[3] = q.getA();
result[4] = q.getB();
result[5] = q.getC();
result[6] = q.getD();
result[7] = vx;
result[8] = vy;
result[9] = vz;
result[10] = wx;
result[11] = wy;
result[12] = wz;

// jacobians
if (jacobianX != null) {
    // [Rr 0  Rv  0  ]
    // [0  Qq 0   Qw ]
    // [0  0  eye 0  ]
    // [0  0  0   eye]
    jacobianX.initialize(0.0);
    jacobianX.setSubmatrix(0, 0, 2, 2, rr);
    jacobianX.setSubmatrix(3, 3, 6, 6, qq);
    jacobianX.setSubmatrix(0, 7, 2, 9, rv);
    for (int i = 7; i < STATE_COMPONENTS; i++) {
File | Line |
---|---|
com/irurueta/ar/slam/ConstantVelocityModelStatePredictor.java | 430 |
com/irurueta/ar/slam/ConstantVelocityModelStatePredictor.java | 679 |
q = QuaternionPredictor.predict(q, wx, wy, wz, dt, true, qq, qw);

// apply control signals
vx += uvx;
vy += uvy;
vz += uvz;
wx += uwx;
wy += uwy;
wz += uwz;

// set new state
result[0] = r.getInhomX();
result[1] = r.getInhomY();
result[2] = r.getInhomZ();
result[3] = q.getA();
result[4] = q.getB();
result[5] = q.getC();
result[6] = q.getD();
result[7] = vx;
result[8] = vy;
result[9] = vz;
result[10] = wx;
result[11] = wy;
result[12] = wz;

// jacobians
if (jacobianX != null) {
    // [Rr 0  Rv  0  ]
    // [0  Qq 0   Qw ]
    // [0  0  eye 0  ]
    // [0  0  0   eye]
    jacobianX.initialize(0.0);
    jacobianX.setSubmatrix(0, 0, 2, 2, rr);
    jacobianX.setSubmatrix(3, 3, 6, 6, qq);
    jacobianX.setSubmatrix(0, 7, 2, 9, rv);
    for (int i = 7; i < STATE_WITH_POSITION_ADJUSTMENT_COMPONENTS; i++) {
File | Line |
---|---|
com/irurueta/ar/slam/ConstantVelocityModelSlamEstimator.java | 444 |
com/irurueta/ar/slam/SlamEstimator.java | 475 |
    x[12] = stateAngularSpeedZ;
    try {
        // set initial Kalman filter state (state pre and post must be two
        // different instances!)
        kalmanFilter.getStatePre().fromArray(x);
        kalmanFilter.getStatePost().fromArray(x);
    } catch (final WrongSizeException ignore) {
        // never thrown
    }
}

error = false;
lastTimestampNanos = -1;
predictionAvailable = false;
}

/**
 * Updates state data of the device by using state matrix obtained after
 * prediction from Kalman filter.
 * To ensure that state follows proper values (especially on quaternions),
 * we keep x values, which have been predicted using the state predictor,
 * which uses analytical values.
 * We then update x using latest Kalman filter state for next iteration
 * on state predictor.
 *
 * @param state state matrix obtained from Kalman filter.
 */
private void updatePredictedState(final Matrix state) {
    // position
    statePositionX = x[0];
    x[0] = state.getElementAtIndex(0);
    statePositionY = x[1];
    x[1] = state.getElementAtIndex(1);
    statePositionZ = x[2];
    x[2] = state.getElementAtIndex(2);

    // quaternion state predictor is more reliable than Kalman filter, for
    // that reason we ignore predicted quaternion values on Kalman filter and
    // simply keep predicted ones. Besides, typically gyroscope samples are
    // much more reliable than accelerometer ones. For that reason state
    // elements corresponding to quaternion (3 to 6) are not copied into the x
    // array.
    stateQuaternionA = x[3];
    stateQuaternionB = x[4];
    stateQuaternionC = x[5];
    stateQuaternionD = x[6];

    // velocity
    stateVelocityX = x[7];
    x[7] = state.getElementAtIndex(7);
    stateVelocityY = x[8];
    x[8] = state.getElementAtIndex(8);
    stateVelocityZ = x[9];
    x[9] = state.getElementAtIndex(9);

    // linear acceleration
    stateAccelerationX = accumulatedAccelerationSampleX;
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/MSACDualAbsoluteQuadricRobustEstimator.java | 161 |
com/irurueta/ar/calibration/estimators/PROSACDualAbsoluteQuadricRobustEstimator.java | 278 |
com/irurueta/ar/calibration/estimators/RANSACDualAbsoluteQuadricRobustEstimator.java | 160 |
final var innerEstimator = new MSACRobustEstimator<DualAbsoluteQuadric>(new MSACRobustEstimatorListener<>() {

    // subset of cameras picked on each iteration
    private final List<PinholeCamera> subsetCameras = new ArrayList<>();

    @Override
    public double getThreshold() {
        return threshold;
    }

    @Override
    public int getTotalSamples() {
        return cameras.size();
    }

    @Override
    public int getSubsetSize() {
        return daqEstimator.getMinNumberOfRequiredCameras();
    }

    @Override
    public void estimatePreliminarSolutions(
            final int[] samplesIndices, final List<DualAbsoluteQuadric> solutions) {
        subsetCameras.clear();
        for (final var samplesIndex : samplesIndices) {
            subsetCameras.add(cameras.get(samplesIndex));
        }

        try {
            daqEstimator.setLMSESolutionAllowed(false);
            daqEstimator.setCameras(subsetCameras);

            final var daq = daqEstimator.estimate();
            solutions.add(daq);
        } catch (final Exception e) {
            // if anything fails, no solution is added
        }
    }

    @Override
    public double computeResidual(final DualAbsoluteQuadric currentEstimation, final int i) {
        return residual(currentEstimation, cameras.get(i));
    }

    @Override
    public boolean isReady() {
        return MSACDualAbsoluteQuadricRobustEstimator.this.isReady();
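This listener body recurs across the MSAC, PROSAC and RANSAC DAQ estimators (and, in the next entry, across the IAC counterparts) with only the threshold accessor and the qualified `isReady()` changing. A sketch of sharing the invariant preliminary-solutions step (the parameter type is an assumption; the excerpts only show the calls used below):

```java
// Hypothetical shared helper: picks the camera subset and runs the non-robust
// LMSE estimator once, exactly as each duplicated listener does today.
static void estimateSubsetDaq(final List<PinholeCamera> cameras,
                              final LMSEDualAbsoluteQuadricEstimator daqEstimator,
                              final int[] samplesIndices,
                              final List<DualAbsoluteQuadric> solutions) {
    final var subsetCameras = new ArrayList<PinholeCamera>();
    for (final var samplesIndex : samplesIndices) {
        subsetCameras.add(cameras.get(samplesIndex));
    }
    try {
        daqEstimator.setLMSESolutionAllowed(false);
        daqEstimator.setCameras(subsetCameras);
        solutions.add(daqEstimator.estimate());
    } catch (final Exception e) {
        // if anything fails, no solution is added
    }
}
```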
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/MSACImageOfAbsoluteConicRobustEstimator.java | 170 |
com/irurueta/ar/calibration/estimators/PROSACImageOfAbsoluteConicRobustEstimator.java | 292 |
final var innerEstimator = new MSACRobustEstimator<ImageOfAbsoluteConic>(new MSACRobustEstimatorListener<>() {

    // subset of homographies picked on each iteration
    private final List<Transformation2D> subsetHomographies = new ArrayList<>();

    @Override
    public double getThreshold() {
        return threshold;
    }

    @Override
    public int getTotalSamples() {
        return homographies.size();
    }

    @Override
    public int getSubsetSize() {
        return iacEstimator.getMinNumberOfRequiredHomographies();
    }

    @Override
    public void estimatePreliminarSolutions(
            final int[] samplesIndices, final List<ImageOfAbsoluteConic> solutions) {
        subsetHomographies.clear();
        for (final var samplesIndex : samplesIndices) {
            subsetHomographies.add(homographies.get(samplesIndex));
        }

        try {
            iacEstimator.setLMSESolutionAllowed(false);
            iacEstimator.setHomographies(subsetHomographies);

            final var iac = iacEstimator.estimate();
            solutions.add(iac);
        } catch (final Exception e) {
            // if anything fails, no solution is added
        }
    }

    @Override
    public double computeResidual(final ImageOfAbsoluteConic currentEstimation, final int i) {
        return residual(currentEstimation, homographies.get(i));
    }

    @Override
    public boolean isReady() {
        return MSACImageOfAbsoluteConicRobustEstimator.this.isReady();
File | Line |
---|---|
com/irurueta/ar/calibration/CameraCalibrator.java | 566 |
com/irurueta/ar/calibration/estimators/ImageOfAbsoluteConicRobustEstimator.java | 219 |
}

/**
 * Returns boolean indicating whether camera skewness is assumed to be zero
 * or not.
 * Skewness determines whether LCD sensor cells are properly aligned or not,
 * where zero indicates perfect alignment.
 * Typically, skewness is a value equal or very close to zero.
 *
 * @return true if camera skewness is assumed to be zero, otherwise camera
 * skewness is estimated.
 */
public boolean isZeroSkewness() {
    return iacEstimator.isZeroSkewness();
}

/**
 * Sets boolean indicating whether camera skewness is assumed to be zero or
 * not.
 * Skewness determines whether LCD sensor cells are properly aligned or not,
 * where zero indicates perfect alignment.
 * Typically, skewness is a value equal or very close to zero.
 *
 * @param zeroSkewness true if camera skewness is assumed to be zero,
 * otherwise camera skewness is estimated.
 * @throws LockedException if this instance is locked.
 */
public void setZeroSkewness(final boolean zeroSkewness) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    iacEstimator.setZeroSkewness(zeroSkewness);
}

/**
 * Returns boolean indicating whether principal point is assumed to be at
 * origin of coordinates or not.
 * Typically principal point is located at image center (origin of
 * coordinates), and usually matches the center of radial distortion if it
 * is taken into account.
 *
 * @return true if principal point is assumed to be at origin of
 * coordinates, false if principal point must be estimated.
 */
public boolean isPrincipalPointAtOrigin() {
    return iacEstimator.isPrincipalPointAtOrigin();
}

/**
 * Sets boolean indicating whether principal point is assumed to be at
 * origin of coordinates or not.
 * Typically principal point is located at image center (origin of
 * coordinates), and usually matches the center of radial distortion if it
 * is taken into account.
 *
 * @param principalPointAtOrigin true if principal point is assumed to be
 * at origin of coordinates, false if principal point must be estimated.
 * @throws LockedException if estimator is locked.
 */
public void setPrincipalPointAtOrigin(final boolean principalPointAtOrigin) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    iacEstimator.setPrincipalPointAtOrigin(principalPointAtOrigin);
}

/**
 * Returns boolean indicating whether aspect ratio of focal distances (i.e.
 * vertical focal distance divided by horizontal focal distance) is known or
 * not.
 * Notice that focal distance aspect ratio is not related to image size
 * aspect ratio. Typically, LCD sensor cells are square and hence aspect
 * ratio of focal distances is known and equal to 1.
 * This value is only taken into account if skewness is assumed to be zero,
 * otherwise it is ignored.
 *
 * @return true if focal distance aspect ratio is known, false otherwise.
 */
public boolean isFocalDistanceAspectRatioKnown() {
    return iacEstimator.isFocalDistanceAspectRatioKnown();
}

/**
 * Sets boolean indicating whether aspect ratio of focal distances (i.e.
 * vertical focal distance divided by horizontal focal distance) is known or
 * not.
 * Notice that focal distance aspect ratio is not related to image size
 * aspect ratio. Typically, LCD sensor cells are square and hence aspect
 * ratio of focal distances is known and equal to 1.
 * This value is only taken into account if skewness is assumed to be zero,
 * otherwise it is ignored.
 *
 * @param focalDistanceAspectRatioKnown true if focal distance aspect ratio
 * is known, false otherwise.
 * @throws LockedException if estimator is locked.
 */
public void setFocalDistanceAspectRatioKnown(final boolean focalDistanceAspectRatioKnown) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    iacEstimator.setFocalDistanceAspectRatioKnown(focalDistanceAspectRatioKnown);
}

/**
 * Returns aspect ratio of focal distances (i.e. vertical focal distance
 * divided by horizontal focal distance).
 * This value is only taken into account if skewness is assumed to be zero
 * and focal distance aspect ratio is marked as known, otherwise it is
 * ignored.
 * By default, this is 1.0, since it is taken into account that typically
 * LCD sensor cells are square and hence aspect ratio of focal distances is
 * known and equal to 1.
 * Notice that focal distance aspect ratio is not related to image size
 * aspect ratio.
 * Notice that a negative aspect ratio indicates that vertical axis is
 * reversed. This can be useful in some situations where image vertical
 * coordinates are reversed respect to the physical world (i.e. in computer
 * graphics typically image vertical coordinates go downwards, while in
 * physical world they go upwards).
 *
 * @return aspect ratio of focal distances.
 */
public double getFocalDistanceAspectRatio() {
    return iacEstimator.getFocalDistanceAspectRatio();
}

/**
 * Sets aspect ratio of focal distances (i.e. vertical focal distance
 * divided by horizontal focal distance).
 * This value is only taken into account if skewness is assumed to be zero
 * and focal distance aspect ratio is marked as known, otherwise it is
 * ignored.
 * By default, this is 1.0, since it is taken into account that typically
 * LCD sensor cells are square and hence aspect ratio of focal distances is
 * known and equal to 1.
 * Notice that focal distance aspect ratio is not related to image size
 * aspect ratio.
 * Notice that a negative aspect ratio indicates that vertical axis is
 * reversed. This can be useful in some situations where image vertical
 * coordinates are reversed respect to the physical world (i.e. in computer
 * graphics typically image vertical coordinates go downwards, while in
 * physical world they go upwards).
 *
 * @param focalDistanceAspectRatio aspect ratio of focal distances to be set.
 * @throws LockedException if estimator is locked.
 * @throws IllegalArgumentException if focal distance aspect ratio is too
 * close to zero, as it might produce numerical instabilities.
 */
public void setFocalDistanceAspectRatio(final double focalDistanceAspectRatio) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    iacEstimator.setFocalDistanceAspectRatio(focalDistanceAspectRatio);
}

/**
 * Indicates whether this instance is locked because calibration is in
 * progress.
 *
 * @return true if this instance is locked, false otherwise.
 */
public boolean isLocked() {
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/DualAbsoluteQuadricEstimator.java | 250 |
com/irurueta/ar/calibration/estimators/ImageOfAbsoluteConicEstimator.java | 207 |
}

/**
 * Returns boolean indicating whether camera skewness is assumed to be zero
 * or not.
 * Skewness determines whether LCD sensor cells are properly aligned or not,
 * where zero indicates perfect alignment.
 * Typically, skewness is a value equal or very close to zero.
 *
 * @return true if camera skewness is assumed to be zero, otherwise camera
 * skewness is estimated.
 */
public boolean isZeroSkewness() {
    return zeroSkewness;
}

/**
 * Sets boolean indicating whether camera skewness is assumed to be zero or
 * not.
 * Skewness determines whether LCD sensor cells are properly aligned or not,
 * where zero indicates perfect alignment.
 * Typically, skewness is a value equal or very close to zero.
 *
 * @param zeroSkewness true if camera skewness is assumed to be zero,
 * otherwise camera skewness is estimated.
 * @throws LockedException if estimator is locked.
 */
public void setZeroSkewness(final boolean zeroSkewness) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    this.zeroSkewness = zeroSkewness;
}

/**
 * Returns boolean indicating whether principal point is assumed to be at
 * origin of coordinates or not.
 * Typically principal point is located at image center (origin of
 * coordinates), and usually matches the center of radial distortion if
 * it is taken into account.
 *
 * @return true if principal point is assumed to be at origin of
 * coordinates, false if principal point must be estimated.
 */
public boolean isPrincipalPointAtOrigin() {
    return principalPointAtOrigin;
}

/**
 * Sets boolean indicating whether principal point is assumed to be at
 * origin of coordinates or not.
 * Typically principal point is located at image center (origin of
 * coordinates), and usually matches the center of radial distortion if it
 * is taken into account.
 *
 * @param principalPointAtOrigin true if principal point is assumed to be at
 * origin of coordinates, false if principal point must be estimated.
 * @throws LockedException if estimator is locked.
 */
public void setPrincipalPointAtOrigin(final boolean principalPointAtOrigin) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    this.principalPointAtOrigin = principalPointAtOrigin;
}

/**
 * Returns boolean indicating whether aspect ratio of focal distances (i.e.
 * vertical focal distance divided by horizontal focal distance) is known or
 * not.
 * Notice that focal distance aspect ratio is not related to image size
 * aspect ratio. Typically, LCD sensor cells are square and hence aspect
 * ratio of focal distances is known and equal to 1.
 * This value is only taken into account if skewness is assumed to be zero,
 * otherwise it is ignored.
 *
 * @return true if focal distance aspect ratio is known, false otherwise.
 */
public boolean isFocalDistanceAspectRatioKnown() {
    return focalDistanceAspectRatioKnown;
}

/**
 * Sets value indicating whether aspect ratio of focal distances (i.e.
 * vertical focal distance divided by horizontal focal distance) is known or
 * not.
 * Notice that focal distance aspect ratio is not related to image size
 * aspect ratio. Typically, LCD sensor cells are square and hence aspect
 * ratio of focal distances is known and equal to 1.
 * This value is only taken into account if skewness is assumed to be zero,
 * otherwise it is ignored.
 *
 * @param focalDistanceAspectRatioKnown true if focal distance aspect ratio
 * is known, false otherwise.
 * @throws LockedException if estimator is locked.
 */
public void setFocalDistanceAspectRatioKnown(final boolean focalDistanceAspectRatioKnown) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    this.focalDistanceAspectRatioKnown = focalDistanceAspectRatioKnown;
}

/**
 * Returns aspect ratio of focal distances (i.e. vertical focal distance
 * divided by horizontal focal distance).
 * This value is only taken into account if skewness is assumed to be zero
 * and focal distance aspect ratio is marked as known, otherwise it is
 * ignored.
 * By default, this is 1.0, since it is taken into account that typically
 * LCD sensor cells are square and hence aspect ratio of focal distances is
 * known and equal to 1.
 * Notice that focal distance aspect ratio is not related to image size
 * aspect ratio.
 * Notice that a negative aspect ratio indicates that vertical axis is
 * reversed. This can be useful in some situations where image vertical
 * coordinates are reversed respect to the physical world (i.e. in computer
 * graphics typically image vertical coordinates go downwards, while in
 * physical world they go upwards).
 *
 * @return aspect ratio of focal distances.
 */
public double getFocalDistanceAspectRatio() {
    return focalDistanceAspectRatio;
}

/**
 * Sets aspect ratio of focal distances (i.e. vertical focal distance
 * divided by horizontal focal distance).
 * This value is only taken into account if skewness is assumed to be zero
 * and focal distance aspect ratio is marked as known, otherwise it is
 * ignored.
 * By default, this is 1.0, since it is taken into account that typically
 * LCD sensor cells are square and hence aspect ratio of focal distances is
 * known and equal to 1.
 * Notice that focal distance aspect ratio is not related to image size
 * aspect ratio.
 * Notice that a negative aspect ratio indicates that vertical axis is
 * reversed. This can be useful in some situations where image vertical
 * coordinates are reversed respect to the physical world (i.e. in computer
 * graphics typically image vertical coordinates go downwards, while in
 * physical world they go upwards).
 *
 * @param focalDistanceAspectRatio aspect ratio of focal distances to be set.
 * @throws LockedException if estimator is locked.
 * @throws IllegalArgumentException if focal distance aspect ratio is too
 * close to zero, as it might produce numerical instabilities.
 */
public void setFocalDistanceAspectRatio(final double focalDistanceAspectRatio) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (Math.abs(focalDistanceAspectRatio) < MIN_ABS_FOCAL_DISTANCE_ASPECT_RATIO) {
        throw new IllegalArgumentException();
    }
    this.focalDistanceAspectRatio = focalDistanceAspectRatio;
}

/**
 * Indicates whether a singular DAQ is enforced or not.
 * Dual Absolute Quadric is singular (has rank 3) in any projective space,
 * however, due to noise in samples, estimated DAQ might not be fully
 * singular.
 *
 * @return true when singular DAQ is enforced, false otherwise.
 */
public boolean isSingularityEnforced() {
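These two entries show the same constraint settings surfacing twice: stored on the estimators, and re-exposed (Javadoc included) by CameraCalibrator, which merely delegates to its IAC estimator. One way to state the contract once is a small shared settings interface; the sketch below is hypothetical (method names and LockedException usage come from the excerpts):

```java
// Hypothetical shared contract: CameraCalibrator and the estimators would both
// implement it, and the calibrator's duplicated wrappers (and their Javadoc)
// would collapse into delegation to this single definition.
public interface IntrinsicConstraintSettings {
    boolean isZeroSkewness();

    void setZeroSkewness(boolean zeroSkewness) throws LockedException;

    boolean isPrincipalPointAtOrigin();

    void setPrincipalPointAtOrigin(boolean principalPointAtOrigin) throws LockedException;

    boolean isFocalDistanceAspectRatioKnown();

    void setFocalDistanceAspectRatioKnown(boolean focalDistanceAspectRatioKnown) throws LockedException;

    double getFocalDistanceAspectRatio();

    void setFocalDistanceAspectRatio(double focalDistanceAspectRatio) throws LockedException;
}
```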
File | Line |
---|---|
com/irurueta/ar/sfm/BaseAbsoluteOrientationSlamSparseReconstructor.java | 192 |
com/irurueta/ar/sfm/BaseSlamSparseReconstructor.java | 280 |
    scaleAndOrientationTransformation.transform(metricCamera2, euclideanCamera2);
}

final var sqrScale = scale * scale;

previousEuclideanEstimatedCamera = new EstimatedCamera();
previousEuclideanEstimatedCamera.setCamera(euclideanCamera1);
previousEuclideanEstimatedCamera.setViewId(previousMetricEstimatedCamera.getViewId());
previousEuclideanEstimatedCamera.setQualityScore(previousMetricEstimatedCamera.getQualityScore());
if (previousMetricEstimatedCamera.getCovariance() != null) {
    previousEuclideanEstimatedCamera.setCovariance(previousMetricEstimatedCamera.getCovariance()
            .multiplyByScalarAndReturnNew(sqrScale));
}

currentEuclideanEstimatedCamera = new EstimatedCamera();
currentEuclideanEstimatedCamera.setCamera(euclideanCamera2);
currentEuclideanEstimatedCamera.setViewId(currentMetricEstimatedCamera.getViewId());
currentEuclideanEstimatedCamera.setQualityScore(currentMetricEstimatedCamera.getQualityScore());
if (currentMetricEstimatedCamera.getCovariance() != null) {
    currentEuclideanEstimatedCamera.setCovariance(currentMetricEstimatedCamera.getCovariance()
            .multiplyByScalarAndReturnNew(sqrScale));
}

// update scale of reconstructed points
final var numPoints = activeMetricReconstructedPoints.size();
final var metricReconstructedPoints3D = new ArrayList<Point3D>();
for (final var reconstructedPoint : activeMetricReconstructedPoints) {
    metricReconstructedPoints3D.add(reconstructedPoint.getPoint());
}

final var euclideanReconstructedPoints3D = scaleAndOrientationTransformation.transformPointsAndReturnNew(
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/RadialDistortionRobustEstimator.java | 311 |
com/irurueta/ar/sfm/RobustSinglePoint3DTriangulator.java | 271 |
}

/**
 * Indicates if this instance is locked because estimation is being computed.
 *
 * @return true if locked, false otherwise.
 */
public boolean isLocked() {
    return locked;
}

/**
 * Returns amount of progress variation before notifying a progress change
 * during estimation.
 *
 * @return amount of progress variation before notifying a progress change
 * during estimation.
 */
public float getProgressDelta() {
    return progressDelta;
}

/**
 * Sets amount of progress variation before notifying a progress change
 * during estimation.
 *
 * @param progressDelta amount of progress variation before notifying a
 * progress change during estimation.
 * @throws IllegalArgumentException if progress delta is less than zero or
 * greater than 1.
 * @throws LockedException if this estimator is locked because an estimation
 * is being computed.
 */
public void setProgressDelta(final float progressDelta) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (progressDelta < MIN_PROGRESS_DELTA || progressDelta > MAX_PROGRESS_DELTA) {
        throw new IllegalArgumentException();
    }
    this.progressDelta = progressDelta;
}

/**
 * Returns amount of confidence expressed as a value between 0.0 and 1.0
 * (which is equivalent to 100%). The amount of confidence indicates the
 * probability that the estimated result is correct. Usually this value will
 * be close to 1.0, but not exactly 1.0.
 *
 * @return amount of confidence as a value between 0.0 and 1.0.
 */
public double getConfidence() {
    return confidence;
}

/**
 * Sets amount of confidence expressed as a value between 0.0 and 1.0 (which
 * is equivalent to 100%). The amount of confidence indicates the
 * probability that the estimated result is correct. Usually this value will
 * be close to 1.0, but not exactly 1.0.
 *
 * @param confidence confidence to be set as a value between 0.0 and 1.0.
 * @throws IllegalArgumentException if provided value is not between 0.0 and
 * 1.0.
 * @throws LockedException if this estimator is locked because an estimation
 * is being computed.
 */
public void setConfidence(final double confidence) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (confidence < MIN_CONFIDENCE || confidence > MAX_CONFIDENCE) {
        throw new IllegalArgumentException();
    }
    this.confidence = confidence;
}

/**
 * Returns maximum allowed number of iterations. If maximum allowed number
 * of iterations is achieved without converging to a result when calling
 * estimate(), a RobustEstimatorException will be raised.
 *
 * @return maximum allowed number of iterations.
 */
public int getMaxIterations() {
    return maxIterations;
}

/**
 * Sets maximum allowed number of iterations. When the maximum number of
 * iterations is exceeded, result will not be available, however an
 * approximate result will be available for retrieval.
 *
 * @param maxIterations maximum allowed number of iterations to be set.
 * @throws IllegalArgumentException if provided value is less than 1.
 * @throws LockedException if this estimator is locked because an estimation
 * is being computed.
 */
public void setMaxIterations(final int maxIterations) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (maxIterations < MIN_ITERATIONS) {
        throw new IllegalArgumentException();
    }
    this.maxIterations = maxIterations;
}

/**
 * Sets list of corresponding distorted and undistorted points.
 *
 * @param distortedPoints list of distorted points. Distorted points are
 * obtained after radial distortion is applied to an undistorted point.
 * @param undistortedPoints list of undistorted points.
 * @throws LockedException if estimator is locked.
 * @throws IllegalArgumentException if provided lists of points don't have
 * the same size.
 */
public void setPoints(final List<Point2D> distortedPoints, final List<Point2D> undistortedPoints)
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/DualAbsoluteQuadricRobustEstimator.java | 450 |
com/irurueta/ar/sfm/RobustSinglePoint3DTriangulator.java | 271 |
}

/**
 * Indicates whether this instance is locked.
 *
 * @return true if this estimator is busy estimating the Dual Absolute
 * Quadric, false otherwise.
 */
public boolean isLocked() {
    return locked;
}

/**
 * Returns amount of progress variation before notifying a progress change
 * during estimation.
 *
 * @return amount of progress variation before notifying a progress change
 * during estimation.
 */
public float getProgressDelta() {
    return progressDelta;
}

/**
 * Sets amount of progress variation before notifying a progress change
 * during estimation.
 *
 * @param progressDelta amount of progress variation before notifying a
 * progress change during estimation.
 * @throws IllegalArgumentException if progress delta is less than zero or
 * greater than 1.
 * @throws LockedException if this estimator is locked because an estimation
 * is being computed.
 */
public void setProgressDelta(final float progressDelta) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (progressDelta < MIN_PROGRESS_DELTA || progressDelta > MAX_PROGRESS_DELTA) {
        throw new IllegalArgumentException();
    }
    this.progressDelta = progressDelta;
}

/**
 * Returns amount of confidence expressed as a value between 0.0 and 1.0
 * (which is equivalent to 100%). The amount of confidence indicates the
 * probability that the estimated result is correct. Usually this value will
 * be close to 1.0, but not exactly 1.0.
 *
 * @return amount of confidence as a value between 0.0 and 1.0.
 */
public double getConfidence() {
    return confidence;
}

/**
 * Sets amount of confidence expressed as a value between 0.0 and 1.0 (which
 * is equivalent to 100%). The amount of confidence indicates the
 * probability that the estimated result is correct. Usually this value will
 * be close to 1.0, but not exactly 1.0.
 *
 * @param confidence confidence to be set as a value between 0.0 and 1.0.
 * @throws IllegalArgumentException if provided value is not between 0.0 and
 * 1.0.
 * @throws LockedException if this estimator is locked because an estimation
 * is being computed.
 */
public void setConfidence(final double confidence) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (confidence < MIN_CONFIDENCE || confidence > MAX_CONFIDENCE) {
        throw new IllegalArgumentException();
    }
    this.confidence = confidence;
}

/**
 * Returns maximum allowed number of iterations. If maximum allowed number
 * of iterations is achieved without converging to a result when calling
 * estimate(), a RobustEstimatorException will be raised.
 *
 * @return maximum allowed number of iterations.
 */
public int getMaxIterations() {
    return maxIterations;
}

/**
 * Sets maximum allowed number of iterations. When the maximum number of
 * iterations is exceeded, result will not be available, however an
 * approximate result will be available for retrieval.
 *
 * @param maxIterations maximum allowed number of iterations to be set.
 * @throws IllegalArgumentException if provided value is less than 1.
 * @throws LockedException if this estimator is locked because an estimation
 * is being computed.
 */
public void setMaxIterations(final int maxIterations) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (maxIterations < MIN_ITERATIONS) {
        throw new IllegalArgumentException();
    }
    this.maxIterations = maxIterations;
}

/**
 * Obtains the list of cameras used to estimate the Dual Absolute Quadric
 * (DAQ).
 *
 * @return list of cameras to estimate the DAQ.
 */
public List<PinholeCamera> getCameras() {
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/ImageOfAbsoluteConicRobustEstimator.java | 410 |
com/irurueta/ar/sfm/RobustSinglePoint3DTriangulator.java | 271 |
}

/**
 * Indicates if this instance is locked because estimation is being
 * computed.
 *
 * @return true if locked, false otherwise.
 */
public boolean isLocked() {
    return locked;
}

/**
 * Returns amount of progress variation before notifying a progress change
 * during estimation.
 *
 * @return amount of progress variation before notifying a progress change
 * during estimation.
 */
public float getProgressDelta() {
    return progressDelta;
}

/**
 * Sets amount of progress variation before notifying a progress change
 * during estimation.
 *
 * @param progressDelta amount of progress variation before notifying a
 * progress change during estimation.
 * @throws IllegalArgumentException if progress delta is less than zero or
 * greater than 1.
 * @throws LockedException if this estimator is locked because an estimation
 * is being computed.
 */
public void setProgressDelta(final float progressDelta) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (progressDelta < MIN_PROGRESS_DELTA || progressDelta > MAX_PROGRESS_DELTA) {
        throw new IllegalArgumentException();
    }
    this.progressDelta = progressDelta;
}

/**
 * Returns amount of confidence expressed as a value between 0.0 and 1.0
 * (which is equivalent to 100%). The amount of confidence indicates the
 * probability that the estimated result is correct. Usually this value will
 * be close to 1.0, but not exactly 1.0.
 *
 * @return amount of confidence as a value between 0.0 and 1.0.
 */
public double getConfidence() {
    return confidence;
}

/**
 * Sets amount of confidence expressed as a value between 0.0 and 1.0 (which
 * is equivalent to 100%). The amount of confidence indicates the
 * probability that the estimated result is correct. Usually this value will
 * be close to 1.0, but not exactly 1.0.
 *
 * @param confidence confidence to be set as a value between 0.0 and 1.0.
 * @throws IllegalArgumentException if provided value is not between 0.0 and
 * 1.0.
 * @throws LockedException if this estimator is locked because an estimation
 * is being computed.
 */
public void setConfidence(final double confidence) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (confidence < MIN_CONFIDENCE || confidence > MAX_CONFIDENCE) {
        throw new IllegalArgumentException();
    }
    this.confidence = confidence;
}

/**
 * Returns maximum allowed number of iterations. If maximum allowed number
 * of iterations is achieved without converging to a result when calling
 * estimate(), a RobustEstimatorException will be raised.
 *
 * @return maximum allowed number of iterations.
 */
public int getMaxIterations() {
    return maxIterations;
}

/**
 * Sets maximum allowed number of iterations. When the maximum number of
 * iterations is exceeded, result will not be available, however an
 * approximate result will be available for retrieval.
 *
 * @param maxIterations maximum allowed number of iterations to be set.
 * @throws IllegalArgumentException if provided value is less than 1.
 * @throws LockedException if this estimator is locked because an estimation
 * is being computed.
 */
public void setMaxIterations(final int maxIterations) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (maxIterations < MIN_ITERATIONS) {
        throw new IllegalArgumentException();
    }
    this.maxIterations = maxIterations;
}

/**
 * Gets list of homographies to estimate IAC.
 *
 * @return list of homographies to estimate IAC.
 */
public List<Transformation2D> getHomographies() {
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 368 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 939 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 1085 |
final var a = new Matrix(2 * nHomographies, 6);
var index = 0;
var counter = 0;
ProjectiveTransformation2D t = null;
final var h = new Matrix(ProjectiveTransformation2D.HOM_COORDS, ProjectiveTransformation2D.HOM_COORDS);
// elements ij of homography (last column is not required)
double h11;
double h12;
double h21;
double h22;
double h31;
double h32;
double rowNorm;
double weight;
double factor;
for (final var homography : homographies) {
    if (selected[index]) {
        weight = weights[index];

        // convert homography into projective so it can be normalized
        homography.asMatrix(h);
        if (t == null) {
            t = new ProjectiveTransformation2D(h);
        } else {
            t.setT(h);
        }
        // normalize
        t.normalize();

        // obtain elements of projective transformation matrix
        // there is no need to retrieve internal matrix h, as we already
        // hold a reference
        h11 = h.getElementAt(0, 0);
        h12 = h.getElementAt(0, 1);
        h21 = h.getElementAt(1, 0);
        h22 = h.getElementAt(1, 1);
        h31 = h.getElementAt(2, 0);
        h32 = h.getElementAt(2, 1);

        // fill first equation
        a.setElementAt(counter, 0, h11 * h12);
File | Line |
---|---|
com/irurueta/ar/epipolar/estimators/FundamentalMatrixRobustEstimator.java | 354 |
com/irurueta/ar/sfm/RobustSinglePoint3DTriangulator.java | 271 |
}

/**
 * Returns boolean indicating if estimator is locked because estimation is
 * under progress.
 *
 * @return true if estimator is locked, false otherwise.
 */
public boolean isLocked() {
    return locked;
}

/**
 * Returns amount of progress variation before notifying a progress change
 * during estimation.
 *
 * @return amount of progress variation before notifying a progress change
 * during estimation.
 */
public float getProgressDelta() {
    return progressDelta;
}

/**
 * Sets amount of progress variation before notifying a progress change
 * during estimation.
 *
 * @param progressDelta amount of progress variation before notifying a
 * progress change during estimation.
 * @throws IllegalArgumentException if progress delta is less than zero or
 * greater than 1.
 * @throws LockedException if this estimator is locked because an estimation
 * is being computed.
 */
public void setProgressDelta(final float progressDelta) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (progressDelta < MIN_PROGRESS_DELTA || progressDelta > MAX_PROGRESS_DELTA) {
        throw new IllegalArgumentException();
    }
    this.progressDelta = progressDelta;
}

/**
 * Returns amount of confidence expressed as a value between 0.0 and 1.0
 * (which is equivalent to 100%). The amount of confidence indicates the
 * probability that the estimated result is correct. Usually this value will
 * be close to 1.0, but not exactly 1.0.
 *
 * @return amount of confidence as a value between 0.0 and 1.0.
 */
public double getConfidence() {
    return confidence;
}

/**
 * Sets amount of confidence expressed as a value between 0.0 and 1.0 (which
 * is equivalent to 100%). The amount of confidence indicates the
 * probability that the estimated result is correct. Usually this value will
 * be close to 1.0, but not exactly 1.0.
 *
 * @param confidence confidence to be set as a value between 0.0 and 1.0.
 * @throws IllegalArgumentException if provided value is not between 0.0 and
 * 1.0.
 * @throws LockedException if this estimator is locked because an estimation
 * is being computed.
 */
public void setConfidence(final double confidence) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (confidence < MIN_CONFIDENCE || confidence > MAX_CONFIDENCE) {
        throw new IllegalArgumentException();
    }
    this.confidence = confidence;
}

/**
 * Returns maximum allowed number of iterations. If maximum allowed number
 * of iterations is achieved without converging to a result when calling
 * estimate(), a RobustEstimatorException will be raised.
 *
 * @return maximum allowed number of iterations.
 */
public int getMaxIterations() {
    return maxIterations;
}

/**
 * Sets maximum allowed number of iterations. When the maximum number of
 * iterations is exceeded, result will not be available, however an
 * approximate result will be available for retrieval.
 *
 * @param maxIterations maximum allowed number of iterations to be set.
 * @throws IllegalArgumentException if provided value is less than 1.
 * @throws LockedException if this estimator is locked because an estimation
 * is being computed.
 */
public void setMaxIterations(final int maxIterations) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (maxIterations < MIN_ITERATIONS) {
        throw new IllegalArgumentException();
    }
    this.maxIterations = maxIterations;
}

/**
 * Gets data related to inliers found after estimation.
 *
 * @return data related to inliers found after estimation.
 */
public InliersData getInliersData() {
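The last four entries of this kind (RadialDistortionRobustEstimator, DualAbsoluteQuadricRobustEstimator, ImageOfAbsoluteConicRobustEstimator and FundamentalMatrixRobustEstimator) all repeat the same progressDelta/confidence/maxIterations plumbing and validation against RobustSinglePoint3DTriangulator. A sketch of writing it once in a shared base (or, where a common superclass is not possible, a composed settings object); constant names mirror the excerpts, while the default values shown are illustrative assumptions:

```java
// Hypothetical common base for the robust estimators quoted above; each
// subclass would inherit the validated settings instead of re-implementing them.
public abstract class BaseRobustEstimatorSettings {
    public static final float MIN_PROGRESS_DELTA = 0.0f;
    public static final float MAX_PROGRESS_DELTA = 1.0f;
    public static final double MIN_CONFIDENCE = 0.0;
    public static final double MAX_CONFIDENCE = 1.0;
    public static final int MIN_ITERATIONS = 1;

    protected volatile boolean locked;
    private float progressDelta = 0.05f; // illustrative default
    private double confidence = 0.99;    // illustrative default
    private int maxIterations = 5000;    // illustrative default

    public boolean isLocked() {
        return locked;
    }

    public float getProgressDelta() {
        return progressDelta;
    }

    public void setProgressDelta(final float progressDelta) throws LockedException {
        if (isLocked()) {
            throw new LockedException();
        }
        if (progressDelta < MIN_PROGRESS_DELTA || progressDelta > MAX_PROGRESS_DELTA) {
            throw new IllegalArgumentException();
        }
        this.progressDelta = progressDelta;
    }

    public double getConfidence() {
        return confidence;
    }

    public void setConfidence(final double confidence) throws LockedException {
        if (isLocked()) {
            throw new LockedException();
        }
        if (confidence < MIN_CONFIDENCE || confidence > MAX_CONFIDENCE) {
            throw new IllegalArgumentException();
        }
        this.confidence = confidence;
    }

    public int getMaxIterations() {
        return maxIterations;
    }

    public void setMaxIterations(final int maxIterations) throws LockedException {
        if (isLocked()) {
            throw new LockedException();
        }
        if (maxIterations < MIN_ITERATIONS) {
            throw new IllegalArgumentException();
        }
        this.maxIterations = maxIterations;
    }
}
```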
File | Line |
---|---|
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 1847 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 840 |
final var fundamentalMatrix = currentEstimatedFundamentalMatrix.getFundamentalMatrix();
fundamentalMatrix.normalize();

final var estimator = new DualAbsoluteQuadricInitialCamerasEstimator(fundamentalMatrix);
estimator.setAspectRatio(configuration.getInitialCamerasAspectRatio());
estimator.estimate();

final var camera1 = estimator.getEstimatedLeftCamera();
final var camera2 = estimator.getEstimatedRightCamera();

camera1.decompose();
camera2.decompose();

final var intrinsicZeroPrincipalPoint1 = camera1.getIntrinsicParameters();
final var intrinsicZeroPrincipalPoint2 = camera2.getIntrinsicParameters();

final var principalPointX = configuration.getPrincipalPointX();
final var principalPointY = configuration.getPrincipalPointY();

final var intrinsic1 = new PinholeCameraIntrinsicParameters(intrinsicZeroPrincipalPoint1);
intrinsic1.setHorizontalPrincipalPoint(intrinsic1.getHorizontalPrincipalPoint() + principalPointX);
intrinsic1.setVerticalPrincipalPoint(intrinsic1.getVerticalPrincipalPoint() + principalPointY);
camera1.setIntrinsicParameters(intrinsic1);

final var intrinsic2 = new PinholeCameraIntrinsicParameters(intrinsicZeroPrincipalPoint2);
intrinsic2.setHorizontalPrincipalPoint(intrinsic2.getHorizontalPrincipalPoint() + principalPointX);
intrinsic2.setVerticalPrincipalPoint(intrinsic2.getVerticalPrincipalPoint() + principalPointY);
camera2.setIntrinsicParameters(intrinsic2);
File | Line |
---|---|
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 994 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 1079 |
estimator.setAspectRatio(configuration.getInitialCamerasAspectRatio());
estimator.setCorrectorType(configuration.getInitialCamerasCorrectorType());
estimator.setPointsTriangulated(true);
estimator.setValidTriangulatedPointsMarked(configuration.getInitialCamerasMarkValidTriangulatedPoints());

estimator.estimate();

// store cameras
final var camera1 = estimator.getEstimatedLeftCamera();
final var camera2 = estimator.getEstimatedRightCamera();

estimatedCamera1 = new EstimatedCamera();
estimatedCamera1.setCamera(camera1);

estimatedCamera2 = new EstimatedCamera();
estimatedCamera2.setCamera(camera2);

// store points
final var triangulatedPoints = estimator.getTriangulatedPoints();
final var validTriangulatedPoints = estimator.getValidTriangulatedPoints();

reconstructedPoints = new ArrayList<>();
final var size = triangulatedPoints.size();
for (var i = 0; i < size; i++) {
    final var reconstructedPoint = new ReconstructedPoint3D();
    reconstructedPoint.setPoint(triangulatedPoints.get(i));
    reconstructedPoint.setInlier(validTriangulatedPoints.get(i));
    reconstructedPoints.add(reconstructedPoint);
}

return true;
} catch (final Exception e) {
    return false;
}
}

/**
 * Estimates initial cameras and reconstructed points using the essential
 * matrix and provided intrinsic parameters that must have been set during
 * offline calibration.
 *
 * @return true if cameras and points could be estimated, false if something
 * failed.
 */
private boolean estimateInitialCamerasAndPointsEssential() {
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 566 |
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 840 |
```java
a.setElementAt(counter, 2, h21 * h22);
a.setElementAt(counter, 3, h31 * h32);

// normalize row
rowNorm = Math.sqrt(Math.pow(a.getElementAt(counter, 0), 2.0)
        + Math.pow(a.getElementAt(counter, 1), 2.0)
        + Math.pow(a.getElementAt(counter, 2), 2.0)
        + Math.pow(a.getElementAt(counter, 3), 2.0));

a.setElementAt(counter, 0, a.getElementAt(counter, 0) / rowNorm);
a.setElementAt(counter, 1, a.getElementAt(counter, 1) / rowNorm);
a.setElementAt(counter, 2, a.getElementAt(counter, 2) / rowNorm);
a.setElementAt(counter, 3, a.getElementAt(counter, 3) / rowNorm);
counter++;

// in case we want an exact solution (up to scale) when LMSE is
// disabled, we stop after 2 equations
if (!isLMSESolutionAllowed() && (counter >= MIN_REQUIRED_EQUATIONS - 2)) {
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 134 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 256 |
```java
}

/**
 * Estimates Image of Absolute Conic (IAC).
 *
 * @return estimated IAC.
 * @throws LockedException if estimator is locked.
 * @throws NotReadyException if input has not yet been provided.
 * @throws ImageOfAbsoluteConicEstimatorException if an error occurs during
 *         estimation, usually because repeated homographies are provided,
 *         or because homographies correspond to degenerate camera movements
 *         such as pure parallel translations, where no additional data is
 *         really provided. Indeed, if provided homographies belong to the
 *         group of affine transformations (or groups contained within it,
 *         such as metric or Euclidean ones), this exception will be raised
 *         because camera movements will be degenerate. To avoid this
 *         exception, homographies must be purely projective.
 */
@Override
public ImageOfAbsoluteConic estimate() throws LockedException, NotReadyException,
        ImageOfAbsoluteConicEstimatorException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (!isReady()) {
        throw new NotReadyException();
    }

    try {
        locked = true;
        if (listener != null) {
            listener.onEstimateStart(this);
        }

        final ImageOfAbsoluteConic iac;
        if (zeroSkewness && principalPointAtOrigin) {
            if (focalDistanceAspectRatioKnown) {
                iac = estimateZeroSkewnessPrincipalPointAtOriginAndKnownFocalDistanceAspectRatio();
            } else {
                iac = estimateZeroSkewnessAndPrincipalPointAtOrigin();
            }
        } else if (zeroSkewness) { // && !principalPointAtOrigin
            if (focalDistanceAspectRatioKnown) {
                iac = estimateZeroSkewnessAndKnownFocalDistanceAspectRatio();
            } else {
                iac = estimateZeroSkewness();
            }
        } else if (principalPointAtOrigin) { // && !zeroSkewness
            iac = estimatePrincipalPointAtOrigin();
        } else {
            iac = estimateNoConstraints();
        }

        if (listener != null) {
            listener.onEstimateEnd(this);
        }

        return iac;
    } finally {
        locked = false;
    }
}

/**
 * Returns type of IAC estimator.
 *
 * @return type of IAC estimator.
 */
@Override
public ImageOfAbsoluteConicEstimatorType getType() {
    return ImageOfAbsoluteConicEstimatorType.LMSE_IAC_ESTIMATOR;
```
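Since the snippet above documents the constraint combinations but not a call site, here is a minimal usage sketch (the homography source and its purely projective nature are assumed; the constructor and accessors mirror calls quoted later in this report):

```java
import java.util.List;

import com.irurueta.ar.calibration.estimators.LMSEImageOfAbsoluteConicEstimator;
import com.irurueta.geometry.Transformation2D;

public final class IacSketch {

    // recovers pinhole intrinsics from purely projective homographies,
    // mirroring the call sequence quoted elsewhere in this report
    public static void calibrate(final List<Transformation2D> homographies) throws Exception {
        final var iacEstimator = new LMSEImageOfAbsoluteConicEstimator(homographies);
        final var iac = iacEstimator.estimate();             // throws for degenerate (e.g. affine) homographies
        final var intrinsics = iac.getIntrinsicParameters(); // recovered camera intrinsics
        System.out.println(intrinsics);
    }

    private IacSketch() {
    }
}
```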
File | Line |
---|---|
com/irurueta/ar/sfm/BaseSlamSparseReconstructor.java | 79 |
com/irurueta/ar/sfm/BaseSlamTwoViewsSparseReconstructor.java | 78 |
```java
protected BaseSlamSparseReconstructor(final C configuration, final L listener) {
    super(configuration, listener);
}

/**
 * Provides a new accelerometer sample to update SLAM estimation.
 * This method must be called whenever the accelerometer sensor receives new data.
 * If the re-constructor is not running, calling this method has no effect.
 *
 * @param timestamp     timestamp of accelerometer sample since epoch time,
 *                      expressed in nanoseconds.
 * @param accelerationX linear acceleration along x-axis expressed in meters
 *                      per squared second (m/s^2).
 * @param accelerationY linear acceleration along y-axis expressed in meters
 *                      per squared second (m/s^2).
 * @param accelerationZ linear acceleration along z-axis expressed in meters
 *                      per squared second (m/s^2).
 */
public void updateAccelerometerSample(final long timestamp, final float accelerationX,
        final float accelerationY, final float accelerationZ) {
    if (slamEstimator != null) {
        slamEstimator.updateAccelerometerSample(timestamp, accelerationX, accelerationY, accelerationZ);
    }
}

/**
 * Provides a new accelerometer sample to update SLAM estimation.
 * This method must be called whenever the accelerometer sensor receives new data.
 * If the re-constructor is not running, calling this method has no effect.
 *
 * @param timestamp timestamp of accelerometer sample since epoch time,
 *                  expressed in nanoseconds.
 * @param data      array containing x,y,z components of linear acceleration
 *                  expressed in meters per squared second (m/s^2).
 * @throws IllegalArgumentException if provided array does not have length 3.
 */
public void updateAccelerometerSample(final long timestamp, final float[] data) {
    if (slamEstimator != null) {
        slamEstimator.updateAccelerometerSample(timestamp, data);
    }
}

/**
 * Provides a new gyroscope sample to update SLAM estimation.
 * If the re-constructor is not running, calling this method has no effect.
 *
 * @param timestamp     timestamp of gyroscope sample since epoch time,
 *                      expressed in nanoseconds.
 * @param angularSpeedX angular speed of rotation along x-axis expressed in
 *                      radians per second (rad/s).
 * @param angularSpeedY angular speed of rotation along y-axis expressed in
 *                      radians per second (rad/s).
 * @param angularSpeedZ angular speed of rotation along z-axis expressed in
 *                      radians per second (rad/s).
 */
public void updateGyroscopeSample(final long timestamp, final float angularSpeedX,
        final float angularSpeedY, final float angularSpeedZ) {
    if (slamEstimator != null) {
        slamEstimator.updateGyroscopeSample(timestamp, angularSpeedX, angularSpeedY, angularSpeedZ);
    }
}

/**
 * Provides a new gyroscope sample to update SLAM estimation.
 * If the re-constructor is not running, calling this method has no effect.
 *
 * @param timestamp timestamp of gyroscope sample since epoch time,
 *                  expressed in nanoseconds.
 * @param data      angular speed of rotation along x,y,z axes expressed in
 *                  radians per second (rad/s).
 * @throws IllegalArgumentException if provided array does not have length 3.
 */
public void updateGyroscopeSample(final long timestamp, final float[] data) {
    if (slamEstimator != null) {
        slamEstimator.updateGyroscopeSample(timestamp, data);
    }
}

/**
 * Configures calibration data on SLAM estimator if available.
 */
protected void setUpCalibrationData() {
```
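A short sketch of how the duplicated IMU-feeding API is driven (the `reconstructor` instance and `sample` source are hypothetical; the method signatures are exactly those quoted above):

```java
// sketch: feed one IMU sample pair to a running re-constructor; "reconstructor"
// is any concrete subclass of the classes above, "sample" a hypothetical sensor event
final long timestampNanos = sample.getTimestampNanos(); // nanoseconds since epoch, per the javadoc above

// accelerometer reading (m/s^2): gravity-only along z, an illustrative value
reconstructor.updateAccelerometerSample(timestampNanos, 0.0f, 0.0f, 9.81f);

// gyroscope reading (rad/s): slow rotation about x, an illustrative value
reconstructor.updateGyroscopeSample(timestampNanos, 0.01f, 0.0f, 0.0f);

// the float[] overloads are equivalent but require arrays of length 3
reconstructor.updateAccelerometerSample(timestampNanos, new float[] {0.0f, 0.0f, 9.81f});
```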
File | Line |
---|---|
com/irurueta/ar/slam/AbsoluteOrientationConstantVelocityModelSlamEstimator.java | 562 |
com/irurueta/ar/slam/AbsoluteOrientationSlamEstimator.java | 596 |
```java
x[12] = state.getElementAtIndex(12);
}

/**
 * Updates state data of the device by using state matrix obtained from
 * Kalman filter after correction.
 *
 * @param state state matrix obtained from Kalman filter.
 */
private void updateCorrectedState(final Matrix state) {
    // position
    statePositionX = x[0] = state.getElementAtIndex(0);
    statePositionY = x[1] = state.getElementAtIndex(1);
    statePositionZ = x[2] = state.getElementAtIndex(2);

    // quaternion
    stateQuaternionA = x[3] = state.getElementAtIndex(3);
    stateQuaternionB = x[4] = state.getElementAtIndex(4);
    stateQuaternionC = x[5] = state.getElementAtIndex(5);
    stateQuaternionD = x[6] = state.getElementAtIndex(6);

    stateOrientation.setA(stateQuaternionA);
    stateOrientation.setB(stateQuaternionB);
    stateOrientation.setC(stateQuaternionC);
    stateOrientation.setD(stateQuaternionD);
    stateOrientation.normalize();

    // velocity
    stateVelocityX = x[7] = state.getElementAtIndex(7);
    stateVelocityY = x[8] = state.getElementAtIndex(8);
    stateVelocityZ = x[9] = state.getElementAtIndex(9);
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 589 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 728 |
```java
a.setElementAt(counter, 0, Math.pow(h11, 2.0) - Math.pow(h12, 2.0));
a.setElementAt(counter, 1, 2.0 * (h11 * h21 - h12 * h22));
a.setElementAt(counter, 2, Math.pow(h21, 2.0) - Math.pow(h22, 2.0));
a.setElementAt(counter, 3, Math.pow(h31, 2.0) - Math.pow(h32, 2.0));

// normalize row
rowNorm = Math.sqrt(Math.pow(a.getElementAt(counter, 0), 2.0)
        + Math.pow(a.getElementAt(counter, 1), 2.0)
        + Math.pow(a.getElementAt(counter, 2), 2.0)
        + Math.pow(a.getElementAt(counter, 3), 2.0));
```
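For context (a reading inferred from the coefficients, not stated by the report): with $h_1, h_2$ the first two columns of a homography $H$ and $\omega$ the image of the absolute conic, rows of this shape encode Zhang's calibration constraints

$$h_1^\top \omega h_2 = 0, \qquad h_1^\top \omega h_1 - h_2^\top \omega h_2 = 0.$$

Under the principal-point-at-origin parameterization ($\omega_{13} = \omega_{23} = 0$, unknowns $\omega_{11}, \omega_{12}, \omega_{22}, \omega_{33}$), the second constraint expands to exactly the four coefficients $(h_{11}^2 - h_{12}^2,\; 2(h_{11}h_{21} - h_{12}h_{22}),\; h_{21}^2 - h_{22}^2,\; h_{31}^2 - h_{32}^2)$ filled above.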
File | Line |
---|---|
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 870 |
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 962 |
```java
cameraEstimator.setSuggestSkewnessValueEnabled(
        configuration.isAdditionalCamerasSuggestSkewnessValueEnabled());
cameraEstimator.setSuggestedSkewnessValue(
        configuration.getAdditionalCamerasSuggestedSkewnessValue());

cameraEstimator.setSuggestHorizontalFocalLengthEnabled(
        configuration.isAdditionalCamerasSuggestHorizontalFocalLengthEnabled());
cameraEstimator.setSuggestedHorizontalFocalLengthValue(
        configuration.getAdditionalCamerasSuggestedHorizontalFocalLengthValue());

cameraEstimator.setSuggestVerticalFocalLengthEnabled(
        configuration.isAdditionalCamerasSuggestVerticalFocalLengthEnabled());
cameraEstimator.setSuggestedVerticalFocalLengthValue(
        configuration.getAdditionalCamerasSuggestedVerticalFocalLengthValue());

cameraEstimator.setSuggestAspectRatioEnabled(
        configuration.isAdditionalCamerasSuggestAspectRatioEnabled());
cameraEstimator.setSuggestedAspectRatioValue(
        configuration.getAdditionalCamerasSuggestedAspectRatioValue());

cameraEstimator.setSuggestPrincipalPointEnabled(
        configuration.isAdditionalCamerasSuggestPrincipalPointEnabled());
cameraEstimator.setSuggestedPrincipalPointValue(
        configuration.getAdditionalCamerasSuggestedPrincipalPointValue());

currentCamera = cameraEstimator.estimate();
currentCameraCovariance = cameraEstimator.getCovariance();

//noinspection unchecked
listener.onSamplesAccepted((R) this, viewCount, previousViewTrackedSamples, currentViewTrackedSamples);

allPreviousViewSamples.clear();
allPreviousViewSamples.addAll(currentViewTrackedSamples);
allPreviousViewSamples.addAll(currentViewNewlySpawnedSamples);

previousViewTrackedSamples = currentViewTrackedSamples;
previousViewId = currentViewId;
currentViewId = viewCount;
} catch (final Exception e) {
    // camera estimation failed
    samplesRejected = true;
}
} else {
```
File | Line |
---|---|
com/irurueta/ar/slam/AbsoluteOrientationConstantVelocityModelSlamEstimator.java | 355 |
com/irurueta/ar/slam/AbsoluteOrientationSlamEstimator.java | 377 |
```java
ConstantVelocityModelStatePredictor.predictWithRotationAdjustment(x, u, deltaTimestamp, x,
        jacobianPredictionX, jacobianPredictionU);

// update Kalman filter transition matrix taking into account current state
kalmanFilter.setTransitionMatrix(jacobianPredictionX);

// update control matrix from control vector jacobian
kalmanFilter.setControlMatrix(jacobianPredictionU);

if (calibrationData != null && calibrationData.getControlMean() != null
        && calibrationData.getControlCovariance() != null) {
    // if calibrator is available, propagate covariance to set process covariance matrix
    if (normalDist == null) {
        normalDist = new MultivariateNormalDist(STATE_LENGTH);
    }
    try {
        calibrationData.propagateWithControlJacobian(jacobianPredictionU, normalDist);

        // update Kalman filter process noise
        final var processNoise = kalmanFilter.getProcessNoiseCov();

        // copy normal dist covariance into processNoise
        normalDist.getCovariance(processNoise);
    } catch (final InvalidCovarianceMatrixException e) {
        // ignore
    }
}

try {
    // also predict the state using Kalman filter with current control data
    control.fromArray(u, true);
    updatePredictedState(kalmanFilter.predict(control));

    // copy predicted state to corrected state
    kalmanFilter.getStatePre().copyTo(kalmanFilter.getStatePost());
    kalmanFilter.getErrorCovPre().copyTo(kalmanFilter.getErrorCovPost());

    predictionAvailable = true;
} catch (final Exception e) {
    error = true;
}

lastOrientation.fromRotation(stateOrientation);
```
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 773 |
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 1480 |
```java
        samples[1].getPoint().getInhomY() - principalPointY);
rightPoints.add(rightPoint);

qualityScores[i] = match.getQualityScore();
i++;
}

try {
    final var estimator = FundamentalMatrixRobustEstimator.create(leftPoints, rightPoints, qualityScores,
            configuration.getRobustFundamentalMatrixEstimatorMethod());
    estimator.setNonRobustFundamentalMatrixEstimatorMethod(
            configuration.getNonRobustFundamentalMatrixEstimatorMethod());
    estimator.setResultRefined(configuration.isFundamentalMatrixRefined());
    estimator.setCovarianceKept(configuration.isFundamentalMatrixCovarianceKept());
    estimator.setConfidence(configuration.getFundamentalMatrixConfidence());
    estimator.setMaxIterations(configuration.getFundamentalMatrixMaxIterations());

    switch (configuration.getRobustFundamentalMatrixEstimatorMethod()) {
        case LMEDS:
            ((LMedSFundamentalMatrixRobustEstimator) estimator).setStopThreshold(
                    configuration.getFundamentalMatrixThreshold());
            break;
        case MSAC:
            ((MSACFundamentalMatrixRobustEstimator) estimator).setThreshold(
                    configuration.getFundamentalMatrixThreshold());
            break;
        case PROMEDS:
            ((PROMedSFundamentalMatrixRobustEstimator) estimator).setStopThreshold(
                    configuration.getFundamentalMatrixThreshold());
            break;
        case PROSAC:
```
File | Line |
---|---|
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 2079 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 1061 |
```java
final var samples2 = currentEstimatedFundamentalMatrix.getRightSamples();

final var points1 = new ArrayList<Point2D>();
final var points2 = new ArrayList<Point2D>();
final var length = samples1.size();
for (var i = 0; i < length; i++) {
    final var sample1 = samples1.get(i);
    final var sample2 = samples2.get(i);

    final var point1 = sample1.getPoint();
    final var point2 = sample2.getPoint();

    points1.add(point1);
    points2.add(point2);
}

try {
    final var estimator = new EssentialMatrixInitialCamerasEstimator(fundamentalMatrix, intrinsic1, intrinsic2,
            points1, points2);
    estimator.setCorrectorType(configuration.getInitialCamerasCorrectorType());
    estimator.setPointsTriangulated(true);
    estimator.setValidTriangulatedPointsMarked(configuration.getInitialCamerasMarkValidTriangulatedPoints());

    estimator.estimate();

    // store cameras
    final var camera1 = estimator.getEstimatedLeftCamera();
    final var camera2 = estimator.getEstimatedRightCamera();
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 231 |
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 380 |
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 525 |
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 665 |
```java
    a = new Matrix(MIN_REQUIRED_EQUATIONS, 6);
}

var counter = 0;
ProjectiveTransformation2D t = null;
final var h = new Matrix(ProjectiveTransformation2D.HOM_COORDS, ProjectiveTransformation2D.HOM_COORDS);

// elements ij of homography (last column is not required)
double h11;
double h12;
double h21;
double h22;
double h31;
double h32;
double rowNorm;

for (final var homography : homographies) {
    // convert homography into projective so it can be normalized
    homography.asMatrix(h);
    if (t == null) {
        t = new ProjectiveTransformation2D(h);
    } else {
        t.setT(h);
    }

    // normalize
    t.normalize();

    // obtain elements of projective transformation matrix
    // there is no need to retrieve internal matrix h, as we already
    // hold a reference
    h11 = h.getElementAt(0, 0);
    h12 = h.getElementAt(0, 1);
    h21 = h.getElementAt(1, 0);
    h22 = h.getElementAt(1, 1);
    h31 = h.getElementAt(2, 0);
    h32 = h.getElementAt(2, 1);

    // fill first equation
    a.setElementAt(counter, 0, h11 * h12);
    a.setElementAt(counter, 1, h11 * h22 + h21 * h12);
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 991 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 1106 |
```java
homography.asMatrix(h);
if (t == null) {
    t = new ProjectiveTransformation2D(h);
} else {
    t.setT(h);
}

// normalize
t.normalize();

// obtain elements of projective transformation matrix
// there is no need to retrieve internal matrix h, as we already
// hold a reference
h11 = h.getElementAt(0, 0);
h12 = h.getElementAt(0, 1);
h21 = h.getElementAt(1, 0);
h22 = h.getElementAt(1, 1);
h31 = h.getElementAt(2, 0);
h32 = h.getElementAt(2, 1);

// fill first equation
a.setElementAt(counter, 0, h11 * h12 + h21 * h22 / sqrAspectRatio);
a.setElementAt(counter, 1, h31 * h32);

// normalize row
rowNorm = Math.sqrt(Math.pow(a.getElementAt(counter, 0), 2.0)
        + Math.pow(a.getElementAt(counter, 1), 2.0));
```
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 1129 |
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 1851 |
```java
estimator.setAspectRatio(configuration.getPairedCamerasAspectRatio());
estimator.estimate();

final var camera1 = estimator.getEstimatedLeftCamera();
final var camera2 = estimator.getEstimatedRightCamera();

camera1.decompose();
camera2.decompose();

final var intrinsicZeroPrincipalPoint1 = camera1.getIntrinsicParameters();
final var intrinsicZeroPrincipalPoint2 = camera2.getIntrinsicParameters();

final var principalPointX = configuration.getPrincipalPointX();
final var principalPointY = configuration.getPrincipalPointY();

final var intrinsic1 = new PinholeCameraIntrinsicParameters(intrinsicZeroPrincipalPoint1);
intrinsic1.setHorizontalPrincipalPoint(intrinsic1.getHorizontalPrincipalPoint() + principalPointX);
intrinsic1.setVerticalPrincipalPoint(intrinsic1.getVerticalPrincipalPoint() + principalPointY);
camera1.setIntrinsicParameters(intrinsic1);

final var intrinsic2 = new PinholeCameraIntrinsicParameters(intrinsicZeroPrincipalPoint2);
intrinsic2.setHorizontalPrincipalPoint(intrinsic2.getHorizontalPrincipalPoint() + principalPointX);
intrinsic2.setVerticalPrincipalPoint(intrinsic2.getVerticalPrincipalPoint() + principalPointY);
camera2.setIntrinsicParameters(intrinsic2);

previousMetricEstimatedCamera = new EstimatedCamera();
previousMetricEstimatedCamera.setCamera(camera1);
```
File | Line |
---|---|
com/irurueta/ar/slam/ConstantVelocityModelSlamEstimator.java | 325 |
com/irurueta/ar/slam/SlamEstimator.java | 349 |
```java
ConstantVelocityModelStatePredictor.predict(x, u, deltaTimestamp, x, jacobianPredictionX, jacobianPredictionU);

// update Kalman filter transition matrix taking into account current state
kalmanFilter.setTransitionMatrix(jacobianPredictionX);

// update control matrix from control vector jacobian
kalmanFilter.setControlMatrix(jacobianPredictionU);

if (calibrationData != null && calibrationData.getControlMean() != null
        && calibrationData.getControlCovariance() != null) {
    // if calibrator is available, propagate covariance to set process covariance matrix
    if (normalDist == null) {
        normalDist = new MultivariateNormalDist(STATE_LENGTH);
    }
    try {
        calibrationData.propagateWithControlJacobian(jacobianPredictionU, normalDist);

        // update Kalman filter process noise
        final var processNoise = kalmanFilter.getProcessNoiseCov();

        // copy normal dist covariance into processNoise
        normalDist.getCovariance(processNoise);
    } catch (final InvalidCovarianceMatrixException e) {
        // ignore
    }
}

try {
    // also predict the state using Kalman filter with current control data
    control.fromArray(u, true);
    updatePredictedState(kalmanFilter.predict(control));

    // copy predicted state to corrected state
    kalmanFilter.getStatePre().copyTo(kalmanFilter.getStatePost());
    kalmanFilter.getErrorCovPre().copyTo(kalmanFilter.getErrorCovPost());

    predictionAvailable = true;
} catch (final Exception e) {
    error = true;
}
```
File | Line |
---|---|
com/irurueta/ar/epipolar/estimators/HomographyDecomposer.java | 661 |
com/irurueta/ar/epipolar/estimators/HomographyDecomposer.java | 733 |
```java
private double decomposeFromThreeDifferentSingularValuesNegative(
        final double d1, final double d2, final double d3, final double[] n, final Matrix r,
        final double[] t, final boolean positive1, final boolean positive3) {
    final var d1Sqr = d1 * d1;
    final var d2Sqr = d2 * d2;
    final var d3Sqr = d3 * d3;

    // compute plane normal
    final var denom = d1Sqr - d3Sqr;
    var x1 = Math.sqrt((d1Sqr - d2Sqr) / denom);
    if (!positive1) {
        x1 = -x1;
    }
    final var x2 = 0.0;
    var x3 = Math.sqrt((d2Sqr - d3Sqr) / denom);
    if (!positive3) {
        x3 = -x3;
    }

    // fill plane normal solution
    n[0] = x1;
    n[1] = x2;
    n[2] = x3;

    // compute rotation
    final var x1Sqr = x1 * x1;
    final var x3Sqr = x3 * x3;

    final var sinTheta = (d1 + d3) * x1 * x3 / d2;
```
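The arithmetic in this fragment matches the standard decomposition of a homography into rotation, translation, and plane normal. Assuming singular values ordered $d_1 \ge d_2 \ge d_3$ (consistent with the code, though not stated in the report), the recovered plane normal components and rotation angle are

$$x_1 = \pm\sqrt{\frac{d_1^2 - d_2^2}{d_1^2 - d_3^2}}, \qquad x_2 = 0, \qquad x_3 = \pm\sqrt{\frac{d_2^2 - d_3^2}{d_1^2 - d_3^2}}, \qquad \sin\theta = \frac{(d_1 + d_3)\,x_1 x_3}{d_2},$$

with the signs selected by the `positive1` and `positive3` flags.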
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 1358 |
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 2061 |
```java
}

/**
 * Estimates initial cameras and reconstructed points using the essential
 * matrix and provided intrinsic parameters that must have been set during
 * offline calibration.
 *
 * @param intrinsic1 intrinsic parameters of 1st camera.
 * @param intrinsic2 intrinsic parameters of 2nd camera.
 * @return true if cameras and points could be estimated, false if something
 *         failed.
 */
private boolean estimateInitialCamerasAndPointsEssential(
        final PinholeCameraIntrinsicParameters intrinsic1, final PinholeCameraIntrinsicParameters intrinsic2) {
    final var fundamentalMatrix = currentEstimatedFundamentalMatrix.getFundamentalMatrix();

    // use all points used for fundamental matrix estimation
    final var samples1 = currentEstimatedFundamentalMatrix.getLeftSamples();
    final var samples2 = currentEstimatedFundamentalMatrix.getRightSamples();

    final var points1 = new ArrayList<Point2D>();
    final var points2 = new ArrayList<Point2D>();
    final var length = samples1.size();
    for (var i = 0; i < length; i++) {
        final var sample1 = samples1.get(i);
        final var sample2 = samples2.get(i);

        final var point1 = sample1.getPoint();
        final var point2 = sample2.getPoint();

        points1.add(point1);
        points2.add(point2);
    }

    try {
        final var estimator = new EssentialMatrixInitialCamerasEstimator(fundamentalMatrix, intrinsic1,
                intrinsic2, points1, points2);
        estimator.setCorrectorType(configuration.getPairedCamerasCorrectorType());
```
File | Line |
---|---|
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 1691 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 701 |
```java
        break;
}

final var fundamentalMatrixEstimator = new PlanarBestFundamentalMatrixEstimatorAndReconstructor();
fundamentalMatrixEstimator.setHomographyEstimator(homographyEstimator);
fundamentalMatrixEstimator.setLeftAndRightPoints(leftPoints, rightPoints);
fundamentalMatrixEstimator.setQualityScores(qualityScores);

var intrinsic1 = configuration.getInitialIntrinsic1();
var intrinsic2 = configuration.getInitialIntrinsic1();
if (intrinsic1 == null && intrinsic2 == null) {
    // estimate homography
    final var homography = homographyEstimator.estimate();

    // estimate intrinsic parameters using the Image of Absolute
    // Conic (IAC)
    final var homographies = new ArrayList<Transformation2D>();
    homographies.add(homography);

    final var iacEstimator = new LMSEImageOfAbsoluteConicEstimator(homographies);
    final var iac = iacEstimator.estimate();

    intrinsic1 = intrinsic2 = iac.getIntrinsicParameters();
} else if (intrinsic1 == null) { // && intrinsic2 != null
    intrinsic1 = intrinsic2;
} else if (intrinsic2 == null) { // && intrinsic1 != null
    intrinsic2 = intrinsic1;
}

fundamentalMatrixEstimator.setLeftIntrinsics(intrinsic1);
fundamentalMatrixEstimator.setRightIntrinsics(intrinsic2);

fundamentalMatrixEstimator.estimateAndReconstruct();

final var fundamentalMatrix = fundamentalMatrixEstimator.getFundamentalMatrix();
```
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 1286 |
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 1394 |
```java
estimator.setAspectRatio(configuration.getPairedCamerasAspectRatio());
estimator.setCorrectorType(configuration.getPairedCamerasCorrectorType());
estimator.setPointsTriangulated(true);
estimator.setValidTriangulatedPointsMarked(configuration.getPairedCamerasMarkValidTriangulatedPoints());

estimator.estimate();

// store cameras
final var camera1 = estimator.getEstimatedLeftCamera();
final var camera2 = estimator.getEstimatedRightCamera();

previousMetricEstimatedCamera = new EstimatedCamera();
previousMetricEstimatedCamera.setCamera(camera1);

currentMetricEstimatedCamera = new EstimatedCamera();
currentMetricEstimatedCamera.setCamera(camera2);

// store points
final var triangulatedPoints = estimator.getTriangulatedPoints();
final var validTriangulatedPoints = estimator.getValidTriangulatedPoints();

metricReconstructedPoints = new ArrayList<>();
final var size = triangulatedPoints.size();
for (var i = 0; i < size; i++) {
    final var reconstructedPoint = new ReconstructedPoint3D();
    reconstructedPoint.setPoint(triangulatedPoints.get(i));
    reconstructedPoint.setInlier(validTriangulatedPoints.get(i));
    metricReconstructedPoints.add(reconstructedPoint);
}

return transformPairOfCamerasAndPoints(isInitialPairOfViews, hasAbsoluteOrientation());
```
File | Line |
---|---|
com/irurueta/ar/slam/AbsoluteOrientationConstantVelocityModelSlamEstimator.java | 355 |
com/irurueta/ar/slam/ConstantVelocityModelSlamEstimator.java | 325 |
com/irurueta/ar/slam/SlamEstimator.java | 349 |
```java
ConstantVelocityModelStatePredictor.predictWithRotationAdjustment(x, u, deltaTimestamp, x,
        jacobianPredictionX, jacobianPredictionU);

// update Kalman filter transition matrix taking into account current state
kalmanFilter.setTransitionMatrix(jacobianPredictionX);

// update control matrix from control vector jacobian
kalmanFilter.setControlMatrix(jacobianPredictionU);

if (calibrationData != null && calibrationData.getControlMean() != null
        && calibrationData.getControlCovariance() != null) {
    // if calibrator is available, propagate covariance to set process covariance matrix
    if (normalDist == null) {
        normalDist = new MultivariateNormalDist(STATE_LENGTH);
    }
    try {
        calibrationData.propagateWithControlJacobian(jacobianPredictionU, normalDist);

        // update Kalman filter process noise
        final var processNoise = kalmanFilter.getProcessNoiseCov();

        // copy normal dist covariance into processNoise
        normalDist.getCovariance(processNoise);
    } catch (final InvalidCovarianceMatrixException e) {
        // ignore
    }
}

try {
    // also predict the state using Kalman filter with current control data
    control.fromArray(u, true);
    updatePredictedState(kalmanFilter.predict(control));

    // copy predicted state to corrected state
    kalmanFilter.getStatePre().copyTo(kalmanFilter.getStatePost());
    kalmanFilter.getErrorCovPre().copyTo(kalmanFilter.getErrorCovPost());

    predictionAvailable = true;
} catch (final Exception e) {
    error = true;
}
```
File | Line |
---|---|
com/irurueta/ar/slam/AbsoluteOrientationSlamEstimator.java | 377 |
com/irurueta/ar/slam/ConstantVelocityModelSlamEstimator.java | 325 |
com/irurueta/ar/slam/SlamEstimator.java | 349 |
```java
StatePredictor.predictWithRotationAdjustment(x, u, deltaTimestamp, x, jacobianPredictionX, jacobianPredictionU);

// update Kalman filter transition matrix taking into account current state
kalmanFilter.setTransitionMatrix(jacobianPredictionX);

// update control matrix from control vector jacobian
kalmanFilter.setControlMatrix(jacobianPredictionU);

if (calibrationData != null && calibrationData.getControlMean() != null
        && calibrationData.getControlCovariance() != null) {
    // if calibrator is available, propagate covariance to set process covariance matrix
    if (normalDist == null) {
        normalDist = new MultivariateNormalDist(STATE_LENGTH);
    }
    try {
        calibrationData.propagateWithControlJacobian(jacobianPredictionU, normalDist);

        // update Kalman filter process noise
        final var processNoise = kalmanFilter.getProcessNoiseCov();

        // copy normal dist covariance into processNoise
        normalDist.getCovariance(processNoise);
    } catch (final InvalidCovarianceMatrixException e) {
        // ignore
    }
}

try {
    // also predict the state using Kalman filter with current control data
    control.fromArray(u, true);
    updatePredictedState(kalmanFilter.predict(control));

    // copy predicted state to corrected state
    kalmanFilter.getStatePre().copyTo(kalmanFilter.getStatePost());
    kalmanFilter.getErrorCovPre().copyTo(kalmanFilter.getErrorCovPost());

    predictionAvailable = true;
} catch (final Exception e) {
    error = true;
}
```
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 1262 |
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 1985 |
```java
final var fundamentalMatrix = currentEstimatedFundamentalMatrix.getFundamentalMatrix();

// use inlier points used for fundamental matrix estimation
final var samples1 = currentEstimatedFundamentalMatrix.getLeftSamples();
final var samples2 = currentEstimatedFundamentalMatrix.getRightSamples();

final var points1 = new ArrayList<Point2D>();
final var points2 = new ArrayList<Point2D>();
final var length = samples1.size();
for (var i = 0; i < length; i++) {
    final var sample1 = samples1.get(i);
    final var sample2 = samples2.get(i);

    final var point1 = sample1.getPoint();
    final var point2 = sample2.getPoint();

    points1.add(point1);
    points2.add(point2);
}

try {
    final var estimator = new DualImageOfAbsoluteConicInitialCamerasEstimator(fundamentalMatrix, points1,
            points2);
    estimator.setPrincipalPoint(configuration.getPrincipalPointX(), configuration.getPrincipalPointY());
    estimator.setAspectRatio(configuration.getPairedCamerasAspectRatio());
```
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 1074 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 797 |
```java
estimator.setAspectRatio(configuration.getPairedCamerasAspectRatio());
estimator.estimate();

final var camera1 = estimator.getEstimatedLeftCamera();
final var camera2 = estimator.getEstimatedRightCamera();

camera1.decompose();
camera2.decompose();

final var intrinsicZeroPrincipalPoint1 = camera1.getIntrinsicParameters();
final var intrinsicZeroPrincipalPoint2 = camera2.getIntrinsicParameters();

final var principalPointX = configuration.getPrincipalPointX();
final var principalPointY = configuration.getPrincipalPointY();

final var intrinsic1 = new PinholeCameraIntrinsicParameters(intrinsicZeroPrincipalPoint1);
intrinsic1.setHorizontalPrincipalPoint(intrinsic1.getHorizontalPrincipalPoint() + principalPointX);
intrinsic1.setVerticalPrincipalPoint(intrinsic1.getVerticalPrincipalPoint() + principalPointY);

final var intrinsic2 = new PinholeCameraIntrinsicParameters(intrinsicZeroPrincipalPoint2);
intrinsic2.setHorizontalPrincipalPoint(intrinsic2.getHorizontalPrincipalPoint() + principalPointX);
intrinsic2.setVerticalPrincipalPoint(intrinsic2.getVerticalPrincipalPoint() + principalPointY);

// fix fundamental matrix to account for principal point different from zero
fixFundamentalMatrix(fundamentalMatrix, intrinsicZeroPrincipalPoint1, intrinsicZeroPrincipalPoint2,
        intrinsic1, intrinsic2);

return estimateInitialCamerasAndPointsEssential(intrinsic1, intrinsic2)
```
File | Line |
---|---|
com/irurueta/ar/sfm/BaseSlamSparseReconstructor.java | 306 |
com/irurueta/ar/sfm/KnownBaselineSparseReconstructor.java | 121 |
```java
    metricReconstructedPoints3D.add(reconstructedPoint.getPoint());
}

final var euclideanReconstructedPoints3D = scaleTransformation.transformPointsAndReturnNew(
        metricReconstructedPoints3D);

// set scaled points into result
activeEuclideanReconstructedPoints = new ArrayList<>();
ReconstructedPoint3D euclideanPoint;
ReconstructedPoint3D metricPoint;
for (var i = 0; i < numPoints; i++) {
    metricPoint = activeMetricReconstructedPoints.get(i);

    euclideanPoint = new ReconstructedPoint3D();
    euclideanPoint.setId(metricPoint.getId());
    euclideanPoint.setPoint(euclideanReconstructedPoints3D.get(i));
    euclideanPoint.setInlier(metricPoint.isInlier());
    euclideanPoint.setQualityScore(metricPoint.getQualityScore());
    if (metricPoint.getCovariance() != null) {
        euclideanPoint.setCovariance(metricPoint.getCovariance().multiplyByScalarAndReturnNew(sqrScale));
    }
    euclideanPoint.setColorData(metricPoint.getColorData());

    activeEuclideanReconstructedPoints.add(euclideanPoint);
}

return true;
} catch (final Exception e) {
    failed = true;
    //noinspection unchecked
    listener.onFail((R) this);
```
File | Line |
---|---|
com/irurueta/ar/sfm/BaseSlamPairedViewsSparseReconstructor.java | 287 |
com/irurueta/ar/sfm/PairedViewsSparseReconstructor.java | 103 |
```java
    referenceEuclideanTransformation.setRotation(invEuclideanCameraRotation);
    referenceEuclideanTransformation.setTranslation(lastEuclideanCameraCenter);
}

try {
    // transform cameras
    final var previousEuclideanCamera = referenceEuclideanTransformation.transformAndReturnNew(
            previousMetricCamera);
    final var currentEuclideanCamera = referenceEuclideanTransformation.transformAndReturnNew(
            currentMetricCamera);

    previousEuclideanEstimatedCamera = new EstimatedCamera();
    previousEuclideanEstimatedCamera.setCamera(previousEuclideanCamera);
    previousEuclideanEstimatedCamera.setViewId(previousMetricEstimatedCamera.getViewId());
    previousEuclideanEstimatedCamera.setQualityScore(previousMetricEstimatedCamera.getQualityScore());
    if (previousMetricEstimatedCamera.getCovariance() != null) {
        previousEuclideanEstimatedCamera.setCovariance(previousMetricEstimatedCamera.getCovariance()
                .multiplyByScalarAndReturnNew(sqrScale));
    }

    currentEuclideanEstimatedCamera = new EstimatedCamera();
    currentEuclideanEstimatedCamera.setCamera(currentEuclideanCamera);
    currentEuclideanEstimatedCamera.setViewId(currentMetricEstimatedCamera.getViewId());
    currentEuclideanEstimatedCamera.setQualityScore(currentMetricEstimatedCamera.getQualityScore());
    if (currentMetricEstimatedCamera.getCovariance() != null) {
        currentEuclideanEstimatedCamera.setCovariance(currentMetricEstimatedCamera.getCovariance()
                .multiplyByScalarAndReturnNew(sqrScale));
    }

    // transform points
    euclideanReconstructedPoints = new ArrayList<>();
```
File | Line |
---|---|
com/irurueta/ar/sfm/BaseSlamPairedViewsSparseReconstructorConfiguration.java | 92 |
com/irurueta/ar/sfm/BaseSlamSparseReconstructorConfiguration.java | 92 |
```java
public BaseSlamPairedViewsSparseReconstructorConfiguration() {
    // initialize default covariance
    try {
        cameraPositionCovariance = Matrix.identity(Point3D.POINT3D_INHOMOGENEOUS_COORDINATES_LENGTH,
                Point3D.POINT3D_INHOMOGENEOUS_COORDINATES_LENGTH);
        cameraPositionCovariance.multiplyByScalar(DEFAULT_CAMERA_POSITION_VARIANCE);
    } catch (final AlgebraException ignore) {
        // never happens
    }
}

/**
 * Gets calibration data for accelerometer and gyroscope.
 * This data is usually captured and estimated in an offline step previous to the
 * actual scene reconstruction.
 * Calibration data is usually obtained by keeping the system in a constant state
 * of motion (e.g. acceleration and rotation).
 * If this is null, no calibration data will be used.
 *
 * @return calibration data or null.
 */
public C getCalibrationData() {
    return calibrationData;
}

/**
 * Specifies calibration data for accelerometer and gyroscope.
 * This data is usually captured and estimated in an offline step previous to the
 * actual scene reconstruction.
 * Calibration data is usually obtained by keeping the system in a constant state
 * of motion (e.g. acceleration and rotation).
 * If set to null, no calibration data will be used.
 *
 * @param calibrationData calibration data or null.
 * @return this instance so that method can be easily chained.
 */
public T setCalibrationData(final C calibrationData) {
    this.calibrationData = calibrationData;
    //noinspection unchecked
    return (T) this;
}

/**
 * Gets matrix containing covariance of measured camera positions.
 * This should usually be an "almost" diagonal matrix, where diagonal elements are
 * close to the position estimation error variance.
 * Values of this matrix are device specific and depend on factors such as
 * resolution of images, picture quality, and gyroscope and accelerometer accuracy.
 * This matrix must be a 3x3 symmetric positive definite matrix.
 *
 * @return covariance of measured camera positions.
 */
public Matrix getCameraPositionCovariance() {
    return cameraPositionCovariance;
}

/**
 * Sets matrix containing covariance of measured camera positions.
 * This should usually be an "almost" diagonal matrix, where diagonal elements are
 * close to the position estimation error variance.
 * Values of this matrix are device specific and depend on factors such as
 * resolution of images, picture quality, and gyroscope and accelerometer accuracy.
 * This matrix must be a 3x3 symmetric positive definite matrix.
 *
 * @param cameraPositionCovariance covariance of measured camera positions.
 * @return this instance so that method can be easily chained.
 * @throws IllegalArgumentException if provided matrix is not 3x3.
 */
public T setCameraPositionCovariance(final Matrix cameraPositionCovariance) {
    if (cameraPositionCovariance.getRows() != Point3D.POINT3D_INHOMOGENEOUS_COORDINATES_LENGTH
            || cameraPositionCovariance.getColumns() != Point3D.POINT3D_INHOMOGENEOUS_COORDINATES_LENGTH) {
        throw new IllegalArgumentException();
    }
    this.cameraPositionCovariance = cameraPositionCovariance;
    //noinspection unchecked
    return (T) this;
}

/**
 * Sets independent variance of coordinates of measured camera positions.
 * When using this method, the camera position covariance matrix is set as a diagonal
 * matrix whose diagonal elements are equal to the provided value.
 *
 * @param variance variance of coordinates of measured camera positions.
 * @return this instance so that method can be easily chained.
 */
public T setCameraPositionVariance(final double variance) {
    try {
        cameraPositionCovariance = Matrix.identity(Point3D.POINT3D_INHOMOGENEOUS_COORDINATES_LENGTH,
                Point3D.POINT3D_INHOMOGENEOUS_COORDINATES_LENGTH);
        cameraPositionCovariance.multiplyByScalar(variance);
    } catch (final AlgebraException ignore) {
```
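Both configuration classes expose the same two entry points for camera position noise. A short sketch of how they relate (`config` stands for a concrete configuration instance, which is an assumption; the variance value is illustrative, and the try/catch mirrors the pattern in the quoted constructor):

```java
// option 1: one variance shared by all three coordinates; internally builds
// the same diagonal matrix as the constructor shown above
config.setCameraPositionVariance(1e-4);

// option 2: an explicit 3x3 symmetric positive definite covariance matrix
try {
    final var cov = Matrix.identity(
            Point3D.POINT3D_INHOMOGENEOUS_COORDINATES_LENGTH,
            Point3D.POINT3D_INHOMOGENEOUS_COORDINATES_LENGTH);
    cov.multiplyByScalar(1e-4);
    config.setCameraPositionCovariance(cov); // throws IllegalArgumentException if not 3x3
} catch (final AlgebraException ignore) {
    // never happens for this fixed 3x3 size, mirroring the constructor above
}
```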
File | Line |
---|---|
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 766 |
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 870 |
```java
cameraEstimator.setSuggestSkewnessValueEnabled(
        configuration.isAdditionalCamerasSuggestSkewnessValueEnabled());
cameraEstimator.setSuggestedSkewnessValue(
        configuration.getAdditionalCamerasSuggestedSkewnessValue());

cameraEstimator.setSuggestHorizontalFocalLengthEnabled(
        configuration.isAdditionalCamerasSuggestHorizontalFocalLengthEnabled());
cameraEstimator.setSuggestedHorizontalFocalLengthValue(
        configuration.getAdditionalCamerasSuggestedHorizontalFocalLengthValue());

cameraEstimator.setSuggestVerticalFocalLengthEnabled(
        configuration.isAdditionalCamerasSuggestVerticalFocalLengthEnabled());
cameraEstimator.setSuggestedVerticalFocalLengthValue(
        configuration.getAdditionalCamerasSuggestedVerticalFocalLengthValue());

cameraEstimator.setSuggestAspectRatioEnabled(
        configuration.isAdditionalCamerasSuggestAspectRatioEnabled());
cameraEstimator.setSuggestedAspectRatioValue(
        configuration.getAdditionalCamerasSuggestedAspectRatioValue());

cameraEstimator.setSuggestPrincipalPointEnabled(
        configuration.isAdditionalCamerasSuggestPrincipalPointEnabled());
cameraEstimator.setSuggestedPrincipalPointValue(
        configuration.getAdditionalCamerasSuggestedPrincipalPointValue());

currentCamera = cameraEstimator.estimate();
currentCameraCovariance = cameraEstimator.getCovariance();

//noinspection unchecked
listener.onSamplesAccepted((R) this, viewCount, previousViewTrackedSamples, currentViewTrackedSamples);

allPreviousViewSamples.clear();
allPreviousViewSamples.addAll(currentViewTrackedSamples);
allPreviousViewSamples.addAll(currentViewNewlySpawnedSamples);

previousViewTrackedSamples = currentViewTrackedSamples;
previousViewId = currentViewId;
currentViewId = viewCount;
}
```
File | Line |
---|---|
com/irurueta/ar/slam/AbsoluteOrientationConstantVelocityModelSlamEstimator.java | 518 |
com/irurueta/ar/slam/AbsoluteOrientationSlamEstimator.java | 549 |
```java
private void updatePredictedState(Matrix state) {
    // position
    statePositionX = x[0];
    x[0] = state.getElementAtIndex(0);
    statePositionY = x[1];
    x[1] = state.getElementAtIndex(1);
    statePositionZ = x[2];
    x[2] = state.getElementAtIndex(2);

    // quaternion state predictor is more reliable than Kalman filter, for
    // that reason we ignore predicted quaternion values on Kalman filter and
    // simply keep predicted ones. Besides, typically gyroscope samples are
    // much more reliable than accelerometer ones. For that reason state
    // elements corresponding to quaternion (3 to 6) are not copied into mX
    // array.
    stateQuaternionA = x[3];
    stateQuaternionB = x[4];
    stateQuaternionC = x[5];
    stateQuaternionD = x[6];

    stateOrientation.setA(stateQuaternionA);
    stateOrientation.setB(stateQuaternionB);
    stateOrientation.setC(stateQuaternionC);
    stateOrientation.setD(stateQuaternionD);

    // velocity
    stateVelocityX = x[7];
    x[7] = state.getElementAtIndex(7);
    stateVelocityY = x[8];
    x[8] = state.getElementAtIndex(8);
    stateVelocityZ = x[9];
    x[9] = state.getElementAtIndex(9);

    // linear acceleration
    stateAccelerationX = accumulatedAccelerationSampleX;
```
File | Line |
---|---|
com/irurueta/ar/sfm/BaseAbsoluteOrientationSlamSparseReconstructor.java | 221 |
com/irurueta/ar/sfm/BaseSlamSparseReconstructor.java | 309 |
```java
final var euclideanReconstructedPoints3D = scaleAndOrientationTransformation.transformPointsAndReturnNew(
        metricReconstructedPoints3D);

// set scaled points into result
activeEuclideanReconstructedPoints = new ArrayList<>();
ReconstructedPoint3D euclideanPoint;
ReconstructedPoint3D metricPoint;
for (var i = 0; i < numPoints; i++) {
    metricPoint = activeMetricReconstructedPoints.get(i);

    euclideanPoint = new ReconstructedPoint3D();
    euclideanPoint.setId(metricPoint.getId());
    euclideanPoint.setPoint(euclideanReconstructedPoints3D.get(i));
    euclideanPoint.setInlier(metricPoint.isInlier());
    euclideanPoint.setQualityScore(metricPoint.getQualityScore());
    if (metricPoint.getCovariance() != null) {
        euclideanPoint.setCovariance(metricPoint.getCovariance().multiplyByScalarAndReturnNew(sqrScale));
    }
    euclideanPoint.setColorData(metricPoint.getColorData());

    activeEuclideanReconstructedPoints.add(euclideanPoint);
}

return true;
} catch (final Exception e) {
    failed = true;
    //noinspection unchecked
    listener.onFail((R) this);
    return false;
}
}
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 711 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 731 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 986 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 1007 |
```java
a.setElementAt(counter, 3, h31 * h32);

// normalize row
rowNorm = Math.sqrt(Math.pow(a.getElementAt(counter, 0), 2.0)
        + Math.pow(a.getElementAt(counter, 1), 2.0)
        + Math.pow(a.getElementAt(counter, 2), 2.0)
        + Math.pow(a.getElementAt(counter, 3), 2.0));

factor = weight / rowNorm;

a.setElementAt(counter, 0, a.getElementAt(counter, 0) * factor);
a.setElementAt(counter, 1, a.getElementAt(counter, 1) * factor);
a.setElementAt(counter, 2, a.getElementAt(counter, 2) * factor);
a.setElementAt(counter, 3, a.getElementAt(counter, 3) * factor);
counter++;
```
File | Line |
---|---|
com/irurueta/ar/epipolar/estimators/AffineFundamentalMatrixEstimator.java | 189 |
com/irurueta/ar/epipolar/estimators/SevenPointsFundamentalMatrixEstimator.java | 238 |
```java
if (listener != null) {
    listener.onEstimateStart(this);
}

final var nPoints = leftPoints.size();

try {
    ProjectiveTransformation2D leftNormalization = null;
    ProjectiveTransformation2D rightNormalization = null;
    final List<Point2D> leftPoints;
    final List<Point2D> rightPoints;
    if (normalizePoints) {
        // normalize points on left view
        final var normalizer = new Point2DNormalizer(this.leftPoints);
        normalizer.compute();

        leftNormalization = normalizer.getTransformation();

        // normalize points on right view
        normalizer.setPoints(this.rightPoints);
        normalizer.compute();

        rightNormalization = normalizer.getTransformation();

        // normalize to increase accuracy
        leftNormalization.normalize();
        rightNormalization.normalize();

        leftPoints = leftNormalization.transformPointsAndReturnNew(this.leftPoints);
        rightPoints = rightNormalization.transformPointsAndReturnNew(this.rightPoints);
    } else {
        leftPoints = this.leftPoints;
        rightPoints = this.rightPoints;
    }

    final Matrix a;
    if (isLMSESolutionAllowed()) {
        a = new Matrix(nPoints, 5);
```
File | Line |
---|---|
com/irurueta/ar/epipolar/estimators/MSACFundamentalMatrixRobustEstimator.java | 232 |
com/irurueta/ar/epipolar/estimators/PROMedSFundamentalMatrixRobustEstimator.java | 501 |
com/irurueta/ar/epipolar/estimators/PROSACFundamentalMatrixRobustEstimator.java | 513 |
```java
}

@Override
public int getTotalSamples() {
    return leftPoints.size();
}

@Override
public int getSubsetSize() {
    return getMinRequiredPoints();
}

@Override
public void estimatePreliminarSolutions(
        final int[] samplesIndices, final List<FundamentalMatrix> solutions) {
    subsetLeftPoints.clear();
    subsetRightPoints.clear();
    for (final var samplesIndex : samplesIndices) {
        subsetLeftPoints.add(leftPoints.get(samplesIndex));
        subsetRightPoints.add(rightPoints.get(samplesIndex));
    }
    nonRobustEstimate(solutions, subsetLeftPoints, subsetRightPoints);
}

@Override
public double computeResidual(final FundamentalMatrix currentEstimation, final int i) {
    final var leftPoint = leftPoints.get(i);
    final var rightPoint = rightPoints.get(i);
    return residual(currentEstimation, leftPoint, rightPoint);
}

@Override
public boolean isReady() {
    return MSACFundamentalMatrixRobustEstimator.this.isReady();
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/MSACDualAbsoluteQuadricRobustEstimator.java | 169 |
com/irurueta/ar/calibration/estimators/PROMedSDualAbsoluteQuadricRobustEstimator.java | 320 |
com/irurueta/ar/calibration/estimators/PROSACDualAbsoluteQuadricRobustEstimator.java | 286 |
com/irurueta/ar/calibration/estimators/RANSACDualAbsoluteQuadricRobustEstimator.java | 168 |
```java
}

@Override
public int getTotalSamples() {
    return cameras.size();
}

@Override
public int getSubsetSize() {
    return daqEstimator.getMinNumberOfRequiredCameras();
}

@Override
public void estimatePreliminarSolutions(
        final int[] samplesIndices, final List<DualAbsoluteQuadric> solutions) {
    subsetCameras.clear();
    for (final var samplesIndex : samplesIndices) {
        subsetCameras.add(cameras.get(samplesIndex));
    }

    try {
        daqEstimator.setLMSESolutionAllowed(false);
        daqEstimator.setCameras(subsetCameras);

        final var daq = daqEstimator.estimate();
        solutions.add(daq);
    } catch (final Exception e) {
        // if anything fails, no solution is added
    }
}

@Override
public double computeResidual(final DualAbsoluteQuadric currentEstimation, final int i) {
    return residual(currentEstimation, cameras.get(i));
}

@Override
public boolean isReady() {
    return MSACDualAbsoluteQuadricRobustEstimator.this.isReady();
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/MSACImageOfAbsoluteConicRobustEstimator.java | 178 |
com/irurueta/ar/calibration/estimators/PROMedSImageOfAbsoluteConicRobustEstimator.java | 331 |
com/irurueta/ar/calibration/estimators/PROSACImageOfAbsoluteConicRobustEstimator.java | 300 |
```java
}

@Override
public int getTotalSamples() {
    return homographies.size();
}

@Override
public int getSubsetSize() {
    return iacEstimator.getMinNumberOfRequiredHomographies();
}

@Override
public void estimatePreliminarSolutions(
        final int[] samplesIndices, final List<ImageOfAbsoluteConic> solutions) {
    subsetHomographies.clear();
    for (final var samplesIndex : samplesIndices) {
        subsetHomographies.add(homographies.get(samplesIndex));
    }

    try {
        iacEstimator.setLMSESolutionAllowed(false);
        iacEstimator.setHomographies(subsetHomographies);

        final var iac = iacEstimator.estimate();
        solutions.add(iac);
    } catch (final Exception e) {
        // if anything fails, no solution is added
    }
}

@Override
public double computeResidual(final ImageOfAbsoluteConic currentEstimation, final int i) {
    return residual(currentEstimation, homographies.get(i));
}

@Override
public boolean isReady() {
    return MSACImageOfAbsoluteConicRobustEstimator.this.isReady();
```
File | Line |
---|---|
com/irurueta/ar/epipolar/estimators/LMedSFundamentalMatrixRobustEstimator.java | 270 |
com/irurueta/ar/epipolar/estimators/MSACFundamentalMatrixRobustEstimator.java | 234 |
com/irurueta/ar/epipolar/estimators/PROMedSFundamentalMatrixRobustEstimator.java | 503 |
com/irurueta/ar/epipolar/estimators/PROSACFundamentalMatrixRobustEstimator.java | 515 |
```java
@Override
public int getTotalSamples() {
    return leftPoints.size();
}

@Override
public int getSubsetSize() {
    return getMinRequiredPoints();
}

@Override
public void estimatePreliminarSolutions(
        final int[] samplesIndices, final List<FundamentalMatrix> solutions) {
    subsetLeftPoints.clear();
    subsetRightPoints.clear();
    for (final var samplesIndex : samplesIndices) {
        subsetLeftPoints.add(leftPoints.get(samplesIndex));
        subsetRightPoints.add(rightPoints.get(samplesIndex));
    }
    nonRobustEstimate(solutions, subsetLeftPoints, subsetRightPoints);
}

@Override
public double computeResidual(final FundamentalMatrix currentEstimation, final int i) {
    final var leftPoint = leftPoints.get(i);
    final var rightPoint = rightPoints.get(i);
    return residual(currentEstimation, leftPoint, rightPoint);
}

@Override
public boolean isReady() {
    return LMedSFundamentalMatrixRobustEstimator.this.isReady();
```
File | Line |
---|---|
com/irurueta/ar/epipolar/estimators/MSACFundamentalMatrixRobustEstimator.java | 221 |
com/irurueta/ar/epipolar/estimators/RANSACFundamentalMatrixRobustEstimator.java | 298 |
```java
final var innerEstimator = new MSACRobustEstimator<FundamentalMatrix>(new MSACRobustEstimatorListener<>() {

    // subset of left points
    private final List<Point2D> subsetLeftPoints = new ArrayList<>();

    // subset of right points
    private final List<Point2D> subsetRightPoints = new ArrayList<>();

    @Override
    public double getThreshold() {
        return threshold;
    }

    @Override
    public int getTotalSamples() {
        return leftPoints.size();
    }

    @Override
    public int getSubsetSize() {
        return getMinRequiredPoints();
    }

    @Override
    public void estimatePreliminarSolutions(
            final int[] samplesIndices, final List<FundamentalMatrix> solutions) {
        subsetLeftPoints.clear();
        subsetRightPoints.clear();
        for (final var samplesIndex : samplesIndices) {
            subsetLeftPoints.add(leftPoints.get(samplesIndex));
            subsetRightPoints.add(rightPoints.get(samplesIndex));
        }
        nonRobustEstimate(solutions, subsetLeftPoints, subsetRightPoints);
    }

    @Override
    public double computeResidual(final FundamentalMatrix currentEstimation, final int i) {
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 234 |
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 802 |
```java
var counter = 0;
ProjectiveTransformation2D t = null;
final var h = new Matrix(ProjectiveTransformation2D.HOM_COORDS, ProjectiveTransformation2D.HOM_COORDS);

// elements ij of homography (last column is not required)
double h11;
double h12;
double h21;
double h22;
double h31;
double h32;
double rowNorm;

for (final var homography : homographies) {
    // convert homography into projective so it can be normalized
    homography.asMatrix(h);
    if (t == null) {
        t = new ProjectiveTransformation2D(h);
    } else {
        t.setT(h);
    }

    // normalize
    t.normalize();

    // obtain elements of projective transformation matrix
    // there is no need to retrieve internal matrix h, as we already
    // hold a reference
    h11 = h.getElementAt(0, 0);
    h12 = h.getElementAt(0, 1);
    h21 = h.getElementAt(1, 0);
    h22 = h.getElementAt(1, 1);
    h31 = h.getElementAt(2, 0);
    h32 = h.getElementAt(2, 1);

    // fill first equation
    a.setElementAt(counter, 0, h11 * h12);
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 383 |
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 802 |
(Duplicated fragment identical to the `var counter = 0; …` block shown in the entry above.)
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 528 |
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 802 |
(Duplicated fragment identical to the `var counter = 0; …` block shown two entries above.)
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 668 |
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 802 |
(Duplicated fragment identical to the `var counter = 0; …` block shown three entries above.)
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMedSImageOfAbsoluteConicRobustEstimator.java | 205 |
com/irurueta/ar/calibration/estimators/MSACImageOfAbsoluteConicRobustEstimator.java | 180 |
com/irurueta/ar/calibration/estimators/PROMedSImageOfAbsoluteConicRobustEstimator.java | 333 |
com/irurueta/ar/calibration/estimators/PROSACImageOfAbsoluteConicRobustEstimator.java | 302 |
```java
@Override
public int getTotalSamples() {
    return homographies.size();
}

@Override
public int getSubsetSize() {
    return iacEstimator.getMinNumberOfRequiredHomographies();
}

@Override
public void estimatePreliminarSolutions(
        final int[] samplesIndices, final List<ImageOfAbsoluteConic> solutions) {
    subsetHomographies.clear();
    for (final var samplesIndex : samplesIndices) {
        subsetHomographies.add(homographies.get(samplesIndex));
    }

    try {
        iacEstimator.setLMSESolutionAllowed(false);
        iacEstimator.setHomographies(subsetHomographies);

        final var iac = iacEstimator.estimate();
        solutions.add(iac);
    } catch (final Exception e) {
        // if anything fails, no solution is added
    }
}

@Override
public double computeResidual(final ImageOfAbsoluteConic currentEstimation, final int i) {
    return residual(currentEstimation, homographies.get(i));
}

@Override
public boolean isReady() {
    return LMedSImageOfAbsoluteConicRobustEstimator.this.isReady();
```
File | Line |
---|---|
com/irurueta/ar/epipolar/refiners/FundamentalMatrixRefiner.java | 216 |
com/irurueta/ar/epipolar/refiners/HomogeneousRightEpipoleRefiner.java | 167 |
```java
    rightPoint = samples2.get(i);

    leftPoint.normalize();
    rightPoint.normalize();

    x.setElementAt(pos, 0, leftPoint.getHomX());
    x.setElementAt(pos, 1, leftPoint.getHomY());
    x.setElementAt(pos, 2, leftPoint.getHomW());
    x.setElementAt(pos, 3, rightPoint.getHomX());
    x.setElementAt(pos, 4, rightPoint.getHomY());
    x.setElementAt(pos, 5, rightPoint.getHomW());

    y[pos] = residuals[i];
    pos++;
}
}

final var evaluator = new LevenbergMarquardtMultiDimensionFunctionEvaluator() {

    private final Point2D leftPoint = Point2D.create(CoordinatesType.HOMOGENEOUS_COORDINATES);

    private final Point2D rightPoint = Point2D.create(CoordinatesType.HOMOGENEOUS_COORDINATES);

    private final FundamentalMatrix fundMatrix = new FundamentalMatrix();
```
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 912 |
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 1642 |
```java
            samples[1].getPoint().getInhomY() - principalPointY);
    rightPoints.add(rightPoint);

    qualityScores[i] = match.getQualityScore();
    i++;
}

try {
    final var homographyEstimator = PointCorrespondenceProjectiveTransformation2DRobustEstimator.create(
            configuration.getRobustPlanarHomographyEstimatorMethod());
    homographyEstimator.setResultRefined(configuration.isPlanarHomographyRefined());
    homographyEstimator.setCovarianceKept(configuration.isPlanarHomographyCovarianceKept());
    homographyEstimator.setConfidence(configuration.getPlanarHomographyConfidence());
    homographyEstimator.setMaxIterations(configuration.getPlanarHomographyMaxIterations());

    switch (configuration.getRobustPlanarHomographyEstimatorMethod()) {
        case LMEDS:
            ((LMedSPointCorrespondenceProjectiveTransformation2DRobustEstimator) homographyEstimator)
                    .setStopThreshold(configuration.getPlanarHomographyThreshold());
            break;
        case MSAC:
            ((MSACPointCorrespondenceProjectiveTransformation2DRobustEstimator) homographyEstimator)
                    .setThreshold(configuration.getPlanarHomographyThreshold());
            break;
        case PROMEDS:
            ((PROMedSPointCorrespondenceProjectiveTransformation2DRobustEstimator) homographyEstimator)
                    .setStopThreshold(configuration.getPlanarHomographyThreshold());
            break;
        case PROSAC:
```
File | Line |
---|---|
com/irurueta/ar/sfm/LMSEHomogeneousSinglePoint3DTriangulator.java | 217 |
com/irurueta/ar/sfm/LMSEHomogeneousSinglePoint3DTriangulator.java | 236 |
com/irurueta/ar/sfm/WeightedHomogeneousSinglePoint3DTriangulator.java | 372 |
com/irurueta/ar/sfm/WeightedHomogeneousSinglePoint3DTriangulator.java | 395 |
```java
a.setElementAt(row, 3, homX * principalPlane.getD() - homW * verticalAxisPlane.getD());

// normalize row (equation) to increase accuracy
rowNorm = Math.sqrt(Math.pow(a.getElementAt(row, 0), 2.0)
        + Math.pow(a.getElementAt(row, 1), 2.0)
        + Math.pow(a.getElementAt(row, 2), 2.0)
        + Math.pow(a.getElementAt(row, 3), 2.0));

a.setElementAt(row, 0, a.getElementAt(row, 0) / rowNorm);
a.setElementAt(row, 1, a.getElementAt(row, 1) / rowNorm);
a.setElementAt(row, 2, a.getElementAt(row, 2) / rowNorm);
a.setElementAt(row, 3, a.getElementAt(row, 3) / rowNorm);
```
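The same "normalize each stacked equation row to unit norm" idiom recurs in several fragments of this report; it keeps every equation contributing comparably to the subsequent SVD solve. A self-contained sketch of what a shared helper could look like follows (plain arrays here for clarity; the real code operates on the library's Matrix type, and the helper name is hypothetical):

```java
// Hypothetical shared helper: scales one row of a stacked linear system to
// unit Euclidean norm so every equation has comparable weight in the solve.
final class EquationRows {
    static void normalizeRow(final double[][] a, final int row) {
        var norm = 0.0;
        for (final var value : a[row]) {
            norm += value * value;
        }
        norm = Math.sqrt(norm);
        for (var col = 0; col < a[row].length; col++) {
            a[row][col] /= norm;
        }
    }
}
```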
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 1129 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 844 |
```java
estimator.setAspectRatio(configuration.getPairedCamerasAspectRatio());
estimator.estimate();

final var camera1 = estimator.getEstimatedLeftCamera();
final var camera2 = estimator.getEstimatedRightCamera();

camera1.decompose();
camera2.decompose();

final var intrinsicZeroPrincipalPoint1 = camera1.getIntrinsicParameters();
final var intrinsicZeroPrincipalPoint2 = camera2.getIntrinsicParameters();

final var principalPointX = configuration.getPrincipalPointX();
final var principalPointY = configuration.getPrincipalPointY();

final var intrinsic1 = new PinholeCameraIntrinsicParameters(intrinsicZeroPrincipalPoint1);
intrinsic1.setHorizontalPrincipalPoint(intrinsic1.getHorizontalPrincipalPoint() + principalPointX);
intrinsic1.setVerticalPrincipalPoint(intrinsic1.getVerticalPrincipalPoint() + principalPointY);
camera1.setIntrinsicParameters(intrinsic1);

final var intrinsic2 = new PinholeCameraIntrinsicParameters(intrinsicZeroPrincipalPoint2);
intrinsic2.setHorizontalPrincipalPoint(intrinsic2.getHorizontalPrincipalPoint() + principalPointX);
intrinsic2.setVerticalPrincipalPoint(intrinsic2.getVerticalPrincipalPoint() + principalPointY);
camera2.setIntrinsicParameters(intrinsic2);
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 567 |
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 592 |
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 841 |
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 867 |
```java
a.setElementAt(counter, 3, h31 * h32);

// normalize row
rowNorm = Math.sqrt(Math.pow(a.getElementAt(counter, 0), 2.0)
        + Math.pow(a.getElementAt(counter, 1), 2.0)
        + Math.pow(a.getElementAt(counter, 2), 2.0)
        + Math.pow(a.getElementAt(counter, 3), 2.0));

a.setElementAt(counter, 0, a.getElementAt(counter, 0) / rowNorm);
a.setElementAt(counter, 1, a.getElementAt(counter, 1) / rowNorm);
a.setElementAt(counter, 2, a.getElementAt(counter, 2) / rowNorm);
a.setElementAt(counter, 3, a.getElementAt(counter, 3) / rowNorm);

counter++;
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/WeightedDualAbsoluteQuadricEstimator.java | 581 |
com/irurueta/ar/calibration/estimators/WeightedDualAbsoluteQuadricEstimator.java | 684 |
```java
    fill3rdRowAnd1stRowEquation(p11, p31, p12, p32, p13, p33, p14, p34, a, eqCounter);
    applyWeight(row, eqCounter, weight);
    eqCounter++;

    // 3rd row
    fill3rdRowAnd2ndRowEquation(p21, p31, p22, p32, p23, p33, p24, p34, a, eqCounter);
    applyWeight(row, eqCounter, weight);

    // transRow = row'
    row.transpose(transRow);
    transRow.multiply(row, tmp);
    tmp.multiplyByScalar(1.0 / previousNorm);

    // a += 1.0 / previousNorm * tmp
    a.add(tmp);

    // normalize
    previousNorm = Utils.normF(a);
    a.multiplyByScalar(1.0 / previousNorm);
    }
    cameraCounter++;
}

final var decomposer = new SingularValueDecomposer(a);
enforceRank3IfNeeded(decomposer, result);
} catch (final AlgebraException | SortingException | NumericalException e) {
    throw new DualAbsoluteQuadricEstimatorException(e);
}
}

/**
 * Estimates the Dual Absolute Quadric (DAQ) assuming that the principal
 * point is zero.
 *
 * @param result instance where the resulting estimated Dual Absolute
 *               Quadric will be stored.
 * @throws DualAbsoluteQuadricEstimatorException if an error occurs during
 *         estimation, usually because repeated cameras are provided, or
 *         cameras corresponding to critical motion sequences, such as pure
 *         parallel translations, are provided, which add no new data.
 */
private void estimatePrincipalPointAtOrigin(DualAbsoluteQuadric result)
```
File | Line |
---|---|
com/irurueta/ar/sfm/BaseAbsoluteOrientationSlamSparseReconstructor.java | 221 |
com/irurueta/ar/sfm/KnownBaselineSparseReconstructor.java | 124 |
```java
final var euclideanReconstructedPoints3D = scaleAndOrientationTransformation.transformPointsAndReturnNew(
        metricReconstructedPoints3D);

// set scaled points into result
activeEuclideanReconstructedPoints = new ArrayList<>();
ReconstructedPoint3D euclideanPoint;
ReconstructedPoint3D metricPoint;
for (var i = 0; i < numPoints; i++) {
    metricPoint = activeMetricReconstructedPoints.get(i);

    euclideanPoint = new ReconstructedPoint3D();
    euclideanPoint.setId(metricPoint.getId());
    euclideanPoint.setPoint(euclideanReconstructedPoints3D.get(i));
    euclideanPoint.setInlier(metricPoint.isInlier());
    euclideanPoint.setQualityScore(metricPoint.getQualityScore());
    if (metricPoint.getCovariance() != null) {
        euclideanPoint.setCovariance(metricPoint.getCovariance().multiplyByScalarAndReturnNew(sqrScale));
    }
    euclideanPoint.setColorData(metricPoint.getColorData());

    activeEuclideanReconstructedPoints.add(euclideanPoint);
}

return true;
} catch (final Exception e) {
    failed = true;
    //noinspection unchecked
    listener.onFail((R) this);
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 247 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 685 |
```java
homography.asMatrix(h);
if (t == null) {
    t = new ProjectiveTransformation2D(h);
} else {
    t.setT(h);
}
// normalize
t.normalize();

// obtain elements of projective transformation matrix
// there is no need to retrieve internal matrix h, as we already
// hold a reference
h11 = h.getElementAt(0, 0);
h12 = h.getElementAt(0, 1);
h21 = h.getElementAt(1, 0);
h22 = h.getElementAt(1, 1);
h31 = h.getElementAt(2, 0);
h32 = h.getElementAt(2, 1);

// fill first equation
a.setElementAt(counter, 0, h11 * h12);
a.setElementAt(counter, 1, h11 * h22 + h21 * h12);
a.setElementAt(counter, 2, h21 * h22);
a.setElementAt(counter, 3, h11 * h32 + h31 * h12);
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 541 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 389 |
```java
homography.asMatrix(h);
if (t == null) {
    t = new ProjectiveTransformation2D(h);
} else {
    t.setT(h);
}
// normalize
t.normalize();

// obtain elements of projective transformation matrix
// there is no need to retrieve internal matrix h, as we already
// hold a reference
h11 = h.getElementAt(0, 0);
h12 = h.getElementAt(0, 1);
h21 = h.getElementAt(1, 0);
h22 = h.getElementAt(1, 1);
h31 = h.getElementAt(2, 0);
h32 = h.getElementAt(2, 1);

// fill first equation
a.setElementAt(counter, 0, h11 * h12);
a.setElementAt(counter, 1, h11 * h22 + h21 * h12);
a.setElementAt(counter, 2, h21 * h22);
a.setElementAt(counter, 3, h31 * h32);
```
File | Line |
---|---|
com/irurueta/ar/epipolar/estimators/AffineFundamentalMatrixEstimator.java | 84 |
com/irurueta/ar/epipolar/estimators/SevenPointsFundamentalMatrixEstimator.java | 104 |
```java
public AffineFundamentalMatrixEstimator(final List<Point2D> leftPoints, final List<Point2D> rightPoints) {
    super(leftPoints, rightPoints);
    allowLMSESolution = DEFAULT_ALLOW_LMSE_SOLUTION;
    normalizePoints = DEFAULT_NORMALIZE_POINT_CORRESPONDENCES;
}

/**
 * Returns boolean indicating whether an LMSE (Least Mean Square Error)
 * solution is allowed or not. When an LMSE solution is allowed, more than
 * the minimum required number of matched points can be used for fundamental
 * matrix estimation. If an LMSE solution is not allowed, then only the
 * first 4 matched points will be taken into account.
 *
 * @return true if an LMSE solution is allowed, false otherwise.
 */
public boolean isLMSESolutionAllowed() {
    return allowLMSESolution;
}

/**
 * Sets boolean indicating whether an LMSE (Least Mean Square Error)
 * solution is allowed or not. When an LMSE solution is allowed, more than
 * the minimum required number of matched points can be used for fundamental
 * matrix estimation. If an LMSE solution is not allowed, then only the
 * first 4 matched points will be taken into account.
 *
 * @param allowed true if an LMSE solution is allowed, false otherwise.
 * @throws LockedException if this instance is locked because an estimation
 *                         is in progress.
 */
public void setLMSESolutionAllowed(final boolean allowed) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    allowLMSESolution = allowed;
}

/**
 * Indicates whether provided matched 2D points must be normalized to
 * increase the accuracy of the estimation.
 *
 * @return true if points must be normalized, false otherwise.
 */
public boolean arePointsNormalized() {
    return normalizePoints;
}

/**
 * Sets boolean indicating whether provided matched 2D points must be
 * normalized to increase the accuracy of the estimation.
 *
 * @param normalizePoints true if points must be normalized, false
 *                        otherwise.
 * @throws LockedException if this instance is locked because an estimation
 *                         is in progress.
 */
public void setPointsNormalized(final boolean normalizePoints) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    this.normalizePoints = normalizePoints;
}

/**
 * Returns boolean indicating whether estimator is ready to start the
 * fundamental matrix estimation.
 * This is true when the required minimum number of matched points is
 * provided to obtain a solution and both left and right views have the
 * same number of matched points.
 *
 * @return true if estimator is ready to start the fundamental matrix
 *         estimation, false otherwise.
 */
@Override
public boolean isReady() {
    return leftPoints != null && rightPoints != null
            && leftPoints.size() == rightPoints.size()
            && leftPoints.size() >= MIN_REQUIRED_POINTS;
}
```
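A minimal usage sketch of the duplicated accessor API above. Package imports are inferred from the file paths in this report, and the final `estimate()` call is assumed to be inherited from the estimator's base class, since it is not part of the fragment:

```java
import com.irurueta.ar.epipolar.estimators.AffineFundamentalMatrixEstimator;
import com.irurueta.geometry.Point2D;

import java.util.List;

public final class AffineFundamentalMatrixExample {
    // leftPoints and rightPoints are matched 2D points from two views
    // (same size on both sides, at least the minimum required number).
    static void estimate(final List<Point2D> leftPoints, final List<Point2D> rightPoints) throws Exception {
        final var estimator = new AffineFundamentalMatrixEstimator(leftPoints, rightPoints);
        // use all correspondences (LMSE) instead of only the first few
        estimator.setLMSESolutionAllowed(true);
        // normalize correspondences to improve numerical accuracy
        estimator.setPointsNormalized(true);
        if (estimator.isReady()) {
            final var fundamentalMatrix = estimator.estimate(); // assumed base-class method
            System.out.println(fundamentalMatrix);
        }
    }
}
```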
File | Line |
---|---|
com/irurueta/ar/sfm/BaseAbsoluteOrientationSlamSparseReconstructor.java | 196 |
com/irurueta/ar/sfm/KnownBaselineSparseReconstructor.java | 99 |
```java
previousEuclideanEstimatedCamera = new EstimatedCamera();
previousEuclideanEstimatedCamera.setCamera(euclideanCamera1);
previousEuclideanEstimatedCamera.setViewId(previousMetricEstimatedCamera.getViewId());
previousEuclideanEstimatedCamera.setQualityScore(previousMetricEstimatedCamera.getQualityScore());
if (previousMetricEstimatedCamera.getCovariance() != null) {
    previousEuclideanEstimatedCamera.setCovariance(previousMetricEstimatedCamera.getCovariance()
            .multiplyByScalarAndReturnNew(sqrScale));
}

currentEuclideanEstimatedCamera = new EstimatedCamera();
currentEuclideanEstimatedCamera.setCamera(euclideanCamera2);
currentEuclideanEstimatedCamera.setViewId(currentMetricEstimatedCamera.getViewId());
currentEuclideanEstimatedCamera.setQualityScore(currentMetricEstimatedCamera.getQualityScore());
if (currentMetricEstimatedCamera.getCovariance() != null) {
    currentEuclideanEstimatedCamera.setCovariance(currentMetricEstimatedCamera.getCovariance()
            .multiplyByScalarAndReturnNew(sqrScale));
}

// update scale of reconstructed points
final var numPoints = activeMetricReconstructedPoints.size();
final var metricReconstructedPoints3D = new ArrayList<Point3D>();
for (final var reconstructedPoint : activeMetricReconstructedPoints) {
```
File | Line |
---|---|
com/irurueta/ar/sfm/BaseSlamSparseReconstructor.java | 284 |
com/irurueta/ar/sfm/KnownBaselineSparseReconstructor.java | 99 |
```java
previousEuclideanEstimatedCamera = new EstimatedCamera();
previousEuclideanEstimatedCamera.setCamera(euclideanCamera1);
previousEuclideanEstimatedCamera.setViewId(previousMetricEstimatedCamera.getViewId());
previousEuclideanEstimatedCamera.setQualityScore(previousMetricEstimatedCamera.getQualityScore());
if (previousMetricEstimatedCamera.getCovariance() != null) {
    previousEuclideanEstimatedCamera.setCovariance(
            previousMetricEstimatedCamera.getCovariance().multiplyByScalarAndReturnNew(sqrScale));
}

currentEuclideanEstimatedCamera = new EstimatedCamera();
currentEuclideanEstimatedCamera.setCamera(euclideanCamera2);
currentEuclideanEstimatedCamera.setViewId(currentMetricEstimatedCamera.getViewId());
currentEuclideanEstimatedCamera.setQualityScore(currentMetricEstimatedCamera.getQualityScore());
if (currentMetricEstimatedCamera.getCovariance() != null) {
    currentEuclideanEstimatedCamera.setCovariance(
            currentMetricEstimatedCamera.getCovariance().multiplyByScalarAndReturnNew(sqrScale));
}

// update scale of reconstructed points
final var numPoints = activeMetricReconstructedPoints.size();
final var metricReconstructedPoints3D = new ArrayList<Point3D>();
for (final var reconstructedPoint : activeMetricReconstructedPoints) {
```
File | Line |
---|---|
com/irurueta/ar/calibration/AlternatingCameraCalibrator.java | 692 |
com/irurueta/ar/calibration/ErrorOptimizationCameraCalibrator.java | 732 |
```java
    }
}

/**
 * Refreshes listener of distortion estimator.
 */
protected void refreshDistortionEstimatorListener() {
    if (distortionEstimatorListener == null) {
        distortionEstimatorListener = new RadialDistortionRobustEstimatorListener() {
            @Override
            public void onEstimateStart(final RadialDistortionRobustEstimator estimator) {
                radialDistortionProgress = 0.0f;
                notifyProgress();
            }

            @Override
            public void onEstimateEnd(final RadialDistortionRobustEstimator estimator) {
                radialDistortionProgress = 1.0f;
                notifyProgress();
            }

            @Override
            public void onEstimateNextIteration(
                    final RadialDistortionRobustEstimator estimator, final int iteration) {
                // not needed
            }

            @Override
            public void onEstimateProgressChange(
                    final RadialDistortionRobustEstimator estimator, final float progress) {
                radialDistortionProgress = progress;
                notifyProgress();
            }
        };
    }

    try {
        distortionEstimator.setListener(distortionEstimatorListener);
    } catch (final LockedException e) {
        Logger.getLogger(AlternatingCameraCalibrator.class.getName()).log(Level.WARNING,
                "Could not set radial distortion estimator listener", e);
    }
}

/**
 * Sets robust estimator method to be used for radial distortion estimation.
 * If method changes, then a new radial distortion estimator is created and
 * configured.
 *
 * @param distortionMethod robust estimator method to be used for
 *                         radial distortion estimation.
 */
private void internalSetDistortionMethod(final RobustEstimatorMethod distortionMethod) {
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/MSACRadialDistortionRobustEstimator.java | 147 |
com/irurueta/ar/calibration/estimators/RANSACRadialDistortionRobustEstimator.java | 147 |
```java
public MSACRadialDistortionRobustEstimator(
        final List<Point2D> distortedPoints, final List<Point2D> undistortedPoints,
        final Point2D distortionCenter, final RadialDistortionRobustEstimatorListener listener) {
    super(distortedPoints, undistortedPoints, distortionCenter, listener);
    threshold = DEFAULT_THRESHOLD;
}

/**
 * Returns threshold to determine whether points are inliers or not when
 * testing possible estimation solutions.
 * The threshold refers to the amount of error (i.e. Euclidean distance) a
 * possible solution has on projected 2D points.
 *
 * @return threshold to determine whether points are inliers or not when
 *         testing possible estimation solutions.
 */
public double getThreshold() {
    return threshold;
}

/**
 * Sets threshold to determine whether points are inliers or not when
 * testing possible estimation solutions.
 * The threshold refers to the amount of error (i.e. Euclidean distance) a
 * possible solution has on projected 2D points.
 *
 * @param threshold threshold to be set.
 * @throws IllegalArgumentException if provided value is equal or less than
 *                                  zero.
 * @throws LockedException if robust estimator is locked because an
 *                         estimation is already in progress.
 */
public void setThreshold(final double threshold) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (threshold <= MIN_THRESHOLD) {
        throw new IllegalArgumentException();
    }
    this.threshold = threshold;
}

/**
 * Estimates a radial distortion using a robust estimator and
 * the best set of matched 2D points found using the robust estimator.
 *
 * @return a radial distortion.
 * @throws LockedException if robust estimator is locked because an
 *                         estimation is already in progress.
 * @throws NotReadyException if provided input data is not enough to start
 *                           the estimation.
 * @throws RobustEstimatorException if estimation fails for any reason
 *                                  (i.e. numerical instability, no solution available, etc).
 */
@SuppressWarnings("DuplicatedCode")
@Override
public RadialDistortion estimate() throws LockedException, NotReadyException, RobustEstimatorException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (!isReady()) {
        throw new NotReadyException();
    }

    final var innerEstimator = new MSACRobustEstimator<RadialDistortion>(new MSACRobustEstimatorListener<>() {
```
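A brief usage sketch of the duplicated threshold API, using only the four-argument constructor, `setThreshold` and `estimate` calls shown above. Import paths are inferred from this report's file paths and the threshold value is purely illustrative:

```java
import com.irurueta.ar.calibration.RadialDistortion;
import com.irurueta.ar.calibration.estimators.MSACRadialDistortionRobustEstimator;
import com.irurueta.ar.calibration.estimators.RadialDistortionRobustEstimatorListener;
import com.irurueta.geometry.Point2D;

import java.util.List;

public final class RadialDistortionExample {
    // distorted/undistorted are matched observations of the same points;
    // center is the assumed distortion center (e.g. the principal point).
    static RadialDistortion estimate(
            final List<Point2D> distorted, final List<Point2D> undistorted,
            final Point2D center, final RadialDistortionRobustEstimatorListener listener) throws Exception {
        final var estimator = new MSACRadialDistortionRobustEstimator(distorted, undistorted, center, listener);
        // inlier threshold on reprojection error in pixels (illustrative value)
        estimator.setThreshold(1.0);
        return estimator.estimate();
    }
}
```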
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/KruppaDualImageOfAbsoluteConicEstimator.java | 610 |
com/irurueta/ar/calibration/estimators/KruppaDualImageOfAbsoluteConicEstimator.java | 941 |
```java
        if (x >= 0.0 && y >= 0.0) {
            final var horizontalFocalLength = Math.sqrt(x);
            final var verticalFocalLength = Math.sqrt(y);
            valid = buildDiac(horizontalFocalLength, verticalFocalLength, diac);
        }
        if (valid) {
            break;
        }
    }
} else {
    throw new KruppaDualImageOfAbsoluteConicEstimatorException();
}

if (valid) {
    // copy to result
    result.setParameters(diac.getA(), diac.getB(), diac.getC(), diac.getD(), diac.getE(), diac.getF());
} else {
    // no valid DIAC could be found
    throw new KruppaDualImageOfAbsoluteConicEstimatorException();
}
} catch (final KruppaDualImageOfAbsoluteConicEstimatorException ex) {
    throw ex;
} catch (final Exception ex) {
    throw new KruppaDualImageOfAbsoluteConicEstimatorException(ex);
}
}

/**
 * Gets x value from current y value.
 * X and y values are the squared values of estimated focal length
 * components.
 * This method is used internally when aspect ratio is not known.
 *
 * @param y y value to obtain x value from.
 * @param s internal value from Kruppa's equations.
 * @param t internal value from Kruppa's equations.
 * @param u internal value from Kruppa's equations.
 * @param v internal value from Kruppa's equations.
 * @param w internal value from Kruppa's equations.
 * @return x value.
 */
private double getXFromY(final double y, final double s, final double t, final double u, final double v,
```
File | Line |
---|---|
com/irurueta/ar/calibration/AlternatingCameraCalibrator.java | 530 |
com/irurueta/ar/calibration/ErrorOptimizationCameraCalibrator.java | 550 |
```java
double[] qualityScores = null;
if (distortionMethod == RobustEstimatorMethod.PROSAC
        || distortionMethod == RobustEstimatorMethod.PROMEDS) {
    qualityScores = new double[totalPoints];
}

// estimate camera pose for each sample
var pointCounter = 0;
var sampleCounter = 0;
for (final var sample : samples) {
    if (sample.getHomography() == null) {
        // homography computation failed, so we cannot compute camera
        // pose for this sample, or use this sample for radial distortion
        // estimation
        continue;
    }

    sample.computeCameraPose(intrinsic);

    // transform ideal pattern markers using estimated homography
    final List<Point2D> idealPatternMarkers;
    if (sample.getPattern() != null) {
        // use points generated by pattern in sample
        idealPatternMarkers = sample.getPattern().getIdealPoints();
    } else {
        // use fallback pattern points
        idealPatternMarkers = idealFallbackPatternMarkers;
    }

    final var transformedIdealPatternMarkers = sample.getHomography()
            .transformPointsAndReturnNew(idealPatternMarkers);

    // transformedIdealPatternMarkers are considered the undistorted
    // points, because camera follows a pure pinhole model without
    // distortion, and we have transformed the ideal points using a
    // pure projective homography without distortion.
    // sample.getSampledMarkers() contains the sampled coordinates using
    // the actual camera, which will be distorted

    // the sampled markers are the ones considered to be distorted for
    // radial distortion estimation purposes, because they are obtained
    // directly from the camera

    // stack together all distorted and undistorted points from all
    // samples
    distortedPoints.addAll(sample.getSampledMarkers());
    undistortedPoints.addAll(transformedIdealPatternMarkers);

    final var markersSize = transformedIdealPatternMarkers.size();
```
File | Line |
---|---|
com/irurueta/ar/slam/StatePredictor.java | 183 |
com/irurueta/ar/slam/StatePredictor.java | 762 |
```java
final var uwz = u[8];

try {
    // update velocity
    final var vv = new Matrix(SPEED_COMPONENTS, SPEED_COMPONENTS);
    final var va = new Matrix(SPEED_COMPONENTS, ACCELERATION_COMPONENTS);
    final var v = VelocityPredictor.predict(vx, vy, vz, ax, ay, az, dt, vv, va);

    // update position
    Matrix rr = null;
    Matrix rv = null;
    Matrix ra = null;
    if (jacobianX != null) {
        rr = new Matrix(Point3D.POINT3D_INHOMOGENEOUS_COORDINATES_LENGTH,
                Point3D.POINT3D_INHOMOGENEOUS_COORDINATES_LENGTH);
        rv = new Matrix(Point3D.POINT3D_INHOMOGENEOUS_COORDINATES_LENGTH, SPEED_COMPONENTS);
        ra = new Matrix(Point3D.POINT3D_INHOMOGENEOUS_COORDINATES_LENGTH, ACCELERATION_COMPONENTS);
    }
    PositionPredictor.predict(r, vx, vy, vz, ax, ay, az, dt, r, rr, rv, ra);

    // update orientation
    Matrix qq = null;
    Matrix qw = null;
```
File | Line |
---|---|
com/irurueta/ar/slam/StatePredictor.java | 427 |
com/irurueta/ar/slam/StatePredictor.java | 1018 |
```java
    throw new IllegalArgumentException();
}

// position
final var r = new InhomogeneousPoint3D(x[0], x[1], x[2]);

// orientation
var q = new Quaternion(x[3], x[4], x[5], x[6]);

// linear velocity
var vx = x[7];
var vy = x[8];
var vz = x[9];

// linear acceleration
var ax = x[10];
var ay = x[11];
var az = x[12];

// angular velocity
var wx = x[13];
var wy = x[14];
var wz = x[15];

// position change (control)
final var drx = u[0];
final var dry = u[1];
final var drz = u[2];

// linear velocity change (control)
final var uvx = u[3];
```
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 757 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 637 |
```java
    if (samples.length != MIN_NUMBER_OF_VIEWS) {
        return false;
    }

    leftSamples.add(samples[0]);
    rightSamples.add(samples[1]);

    final var leftPoint = Point2D.create();
    leftPoint.setInhomogeneousCoordinates(
            samples[0].getPoint().getInhomX() - principalPointX,
            samples[0].getPoint().getInhomY() - principalPointY);
    leftPoints.add(leftPoint);

    final var rightPoint = Point2D.create();
    rightPoint.setInhomogeneousCoordinates(
            samples[1].getPoint().getInhomX() - principalPointX,
            samples[1].getPoint().getInhomY() - principalPointY);
    rightPoints.add(rightPoint);

    qualityScores[i] = match.getQualityScore();
    i++;
}

try {
    final var estimator = FundamentalMatrixRobustEstimator.create(leftPoints, rightPoints, qualityScores,
```
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 896 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 500 |
```java
    if (samples.length != MIN_NUMBER_OF_VIEWS) {
        return false;
    }

    leftSamples.add(samples[0]);
    rightSamples.add(samples[1]);

    final var leftPoint = Point2D.create();
    leftPoint.setInhomogeneousCoordinates(
            samples[0].getPoint().getInhomX() - principalPointX,
            samples[0].getPoint().getInhomY() - principalPointY);
    leftPoints.add(leftPoint);

    final var rightPoint = Point2D.create();
    rightPoint.setInhomogeneousCoordinates(
            samples[1].getPoint().getInhomX() - principalPointX,
            samples[1].getPoint().getInhomY() - principalPointY);
    rightPoints.add(rightPoint);

    qualityScores[i] = match.getQualityScore();
    i++;
}

try {
    final var homographyEstimator = PointCorrespondenceProjectiveTransformation2DRobustEstimator.create(
```
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 1266 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 974 |
```java
final var samples2 = currentEstimatedFundamentalMatrix.getRightSamples();

final var points1 = new ArrayList<Point2D>();
final var points2 = new ArrayList<Point2D>();
final var length = samples1.size();
for (var i = 0; i < length; i++) {
    final var sample1 = samples1.get(i);
    final var sample2 = samples2.get(i);

    final var point1 = sample1.getPoint();
    final var point2 = sample2.getPoint();

    points1.add(point1);
    points2.add(point2);
}

try {
    final var estimator = new DualImageOfAbsoluteConicInitialCamerasEstimator(fundamentalMatrix,
            points1, points2);
    estimator.setPrincipalPoint(configuration.getPrincipalPointX(), configuration.getPrincipalPointY());
    estimator.setAspectRatio(configuration.getPairedCamerasAspectRatio());
```
File | Line |
---|---|
com/irurueta/ar/sfm/BaseSparseReconstructorConfiguration.java | 435 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructorConfiguration.java | 225 |
```java
private FundamentalMatrixEstimatorMethod mNonRobustFundamentalMatrixEstimatorMethod =
        DEFAULT_NON_ROBUST_FUNDAMENTAL_MATRIX_ESTIMATOR_METHOD;

/**
 * Method to use for robust fundamental matrix estimation.
 * This is only used when general scenes are allowed.
 */
private RobustEstimatorMethod mRobustFundamentalMatrixEstimatorMethod =
        DEFAULT_ROBUST_FUNDAMENTAL_MATRIX_ESTIMATOR_METHOD;

/**
 * Indicates whether estimated fundamental matrix is refined among all found inliers.
 * This is only used when general scenes are allowed.
 */
private boolean refineFundamentalMatrix = DEFAULT_REFINE_FUNDAMENTAL_MATRIX;

/**
 * Indicates whether covariance of estimated fundamental matrix is kept after the
 * estimation.
 * This is only used when general scenes are allowed.
 */
private boolean keepFundamentalMatrixCovariance = DEFAULT_KEEP_FUNDAMENTAL_MATRIX_COVARIANCE;

/**
 * Confidence of robustly estimated fundamental matrix.
 * This is only used when general scenes are allowed.
 */
private double fundamentalMatrixConfidence = DEFAULT_FUNDAMENTAL_MATRIX_CONFIDENCE;

/**
 * Maximum number of iterations to robustly estimate fundamental matrix.
 * This is only used when general scenes are allowed.
 */
private int fundamentalMatrixMaxIterations = DEFAULT_FUNDAMENTAL_MATRIX_MAX_ITERATIONS;

/**
 * Threshold to determine whether samples for robust fundamental matrix estimation are
 * inliers or not.
 * This is only used when general scenes are allowed.
 */
private double fundamentalMatrixThreshold = DEFAULT_FUNDAMENTAL_MATRIX_THRESHOLD;

/**
 * Indicates whether inliers must be kept during robust fundamental matrix estimation.
 * This is only used when general scenes are allowed.
 */
private boolean fundamentalMatrixComputeAndKeepInliers = DEFAULT_FUNDAMENTAL_MATRIX_COMPUTE_AND_KEEP_INLIERS;

/**
 * Indicates whether residuals must be computed and kept during robust fundamental matrix
 * estimation.
 * This is only used when general scenes are allowed.
 */
private boolean fundamentalMatrixComputeAndKeepResiduals =
        DEFAULT_FUNDAMENTAL_MATRIX_COMPUTE_AND_KEEP_RESIDUALS;

/**
 * Method to use for initial cameras' estimation.
 */
private InitialCamerasEstimatorMethod initialCamerasEstimatorMethod = DEFAULT_INITIAL_CAMERAS_ESTIMATOR_METHOD;

/**
 * Indicates whether a homogeneous point triangulator is used for point triangulation
 * when Dual Absolute Quadric (DAQ) camera initialization is used.
 */
private boolean daqUseHomogeneousPointTriangulator = DEFAULT_DAQ_USE_HOMOGENEOUS_POINT_TRIANGULATOR;

/**
 * Aspect ratio for initial cameras.
 */
private double initialCamerasAspectRatio = DEFAULT_INITIAL_CAMERAS_ASPECT_RATIO;

/**
 * Horizontal principal point value to use for initial cameras estimation using
 * Dual Image of Absolute Conic (DIAC) or Dual Absolute Quadric (DAQ) methods.
 */
private double principalPointX = DEFAULT_INITIAL_CAMERAS_PRINCIPAL_POINT_X;

/**
 * Vertical principal point value to use for initial cameras estimation using
 * Dual Image of Absolute Conic (DIAC) or Dual Absolute Quadric (DAQ) methods.
 */
private double principalPointY = DEFAULT_INITIAL_CAMERAS_PRINCIPAL_POINT_Y;

/**
 * Corrector type to use for point triangulation when initial cameras are being estimated
 * using either Dual Image of Absolute Conic (DIAC) or essential matrix methods, or null
 * if no corrector is used.
 */
private CorrectorType initialCamerasCorrectorType = DEFAULT_INITIAL_CAMERAS_CORRECTOR_TYPE;

/**
 * Value indicating whether valid triangulated points are marked during initial
 * cameras estimation using either Dual Image of Absolute Conic (DIAC) or essential
 * matrix methods.
 */
private boolean initialCamerasMarkValidTriangulatedPoints =
        DEFAULT_INITIAL_CAMERAS_MARK_VALID_TRIANGULATED_POINTS;

/**
 * Intrinsic parameters of first camera estimated using the essential matrix
 * method.
 */
private PinholeCameraIntrinsicParameters initialIntrinsic1;

/**
 * Intrinsic parameters of second camera estimated using the essential matrix
 * method.
 */
private PinholeCameraIntrinsicParameters initialIntrinsic2;

/**
 * Indicates whether a general scene (points lying in a general 3D position) is
 * allowed.
 * When true, an initial geometry estimation is attempted for general points.
 */
private boolean allowGeneralScene = DEFAULT_ALLOW_GENERAL_SCENE;

/**
 * Indicates whether a planar scene (points lying in a 3D plane) is allowed.
 * When true, an initial geometry estimation is attempted for planar points.
 */
private boolean allowPlanarScene = DEFAULT_ALLOW_PLANAR_SCENE;

/**
 * Robust method to use for planar homography estimation.
 * This is only used when planar scenes are allowed.
 */
private RobustEstimatorMethod robustPlanarHomographyEstimatorMethod =
        DEFAULT_ROBUST_PLANAR_HOMOGRAPHY_ESTIMATOR_METHOD;

/**
 * Indicates whether planar homography is refined using all found inliers or not.
 * This is only used when planar scenes are allowed.
 */
private boolean refinePlanarHomography = DEFAULT_REFINE_PLANAR_HOMOGRAPHY;

/**
 * Indicates whether planar homography covariance is kept after estimation.
 * This is only used when planar scenes are allowed.
 */
private boolean keepPlanarHomographyCovariance = DEFAULT_KEEP_PLANAR_HOMOGRAPHY_COVARIANCE;

/**
 * Confidence of robustly estimated planar homography. By default, this is 99%.
 * This is only used when planar scenes are allowed.
 */
private double planarHomographyConfidence = DEFAULT_PLANAR_HOMOGRAPHY_CONFIDENCE;

/**
 * Maximum number of iterations to make while robustly estimating planar homography.
 * By default, this is 5000.
 * This is only used when planar scenes are allowed.
 */
private int planarHomographyMaxIterations = DEFAULT_PLANAR_HOMOGRAPHY_MAX_ITERATIONS;

/**
 * Threshold to determine whether samples for robust projective 2D transformation estimation
 * are inliers or not.
 */
private double planarHomographyThreshold = DEFAULT_PLANAR_HOMOGRAPHY_THRESHOLD;

/**
 * Value indicating that inlier data is kept after robust planar homography estimation.
 * This is only used when planar scenes are allowed.
 */
private boolean planarHomographyComputeAndKeepInliers = DEFAULT_PLANAR_HOMOGRAPHY_COMPUTE_AND_KEEP_INLIERS;

/**
 * Value indicating that residual data is kept after robust planar homography estimation.
 * This is only used when planar scenes are allowed.
 */
private boolean planarHomographyComputeAndKeepResiduals = DEFAULT_PLANAR_HOMOGRAPHY_COMPUTE_AND_KEEP_RESIDUALS;
```
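These duplicated settings are consumed through getters such as those visible in the reconstructor fragments earlier in this report. A self-contained sketch follows, using a hypothetical read-only interface whose method names are taken from those fragments:

```java
// Hypothetical read-only view of the duplicated planar-homography settings;
// method names match the getters used by the reconstructor fragments above.
interface PlanarHomographySettings {
    boolean isPlanarHomographyRefined();
    boolean isPlanarHomographyCovarianceKept();
    double getPlanarHomographyConfidence();
    int getPlanarHomographyMaxIterations();
    double getPlanarHomographyThreshold();
}

final class SettingsDump {
    // Prints the settings either duplicated configuration class would expose.
    static void print(final PlanarHomographySettings settings) {
        System.out.println("refined: " + settings.isPlanarHomographyRefined());
        System.out.println("keep covariance: " + settings.isPlanarHomographyCovarianceKept());
        System.out.println("confidence: " + settings.getPlanarHomographyConfidence());
        System.out.println("max iterations: " + settings.getPlanarHomographyMaxIterations());
        System.out.println("threshold: " + settings.getPlanarHomographyThreshold());
    }
}
```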
File | Line |
---|---|
com/irurueta/ar/sfm/DualImageOfAbsoluteConicInitialCamerasEstimator.java | 246 |
com/irurueta/ar/sfm/EssentialMatrixInitialCamerasEstimator.java | 401 |
```java
@Override
public void estimate() throws LockedException, NotReadyException, InitialCamerasEstimationFailedException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (!isReady()) {
        throw new NotReadyException();
    }

    try {
        locked = true;

        if (listener != null) {
            listener.onStart(this);
        }

        if (triangulatePoints) {
            triangulatedPoints = new ArrayList<>();
        } else {
            triangulatedPoints = null;
        }

        final var nPoints = leftPoints.size();
        if (markValidTriangulatedPoints) {
            validTriangulatedPoints = new BitSet(nPoints);
        } else {
            validTriangulatedPoints = null;
        }

        if (estimatedLeftCamera == null) {
            estimatedLeftCamera = new PinholeCamera();
        }
        if (estimatedRightCamera == null) {
            estimatedRightCamera = new PinholeCamera();
        }
```
File | Line |
---|---|
com/irurueta/ar/slam/AbsoluteOrientationConstantVelocityModelSlamEstimator.java | 401 |
com/irurueta/ar/slam/ConstantVelocityModelSlamEstimator.java | 369 |
```java
lastAngularSpeedX = stateAngularSpeedX;
lastAngularSpeedY = stateAngularSpeedY;
lastAngularSpeedZ = stateAngularSpeedZ;
lastTimestampNanos = timestamp;

if (listener != null) {
    listener.onFullSampleProcessed(this);
}
}

/**
 * Resets position, linear velocity, linear acceleration, orientation and
 * angular speed to provided values.
 * This method implementation also resets Kalman filter state.
 *
 * @param statePositionX position along x-axis expressed in meters (m).
 * @param statePositionY position along y-axis expressed in meters (m).
 * @param statePositionZ position along z-axis expressed in meters (m).
 * @param stateVelocityX linear velocity along x-axis expressed in meters
 *                       per second (m/s).
 * @param stateVelocityY linear velocity along y-axis expressed in meters
 *                       per second (m/s).
 * @param stateVelocityZ linear velocity along z-axis expressed in meters
 *                       per second (m/s).
 * @param stateAccelerationX linear acceleration along x-axis expressed in
 *                           meters per squared second (m/s^2).
 * @param stateAccelerationY linear acceleration along y-axis expressed in
 *                           meters per squared second (m/s^2).
 * @param stateAccelerationZ linear acceleration along z-axis expressed in
 *                           meters per squared second (m/s^2).
 * @param stateQuaternionA A value of orientation quaternion.
 * @param stateQuaternionB B value of orientation quaternion.
 * @param stateQuaternionC C value of orientation quaternion.
 * @param stateQuaternionD D value of orientation quaternion.
 * @param stateAngularSpeedX angular speed along x-axis expressed in radians
 *                           per second (rad/s).
 * @param stateAngularSpeedY angular speed along y-axis expressed in radians
 *                           per second (rad/s).
 * @param stateAngularSpeedZ angular speed along z-axis expressed in radians
 *                           per second (rad/s).
 */
@Override
protected void reset(
        final double statePositionX, final double statePositionY, final double statePositionZ,
        final double stateVelocityX, final double stateVelocityY, final double stateVelocityZ,
        final double stateAccelerationX, final double stateAccelerationY, final double stateAccelerationZ,
        final double stateQuaternionA, final double stateQuaternionB, final double stateQuaternionC,
        final double stateQuaternionD,
        final double stateAngularSpeedX, final double stateAngularSpeedY, final double stateAngularSpeedZ) {
    super.reset(statePositionX, statePositionY, statePositionZ,
            stateVelocityX, stateVelocityY, stateVelocityZ,
            stateAccelerationX, stateAccelerationY, stateAccelerationZ,
            stateQuaternionA, stateQuaternionB, stateQuaternionC, stateQuaternionD,
            stateAngularSpeedX, stateAngularSpeedY, stateAngularSpeedZ);
    //noinspection ConstantValue
    if (stateOrientation != null) {
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 323 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 464 |
```java
    counter++;
}

final var decomposer = new SingularValueDecomposer(a);
decomposer.decompose();
if (decomposer.getNullity() > 1) {
    // homographies constitute a degenerate camera movement.
    // A linear combination of possible IAC's exist (i.e. solution is
    // not unique up to scale)
    throw new ImageOfAbsoluteConicEstimatorException();
}
final var v = decomposer.getV();

// use last column of V as IAC vector
// the last column of V contains IAC matrix (B), which is symmetric
// and positive definite, ordered as follows: B11, B12, B22, B13,
// B23, B33
final var b11 = v.getElementAt(0, 5);
final var b12 = v.getElementAt(1, 5);
final var b22 = v.getElementAt(2, 5);
final var b13 = v.getElementAt(3, 5);
final var b23 = v.getElementAt(4, 5);
final var b33 = v.getElementAt(5, 5);

// A conic is defined as [A B D]
//                       [B C E]
//                       [D E F]
return new ImageOfAbsoluteConic(b11, b12, b22, b13, b23, b33);
} catch (final AlgebraException e) {
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 715 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 861 |
```java
a.setElementAt(counter, 2, a.getElementAt(counter, 2) / rowNorm);
counter++;

// fill second equation
a.setElementAt(counter, 0, Math.pow(h11, 2.0) - Math.pow(h12, 2.0));
a.setElementAt(counter, 1, Math.pow(h21, 2.0) - Math.pow(h22, 2.0));
a.setElementAt(counter, 2, Math.pow(h31, 2.0) - Math.pow(h32, 2.0));

// normalize row
rowNorm = Math.sqrt(Math.pow(a.getElementAt(counter, 0), 2.0)
        + Math.pow(a.getElementAt(counter, 1), 2.0)
        + Math.pow(a.getElementAt(counter, 2), 2.0));
```
File | Line |
---|---|
com/irurueta/ar/epipolar/estimators/HomographyDecomposer.java | 116 |
com/irurueta/ar/epipolar/estimators/PlanarFundamentalMatrixEstimator.java | 104 |
```java
        final HomographyDecomposerListener listener) {
    this(homography, leftIntrinsics, rightIntrinsics);
    this.listener = listener;
}

/**
 * Gets 2D transformation relating two views (left view to right view).
 *
 * @return 2D transformation relating two views.
 */
public Transformation2D getHomography() {
    return homography;
}

/**
 * Sets 2D transformation relating two views (left view to right view).
 *
 * @param homography 2D transformation relating two views.
 * @throws LockedException if estimator is locked.
 */
public void setHomography(final Transformation2D homography) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    this.homography = homography;
}

/**
 * Gets intrinsic parameters to be used on left view.
 *
 * @return intrinsic parameters to be used on left view.
 */
public PinholeCameraIntrinsicParameters getLeftIntrinsics() {
    return leftIntrinsics;
}

/**
 * Sets intrinsic parameters to be used on left view.
 *
 * @param leftIntrinsics intrinsic parameters to be used on left view.
 * @throws LockedException if estimator is locked.
 */
public void setLeftIntrinsics(final PinholeCameraIntrinsicParameters leftIntrinsics) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    this.leftIntrinsics = leftIntrinsics;
}

/**
 * Gets intrinsic parameters to be used on right view.
 *
 * @return intrinsic parameters to be used on right view.
 */
public PinholeCameraIntrinsicParameters getRightIntrinsics() {
    return rightIntrinsics;
}

/**
 * Sets intrinsic parameters to be used on right view.
 *
 * @param rightIntrinsics intrinsic parameters to be used on right view.
 * @throws LockedException if estimator is locked.
 */
public void setRightIntrinsics(final PinholeCameraIntrinsicParameters rightIntrinsics) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    this.rightIntrinsics = rightIntrinsics;
}

/**
 * Gets listener to handle events raised by this instance.
 *
 * @return listener to handle events raised by this instance.
 */
public HomographyDecomposerListener getListener() {
```
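A short configuration sketch for the duplicated accessors above. Only the setters shown in the fragment are used; how decomposition is subsequently triggered is outside this fragment, and the geometry import paths are inferred rather than confirmed:

```java
import com.irurueta.ar.epipolar.estimators.HomographyDecomposer;
import com.irurueta.geometry.PinholeCameraIntrinsicParameters;
import com.irurueta.geometry.Transformation2D;

public final class HomographyDecomposerSetup {
    // Configures a decomposer using the duplicated accessors shown above.
    static void configure(final HomographyDecomposer decomposer, final Transformation2D homography,
                          final PinholeCameraIntrinsicParameters left,
                          final PinholeCameraIntrinsicParameters right) throws Exception {
        decomposer.setHomography(homography);       // left view to right view
        decomposer.setLeftIntrinsics(left);
        decomposer.setRightIntrinsics(right);
    }
}
```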
File | Line |
---|---|
com/irurueta/ar/sfm/LMSEInhomogeneousSinglePoint3DTriangulator.java | 194 |
com/irurueta/ar/sfm/WeightedHomogeneousSinglePoint3DTriangulator.java | 343 |
com/irurueta/ar/sfm/WeightedInhomogeneousSinglePoint3DTriangulator.java | 347 |
```java
camera = cameras.get(i);

// to increase accuracy
point.normalize();
camera.normalize();

final var homX = point.getHomX();
final var homY = point.getHomY();
final var homW = point.getHomW();

// pick rows of camera corresponding to different planes
// (we do not normalize planes, as it would introduce errors)

// 1st camera row (p1T)
camera.verticalAxisPlane(verticalAxisPlane);
// 2nd camera row (p2T)
camera.horizontalAxisPlane(horizontalAxisPlane);
// 3rd camera row (p3T)
camera.principalPlane(principalPlane);

// 1st equation
a.setElementAt(row, 0, homX * principalPlane.getA() - homW * verticalAxisPlane.getA());
a.setElementAt(row, 1, homX * principalPlane.getB() - homW * verticalAxisPlane.getB());
a.setElementAt(row, 2, homX * principalPlane.getC() - homW * verticalAxisPlane.getC());
```
File | Line |
---|---|
com/irurueta/ar/sfm/MSACRobustSinglePoint3DTriangulator.java | 110 |
com/irurueta/ar/sfm/RANSACRobustSinglePoint3DTriangulator.java | 111 |
```java
public MSACRobustSinglePoint3DTriangulator(
        final List<Point2D> points, final List<PinholeCamera> cameras,
        final RobustSinglePoint3DTriangulatorListener listener) {
    super(points, cameras, listener);
    threshold = DEFAULT_THRESHOLD;
}

/**
 * Returns threshold to determine whether points are inliers or not when
 * testing possible estimation solutions.
 * The threshold refers to the amount of error (i.e. Euclidean distance) a
 * possible solution has on projected 2D points.
 *
 * @return threshold to determine whether points are inliers or not when
 *         testing possible estimation solutions.
 */
public double getThreshold() {
    return threshold;
}

/**
 * Sets threshold to determine whether points are inliers or not when
 * testing possible estimation solutions.
 * The threshold refers to the amount of error (i.e. Euclidean distance) a
 * possible solution has on projected 2D points.
 *
 * @param threshold threshold to be set.
 * @throws IllegalArgumentException if provided value is equal or less than
 *                                  zero.
 * @throws LockedException if robust estimator is locked because an
 *                         estimation is already in progress.
 */
public void setThreshold(final double threshold) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (threshold <= MIN_THRESHOLD) {
        throw new IllegalArgumentException();
    }
    this.threshold = threshold;
}

/**
 * Triangulates provided matched 2D points being projected by each
 * corresponding camera into a single 3D point.
 * At least 2 matched 2D points and their corresponding 2 cameras are
 * required to compute triangulation. If more views are provided, an
 * averaged solution can be found.
 *
 * @return computed triangulated 3D point.
 * @throws LockedException if this instance is locked.
 * @throws NotReadyException if lists of points and cameras don't have the
 *                           same length or less than 2 views are provided.
 * @throws RobustEstimatorException if estimation fails for any reason
 *                                  (i.e. numerical instability, no solution available, etc).
 */
@SuppressWarnings("DuplicatedCode")
@Override
public Point3D triangulate() throws LockedException, NotReadyException, RobustEstimatorException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (!isReady()) {
        throw new NotReadyException();
    }

    final var innerEstimator = new MSACRobustEstimator<Point3D>(new MSACRobustEstimatorListener<>() {
```
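A usage sketch of the duplicated triangulator API, using only the constructor, `setThreshold` and `triangulate` calls shown above. Imports are inferred from this report's file paths and the threshold value is illustrative:

```java
import com.irurueta.ar.sfm.MSACRobustSinglePoint3DTriangulator;
import com.irurueta.ar.sfm.RobustSinglePoint3DTriangulatorListener;
import com.irurueta.geometry.PinholeCamera;
import com.irurueta.geometry.Point2D;
import com.irurueta.geometry.Point3D;

import java.util.List;

public final class TriangulationExample {
    // points.get(i) is the observation of the same 3D point seen by
    // cameras.get(i); at least two views are required.
    static Point3D triangulate(final List<Point2D> points, final List<PinholeCamera> cameras,
                               final RobustSinglePoint3DTriangulatorListener listener) throws Exception {
        final var triangulator = new MSACRobustSinglePoint3DTriangulator(points, cameras, listener);
        triangulator.setThreshold(0.5); // max reprojection error in pixels (illustrative)
        return triangulator.triangulate();
    }
}
```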
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEDualAbsoluteQuadricEstimator.java | 151 |
com/irurueta/ar/calibration/estimators/WeightedDualAbsoluteQuadricEstimator.java | 318 |
```java
}

/**
 * Estimates the Dual Absolute Quadric using provided cameras.
 *
 * @param result instance where estimated Dual Absolute Quadric (DAQ) will
 *               be stored.
 * @throws LockedException if estimator is locked.
 * @throws NotReadyException if valid input data has not yet been provided.
 * @throws DualAbsoluteQuadricEstimatorException if an error occurs during
 *                                               estimation, usually because input data is not valid or
 *                                               numerically unstable.
 */
@Override
public void estimate(final DualAbsoluteQuadric result) throws LockedException, NotReadyException,
        DualAbsoluteQuadricEstimatorException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (!isReady()) {
        throw new NotReadyException();
    }

    try {
        locked = true;
        if (listener != null) {
            listener.onEstimateStart(this);
        }

        if (principalPointAtOrigin) {
            if (zeroSkewness) {
                if (focalDistanceAspectRatioKnown) {
                    estimateZeroSkewnessPrincipalPointAtOriginAndKnownFocalDistanceAspectRatio(result);
                } else {
                    estimateZeroSkewnessAndPrincipalPointAtOrigin(result);
                }
            } else {
                estimatePrincipalPointAtOrigin(result);
            }
        }

        if (listener != null) {
            listener.onEstimateEnd(this);
        }
    } finally {
        locked = false;
    }
}

/**
 * Returns type of Dual Absolute Quadric estimator.
 *
 * @return type of DAQ estimator.
 */
@Override
public DualAbsoluteQuadricEstimatorType getType() {
    return DualAbsoluteQuadricEstimatorType.LMSE_DUAL_ABSOLUTE_QUADRIC_ESTIMATOR;
```
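A usage sketch of the duplicated `estimate(result)` entry point. The no-argument `DualAbsoluteQuadric` constructor and import paths are assumptions; everything else appears in the fragment above:

```java
import com.irurueta.ar.calibration.DualAbsoluteQuadric;
import com.irurueta.ar.calibration.estimators.LMSEDualAbsoluteQuadricEstimator;

public final class DaqExample {
    // The estimator is assumed to already have its input cameras configured;
    // how cameras are provided is not shown in this fragment.
    static DualAbsoluteQuadric estimate(final LMSEDualAbsoluteQuadricEstimator estimator) throws Exception {
        final var result = new DualAbsoluteQuadric(); // assumed no-arg constructor
        if (estimator.isReady()) {
            estimator.estimate(result); // fills `result` in place
        }
        return result;
    }
}
```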
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEDualAbsoluteQuadricEstimator.java | 265 |
com/irurueta/ar/calibration/estimators/WeightedDualAbsoluteQuadricEstimator.java | 440 |
com/irurueta/ar/calibration/estimators/WeightedDualAbsoluteQuadricEstimator.java | 553 |
com/irurueta/ar/calibration/estimators/WeightedDualAbsoluteQuadricEstimator.java | 661 |
```java
camera.normalize();
cameraMatrix = camera.getInternalMatrix();

p11 = cameraMatrix.getElementAt(0, 0);
p21 = cameraMatrix.getElementAt(1, 0);
p31 = cameraMatrix.getElementAt(2, 0);

p12 = cameraMatrix.getElementAt(0, 1);
p22 = cameraMatrix.getElementAt(1, 1);
p32 = cameraMatrix.getElementAt(2, 1);

p13 = cameraMatrix.getElementAt(0, 2);
p23 = cameraMatrix.getElementAt(1, 2);
p33 = cameraMatrix.getElementAt(2, 2);

p14 = cameraMatrix.getElementAt(0, 3);
p24 = cameraMatrix.getElementAt(1, 3);
p34 = cameraMatrix.getElementAt(2, 3);
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEDualAbsoluteQuadricEstimator.java | 356 |
com/irurueta/ar/calibration/estimators/WeightedDualAbsoluteQuadricEstimator.java | 440 |
com/irurueta/ar/calibration/estimators/WeightedDualAbsoluteQuadricEstimator.java | 553 |
com/irurueta/ar/calibration/estimators/WeightedDualAbsoluteQuadricEstimator.java | 661 |
```java
camera.normalize();
cameraMatrix = camera.getInternalMatrix();

p11 = cameraMatrix.getElementAt(0, 0);
p21 = cameraMatrix.getElementAt(1, 0);
p31 = cameraMatrix.getElementAt(2, 0);

p12 = cameraMatrix.getElementAt(0, 1);
p22 = cameraMatrix.getElementAt(1, 1);
p32 = cameraMatrix.getElementAt(2, 1);

p13 = cameraMatrix.getElementAt(0, 2);
p23 = cameraMatrix.getElementAt(1, 2);
p33 = cameraMatrix.getElementAt(2, 2);

p14 = cameraMatrix.getElementAt(0, 3);
p24 = cameraMatrix.getElementAt(1, 3);
p34 = cameraMatrix.getElementAt(2, 3);
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEDualAbsoluteQuadricEstimator.java | 443 |
com/irurueta/ar/calibration/estimators/WeightedDualAbsoluteQuadricEstimator.java | 440 |
com/irurueta/ar/calibration/estimators/WeightedDualAbsoluteQuadricEstimator.java | 553 |
com/irurueta/ar/calibration/estimators/WeightedDualAbsoluteQuadricEstimator.java | 661 |
```java
camera.normalize();
cameraMatrix = camera.getInternalMatrix();

p11 = cameraMatrix.getElementAt(0, 0);
p21 = cameraMatrix.getElementAt(1, 0);
p31 = cameraMatrix.getElementAt(2, 0);

p12 = cameraMatrix.getElementAt(0, 1);
p22 = cameraMatrix.getElementAt(1, 1);
p32 = cameraMatrix.getElementAt(2, 1);

p13 = cameraMatrix.getElementAt(0, 2);
p23 = cameraMatrix.getElementAt(1, 2);
p33 = cameraMatrix.getElementAt(2, 2);

p14 = cameraMatrix.getElementAt(0, 3);
p24 = cameraMatrix.getElementAt(1, 3);
p34 = cameraMatrix.getElementAt(2, 3);
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 812 |
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 988 |
```java
double rowNorm;
for (final var homography : homographies) {
    // convert homography into projective so it can be normalized
    homography.asMatrix(h);
    if (t == null) {
        t = new ProjectiveTransformation2D(h);
    } else {
        t.setT(h);
    }
    // normalize
    t.normalize();

    // obtain elements of projective transformation matrix
    // there is no need to retrieve internal matrix h, as we already
    // hold a reference
    h11 = h.getElementAt(0, 0);
    h12 = h.getElementAt(0, 1);
    h21 = h.getElementAt(1, 0);
    h22 = h.getElementAt(1, 1);
    h31 = h.getElementAt(2, 0);
    h32 = h.getElementAt(2, 1);

    // fill first equation
    a.setElementAt(counter, 0, h11 * h12 + h21 * h22 / sqrAspectRatio);
    a.setElementAt(counter, 1, h11 * h32 + h31 * h12);
```
File | Line |
---|---|
com/irurueta/ar/sfm/BaseSlamPairedViewsSparseReconstructor.java | 206 |
com/irurueta/ar/sfm/BaseSlamSparseReconstructor.java | 159 |
```java
}

/**
 * Configures calibration data on SLAM estimator if available.
 */
protected void setUpCalibrationData() {
    final var calibrationData = configuration.getCalibrationData();
    if (calibrationData != null) {
        slamEstimator.setCalibrationData(calibrationData);
    }
}

/**
 * Configures listener of SLAM estimator.
 */
protected void setUpSlamEstimatorListener() {
    slamEstimator.setListener(new BaseSlamEstimator.BaseSlamEstimatorListener<>() {
        @Override
        public void onFullSampleReceived(final BaseSlamEstimator<D> estimator) {
            // not used
        }

        @Override
        public void onFullSampleProcessed(final BaseSlamEstimator<D> estimator) {
            notifySlamStateAndCamera();
        }

        @Override
        public void onCorrectWithPositionMeasure(final BaseSlamEstimator<D> estimator) {
            // not used
        }

        @Override
        public void onCorrectedWithPositionMeasure(final BaseSlamEstimator<D> estimator) {
            notifySlamStateAndCamera();
        }

        private void notifySlamStateAndCamera() {
            notifySlamStateIfNeeded();
            notifySlamCameraIfNeeded();
        }
    });
}
```
File | Line |
---|---|
com/irurueta/ar/slam/AbsoluteOrientationConstantVelocityModelSlamEstimator.java | 470 |
com/irurueta/ar/slam/ConstantVelocityModelSlamEstimator.java | 424 |
```java
if (x != null) {
    // position
    x[0] = statePositionX;
    x[1] = statePositionY;
    x[2] = statePositionZ;

    // quaternion
    x[3] = stateQuaternionA;
    x[4] = stateQuaternionB;
    x[5] = stateQuaternionC;
    x[6] = stateQuaternionD;

    // velocity
    x[7] = stateVelocityX;
    x[8] = stateVelocityY;
    x[9] = stateVelocityZ;

    // angular speed
    x[10] = stateAngularSpeedX;
    x[11] = stateAngularSpeedY;
    x[12] = stateAngularSpeedZ;

    try {
        // set initial Kalman filter state (state pre and post must be two
        // different instances!)
        kalmanFilter.getStatePre().fromArray(x);
        kalmanFilter.getStatePost().fromArray(x);
    } catch (final WrongSizeException ignore) {
        // never thrown
    }
}

error = false;
lastTimestampNanos = -1;
predictionAvailable = false;
}

/**
 * Updates state data of the device by using the state matrix obtained after
 * prediction from the Kalman filter.
 * To ensure that the state follows proper values (especially on
 * quaternions), we keep x values, which have been predicted using the state
 * predictor, which uses analytical values.
 * We then update x using the latest Kalman filter state for the next
 * iteration on the state predictor.
 *
 * @param state state matrix obtained from Kalman filter.
 */
private void updatePredictedState(Matrix state) {
```
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 1371 |
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 1984 |
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 2074 |
```java
        final PinholeCameraIntrinsicParameters intrinsic1, final PinholeCameraIntrinsicParameters intrinsic2) {
    final var fundamentalMatrix = currentEstimatedFundamentalMatrix.getFundamentalMatrix();

    // use all points used for fundamental matrix estimation
    final var samples1 = currentEstimatedFundamentalMatrix.getLeftSamples();
    final var samples2 = currentEstimatedFundamentalMatrix.getRightSamples();

    final var points1 = new ArrayList<Point2D>();
    final var points2 = new ArrayList<Point2D>();
    final var length = samples1.size();
    for (var i = 0; i < length; i++) {
        final var sample1 = samples1.get(i);
        final var sample2 = samples2.get(i);

        final var point1 = sample1.getPoint();
        final var point2 = sample2.getPoint();

        points1.add(point1);
        points2.add(point2);
    }

    try {
        final var estimator = new EssentialMatrixInitialCamerasEstimator(fundamentalMatrix, intrinsic1, intrinsic2,
```
File | Line |
---|---|
com/irurueta/ar/sfm/BaseSlamSparseReconstructor.java | 409 |
com/irurueta/ar/sfm/BaseSlamTwoViewsSparseReconstructor.java | 313 |
```java
    intrinsicParameters = configuration.getAdditionalCamerasIntrinsics();
}
if (intrinsicParameters == null) {
    return;
}

final var positionX = slamEstimator.getStatePositionX();
final var positionY = slamEstimator.getStatePositionY();
final var positionZ = slamEstimator.getStatePositionZ();
slamPosition.setInhomogeneousCoordinates(positionX, positionY, positionZ);

final var quaternionA = slamEstimator.getStateQuaternionA();
final var quaternionB = slamEstimator.getStateQuaternionB();
final var quaternionC = slamEstimator.getStateQuaternionC();
final var quaternionD = slamEstimator.getStateQuaternionD();
slamRotation.setA(quaternionA);
slamRotation.setB(quaternionB);
slamRotation.setC(quaternionC);
slamRotation.setD(quaternionD);

slamCamera.setIntrinsicAndExtrinsicParameters(intrinsicParameters, slamRotation, slamPosition);

//noinspection unchecked
listener.onSlamCameraEstimated((R) this, slamCamera);
} catch (final GeometryException ignore) {
```
File | Line |
---|---|
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 969 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 1056 |
```java
private boolean estimateInitialCamerasAndPointsDIAC() {
    final var fundamentalMatrix = estimatedFundamentalMatrix.getFundamentalMatrix();

    // use inlier points used for fundamental matrix estimation
    final var samples1 = estimatedFundamentalMatrix.getLeftSamples();
    final var samples2 = estimatedFundamentalMatrix.getRightSamples();

    final var points1 = new ArrayList<Point2D>();
    final var points2 = new ArrayList<Point2D>();
    final var length = samples1.size();
    for (var i = 0; i < length; i++) {
        final var sample1 = samples1.get(i);
        final var sample2 = samples2.get(i);

        final var point1 = sample1.getPoint();
        final var point2 = sample2.getPoint();

        points1.add(point1);
        points2.add(point2);
    }

    try {
        final var estimator = new DualImageOfAbsoluteConicInitialCamerasEstimator(fundamentalMatrix, points1,
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/PROMedSDualAbsoluteQuadricRobustEstimator.java | 189 |
com/irurueta/ar/sfm/PROMedSRobustSinglePoint3DTriangulator.java | 204 |
```java
    this(cameras, listener);
    internalSetQualityScores(qualityScores);
}

/**
 * Returns threshold to be used to keep the algorithm iterating in case that best estimated
 * threshold using median of residuals is not small enough. Once a solution is found that
 * generates a threshold below this value, the algorithm will stop.
 * The stop threshold can be used to prevent the PROMedS algorithm iterating too many times in
 * cases where samples have a very similar accuracy. For instance, in cases where proportion of
 * outliers is very small (close to 0%), and samples are very accurate (i.e. 1e-6), the
 * algorithm would iterate for a long time trying to find the best solution when indeed there
 * is no need to do that if a reasonable threshold has already been reached.
 * Because of this behaviour the stop threshold can be set to a value much lower than the one
 * typically used in RANSAC, and yet the algorithm could still produce even smaller thresholds
 * in estimated results.
 *
 * @return stop threshold to stop the algorithm prematurely when a certain accuracy has been
 * reached.
 */
public double getStopThreshold() {
    return stopThreshold;
}

/**
 * Sets threshold to be used to keep the algorithm iterating in case that best estimated
 * threshold using median of residuals is not small enough. Once a solution is found that
 * generates a threshold below this value, the algorithm will stop.
 * The stop threshold can be used to prevent the PROMedS algorithm iterating too many times in
 * cases where samples have a very similar accuracy. For instance, in cases where proportion of
 * outliers is very small (close to 0%), and samples are very accurate (i.e. 1e-6), the
 * algorithm would iterate for a long time trying to find the best solution when indeed there
 * is no need to do that if a reasonable threshold has already been reached.
 * Because of this behaviour the stop threshold can be set to a value much lower than the one
 * typically used in RANSAC, and yet the algorithm could still produce even smaller thresholds
 * in estimated results.
 *
 * @param stopThreshold stop threshold to stop the algorithm prematurely when a certain
 * accuracy has been reached.
 * @throws IllegalArgumentException if provided value is zero or negative.
 * @throws LockedException if robust estimator is locked because an estimation is already in
 * progress.
 */
public void setStopThreshold(final double stopThreshold) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (stopThreshold <= MIN_STOP_THRESHOLD) {
        throw new IllegalArgumentException();
    }
    this.stopThreshold = stopThreshold;
}

/**
 * Returns quality scores corresponding to each provided camera. The larger the score value
 * the better the quality of the sampled camera.
 *
 * @return quality scores corresponding to each camera.
 */
@Override
public double[] getQualityScores() {
    return qualityScores;
}

/**
 * Sets quality scores corresponding to each provided camera. The larger the score value the
 * better the quality of the sampled camera.
 *
 * @param qualityScores quality scores corresponding to each camera.
 * @throws LockedException if robust estimator is locked because an estimation is already in
 * progress.
 * @throws IllegalArgumentException if provided quality scores length is smaller than minimum
 * required number of cameras (i.e. 2 cameras).
 */
@Override
public void setQualityScores(final double[] qualityScores) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    internalSetQualityScores(qualityScores);
}

/**
 * Indicates if estimator is ready to start the DAQ estimation. This is true when input data
 * (i.e. cameras) is provided and list contains at least the minimum number of required
 * cameras, and also quality scores are provided.
 *
 * @return true if estimator is ready, false otherwise.
 */
@Override
public boolean isReady() {
    return super.isReady() && qualityScores != null && qualityScores.length == cameras.size();
```
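Both classes in this pair expose the same stop-threshold and quality-score contract. The following is a minimal usage sketch, not confirmed API: the `(cameras, listener)` constructor is inferred from the `this(cameras, listener)` call in the excerpt, a null listener is assumed to be accepted, and the import paths are taken from the table above.

```java
import com.irurueta.ar.calibration.estimators.PROMedSDualAbsoluteQuadricRobustEstimator;
import com.irurueta.geometry.PinholeCamera;

import java.util.List;

public final class PromedsDaqSketch {
    // Hypothetical helper: cameras and per-camera scores come from the caller.
    static void run(final List<PinholeCamera> cameras, final double[] qualityScores) throws Exception {
        // (cameras, listener) constructor assumed from the this(cameras, listener) call above.
        final var estimator = new PROMedSDualAbsoluteQuadricRobustEstimator(cameras, null);
        estimator.setQualityScores(qualityScores); // one score per camera; larger means better
        estimator.setStopThreshold(1e-9);          // may sit far below a typical RANSAC threshold
        if (estimator.isReady()) {
            final var daq = estimator.estimate(); // Dual Absolute Quadric
        }
    }
}
```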
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/PROSACDualAbsoluteQuadricRobustEstimator.java | 177 |
com/irurueta/ar/sfm/PROSACRobustSinglePoint3DTriangulator.java | 187 |
```java
    this(cameras, listener);
    internalSetQualityScores(qualityScores);
}

/**
 * Returns threshold to determine whether cameras are inliers or not when testing possible
 * estimation solutions. The threshold refers to the amount of error a possible solution has.
 *
 * @return threshold to determine whether cameras are inliers or not when testing possible
 * estimation solutions.
 */
public double getThreshold() {
    return threshold;
}

/**
 * Sets threshold to determine whether cameras are inliers or not when testing possible
 * estimation solutions. The threshold refers to the amount of error a possible solution has.
 *
 * @param threshold threshold to determine whether cameras are inliers or not when testing
 * possible estimation solutions.
 * @throws IllegalArgumentException if provided value is equal or less than zero.
 * @throws LockedException if robust estimator is locked because an estimation is already in
 * progress.
 */
public void setThreshold(final double threshold) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (threshold <= MIN_THRESHOLD) {
        throw new IllegalArgumentException();
    }
    this.threshold = threshold;
}

/**
 * Returns quality scores corresponding to each provided camera. The larger the score value
 * the better the quality of the sampled camera.
 *
 * @return quality scores corresponding to each camera.
 */
@Override
public double[] getQualityScores() {
    return qualityScores;
}

/**
 * Sets quality scores corresponding to each provided camera. The larger the score value the
 * better the quality of the sampled camera.
 *
 * @param qualityScores quality scores corresponding to each camera.
 * @throws LockedException if robust estimator is locked because an estimation is already in
 * progress.
 * @throws IllegalArgumentException if provided quality scores length is smaller than minimum
 * required number of cameras (i.e. 2 cameras).
 */
@Override
public void setQualityScores(final double[] qualityScores) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    internalSetQualityScores(qualityScores);
}

/**
 * Indicates if estimator is ready to start the DAQ estimation. This is true when input data
 * (i.e. cameras) is provided and contains at least the minimum number of required cameras,
 * and also quality scores are provided.
 *
 * @return true if estimator is ready, false otherwise.
 */
@Override
public boolean isReady() {
    return super.isReady() && qualityScores != null && qualityScores.length == cameras.size();
```
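The PROSAC variant swaps the stop threshold for an inlier threshold that must be strictly positive. A short sketch under the same assumptions as above (constructor and null listener inferred from the excerpt, not confirmed):

```java
import com.irurueta.ar.calibration.estimators.PROSACDualAbsoluteQuadricRobustEstimator;
import com.irurueta.geometry.PinholeCamera;

import java.util.List;

public final class ProsacDaqSketch {
    static void run(final List<PinholeCamera> cameras, final double[] qualityScores) throws Exception {
        // (cameras, listener) constructor assumed from the this(cameras, listener) call above.
        final var estimator = new PROSACDualAbsoluteQuadricRobustEstimator(cameras, null);
        estimator.setQualityScores(qualityScores);
        estimator.setThreshold(1e-6); // must be > 0, otherwise IllegalArgumentException
        if (estimator.isReady()) {
            final var daq = estimator.estimate();
        }
    }
}
```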
File | Line |
---|---|
com/irurueta/ar/epipolar/estimators/PROSACFundamentalMatrixRobustEstimator.java | 430 |
com/irurueta/ar/epipolar/estimators/RANSACFundamentalMatrixRobustEstimator.java | 226 |
```java
}

/**
 * Indicates whether inliers must be computed and kept.
 *
 * @return true if inliers must be computed and kept, false if inliers only need to be
 * computed but not kept.
 */
public boolean isComputeAndKeepInliersEnabled() {
    return computeAndKeepInliers;
}

/**
 * Specifies whether inliers must be computed and kept.
 *
 * @param computeAndKeepInliers true if inliers must be computed and kept, false if inliers
 * only need to be computed but not kept.
 * @throws LockedException if estimator is locked.
 */
public void setComputeAndKeepInliersEnabled(final boolean computeAndKeepInliers) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    this.computeAndKeepInliers = computeAndKeepInliers;
}

/**
 * Indicates whether residuals must be computed and kept.
 *
 * @return true if residuals must be computed and kept, false if residuals only need to be
 * computed but not kept.
 */
public boolean isComputeAndKeepResidualsEnabled() {
    return computeAndKeepResiduals;
}

/**
 * Specifies whether residuals must be computed and kept.
 *
 * @param computeAndKeepResiduals true if residuals must be computed and kept, false if
 * residuals only need to be computed but not kept.
 * @throws LockedException if estimator is locked.
 */
public void setComputeAndKeepResidualsEnabled(final boolean computeAndKeepResiduals) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    this.computeAndKeepResiduals = computeAndKeepResiduals;
}

/**
 * Estimates a fundamental matrix using a robust estimator and the best set of matched 2D
 * points found using the robust estimator.
 *
 * @return a fundamental matrix.
 * @throws LockedException if robust estimator is locked because an estimation is already in
 * progress.
 * @throws NotReadyException if provided input data is not enough to start the estimation.
 * @throws RobustEstimatorException if estimation fails for any reason (i.e. numerical
 * instability, no solution available, etc).
 */
@SuppressWarnings("DuplicatedCode")
@Override
public FundamentalMatrix estimate() throws LockedException, NotReadyException, RobustEstimatorException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (!isReady()) {
        throw new NotReadyException();
    }
    final var innerEstimator = new PROSACRobustEstimator<FundamentalMatrix>(new PROSACRobustEstimatorListener<>() {
```
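The two flags above control whether inlier and residual data survive `estimate()`. A sketch assuming an already configured estimator (class name taken from the table above):

```java
import com.irurueta.ar.epipolar.estimators.RANSACFundamentalMatrixRobustEstimator;

public final class KeepInliersSketch {
    static void estimateKeepingInliers(final RANSACFundamentalMatrixRobustEstimator estimator) throws Exception {
        estimator.setComputeAndKeepInliersEnabled(true);    // retain inlier data after estimation
        estimator.setComputeAndKeepResidualsEnabled(false); // residuals computed only transiently
        final var fundamentalMatrix = estimator.estimate();
    }
}
```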
File | Line |
---|---|
com/irurueta/ar/slam/AbsoluteOrientationSlamEstimator.java | 521 |
com/irurueta/ar/slam/ConstantVelocityModelSlamEstimator.java | 444 |
```java
        x[15] = stateAngularSpeedZ;

        try {
            // set initial Kalman filter state (state pre and post must be two different instances!)
            kalmanFilter.getStatePre().fromArray(x);
            kalmanFilter.getStatePost().fromArray(x);
        } catch (final WrongSizeException ignore) {
            // never thrown
        }
    }

    error = false;
    lastTimestampNanos = -1;
    predictionAvailable = false;
}

/**
 * Updates state data of the device by using state matrix obtained after prediction from the
 * Kalman filter.
 * To ensure that state follows proper values (especially on quaternions), we keep x values,
 * which have been predicted using the state predictor, which uses analytical values. We then
 * update x using latest Kalman filter state for next iteration on the state predictor.
 *
 * @param state state matrix obtained from Kalman filter.
 */
private void updatePredictedState(final Matrix state) {
    // position
    statePositionX = x[0];
    x[0] = state.getElementAtIndex(0);
    statePositionY = x[1];
    x[1] = state.getElementAtIndex(1);
    statePositionZ = x[2];
    x[2] = state.getElementAtIndex(2);

    // quaternion state predictor is more reliable than Kalman filter; for that reason we
    // ignore predicted quaternion values on Kalman filter and simply keep predicted ones.
    // Besides, typically gyroscope samples are much more reliable than accelerometer ones.
    // For that reason state elements corresponding to quaternion (3 to 6) are not copied into
    // the x array.
    stateQuaternionA = x[3];
    stateQuaternionB = x[4];
    stateQuaternionC = x[5];
    stateQuaternionD = x[6];
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/PROMedSDualAbsoluteQuadricRobustEstimator.java | 189 |
com/irurueta/ar/calibration/estimators/PROMedSImageOfAbsoluteConicRobustEstimator.java | 197 |
com/irurueta/ar/calibration/estimators/PROMedSRadialDistortionRobustEstimator.java | 288 |
com/irurueta/ar/sfm/PROMedSRobustSinglePoint3DTriangulator.java | 204 |
```java
    this(cameras, listener);
    internalSetQualityScores(qualityScores);
}

/**
 * Returns threshold to be used to keep the algorithm iterating in case that best estimated
 * threshold using median of residuals is not small enough. Once a solution is found that
 * generates a threshold below this value, the algorithm will stop.
 * The stop threshold can be used to prevent the PROMedS algorithm iterating too many times in
 * cases where samples have a very similar accuracy. For instance, in cases where proportion of
 * outliers is very small (close to 0%), and samples are very accurate (i.e. 1e-6), the
 * algorithm would iterate for a long time trying to find the best solution when indeed there
 * is no need to do that if a reasonable threshold has already been reached.
 * Because of this behaviour the stop threshold can be set to a value much lower than the one
 * typically used in RANSAC, and yet the algorithm could still produce even smaller thresholds
 * in estimated results.
 *
 * @return stop threshold to stop the algorithm prematurely when a certain accuracy has been
 * reached.
 */
public double getStopThreshold() {
    return stopThreshold;
}

/**
 * Sets threshold to be used to keep the algorithm iterating in case that best estimated
 * threshold using median of residuals is not small enough. Once a solution is found that
 * generates a threshold below this value, the algorithm will stop.
 * The stop threshold can be used to prevent the PROMedS algorithm iterating too many times in
 * cases where samples have a very similar accuracy. For instance, in cases where proportion of
 * outliers is very small (close to 0%), and samples are very accurate (i.e. 1e-6), the
 * algorithm would iterate for a long time trying to find the best solution when indeed there
 * is no need to do that if a reasonable threshold has already been reached.
 * Because of this behaviour the stop threshold can be set to a value much lower than the one
 * typically used in RANSAC, and yet the algorithm could still produce even smaller thresholds
 * in estimated results.
 *
 * @param stopThreshold stop threshold to stop the algorithm prematurely when a certain
 * accuracy has been reached.
 * @throws IllegalArgumentException if provided value is zero or negative.
 * @throws LockedException if robust estimator is locked because an estimation is already in
 * progress.
 */
public void setStopThreshold(final double stopThreshold) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (stopThreshold <= MIN_STOP_THRESHOLD) {
        throw new IllegalArgumentException();
    }
    this.stopThreshold = stopThreshold;
}

/**
 * Returns quality scores corresponding to each provided camera. The larger the score value
 * the better the quality of the sampled camera.
 *
 * @return quality scores corresponding to each camera.
 */
@Override
public double[] getQualityScores() {
    return qualityScores;
}

/**
 * Sets quality scores corresponding to each provided camera. The larger the score value the
 * better the quality of the sampled camera.
 *
 * @param qualityScores quality scores corresponding to each camera.
 * @throws LockedException if robust estimator is locked because an estimation is already in
 * progress.
 * @throws IllegalArgumentException if provided quality scores length is smaller than minimum
 * required number of cameras (i.e. 2 cameras).
 */
@Override
public void setQualityScores(final double[] qualityScores) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    internalSetQualityScores(qualityScores);
}

/**
 * Indicates if estimator is ready to start the DAQ estimation. This is true when input data
 * (i.e. cameras) is provided and list contains at least the minimum number of required
 * cameras, and also quality scores are provided.
 *
 * @return true if estimator is ready, false otherwise.
 */
@Override
public boolean isReady() {
    return super.isReady() && qualityScores != null && qualityScores.length == cameras.size();
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/PROMedSDualAbsoluteQuadricRobustEstimator.java | 388 |
com/irurueta/ar/calibration/estimators/PROMedSImageOfAbsoluteConicRobustEstimator.java | 400 |
com/irurueta/ar/calibration/estimators/PROMedSRadialDistortionRobustEstimator.java | 512 |
com/irurueta/ar/sfm/PROMedSRobustSinglePoint3DTriangulator.java | 428 |
```java
            listener.onEstimateProgressChange(PROMedSDualAbsoluteQuadricRobustEstimator.this, progress);
        }
    }

    @Override
    public double[] getQualityScores() {
        return qualityScores;
    }
});

try {
    locked = true;
    innerEstimator.setConfidence(confidence);
    innerEstimator.setMaxIterations(maxIterations);
    innerEstimator.setProgressDelta(progressDelta);
    return innerEstimator.estimate();
} catch (final com.irurueta.numerical.LockedException e) {
    throw new LockedException(e);
} catch (final com.irurueta.numerical.NotReadyException e) {
    throw new NotReadyException(e);
} finally {
    locked = false;
}
}

/**
 * Returns method being used for robust estimation.
 *
 * @return method being used for robust estimation.
 */
@Override
public RobustEstimatorMethod getMethod() {
    return RobustEstimatorMethod.PROMEDS;
}

/**
 * Sets quality scores corresponding to each camera. This method is used internally and does
 * not check whether instance is locked or not.
 *
 * @param qualityScores quality scores to be set.
 * @throws IllegalArgumentException if provided quality scores length is smaller than the
 * minimum number of required cameras for current settings.
 */
private void internalSetQualityScores(final double[] qualityScores) {
    if (qualityScores.length < daqEstimator.getMinNumberOfRequiredCameras()) {
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/PROSACDualAbsoluteQuadricRobustEstimator.java | 177 |
com/irurueta/ar/calibration/estimators/PROSACImageOfAbsoluteConicRobustEstimator.java | 185 |
com/irurueta/ar/calibration/estimators/PROSACRadialDistortionRobustEstimator.java | 270 |
com/irurueta/ar/sfm/PROSACRobustSinglePoint3DTriangulator.java | 187 |
```java
    this(cameras, listener);
    internalSetQualityScores(qualityScores);
}

/**
 * Returns threshold to determine whether cameras are inliers or not when testing possible
 * estimation solutions. The threshold refers to the amount of error a possible solution has.
 *
 * @return threshold to determine whether cameras are inliers or not when testing possible
 * estimation solutions.
 */
public double getThreshold() {
    return threshold;
}

/**
 * Sets threshold to determine whether cameras are inliers or not when testing possible
 * estimation solutions. The threshold refers to the amount of error a possible solution has.
 *
 * @param threshold threshold to determine whether cameras are inliers or not when testing
 * possible estimation solutions.
 * @throws IllegalArgumentException if provided value is equal or less than zero.
 * @throws LockedException if robust estimator is locked because an estimation is already in
 * progress.
 */
public void setThreshold(final double threshold) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (threshold <= MIN_THRESHOLD) {
        throw new IllegalArgumentException();
    }
    this.threshold = threshold;
}

/**
 * Returns quality scores corresponding to each provided camera. The larger the score value
 * the better the quality of the sampled camera.
 *
 * @return quality scores corresponding to each camera.
 */
@Override
public double[] getQualityScores() {
    return qualityScores;
}

/**
 * Sets quality scores corresponding to each provided camera. The larger the score value the
 * better the quality of the sampled camera.
 *
 * @param qualityScores quality scores corresponding to each camera.
 * @throws LockedException if robust estimator is locked because an estimation is already in
 * progress.
 * @throws IllegalArgumentException if provided quality scores length is smaller than minimum
 * required number of cameras (i.e. 2 cameras).
 */
@Override
public void setQualityScores(final double[] qualityScores) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    internalSetQualityScores(qualityScores);
}

/**
 * Indicates if estimator is ready to start the DAQ estimation. This is true when input data
 * (i.e. cameras) is provided and contains at least the minimum number of required cameras,
 * and also quality scores are provided.
 *
 * @return true if estimator is ready, false otherwise.
 */
@Override
public boolean isReady() {
    return super.isReady() && qualityScores != null && qualityScores.length == cameras.size();
```
File | Line |
---|---|
com/irurueta/ar/sfm/BaseAbsoluteOrientationSlamSparseReconstructor.java | 166 |
com/irurueta/ar/sfm/BaseSlamSparseReconstructor.java | 253 |
```java
slamEstimator.correctWithPositionMeasure(euclideanCamera2.getCameraCenter(),
        configuration.getCameraPositionCovariance());
if (!isInitialPairOfViews) {
    slamPosX = slamEstimator.getStatePositionX();
    slamPosY = slamEstimator.getStatePositionY();
    slamPosZ = slamEstimator.getStatePositionZ();
    slamPosition.setInhomogeneousCoordinates(slamPosX, slamPosY, slamPosZ);

    // adjust scale of current camera
    final var euclideanCenter2 = euclideanCamera2.getCameraCenter();

    final var euclideanPosX = euclideanCenter2.getInhomX();
    final var euclideanPosY = euclideanCenter2.getInhomY();
    final var euclideanPosZ = euclideanCenter2.getInhomZ();

    final var scaleVariationX = euclideanPosX / slamPosX;
    final var scaleVariationY = euclideanPosY / slamPosY;
    final var scaleVariationZ = euclideanPosZ / slamPosZ;

    final var scaleVariation = (scaleVariationX + scaleVariationY + scaleVariationZ) / 3.0;
    scale *= scaleVariation;
    currentScale = scale;
```
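The scale update above averages the per-axis ratio between the metric camera centre and the SLAM-predicted position. A distilled, self-contained restatement of that arithmetic (names are local to this sketch):

```java
public final class ScaleUpdateSketch {
    // euclidean* is the metric camera centre; slam* is the SLAM-predicted position.
    static double updatedScale(final double scale,
                               final double euclideanX, final double euclideanY, final double euclideanZ,
                               final double slamX, final double slamY, final double slamZ) {
        final var variation = (euclideanX / slamX + euclideanY / slamY + euclideanZ / slamZ) / 3.0;
        return scale * variation; // same effect as scale *= scaleVariation above
    }
}
```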
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 1262 |
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 1372 |
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 2075 |
```java
final var fundamentalMatrix = currentEstimatedFundamentalMatrix.getFundamentalMatrix();

// use inlier points used for fundamental matrix estimation
final var samples1 = currentEstimatedFundamentalMatrix.getLeftSamples();
final var samples2 = currentEstimatedFundamentalMatrix.getRightSamples();

final var points1 = new ArrayList<Point2D>();
final var points2 = new ArrayList<Point2D>();
final var length = samples1.size();
for (var i = 0; i < length; i++) {
    final var sample1 = samples1.get(i);
    final var sample2 = samples2.get(i);

    final var point1 = sample1.getPoint();
    final var point2 = sample2.getPoint();

    points1.add(point1);
    points2.add(point2);
}

try {
    final var estimator = new DualImageOfAbsoluteConicInitialCamerasEstimator(fundamentalMatrix, points1,
```
File | Line |
---|---|
com/irurueta/ar/slam/AbsoluteOrientationConstantVelocityModelSlamEstimator.java | 403 |
com/irurueta/ar/slam/SlamEstimator.java | 398 |
```java
    lastAngularSpeedZ = stateAngularSpeedZ;
    lastTimestampNanos = timestamp;

    if (listener != null) {
        listener.onFullSampleProcessed(this);
    }
}

/**
 * Resets position, linear velocity, linear acceleration, orientation and angular speed to
 * provided values. This method implementation also resets Kalman filter state.
 *
 * @param statePositionX position along x-axis expressed in meters (m).
 * @param statePositionY position along y-axis expressed in meters (m).
 * @param statePositionZ position along z-axis expressed in meters (m).
 * @param stateVelocityX linear velocity along x-axis expressed in meters per second (m/s).
 * @param stateVelocityY linear velocity along y-axis expressed in meters per second (m/s).
 * @param stateVelocityZ linear velocity along z-axis expressed in meters per second (m/s).
 * @param stateAccelerationX linear acceleration along x-axis expressed in meters per squared second (m/s^2).
 * @param stateAccelerationY linear acceleration along y-axis expressed in meters per squared second (m/s^2).
 * @param stateAccelerationZ linear acceleration along z-axis expressed in meters per squared second (m/s^2).
 * @param stateQuaternionA A value of orientation quaternion.
 * @param stateQuaternionB B value of orientation quaternion.
 * @param stateQuaternionC C value of orientation quaternion.
 * @param stateQuaternionD D value of orientation quaternion.
 * @param stateAngularSpeedX angular speed along x-axis expressed in radians per second (rad/s).
 * @param stateAngularSpeedY angular speed along y-axis expressed in radians per second (rad/s).
 * @param stateAngularSpeedZ angular speed along z-axis expressed in radians per second (rad/s).
 */
@Override
protected void reset(
        final double statePositionX, final double statePositionY, final double statePositionZ,
        final double stateVelocityX, final double stateVelocityY, final double stateVelocityZ,
        final double stateAccelerationX, final double stateAccelerationY, final double stateAccelerationZ,
        final double stateQuaternionA, final double stateQuaternionB, final double stateQuaternionC,
        final double stateQuaternionD,
        final double stateAngularSpeedX, final double stateAngularSpeedY, final double stateAngularSpeedZ) {
    super.reset(statePositionX, statePositionY, statePositionZ,
            stateVelocityX, stateVelocityY, stateVelocityZ,
            stateAccelerationX, stateAccelerationY, stateAccelerationZ,
            stateQuaternionA, stateQuaternionB, stateQuaternionC, stateQuaternionD,
            stateAngularSpeedX, stateAngularSpeedY, stateAngularSpeedZ);
    //noinspection ConstantValue
    if (stateOrientation != null) {
```
File | Line |
---|---|
com/irurueta/ar/slam/AbsoluteOrientationSlamEstimator.java | 427 |
com/irurueta/ar/slam/SlamEstimator.java | 398 |
```java
    lastAngularSpeedZ = stateAngularSpeedZ;
    lastTimestampNanos = timestamp;

    if (listener != null) {
        listener.onFullSampleProcessed(this);
    }
}

/**
 * Resets position, linear velocity, linear acceleration, orientation and angular speed to
 * provided values. This method implementation also resets Kalman filter state.
 *
 * @param statePositionX position along x-axis expressed in meters (m).
 * @param statePositionY position along y-axis expressed in meters (m).
 * @param statePositionZ position along z-axis expressed in meters (m).
 * @param stateVelocityX linear velocity along x-axis expressed in meters per second (m/s).
 * @param stateVelocityY linear velocity along y-axis expressed in meters per second (m/s).
 * @param stateVelocityZ linear velocity along z-axis expressed in meters per second (m/s).
 * @param stateAccelerationX linear acceleration along x-axis expressed in meters per squared second (m/s^2).
 * @param stateAccelerationY linear acceleration along y-axis expressed in meters per squared second (m/s^2).
 * @param stateAccelerationZ linear acceleration along z-axis expressed in meters per squared second (m/s^2).
 * @param stateQuaternionA A value of orientation quaternion.
 * @param stateQuaternionB B value of orientation quaternion.
 * @param stateQuaternionC C value of orientation quaternion.
 * @param stateQuaternionD D value of orientation quaternion.
 * @param stateAngularSpeedX angular speed along x-axis expressed in radians per second (rad/s).
 * @param stateAngularSpeedY angular speed along y-axis expressed in radians per second (rad/s).
 * @param stateAngularSpeedZ angular speed along z-axis expressed in radians per second (rad/s).
 */
@Override
protected void reset(
        final double statePositionX, final double statePositionY, final double statePositionZ,
        final double stateVelocityX, final double stateVelocityY, final double stateVelocityZ,
        final double stateAccelerationX, final double stateAccelerationY, final double stateAccelerationZ,
        final double stateQuaternionA, final double stateQuaternionB, final double stateQuaternionC,
        final double stateQuaternionD,
        final double stateAngularSpeedX, final double stateAngularSpeedY, final double stateAngularSpeedZ) {
    super.reset(statePositionX, statePositionY, statePositionZ,
            stateVelocityX, stateVelocityY, stateVelocityZ,
            stateAccelerationX, stateAccelerationY, stateAccelerationZ,
            stateQuaternionA, stateQuaternionB, stateQuaternionC, stateQuaternionD,
            stateAngularSpeedX, stateAngularSpeedY, stateAngularSpeedZ);
    //noinspection ConstantValue
    if (stateOrientation != null) {
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 396 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 826 |
```java
homography.asMatrix(h);
if (t == null) {
    t = new ProjectiveTransformation2D(h);
} else {
    t.setT(h);
}

// normalize
t.normalize();

// obtain elements of projective transformation matrix; there is no need to retrieve
// internal matrix h, as we already hold a reference
h11 = h.getElementAt(0, 0);
h12 = h.getElementAt(0, 1);
h21 = h.getElementAt(1, 0);
h22 = h.getElementAt(1, 1);
h31 = h.getElementAt(2, 0);
h32 = h.getElementAt(2, 1);

// fill first equation
a.setElementAt(counter, 0, h11 * h12);
a.setElementAt(counter, 1, h21 * h22);
a.setElementAt(counter, 2, h11 * h32 + h31 * h12);
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 681 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 539 |
```java
homography.asMatrix(h);
if (t == null) {
    t = new ProjectiveTransformation2D(h);
} else {
    t.setT(h);
}

// normalize
t.normalize();

// obtain elements of projective transformation matrix; there is no need to retrieve
// internal matrix h, as we already hold a reference
h11 = h.getElementAt(0, 0);
h12 = h.getElementAt(0, 1);
h21 = h.getElementAt(1, 0);
h22 = h.getElementAt(1, 1);
h31 = h.getElementAt(2, 0);
h32 = h.getElementAt(2, 1);

// fill first equation
a.setElementAt(counter, 0, h11 * h12);
a.setElementAt(counter, 1, h21 * h22);
a.setElementAt(counter, 2, h31 * h32);
```
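Both duplicated blocks fill one row of the linear system `a` from a homography with columns `h1 = (h11, h21, h31)` and `h2 = (h12, h22, h32)`. Assuming the standard Zhang-style calibration formulation, each row encodes the constraint `h1ᵀ · ω · h2 = 0`, which is linear in the entries of the image of absolute conic ω, so each `a.setElementAt(counter, k, ...)` call supplies the coefficient of one unknown entry of ω. The only difference between the two excerpts is the third coefficient (`h11 * h32 + h31 * h12` versus `h31 * h32`), which is consistent with the two call sites solving for different subsets of ω's entries (i.e. different intrinsic parameters assumed known); this reading is inferred from the coefficients, not confirmed by the report.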
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/PROMedSDualAbsoluteQuadricRobustEstimator.java | 246 |
com/irurueta/ar/calibration/estimators/PROSACDualAbsoluteQuadricRobustEstimator.java | 213 |
```java
}

/**
 * Returns quality scores corresponding to each provided camera. The larger the score value
 * the better the quality of the sampled camera.
 *
 * @return quality scores corresponding to each camera.
 */
@Override
public double[] getQualityScores() {
    return qualityScores;
}

/**
 * Sets quality scores corresponding to each provided camera. The larger the score value the
 * better the quality of the sampled camera.
 *
 * @param qualityScores quality scores corresponding to each camera.
 * @throws LockedException if robust estimator is locked because an estimation is already in
 * progress.
 * @throws IllegalArgumentException if provided quality scores length is smaller than minimum
 * required number of cameras (i.e. 2 cameras).
 */
@Override
public void setQualityScores(final double[] qualityScores) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    internalSetQualityScores(qualityScores);
}

/**
 * Indicates if estimator is ready to start the DAQ estimation. This is true when input data
 * (i.e. cameras) is provided and list contains at least the minimum number of required
 * cameras, and also quality scores are provided.
 *
 * @return true if estimator is ready, false otherwise.
 */
@Override
public boolean isReady() {
    return super.isReady() && qualityScores != null && qualityScores.length == cameras.size();
}

/**
 * Estimates the Dual Absolute Quadric using provided cameras.
 *
 * @return estimated Dual Absolute Quadric (DAQ).
 * @throws LockedException if robust estimator is locked.
 * @throws NotReadyException if no valid input data has already been provided.
 * @throws RobustEstimatorException if estimation fails for any reason (i.e. numerical
 * instability, no solution available, etc).
 */
@SuppressWarnings("DuplicatedCode")
@Override
public DualAbsoluteQuadric estimate() throws LockedException, NotReadyException, RobustEstimatorException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (!isReady()) {
        throw new NotReadyException();
    }
    final var innerEstimator = new PROMedSRobustEstimator<DualAbsoluteQuadric>(new PROMedSRobustEstimatorListener<>() {
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/PROMedSImageOfAbsoluteConicRobustEstimator.java | 254 |
com/irurueta/ar/calibration/estimators/PROSACImageOfAbsoluteConicRobustEstimator.java | 223 |
```java
}

/**
 * Returns quality scores corresponding to each provided homography. The larger the score
 * value the better the quality of the sampled homography.
 *
 * @return quality scores corresponding to each homography.
 */
@Override
public double[] getQualityScores() {
    return qualityScores;
}

/**
 * Sets quality scores corresponding to each provided homography. The larger the score value
 * the better the quality of the sampled homography.
 *
 * @param qualityScores quality scores corresponding to each homography.
 * @throws LockedException if robust estimator is locked because an estimation is already in
 * progress.
 * @throws IllegalArgumentException if provided quality scores length is smaller than minimum
 * required number of homographies (i.e. 1 homography).
 */
@Override
public void setQualityScores(final double[] qualityScores) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    internalSetQualityScores(qualityScores);
}

/**
 * Indicates if estimator is ready to start the IAC estimation. This is true when input data
 * (i.e. homographies) is provided and list contains at least the minimum number of required
 * homographies, and also quality scores are provided.
 *
 * @return true if estimator is ready, false otherwise.
 */
@Override
public boolean isReady() {
    return super.isReady() && qualityScores != null && qualityScores.length == homographies.size();
}

/**
 * Estimates Image of Absolute Conic (IAC).
 *
 * @return estimated IAC.
 * @throws LockedException if robust estimator is locked because an estimation is already in
 * progress.
 * @throws NotReadyException if provided input data is not enough to start the estimation.
 * @throws RobustEstimatorException if estimation fails for any reason (i.e. numerical
 * instability, no solution available, etc).
 */
@SuppressWarnings("DuplicatedCode")
@Override
public ImageOfAbsoluteConic estimate() throws LockedException, NotReadyException, RobustEstimatorException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (!isReady()) {
        throw new NotReadyException();
    }
    final var innerEstimator = new PROMedSRobustEstimator<ImageOfAbsoluteConic>(
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/PROMedSRadialDistortionRobustEstimator.java | 346 |
com/irurueta/ar/calibration/estimators/PROSACRadialDistortionRobustEstimator.java | 307 |
```java
}

/**
 * Returns quality scores corresponding to each provided point. The larger the score value the
 * better the quality of the sampled point.
 *
 * @return quality scores corresponding to each point.
 */
@Override
public double[] getQualityScores() {
    return qualityScores;
}

/**
 * Sets quality scores corresponding to each provided point. The larger the score value the
 * better the quality of the sampled point.
 *
 * @param qualityScores quality scores corresponding to each point.
 * @throws LockedException if robust estimator is locked because an estimation is already in
 * progress.
 * @throws IllegalArgumentException if provided quality scores length is smaller than
 * MINIMUM_SIZE (i.e. 3 samples).
 */
@Override
public void setQualityScores(final double[] qualityScores) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    internalSetQualityScores(qualityScores);
}

/**
 * Indicates if estimator is ready to start the radial distortion estimation. This is true
 * when input data (i.e. 2D points and quality scores) are provided and a minimum of 2 points
 * are available.
 *
 * @return true if estimator is ready, false otherwise.
 */
@Override
public boolean isReady() {
    return super.isReady() && qualityScores != null && qualityScores.length == distortedPoints.size();
}

/**
 * Estimates a radial distortion using a robust estimator and the best set of matched 2D
 * points found using the robust estimator.
 *
 * @return a radial distortion.
 * @throws LockedException if robust estimator is locked because an estimation is already in
 * progress.
 * @throws NotReadyException if provided input data is not enough to start the estimation.
 * @throws RobustEstimatorException if estimation fails for any reason (i.e. numerical
 * instability, no solution available, etc).
 */
@SuppressWarnings("DuplicatedCode")
@Override
public RadialDistortion estimate() throws LockedException, NotReadyException, RobustEstimatorException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (!isReady()) {
        throw new NotReadyException();
    }
    final var innerEstimator = new PROMedSRobustEstimator<RadialDistortion>(new PROMedSRobustEstimatorListener<>() {
```
File | Line |
---|---|
com/irurueta/ar/sfm/BaseSlamSparseReconstructor.java | 165 |
com/irurueta/ar/sfm/BaseSlamTwoViewsSparseReconstructor.java | 164 |
```java
    final var calibrationData = configuration.getCalibrationData();
    if (calibrationData != null) {
        slamEstimator.setCalibrationData(calibrationData);
    }
}

/**
 * Configures listener of SLAM estimator.
 */
protected void setUpSlamEstimatorListener() {
    slamEstimator.setListener(new BaseSlamEstimator.BaseSlamEstimatorListener<>() {
        @Override
        public void onFullSampleReceived(final BaseSlamEstimator<D> estimator) {
            // not used
        }

        @Override
        public void onFullSampleProcessed(final BaseSlamEstimator<D> estimator) {
            notifySlamStateAndCamera();
        }

        @Override
        public void onCorrectWithPositionMeasure(final BaseSlamEstimator<D> estimator) {
            // not used
        }

        @Override
        public void onCorrectedWithPositionMeasure(final BaseSlamEstimator<D> estimator) {
            notifySlamStateAndCamera();
        }

        private void notifySlamStateAndCamera() {
            notifySlamStateIfNeeded();
            notifySlamCameraIfNeeded();
        }
    });
}

/**
 * Update scene scale using SLAM data.
 *
 * @param isInitialPairOfViews true if initial pair of views is being processed, false otherwise.
 * @return true if scale was successfully updated, false otherwise.
 */
protected boolean updateScale(final boolean isInitialPairOfViews) {
```
File | Line |
---|---|
com/irurueta/ar/sfm/PROMedSRobustSinglePoint3DTriangulator.java | 262 |
com/irurueta/ar/sfm/PROSACRobustSinglePoint3DTriangulator.java | 224 |
```java
}

/**
 * Returns quality scores corresponding to each provided view. The larger the score value the
 * better the quality of the sampled view.
 *
 * @return quality scores corresponding to each view.
 */
@Override
public double[] getQualityScores() {
    return qualityScores;
}

/**
 * Sets quality scores corresponding to each provided view. The larger the score value the
 * better the quality of the sampled view.
 *
 * @param qualityScores quality scores corresponding to each view.
 * @throws LockedException if robust estimator is locked because an estimation is already in
 * progress.
 * @throws IllegalArgumentException if provided quality scores length is smaller than
 * MIN_REQUIRED_VIEWS (i.e. 2 views).
 */
@Override
public void setQualityScores(final double[] qualityScores) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    internalSetQualityScores(qualityScores);
}

/**
 * Indicates if triangulator is ready to start the 3D point triangulation. This is true when
 * input data (i.e. 2D points, cameras and quality scores) are provided and a minimum of 2
 * views are available.
 *
 * @return true if estimator is ready, false otherwise.
 */
@Override
public boolean isReady() {
    return super.isReady() && qualityScores != null && qualityScores.length == points2D.size();
}

/**
 * Triangulates provided matched 2D points being projected by each corresponding camera into
 * a single 3D point. At least 2 matched 2D points and their corresponding 2 cameras are
 * required to compute triangulation. If more views are provided, an averaged solution can be
 * found.
 *
 * @return computed triangulated 3D point.
 * @throws LockedException if this instance is locked.
 * @throws NotReadyException if lists of points and cameras don't have the same length or less
 * than 2 views are provided.
 * @throws RobustEstimatorException if estimation fails for any reason (i.e. numerical
 * instability, no solution available, etc).
 */
@SuppressWarnings("DuplicatedCode")
@Override
public Point3D triangulate() throws LockedException, NotReadyException, RobustEstimatorException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (!isReady()) {
        throw new NotReadyException();
    }
    final var innerEstimator = new PROMedSRobustEstimator<Point3D>(new PROMedSRobustEstimatorListener<>() {
```
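A minimal triangulation sketch combining the setters quoted above with the `setPointsAndCameras` call that appears in another excerpt of this report. The no-argument constructor, the availability of `setPointsAndCameras` on the robust triangulator, and the geometry package imports are assumptions:

```java
import com.irurueta.ar.sfm.PROSACRobustSinglePoint3DTriangulator;
import com.irurueta.geometry.PinholeCamera;
import com.irurueta.geometry.Point2D;
import com.irurueta.geometry.Point3D;

import java.util.List;

public final class TriangulationSketch {
    static Point3D triangulate(final List<Point2D> points2D, final List<PinholeCamera> cameras,
                               final double[] qualityScores) throws Exception {
        final var triangulator = new PROSACRobustSinglePoint3DTriangulator(); // assumed default constructor
        triangulator.setPointsAndCameras(points2D, cameras);
        triangulator.setQualityScores(qualityScores); // one score per view
        return triangulator.isReady() ? triangulator.triangulate() : null;
    }
}
```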
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/MSACDualAbsoluteQuadricRobustEstimator.java | 101 |
com/irurueta/ar/calibration/estimators/RANSACDualAbsoluteQuadricRobustEstimator.java | 100 |
```java
public MSACDualAbsoluteQuadricRobustEstimator(
        final List<PinholeCamera> cameras, final DualAbsoluteQuadricRobustEstimatorListener listener) {
    super(cameras, listener);
    threshold = DEFAULT_THRESHOLD;
}

/**
 * Returns threshold to determine whether cameras are inliers or not when testing possible
 * estimation solutions. The threshold refers to the amount of error a possible solution has.
 *
 * @return threshold to determine whether cameras are inliers when testing possible
 * estimation solutions.
 */
public double getThreshold() {
    return threshold;
}

/**
 * Sets threshold to determine whether cameras are inliers or not when testing possible
 * estimation solutions. The threshold refers to the amount of error a possible solution has.
 *
 * @param threshold threshold to determine whether cameras are inliers or not when testing
 * possible estimation solutions.
 * @throws IllegalArgumentException if provided value is equal or less than zero.
 * @throws LockedException if robust estimator is locked because an estimation is already in
 * progress.
 */
public void setThreshold(final double threshold) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (threshold <= MIN_THRESHOLD) {
        throw new IllegalArgumentException();
    }
    this.threshold = threshold;
}

/**
 * Estimates the Dual Absolute Quadric using provided cameras.
 *
 * @return estimated Dual Absolute Quadric (DAQ).
 * @throws LockedException if robust estimator is locked.
 * @throws NotReadyException if no valid input data has already been provided.
 * @throws RobustEstimatorException if estimation fails for any reason (i.e. numerical
 * instability, no solution available, etc).
 */
@SuppressWarnings("DuplicatedCode")
@Override
public DualAbsoluteQuadric estimate() throws LockedException, NotReadyException, RobustEstimatorException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (!isReady()) {
        throw new NotReadyException();
    }
    final var innerEstimator = new MSACRobustEstimator<DualAbsoluteQuadric>(new MSACRobustEstimatorListener<>() {
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/MSACImageOfAbsoluteConicRobustEstimator.java | 107 |
com/irurueta/ar/calibration/estimators/RANSACImageOfAbsoluteConicRobustEstimator.java | 106 |
```java
public MSACImageOfAbsoluteConicRobustEstimator(
        final List<Transformation2D> homographies, final ImageOfAbsoluteConicRobustEstimatorListener listener) {
    super(homographies, listener);
    threshold = DEFAULT_THRESHOLD;
}

/**
 * Returns threshold to determine whether homographies are inliers or not when testing
 * possible estimation solutions. The threshold refers to the amount of error a possible
 * solution has on the ortho-normality assumption of rotation matrices.
 *
 * @return threshold to determine whether homographies are inliers or not when testing
 * possible estimation solutions.
 */
public double getThreshold() {
    return threshold;
}

/**
 * Sets threshold to determine whether homographies are inliers or not when testing possible
 * estimation solutions. The threshold refers to the amount of error a possible solution has
 * on the ortho-normality assumption of rotation matrices.
 *
 * @param threshold threshold to determine whether homographies are inliers or not when
 * testing possible estimation solutions.
 * @throws IllegalArgumentException if provided value is equal or less than zero.
 * @throws LockedException if robust estimator is locked because an estimation is already in
 * progress.
 */
public void setThreshold(final double threshold) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (threshold <= MIN_THRESHOLD) {
        throw new IllegalArgumentException();
    }
    this.threshold = threshold;
}

/**
 * Estimates Image of Absolute Conic (IAC).
 *
 * @return estimated IAC.
 * @throws LockedException if robust estimator is locked because an estimation is already in
 * progress.
 * @throws NotReadyException if provided input data is not enough to start the estimation.
 * @throws RobustEstimatorException if estimation fails for any reason (i.e. numerical
 * instability, no solution available, etc).
 */
@SuppressWarnings("DuplicatedCode")
@Override
public ImageOfAbsoluteConic estimate() throws LockedException, NotReadyException, RobustEstimatorException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (!isReady()) {
        throw new NotReadyException();
    }
    final var innerEstimator = new MSACRobustEstimator<ImageOfAbsoluteConic>(new MSACRobustEstimatorListener<>() {
```
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 1216 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 931 |
```java
metricReconstructedPoints = new ArrayList<>();
final var points = new ArrayList<Point2D>();
final var numPoints = correctedPoints1.size();
Point3D triangulatedPoint;
ReconstructedPoint3D reconstructedPoint;
for (var i = 0; i < numPoints; i++) {
    points.clear();
    points.add(correctedPoints1.get(i));
    points.add(correctedPoints2.get(i));
    triangulator.setPointsAndCameras(points, cameras);
    triangulatedPoint = triangulator.triangulate();

    reconstructedPoint = new ReconstructedPoint3D();
    reconstructedPoint.setPoint(triangulatedPoint);

    // only points reconstructed in front of both cameras are considered valid
    final var front1 = camera1.isPointInFrontOfCamera(triangulatedPoint);
    final var front2 = camera2.isPointInFrontOfCamera(triangulatedPoint);
    reconstructedPoint.setInlier(front1 && front2);
```
File | Line |
---|---|
com/irurueta/ar/epipolar/estimators/PROSACFundamentalMatrixRobustEstimator.java | 584 |
com/irurueta/ar/epipolar/estimators/RANSACFundamentalMatrixRobustEstimator.java | 375 |
```java
    }
});

try {
    locked = true;
    inliersData = null;
    innerEstimator.setComputeAndKeepInliersEnabled(computeAndKeepInliers || refineResult);
    innerEstimator.setComputeAndKeepResidualsEnabled(computeAndKeepResiduals || refineResult);
    innerEstimator.setConfidence(confidence);
    innerEstimator.setMaxIterations(maxIterations);
    innerEstimator.setProgressDelta(progressDelta);
    final var result = innerEstimator.estimate();
    inliersData = innerEstimator.getInliersData();
    return attemptRefine(result);
} catch (final com.irurueta.numerical.LockedException e) {
    throw new LockedException(e);
} catch (final com.irurueta.numerical.NotReadyException e) {
    throw new NotReadyException(e);
} finally {
    locked = false;
}
}

/**
 * Returns method being used for robust estimation.
 *
 * @return method being used for robust estimation.
 */
@Override
public RobustEstimatorMethod getMethod() {
    return RobustEstimatorMethod.PROSAC;
```
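Note the `computeAndKeepInliers || refineResult` and `computeAndKeepResiduals || refineResult` guards above: they suggest that result refinement consumes inlier data, so when `refineResult` is enabled, inliers and residuals are retained regardless of the user-facing flags so that `attemptRefine(result)` has data to work with. This reading is inferred from the excerpt, not confirmed elsewhere in the report.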
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 1376 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 1061 |
```java
final var samples2 = currentEstimatedFundamentalMatrix.getRightSamples();

final var points1 = new ArrayList<Point2D>();
final var points2 = new ArrayList<Point2D>();
final var length = samples1.size();
for (var i = 0; i < length; i++) {
    final var sample1 = samples1.get(i);
    final var sample2 = samples2.get(i);

    final var point1 = sample1.getPoint();
    final var point2 = sample2.getPoint();

    points1.add(point1);
    points2.add(point2);
}

try {
    final var estimator = new EssentialMatrixInitialCamerasEstimator(fundamentalMatrix, intrinsic1, intrinsic2,
            points1, points2);
    estimator.setCorrectorType(configuration.getPairedCamerasCorrectorType());
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/PROMedSDualAbsoluteQuadricRobustEstimator.java | 190 |
com/irurueta/ar/epipolar/estimators/PROMedSFundamentalMatrixRobustEstimator.java | 361 |
com/irurueta/ar/sfm/PROMedSRobustSinglePoint3DTriangulator.java | 205 |
```java
    internalSetQualityScores(qualityScores);
}

/**
 * Returns threshold to be used to keep the algorithm iterating in case that best estimated
 * threshold using median of residuals is not small enough. Once a solution is found that
 * generates a threshold below this value, the algorithm will stop.
 * The stop threshold can be used to prevent the PROMedS algorithm iterating too many times in
 * cases where samples have a very similar accuracy. For instance, in cases where proportion of
 * outliers is very small (close to 0%), and samples are very accurate (i.e. 1e-6), the
 * algorithm would iterate for a long time trying to find the best solution when indeed there
 * is no need to do that if a reasonable threshold has already been reached.
 * Because of this behaviour the stop threshold can be set to a value much lower than the one
 * typically used in RANSAC, and yet the algorithm could still produce even smaller thresholds
 * in estimated results.
 *
 * @return stop threshold to stop the algorithm prematurely when a certain accuracy has been
 * reached.
 */
public double getStopThreshold() {
    return stopThreshold;
}

/**
 * Sets threshold to be used to keep the algorithm iterating in case that best estimated
 * threshold using median of residuals is not small enough. Once a solution is found that
 * generates a threshold below this value, the algorithm will stop.
 * The stop threshold can be used to prevent the PROMedS algorithm iterating too many times in
 * cases where samples have a very similar accuracy. For instance, in cases where proportion of
 * outliers is very small (close to 0%), and samples are very accurate (i.e. 1e-6), the
 * algorithm would iterate for a long time trying to find the best solution when indeed there
 * is no need to do that if a reasonable threshold has already been reached.
 * Because of this behaviour the stop threshold can be set to a value much lower than the one
 * typically used in RANSAC, and yet the algorithm could still produce even smaller thresholds
 * in estimated results.
 *
 * @param stopThreshold stop threshold to stop the algorithm prematurely when a certain
 * accuracy has been reached.
 * @throws IllegalArgumentException if provided value is zero or negative.
 * @throws LockedException if robust estimator is locked because an estimation is already in
 * progress.
 */
public void setStopThreshold(final double stopThreshold) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (stopThreshold <= MIN_STOP_THRESHOLD) {
        throw new IllegalArgumentException();
    }
    this.stopThreshold = stopThreshold;
}

/**
 * Returns quality scores corresponding to each provided camera. The larger the score value
 * the better the quality of the sampled camera.
 *
 * @return quality scores corresponding to each camera.
 */
@Override
public double[] getQualityScores() {
    return qualityScores;
}

/**
 * Sets quality scores corresponding to each provided camera. The larger the score value the
 * better the quality of the sampled camera.
 *
 * @param qualityScores quality scores corresponding to each camera.
 * @throws LockedException if robust estimator is locked because an estimation is already in
 * progress.
 * @throws IllegalArgumentException if provided quality scores length is smaller than minimum
 * required number of cameras (i.e. 2 cameras).
 */
@Override
public void setQualityScores(final double[] qualityScores) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    internalSetQualityScores(qualityScores);
}

/**
 * Indicates if estimator is ready to start the DAQ estimation. This is true when input data
 * (i.e. cameras) is provided and list contains at least the minimum number of required
 * cameras, and also quality scores are provided.
 *
 * @return true if estimator is ready, false otherwise.
 */
@Override
public boolean isReady() {
    return super.isReady() && qualityScores != null && qualityScores.length == cameras.size();
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/PROMedSImageOfAbsoluteConicRobustEstimator.java | 198 |
com/irurueta/ar/epipolar/estimators/PROMedSFundamentalMatrixRobustEstimator.java | 361 |
```java
    internalSetQualityScores(qualityScores);
}

/**
 * Returns threshold to be used to keep the algorithm iterating in case that best estimated
 * threshold using median of residuals is not small enough. Once a solution is found that
 * generates a threshold below this value, the algorithm will stop.
 * The stop threshold can be used to prevent the PROMedS algorithm iterating too many times in
 * cases where samples have a very similar accuracy. For instance, in cases where proportion of
 * outliers is very small (close to 0%), and samples are very accurate (i.e. 1e-6), the
 * algorithm would iterate for a long time trying to find the best solution when indeed there
 * is no need to do that if a reasonable threshold has already been reached.
 * Because of this behaviour the stop threshold can be set to a value much lower than the one
 * typically used in RANSAC, and yet the algorithm could still produce even smaller thresholds
 * in estimated results.
 *
 * @return stop threshold to stop the algorithm prematurely when a certain accuracy has been
 * reached.
 */
public double getStopThreshold() {
    return stopThreshold;
}

/**
 * Sets threshold to be used to keep the algorithm iterating in case that best estimated
 * threshold using median of residuals is not small enough. Once a solution is found that
 * generates a threshold below this value, the algorithm will stop.
 * The stop threshold can be used to prevent the PROMedS algorithm iterating too many times in
 * cases where samples have a very similar accuracy. For instance, in cases where proportion of
 * outliers is very small (close to 0%), and samples are very accurate (i.e. 1e-6), the
 * algorithm would iterate for a long time trying to find the best solution when indeed there
 * is no need to do that if a reasonable threshold has already been reached.
 * Because of this behaviour the stop threshold can be set to a value much lower than the one
 * typically used in RANSAC, and yet the algorithm could still produce even smaller thresholds
 * in estimated results.
 *
 * @param stopThreshold stop threshold to stop the algorithm prematurely when a certain
 * accuracy has been reached.
 * @throws IllegalArgumentException if provided value is zero or negative.
 * @throws LockedException if robust estimator is locked because an estimation is already in
 * progress.
 */
public void setStopThreshold(final double stopThreshold) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (stopThreshold <= MIN_STOP_THRESHOLD) {
        throw new IllegalArgumentException();
    }
    this.stopThreshold = stopThreshold;
}

/**
 * Returns quality scores corresponding to each provided homography. The larger the score
 * value the better the quality of the sampled homography.
 *
 * @return quality scores corresponding to each homography.
 */
@Override
public double[] getQualityScores() {
    return qualityScores;
}

/**
 * Sets quality scores corresponding to each provided homography. The larger the score value
 * the better the quality of the sampled homography.
 *
 * @param qualityScores quality scores corresponding to each homography.
 * @throws LockedException if robust estimator is locked because an estimation is already in
 * progress.
 * @throws IllegalArgumentException if provided quality scores length is smaller than minimum
 * required number of homographies (i.e. 1 homography).
 */
@Override
public void setQualityScores(final double[] qualityScores) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    internalSetQualityScores(qualityScores);
}

/**
 * Indicates if estimator is ready to start the IAC estimation. This is true when input data
 * (i.e. homographies) is provided and list contains at least the minimum number of required
 * homographies, and also quality scores are provided.
 *
 * @return true if estimator is ready, false otherwise.
 */
@Override
public boolean isReady() {
    return super.isReady() && qualityScores != null && qualityScores.length == homographies.size();
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/PROMedSRadialDistortionRobustEstimator.java | 289 |
com/irurueta/ar/epipolar/estimators/PROMedSFundamentalMatrixRobustEstimator.java | 361 |
```java
    internalSetQualityScores(qualityScores);
}

/**
 * Returns threshold to be used to keep the algorithm iterating in case that best estimated
 * threshold using median of residuals is not small enough. Once a solution is found that
 * generates a threshold below this value, the algorithm will stop.
 * The stop threshold can be used to prevent the PROMedS algorithm iterating too many times in
 * cases where samples have a very similar accuracy. For instance, in cases where proportion of
 * outliers is very small (close to 0%), and samples are very accurate (i.e. 1e-6), the
 * algorithm would iterate for a long time trying to find the best solution when indeed there
 * is no need to do that if a reasonable threshold has already been reached.
 * Because of this behaviour the stop threshold can be set to a value much lower than the one
 * typically used in RANSAC, and yet the algorithm could still produce even smaller thresholds
 * in estimated results.
 *
 * @return stop threshold to stop the algorithm prematurely when a certain accuracy has been
 * reached.
 */
public double getStopThreshold() {
    return stopThreshold;
}

/**
 * Sets threshold to be used to keep the algorithm iterating in case that best estimated
 * threshold using median of residuals is not small enough. Once a solution is found that
 * generates a threshold below this value, the algorithm will stop.
 * The stop threshold can be used to prevent the PROMedS algorithm iterating too many times in
 * cases where samples have a very similar accuracy. For instance, in cases where proportion of
 * outliers is very small (close to 0%), and samples are very accurate (i.e. 1e-6), the
 * algorithm would iterate for a long time trying to find the best solution when indeed there
 * is no need to do that if a reasonable threshold has already been reached.
 * Because of this behaviour the stop threshold can be set to a value much lower than the one
 * typically used in RANSAC, and yet the algorithm could still produce even smaller thresholds
 * in estimated results.
 *
 * @param stopThreshold stop threshold to stop the algorithm prematurely when a certain
 * accuracy has been reached.
 * @throws IllegalArgumentException if provided value is zero or negative.
 * @throws LockedException if robust estimator is locked because an estimation is already in
 * progress.
 */
public void setStopThreshold(final double stopThreshold) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (stopThreshold <= MIN_STOP_THRESHOLD) {
        throw new IllegalArgumentException();
    }
    this.stopThreshold = stopThreshold;
}

/**
 * Returns quality scores corresponding to each provided point. The larger the score value the
 * better the quality of the sampled point.
 *
 * @return quality scores corresponding to each point.
 */
@Override
public double[] getQualityScores() {
    return qualityScores;
}

/**
 * Sets quality scores corresponding to each provided point. The larger the score value the
 * better the quality of the sampled point.
 *
 * @param qualityScores quality scores corresponding to each point.
 * @throws LockedException if robust estimator is locked because an estimation is already in
 * progress.
 * @throws IllegalArgumentException if provided quality scores length is smaller than
 * MINIMUM_SIZE (i.e. 3 samples).
 */
@Override
public void setQualityScores(final double[] qualityScores) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    internalSetQualityScores(qualityScores);
}

/**
 * Indicates if estimator is ready to start the radial distortion estimation. This is true
 * when input data (i.e. 2D points and quality scores) are provided and a minimum of 2 points
 * are available.
 *
 * @return true if estimator is ready, false otherwise.
 */
@Override
public boolean isReady() {
    return super.isReady() && qualityScores != null && qualityScores.length == distortedPoints.size();
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/PROSACDualAbsoluteQuadricRobustEstimator.java | 178 |
com/irurueta/ar/epipolar/estimators/PROSACFundamentalMatrixRobustEstimator.java | 350 |
com/irurueta/ar/sfm/PROSACRobustSinglePoint3DTriangulator.java | 188 |
```java
    internalSetQualityScores(qualityScores);
}

/**
 * Returns threshold to determine whether cameras are inliers or not when testing possible
 * estimation solutions. The threshold refers to the amount of error a possible solution has.
 *
 * @return threshold to determine whether cameras are inliers or not when testing possible
 * estimation solutions.
 */
public double getThreshold() {
    return threshold;
}

/**
 * Sets threshold to determine whether cameras are inliers or not when testing possible
 * estimation solutions. The threshold refers to the amount of error a possible solution has.
 *
 * @param threshold threshold to determine whether cameras are inliers or not when testing
 * possible estimation solutions.
 * @throws IllegalArgumentException if provided value is equal or less than zero.
 * @throws LockedException if robust estimator is locked because an estimation is already in
 * progress.
 */
public void setThreshold(final double threshold) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    if (threshold <= MIN_THRESHOLD) {
        throw new IllegalArgumentException();
    }
    this.threshold = threshold;
}

/**
 * Returns quality scores corresponding to each provided camera. The larger the score value
 * the better the quality of the sampled camera.
 *
 * @return quality scores corresponding to each camera.
 */
@Override
public double[] getQualityScores() {
    return qualityScores;
}

/**
 * Sets quality scores corresponding to each provided camera. The larger the score value the
 * better the quality of the sampled camera.
 *
 * @param qualityScores quality scores corresponding to each camera.
 * @throws LockedException if robust estimator is locked because an estimation is already in
 * progress.
 * @throws IllegalArgumentException if provided quality scores length is smaller than minimum
 * required number of cameras (i.e. 2 cameras).
 */
@Override
public void setQualityScores(final double[] qualityScores) throws LockedException {
    if (isLocked()) {
        throw new LockedException();
    }
    internalSetQualityScores(qualityScores);
}

/**
 * Indicates if estimator is ready to start the DAQ estimation. This is true when input data
 * (i.e. cameras) is provided and contains at least the minimum number of required cameras,
 * and also quality scores are provided.
 *
 * @return true if estimator is ready, false otherwise.
 */
@Override
public boolean isReady() {
    return super.isReady() && qualityScores != null && qualityScores.length == cameras.size();
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/PROSACImageOfAbsoluteConicRobustEstimator.java | 186 |
com/irurueta/ar/epipolar/estimators/PROSACFundamentalMatrixRobustEstimator.java | 350 |
internalSetQualityScores(qualityScores); } /** * Returns threshold to determine whether homographies are inliers or not * when testing possible estimation solutions. * The threshold refers to the amount of error a possible solution has on * the ortho-normality assumption of rotation matrices. * * @return threshold to determine whether homographies are inliers or not * when testing possible estimation solutions. */ public double getThreshold() { return threshold; } /** * Sets threshold to determine whether homographies are inliers or not when * testing possible estimation solutions. * The threshold refers to the amount of error a possible solution has on * the ortho-normality assumption of rotation matrices. * * @param threshold threshold to determine whether homographies are inliers * or not when testing possible estimation solutions. * @throws IllegalArgumentException if provided value is less than or equal * to zero. * @throws LockedException if robust estimator is locked because an * estimation is already in progress. */ public void setThreshold(final double threshold) throws LockedException { if (isLocked()) { throw new LockedException(); } if (threshold <= MIN_THRESHOLD) { throw new IllegalArgumentException(); } this.threshold = threshold; } /** * Returns quality scores corresponding to each provided homography. * The larger the score value, the better the quality of the sampled * homography. * * @return quality scores corresponding to each homography. */ @Override public double[] getQualityScores() { return qualityScores; } /** * Sets quality scores corresponding to each provided homography. * The larger the score value, the better the quality of the sampled * homography. * * @param qualityScores quality scores corresponding to each homography. * @throws LockedException if robust estimator is locked because an * estimation is already in progress. * @throws IllegalArgumentException if provided quality scores length is * smaller than minimum required number of homographies (i.e. 1 * homography). */ @Override public void setQualityScores(final double[] qualityScores) throws LockedException { if (isLocked()) { throw new LockedException(); } internalSetQualityScores(qualityScores); } /** * Indicates if estimator is ready to start the IAC estimation. * This is true when input data (i.e. homographies) is provided and the list * contains at least the minimum number of required homographies, and * also quality scores are provided. * * @return true if estimator is ready, false otherwise. */ @Override public boolean isReady() { return super.isReady() && qualityScores != null && qualityScores.length == homographies.size(); |
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/PROSACRadialDistortionRobustEstimator.java | 271 |
com/irurueta/ar/epipolar/estimators/PROSACFundamentalMatrixRobustEstimator.java | 350 |
internalSetQualityScores(qualityScores); } /** * Returns threshold to determine whether points are inliers or not when * testing possible estimation solutions. * The threshold refers to the amount of error (i.e. Euclidean distance) a * possible solution has on projected 2D points. * * @return threshold to determine whether points are inliers or not when * testing possible estimation solutions. */ public double getThreshold() { return threshold; } /** * Sets threshold to determine whether points are inliers or not when * testing possible estimation solutions. * The threshold refers to the amount of error (i.e. Euclidean distance) a * possible solution has on projected 2D points. * * @param threshold threshold to be set. * @throws IllegalArgumentException if provided value is less than or equal * to zero. * @throws LockedException if robust estimator is locked because an * estimation is already in progress. */ public void setThreshold(final double threshold) throws LockedException { if (isLocked()) { throw new LockedException(); } if (threshold <= MIN_THRESHOLD) { throw new IllegalArgumentException(); } this.threshold = threshold; } /** * Returns quality scores corresponding to each provided point. * The larger the score value, the better the quality of the sampled point. * * @return quality scores corresponding to each point. */ @Override public double[] getQualityScores() { return qualityScores; } /** * Sets quality scores corresponding to each provided point. * The larger the score value, the better the quality of the sampled point. * * @param qualityScores quality scores corresponding to each point. * @throws LockedException if robust estimator is locked because an * estimation is already in progress. * @throws IllegalArgumentException if provided quality scores length is * smaller than MINIMUM_SIZE (i.e. 2 samples). */ @Override public void setQualityScores(final double[] qualityScores) throws LockedException { if (isLocked()) { throw new LockedException(); } internalSetQualityScores(qualityScores); } /** * Indicates if estimator is ready to start the radial distortion * estimation. * This is true when input data (i.e. 2D points and quality scores) are * provided and a minimum of 2 points are available. * * @return true if estimator is ready, false otherwise. */ @Override public boolean isReady() { return super.isReady() && qualityScores != null && qualityScores.length == distortedPoints.size(); |
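A minimal usage sketch of the duplicated threshold and quality-score API shown above. Only `setThreshold`, `setQualityScores` and `isReady` appear verbatim in this report; the no-arg constructor and the exception handling are assumptions:

```java
import com.irurueta.ar.calibration.estimators.PROSACRadialDistortionRobustEstimator;

public final class ProsacConfigurationExample {

    public static void main(final String[] args) throws Exception {
        // construction details are assumed; only the setters below are
        // confirmed by the duplicated code in this report
        final var estimator = new PROSACRadialDistortionRobustEstimator();

        // inlier threshold, expressed as Euclidean distance on projected 2D points
        estimator.setThreshold(1e-3);

        // one score per sample; the larger the score, the better the sample
        estimator.setQualityScores(new double[] {0.9, 0.8, 0.7});

        // isReady() stays false until matching distorted/undistorted points
        // whose count equals the number of quality scores are also provided
    }
}
```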
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 1071 |
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 1126 |
final var fundamentalMatrix = currentEstimatedFundamentalMatrix.getFundamentalMatrix(); final var estimator = new DualAbsoluteQuadricInitialCamerasEstimator(fundamentalMatrix); estimator.setAspectRatio(configuration.getPairedCamerasAspectRatio()); estimator.estimate(); final var camera1 = estimator.getEstimatedLeftCamera(); final var camera2 = estimator.getEstimatedRightCamera(); camera1.decompose(); camera2.decompose(); final var intrinsicZeroPrincipalPoint1 = camera1.getIntrinsicParameters(); final var intrinsicZeroPrincipalPoint2 = camera2.getIntrinsicParameters(); final var principalPointX = configuration.getPrincipalPointX(); final var principalPointY = configuration.getPrincipalPointY(); final var intrinsic1 = new PinholeCameraIntrinsicParameters(intrinsicZeroPrincipalPoint1); intrinsic1.setHorizontalPrincipalPoint(intrinsic1.getHorizontalPrincipalPoint() + principalPointX); intrinsic1.setVerticalPrincipalPoint(intrinsic1.getVerticalPrincipalPoint() + principalPointY); |
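Both duplicated regions of BasePairedViewsSparseReconstructor shift the estimated intrinsics by the configured principal point in exactly the same way. A sketch of a shared helper follows; the helper name and the com.irurueta.geometry import path are assumptions, while the calls themselves appear verbatim above:

```java
import com.irurueta.geometry.PinholeCameraIntrinsicParameters;

final class IntrinsicsUtils {

    private IntrinsicsUtils() {
        // no instances
    }

    /**
     * Returns a copy of the given intrinsics with the principal point shifted
     * by (principalPointX, principalPointY), as both duplicated sites do.
     */
    static PinholeCameraIntrinsicParameters shiftPrincipalPoint(
            final PinholeCameraIntrinsicParameters zeroPrincipalPoint,
            final double principalPointX, final double principalPointY) {
        final var intrinsic = new PinholeCameraIntrinsicParameters(zeroPrincipalPoint);
        intrinsic.setHorizontalPrincipalPoint(intrinsic.getHorizontalPrincipalPoint() + principalPointX);
        intrinsic.setVerticalPrincipalPoint(intrinsic.getVerticalPrincipalPoint() + principalPointY);
        return intrinsic;
    }
}
```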
File | Line |
---|---|
com/irurueta/ar/sfm/BaseSlamPairedViewsSparseReconstructor.java | 213 |
com/irurueta/ar/sfm/BaseSlamTwoViewsSparseReconstructor.java | 164 |
final var calibrationData = configuration.getCalibrationData(); if (calibrationData != null) { slamEstimator.setCalibrationData(calibrationData); } } /** * Configures listener of SLAM estimator */ protected void setUpSlamEstimatorListener() { slamEstimator.setListener(new BaseSlamEstimator.BaseSlamEstimatorListener<>() { @Override public void onFullSampleReceived(final BaseSlamEstimator<D> estimator) { // not used } @Override public void onFullSampleProcessed(final BaseSlamEstimator<D> estimator) { notifySlamStateAndCamera(); } @Override public void onCorrectWithPositionMeasure(final BaseSlamEstimator<D> estimator) { // not used } @Override public void onCorrectedWithPositionMeasure(final BaseSlamEstimator<D> estimator) { notifySlamStateAndCamera(); } private void notifySlamStateAndCamera() { notifySlamStateIfNeeded(); notifySlamCameraIfNeeded(); } }); } |
File | Line |
---|---|
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 1848 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 794 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 841 |
fundamentalMatrix.normalize(); final var estimator = new DualAbsoluteQuadricInitialCamerasEstimator(fundamentalMatrix); estimator.setAspectRatio(configuration.getInitialCamerasAspectRatio()); estimator.estimate(); final var camera1 = estimator.getEstimatedLeftCamera(); final var camera2 = estimator.getEstimatedRightCamera(); camera1.decompose(); camera2.decompose(); final var intrinsicZeroPrincipalPoint1 = camera1.getIntrinsicParameters(); final var intrinsicZeroPrincipalPoint2 = camera2.getIntrinsicParameters(); final var principalPointX = configuration.getPrincipalPointX(); final var principalPointY = configuration.getPrincipalPointY(); final var intrinsic1 = new PinholeCameraIntrinsicParameters(intrinsicZeroPrincipalPoint1); intrinsic1.setHorizontalPrincipalPoint(intrinsic1.getHorizontalPrincipalPoint() + principalPointX); intrinsic1.setVerticalPrincipalPoint(intrinsic1.getVerticalPrincipalPoint() + principalPointY); |
File | Line |
---|---|
com/irurueta/ar/sfm/LMSEInhomogeneousSinglePoint3DTriangulator.java | 219 |
com/irurueta/ar/sfm/LMSEInhomogeneousSinglePoint3DTriangulator.java | 238 |
com/irurueta/ar/sfm/WeightedInhomogeneousSinglePoint3DTriangulator.java | 376 |
com/irurueta/ar/sfm/WeightedInhomogeneousSinglePoint3DTriangulator.java | 398 |
b[row] = homW * verticalAxisPlane.getD() - homX * principalPlane.getD(); // normalize equation to increase accuracy rowNorm = Math.sqrt(Math.pow(a.getElementAt(row, 0), 2.0) + Math.pow(a.getElementAt(row, 1), 2.0) + Math.pow(a.getElementAt(row, 2), 2.0)); a.setElementAt(row, 0, a.getElementAt(row, 0) / rowNorm); a.setElementAt(row, 1, a.getElementAt(row, 1) / rowNorm); a.setElementAt(row, 2, a.getElementAt(row, 2) / rowNorm); b[row] /= rowNorm; |
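All four duplicated triangulator sites normalize an equation row of the linear system a·x = b identically. A sketch of that step on its own; the com.irurueta.algebra import path is an assumption, while the getElementAt/setElementAt calls are taken from the snippet:

```java
import com.irurueta.algebra.Matrix;

final class RowNormalizer {

    private RowNormalizer() {
        // no instances
    }

    /**
     * Normalizes row 'row' of the 3-column system a·x = b to increase
     * accuracy, exactly as the duplicated code does.
     */
    static void normalizeRow(final Matrix a, final double[] b, final int row) {
        final var rowNorm = Math.sqrt(Math.pow(a.getElementAt(row, 0), 2.0)
                + Math.pow(a.getElementAt(row, 1), 2.0)
                + Math.pow(a.getElementAt(row, 2), 2.0));
        for (var col = 0; col < 3; col++) {
            a.setElementAt(row, col, a.getElementAt(row, col) / rowNorm);
        }
        b[row] /= rowNorm;
    }
}
```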
File | Line |
---|---|
com/irurueta/ar/slam/StatePredictor.java | 721 |
com/irurueta/ar/slam/StatePredictor.java | 1017 |
|| jacobianU.getColumns() != CONTROL_WITH_ROTATION_ADJUSTMENT_COMPONENTS)) { throw new IllegalArgumentException("jacobian wrt u must be 16x13"); } // position final var r = new InhomogeneousPoint3D(x[0], x[1], x[2]); // orientation var q = new Quaternion(x[3], x[4], x[5], x[6]); // linear velocity var vx = x[7]; var vy = x[8]; var vz = x[9]; // linear acceleration var ax = x[10]; var ay = x[11]; var az = x[12]; // angular velocity var wx = x[13]; var wy = x[14]; var wz = x[15]; // rotation change (control) final var dq = new Quaternion(u[0], u[1], u[2], u[3]); |
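The duplicated blocks in StatePredictor unpack the same 16-component state vector by hand. Below is a sketch of a value holder that could replace the repeated local declarations; the record and the import paths are assumptions, while InhomogeneousPoint3D and Quaternion are used exactly as in the snippet:

```java
import com.irurueta.geometry.InhomogeneousPoint3D;
import com.irurueta.geometry.Quaternion;

/**
 * Hypothetical holder for the 16-component state: position, orientation,
 * linear velocity, linear acceleration and angular velocity.
 */
record KinematicState(
        InhomogeneousPoint3D position, Quaternion orientation,
        double vx, double vy, double vz,
        double ax, double ay, double az,
        double wx, double wy, double wz) {

    /** Unpacks a state array laid out as in the duplicated code. */
    static KinematicState fromArray(final double[] x) {
        return new KinematicState(
                new InhomogeneousPoint3D(x[0], x[1], x[2]),
                new Quaternion(x[3], x[4], x[5], x[6]),
                x[7], x[8], x[9],     // linear velocity
                x[10], x[11], x[12],  // linear acceleration
                x[13], x[14], x[15]); // angular velocity
    }
}
```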
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 467 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 609 |
} final var decomposer = new SingularValueDecomposer(a); decomposer.decompose(); if (decomposer.getNullity() > 1) { // homographies constitute a degenerate camera movement. // A linear combination of possible IACs exists (i.e. the solution is // not unique up to scale) throw new ImageOfAbsoluteConicEstimatorException(); } final var v = decomposer.getV(); // use last column of V as IAC vector // the last column of V contains IAC matrix (B), which is symmetric // and positive definite, ordered as follows: B11, B12, B22, B13, // B23, B33 final var b11 = v.getElementAt(0, 4); final var b22 = v.getElementAt(1, 4); final var b13 = v.getElementAt(2, 4); final var b23 = v.getElementAt(3, 4); final var b33 = v.getElementAt(4, 4); // A conic is defined as [A B D] // [B C E] // [D E F] // Since skewness is zero b12 = B = 0.0 return new ImageOfAbsoluteConic(b11, 0.0, b22, b13, b23, b33); } catch (final AlgebraException e) { |
File | Line |
---|---|
com/irurueta/ar/slam/StatePredictor.java | 144 |
com/irurueta/ar/slam/StatePredictor.java | 425 |
|| jacobianU.getColumns() != CONTROL_COMPONENTS)) { // jacobian wrt u must be 16x9 throw new IllegalArgumentException(); } // position final var r = new InhomogeneousPoint3D(x[0], x[1], x[2]); // orientation var q = new Quaternion(x[3], x[4], x[5], x[6]); // linear velocity var vx = x[7]; var vy = x[8]; var vz = x[9]; // linear acceleration var ax = x[10]; var ay = x[11]; var az = x[12]; // angular velocity var wx = x[13]; var wy = x[14]; var wz = x[15]; // linear velocity change (control) final var uvx = u[0]; |
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 815 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 1106 |
homography.asMatrix(h); if (t == null) { t = new ProjectiveTransformation2D(h); } else { t.setT(h); } // normalize t.normalize(); // obtain elements of projective transformation matrix // there is no need to retrieve internal matrix h, as we already // hold a reference h11 = h.getElementAt(0, 0); h12 = h.getElementAt(0, 1); h21 = h.getElementAt(1, 0); h22 = h.getElementAt(1, 1); h31 = h.getElementAt(2, 0); h32 = h.getElementAt(2, 1); // fill first equation a.setElementAt(counter, 0, h11 * h12 + h21 * h22 / sqrAspectRatio); a.setElementAt(counter, 1, h11 * h32 + h31 * h12); |
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 991 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 960 |
homography.asMatrix(h); if (t == null) { t = new ProjectiveTransformation2D(h); } else { t.setT(h); } // normalize t.normalize(); // obtain elements of projective transformation matrix // there is no need to retrieve internal matrix h, as we already // hold a reference h11 = h.getElementAt(0, 0); h12 = h.getElementAt(0, 1); h21 = h.getElementAt(1, 0); h22 = h.getElementAt(1, 1); h31 = h.getElementAt(2, 0); h32 = h.getElementAt(2, 1); // fill first equation a.setElementAt(counter, 0, h11 * h12 + h21 * h22 / sqrAspectRatio); a.setElementAt(counter, 1, h31 * h32); |
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 853 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 594 |
currentEstimatedFundamentalMatrix.setRightSamples(rightSamples); return true; } catch (final Exception e) { return false; } } /** * Estimates fundamental matrix for provided matches, when 3D points lie in * a planar 3D scene. * * @param matches pairs of matches to find fundamental matrix. * @param viewId1 id of first view. * @param viewId2 id of second view. * @return true if estimation succeeded, false otherwise. */ private boolean estimatePlanarFundamentalMatrix(final List<MatchedSamples> matches, final int viewId1, final int viewId2) { if (matches == null) { return false; } final var count = matches.size(); final var leftSamples = new ArrayList<Sample2D>(); final var rightSamples = new ArrayList<Sample2D>(); final var leftPoints = new ArrayList<Point2D>(); final var rightPoints = new ArrayList<Point2D>(); final var qualityScores = new double[count]; final double principalPointX; final double principalPointY; if (configuration.getPairedCamerasEstimatorMethod() == InitialCamerasEstimatorMethod.DUAL_ABSOLUTE_QUADRIC |
File | Line |
---|---|
com/irurueta/ar/slam/AbsoluteOrientationSlamEstimator.java | 621 |
com/irurueta/ar/slam/SlamEstimator.java | 563 |
stateOrientation.normalize(); // velocity stateVelocityX = x[7] = state.getElementAtIndex(7); stateVelocityY = x[8] = state.getElementAtIndex(8); stateVelocityZ = x[9] = state.getElementAtIndex(9); // linear acceleration stateAccelerationX = x[10] = state.getElementAtIndex(10); stateAccelerationY = x[11] = state.getElementAtIndex(11); stateAccelerationZ = x[12] = state.getElementAtIndex(12); // angular velocity stateAngularSpeedX = x[13] = state.getElementAtIndex(13); stateAngularSpeedY = x[14] = state.getElementAtIndex(14); stateAngularSpeedZ = x[15] = state.getElementAtIndex(15); } } |
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 851 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 868 |
a.setElementAt(counter, 2, h31 * h32); // normalize row rowNorm = Math.sqrt(Math.pow(a.getElementAt(counter, 0), 2.0) + Math.pow(a.getElementAt(counter, 1), 2.0) + Math.pow(a.getElementAt(counter, 2), 2.0)); factor = weight / rowNorm; a.setElementAt(counter, 0, a.getElementAt(counter, 0) * factor); a.setElementAt(counter, 1, a.getElementAt(counter, 1) * factor); a.setElementAt(counter, 2, a.getElementAt(counter, 2) * factor); counter++; |
File | Line |
---|---|
com/irurueta/ar/sfm/LMSEHomogeneousSinglePoint3DTriangulator.java | 197 |
com/irurueta/ar/sfm/LMSEInhomogeneousSinglePoint3DTriangulator.java | 198 |
com/irurueta/ar/sfm/WeightedInhomogeneousSinglePoint3DTriangulator.java | 351 |
camera.fixCameraSign(); final var homX = point.getHomX(); final var homY = point.getHomY(); final var homW = point.getHomW(); // pick rows of camera corresponding to different planes // (we do not normalize planes, as it would introduce errors) // 1st camera row (p1T) camera.verticalAxisPlane(verticalAxisPlane); // 2nd camera row (p2T) camera.horizontalAxisPlane(horizontalAxisPlane); // 3rd camera row (p3T) camera.principalPlane(principalPlane); // 1st equation a.setElementAt(row, 0, homX * principalPlane.getA() - homW * verticalAxisPlane.getA()); a.setElementAt(row, 1, homX * principalPlane.getB() - homW * verticalAxisPlane.getB()); a.setElementAt(row, 2, homX * principalPlane.getC() - homW * verticalAxisPlane.getC()); |
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 880 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 1024 |
counter++; } final var decomposer = new SingularValueDecomposer(a); decomposer.decompose(); if (decomposer.getNullity() > 1) { // homographies constitute a degenerate camera movement. // A linear combination of possible IACs exists (i.e. the solution is // not unique up to scale) throw new ImageOfAbsoluteConicEstimatorException(); } final var v = decomposer.getV(); // use last column of V as IAC vector // the last column of V contains IAC matrix (B), which is symmetric // and positive definite, ordered as follows: B11, B12, B22, B13, // B23, B33 final var b11 = v.getElementAt(0, 3); final var b13 = v.getElementAt(1, 3); final var b23 = v.getElementAt(2, 3); final var b33 = v.getElementAt(3, 3); final var b22 = b11 / sqrAspectRatio; // A conic is defined as [A B D] // [B C E] // [D E F] // Since skewness is zero b12 = B = 0.0 return new ImageOfAbsoluteConic(b11, 0.0, b22, b13, b23, b33); } catch (final AlgebraException e) { |
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 1216 |
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 1940 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 931 |
metricReconstructedPoints = new ArrayList<>(); final var points = new ArrayList<Point2D>(); final var numPoints = correctedPoints1.size(); Point3D triangulatedPoint; ReconstructedPoint3D reconstructedPoint; for (var i = 0; i < numPoints; i++) { points.clear(); points.add(correctedPoints1.get(i)); points.add(correctedPoints2.get(i)); triangulator.setPointsAndCameras(points, cameras); triangulatedPoint = triangulator.triangulate(); reconstructedPoint = new ReconstructedPoint3D(); reconstructedPoint.setPoint(triangulatedPoint); // only points reconstructed in front of both cameras are // considered valid final var front1 = camera1.isPointInFrontOfCamera(triangulatedPoint); final var front2 = camera2.isPointInFrontOfCamera(triangulatedPoint); |
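The triangulation loop above is repeated verbatim in three reconstructors. Below is a sketch of the per-point step factored into a helper; the helper name, the import paths and the broad `throws Exception` clause are assumptions, while the triangulator and point calls appear verbatim in the snippet:

```java
import java.util.List;

import com.irurueta.ar.sfm.ReconstructedPoint3D;
import com.irurueta.ar.sfm.SinglePoint3DTriangulator;
import com.irurueta.geometry.PinholeCamera;
import com.irurueta.geometry.Point2D;
import com.irurueta.geometry.Point3D;

final class TriangulationStep {

    private TriangulationStep() {
        // no instances
    }

    /**
     * Triangulates one pair of matched 2D points and wraps the result,
     * mirroring the body of the duplicated loop.
     */
    static ReconstructedPoint3D triangulate(
            final SinglePoint3DTriangulator triangulator,
            final List<Point2D> points,
            final List<PinholeCamera> cameras) throws Exception {
        triangulator.setPointsAndCameras(points, cameras);
        final Point3D triangulatedPoint = triangulator.triangulate();
        final var reconstructedPoint = new ReconstructedPoint3D();
        reconstructedPoint.setPoint(triangulatedPoint);
        return reconstructedPoint;
    }
}
```

The front-of-camera validity check would stay at the call site, since it needs both cameras of the pair.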
File | Line |
---|---|
com/irurueta/ar/slam/ConstantVelocityModelStatePredictor.java | 374 |
com/irurueta/ar/slam/ConstantVelocityModelStatePredictor.java | 877 |
throw new IllegalArgumentException(); } // position final var r = new InhomogeneousPoint3D(x[0], x[1], x[2]); // orientation var q = new Quaternion(x[3], x[4], x[5], x[6]); // linear velocity var vx = x[7]; var vy = x[8]; var vz = x[9]; // angular velocity var wx = x[10]; var wy = x[11]; var wz = x[12]; // position change (control) final var drx = u[0]; final var dry = u[1]; final var drz = u[2]; // linear velocity change (control) final var uvx = u[3]; |
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 244 |
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 988 |
double rowNorm; for (final var homography : homographies) { // convert homography into projective so it can be normalized homography.asMatrix(h); if (t == null) { t = new ProjectiveTransformation2D(h); } else { t.setT(h); } // normalize t.normalize(); // obtain elements of projective transformation matrix // there is no need to retrieve internal matrix h, as we already // hold a reference h11 = h.getElementAt(0, 0); h12 = h.getElementAt(0, 1); h21 = h.getElementAt(1, 0); h22 = h.getElementAt(1, 1); h31 = h.getElementAt(2, 0); h32 = h.getElementAt(2, 1); // fill first equation a.setElementAt(counter, 0, h11 * h12); |
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 393 |
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 988 |
double rowNorm; for (final var homography : homographies) { // convert homography into projective so it can be normalized homography.asMatrix(h); if (t == null) { t = new ProjectiveTransformation2D(h); } else { t.setT(h); } // normalize t.normalize(); // obtain elements of projective transformation matrix // there is no need to retrieve internal matrix h, as we already // hold a reference h11 = h.getElementAt(0, 0); h12 = h.getElementAt(0, 1); h21 = h.getElementAt(1, 0); h22 = h.getElementAt(1, 1); h31 = h.getElementAt(2, 0); h32 = h.getElementAt(2, 1); // fill first equation a.setElementAt(counter, 0, h11 * h12); |
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 538 |
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 988 |
double rowNorm; for (final var homography : homographies) { // convert homography into projective so it can be normalized homography.asMatrix(h); if (t == null) { t = new ProjectiveTransformation2D(h); } else { t.setT(h); } // normalize t.normalize(); // obtain elements of projective transformation matrix // there is no need to retrieve internal matrix h, as we already // hold a reference h11 = h.getElementAt(0, 0); h12 = h.getElementAt(0, 1); h21 = h.getElementAt(1, 0); h22 = h.getElementAt(1, 1); h31 = h.getElementAt(2, 0); h32 = h.getElementAt(2, 1); // fill first equation a.setElementAt(counter, 0, h11 * h12); |
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 678 |
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 988 |
double rowNorm; for (final var homography : homographies) { // convert homography into projective so it can be normalized homography.asMatrix(h); if (t == null) { t = new ProjectiveTransformation2D(h); } else { t.setT(h); } // normalize t.normalize(); // obtain elements of projective transformation matrix // there is no need to retrieve internal matrix h, as we already // hold a reference h11 = h.getElementAt(0, 0); h12 = h.getElementAt(0, 1); h21 = h.getElementAt(1, 0); h22 = h.getElementAt(1, 1); h31 = h.getElementAt(2, 0); h32 = h.getElementAt(2, 1); // fill first equation a.setElementAt(counter, 0, h11 * h12); |
File | Line |
---|---|
com/irurueta/ar/epipolar/EpipolarDistanceFundamentalMatrixComparator.java | 628 |
com/irurueta/ar/epipolar/EpipolarDistanceFundamentalMatrixComparator.java | 713 |
final var xMaxY = -(maxY * l2real.getB() + l2real.getC()) / l2real.getA(); // if epipolar line does not intersect second image, we need // to repeat with a different sample repeat = (((yMinX < minY) && (yMaxX < minY)) || ((yMinX > maxY) && (yMaxX > maxY)) || ((xMinY < minX) && (xMaxY < minX)) || ((xMinY > maxX) && (xMaxY > maxX))); counter++; currentIter++; } if (counter >= nSamples) { continue; } // choose point lying on epipolar line l2real : // m2.getX() * l2real.getA() + m2.getY() * l2real.getB() + // l2real.getC() = 0 // choose random horizontal component within provided disparity: inhomX = m1.getInhomX() + randomizer.nextDouble(minHorizontalDisparity, maxHorizontalDisparity); if (Math.abs(l2real.getB()) > Double.MIN_VALUE) { inhomY = -(inhomX * l2real.getA() + l2real.getC()) / l2real.getB(); |
File | Line |
---|---|
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 1817 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 810 |
final double principalPointY = configuration.getPrincipalPointY(); final var intrinsic1 = new PinholeCameraIntrinsicParameters(intrinsicZeroPrincipalPoint1); intrinsic1.setHorizontalPrincipalPoint(intrinsic1.getHorizontalPrincipalPoint() + principalPointX); intrinsic1.setVerticalPrincipalPoint(intrinsic1.getVerticalPrincipalPoint() + principalPointY); final var intrinsic2 = new PinholeCameraIntrinsicParameters(intrinsicZeroPrincipalPoint2); intrinsic2.setHorizontalPrincipalPoint(intrinsic2.getHorizontalPrincipalPoint() + principalPointX); intrinsic2.setVerticalPrincipalPoint(intrinsic2.getVerticalPrincipalPoint() + principalPointY); // fix fundamental matrix to account for principal point different // from zero fixFundamentalMatrix(fundamentalMatrix, intrinsicZeroPrincipalPoint1, intrinsicZeroPrincipalPoint2, intrinsic1, intrinsic2); return estimateInitialCamerasAndPointsEssential(intrinsic1, intrinsic2); } catch (final Exception e) { return false; } } /** * Estimates initial cameras and reconstructed points using the Dual * Absolute Quadric. * * @return true if cameras and points could be estimated, false if something * failed. */ private boolean estimateInitialCamerasAndPointsDAQ() { try { final var fundamentalMatrix = currentEstimatedFundamentalMatrix.getFundamentalMatrix(); |
File | Line |
---|---|
com/irurueta/ar/slam/PositionPredictor.java | 70 |
com/irurueta/ar/slam/PositionPredictor.java | 488 |
throw new IllegalArgumentException("jacobian wrt r must be 3x3"); } if (jacobianV != null && (jacobianV.getRows() != Point3D.POINT3D_INHOMOGENEOUS_COORDINATES_LENGTH || jacobianV.getColumns() != SPEED_COMPONENTS)) { throw new IllegalArgumentException( "jacobian wrt speed must be 3x3"); } if (jacobianA != null && (jacobianA.getRows() != Point3D.POINT3D_INHOMOGENEOUS_COORDINATES_LENGTH || jacobianA.getColumns() != ACCELERATION_COMPONENTS)) { throw new IllegalArgumentException( "jacobian wrt acceleration must be 3x3"); } if (jacobianR != null) { // set to the identity jacobianR.initialize(0.0); jacobianR.setElementAt(0, 0, 1.0); jacobianR.setElementAt(1, 1, 1.0); jacobianR.setElementAt(2, 2, 1.0); } if (jacobianV != null) { |
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 247 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 539 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 826 |
homography.asMatrix(h); if (t == null) { t = new ProjectiveTransformation2D(h); } else { t.setT(h); } // normalize t.normalize(); // obtain elements of projective transformation matrix // there is no need to retrieve internal matrix h, as we already // hold a reference h11 = h.getElementAt(0, 0); h12 = h.getElementAt(0, 1); h21 = h.getElementAt(1, 0); h22 = h.getElementAt(1, 1); h31 = h.getElementAt(2, 0); h32 = h.getElementAt(2, 1); // fill first equation a.setElementAt(counter, 0, h11 * h12); a.setElementAt(counter, 1, h11 * h22 + h21 * h12); |
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 396 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 389 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 685 |
homography.asMatrix(h); if (t == null) { t = new ProjectiveTransformation2D(h); } else { t.setT(h); } // normalize t.normalize(); // obtain elements of projective transformation matrix // there is no need to retrieve internal matrix h, as we already // hold a reference h11 = h.getElementAt(0, 0); h12 = h.getElementAt(0, 1); h21 = h.getElementAt(1, 0); h22 = h.getElementAt(1, 1); h31 = h.getElementAt(2, 0); h32 = h.getElementAt(2, 1); // fill first equation a.setElementAt(counter, 0, h11 * h12); a.setElementAt(counter, 1, h21 * h22); |
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 541 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 539 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 826 |
homography.asMatrix(h); if (t == null) { t = new ProjectiveTransformation2D(h); } else { t.setT(h); } // normalize t.normalize(); // obtain elements of projective transformation matrix // there is no need to retrieve internal matrix h, as we already // hold a reference h11 = h.getElementAt(0, 0); h12 = h.getElementAt(0, 1); h21 = h.getElementAt(1, 0); h22 = h.getElementAt(1, 1); h31 = h.getElementAt(2, 0); h32 = h.getElementAt(2, 1); // fill first equation a.setElementAt(counter, 0, h11 * h12); a.setElementAt(counter, 1, h11 * h22 + h21 * h12); |
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 681 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 389 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 685 |
homography.asMatrix(h); if (t == null) { t = new ProjectiveTransformation2D(h); } else { t.setT(h); } // normalize t.normalize(); // obtain elements of projective transformation matrix // there is no need to retrieve internal matrix h, as we already // hold a reference h11 = h.getElementAt(0, 0); h12 = h.getElementAt(0, 1); h21 = h.getElementAt(1, 0); h22 = h.getElementAt(1, 1); h31 = h.getElementAt(2, 0); h32 = h.getElementAt(2, 1); // fill first equation a.setElementAt(counter, 0, h11 * h12); a.setElementAt(counter, 1, h21 * h22); |
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 1022 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 1138 |
a.setElementAt(counter, 1, a.getElementAt(counter, 1) / rowNorm); counter++; // fill second equation a.setElementAt(counter, 0, Math.pow(h11, 2.0) - Math.pow(h12, 2.0) + (Math.pow(h21, 2.0) - Math.pow(h22, 2.0)) / sqrAspectRatio); a.setElementAt(counter, 1, Math.pow(h31, 2.0) - Math.pow(h32, 2.0)); // normalize row rowNorm = Math.sqrt(Math.pow(a.getElementAt(counter, 0), 2.0) + Math.pow(a.getElementAt(counter, 1), 2.0)); |
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 835 |
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 1010 |
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 1539 |
final var inliersData = estimator.getInliersData(); if (inliersData != null) { final var numInliers = inliersData.getNumInliers(); final var inliers = inliersData.getInliers(); final var length = inliers.length(); var fundamentalMatrixQualityScore = 0.0; for (i = 0; i < length; i++) { if (inliers.get(i)) { // inlier fundamentalMatrixQualityScore += qualityScores[i] / numInliers; } } currentEstimatedFundamentalMatrix.setQualityScore(fundamentalMatrixQualityScore); currentEstimatedFundamentalMatrix.setInliers(inliers); } // store left/right samples currentEstimatedFundamentalMatrix.setLeftSamples(leftSamples); currentEstimatedFundamentalMatrix.setRightSamples(rightSamples); return true; } catch (final Exception e) { return false; } } /** * Estimates fundamental matrix for provided matches, when 3D points lie in * a planar 3D scene. * * @param matches pairs of matches to find fundamental matrix. * @param viewId1 id of first view. * @param viewId2 id of second view. * @return true if estimation succeeded, false otherwise. */ private boolean estimatePlanarFundamentalMatrix(final List<MatchedSamples> matches, final int viewId1, |
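The inlier-weighted quality score above is recomputed with the same loop at every duplicated site. A sketch of the aggregation in isolation follows; java.util.BitSet is assumed as the type behind getInliers(), matching the get and length calls in the snippet:

```java
import java.util.BitSet;

final class InlierScores {

    private InlierScores() {
        // no instances
    }

    /**
     * Averages the quality scores of the inlier samples, as the duplicated
     * loops do: each inlier contributes qualityScores[i] / numInliers.
     */
    static double fundamentalMatrixQualityScore(
            final BitSet inliers, final double[] qualityScores, final int numInliers) {
        var score = 0.0;
        final var length = inliers.length();
        for (var i = 0; i < length; i++) {
            if (inliers.get(i)) {
                score += qualityScores[i] / numInliers;
            }
        }
        return score;
    }
}
```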
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 978 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 711 |
if (intrinsic1 == null && intrinsic2 == null) { // estimate homography final var homography = homographyEstimator.estimate(); // estimate intrinsic parameters using the Image of Absolute // Conic (IAC) final var homographies = new ArrayList<Transformation2D>(); homographies.add(homography); final var iacEstimator = new LMSEImageOfAbsoluteConicEstimator(homographies); final var iac = iacEstimator.estimate(); intrinsic1 = intrinsic2 = iac.getIntrinsicParameters(); } else if (intrinsic1 == null) { // && intrinsic2 != null intrinsic1 = intrinsic2; } else if (intrinsic2 == null) { // && intrinsic1 != null intrinsic2 = intrinsic1; } fundamentalMatrixEstimator.setLeftIntrinsics(intrinsic1); fundamentalMatrixEstimator.setRightIntrinsics(intrinsic2); fundamentalMatrixEstimator.estimateAndReconstruct(); final var fundamentalMatrix = fundamentalMatrixEstimator.getFundamentalMatrix(); |
File | Line |
---|---|
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 576 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 743 |
final var inliersData = estimator.getInliersData(); if (inliersData != null) { final var numInliers = inliersData.getNumInliers(); final var inliers = inliersData.getInliers(); final var length = inliers.length(); var fundamentalMatrixQualityScore = 0.0; for (i = 0; i < length; i++) { if (inliers.get(i)) { // inlier fundamentalMatrixQualityScore += qualityScores[i] / numInliers; } } estimatedFundamentalMatrix.setQualityScore(fundamentalMatrixQualityScore); estimatedFundamentalMatrix.setInliers(inliers); } // store left/right samples estimatedFundamentalMatrix.setLeftSamples(leftSamples); estimatedFundamentalMatrix.setRightSamples(rightSamples); return true; } catch (final Exception e) { return false; } } /** * Estimates fundamental matrix for provided matches, when 3D points lie in * a planar 3D scene. * * @param matches pairs of matches to find fundamental matrix. * @param viewId1 id of first view. * @param viewId2 id of second view. * @return true if estimation succeeded, false otherwise. */ private boolean estimatePlanarFundamentalMatrix( |
File | Line |
---|---|
com/irurueta/ar/slam/StatePredictor.java | 146 |
com/irurueta/ar/slam/StatePredictor.java | 722 |
com/irurueta/ar/slam/StatePredictor.java | 1018 |
throw new IllegalArgumentException(); } // position final var r = new InhomogeneousPoint3D(x[0], x[1], x[2]); // orientation var q = new Quaternion(x[3], x[4], x[5], x[6]); // linear velocity var vx = x[7]; var vy = x[8]; var vz = x[9]; // linear acceleration var ax = x[10]; var ay = x[11]; var az = x[12]; // angular velocity var wx = x[13]; var wy = x[14]; var wz = x[15]; // linear velocity change (control) final var uvx = u[0]; |
File | Line |
---|---|
com/irurueta/ar/slam/StatePredictor.java | 427 |
com/irurueta/ar/slam/StatePredictor.java | 722 |
throw new IllegalArgumentException(); } // position final var r = new InhomogeneousPoint3D(x[0], x[1], x[2]); // orientation var q = new Quaternion(x[3], x[4], x[5], x[6]); // linear velocity var vx = x[7]; var vy = x[8]; var vz = x[9]; // linear acceleration var ax = x[10]; var ay = x[11]; var az = x[12]; // angular velocity var wx = x[13]; var wy = x[14]; var wz = x[15]; // position change (control) final var drx = u[0]; |
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 706 |
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 722 |
a.setElementAt(counter, 2, h31 * h32); // normalize row rowNorm = Math.sqrt(Math.pow(a.getElementAt(counter, 0), 2.0) + Math.pow(a.getElementAt(counter, 1), 2.0) + Math.pow(a.getElementAt(counter, 2), 2.0)); a.setElementAt(counter, 0, a.getElementAt(counter, 0) / rowNorm); a.setElementAt(counter, 1, a.getElementAt(counter, 1) / rowNorm); a.setElementAt(counter, 2, a.getElementAt(counter, 2) / rowNorm); counter++; |
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 835 |
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 1733 |
final var inliersData = estimator.getInliersData(); if (inliersData != null) { final var numInliers = inliersData.getNumInliers(); final var inliers = inliersData.getInliers(); final var length = inliers.length(); var fundamentalMatrixQualityScore = 0.0; for (i = 0; i < length; i++) { if (inliers.get(i)) { // inlier fundamentalMatrixQualityScore += qualityScores[i] / numInliers; } } currentEstimatedFundamentalMatrix.setQualityScore(fundamentalMatrixQualityScore); currentEstimatedFundamentalMatrix.setInliers(inliers); } // store left/right samples currentEstimatedFundamentalMatrix.setLeftSamples(leftSamples); currentEstimatedFundamentalMatrix.setRightSamples(rightSamples); return true; } catch (final Exception e) { return false; } } /** * Estimates fundamental matrix for provided matches, when 3D points lie in * a planar 3D scene. * * @param matches pairs of matches to find fundamental matrix. * @param viewId1 id of first view. * @param viewId2 id of second view. * @return true if estimation succeeded, false otherwise. */ private boolean estimatePlanarFundamentalMatrix(final List<MatchedSamples> matches, final int viewId1, |
File | Line |
---|---|
com/irurueta/ar/epipolar/refiners/HomogeneousRightEpipoleRefiner.java | 190 |
com/irurueta/ar/epipolar/refiners/InhomogeneousRightEpipoleRefiner.java | 194 |
private final FundamentalMatrix fundMatrix = new FundamentalMatrix(); private final GradientEstimator gradientEstimator = new GradientEstimator(point -> { try { epipole.setCoordinates(point); computeFundamentalMatrix(homography, epipole, fundMatrix); return residual(fundMatrix, leftPoint, rightPoint); } catch (final InvalidFundamentalMatrixException e) { throw new EvaluationException(e); } }); @Override public int getNumberOfDimensions() { return nDims; } @Override public double[] createInitialParametersArray() { return initParams; } @Override public double evaluate(final int i, final double[] point, final double[] params, final double[] derivatives) throws EvaluationException { try { leftPoint.setHomogeneousCoordinates(point[0], point[1], point[2]); |
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 1172 |
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 1265 |
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 1375 |
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 1896 |
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 1988 |
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 2078 |
final var samples1 = currentEstimatedFundamentalMatrix.getLeftSamples(); final var samples2 = currentEstimatedFundamentalMatrix.getRightSamples(); final var points1 = new ArrayList<Point2D>(); final var points2 = new ArrayList<Point2D>(); final var length = samples1.size(); for (var i = 0; i < length; i++) { final var sample1 = samples1.get(i); final var sample2 = samples2.get(i); final var point1 = sample1.getPoint(); final var point2 = sample2.getPoint(); points1.add(point1); points2.add(point2); } |
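Each of the six duplicated sites copies the 2D point out of every sample into a fresh list. A sketch of the shared step follows; the com.irurueta import paths are assumptions, while Sample2D#getPoint appears verbatim above:

```java
import java.util.ArrayList;
import java.util.List;

import com.irurueta.ar.sfm.Sample2D;
import com.irurueta.geometry.Point2D;

final class SamplePoints {

    private SamplePoints() {
        // no instances
    }

    /** Copies the 2D point of each sample into a new list, as above. */
    static List<Point2D> toPoints(final List<Sample2D> samples) {
        final var points = new ArrayList<Point2D>(samples.size());
        for (final var sample : samples) {
            points.add(sample.getPoint());
        }
        return points;
    }
}
```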
File | Line |
---|---|
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 887 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 973 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 1060 |
final var samples1 = estimatedFundamentalMatrix.getLeftSamples(); final var samples2 = estimatedFundamentalMatrix.getRightSamples(); final var points1 = new ArrayList<Point2D>(); final var points2 = new ArrayList<Point2D>(); final var length = samples1.size(); for (var i = 0; i < length; i++) { final var sample1 = samples1.get(i); final var sample2 = samples2.get(i); final var point1 = sample1.getPoint(); final var point2 = sample2.getPoint(); points1.add(point1); points2.add(point2); } |
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 230 |
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 190 |
protected BasePairedViewsSparseReconstructor(final C configuration, final L listener) { if (configuration == null || listener == null) { throw new NullPointerException(); } this.configuration = configuration; this.listener = listener; } /** * Gets configuration for this reconstructor. * * @return configuration for this reconstructor. */ public C getConfiguration() { return configuration; } /** * Gets listener in charge of handling events such as when reconstruction * starts, ends, when certain data is needed or when estimation of data has * been computed. * * @return listener in charge of handling events. */ public L getListener() { return listener; } /** * Indicates whether reconstruction is running or not. * * @return true if reconstruction is running, false if reconstruction has * stopped for any reason. */ public boolean isRunning() { return running; } /** * Indicates whether reconstruction has been cancelled or not. * * @return true if reconstruction has been cancelled, false otherwise. */ public boolean isCancelled() { return cancelled; } /** * Indicates whether reconstruction has failed or not. * * @return true if reconstruction has failed, false otherwise. */ public boolean hasFailed() { return failed; } /** * Indicates whether the reconstruction has finished. * * @return true if reconstruction has finished, false otherwise. */ public boolean isFinished() { return finished; } /** * Gets counter of number of processed views. * * @return counter of number of processed views. */ public int getViewCount() { return viewCount; } /** * Gets estimated fundamental matrix for current view. * This fundamental matrix relates current view with the previously processed one. * * @return current estimated fundamental matrix. */ public EstimatedFundamentalMatrix getCurrentEstimatedFundamentalMatrix() { return currentEstimatedFundamentalMatrix; } /** * Gets estimated Euclidean camera for current view (i.e. with actual scale). * * @return current estimated Euclidean camera. */ public EstimatedCamera getCurrentEuclideanEstimatedCamera() { |
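The duplicated constructors above throw NullPointerException by hand; the standard library offers an equivalent one-liner, sketched here on a hypothetical generic holder:

```java
import java.util.Objects;

/** Sketch of the duplicated constructor body using java.util.Objects. */
final class GuardedPair<C, L> {

    private final C configuration;
    private final L listener;

    GuardedPair(final C configuration, final L listener) {
        // throws NullPointerException for null arguments, matching the
        // duplicated checks in both reconstructor constructors
        this.configuration = Objects.requireNonNull(configuration);
        this.listener = Objects.requireNonNull(listener);
    }
}
```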
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/PROMedSDualAbsoluteQuadricRobustEstimator.java | 388 |
com/irurueta/ar/calibration/estimators/PROSACDualAbsoluteQuadricRobustEstimator.java | 355 |
com/irurueta/ar/calibration/estimators/PROSACImageOfAbsoluteConicRobustEstimator.java | 369 |
com/irurueta/ar/calibration/estimators/PROSACRadialDistortionRobustEstimator.java | 473 |
com/irurueta/ar/sfm/PROMedSRobustSinglePoint3DTriangulator.java | 428 |
com/irurueta/ar/sfm/PROSACRobustSinglePoint3DTriangulator.java | 387 |
listener.onEstimateProgressChange(PROMedSDualAbsoluteQuadricRobustEstimator.this, progress); } } @Override public double[] getQualityScores() { return qualityScores; } }); try { locked = true; innerEstimator.setConfidence(confidence); innerEstimator.setMaxIterations(maxIterations); innerEstimator.setProgressDelta(progressDelta); return innerEstimator.estimate(); } catch (final com.irurueta.numerical.LockedException e) { throw new LockedException(e); } catch (final com.irurueta.numerical.NotReadyException e) { throw new NotReadyException(e); } finally { locked = false; } } /** * Returns method being used for robust estimation. * * @return method being used for robust estimation. */ @Override public RobustEstimatorMethod getMethod() { return RobustEstimatorMethod.PROMEDS; |
File | Line |
---|---|
com/irurueta/ar/epipolar/estimators/PROMedSFundamentalMatrixRobustEstimator.java | 501 |
com/irurueta/ar/epipolar/estimators/RANSACFundamentalMatrixRobustEstimator.java | 309 |
} @Override public int getTotalSamples() { return leftPoints.size(); } @Override public int getSubsetSize() { return getMinRequiredPoints(); } @Override public void estimatePreliminarSolutions( final int[] samplesIndices, final List<FundamentalMatrix> solutions) { subsetLeftPoints.clear(); subsetRightPoints.clear(); for (final var samplesIndex : samplesIndices) { subsetLeftPoints.add(leftPoints.get(samplesIndex)); subsetRightPoints.add(rightPoints.get(samplesIndex)); } nonRobustEstimate(solutions, subsetLeftPoints, subsetRightPoints); } @Override public double computeResidual(final FundamentalMatrix currentEstimation, final int i) { |
File | Line |
---|---|
com/irurueta/ar/epipolar/estimators/LMedSFundamentalMatrixRobustEstimator.java | 270 |
com/irurueta/ar/epipolar/estimators/RANSACFundamentalMatrixRobustEstimator.java | 311 |
@Override public int getTotalSamples() { return leftPoints.size(); } @Override public int getSubsetSize() { return getMinRequiredPoints(); } @Override public void estimatePreliminarSolutions( final int[] samplesIndices, final List<FundamentalMatrix> solutions) { subsetLeftPoints.clear(); subsetRightPoints.clear(); for (final var samplesIndex : samplesIndices) { subsetLeftPoints.add(leftPoints.get(samplesIndex)); subsetRightPoints.add(rightPoints.get(samplesIndex)); } nonRobustEstimate(solutions, subsetLeftPoints, subsetRightPoints); } @Override public double computeResidual(final FundamentalMatrix currentEstimation, final int i) { |
File | Line |
---|---|
com/irurueta/ar/epipolar/estimators/MSACFundamentalMatrixRobustEstimator.java | 298 |
com/irurueta/ar/epipolar/estimators/PROMedSFundamentalMatrixRobustEstimator.java | 574 |
} }); try { locked = true; inliersData = null; innerEstimator.setConfidence(confidence); innerEstimator.setMaxIterations(maxIterations); innerEstimator.setProgressDelta(progressDelta); final var result = innerEstimator.estimate(); inliersData = innerEstimator.getInliersData(); return attemptRefine(result); } catch (final com.irurueta.numerical.LockedException e) { throw new LockedException(e); } catch (final com.irurueta.numerical.NotReadyException e) { throw new NotReadyException(e); } finally { locked = false; } } /** * Returns method being used for robust estimation. * * @return method being used for robust estimation. */ @Override public RobustEstimatorMethod getMethod() { return RobustEstimatorMethod.MSAC; |
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 1191 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 906 |
final List<Point2D> correctedPoints2; if (corrector != null) { corrector.setLeftAndRightPoints(points1, points2); corrector.correct(); correctedPoints1 = corrector.getLeftCorrectedPoints(); correctedPoints2 = corrector.getRightCorrectedPoints(); } else { correctedPoints1 = points1; correctedPoints2 = points2; } // triangulate points final SinglePoint3DTriangulator triangulator; if (configuration.getDaqUseHomogeneousPointTriangulator()) { triangulator = SinglePoint3DTriangulator.create(Point3DTriangulatorType.LMSE_HOMOGENEOUS_TRIANGULATOR); } else { triangulator = SinglePoint3DTriangulator.create( Point3DTriangulatorType.LMSE_INHOMOGENEOUS_TRIANGULATOR); } final var cameras = new ArrayList<PinholeCamera>(); cameras.add(camera1); cameras.add(camera2); |
File | Line |
---|---|
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 1915 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 906 |
final List<Point2D> correctedPoints2; if (corrector != null) { corrector.setLeftAndRightPoints(points1, points2); corrector.correct(); correctedPoints1 = corrector.getLeftCorrectedPoints(); correctedPoints2 = corrector.getRightCorrectedPoints(); } else { correctedPoints1 = points1; correctedPoints2 = points2; } // triangulate points final SinglePoint3DTriangulator triangulator; if (configuration.getDaqUseHomogeneousPointTriangulator()) { triangulator = SinglePoint3DTriangulator.create(Point3DTriangulatorType.LMSE_HOMOGENEOUS_TRIANGULATOR); } else { triangulator = SinglePoint3DTriangulator.create( Point3DTriangulatorType.LMSE_INHOMOGENEOUS_TRIANGULATOR); } final var cameras = new ArrayList<PinholeCamera>(); cameras.add(camera1); cameras.add(camera2); |
File | Line |
---|---|
com/irurueta/ar/slam/StatePredictor.java | 183 |
com/irurueta/ar/slam/StatePredictor.java | 469 |
com/irurueta/ar/slam/StatePredictor.java | 762 |
com/irurueta/ar/slam/StatePredictor.java | 1063 |
final var uwz = u[8]; try { // update velocity final var vv = new Matrix(SPEED_COMPONENTS, SPEED_COMPONENTS); final var va = new Matrix(SPEED_COMPONENTS, ACCELERATION_COMPONENTS); final var v = VelocityPredictor.predict(vx, vy, vz, ax, ay, az, dt, vv, va); // update position Matrix rr = null; Matrix rv = null; Matrix ra = null; if (jacobianX != null) { rr = new Matrix(Point3D.POINT3D_INHOMOGENEOUS_COORDINATES_LENGTH, Point3D.POINT3D_INHOMOGENEOUS_COORDINATES_LENGTH); rv = new Matrix(Point3D.POINT3D_INHOMOGENEOUS_COORDINATES_LENGTH, SPEED_COMPONENTS); ra = new Matrix(Point3D.POINT3D_INHOMOGENEOUS_COORDINATES_LENGTH, ACCELERATION_COMPONENTS); } PositionPredictor.predict(r, vx, vy, vz, ax, ay, az, dt, r, rr, rv, ra); |
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/ImageOfAbsoluteConicEstimator.java | 273 |
com/irurueta/ar/calibration/estimators/KruppaDualImageOfAbsoluteConicEstimator.java | 222 |
} /** * Returns boolean indicating whether aspect ratio of focal distances (i.e. * vertical focal distance divided by horizontal focal distance) is known or * not. * Notice that focal distance aspect ratio is not related to image size * aspect ratio. Typically, CCD sensor cells are square and hence aspect * ratio of focal distances is known and equal to 1. * This value is only taken into account if skewness is assumed to be zero, * otherwise it is ignored. * * @return true if focal distance aspect ratio is known, false otherwise. */ public boolean isFocalDistanceAspectRatioKnown() { return focalDistanceAspectRatioKnown; } /** * Sets value indicating whether aspect ratio of focal distances (i.e. * vertical focal distance divided by horizontal focal distance) is known or * not. * Notice that focal distance aspect ratio is not related to image size * aspect ratio. Typically, CCD sensor cells are square and hence aspect * ratio of focal distances is known and equal to 1. * This value is only taken into account if skewness is assumed to be zero, * otherwise it is ignored. * * @param focalDistanceAspectRatioKnown true if focal distance aspect ratio * is known, false otherwise. * @throws LockedException if estimator is locked. */ public void setFocalDistanceAspectRatioKnown(final boolean focalDistanceAspectRatioKnown) throws LockedException { if (isLocked()) { throw new LockedException(); } this.focalDistanceAspectRatioKnown = focalDistanceAspectRatioKnown; } /** * Returns aspect ratio of focal distances (i.e. vertical focal distance * divided by horizontal focal distance). * This value is only taken into account if skewness is assumed to be zero * and focal distance aspect ratio is marked as known, otherwise it is * ignored. * By default, this is 1.0, since it is taken into account that typically * CCD sensor cells are square and hence the aspect ratio of focal distances is * known and equal to 1. * Notice that focal distance aspect ratio is not related to image size * aspect ratio. * Notice that a negative aspect ratio indicates that the vertical axis is * reversed. This can be useful in some situations where image vertical * coordinates are reversed with respect to the physical world (i.e. in computer * graphics typically image vertical coordinates go downwards, while in the * physical world they go upwards). * * @return aspect ratio of focal distances. */ public double getFocalDistanceAspectRatio() { return focalDistanceAspectRatio; } /** * Sets aspect ratio of focal distances (i.e. vertical focal distance * divided by horizontal focal distance). * This value is only taken into account if skewness is assumed to be zero * and focal distance aspect ratio is marked as known, otherwise it is * ignored. * By default, this is 1.0, since it is taken into account that typically * CCD sensor cells are square and hence the aspect ratio of focal distances is * known and equal to 1. * Notice that focal distance aspect ratio is not related to image size * aspect ratio. * Notice that a negative aspect ratio indicates that the vertical axis is * reversed. This can be useful in some situations where image vertical * coordinates are reversed with respect to the physical world (i.e. in computer * graphics typically image vertical coordinates go downwards, while in the * physical world they go upwards). * * @param focalDistanceAspectRatio aspect ratio of focal distances to be * set. * @throws LockedException if estimator is locked. 
* @throws IllegalArgumentException if focal distance aspect ratio is too * close to zero, as it might produce numerical instabilities. */ public void setFocalDistanceAspectRatio(final double focalDistanceAspectRatio) throws LockedException { if (isLocked()) { throw new LockedException(); } if (Math.abs(focalDistanceAspectRatio) < MIN_ABS_FOCAL_DISTANCE_ASPECT_RATIO) { throw new IllegalArgumentException(); } this.focalDistanceAspectRatio = focalDistanceAspectRatio; } /** * Indicates whether this instance is locked. * * @return true if this estimator is busy estimating an IAC, false * otherwise. */ public boolean isLocked() { return locked; } /** * Returns listener to be notified of events such as when estimation starts, * ends or estimation progress changes. * * @return listener to be notified of events. */ public ImageOfAbsoluteConicEstimatorListener getListener() { |
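As a quick worked example of the focal distance aspect ratio described above (all values are illustrative):

```java
public final class AspectRatioExample {

    public static void main(final String[] args) {
        // vertical focal distance divided by horizontal focal distance
        final var horizontalFocalDistance = 600.0; // pixels (illustrative)
        final var verticalFocalDistance = 600.0;   // square sensor cells
        final var focalDistanceAspectRatio = verticalFocalDistance / horizontalFocalDistance;
        System.out.println(focalDistanceAspectRatio); // 1.0

        // a negative ratio models a reversed vertical axis, e.g. image
        // coordinates growing downwards while world coordinates grow upwards
        System.out.println(-focalDistanceAspectRatio); // -1.0
    }
}
```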
File | Line |
---|---|
com/irurueta/ar/sfm/BaseSlamPairedViewsSparseReconstructor.java | 322 |
com/irurueta/ar/sfm/PairedViewsSparseReconstructor.java | 137 |
euclideanReconstructedPoint = new ReconstructedPoint3D(); euclideanReconstructedPoint.setPoint(euclideanPoint); euclideanReconstructedPoint.setInlier(metricReconstructedPoint.isInlier()); euclideanReconstructedPoint.setId(metricReconstructedPoint.getId()); euclideanReconstructedPoint.setColorData(metricReconstructedPoint.getColorData()); if (metricReconstructedPoint.getCovariance() != null) { euclideanReconstructedPoint.setCovariance(metricReconstructedPoint.getCovariance() .multiplyByScalarAndReturnNew(sqrScale)); } euclideanReconstructedPoint.setQualityScore(metricReconstructedPoint.getQualityScore()); euclideanReconstructedPoints.add(euclideanReconstructedPoint); } } catch (final AlgebraException e) { return false; } return super.transformPairOfCamerasAndPoints(isInitialPairOfViews, hasAbsoluteOrientation); } |
File | Line |
---|---|
com/irurueta/ar/sfm/BaseSlamPairedViewsSparseReconstructor.java | 470 |
com/irurueta/ar/sfm/BaseSlamSparseReconstructor.java | 418 |
final var positionZ = lastPosZ + slamEstimator.getStatePositionZ(); slamPosition.setInhomogeneousCoordinates(positionX, positionY, positionZ); final var quaternionA = slamEstimator.getStateQuaternionA(); final var quaternionB = slamEstimator.getStateQuaternionB(); final var quaternionC = slamEstimator.getStateQuaternionC(); final var quaternionD = slamEstimator.getStateQuaternionD(); slamRotation.setA(quaternionA); slamRotation.setB(quaternionB); slamRotation.setC(quaternionC); slamRotation.setD(quaternionD); slamCamera.setIntrinsicAndExtrinsicParameters(intrinsicParameters, slamRotation, slamPosition); //noinspection unchecked listener.onSlamCameraEstimated((R) this, slamCamera); } catch (final GeometryException ignore) { // do nothing } } } |
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 247 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 960 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 1106 |
homography.asMatrix(h); if (t == null) { t = new ProjectiveTransformation2D(h); } else { t.setT(h); } // normalize t.normalize(); // obtain elements of projective transformation matrix // there is no need to retrieve internal matrix h, as we already // hold a reference h11 = h.getElementAt(0, 0); h12 = h.getElementAt(0, 1); h21 = h.getElementAt(1, 0); h22 = h.getElementAt(1, 1); h31 = h.getElementAt(2, 0); h32 = h.getElementAt(2, 1); // fill first equation a.setElementAt(counter, 0, h11 * h12); |
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 396 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 960 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 1106 |
homography.asMatrix(h); if (t == null) { t = new ProjectiveTransformation2D(h); } else { t.setT(h); } // normalize t.normalize(); // obtain elements of projective transformation matrix // there is no need to retrieve internal matrix h, as we already // hold a reference h11 = h.getElementAt(0, 0); h12 = h.getElementAt(0, 1); h21 = h.getElementAt(1, 0); h22 = h.getElementAt(1, 1); h31 = h.getElementAt(2, 0); h32 = h.getElementAt(2, 1); // fill first equation a.setElementAt(counter, 0, h11 * h12); |
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 541 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 960 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 1106 |
```java
homography.asMatrix(h);
if (t == null) {
    t = new ProjectiveTransformation2D(h);
} else {
    t.setT(h);
}

// normalize
t.normalize();

// obtain elements of projective transformation matrix
// there is no need to retrieve internal matrix h, as we already
// hold a reference
h11 = h.getElementAt(0, 0);
h12 = h.getElementAt(0, 1);
h21 = h.getElementAt(1, 0);
h22 = h.getElementAt(1, 1);
h31 = h.getElementAt(2, 0);
h32 = h.getElementAt(2, 1);

// fill first equation
a.setElementAt(counter, 0, h11 * h12);
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 681 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 960 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 1106 |
```java
homography.asMatrix(h);
if (t == null) {
    t = new ProjectiveTransformation2D(h);
} else {
    t.setT(h);
}

// normalize
t.normalize();

// obtain elements of projective transformation matrix
// there is no need to retrieve internal matrix h, as we already
// hold a reference
h11 = h.getElementAt(0, 0);
h12 = h.getElementAt(0, 1);
h21 = h.getElementAt(1, 0);
h22 = h.getElementAt(1, 1);
h31 = h.getElementAt(2, 0);
h32 = h.getElementAt(2, 1);

// fill first equation
a.setElementAt(counter, 0, h11 * h12);
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 815 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 389 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 539 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 685 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 826 |
```java
homography.asMatrix(h);
if (t == null) {
    t = new ProjectiveTransformation2D(h);
} else {
    t.setT(h);
}

// normalize
t.normalize();

// obtain elements of projective transformation matrix
// there is no need to retrieve internal matrix h, as we already
// hold a reference
h11 = h.getElementAt(0, 0);
h12 = h.getElementAt(0, 1);
h21 = h.getElementAt(1, 0);
h22 = h.getElementAt(1, 1);
h31 = h.getElementAt(2, 0);
h32 = h.getElementAt(2, 1);

// fill first equation
a.setElementAt(counter, 0, h11 * h12 + h21 * h22 / sqrAspectRatio);
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 991 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 389 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 539 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 685 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 826 |
```java
homography.asMatrix(h);
if (t == null) {
    t = new ProjectiveTransformation2D(h);
} else {
    t.setT(h);
}

// normalize
t.normalize();

// obtain elements of projective transformation matrix
// there is no need to retrieve internal matrix h, as we already
// hold a reference
h11 = h.getElementAt(0, 0);
h12 = h.getElementAt(0, 1);
h21 = h.getElementAt(1, 0);
h22 = h.getElementAt(1, 1);
h31 = h.getElementAt(2, 0);
h32 = h.getElementAt(2, 1);

// fill first equation
a.setElementAt(counter, 0, h11 * h12 + h21 * h22 / sqrAspectRatio);
```
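All of the groups above duplicate the same prologue: render the homography as a matrix, normalize it, and pull out the six entries used to build each equation row; only the final `setElementAt` call differs per estimator. A minimal refactoring sketch follows. It assumes the `Matrix` type from `com.irurueta.algebra` and the transformation types from `com.irurueta.geometry` (the same calls appear in the duplicated code); the class and method names are hypothetical, not part of the library, and the caller is assumed to pass a preallocated, non-null `ProjectiveTransformation2D` instead of creating it lazily.

```java
import com.irurueta.algebra.Matrix;
import com.irurueta.geometry.ProjectiveTransformation2D;
import com.irurueta.geometry.Transformation2D;

// Hypothetical shared helper for the duplicated homography prologue.
final class HomographyRows {
    private HomographyRows() {
        // utility class
    }

    /**
     * Normalizes the homography and copies the six matrix entries used by
     * every IAC equation into {@code elems} as h11, h12, h21, h22, h31, h32.
     */
    static void extractHomographyElements(
            final Transformation2D homography, final Matrix h,
            final ProjectiveTransformation2D t, final double[] elems) {
        homography.asMatrix(h);
        t.setT(h);
        t.normalize();

        elems[0] = h.getElementAt(0, 0); // h11
        elems[1] = h.getElementAt(0, 1); // h12
        elems[2] = h.getElementAt(1, 0); // h21
        elems[3] = h.getElementAt(1, 1); // h22
        elems[4] = h.getElementAt(2, 0); // h31
        elems[5] = h.getElementAt(2, 1); // h32
    }
}
```

Each estimator would then keep only its own row-filling arithmetic, which is the part that actually differs between the LMSE and weighted variants.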
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMedSDualAbsoluteQuadricRobustEstimator.java | 265 |
com/irurueta/ar/calibration/estimators/LMedSImageOfAbsoluteConicRobustEstimator.java | 272 |
com/irurueta/ar/calibration/estimators/LMedSRadialDistortionRobustEstimator.java | 347 |
com/irurueta/ar/sfm/LMedSRobustSinglePoint3DTriangulator.java | 304 |
```java
                listener.onEstimateProgressChange(LMedSDualAbsoluteQuadricRobustEstimator.this, progress);
            }
        }
});

try {
    locked = true;
    innerEstimator.setConfidence(confidence);
    innerEstimator.setMaxIterations(maxIterations);
    innerEstimator.setProgressDelta(progressDelta);
    innerEstimator.setStopThreshold(stopThreshold);
    return innerEstimator.estimate();
} catch (final com.irurueta.numerical.LockedException e) {
    throw new LockedException(e);
} catch (final com.irurueta.numerical.NotReadyException e) {
    throw new NotReadyException(e);
} finally {
    locked = false;
}
}

/**
 * Returns method being used for robust estimation.
 *
 * @return method being used for robust estimation.
 */
@Override
public RobustEstimatorMethod getMethod() {
    return RobustEstimatorMethod.LMEDS;
}
}
```
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 1266 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 1061 |
```java
final var samples2 = currentEstimatedFundamentalMatrix.getRightSamples();

final var points1 = new ArrayList<Point2D>();
final var points2 = new ArrayList<Point2D>();
final var length = samples1.size();
for (var i = 0; i < length; i++) {
    final var sample1 = samples1.get(i);
    final var sample2 = samples2.get(i);

    final var point1 = sample1.getPoint();
    final var point2 = sample2.getPoint();

    points1.add(point1);
    points2.add(point2);
}

try {
    final var estimator = new DualImageOfAbsoluteConicInitialCamerasEstimator(fundamentalMatrix, points1,
```
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 1376 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 974 |
```java
final var samples2 = currentEstimatedFundamentalMatrix.getRightSamples();

final var points1 = new ArrayList<Point2D>();
final var points2 = new ArrayList<Point2D>();
final var length = samples1.size();
for (var i = 0; i < length; i++) {
    final var sample1 = samples1.get(i);
    final var sample2 = samples2.get(i);

    final var point1 = sample1.getPoint();
    final var point2 = sample2.getPoint();

    points1.add(point1);
    points2.add(point2);
}

try {
    final var estimator = new EssentialMatrixInitialCamerasEstimator(fundamentalMatrix, intrinsic1, intrinsic2,
```
File | Line |
---|---|
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 1989 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 1061 |
```java
final var samples2 = currentEstimatedFundamentalMatrix.getRightSamples();

final var points1 = new ArrayList<Point2D>();
final var points2 = new ArrayList<Point2D>();
final var length = samples1.size();
for (var i = 0; i < length; i++) {
    final var sample1 = samples1.get(i);
    final var sample2 = samples2.get(i);

    final var point1 = sample1.getPoint();
    final var point2 = sample2.getPoint();

    points1.add(point1);
    points2.add(point2);
}

try {
    final var estimator = new DualImageOfAbsoluteConicInitialCamerasEstimator(fundamentalMatrix, points1,
```
File | Line |
---|---|
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 2079 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 974 |
```java
final var samples2 = currentEstimatedFundamentalMatrix.getRightSamples();

final var points1 = new ArrayList<Point2D>();
final var points2 = new ArrayList<Point2D>();
final var length = samples1.size();
for (var i = 0; i < length; i++) {
    final var sample1 = samples1.get(i);
    final var sample2 = samples2.get(i);

    final var point1 = sample1.getPoint();
    final var point2 = sample2.getPoint();

    points1.add(point1);
    points2.add(point2);
}

try {
    final var estimator = new EssentialMatrixInitialCamerasEstimator(fundamentalMatrix, intrinsic1, intrinsic2,
```
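The four groups above repeat the same loop that unpacks matched 2D points from the fundamental-matrix samples before constructing an initial-cameras estimator; only the estimator constructor differs. A sketch of a shared helper is below, assuming the `Sample2D` type from `com.irurueta.ar.sfm` (its `getPoint()` call appears in the duplicated code); the class and method names are hypothetical.

```java
import com.irurueta.ar.sfm.Sample2D;
import com.irurueta.geometry.Point2D;
import java.util.List;

// Hypothetical helper collecting matched 2D points of both views from the
// samples attached to an estimated fundamental matrix.
final class MatchedPoints {
    private MatchedPoints() {
        // utility class
    }

    static void collectMatchedPoints(
            final List<Sample2D> samples1, final List<Sample2D> samples2,
            final List<Point2D> points1, final List<Point2D> points2) {
        final var length = samples1.size();
        for (var i = 0; i < length; i++) {
            // samples are assumed to be index-aligned across both views
            points1.add(samples1.get(i).getPoint());
            points2.add(samples2.get(i).getPoint());
        }
    }
}
```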
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 425 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 573 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 596 |
```java
        + Math.pow(a.getElementAt(counter, 5), 2.0));
factor = weight / rowNorm;

a.setElementAt(counter, 0, a.getElementAt(counter, 0) * factor);
a.setElementAt(counter, 1, a.getElementAt(counter, 1) * factor);
a.setElementAt(counter, 2, a.getElementAt(counter, 2) * factor);
a.setElementAt(counter, 3, a.getElementAt(counter, 3) * factor);
a.setElementAt(counter, 4, a.getElementAt(counter, 4) * factor);
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 451 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 573 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 596 |
```java
        + Math.pow(a.getElementAt(counter, 5), 2.0));
factor = weight / rowNorm;

a.setElementAt(counter, 0, a.getElementAt(counter, 0) * factor);
a.setElementAt(counter, 1, a.getElementAt(counter, 1) * factor);
a.setElementAt(counter, 2, a.getElementAt(counter, 2) * factor);
a.setElementAt(counter, 3, a.getElementAt(counter, 3) * factor);
a.setElementAt(counter, 4, a.getElementAt(counter, 4) * factor);
```
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 1074 |
com/irurueta/ar/sfm/BaseSparseReconstructor.java | 1851 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 844 |
```java
estimator.setAspectRatio(configuration.getPairedCamerasAspectRatio());
estimator.estimate();

final var camera1 = estimator.getEstimatedLeftCamera();
final var camera2 = estimator.getEstimatedRightCamera();

camera1.decompose();
camera2.decompose();

final var intrinsicZeroPrincipalPoint1 = camera1.getIntrinsicParameters();
final var intrinsicZeroPrincipalPoint2 = camera2.getIntrinsicParameters();

final var principalPointX = configuration.getPrincipalPointX();
final var principalPointY = configuration.getPrincipalPointY();

final var intrinsic1 = new PinholeCameraIntrinsicParameters(intrinsicZeroPrincipalPoint1);
intrinsic1.setHorizontalPrincipalPoint(intrinsic1.getHorizontalPrincipalPoint() + principalPointX);
intrinsic1.setVerticalPrincipalPoint(intrinsic1.getVerticalPrincipalPoint() + principalPointY);
```
File | Line |
---|---|
com/irurueta/ar/sfm/BasePairedViewsSparseReconstructor.java | 1129 |
com/irurueta/ar/sfm/BaseTwoViewsSparseReconstructor.java | 797 |
```java
estimator.setAspectRatio(configuration.getPairedCamerasAspectRatio());
estimator.estimate();

final var camera1 = estimator.getEstimatedLeftCamera();
final var camera2 = estimator.getEstimatedRightCamera();

camera1.decompose();
camera2.decompose();

final var intrinsicZeroPrincipalPoint1 = camera1.getIntrinsicParameters();
final var intrinsicZeroPrincipalPoint2 = camera2.getIntrinsicParameters();

final var principalPointX = configuration.getPrincipalPointX();
final var principalPointY = configuration.getPrincipalPointY();

final var intrinsic1 = new PinholeCameraIntrinsicParameters(intrinsicZeroPrincipalPoint1);
intrinsic1.setHorizontalPrincipalPoint(intrinsic1.getHorizontalPrincipalPoint() + principalPointX);
intrinsic1.setVerticalPrincipalPoint(intrinsic1.getVerticalPrincipalPoint() + principalPointY);
```
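Both groups adjust intrinsics that were estimated under a zero-principal-point assumption by shifting the principal point with the configured offsets, once per camera. A sketch of a shared helper follows; it relies only on the `PinholeCameraIntrinsicParameters` copy constructor and setters already visible in the duplicated code, and the helper name is hypothetical.

```java
import com.irurueta.geometry.PinholeCameraIntrinsicParameters;

// Hypothetical helper turning zero-principal-point intrinsics into intrinsics
// with the configured principal point offset applied.
final class IntrinsicsUtils {
    private IntrinsicsUtils() {
        // utility class
    }

    static PinholeCameraIntrinsicParameters shiftPrincipalPoint(
            final PinholeCameraIntrinsicParameters zeroPrincipalPointIntrinsic,
            final double principalPointX, final double principalPointY) {
        // copy, then translate the principal point by the configured offsets
        final var intrinsic = new PinholeCameraIntrinsicParameters(zeroPrincipalPointIntrinsic);
        intrinsic.setHorizontalPrincipalPoint(intrinsic.getHorizontalPrincipalPoint() + principalPointX);
        intrinsic.setVerticalPrincipalPoint(intrinsic.getVerticalPrincipalPoint() + principalPointY);
        return intrinsic;
    }
}
```

Calling this once for each camera would collapse the duplicated block to two lines per reconstructor.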
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 306 |
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 445 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 443 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 589 |
```java
a.setElementAt(counter, 5, Math.pow(h31, 2.0) - Math.pow(h32, 2.0));

// normalize row
rowNorm = Math.sqrt(Math.pow(a.getElementAt(counter, 0), 2.0)
        + Math.pow(a.getElementAt(counter, 1), 2.0)
        + Math.pow(a.getElementAt(counter, 2), 2.0)
        + Math.pow(a.getElementAt(counter, 3), 2.0)
        + Math.pow(a.getElementAt(counter, 4), 2.0)
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 275 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 417 |
com/irurueta/ar/calibration/estimators/WeightedImageOfAbsoluteConicEstimator.java | 443 |
```java
a.setElementAt(counter, 5, h31 * h32);

// normalize row
rowNorm = Math.sqrt(Math.pow(a.getElementAt(counter, 0), 2.0)
        + Math.pow(a.getElementAt(counter, 1), 2.0)
        + Math.pow(a.getElementAt(counter, 2), 2.0)
        + Math.pow(a.getElementAt(counter, 3), 2.0)
        + Math.pow(a.getElementAt(counter, 4), 2.0)
        + Math.pow(a.getElementAt(counter, 5), 2.0));
```
File | Line |
---|---|
com/irurueta/ar/epipolar/estimators/FundamentalMatrixRobustEstimator.java | 728 |
com/irurueta/ar/epipolar/refiners/RightEpipoleRefiner.java | 240 |
```java
protected double residual(
        final FundamentalMatrix fundamentalMatrix, final Point2D leftPoint, final Point2D rightPoint) {
    try {
        leftPoint.normalize();
        rightPoint.normalize();
        fundamentalMatrix.normalize();
        fundamentalMatrix.leftEpipolarLine(rightPoint, testLine);
        final var leftDistance = Math.abs(testLine.signedDistance(leftPoint));
        fundamentalMatrix.rightEpipolarLine(leftPoint, testLine);
        final var rightDistance = Math.abs(testLine.signedDistance(rightPoint));
        // return average distance as an error residual
        return 0.5 * (leftDistance + rightDistance);
    } catch (final NotReadyException e) {
        return Double.MAX_VALUE;
    }
}

/**
 * Estimates a fundamental matrix using a non-robust method and provided
 * subset of matched points and stores the solution in provided array of
 * solutions.
 *
 * @param solutions list where solutions will be stored.
 * @param subsetLeftPoints subset of left view matched points.
 * @param subsetRightPoints subset of right view matched points.
 */
protected void nonRobustEstimate(
```
File | Line |
---|---|
com/irurueta/ar/sfm/LMSEHomogeneousSinglePoint3DTriangulator.java | 251 |
com/irurueta/ar/sfm/WeightedHomogeneousSinglePoint3DTriangulator.java | 407 |
```java
    }
}

// make SVD to find solution of A * M = 0
final var decomposer = new SingularValueDecomposer(a);
decomposer.decompose();

if (decomposer.getNullity() > 1) {
    // degenerate case. Unique solution (up to scale) cannot be found
    throw new Point3DTriangulationException();
}

final var v = decomposer.getV();

// last column of v will contain homogeneous coordinates of
// triangulated point
result.setHomogeneousCoordinates(v.getElementAt(0, 3), v.getElementAt(1, 3), v.getElementAt(2, 3),
        v.getElementAt(3, 3));

if (listener != null) {
    listener.onTriangulateEnd(this);
}
} catch (final AlgebraException | CameraException e) {
```
File | Line |
---|---|
com/irurueta/ar/slam/AbsoluteOrientationConstantVelocityModelSlamEstimator.java | 562 |
com/irurueta/ar/slam/ConstantVelocityModelSlamEstimator.java | 510 |
com/irurueta/ar/slam/SlamEstimator.java | 544 |
```java
x[12] = state.getElementAtIndex(12);
}

/**
 * Updates state data of the device by using state matrix obtained from
 * Kalman filter after correction.
 *
 * @param state state matrix obtained from Kalman filter.
 */
private void updateCorrectedState(final Matrix state) {
    // position
    statePositionX = x[0] = state.getElementAtIndex(0);
    statePositionY = x[1] = state.getElementAtIndex(1);
    statePositionZ = x[2] = state.getElementAtIndex(2);

    // quaternion
    stateQuaternionA = x[3] = state.getElementAtIndex(3);
    stateQuaternionB = x[4] = state.getElementAtIndex(4);
    stateQuaternionC = x[5] = state.getElementAtIndex(5);
    stateQuaternionD = x[6] = state.getElementAtIndex(6);
```
File | Line |
---|---|
com/irurueta/ar/slam/ConstantVelocityModelStatePredictor.java | 194 |
com/irurueta/ar/slam/StatePredictor.java | 226 |
com/irurueta/ar/slam/StatePredictor.java | 513 |
com/irurueta/ar/slam/StatePredictor.java | 810 |
com/irurueta/ar/slam/StatePredictor.java | 1111 |
```java
wx += uwx;
wy += uwy;
wz += uwz;

// set new state
result[0] = r.getInhomX();
result[1] = r.getInhomY();
result[2] = r.getInhomZ();
result[3] = q.getA();
result[4] = q.getB();
result[5] = q.getC();
result[6] = q.getD();
result[7] = vx;
result[8] = vy;
result[9] = vz;
result[10] = wx;
```
File | Line |
---|---|
com/irurueta/ar/slam/ConstantVelocityModelStatePredictor.java | 437 |
com/irurueta/ar/slam/StatePredictor.java | 226 |
com/irurueta/ar/slam/StatePredictor.java | 513 |
com/irurueta/ar/slam/StatePredictor.java | 810 |
com/irurueta/ar/slam/StatePredictor.java | 1111 |
```java
wx += uwx;
wy += uwy;
wz += uwz;

// set new state
result[0] = r.getInhomX();
result[1] = r.getInhomY();
result[2] = r.getInhomZ();
result[3] = q.getA();
result[4] = q.getB();
result[5] = q.getC();
result[6] = q.getD();
result[7] = vx;
result[8] = vy;
result[9] = vz;
result[10] = wx;
```
File | Line |
---|---|
com/irurueta/ar/slam/ConstantVelocityModelStatePredictor.java | 686 |
com/irurueta/ar/slam/StatePredictor.java | 226 |
com/irurueta/ar/slam/StatePredictor.java | 513 |
com/irurueta/ar/slam/StatePredictor.java | 810 |
com/irurueta/ar/slam/StatePredictor.java | 1111 |
```java
wx += uwx;
wy += uwy;
wz += uwz;

// set new state
result[0] = r.getInhomX();
result[1] = r.getInhomY();
result[2] = r.getInhomZ();
result[3] = q.getA();
result[4] = q.getB();
result[5] = q.getC();
result[6] = q.getD();
result[7] = vx;
result[8] = vy;
result[9] = vz;
result[10] = wx;
```
File | Line |
---|---|
com/irurueta/ar/slam/ConstantVelocityModelStatePredictor.java | 944 |
com/irurueta/ar/slam/StatePredictor.java | 226 |
com/irurueta/ar/slam/StatePredictor.java | 513 |
com/irurueta/ar/slam/StatePredictor.java | 810 |
com/irurueta/ar/slam/StatePredictor.java | 1111 |
```java
wx += uwx;
wy += uwy;
wz += uwz;

// set new state
result[0] = r.getInhomX();
result[1] = r.getInhomY();
result[2] = r.getInhomZ();
result[3] = q.getA();
result[4] = q.getB();
result[5] = q.getC();
result[6] = q.getD();
result[7] = vx;
result[8] = vy;
result[9] = vz;
result[10] = wx;
```
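These eight groups all end by writing position, quaternion, linear velocity and angular velocity into the 13-component state array in the same order. A sketch of a shared write-back helper follows; it uses only plain Java plus the `InhomogeneousPoint3D` and `Quaternion` accessors visible in the duplicated code, and the class and method names are hypothetical.

```java
import com.irurueta.geometry.InhomogeneousPoint3D;
import com.irurueta.geometry.Quaternion;

// Hypothetical helper writing the predicted state into the 13-component
// array layout shared by all StatePredictor variants.
final class StateArrays {
    private StateArrays() {
        // utility class
    }

    static void writeState(
            final InhomogeneousPoint3D r, final Quaternion q,
            final double vx, final double vy, final double vz,
            final double wx, final double wy, final double wz,
            final double[] result) {
        result[0] = r.getInhomX();   // position
        result[1] = r.getInhomY();
        result[2] = r.getInhomZ();
        result[3] = q.getA();        // orientation quaternion
        result[4] = q.getB();
        result[5] = q.getC();
        result[6] = q.getD();
        result[7] = vx;              // linear velocity
        result[8] = vy;
        result[9] = vz;
        result[10] = wx;             // angular velocity
        result[11] = wy;
        result[12] = wz;
    }
}
```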
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/DualAbsoluteQuadricRobustEstimator.java | 901 |
com/irurueta/ar/calibration/estimators/DualAbsoluteQuadricRobustEstimator.java | 936 |
```java
        final double p13, final double p33, final double p14, final double p34) {
    final var a = daq.getA();
    final var b = daq.getB();
    final var c = daq.getC();
    final var d = daq.getD();
    final var e = daq.getE();
    final var f = daq.getF();
    final var g = daq.getG();
    final var h = daq.getH();
    final var i = daq.getI();
    final var j = daq.getJ();

    return a * p31 * p11 + b * p32 * p12 + c * p33 * p13 + d * (p32 * p11 + p32 * p12)
```
File | Line |
---|---|
com/irurueta/ar/epipolar/estimators/FundamentalMatrixRobustEstimator.java | 728 |
com/irurueta/ar/epipolar/refiners/FundamentalMatrixRefiner.java | 338 |
com/irurueta/ar/epipolar/refiners/RightEpipoleRefiner.java | 240 |
```java
protected double residual(
        final FundamentalMatrix fundamentalMatrix, final Point2D leftPoint, final Point2D rightPoint) {
    try {
        leftPoint.normalize();
        rightPoint.normalize();
        fundamentalMatrix.normalize();
        fundamentalMatrix.leftEpipolarLine(rightPoint, testLine);
        final var leftDistance = Math.abs(testLine.signedDistance(leftPoint));
        fundamentalMatrix.rightEpipolarLine(leftPoint, testLine);
        final var rightDistance = Math.abs(testLine.signedDistance(rightPoint));
        // return average distance as an error residual
        return 0.5 * (leftDistance + rightDistance);
    } catch (final NotReadyException e) {
        return Double.MAX_VALUE;
    }
}
```
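The same symmetric epipolar residual is duplicated across the robust estimator and both refiners. A static utility would remove it from all three; the sketch below passes the reusable `testLine` in explicitly instead of relying on a field. The class and method names are hypothetical, and the broad `Exception` catch stands in for the `NotReadyException` the library code narrows to, since the exact exception package is not shown in the report.

```java
import com.irurueta.ar.epipolar.FundamentalMatrix;
import com.irurueta.geometry.Line2D;
import com.irurueta.geometry.Point2D;

// Hypothetical shared utility for the symmetric epipolar residual used by
// the fundamental matrix robust estimator and the refiners.
final class EpipolarResiduals {
    private EpipolarResiduals() {
        // utility class
    }

    static double symmetricEpipolarResidual(
            final FundamentalMatrix fundamentalMatrix, final Point2D leftPoint,
            final Point2D rightPoint, final Line2D testLine) {
        try {
            leftPoint.normalize();
            rightPoint.normalize();
            fundamentalMatrix.normalize();

            // distance of each point to the epipolar line induced by its match
            fundamentalMatrix.leftEpipolarLine(rightPoint, testLine);
            final var leftDistance = Math.abs(testLine.signedDistance(leftPoint));
            fundamentalMatrix.rightEpipolarLine(leftPoint, testLine);
            final var rightDistance = Math.abs(testLine.signedDistance(rightPoint));

            // average of both distances is the error residual
            return 0.5 * (leftDistance + rightDistance);
        } catch (final Exception e) {
            // library code catches NotReadyException here
            return Double.MAX_VALUE;
        }
    }
}
```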
File | Line |
---|---|
com/irurueta/ar/slam/AbsoluteOrientationConstantVelocityModelSlamEstimator.java | 301 |
com/irurueta/ar/slam/ConstantVelocityModelSlamEstimator.java | 278 |
```java
final long timestamp = getMostRecentTimestampNanos();
if (lastTimestampNanos < 0) {
    // first time receiving control data, we just set linear acceleration
    // and angular speed into system state
    stateAccelerationX = accumulatedAccelerationSampleX;
    stateAccelerationY = accumulatedAccelerationSampleY;
    stateAccelerationZ = accumulatedAccelerationSampleZ;
    lastAngularSpeedX = stateAngularSpeedX = x[10] = accumulatedAngularSpeedSampleX;
    lastAngularSpeedY = stateAngularSpeedY = x[11] = accumulatedAngularSpeedSampleY;
    lastAngularSpeedZ = stateAngularSpeedZ = x[12] = accumulatedAngularSpeedSampleZ;

    try {
        kalmanFilter.getStatePre().fromArray(x);
        kalmanFilter.getStatePost().fromArray(x);
    } catch (final WrongSizeException ignore) {
        // never thrown
    }

    lastTimestampNanos = timestamp;

    if (listener != null) {
        listener.onFullSampleProcessed(this);
    }

    return;
}
```
File | Line |
---|---|
com/irurueta/ar/slam/ConstantVelocityModelStatePredictor.java | 138 |
com/irurueta/ar/slam/ConstantVelocityModelStatePredictor.java | 372 |
```java
        || jacobianU.getColumns() != CONTROL_COMPONENTS)) {
    // jacobian wrt u must be 13x6
    throw new IllegalArgumentException();
}

// position
final var r = new InhomogeneousPoint3D(x[0], x[1], x[2]);

// orientation
var q = new Quaternion(x[3], x[4], x[5], x[6]);

// linear velocity
var vx = x[7];
var vy = x[8];
var vz = x[9];

// angular velocity
var wx = x[10];
var wy = x[11];
var wz = x[12];

// linear velocity change (control)
final var uvx = u[0];
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 283 |
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 430 |
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 452 |
```java
        + Math.pow(a.getElementAt(counter, 5), 2.0));

a.setElementAt(counter, 0, a.getElementAt(counter, 0) / rowNorm);
a.setElementAt(counter, 1, a.getElementAt(counter, 1) / rowNorm);
a.setElementAt(counter, 2, a.getElementAt(counter, 2) / rowNorm);
a.setElementAt(counter, 3, a.getElementAt(counter, 3) / rowNorm);
a.setElementAt(counter, 4, a.getElementAt(counter, 4) / rowNorm);
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 314 |
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 430 |
com/irurueta/ar/calibration/estimators/LMSEImageOfAbsoluteConicEstimator.java | 452 |
```java
        + Math.pow(a.getElementAt(counter, 5), 2.0));

a.setElementAt(counter, 0, a.getElementAt(counter, 0) / rowNorm);
a.setElementAt(counter, 1, a.getElementAt(counter, 1) / rowNorm);
a.setElementAt(counter, 2, a.getElementAt(counter, 2) / rowNorm);
a.setElementAt(counter, 3, a.getElementAt(counter, 3) / rowNorm);
a.setElementAt(counter, 4, a.getElementAt(counter, 4) / rowNorm);
```
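The row-building code in the LMSE and weighted IAC estimators repeats the same epilogue in several places: compute the Euclidean norm of the row just filled, then rescale it, either by `1 / rowNorm` (LMSE) or by `weight / rowNorm` (weighted). A single helper covers both, as sketched below; it assumes the `Matrix` type from `com.irurueta.algebra` (whose `getElementAt`, `setElementAt` and `getColumns` calls appear in the duplicated code), the helper name is hypothetical, and it assumes the row is nonzero.

```java
import com.irurueta.algebra.Matrix;

// Hypothetical helper: rescales row `row` of `a` to unit norm, optionally
// applying a weight. Pass weight = 1.0 for the unweighted LMSE variant.
final class RowNormalization {
    private RowNormalization() {
        // utility class
    }

    static void normalizeRow(final Matrix a, final int row, final double weight) {
        var sqrNorm = 0.0;
        for (var col = 0; col < a.getColumns(); col++) {
            final var value = a.getElementAt(row, col);
            sqrNorm += value * value;
        }
        // assumes the row is nonzero, as in the duplicated code
        final var factor = weight / Math.sqrt(sqrNorm);
        for (var col = 0; col < a.getColumns(); col++) {
            a.setElementAt(row, col, a.getElementAt(row, col) * factor);
        }
    }
}
```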
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/MSACDualAbsoluteQuadricRobustEstimator.java | 236 |
com/irurueta/ar/calibration/estimators/MSACImageOfAbsoluteConicRobustEstimator.java | 245 |
com/irurueta/ar/calibration/estimators/MSACRadialDistortionRobustEstimator.java | 312 |
com/irurueta/ar/sfm/MSACRobustSinglePoint3DTriangulator.java | 268 |
```java
                listener.onEstimateProgressChange(MSACDualAbsoluteQuadricRobustEstimator.this, progress);
            }
        }
});

try {
    locked = true;
    innerEstimator.setConfidence(confidence);
    innerEstimator.setMaxIterations(maxIterations);
    innerEstimator.setProgressDelta(progressDelta);
    return innerEstimator.estimate();
} catch (final com.irurueta.numerical.LockedException e) {
    throw new LockedException(e);
} catch (final com.irurueta.numerical.NotReadyException e) {
    throw new NotReadyException(e);
} finally {
    locked = false;
}
}

/**
 * Returns method being used for robust estimation.
 *
 * @return method being used for robust estimation.
 */
@Override
public RobustEstimatorMethod getMethod() {
    return RobustEstimatorMethod.MSAC;
}
}
```
File | Line |
---|---|
com/irurueta/ar/calibration/estimators/RANSACDualAbsoluteQuadricRobustEstimator.java | 237 |
com/irurueta/ar/calibration/estimators/RANSACImageOfAbsoluteConicRobustEstimator.java | 247 |
com/irurueta/ar/calibration/estimators/RANSACRadialDistortionRobustEstimator.java | 312 |
com/irurueta/ar/sfm/RANSACRobustSinglePoint3DTriangulator.java | 270 |
```java
                        RANSACDualAbsoluteQuadricRobustEstimator.this, progress);
            }
        }
});

try {
    locked = true;
    innerEstimator.setConfidence(confidence);
    innerEstimator.setMaxIterations(maxIterations);
    innerEstimator.setProgressDelta(progressDelta);
    return innerEstimator.estimate();
} catch (final com.irurueta.numerical.LockedException e) {
    throw new LockedException(e);
} catch (final com.irurueta.numerical.NotReadyException e) {
    throw new NotReadyException(e);
} finally {
    locked = false;
}
}

/**
 * Returns method being used for robust estimation.
 *
 * @return method being used for robust estimation.
 */
@Override
public RobustEstimatorMethod getMethod() {
    return RobustEstimatorMethod.RANSAC;
}
}
```
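The LMedS, MSAC and RANSAC estimate methods above differ only in the method-specific settings applied to the inner estimator (for example, LMedS adds a stop threshold) and in the `RobustEstimatorMethod` they report. A template method in the common base class could absorb the shared configure/estimate/unlock boilerplate. The sketch below is speculative: it assumes a common `RobustEstimator<T>` base type in `com.irurueta.numerical.robust` exposing the setters and `estimate()` seen in the duplicated code, it references the enclosing class's `locked`, `confidence`, `maxIterations` and `progressDelta` fields, and the method name and `Consumer` hook are hypothetical.

```java
import com.irurueta.numerical.robust.RobustEstimator;
import com.irurueta.numerical.robust.RobustEstimatorException;
import java.util.function.Consumer;

// Hypothetical base-class template capturing the boilerplate shared by the
// LMedS, MSAC and RANSAC estimate methods; `configure` applies the
// method-specific settings (e.g. a stop threshold for LMedS).
protected <T> T runInnerEstimator(
        final RobustEstimator<T> innerEstimator,
        final Consumer<RobustEstimator<T>> configure)
        throws LockedException, NotReadyException, RobustEstimatorException {
    try {
        locked = true;
        innerEstimator.setConfidence(confidence);
        innerEstimator.setMaxIterations(maxIterations);
        innerEstimator.setProgressDelta(progressDelta);
        configure.accept(innerEstimator);
        return innerEstimator.estimate();
    } catch (final com.irurueta.numerical.LockedException e) {
        // rethrow as this package's exception types, as the originals do
        throw new LockedException(e);
    } catch (final com.irurueta.numerical.NotReadyException e) {
        throw new NotReadyException(e);
    } finally {
        locked = false;
    }
}
```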
File | Line |
---|---|
com/irurueta/ar/sfm/BaseAbsoluteOrientationSlamTwoViewsSparseReconstructor.java | 93 |
com/irurueta/ar/sfm/BaseSlamTwoViewsSparseReconstructor.java | 207 |
```java
protected boolean updateScaleAndOrientation() {

    // obtain baseline (camera separation) from slam estimator data
    final var posX = slamEstimator.getStatePositionX();
    final var posY = slamEstimator.getStatePositionY();
    final var posZ = slamEstimator.getStatePositionZ();

    // to estimate baseline, we assume that first camera is placed at
    // world origin
    final var baseline = Math.sqrt(posX * posX + posY * posY + posZ * posZ);

    try {
        final var camera1 = estimatedCamera1.getCamera();
        final var camera2 = estimatedCamera2.getCamera();

        camera1.decompose();
        camera2.decompose();

        final var center1 = camera1.getCameraCenter();
        final var center2 = camera2.getCameraCenter();

        // R1' = R1*Rdiff
        // Rdiff = R1^T*R1'
        // where R1' is the desired orientation (obtained by sampling a
        // sensor) and R1 is always the identity for the 1st camera.
        // Hence R1' = Rdiff

        // t1' is the desired translation which is zero for the 1st
        // camera.

        // We want: P1' = K*[R1' t1'] = K*[R1' 0]
        // And we have P1 = K[I 0]

        // We need a transformation T so that:
        // P1' = P1*T^-1 = K[I 0][R1' 0]
        //                       [0   1]

        // Hence: T^-1 = [R1' 0]
        //               [0   1]

        // or T = [R1'^T 0]
        //        [0     1]

        // because we are also applying a transformation of scale s,
        // the combination of both transformations is
        // T = [s*R1'^T 0]
        //     [0       1]
        final var r = firstOrientation.inverseRotationAndReturnNew();
```
File | Line |
---|---|
com/irurueta/ar/sfm/WeightedHomogeneousSinglePoint3DTriangulator.java | 425 |
com/irurueta/ar/sfm/WeightedInhomogeneousSinglePoint3DTriangulator.java | 421 |
v.getElementAt(3, 3)); if (listener != null) { listener.onTriangulateEnd(this); } } catch (final AlgebraException | SortingException e) { throw new Point3DTriangulationException(e); } finally { locked = false; } } /** * Internal method to set list of matched 2D points for each view and their * corresponding cameras used to project them along with their weights. * This method does not check whether instance is locked. * * @param points2D list of matched 2D points on each view. Each point in the * list is assumed to be projected by the corresponding camera in the list. * @param cameras cameras for each view where 2D points are represented. * @param weights weights assigned to each view. * @throws IllegalArgumentException if provided lists don't have the same * length or their length is less than 2 views, which is the minimum * required to compute triangulation. */ private void internalSetPointsCamerasAndWeights( final List<Point2D> points2D, final List<PinholeCamera> cameras, final double[] weights) { if (!areValidPointsCamerasAndWeights(points2D, cameras, weights)) { throw new IllegalArgumentException(); } this.points2D = points2D; this.cameras = cameras; this.weights = weights; } } |