ACloudViewer  3.9.4
A Modern Library for 3D Data Processing
two_view_geometry.cc
Go to the documentation of this file.
1 // Copyright (c) 2018, ETH Zurich and UNC Chapel Hill.
2 // All rights reserved.
3 //
4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions are met:
6 //
7 // * Redistributions of source code must retain the above copyright
8 // notice, this list of conditions and the following disclaimer.
9 //
10 // * Redistributions in binary form must reproduce the above copyright
11 // notice, this list of conditions and the following disclaimer in the
12 // documentation and/or other materials provided with the distribution.
13 //
14 // * Neither the name of ETH Zurich and UNC Chapel Hill nor the names of
15 // its contributors may be used to endorse or promote products derived
16 // from this software without specific prior written permission.
17 //
18 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
19 // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
20 // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
21 // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
22 // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
23 // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
24 // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
25 // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
26 // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
27 // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
28 // POSSIBILITY OF SUCH DAMAGE.
29 //
30 // Author: Johannes L. Schoenberger (jsch-at-demuc-dot-de)
31 
33 
#include "estimators/two_view_geometry.h"

#include <unordered_set>

#include "base/camera.h"
#include "base/essential_matrix.h"
#include "base/homography_matrix.h"
#include "base/pose.h"
#include "base/projection.h"
#include "base/triangulation.h"
#include "estimators/essential_matrix.h"
#include "estimators/fundamental_matrix.h"
#include "estimators/homography_matrix.h"
#include "estimators/translation_transform.h"
#include "optim/loransac.h"
#include "optim/ransac.h"
#include "util/random.h"
49 
50 namespace colmap {
51 namespace {
52 
53 FeatureMatches ExtractInlierMatches(const FeatureMatches& matches,
54  const size_t num_inliers,
55  const std::vector<char>& inlier_mask) {
56  FeatureMatches inlier_matches(num_inliers);
57  size_t j = 0;
58  for (size_t i = 0; i < matches.size(); ++i) {
59  if (inlier_mask[i]) {
60  inlier_matches[j] = matches[i];
61  j += 1;
62  }
63  }
64  return inlier_matches;
65 }
66 
67 FeatureMatches ExtractOutlierMatches(const FeatureMatches& matches,
68  const FeatureMatches& inlier_matches) {
69  CHECK_GE(matches.size(), inlier_matches.size());
70 
71  std::unordered_set<std::pair<point2D_t, point2D_t>> inlier_matches_set;
72  inlier_matches_set.reserve(inlier_matches.size());
73  for (const auto& match : inlier_matches) {
74  inlier_matches_set.emplace(match.point2D_idx1, match.point2D_idx2);
75  }
76 
77  FeatureMatches outlier_matches;
78  outlier_matches.reserve(matches.size() - inlier_matches.size());
79 
80  for (const auto& match : matches) {
81  if (inlier_matches_set.count(
82  std::make_pair(match.point2D_idx1, match.point2D_idx2)) == 0) {
83  outlier_matches.push_back(match);
84  }
85  }
86 
87  return outlier_matches;
88 }
89 
90 inline bool IsImagePointInBoundingBox(const Eigen::Vector2d& point,
91  const double minx, const double maxx,
92  const double miny, const double maxy) {
93  return point.x() >= minx && point.x() <= maxx && point.y() >= miny &&
94  point.y() <= maxy;
95 }
96 
97 } // namespace
98 
100  F.transposeInPlace();
101  E.transposeInPlace();
102  H = H.inverse().eval();
103 
104  const Eigen::Vector4d orig_qvec = qvec;
105  const Eigen::Vector3d orig_tvec = tvec;
106  InvertPose(orig_qvec, orig_tvec, &qvec, &tvec);
107 
108  for (auto& match : inlier_matches) {
109  std::swap(match.point2D_idx1, match.point2D_idx2);
110  }
111 }
112 
113 void TwoViewGeometry::Estimate(const Camera& camera1,
114  const std::vector<Eigen::Vector2d>& points1,
115  const Camera& camera2,
116  const std::vector<Eigen::Vector2d>& points2,
117  const FeatureMatches& matches,
118  const Options& options) {
119  if (camera1.HasPriorFocalLength() && camera2.HasPriorFocalLength()) {
120  EstimateCalibrated(camera1, points1, camera2, points2, matches, options);
121  } else {
122  EstimateUncalibrated(camera1, points1, camera2, points2, matches, options);
123  }
124 }
125 
127  const Camera& camera1, const std::vector<Eigen::Vector2d>& points1,
128  const Camera& camera2, const std::vector<Eigen::Vector2d>& points2,
129  const FeatureMatches& matches, const Options& options) {
130  FeatureMatches remaining_matches = matches;
131  std::vector<TwoViewGeometry> two_view_geometries;
132  while (true) {
133  TwoViewGeometry two_view_geometry;
134  two_view_geometry.Estimate(camera1, points1, camera2, points2,
135  remaining_matches, options);
136  if (two_view_geometry.config == ConfigurationType::DEGENERATE) {
137  break;
138  }
139 
140  if (options.multiple_ignore_watermark) {
141  if (two_view_geometry.config != ConfigurationType::WATERMARK) {
142  two_view_geometries.push_back(two_view_geometry);
143  }
144  } else {
145  two_view_geometries.push_back(two_view_geometry);
146  }
147 
148  remaining_matches = ExtractOutlierMatches(remaining_matches,
149  two_view_geometry.inlier_matches);
150  }
151 
152  if (two_view_geometries.empty()) {
153  config = ConfigurationType::DEGENERATE;
154  } else if (two_view_geometries.size() == 1) {
155  *this = two_view_geometries[0];
156  } else {
157  config = ConfigurationType::MULTIPLE;
158 
159  for (const auto& two_view_geometry : two_view_geometries) {
160  inlier_matches.insert(inlier_matches.end(),
161  two_view_geometry.inlier_matches.begin(),
162  two_view_geometry.inlier_matches.end());
163  }
164  }
165 }
166 
168  const Camera& camera1, const std::vector<Eigen::Vector2d>& points1,
169  const Camera& camera2, const std::vector<Eigen::Vector2d>& points2) {
170  // We need a valid epopolar geometry to estimate the relative pose.
171  if (config != CALIBRATED && config != UNCALIBRATED && config != PLANAR &&
173  return false;
174  }
175 
176  // Extract normalized inlier points.
177  std::vector<Eigen::Vector2d> inlier_points1_normalized;
178  inlier_points1_normalized.reserve(inlier_matches.size());
179  std::vector<Eigen::Vector2d> inlier_points2_normalized;
180  inlier_points2_normalized.reserve(inlier_matches.size());
181  for (const auto& match : inlier_matches) {
182  const point2D_t idx1 = match.point2D_idx1;
183  const point2D_t idx2 = match.point2D_idx2;
184  inlier_points1_normalized.push_back(camera1.ImageToWorld(points1[idx1]));
185  inlier_points2_normalized.push_back(camera2.ImageToWorld(points2[idx2]));
186  }
187 
188  Eigen::Matrix3d R;
189  std::vector<Eigen::Vector3d> points3D;
190 
191  if (config == CALIBRATED || config == UNCALIBRATED) {
192  // Try to recover relative pose for calibrated and uncalibrated
193  // configurations. In the uncalibrated case, this most likely leads to a
194  // ill-defined reconstruction, but sometimes it succeeds anyways after e.g.
195  // subsequent bundle-adjustment etc.
196  PoseFromEssentialMatrix(E, inlier_points1_normalized,
197  inlier_points2_normalized, &R, &tvec, &points3D);
198  } else if (config == PLANAR || config == PANORAMIC ||
200  Eigen::Vector3d n;
202  H, camera1.CalibrationMatrix(), camera2.CalibrationMatrix(),
203  inlier_points1_normalized, inlier_points2_normalized, &R, &tvec, &n,
204  &points3D);
205  } else {
206  return false;
207  }
208 
210 
211  if (points3D.empty()) {
212  tri_angle = 0;
213  } else {
215  Eigen::Vector3d::Zero(), -R.transpose() * tvec, points3D));
216  }
217 
218  if (config == PLANAR_OR_PANORAMIC) {
219  if (tvec.norm() == 0) {
220  config = PANORAMIC;
221  tri_angle = 0;
222  } else {
223  config = PLANAR;
224  }
225  }
226 
227  return true;
228 }
229 
231  const Camera& camera1, const std::vector<Eigen::Vector2d>& points1,
232  const Camera& camera2, const std::vector<Eigen::Vector2d>& points2,
233  const FeatureMatches& matches, const Options& options) {
234  options.Check();
235 
236  if (matches.size() < options.min_num_inliers) {
237  config = ConfigurationType::DEGENERATE;
238  return;
239  }
240 
241  // Extract corresponding points.
242  std::vector<Eigen::Vector2d> matched_points1(matches.size());
243  std::vector<Eigen::Vector2d> matched_points2(matches.size());
244  std::vector<Eigen::Vector2d> matched_points1_normalized(matches.size());
245  std::vector<Eigen::Vector2d> matched_points2_normalized(matches.size());
246  for (size_t i = 0; i < matches.size(); ++i) {
247  const point2D_t idx1 = matches[i].point2D_idx1;
248  const point2D_t idx2 = matches[i].point2D_idx2;
249  matched_points1[i] = points1[idx1];
250  matched_points2[i] = points2[idx2];
251  matched_points1_normalized[i] = camera1.ImageToWorld(points1[idx1]);
252  matched_points2_normalized[i] = camera2.ImageToWorld(points2[idx2]);
253  }
254 
255  // Estimate epipolar models.
256 
257  auto E_ransac_options = options.ransac_options;
258  E_ransac_options.max_error =
261  2;
262 
264  E_ransac(E_ransac_options);
265  const auto E_report =
266  E_ransac.Estimate(matched_points1_normalized, matched_points2_normalized);
267  E = E_report.model;
268 
271  F_ransac(options.ransac_options);
272  const auto F_report = F_ransac.Estimate(matched_points1, matched_points2);
273  F = F_report.model;
274 
275  // Estimate planar or panoramic model.
276 
278  options.ransac_options);
279  const auto H_report = H_ransac.Estimate(matched_points1, matched_points2);
280  H = H_report.model;
281 
282  if ((!E_report.success && !F_report.success && !H_report.success) ||
283  (E_report.support.num_inliers < options.min_num_inliers &&
284  F_report.support.num_inliers < options.min_num_inliers &&
285  H_report.support.num_inliers < options.min_num_inliers)) {
286  config = ConfigurationType::DEGENERATE;
287  return;
288  }
289 
290  // Determine inlier ratios of different models.
291 
292  const double E_F_inlier_ratio =
293  static_cast<double>(E_report.support.num_inliers) /
294  F_report.support.num_inliers;
295  const double H_F_inlier_ratio =
296  static_cast<double>(H_report.support.num_inliers) /
297  F_report.support.num_inliers;
298  const double H_E_inlier_ratio =
299  static_cast<double>(H_report.support.num_inliers) /
300  E_report.support.num_inliers;
301 
302  const std::vector<char>* best_inlier_mask = nullptr;
303  size_t num_inliers = 0;
304 
305  if (E_report.success && E_F_inlier_ratio > options.min_E_F_inlier_ratio &&
306  E_report.support.num_inliers >= options.min_num_inliers) {
307  // Calibrated configuration.
308 
309  // Always use the model with maximum matches.
310  if (E_report.support.num_inliers >= F_report.support.num_inliers) {
311  num_inliers = E_report.support.num_inliers;
312  best_inlier_mask = &E_report.inlier_mask;
313  } else {
314  num_inliers = F_report.support.num_inliers;
315  best_inlier_mask = &F_report.inlier_mask;
316  }
317 
318  if (H_E_inlier_ratio > options.max_H_inlier_ratio) {
320  if (H_report.support.num_inliers > num_inliers) {
321  num_inliers = H_report.support.num_inliers;
322  best_inlier_mask = &H_report.inlier_mask;
323  }
324  } else {
325  config = ConfigurationType::CALIBRATED;
326  }
327  } else if (F_report.success &&
328  F_report.support.num_inliers >= options.min_num_inliers) {
329  // Uncalibrated configuration.
330 
331  num_inliers = F_report.support.num_inliers;
332  best_inlier_mask = &F_report.inlier_mask;
333 
334  if (H_F_inlier_ratio > options.max_H_inlier_ratio) {
335  config = ConfigurationType::PLANAR_OR_PANORAMIC;
336  if (H_report.support.num_inliers > num_inliers) {
337  num_inliers = H_report.support.num_inliers;
338  best_inlier_mask = &H_report.inlier_mask;
339  }
340  } else {
341  config = ConfigurationType::UNCALIBRATED;
342  }
343  } else if (H_report.success &&
344  H_report.support.num_inliers >= options.min_num_inliers) {
345  num_inliers = H_report.support.num_inliers;
346  best_inlier_mask = &H_report.inlier_mask;
347  config = ConfigurationType::PLANAR_OR_PANORAMIC;
348  } else {
349  config = ConfigurationType::DEGENERATE;
350  return;
351  }
352 
353  if (best_inlier_mask != nullptr) {
355  ExtractInlierMatches(matches, num_inliers, *best_inlier_mask);
356 
357  if (options.detect_watermark &&
358  DetectWatermark(camera1, matched_points1, camera2, matched_points2,
359  num_inliers, *best_inlier_mask, options)) {
360  config = ConfigurationType::WATERMARK;
361  }
362  }
363 }
364 
366  const Camera& camera1, const std::vector<Eigen::Vector2d>& points1,
367  const Camera& camera2, const std::vector<Eigen::Vector2d>& points2,
368  const FeatureMatches& matches, const Options& options) {
369  options.Check();
370 
371  if (matches.size() < options.min_num_inliers) {
372  config = ConfigurationType::DEGENERATE;
373  return;
374  }
375 
376  // Extract corresponding points.
377  std::vector<Eigen::Vector2d> matched_points1(matches.size());
378  std::vector<Eigen::Vector2d> matched_points2(matches.size());
379  for (size_t i = 0; i < matches.size(); ++i) {
380  matched_points1[i] = points1[matches[i].point2D_idx1];
381  matched_points2[i] = points2[matches[i].point2D_idx2];
382  }
383 
384  // Estimate epipolar model.
385 
388  F_ransac(options.ransac_options);
389  const auto F_report = F_ransac.Estimate(matched_points1, matched_points2);
390  F = F_report.model;
391 
392  // Estimate planar or panoramic model.
393 
395  options.ransac_options);
396  const auto H_report = H_ransac.Estimate(matched_points1, matched_points2);
397  H = H_report.model;
398 
399  if ((!F_report.success && !H_report.success) ||
400  (F_report.support.num_inliers < options.min_num_inliers &&
401  H_report.support.num_inliers < options.min_num_inliers)) {
402  config = ConfigurationType::DEGENERATE;
403  return;
404  }
405 
406  // Determine inlier ratios of different models.
407 
408  const double H_F_inlier_ratio =
409  static_cast<double>(H_report.support.num_inliers) /
410  F_report.support.num_inliers;
411 
412  if (H_F_inlier_ratio > options.max_H_inlier_ratio) {
413  config = ConfigurationType::PLANAR_OR_PANORAMIC;
414  } else {
415  config = ConfigurationType::UNCALIBRATED;
416  }
417 
418  inlier_matches = ExtractInlierMatches(matches, F_report.support.num_inliers,
419  F_report.inlier_mask);
420 
421  if (options.detect_watermark &&
422  DetectWatermark(camera1, matched_points1, camera2, matched_points2,
423  F_report.support.num_inliers, F_report.inlier_mask,
424  options)) {
425  config = ConfigurationType::WATERMARK;
426  }
427 }
428 
430  const Camera& camera1, const std::vector<Eigen::Vector2d>& points1,
431  const Camera& camera2, const std::vector<Eigen::Vector2d>& points2,
432  const size_t num_inliers, const std::vector<char>& inlier_mask,
433  const Options& options) {
434  options.Check();
435 
436  // Check if inlier points in border region and extract inlier matches.
437 
438  const double diagonal1 = std::sqrt(camera1.Width() * camera1.Width() +
439  camera1.Height() * camera1.Height());
440  const double diagonal2 = std::sqrt(camera2.Width() * camera2.Width() +
441  camera2.Height() * camera2.Height());
442  const double minx1 = options.watermark_border_size * diagonal1;
443  const double miny1 = minx1;
444  const double maxx1 = camera1.Width() - minx1;
445  const double maxy1 = camera1.Height() - miny1;
446  const double minx2 = options.watermark_border_size * diagonal2;
447  const double miny2 = minx2;
448  const double maxx2 = camera2.Width() - minx2;
449  const double maxy2 = camera2.Height() - miny2;
450 
451  std::vector<Eigen::Vector2d> inlier_points1(num_inliers);
452  std::vector<Eigen::Vector2d> inlier_points2(num_inliers);
453 
454  size_t num_matches_in_border = 0;
455 
456  size_t j = 0;
457  for (size_t i = 0; i < inlier_mask.size(); ++i) {
458  if (inlier_mask[i]) {
459  const auto& point1 = points1[i];
460  const auto& point2 = points2[i];
461 
462  inlier_points1[j] = point1;
463  inlier_points2[j] = point2;
464  j += 1;
465 
466  if (!IsImagePointInBoundingBox(point1, minx1, maxx1, miny1, maxy1) &&
467  !IsImagePointInBoundingBox(point2, minx2, maxx2, miny2, maxy2)) {
468  num_matches_in_border += 1;
469  }
470  }
471  }
472 
473  const double matches_in_border_ratio =
474  static_cast<double>(num_matches_in_border) / num_inliers;
475 
476  if (matches_in_border_ratio < options.watermark_min_inlier_ratio) {
477  return false;
478  }
479 
480  // Check if matches follow a translational model.
481 
482  RANSACOptions ransac_options = options.ransac_options;
483  ransac_options.min_inlier_ratio = options.watermark_min_inlier_ratio;
484 
486  ransac(ransac_options);
487  const auto report = ransac.Estimate(inlier_points1, inlier_points2);
488 
489  const double inlier_ratio =
490  static_cast<double>(report.support.num_inliers) / num_inliers;
491 
492  return inlier_ratio >= options.watermark_min_inlier_ratio;
493 }
494 
495 } // namespace colmap
Eigen::Vector2d ImageToWorld(const Eigen::Vector2d &image_point) const
Definition: camera.cc:219
double ImageToWorldThreshold(const double threshold) const
Definition: camera.cc:226
bool HasPriorFocalLength() const
Definition: camera.h:168
Eigen::Matrix3d CalibrationMatrix() const
Definition: camera.cc:75
size_t Width() const
Definition: camera.h:160
size_t Height() const
Definition: camera.h:162
Report Estimate(const std::vector< typename Estimator::X_t > &X, const std::vector< typename Estimator::Y_t > &Y)
Definition: loransac.h:72
CLOUDVIEWER_HOST_DEVICE Pair< First, Second > make_pair(const First &_first, const Second &_second)
Definition: SlabTraits.h:49
void PoseFromHomographyMatrix(const Eigen::Matrix3d &H, const Eigen::Matrix3d &K1, const Eigen::Matrix3d &K2, const std::vector< Eigen::Vector2d > &points1, const std::vector< Eigen::Vector2d > &points2, Eigen::Matrix3d *R, Eigen::Vector3d *t, Eigen::Vector3d *n, std::vector< Eigen::Vector3d > *points3D)
void InvertPose(const Eigen::Vector4d &qvec, const Eigen::Vector3d &tvec, Eigen::Vector4d *inv_qvec, Eigen::Vector3d *inv_tvec)
Definition: pose.cc:192
std::vector< double > CalculateTriangulationAngles(const Eigen::Vector3d &proj_center1, const Eigen::Vector3d &proj_center2, const std::vector< Eigen::Vector3d > &points3D)
uint32_t point2D_t
Definition: types.h:67
double Median(const std::vector< T > &elems)
Definition: math.h:189
Eigen::Vector4d RotationMatrixToQuaternion(const Eigen::Matrix3d &rot_mat)
Definition: pose.cc:70
void PoseFromEssentialMatrix(const Eigen::Matrix3d &E, const std::vector< Eigen::Vector2d > &points1, const std::vector< Eigen::Vector2d > &points2, Eigen::Matrix3d *R, Eigen::Vector3d *t, std::vector< Eigen::Vector3d > *points3D)
std::vector< FeatureMatch > FeatureMatches
Definition: types.h:80
void swap(cloudViewer::core::SmallVectorImpl< T > &LHS, cloudViewer::core::SmallVectorImpl< T > &RHS)
Implement std::swap in terms of SmallVector swap.
Definition: SmallVector.h:1370
double min_inlier_ratio
Definition: ransac.h:29
void EstimateUncalibrated(const Camera &camera1, const std::vector< Eigen::Vector2d > &points1, const Camera &camera2, const std::vector< Eigen::Vector2d > &points2, const FeatureMatches &matches, const Options &options)
void EstimateCalibrated(const Camera &camera1, const std::vector< Eigen::Vector2d > &points1, const Camera &camera2, const std::vector< Eigen::Vector2d > &points2, const FeatureMatches &matches, const Options &options)
void EstimateMultiple(const Camera &camera1, const std::vector< Eigen::Vector2d > &points1, const Camera &camera2, const std::vector< Eigen::Vector2d > &points2, const FeatureMatches &matches, const Options &options)
static bool DetectWatermark(const Camera &camera1, const std::vector< Eigen::Vector2d > &points1, const Camera &camera2, const std::vector< Eigen::Vector2d > &points2, const size_t num_inliers, const std::vector< char > &inlier_mask, const Options &options)
void Estimate(const Camera &camera1, const std::vector< Eigen::Vector2d > &points1, const Camera &camera2, const std::vector< Eigen::Vector2d > &points2, const FeatureMatches &matches, const Options &options)
bool EstimateRelativePose(const Camera &camera1, const std::vector< Eigen::Vector2d > &points1, const Camera &camera2, const std::vector< Eigen::Vector2d > &points2)