ACloudViewer 3.9.4
A Modern Library for 3D Data Processing
ColorMapUtils.cpp
// ----------------------------------------------------------------------------
// -                      CloudViewer: www.cloudViewer.org                    -
// ----------------------------------------------------------------------------
// Copyright (c) 2018-2024 www.cloudViewer.org
// SPDX-License-Identifier: MIT
// ----------------------------------------------------------------------------

#include <Image.h>
#include <Parallel.h>
#include <RGBDImage.h>
#include <ecvHObjectCaster.h>
#include <ecvKDTreeFlann.h>
#include <ecvMesh.h>
#include <ecvPointCloud.h>

namespace cloudViewer {
namespace pipelines {
namespace color_map {

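// Projects a 3D point X into the image plane of camera_parameter using the
// pinhole intrinsics and extrinsic pose, returning (u, v, depth) where depth
// is the point's z-coordinate in camera space.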
static std::tuple<float, float, float> Project3DPointAndGetUVDepth(
        const Eigen::Vector3d X,
        const camera::PinholeCameraParameters& camera_parameter) {
    std::pair<double, double> f = camera_parameter.intrinsic_.GetFocalLength();
    std::pair<double, double> p =
            camera_parameter.intrinsic_.GetPrincipalPoint();
    Eigen::Vector4d Vt =
            camera_parameter.extrinsic_ * Eigen::Vector4d(X(0), X(1), X(2), 1);
    float u = float((Vt(0) * f.first) / Vt(2) + p.first);
    float v = float((Vt(1) * f.second) / Vt(2) + p.second);
    float z = float(Vt(2));
    return std::make_tuple(u, v, z);
}

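// Looks up the intensity of the image pixel that a 3D vertex V projects to,
// optionally shifting (u, v) by a warping field first. Returns {false, 0} if
// the projection falls outside the image (within image_boundary_margin).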
template <typename T>
static std::tuple<bool, T> QueryImageIntensity(
        const geometry::Image& img,
        const utility::optional<ImageWarpingField>& optional_warping_field,
        const Eigen::Vector3d& V,
        const camera::PinholeCameraParameters& camera_parameter,
        utility::optional<int> channel,
        int image_boundary_margin) {
    // We use double here for u, v so that the result stays numerically
    // consistent with the 0.12 release, since double->float->int can differ
    // from double->int.
    double u, v, depth;
    std::tie(u, v, depth) = Project3DPointAndGetUVDepth(V, camera_parameter);
    // TODO: check why we use the u, v before warping for TestImageBoundary.
    if (img.TestImageBoundary(u, v, image_boundary_margin)) {
        if (optional_warping_field.has_value()) {
            Eigen::Vector2d uv_shift =
                    optional_warping_field.value().GetImageWarpingField(u, v);
            u = uv_shift(0);
            v = uv_shift(1);
        }
        if (img.TestImageBoundary(u, v, image_boundary_margin)) {
            int u_round = int(round(u));
            int v_round = int(round(v));
            if (channel.has_value()) {
                return std::make_tuple(
                        true,
                        *img.PointerAt<T>(u_round, v_round, channel.value()));
            } else {
                return std::make_tuple(true,
                                       *img.PointerAt<T>(u_round, v_round));
            }
        }
    }
    return std::make_tuple(false, 0);
}

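// Precomputes per-frame helper images from the input RGB-D frames: a
// Gaussian-smoothed grayscale image, its Sobel gradients in x and y, and
// copies of the original color and depth images.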
std::tuple<std::vector<geometry::Image>,
           std::vector<geometry::Image>,
           std::vector<geometry::Image>,
           std::vector<geometry::Image>,
           std::vector<geometry::Image>>
CreateUtilImagesFromRGBD(const std::vector<geometry::RGBDImage>& images_rgbd) {
    std::vector<geometry::Image> images_gray;
    std::vector<geometry::Image> images_dx;
    std::vector<geometry::Image> images_dy;
    std::vector<geometry::Image> images_color;
    std::vector<geometry::Image> images_depth;
    for (size_t i = 0; i < images_rgbd.size(); i++) {
        auto gray_image = images_rgbd[i].color_.CreateFloatImage();
        auto gray_image_filtered =
                gray_image->Filter(geometry::Image::FilterType::Gaussian3);
        images_gray.push_back(*gray_image_filtered);
        images_dx.push_back(*gray_image_filtered->Filter(
                geometry::Image::FilterType::Sobel3Dx));
        images_dy.push_back(*gray_image_filtered->Filter(
                geometry::Image::FilterType::Sobel3Dy));
        auto color = std::make_shared<geometry::Image>(images_rgbd[i].color_);
        auto depth = std::make_shared<geometry::Image>(images_rgbd[i].depth_);
        images_color.push_back(*color);
        images_depth.push_back(*depth);
    }
    return std::make_tuple(images_gray, images_dx, images_dy, images_color,
                           images_depth);
}

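// Builds a per-frame mask marking depth discontinuities; masked (255) pixels
// are later excluded from vertex visibility because colors observed near
// object boundaries are unreliable.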
std::vector<geometry::Image> CreateDepthBoundaryMasks(
        const std::vector<geometry::Image>& images_depth,
        double depth_threshold_for_discontinuity_check,
        int half_dilation_kernel_size_for_discontinuity_map) {
    auto n_images = images_depth.size();
    std::vector<geometry::Image> masks;
    for (size_t i = 0; i < n_images; i++) {
        utility::LogDebug("[MakeDepthMasks] geometry::Image {:d}/{:d}", i,
                          n_images);
        masks.push_back(*images_depth[i].CreateDepthBoundaryMask(
                depth_threshold_for_discontinuity_check,
                half_dilation_kernel_size_for_discontinuity_map));
    }
    return masks;
}

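// For every camera/vertex pair, decides whether the vertex is visible in that
// camera by projecting it into the depth image and rejecting points outside
// the image, beyond maximum_allowable_depth, on a depth boundary, or whose
// projected depth disagrees with the sensor depth. Returns both the
// vertex-to-image and image-to-vertex visibility lists.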
std::tuple<std::vector<std::vector<int>>, std::vector<std::vector<int>>>
CreateVertexAndImageVisibility(
        const ccMesh& mesh,
        const std::vector<geometry::Image>& images_depth,
        const std::vector<geometry::Image>& images_mask,
        const camera::PinholeCameraTrajectory& camera_trajectory,
        double maximum_allowable_depth,
        double depth_threshold_for_visibility_check) {
    size_t n_camera = camera_trajectory.parameters_.size();
    size_t n_vertex = mesh.getVerticeSize();
    // visibility_image_to_vertex[c]: vertices visible by camera c.
    std::vector<std::vector<int>> visibility_image_to_vertex;
    visibility_image_to_vertex.resize(n_camera);
    // visibility_vertex_to_image[v]: cameras that can see vertex v.
    std::vector<std::vector<int>> visibility_vertex_to_image;
    visibility_vertex_to_image.resize(n_vertex);

    std::vector<Eigen::Vector3d> cached_vertices = mesh.getEigenVertices();
#pragma omp parallel for schedule(static) \
        num_threads(utility::EstimateMaxThreads())
    for (int camera_id = 0; camera_id < int(n_camera); camera_id++) {
        for (int vertex_id = 0; vertex_id < int(n_vertex); vertex_id++) {
            const Eigen::Vector3d& X = cached_vertices[vertex_id];
            float u, v, d;
            std::tie(u, v, d) = Project3DPointAndGetUVDepth(
                    X, camera_trajectory.parameters_[camera_id]);
            int u_d = int(round(u));
            int v_d = int(round(v));
            // Skip if the vertex projects outside the image boundary.
            if (d < 0.0 ||
                !images_depth[camera_id].TestImageBoundary(u_d, v_d)) {
                continue;
            }
            // Skip if the vertex's depth is too large (e.g. background).
            float d_sensor =
                    *images_depth[camera_id].PointerAt<float>(u_d, v_d);
            if (d_sensor > maximum_allowable_depth) {
                continue;
            }
            // Check the depth boundary mask. If a vertex is located at the
            // boundary of an object, its color will be highly diverse from
            // different viewing angles.
            if (*images_mask[camera_id].PointerAt<uint8_t>(u_d, v_d) == 255) {
                continue;
            }
            // Check depth errors.
            if (std::fabs(d - d_sensor) >=
                depth_threshold_for_visibility_check) {
                continue;
            }
            visibility_image_to_vertex[camera_id].push_back(vertex_id);
#pragma omp critical(CreateVertexAndImageVisibility)
            { visibility_vertex_to_image[vertex_id].push_back(camera_id); }
        }
    }

    for (int camera_id = 0; camera_id < int(n_camera); camera_id++) {
        size_t n_visible_vertex = visibility_image_to_vertex[camera_id].size();
        utility::LogDebug(
                "[cam {:d}]: {:d}/{:d} ({:.5f}%) vertices are visible",
                camera_id, n_visible_vertex, n_vertex,
                double(n_visible_vertex) / n_vertex * 100);
    }

    return std::make_tuple(visibility_vertex_to_image,
                           visibility_image_to_vertex);
}

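// Sets each vertex's proxy intensity to the average grayscale value sampled
// from all images in which the vertex is visible (optionally warped).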
void SetProxyIntensityForVertex(
        const ccMesh& mesh,
        const std::vector<geometry::Image>& images_gray,
        const utility::optional<std::vector<ImageWarpingField>>& warping_fields,
        const camera::PinholeCameraTrajectory& camera_trajectory,
        const std::vector<std::vector<int>>& visibility_vertex_to_image,
        std::vector<double>& proxy_intensity,
        int image_boundary_margin) {
    auto n_vertex = mesh.getVerticeSize();
    proxy_intensity.resize(n_vertex);

#pragma omp parallel for schedule(static) \
        num_threads(utility::EstimateMaxThreads())
    for (int i = 0; i < int(n_vertex); i++) {
        proxy_intensity[i] = 0.0;
        float sum = 0.0;
        for (size_t iter = 0; iter < visibility_vertex_to_image[i].size();
             iter++) {
            int j = visibility_vertex_to_image[i][iter];
            float gray;
            bool valid = false;
            if (warping_fields.has_value()) {
                std::tie(valid, gray) = QueryImageIntensity<float>(
                        images_gray[j], warping_fields.value()[j],
                        mesh.getVertice(i), camera_trajectory.parameters_[j],
                        utility::nullopt, image_boundary_margin);
            } else {
                std::tie(valid, gray) = QueryImageIntensity<float>(
                        images_gray[j], utility::nullopt, mesh.getVertice(i),
                        camera_trajectory.parameters_[j], utility::nullopt,
                        image_boundary_margin);
            }

            if (valid) {
                sum += 1.0;
                proxy_intensity[i] += gray;
            }
        }
        if (sum > 0) {
            proxy_intensity[i] /= sum;
        }
    }
}

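// Averages the RGB color of each vertex over all images that see it and
// writes the result into the mesh's associated point cloud. Vertices visible
// in no image are then colored from their invisible_vertex_color_knn nearest
// colored neighbors, if that option is enabled.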
void SetGeometryColorAverage(
        ccMesh& mesh,
        const std::vector<geometry::Image>& images_color,
        const utility::optional<std::vector<ImageWarpingField>>& warping_fields,
        const camera::PinholeCameraTrajectory& camera_trajectory,
        const std::vector<std::vector<int>>& visibility_vertex_to_image,
        int image_boundary_margin,
        int invisible_vertex_color_knn) {
    size_t n_vertex = mesh.getVerticeSize();
    ccPointCloud* cloud =
            ccHObjectCaster::ToPointCloud(mesh.getAssociatedCloud());
    cloud->unallocateColors();
    if (!cloud->resizeTheRGBTable()) {
        utility::LogError("[SetGeometryColorAverage] not enough memory!");
    }
    std::vector<size_t> valid_vertices;
    std::vector<size_t> invalid_vertices;
    std::vector<Eigen::Vector3d> temp_colors(n_vertex, Eigen::Vector3d::Zero());
#pragma omp parallel for schedule(static) \
        num_threads(utility::EstimateMaxThreads())
    for (int i = 0; i < (int)n_vertex; i++) {
        double sum = 0.0;
        for (size_t iter = 0; iter < visibility_vertex_to_image[i].size();
             iter++) {
            int j = visibility_vertex_to_image[i][iter];
            uint8_t r_temp, g_temp, b_temp;
            bool valid = false;
            utility::optional<ImageWarpingField> optional_warping_field;
            if (warping_fields.has_value()) {
                optional_warping_field = warping_fields.value()[j];
            } else {
                optional_warping_field = utility::nullopt;
            }
            std::tie(valid, r_temp) = QueryImageIntensity<uint8_t>(
                    images_color[j], optional_warping_field, mesh.getVertice(i),
                    camera_trajectory.parameters_[j], 0, image_boundary_margin);
            std::tie(valid, g_temp) = QueryImageIntensity<uint8_t>(
                    images_color[j], optional_warping_field, mesh.getVertice(i),
                    camera_trajectory.parameters_[j], 1, image_boundary_margin);
            std::tie(valid, b_temp) = QueryImageIntensity<uint8_t>(
                    images_color[j], optional_warping_field, mesh.getVertice(i),
                    camera_trajectory.parameters_[j], 2, image_boundary_margin);
            float r = (float)r_temp / 255.0f;
            float g = (float)g_temp / 255.0f;
            float b = (float)b_temp / 255.0f;
            if (valid) {
                temp_colors[i] += Eigen::Vector3d(r, g, b);
                sum += 1.0;
            }
        }
#pragma omp critical(SetGeometryColorAverage)
        {
            if (sum > 0.0) {
                cloud->setEigenColor(i, temp_colors[i] / sum);
                valid_vertices.push_back(i);
            } else {
                cloud->setEigenColor(i, temp_colors[i]);
                invalid_vertices.push_back(i);
            }
        }
    }
    if (invisible_vertex_color_knn > 0) {
        std::shared_ptr<ccMesh> valid_mesh = mesh.SelectByIndex(valid_vertices);
        geometry::KDTreeFlann kd_tree(*valid_mesh);
#pragma omp parallel for schedule(static) \
        num_threads(utility::EstimateMaxThreads())
        for (int i = 0; i < (int)invalid_vertices.size(); ++i) {
            size_t invalid_vertex = invalid_vertices[i];
            std::vector<int> indices;  // indices to valid_mesh
            std::vector<double> dists;
            kd_tree.SearchKNN(mesh.getVertice(invalid_vertex),
                              invisible_vertex_color_knn, indices, dists);
            Eigen::Vector3d new_color(0, 0, 0);
            for (const int& index : indices) {
                new_color += valid_mesh->getVertexColor(index);
            }
            if (indices.size() > 0) {
                new_color /= static_cast<double>(indices.size());
            }
            cloud->setPointColor(invalid_vertex, new_color);
        }
    }
}

}  // namespace color_map
}  // namespace pipelines
}  // namespace cloudViewer