#include <unordered_map>
static const std::unordered_map<std::string, std::string>
        map_shared_argument_docstrings = {
                {"rgbd_image",
                 "The input RGBD image should have a uint16_t depth image and "
                 "RGB image with any DType and the same size."},
                {"depth", "The input depth image should be a uint16_t image."},
                {"intrinsics", "Intrinsic parameters of the camera."},
                {"extrinsics", "Extrinsic parameters of the camera."},
                {"depth_scale", "The depth is scaled by 1 / depth_scale."},
                {"depth_max", "Truncated at depth_max distance."},
                {"stride",
                 "Sampling factor to support coarse point cloud extraction. "
                 "Unless normals are requested, there is no low pass "
                 "filtering, so aliasing is possible for stride>1."},
                {"with_normals",
                 "Also compute normals for the point cloud. If True, the point "
                 "cloud will only contain points with valid normals. If "
                 "normals are requested, the depth map is first filtered to "
                 "ensure smooth normals."},
                {"max_nn",
                 "Neighbor search max neighbors parameter [default = 30]."},
                {"radius",
                 "Neighbor search radius parameter to use HybridSearch. "
                 "[Recommended ~1.4x voxel size]."}};
void pybind_pointcloud(py::module& m) {
    py::class_<PointCloud, PyGeometry<PointCloud>, std::shared_ptr<PointCloud>,
               Geometry, DrawableGeometry>
            pointcloud(m, "PointCloud",
                       R"(
A point cloud contains a list of 3D points. The point cloud class stores the
attribute data in key-value maps, where the key is a string representing the
attribute name and the value is a Tensor containing the attribute data.

The attributes of the point cloud have different levels::
    import cloudViewer as cv3d

    device = cv3d.core.Device("CPU:0")
    dtype = cv3d.core.float32

    # Create an empty point cloud
    # Use pcd.point to access the points' attributes
    pcd = cv3d.t.geometry.PointCloud(device)

    # Default attribute: "positions".
    # This attribute is created by default and is required by all point clouds.
    # The shape must be (N, 3). The device of "positions" determines the device
    # of the point cloud.
    pcd.point.positions = cv3d.core.Tensor([[0, 0, 0],
                                            [1, 1, 1],
                                            [2, 2, 2]], dtype, device)

    # Common attributes: "normals", "colors".
    # Common attributes are used in built-in point cloud operations. The
    # spellings must be correct. For example, if "normal" is used instead of
    # "normals", some internal operations that expect "normals" will not work.
    # "normals" and "colors" must have shape (N, 3) and must be on the same
    # device as the point cloud.
    pcd.point.normals = cv3d.core.Tensor([[0, 0, 1],
                                          [0, 1, 0],
                                          [1, 0, 0]], dtype, device)
    pcd.point.colors = cv3d.core.Tensor([[0.0, 0.0, 0.0],
                                         [0.1, 0.1, 0.1],
                                         [0.2, 0.2, 0.2]], dtype, device)

    # User-defined attributes.
    # You can also attach custom attributes. The value tensor must be on the
    # same device as the point cloud. There are no restrictions on the shape or
    # dtype.
    pcd.point.intensities = cv3d.core.Tensor([0.3, 0.1, 0.4], dtype, device)
    pcd.point.labels = cv3d.core.Tensor([3, 1, 4], cv3d.core.int32, device)
)");

    pointcloud
            .def(py::init<const core::Device&>(),
                 "Construct an empty pointcloud on the provided ``device`` "
                 "(default: 'CPU:0').",
                 "device"_a = core::Device("CPU:0"))
            .def(py::init<const core::Tensor&>(), "positions"_a)
            .def(py::init<const std::unordered_map<std::string,
                                                   core::Tensor>&>(),
                 "map_keys_to_tensors"_a);
    py::detail::bind_copy_functions<PointCloud>(pointcloud);
    pointcloud.def(py::pickle(
                    "Cannot unpickle PointCloud! Expecting a tuple of "
                    "Device ({}) is not available. PointCloud will be "
                for (auto& kv : map_keys_to_tensors) {
                    pcd.SetPointAttr(kv.first, kv.second);
    pointcloud.def_property_readonly(
            "point",
            py::overload_cast<>(&PointCloud::GetPointAttr, py::const_),
            "Point's attributes: positions, colors, normals, etc.");
156 "Transfer the point cloud to a specified device.",
157 "device"_a,
"copy"_a =
false);
159 "Returns a copy of the point cloud on the same device.");
166 "Transfer the point cloud to CPU. If the point cloud is "
167 "already on CPU, no copy will be performed.");
170 [](
const PointCloud& pointcloud,
int device_id) {
173 "Transfer the point cloud to a CUDA device. If the point cloud is "
174 "already on the specified CUDA device, no copy will be performed.",
179 "Returns the min bound for point coordinates.");
181 "Returns the max bound for point coordinates.");
183 "Returns the center for point coordinates.");
    pointcloud.def(
            "append",
            [](const PointCloud& self, const PointCloud& other) {
                return self.Append(other);
            });
    pointcloud.def(
            "__add__",
            [](const PointCloud& self, const PointCloud& other) {
                return self.Append(other);
            });
195 "Transforms the points and normals (if exist).");
197 "relative"_a =
true,
"Translates points.");
201 "Rotate points and normals (if exist).");
204 "boolean_mask"_a,
"invert"_a =
false,
205 "Select points from input pointcloud, based on boolean mask "
206 "indices into output point cloud.");
208 "invert"_a =
false,
"remove_duplicates"_a =
false,
209 "Select points from input pointcloud, based on indices into "
210 "output point cloud.");
    pointcloud.def(
            "voxel_down_sample",
            [](const PointCloud& pointcloud, const double voxel_size,
               const std::string& reduction) {
                return pointcloud.VoxelDownSample(voxel_size, reduction);
            },
            "Downsamples a point cloud with a specified voxel size and a "
            "reduction type.",
            "voxel_size"_a, "reduction"_a = "mean",
            R"doc(Downsamples a point cloud with a specified voxel size.

Args:
    voxel_size (float): The size of the voxel used to downsample the point cloud.
    reduction (str): The approach to pool point properties in a voxel. Currently only "mean" is supported.

Returns:
    A downsampled point cloud with point properties reduced in each voxel.

Example:
    We will load the Eagle dataset, downsample it, and show the result::

        eagle = cv3d.data.EaglePointCloud()
        pcd = cv3d.t.io.read_point_cloud(eagle.path)
        pcd_down = pcd.voxel_down_sample(voxel_size=0.05)
        cv3d.visualization.draw([{'name': 'pcd', 'geometry': pcd}, {'name': 'pcd_down', 'geometry': pcd_down}])
)doc");
240 "Downsamples a point cloud by selecting every kth index "
241 "point and its attributes.",
244 "Downsample a pointcloud by selecting random index point "
245 "and its attributes.",
    pointcloud.def("farthest_point_down_sample",
                   &PointCloud::FarthestPointDownSample,
                   "Downsamples a point cloud into an output point cloud "
                   "containing the set of points with the farthest distance. "
                   "The sampling is performed by iteratively selecting the "
                   "farthest point from the previously selected points.",
                   "num_samples"_a,
                   "Index to start downsampling from. A valid index is a "
                   "non-negative number less than the number of points in the "
                   "point cloud.",
                   "start_index"_a = 0);
259 "nb_points"_a,
"search_radius"_a,
260 R
"(Remove points that have less than nb_points neighbors in a
261 sphere of a given search radius.
264 nb_points: Number of neighbor points required within the radius.
265 search_radius: Radius of the sphere.
268 Tuple of filtered point cloud and boolean mask tensor for selected values
269 w.r.t. input point cloud.)");
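    // Hedged usage sketch: the binding above returns a (point cloud, mask)
    // tuple (parameter values illustrative):
    //   pcd_filtered, mask = pcd.remove_radius_outliers(nb_points=16,
    //                                                   search_radius=0.05)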
271 "remove_statistical_outliers",
274 R
"(Remove points that are further away from their \p nb_neighbor
275 neighbors in average. This function is not recommended to use on GPU.
278 nb_neighbors: Number of neighbors around the target point.
279 std_ratio: Standard deviation ratio.
282 Tuple of filtered point cloud and boolean mask tensor for selected values
283 w.r.t. input point cloud.)");
284 pointcloud.def("remove_duplicated_points",
286 "Remove duplicated points and there associated attributes.");
289 "remove_nan"_a =
true,
"remove_infinite"_a =
true,
290 R
"(Remove all points from the point cloud that have a nan entry, or
291 infinite value. It also removes the corresponding attributes.
294 remove_nan: Remove NaN values from the PointCloud.
295 remove_infinite: Remove infinite values from the PointCloud.
298 Tuple of filtered point cloud and boolean mask tensor for selected values
299 w.r.t. input point cloud.)");
301 "color"_a,
"Assigns uniform color to the point cloud.");
304 "Normalize point normals to length 1.");
    pointcloud.def("estimate_normals", &PointCloud::EstimateNormals,
                   py::call_guard<py::gil_scoped_release>(),
                   py::arg("max_nn") = 30, py::arg("radius") = py::none(),
                   "Function to estimate point normals. If the point cloud "
                   "normals exist, the estimated normals are oriented with "
                   "respect to them. It uses KNN search (not recommended on "
                   "GPU) if only the max_nn parameter is provided, radius "
                   "search (not recommended on GPU) if only radius is "
                   "provided, and hybrid search (recommended) if both are "
                   "provided.");
    pointcloud.def("orient_normals_to_align_with_direction",
                   &PointCloud::OrientNormalsToAlignWithDirection,
                   "Function to orient the normals of a point cloud.",
                   "orientation_reference"_a = core::Tensor::Init<float>(
                           {0, 0, 1}, core::Device("CPU:0")));
    pointcloud.def("orient_normals_towards_camera_location",
                   &PointCloud::OrientNormalsTowardsCameraLocation,
                   "Function to orient the normals of a point cloud.",
                   "camera_location"_a = core::Tensor::Zeros(
                           {3}, core::Float32, core::Device("CPU:0")));
326 "orient_normals_consistent_tangent_plane",
328 "lambda_penalty"_a = 0.0,
"cos_alpha_tol"_a = 1.0,
329 R
"(Function to consistently orient the normals of a point cloud based on tangent planes.
331 The algorithm is described in Hoppe et al., "Surface Reconstruction from Unorganized Points", 1992.
332 Additional information about the choice of lambda_penalty and cos_alpha_tol for complex
333 point clouds can be found in Piazza, Valentini, Varetti, "Mesh Reconstruction from Point Cloud", 2023
334 (https://eugeniovaretti.github.io/meshreco/Piazza_Valentini_Varetti_MeshReconstructionFromPointCloud_2023.pdf).
337 k (int): Number of neighbors to use for tangent plane estimation.
338 lambda_penalty (float): A non-negative real parameter that influences the distance
339 metric used to identify the true neighbors of a point in complex
340 geometries. It penalizes the distance between a point and the tangent
341 plane defined by the reference point and its normal vector, helping to
342 mitigate misclassification issues encountered with traditional
343 Euclidean distance metrics.
344 cos_alpha_tol (float): Cosine threshold angle used to determine the
345 inclusion boundary of neighbors based on the direction of the normal
349 We use Bunny point cloud to compute its normals and orient them consistently.
350 The initial reconstruction adheres to Hoppe's algorithm (raw), whereas the
351 second reconstruction utilises the lambda_penalty and cos_alpha_tol parameters.
352 Due to the high density of the Bunny point cloud available in CloudViewer a larger
353 value of the parameter k is employed to test the algorithm. Usually you do
354 not have at disposal such a refined point clouds, thus you cannot find a
355 proper choice of k: refer to
356 https://eugeniovaretti.github.io/meshreco for these cases.::
358 import cloudViewer as cv3d
361 data = cv3d.data.BunnyMesh()
363 # Case 1, Hoppe (raw):
364 pcd = cv3d.io.read_point_cloud(data.path)
366 # Compute normals and orient them consistently, using k=100 neighbours
367 pcd.estimate_normals()
368 pcd.orient_normals_consistent_tangent_plane(100)
370 # Create mesh from point cloud using Poisson Algorithm
371 poisson_mesh = cv3d.geometry.ccMesh.create_from_point_cloud_poisson(pcd, depth=8, width=0, scale=1.1, linear_fit=False)[0]
372 poisson_mesh.paint_uniform_color(np.array([[0.5],[0.5],[0.5]]))
373 poisson_mesh.compute_vertex_normals()
374 cv3d.visualization.draw_geometries([poisson_mesh])
376 # Case 2, reconstruction using lambda_penalty and cos_alpha_tol parameters:
377 pcd_robust = cv3d.io.read_point_cloud(data.path)
379 # Compute normals and orient them consistently, using k=100 neighbours
380 pcd_robust.estimate_normals()
381 pcd_robust.orient_normals_consistent_tangent_plane(100, 10, 0.5)
383 # Create mesh from point cloud using Poisson Algorithm
384 poisson_mesh_robust = cv3d.geometry.ccMesh.create_from_point_cloud_poisson(pcd_robust, depth=8, width=0, scale=1.1, linear_fit=False)[0]
385 poisson_mesh_robust.paint_uniform_color(np.array([[0.5],[0.5],[0.5]]))
386 poisson_mesh_robust.compute_vertex_normals()
388 cv3d.visualization.draw_geometries([poisson_mesh_robust]) )");
    pointcloud.def("estimate_color_gradients",
                   &PointCloud::EstimateColorGradients,
                   py::call_guard<py::gil_scoped_release>(),
                   py::arg("max_nn") = 30, py::arg("radius") = py::none(),
                   "Function to estimate point color gradients. It uses KNN "
                   "search (not recommended on GPU) if only the max_nn "
                   "parameter is provided, radius search (not recommended on "
                   "GPU) if only radius is provided, and hybrid search "
                   "(recommended) if both are provided.");
    pointcloud.def_static(
            "create_from_depth_image", &PointCloud::CreateFromDepthImage,
            py::call_guard<py::gil_scoped_release>(), "depth"_a, "intrinsics"_a,
            "extrinsics"_a =
                    core::Tensor::Eye(4, core::Float32, core::Device("CPU:0")),
            "depth_scale"_a = 1000.0f, "depth_max"_a = 3.0f, "stride"_a = 1,
            "with_normals"_a = false,
            "Factory function to create a pointcloud (with only 'points') from "
            "a depth image and a camera model.\n\n Given depth value d at (u, "
            "v) image coordinate, the corresponding 3d point is:\n\n z = d / "
            "depth_scale\n\n x = (u - cx) * z / fx\n\n y = (v - cy) * z / fy");
    pointcloud.def_static(
            "create_from_rgbd_image", &PointCloud::CreateFromRGBDImage,
            py::call_guard<py::gil_scoped_release>(), "rgbd_image"_a,
            "intrinsics"_a,
            "extrinsics"_a =
                    core::Tensor::Eye(4, core::Float32, core::Device("CPU:0")),
            "depth_scale"_a = 1000.0f, "depth_max"_a = 3.0f, "stride"_a = 1,
            "with_normals"_a = false,
            "Factory function to create a pointcloud (with properties "
            "{'points', 'colors'}) from an RGBD image and a camera model.\n\n"
            "Given depth value d at (u, v) image coordinate, the corresponding "
            "3d point is:\n\n z = d / depth_scale\n\n x = (u - cx) * z / "
            "fx\n\n y = (v - cy) * z / fy");
    pointcloud.def_static(
            "from_legacy", &PointCloud::FromLegacy,
            "Create a PointCloud from a legacy CloudViewer PointCloud.");
432 "width"_a,
"height"_a,
"intrinsics"_a,
433 py::arg_v(
"extrinsics",
436 "cloudViewer.core.Tensor.eye(4)"),
437 "depth_scale"_a = 1000.0,
"depth_max"_a = 3.0,
438 "Project a point cloud to a depth image.");
440 "width"_a,
"height"_a,
"intrinsics"_a,
441 py::arg_v(
"extrinsics",
444 "cloudViewer.core.Tensor.eye(4)"),
445 "depth_scale"_a = 1000.0,
"depth_max"_a = 3.0,
446 "Project a colored point cloud to a RGBD image.");
449 "camera_location"_a,
"radius"_a,
450 R
"(Removes hidden points from a point cloud and returns a mesh of
451 the remaining points. Based on Katz et al. 'Direct Visibility of Point Sets',
452 2007. Additional information about the choice of radius for noisy point clouds
453 can be found in Mehra et. al. 'Visibility of Noisy Point Cloud Data', 2010.
454 This is a wrapper for a CPU implementation and a copy of the point cloud data
455 and resulting visible triangle mesh and indiecs will be made.
458 camera_location: All points not visible from that location will be removed.
460 radius: The radius of the spherical projection.
463 Tuple of visible triangle mesh and indices of visible points on the same
464 device as the point cloud.
468 We use armadillo mesh to compute the visible points from given camera::
470 # Convert mesh to a point cloud and estimate dimensions.
471 armadillo_data = cv3d.data.ArmadilloMesh()
472 pcd = cv3d.io.read_triangle_mesh(
473 armadillo_data.path).sample_points_poisson_disk(5000)
475 diameter = np.linalg.norm(
476 np.asarray(pcd.get_max_bound()) - np.asarray(pcd.get_min_bound()))
478 # Define parameters used for hidden_point_removal.
479 camera = cv3d.core.Tensor([0, 0, diameter], cv3d.core.float32)
480 radius = diameter * 100
482 # Get all points that are visible from given view point.
483 pcd = cv3d.t.geometry.PointCloud.from_legacy(pcd)
484 _, pt_map = pcd.hidden_point_removal(camera, radius)
485 pcd = pcd.select_by_index(pt_map)
486 cv3d.visualization.draw([pcd], point_size=5))");
489 "min_points"_a,
"print_progress"_a =
false,
490 R
"(Cluster PointCloud using the DBSCAN algorithm Ester et al.,'A
491 Density-Based Algorithm for Discovering Clusters in Large Spatial Databases
492 with Noise', 1996. This is a wrapper for a CPU implementation and a copy of the
493 point cloud data and resulting labels will be made.
496 eps: Density parameter that is used to find neighbouring points.
498 min_points: Minimum number of points to form a cluster.
500 print_progress (default False): If 'True' the progress is visualized in the console.
503 A Tensor list of point labels on the same device as the point cloud, -1
504 indicates noise according to the algorithm.
508 We use Redwood dataset for demonstration::
510 import matplotlib.pyplot as plt
512 sample_ply_data = cv3d.data.PLYPointCloud()
513 pcd = cv3d.t.io.read_point_cloud(sample_ply_data.path)
514 labels = pcd.cluster_dbscan(eps=0.02, min_points=10, print_progress=True)
516 max_label = labels.max().item()
517 colors = plt.get_cmap("tab20")(
518 labels.numpy() / (max_label if max_label > 0 else 1))
519 colors = cv3d.core.Tensor(colors[:, :3], cv3d.core.float32)
520 colors[labels < 0] = 0
521 pcd.point.colors = colors
522 cv3d.visualization.draw([pcd]))");
525 "distance_threshold"_a = 0.01,
"ransac_n"_a = 3,
526 "num_iterations"_a = 100,
"probability"_a = 0.999,
527 R
"(Segments a plane in the point cloud using the RANSAC algorithm.
528 This is a wrapper for a CPU implementation and a copy of the point cloud data and
529 resulting plane model and inlier indiecs will be made.
532 distance_threshold (default 0.01): Max distance a point can be from the plane model, and still be considered an inlier.
534 ransac_n (default 3): Number of initial points to be considered inliers in each iteration.
535 num_iterations (default 100): Maximum number of iterations.
537 probability (default 0.999): Expected probability of finding the optimal plane.
540 Tuple of the plane model `ax + by + cz + d = 0` and the indices of
541 the plane inliers on the same device as the point cloud.
545 We use Redwood dataset to compute its plane model and inliers::
547 sample_pcd_data = cv3d.data.PCDPointCloud()
548 pcd = cv3d.t.io.read_point_cloud(sample_pcd_data.path)
549 plane_model, inliers = pcd.segment_plane(distance_threshold=0.01,
552 inlier_cloud = pcd.select_by_index(inliers)
553 inlier_cloud = inlier_cloud.paint_uniform_color([1.0, 0, 0])
554 outlier_cloud = pcd.select_by_index(inliers, invert=True)
555 cv3d.visualization.draw([inlier_cloud, outlier_cloud]))");
558 "joggle_inputs"_a =
false,
559 R
"doc(Compute the convex hull of a triangle mesh using qhull. This runs on the CPU.
562 joggle_inputs (default False): Handle precision problems by randomly perturbing the input data. Set to True if perturbing the input is acceptable but you need convex simplicial output. If False, neighboring facets may be merged in case of precision problems. See `QHull docs <http://www.qhull.org/html/qh-impre.htm#joggle>`__ for more details.
565 TriangleMesh representing the convexh hull. This contains an
566 extra vertex property `point_indices` that contains the index of the
567 corresponding vertex in the original mesh.
570 We will load the Eagle dataset, compute and display it's convex hull::
572 eagle = cv3d.data.EaglePointCloud()
573 pcd = cv3d.t.io.read_point_cloud(eagle.path)
574 hull = pcd.compute_convex_hull()
575 cv3d.visualization.draw([{'name': 'eagle', 'geometry': pcd}, {'name': 'convex hull', 'geometry': hull}])
577 pointcloud.def("compute_boundary_points",
579 "max_nn"_a = 30,
"angle_threshold"_a = 90.0,
580 R
"doc(Compute the boundary points of a point cloud.
581 The implementation is inspired by the PCL implementation. Reference:
582 https://pointclouds.org/documentation/classpcl_1_1_boundary_estimation.html
585 radius: Neighbor search radius parameter.
586 max_nn (default 30): Maximum number of neighbors to search.
587 angle_threshold (default 90.0): Angle threshold to decide if a point is on the boundary.
590 Tensor of boundary points and its boolean mask tensor.
593 We will load the DemoCropPointCloud dataset, compute its boundary points::
595 ply_point_cloud = cv3d.data.DemoCropPointCloud()
596 pcd = cv3d.t.io.read_point_cloud(ply_point_cloud.point_cloud_path)
597 boundaries, mask = pcd.compute_boundary_points(radius, max_nn)
598 boundaries.paint_uniform_color([1.0, 0.0, 0.0])
599 cv3d.visualization.draw([pcd, boundaries])
604 "Convert to a legacy CloudViewer PointCloud.");
606 "get_axis_aligned_bounding_box",
608 "Create an axis-aligned bounding box from attribute 'positions'.");
611 "Create an oriented bounding box from attribute 'positions'.");
    pointcloud.def("crop",
                   py::overload_cast<const AxisAlignedBoundingBox&, bool>(
                           &PointCloud::Crop, py::const_),
                   "Function to crop pointcloud into output pointcloud.",
                   "aabb"_a, "invert"_a = false);
    pointcloud.def("crop",
                   py::overload_cast<const OrientedBoundingBox&, bool>(
                           &PointCloud::Crop, py::const_),
                   "Function to crop pointcloud into output pointcloud.",
                   "obb"_a, "invert"_a = false);
    docstring::ClassMethodDocInject(
            m, "PointCloud", "select_by_mask",
            {{"boolean_mask",
              "Boolean indexing tensor of shape {n,} containing true values "
              "for the indices that are to be selected."},
             {"invert", "Set to `True` to invert the selection of indices."}});
    docstring::ClassMethodDocInject(
            m, "PointCloud", "select_by_index",
            {{"indices",
              "Int64 indexing tensor of shape {n,} containing index values "
              "that are to be selected."},
             {"invert",
              "Set to `True` to invert the selection of indices, and also "
              "ignore the duplicated indices."},
             {"remove_duplicates",
              "Set to `True` to remove the duplicated indices."}});
    docstring::ClassMethodDocInject(
            m, "PointCloud", "voxel_down_sample",
            {{"voxel_size", "Voxel size. A positive number."}});
    docstring::ClassMethodDocInject(
            m, "PointCloud", "uniform_down_sample",
            {{"every_k_points",
              "Sample rate, the selected point indices are [0, k, 2k, …]."}});
    docstring::ClassMethodDocInject(
            m, "PointCloud", "random_down_sample",
            {{"sampling_ratio",
              "Sampling ratio, the ratio of sample to total number of points "
              "in the pointcloud."}});
    docstring::ClassMethodDocInject(
            m, "PointCloud", "farthest_point_down_sample",
            {{"num_samples", "Number of points to be sampled."},
             {"start_index", "Index of point to start downsampling from."}});
    docstring::ClassMethodDocInject(
            m, "PointCloud", "remove_radius_outliers",
            {{"nb_points",
              "Number of neighbor points required within the radius."},
             {"search_radius", "Radius of the sphere."}});
    docstring::ClassMethodDocInject(
            m, "PointCloud", "paint_uniform_color",
            {{"color",
              "Color of the pointcloud. Floating color values are clipped "
              "between 0.0 and 1.0."}});
    docstring::ClassMethodDocInject(
            m, "PointCloud", "orient_normals_to_align_with_direction",
            {{"orientation_reference",
              "Normals are oriented with respect to orientation_reference."}});
    docstring::ClassMethodDocInject(
            m, "PointCloud", "orient_normals_towards_camera_location",
            {{"camera_location",
              "Normals are oriented towards the camera_location."}});
    docstring::ClassMethodDocInject(
            m, "PointCloud", "crop",
            {{"aabb", "AxisAlignedBoundingBox to crop points."},
             {"invert",
              "Crop the points outside of the bounding box or inside of the "
              "bounding box."}});
    docstring::ClassMethodDocInject(
            m, "PointCloud", "crop",
            {{"obb", "OrientedBoundingBox to crop points."},
             {"invert",
              "Crop the points outside of the bounding box or inside of the "
              "bounding box."}});
694 "axis"_a,
"resolution"_a = 16,
"translation"_a = 0.0,
696 R
"(Sweeps the point set rotationally about an axis.
699 angle (float): The rotation angle in degree.
701 axis (cloudViewer.core.Tensor): The rotation axis.
703 resolution (int): The resolution defines the number of intermediate sweeps
704 about the rotation axis.
706 translation (float): The translation along the rotation axis.
709 A line set with the result of the sweep operation.
714 This code generates a number of helices from a point cloud::
716 import cloudViewer as cv3d
718 pcd = cv3d.t.geometry.PointCloud(np.random.rand(10,3))
719 helices = pcd.extrude_rotation(3*360, [0,1,0], resolution=3*16, translation=2)
720 cv3d.visualization.draw([{'name': 'helices', 'geometry': helices}])
725 "scale"_a = 1.0,
"capping"_a =
true,
726 R
"(Sweeps the point cloud along a direction vector.
730 vector (cloudViewer.core.Tensor): The direction vector.
732 scale (float): Scalar factor which essentially scales the direction vector.
735 A line set with the result of the sweep operation.
740 This code generates a set of straight lines from a point cloud::
742 import cloudViewer as cv3d
744 pcd = cv3d.t.geometry.PointCloud(np.random.rand(10,3))
745 lines = pcd.extrude_linear([0,1,0])
746 cv3d.visualization.draw([{'name': 'lines', 'geometry': lines}])
    pointcloud.def("pca_partition", &PointCloud::PCAPartition, "max_points"_a,
                   R"(Partition the point cloud by recursively doing PCA.

This function creates a new point attribute with the name "partition_ids" storing
the partition id for each point.

Args:
    max_points (int): The maximum allowed number of points in a partition.

Returns:
    The number of partitions.

Example:
    This code computes partitions of a point cloud such that each partition
    contains at most 20 points::

        import cloudViewer as cv3d
        import numpy as np

        pcd = cv3d.t.geometry.PointCloud(np.random.rand(100,3))
        num_partitions = pcd.pca_partition(max_points=20)

        # print the partition ids and the number of points for each of them.
        print(np.unique(pcd.point.partition_ids.numpy(), return_counts=True))
)");
776 "metrics"_a,
"params"_a,
777 R
"(Compute various metrics between two point clouds.
779 Currently, Chamfer distance, Hausdorff distance and F-Score `[Knapitsch2017] <../tutorial/reference.html#Knapitsch2017>`_ are supported.
780 The Chamfer distance is the sum of the mean distance to the nearest neighbor
781 from the points of the first point cloud to the second point cloud. The F-Score
782 at a fixed threshold radius is the harmonic mean of the Precision and Recall.
783 Recall is the percentage of surface points from the first point cloud that have
784 the second point cloud points within the threshold radius, while Precision is
785 the percentage of points from the second point cloud that have the first point
786 cloud points within the threshold radius.
792 \text{Chamfer Distance: } d_{CD}(X,Y) &= \frac{1}{|X|}\sum_{i \in X} || x_i - n(x_i, Y) || + \frac{1}{|Y|}\sum_{i \in Y} || y_i - n(y_i, X) ||\\
793 \text{Hausdorff distance: } d_H(X,Y) &= \max \left\{ \max_{i \in X} || x_i - n(x_i, Y) ||, \max_{i \in Y} || y_i - n(y_i, X) || \right\}\\
794 \text{Precision: } P(X,Y|d) &= \frac{100}{|X|} \sum_{i \in X} || x_i - n(x_i, Y) || < d \\
795 \text{Recall: } R(X,Y|d) &= \frac{100}{|Y|} \sum_{i \in Y} || y_i - n(y_i, X) || < d \\
796 \text{F-Score: } F(X,Y|d) &= \frac{2 P(X,Y|d) R(X,Y|d)}{P(X,Y|d) + R(X,Y|d)} \\
800 pcd2 (t.geometry.PointCloud): Other point cloud to compare with.
801 metrics (Sequence[t.geometry.Metric]): List of Metric s to compute. Multiple metrics can be computed at once for efficiency.
802 params (t.geometry.MetricParameters): This holds parameters required by different metrics.
805 Tensor containing the requested metrics.
809 from cv3d.t.geometry import TriangleMesh, PointCloud, Metric, MetricParameters
810 # box is a cube with one vertex at the origin and a side length 1
811 pos = TriangleMesh.create_box().vertex.positions
812 pcd1 = PointCloud(pos.clone())
813 pcd2 = PointCloud(pos * 1.1)
815 # (1, 3, 3, 1) vertices are shifted by (0, 0.1, 0.1*sqrt(2), 0.1*sqrt(3))
817 metric_params = MetricParameters(
818 fscore_radius=cv3d.utility.FloatVector((0.01, 0.11, 0.15, 0.18)))
819 metrics = pcd1.compute_metrics(
820 pcd2, (Metric.ChamferDistance, Metric.HausdorffDistance, Metric.FScore),
824 np.testing.assert_allclose(
825 metrics.cpu().numpy(),
826 (0.22436734, np.sqrt(3) / 10, 100. / 8, 400. / 8, 700. / 8, 100.),