static const std::unordered_map<std::string, std::string>
        map_shared_argument_docstrings = {
                {"criteria", "Odometry convergence criteria."},
                {"criteria_list", "List of Odometry convergence criteria."},
                {"depth_outlier_trunc",
                 "Depth difference threshold used to filter projective "
                 "associations."},
                {"depth_huber_delta",
                 "Huber norm parameter used in depth loss."},
                {"depth_scale",
                 "Converts depth pixel values to meters by dividing the scale "
                 "factor."},
                {"init_source_to_target",
                 "(4, 4) initial transformation matrix from source to "
                 "target."},
                {"intrinsics", "(3, 3) intrinsic matrix for projection."},
                {"intensity_huber_delta",
                 "Huber norm parameter used in intensity loss."},
                {"method",
                 "Estimation method used to apply RGBD odometry. "
                 "One of (``PointToPlane``, ``Intensity``, ``Hybrid``)."},
                {"params", "Odometry loss parameters."},
                {"source", "The source RGBD image."},
                {"source_depth",
                 "(row, col, channel = 1) Float32 source depth image obtained "
                 "by PreprocessDepth before calling this function."},
                {"source_intensity",
                 "(row, col, channel = 1) Float32 source intensity image "
                 "obtained by RGBToGray before calling this function."},
                {"source_vertex_map",
                 "(row, col, channel = 3) Float32 source vertex image "
                 "obtained by CreateVertexMap before calling this function."},
                {"target", "The target RGBD image."},
                {"target_depth",
                 "(row, col, channel = 1) Float32 target depth image obtained "
                 "by PreprocessDepth before calling this function."},
                {"target_depth_dx",
                 "(row, col, channel = 1) Float32 target depth gradient image "
                 "along x-axis obtained by FilterSobel before calling this "
                 "function."},
                {"target_depth_dy",
                 "(row, col, channel = 1) Float32 target depth gradient image "
                 "along y-axis obtained by FilterSobel before calling this "
                 "function."},
                {"target_intensity",
                 "(row, col, channel = 1) Float32 target intensity image "
                 "obtained by RGBToGray before calling this function."},
                {"target_intensity_dx",
                 "(row, col, channel = 1) Float32 target intensity gradient "
                 "image along x-axis obtained by FilterSobel before calling "
                 "this function."},
                {"target_intensity_dy",
                 "(row, col, channel = 1) Float32 target intensity gradient "
                 "image along y-axis obtained by FilterSobel before calling "
                 "this function."},
                {"target_normal_map",
                 "(row, col, channel = 3) Float32 target normal image "
                 "obtained by CreateNormalMap before calling this function."},
                {"target_vertex_map",
                 "(row, col, channel = 3) Float32 target vertex image "
                 "obtained by CreateVertexMap before calling this function."}};
void pybind_odometry(py::module &m) {
    py::module m_odometry = m.def_submodule(
            "odometry", "Tensor odometry pipeline.");
    py::native_enum<Method>(m_odometry, "Method", "enum.Enum",
                            "Tensor odometry estimation method.")
            .value("PointToPlane", Method::PointToPlane)
            .value("Intensity", Method::Intensity)
            .value("Hybrid", Method::Hybrid)
            .export_values()
            .finalize();
    py::class_<OdometryConvergenceCriteria> odometry_convergence_criteria(
            m_odometry, "OdometryConvergenceCriteria",
            "Convergence criteria of odometry. "
            "Odometry algorithm stops if the relative change of fitness and "
            "rmse hit ``relative_fitness`` and ``relative_rmse`` individually, "
            "or the iteration number exceeds ``max_iteration``.");
    py::detail::bind_copy_functions<OdometryConvergenceCriteria>(
            odometry_convergence_criteria);
    odometry_convergence_criteria
            .def(py::init<int, double, double>(), "max_iteration"_a,
                 "relative_rmse"_a = 1e-6, "relative_fitness"_a = 1e-6)
            .def_readwrite("max_iteration",
                           &OdometryConvergenceCriteria::max_iteration_,
                           "Maximum iteration before iteration stops.")
            .def_readwrite("relative_rmse",
                           &OdometryConvergenceCriteria::relative_rmse_,
                           "If relative change (difference) of inlier RMSE "
                           "score is lower than ``relative_rmse``, the "
                           "iteration stops.")
            .def_readwrite("relative_fitness",
                           &OdometryConvergenceCriteria::relative_fitness_,
                           "If relative change (difference) of fitness score "
                           "is lower than ``relative_fitness``, the iteration "
                           "stops.")
            .def("__repr__", [](const OdometryConvergenceCriteria &c) {
                return fmt::format(
                        "OdometryConvergenceCriteria("
                        "max_iteration={:d}, "
                        "relative_rmse={:e}, "
                        "relative_fitness={:e})",
                        c.max_iteration_, c.relative_rmse_,
                        c.relative_fitness_);
            });
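    // Illustrative sketch (not part of the bindings): constructing the
    // criteria above from Python, one object per pyramid level for the
    // multi-scale odometry below. The module path
    // cloudViewer.t.pipelines.odometry is an assumption based on the
    // submodule registered above and the upstream Open3D layout.
    //
    //   from cloudViewer.t.pipelines import odometry
    //   criteria_list = [
    //       odometry.OdometryConvergenceCriteria(max_iteration=10),
    //       odometry.OdometryConvergenceCriteria(max_iteration=5),
    //       odometry.OdometryConvergenceCriteria(max_iteration=3),
    //   ]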
    py::class_<OdometryResult> odometry_result(m_odometry, "OdometryResult",
                                               "Odometry results.");
    py::detail::bind_copy_functions<OdometryResult>(odometry_result);
    odometry_result
            .def(py::init<core::Tensor, double, double>(),
                 py::arg_v("transformation",
                           core::Tensor::Eye(4, core::Dtype::Float64,
                                             core::Device("CPU:0")),
                           "cloudViewer.core.Tensor.eye(4, "
                           "dtype=cloudViewer.core.Dtype.Float64)"),
                 "inlier_rmse"_a = 0.0, "fitness"_a = 0.0)
            .def_readwrite("transformation", &OdometryResult::transformation_,
                           "``4 x 4`` float64 tensor on CPU: The estimated "
                           "transformation matrix.")
            .def_readwrite("inlier_rmse", &OdometryResult::inlier_rmse_,
                           "float: RMSE of all inlier correspondences. Lower "
                           "is better.")
            .def_readwrite("fitness", &OdometryResult::fitness_,
                           "float: The overlapping area (# of inlier "
                           "correspondences / # of points in target). Higher "
                           "is better.")
            .def("__repr__", [](const OdometryResult &odom_result) {
                return fmt::format(
                        "OdometryResult[fitness={:e}, inlier_rmse={:e}]."
                        "\nAccess transformation to get result.",
                        odom_result.fitness_, odom_result.inlier_rmse_);
            });
    py::class_<OdometryLossParams> odometry_loss_params(
            m_odometry, "OdometryLossParams", "Odometry loss parameters.");
    py::detail::bind_copy_functions<OdometryLossParams>(odometry_loss_params);
    odometry_loss_params
            .def(py::init<double, double, double>(),
                 "depth_outlier_trunc"_a = 0.07, "depth_huber_delta"_a = 0.05,
                 "intensity_huber_delta"_a = 0.1)
            .def_readwrite("depth_outlier_trunc",
                           &OdometryLossParams::depth_outlier_trunc_,
                           "float: Depth difference threshold used to filter "
                           "projective associations.")
            .def_readwrite("depth_huber_delta",
                           &OdometryLossParams::depth_huber_delta_,
                           "float: Huber norm parameter used in depth loss.")
            .def_readwrite("intensity_huber_delta",
                           &OdometryLossParams::intensity_huber_delta_,
                           "float: Huber norm parameter used in intensity "
                           "loss.")
            .def("__repr__", [](const OdometryLossParams &olp) {
                return fmt::format(
                        "OdometryLossParams[depth_outlier_trunc={:e}, "
                        "depth_huber_delta={:e}, "
                        "intensity_huber_delta={:e}].",
                        olp.depth_outlier_trunc_, olp.depth_huber_delta_,
                        olp.intensity_huber_delta_);
            });
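    // Illustrative sketch (not part of the bindings): building the loss
    // parameters from Python with the defaults exposed above; the fields are
    // read-write, so they can also be tuned after construction.
    //
    //   params = odometry.OdometryLossParams(depth_outlier_trunc=0.07,
    //                                        depth_huber_delta=0.05,
    //                                        intensity_huber_delta=0.1)
    //   params.depth_outlier_trunc = 0.12  # loosen the projective filter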
    m_odometry.def(
            "rgbd_odometry_multi_scale", &RGBDOdometryMultiScale,
            py::call_guard<py::gil_scoped_release>(),
            "Function for Multi Scale RGBD odometry.", "source"_a, "target"_a,
            "intrinsics"_a,
            "init_source_to_target"_a =
                    core::Tensor::Eye(4, core::Dtype::Float64,
                                      core::Device("CPU:0")),
            "depth_scale"_a = 1000.0f, "depth_max"_a = 3.0f,
            "criteria_list"_a =
                    std::vector<OdometryConvergenceCriteria>({10, 5, 3}),
            "method"_a = Method::Hybrid, "params"_a = OdometryLossParams());
    docstring::FunctionDocInject(m_odometry, "rgbd_odometry_multi_scale",
                                 map_shared_argument_docstrings);
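    // Illustrative sketch (not part of the bindings): an end-to-end call of
    // rgbd_odometry_multi_scale from Python. The cloudViewer module paths,
    // the t.io image reader, the RGBDImage constructor and the file names
    // are assumptions based on the upstream Open3D API.
    //
    //   import cloudViewer as cv3d
    //   from cloudViewer.t.pipelines import odometry
    //
    //   src = cv3d.t.geometry.RGBDImage(cv3d.t.io.read_image("src_color.png"),
    //                                   cv3d.t.io.read_image("src_depth.png"))
    //   dst = cv3d.t.geometry.RGBDImage(cv3d.t.io.read_image("dst_color.png"),
    //                                   cv3d.t.io.read_image("dst_depth.png"))
    //   intrinsics = cv3d.core.Tensor([[525.0, 0.0, 319.5],
    //                                  [0.0, 525.0, 239.5],
    //                                  [0.0, 0.0, 1.0]],
    //                                 cv3d.core.Dtype.Float64)
    //   result = odometry.rgbd_odometry_multi_scale(
    //       src, dst, intrinsics, depth_scale=1000.0, depth_max=3.0,
    //       criteria_list=criteria_list, method=odometry.Method.Hybrid,
    //       params=params)
    //   print(result)                  # fitness / inlier_rmse summary
    //   print(result.transformation)   # (4, 4) Float64 tensor on CPU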
    m_odometry.def(
            "compute_odometry_result_point_to_plane",
            &ComputeOdometryResultPointToPlane,
            py::call_guard<py::gil_scoped_release>(),
            R"(Estimates the OdometryResult (4x4 rigid transformation T from
source to target, with inlier rmse and fitness). Performs one
iteration of RGBD odometry using
Loss function: :math:`[(V_p - V_q)^T N_p]^2`
where
:math:`V_p` denotes the vertex at pixel p in the source,
:math:`V_q` denotes the vertex at pixel q in the target.
:math:`N_p` denotes the normal at pixel p in the source.
q is obtained by transforming p with init_source_to_target then
projecting with intrinsics.
Reference: KinectFusion, ISMAR 2011.)",
            "source_vertex_map"_a, "target_vertex_map"_a,
            "target_normal_map"_a, "intrinsics"_a, "init_source_to_target"_a,
            "depth_outlier_trunc"_a, "depth_huber_delta"_a);
    docstring::FunctionDocInject(m_odometry,
                                 "compute_odometry_result_point_to_plane",
                                 map_shared_argument_docstrings);
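    // Illustrative sketch (not part of the bindings): one point-to-plane
    // iteration at a single resolution. The Image helpers clip_transform,
    // create_vertex_map and create_normal_map are assumed to be bound on
    // cloudViewer.t.geometry.Image as in upstream Open3D (clip_transform
    // playing the role of PreprocessDepth mentioned in the docstrings).
    //
    //   src_depth = cv3d.t.io.read_image("src_depth.png").clip_transform(
    //       scale=1000.0, min_value=0.1, max_value=3.0, clip_fill=0.0)
    //   dst_depth = cv3d.t.io.read_image("dst_depth.png").clip_transform(
    //       scale=1000.0, min_value=0.1, max_value=3.0, clip_fill=0.0)
    //   src_vertex = src_depth.create_vertex_map(intrinsics)
    //   dst_vertex = dst_depth.create_vertex_map(intrinsics)
    //   dst_normal = dst_vertex.create_normal_map()
    //   init_T = cv3d.core.Tensor.eye(4, dtype=cv3d.core.Dtype.Float64)
    //   delta = odometry.compute_odometry_result_point_to_plane(
    //       src_vertex, dst_vertex, dst_normal, intrinsics, init_T,
    //       depth_outlier_trunc=0.07, depth_huber_delta=0.05)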
    m_odometry.def(
            "compute_odometry_result_intensity",
            &ComputeOdometryResultIntensity,
            py::call_guard<py::gil_scoped_release>(),
            R"(Estimates the OdometryResult (4x4 rigid transformation T from
source to target, with inlier rmse and fitness). Performs one
iteration of RGBD odometry using
Loss function: :math:`(I_p - I_q)^2`
where
:math:`I_p` denotes the intensity at pixel p in the source,
:math:`I_q` denotes the intensity at pixel q in the target.
q is obtained by transforming p with init_source_to_target then
projecting with intrinsics.
Reference: F. Steinbrucker, J. Sturm, and D. Cremers,
Real-time visual odometry from dense RGB-D images,
ICCV Workshops, 2011.)",
            "source_depth"_a, "target_depth"_a, "source_intensity"_a,
            "target_intensity"_a, "target_intensity_dx"_a,
            "target_intensity_dy"_a, "source_vertex_map"_a, "intrinsics"_a,
            "init_source_to_target"_a, "depth_outlier_trunc"_a,
            "intensity_huber_delta"_a);
    docstring::FunctionDocInject(m_odometry,
                                 "compute_odometry_result_intensity",
                                 map_shared_argument_docstrings);
    m_odometry.def(
            "compute_odometry_result_hybrid", &ComputeOdometryResultHybrid,
            py::call_guard<py::gil_scoped_release>(),
            R"(Estimates the OdometryResult (4x4 rigid transformation T from
source to target, with inlier rmse and fitness). Performs one
iteration of RGBD odometry using
Loss function: :math:`(I_p - I_q)^2 + \lambda(D_p - (D_q)')^2`
where
:math:`I_p` denotes the intensity at pixel p in the source,
:math:`I_q` denotes the intensity at pixel q in the target.
:math:`D_p` denotes the depth pixel p in the source,
:math:`D_q` denotes the depth pixel q in the target.
q is obtained by transforming p with init_source_to_target then
projecting with intrinsics.
Reference: J. Park, Q.Y. Zhou, and V. Koltun,
Colored Point Cloud Registration Revisited, ICCV, 2017.)",
            "source_depth"_a, "target_depth"_a, "source_intensity"_a,
            "target_intensity"_a, "target_depth_dx"_a, "target_depth_dy"_a,
            "target_intensity_dx"_a, "target_intensity_dy"_a,
            "source_vertex_map"_a, "intrinsics"_a, "init_source_to_target"_a,
            "depth_outlier_trunc"_a, "depth_huber_delta"_a,
            "intensity_huber_delta"_a);
    docstring::FunctionDocInject(m_odometry,
                                 "compute_odometry_result_hybrid",
                                 map_shared_argument_docstrings);
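    // Illustrative sketch (not part of the bindings): the hybrid term also
    // needs Sobel gradients of the target depth and intensity. filter_sobel
    // and rgb_to_gray are assumed to be bound on cloudViewer.t.geometry.Image
    // as in upstream Open3D, with filter_sobel returning a (dx, dy) pair; the
    // intensity images are expected as Float32, so convert beforehand if the
    // color input is UInt8.
    //
    //   src_intensity = cv3d.t.io.read_image("src_color.png").rgb_to_gray()
    //   dst_intensity = cv3d.t.io.read_image("dst_color.png").rgb_to_gray()
    //   dst_depth_dx, dst_depth_dy = dst_depth.filter_sobel(kernel_size=3)
    //   dst_int_dx, dst_int_dy = dst_intensity.filter_sobel(kernel_size=3)
    //   delta = odometry.compute_odometry_result_hybrid(
    //       src_depth, dst_depth, src_intensity, dst_intensity,
    //       dst_depth_dx, dst_depth_dy, dst_int_dx, dst_int_dy,
    //       src_vertex, intrinsics, init_T, depth_outlier_trunc=0.07,
    //       depth_huber_delta=0.05, intensity_huber_delta=0.1)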
    m_odometry.def("compute_odometry_information_matrix",
                   &ComputeOdometryInformationMatrix,
                   py::call_guard<py::gil_scoped_release>(), "source_depth"_a,
                   "target_depth"_a, "intrinsic"_a, "source_to_target"_a,
                   "dist_threshold"_a, "depth_scale"_a = 1000.0,
                   "depth_max"_a = 3.0);
}
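// Illustrative sketch (not part of the bindings): the information matrix is
// typically used to weight pose-graph edges after odometry. The keyword names
// follow the arguments registered above; module paths and the reader are
// assumptions based on the upstream Open3D API.
//
//   info = odometry.compute_odometry_information_matrix(
//       cv3d.t.io.read_image("src_depth.png"),
//       cv3d.t.io.read_image("dst_depth.png"),
//       intrinsic=intrinsics, source_to_target=result.transformation,
//       dist_threshold=0.07, depth_scale=1000.0, depth_max=3.0)
//   # info is a (6, 6) tensor; a better-constrained relative pose yields a
//   # "larger" information matrix.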