Pipelines

colored_pointcloud_registration.py

# ----------------------------------------------------------------------------
# -                        Open3D: www.open3d.org                            -
# ----------------------------------------------------------------------------
# The MIT License (MIT)
#
# Copyright (c) 2018-2021 www.open3d.org
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
# ----------------------------------------------------------------------------
26"""ICP variant that uses both geometry and color for registration"""
27
28import open3d as o3d
29import numpy as np
30import copy
31
32
33def draw_registration_result(source, target, transformation):
34    source_temp = copy.deepcopy(source)
35    source_temp.transform(transformation)
36    o3d.visualization.draw([source_temp, target])
37
38
39print("Load two point clouds and show initial pose ...")
40ply_data = o3d.data.DemoColoredICPPointClouds()
41source = o3d.io.read_point_cloud(ply_data.paths[0])
42target = o3d.io.read_point_cloud(ply_data.paths[1])
43
44if __name__ == "__main__":
45    # Draw initial alignment.
46    current_transformation = np.identity(4)
47    draw_registration_result(source, target, current_transformation)
48
49    # Colored pointcloud registration.
50    # This is implementation of following paper:
51    # J. Park, Q.-Y. Zhou, V. Koltun,
52    # Colored Point Cloud Registration Revisited, ICCV 2017.
53    voxel_radius = [0.04, 0.02, 0.01]
54    max_iter = [50, 30, 14]
55    current_transformation = np.identity(4)
56    print("Colored point cloud registration ...\n")
57    for scale in range(3):
58        iter = max_iter[scale]
59        radius = voxel_radius[scale]
60        print([iter, radius, scale])
61
62        print("1. Downsample with a voxel size %.2f" % radius)
63        source_down = source.voxel_down_sample(radius)
64        target_down = target.voxel_down_sample(radius)
65
66        print("2. Estimate normal")
67        source_down.estimate_normals(
68            o3d.geometry.KDTreeSearchParamHybrid(radius=radius * 2, max_nn=30))
69        target_down.estimate_normals(
70            o3d.geometry.KDTreeSearchParamHybrid(radius=radius * 2, max_nn=30))
71
72        print("3. Applying colored point cloud registration")
73        result_icp = o3d.pipelines.registration.registration_colored_icp(
74            source_down, target_down, radius, current_transformation,
75            o3d.pipelines.registration.TransformationEstimationForColoredICP(),
76            o3d.pipelines.registration.ICPConvergenceCriteria(
77                relative_fitness=1e-6, relative_rmse=1e-6, max_iteration=iter))
78        current_transformation = result_icp.transformation
79        print(result_icp, "\n")
80    draw_registration_result(source, target, result_icp.transformation)
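
The loop reports fitness and inlier RMSE on the downsampled clouds at each scale. To check the final alignment on the full-resolution clouds, the result can also be passed to evaluate_registration, the same call used in icp_registration.py below. A minimal sketch, continuing from the script's variables, with the threshold chosen to match the finest voxel radius:

# Sketch: evaluate the final colored ICP alignment on the full clouds.
evaluation = o3d.pipelines.registration.evaluate_registration(
    source, target, 0.01, result_icp.transformation)
print(evaluation)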

icp_registration.py

# ----------------------------------------------------------------------------
# -                        Open3D: www.open3d.org                            -
# ----------------------------------------------------------------------------
# The MIT License (MIT)
# Copyright (c) 2018-2021 www.open3d.org
# (Same license header as in colored_pointcloud_registration.py above.)
# ----------------------------------------------------------------------------
26"""ICP (Iterative Closest Point) registration algorithm"""
27
28import open3d as o3d
29import numpy as np
30import copy
31
32
33def draw_registration_result(source, target, transformation):
34    source_temp = copy.deepcopy(source)
35    target_temp = copy.deepcopy(target)
36    source_temp.paint_uniform_color([1, 0.706, 0])
37    target_temp.paint_uniform_color([0, 0.651, 0.929])
38    source_temp.transform(transformation)
39    o3d.visualization.draw([source_temp, target_temp])
40
41
42def point_to_point_icp(source, target, threshold, trans_init):
43    print("Apply point-to-point ICP")
44    reg_p2p = o3d.pipelines.registration.registration_icp(
45        source, target, threshold, trans_init,
46        o3d.pipelines.registration.TransformationEstimationPointToPoint())
47    print(reg_p2p)
48    print("Transformation is:")
49    print(reg_p2p.transformation, "\n")
50    draw_registration_result(source, target, reg_p2p.transformation)
51
52
53def point_to_plane_icp(source, target, threshold, trans_init):
54    print("Apply point-to-plane ICP")
55    reg_p2l = o3d.pipelines.registration.registration_icp(
56        source, target, threshold, trans_init,
57        o3d.pipelines.registration.TransformationEstimationPointToPlane())
58    print(reg_p2l)
59    print("Transformation is:")
60    print(reg_p2l.transformation, "\n")
61    draw_registration_result(source, target, reg_p2l.transformation)
62
63
64if __name__ == "__main__":
65    pcd_data = o3d.data.DemoICPPointClouds()
66    source = o3d.io.read_point_cloud(pcd_data.paths[0])
67    target = o3d.io.read_point_cloud(pcd_data.paths[1])
68    threshold = 0.02
69    trans_init = np.asarray([[0.862, 0.011, -0.507, 0.5],
70                             [-0.139, 0.967, -0.215, 0.7],
71                             [0.487, 0.255, 0.835, -1.4], [0.0, 0.0, 0.0, 1.0]])
72    draw_registration_result(source, target, trans_init)
73
74    print("Initial alignment")
75    evaluation = o3d.pipelines.registration.evaluate_registration(
76        source, target, threshold, trans_init)
77    print(evaluation, "\n")
78
79    point_to_point_icp(source, target, threshold, trans_init)
80    point_to_plane_icp(source, target, threshold, trans_init)
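
registration_icp stops early by default (the default ICPConvergenceCriteria caps iterations at 30). When a tighter fit is wanted, an explicit criteria object can be passed as the last argument. A minimal sketch for the point-to-point case, reusing the script's variables:

# Sketch: allow ICP to run longer by passing explicit convergence criteria.
reg_p2p = o3d.pipelines.registration.registration_icp(
    source, target, threshold, trans_init,
    o3d.pipelines.registration.TransformationEstimationPointToPoint(),
    o3d.pipelines.registration.ICPConvergenceCriteria(max_iteration=2000))
print(reg_p2p)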

multiway_registration.py

# ----------------------------------------------------------------------------
# -                        Open3D: www.open3d.org                            -
# ----------------------------------------------------------------------------
# The MIT License (MIT)
# Copyright (c) 2018-2021 www.open3d.org
# (Same license header as in colored_pointcloud_registration.py above.)
# ----------------------------------------------------------------------------
 26"""Align multiple pieces of geometry in a global space"""
 27
 28import open3d as o3d
 29import numpy as np
 30
 31
 32def load_point_clouds(voxel_size=0.0):
 33    pcd_data = o3d.data.DemoICPPointClouds()
 34    pcds = []
 35    for i in range(3):
 36        pcd = o3d.io.read_point_cloud(pcd_data.paths[i])
 37        pcd_down = pcd.voxel_down_sample(voxel_size=voxel_size)
 38        pcds.append(pcd_down)
 39    return pcds
 40
 41
 42def pairwise_registration(source, target, max_correspondence_distance_coarse,
 43                          max_correspondence_distance_fine):
 44    print("Apply point-to-plane ICP")
 45    icp_coarse = o3d.pipelines.registration.registration_icp(
 46        source, target, max_correspondence_distance_coarse, np.identity(4),
 47        o3d.pipelines.registration.TransformationEstimationPointToPlane())
 48    icp_fine = o3d.pipelines.registration.registration_icp(
 49        source, target, max_correspondence_distance_fine,
 50        icp_coarse.transformation,
 51        o3d.pipelines.registration.TransformationEstimationPointToPlane())
 52    transformation_icp = icp_fine.transformation
 53    information_icp = o3d.pipelines.registration.get_information_matrix_from_point_clouds(
 54        source, target, max_correspondence_distance_fine,
 55        icp_fine.transformation)
 56    return transformation_icp, information_icp
 57
 58
 59def full_registration(pcds, max_correspondence_distance_coarse,
 60                      max_correspondence_distance_fine):
 61    pose_graph = o3d.pipelines.registration.PoseGraph()
 62    odometry = np.identity(4)
 63    pose_graph.nodes.append(o3d.pipelines.registration.PoseGraphNode(odometry))
 64    n_pcds = len(pcds)
 65    for source_id in range(n_pcds):
 66        for target_id in range(source_id + 1, n_pcds):
 67            transformation_icp, information_icp = pairwise_registration(
 68                pcds[source_id], pcds[target_id],
 69                max_correspondence_distance_coarse,
 70                max_correspondence_distance_fine)
 71            print("Build o3d.pipelines.registration.PoseGraph")
 72            if target_id == source_id + 1:  # odometry case
 73                odometry = np.dot(transformation_icp, odometry)
 74                pose_graph.nodes.append(
 75                    o3d.pipelines.registration.PoseGraphNode(
 76                        np.linalg.inv(odometry)))
 77                pose_graph.edges.append(
 78                    o3d.pipelines.registration.PoseGraphEdge(source_id,
 79                                                             target_id,
 80                                                             transformation_icp,
 81                                                             information_icp,
 82                                                             uncertain=False))
 83            else:  # loop closure case
 84                pose_graph.edges.append(
 85                    o3d.pipelines.registration.PoseGraphEdge(source_id,
 86                                                             target_id,
 87                                                             transformation_icp,
 88                                                             information_icp,
 89                                                             uncertain=True))
 90    return pose_graph
 91
 92
 93if __name__ == "__main__":
 94    voxel_size = 0.02
 95    pcds_down = load_point_clouds(voxel_size)
 96    o3d.visualization.draw(pcds_down)
 97
 98    print("Full registration ...")
 99    max_correspondence_distance_coarse = voxel_size * 15
100    max_correspondence_distance_fine = voxel_size * 1.5
101    with o3d.utility.VerbosityContextManager(
102            o3d.utility.VerbosityLevel.Debug) as cm:
103        pose_graph = full_registration(pcds_down,
104                                       max_correspondence_distance_coarse,
105                                       max_correspondence_distance_fine)
106
107    print("Optimizing PoseGraph ...")
108    option = o3d.pipelines.registration.GlobalOptimizationOption(
109        max_correspondence_distance=max_correspondence_distance_fine,
110        edge_prune_threshold=0.25,
111        reference_node=0)
112    with o3d.utility.VerbosityContextManager(
113            o3d.utility.VerbosityLevel.Debug) as cm:
114        o3d.pipelines.registration.global_optimization(
115            pose_graph,
116            o3d.pipelines.registration.GlobalOptimizationLevenbergMarquardt(),
117            o3d.pipelines.registration.GlobalOptimizationConvergenceCriteria(),
118            option)
119
120    print("Transform points and display")
121    for point_id in range(len(pcds_down)):
122        print(pose_graph.nodes[point_id].pose)
123        pcds_down[point_id].transform(pose_graph.nodes[point_id].pose)
124    o3d.visualization.draw(pcds_down)
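
Since the final loop transforms pcds_down in place, the aligned fragments can also be merged into a single cloud and re-downsampled. A minimal sketch, continuing from the script's variables:

# Sketch: merge the aligned fragments into one cloud and downsample it.
pcd_combined = o3d.geometry.PointCloud()
for pcd in pcds_down:
    pcd_combined += pcd
pcd_combined_down = pcd_combined.voxel_down_sample(voxel_size=voxel_size)
o3d.visualization.draw([pcd_combined_down])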

pose_graph_optimization.py

# ----------------------------------------------------------------------------
# -                        Open3D: www.open3d.org                            -
# ----------------------------------------------------------------------------
# The MIT License (MIT)
# Copyright (c) 2018-2021 www.open3d.org
# (Same license header as in colored_pointcloud_registration.py above.)
# ----------------------------------------------------------------------------

import open3d as o3d
import numpy as np
import os

if __name__ == "__main__":

    o3d.utility.set_verbosity_level(o3d.utility.VerbosityLevel.Debug)

    print("")
    print(
        "Parameters for o3d.pipelines.registration.PoseGraph optimization ...")
    method = o3d.pipelines.registration.GlobalOptimizationLevenbergMarquardt()
    criteria = o3d.pipelines.registration.GlobalOptimizationConvergenceCriteria()
    option = o3d.pipelines.registration.GlobalOptimizationOption()
    print("")
    print(method)
    print(criteria)
    print(option)
    print("")

    print(
        "Optimizing Fragment o3d.pipelines.registration.PoseGraph using open3d ..."
    )

    pose_graph_data = o3d.data.DemoPoseGraphOptimization()
    pose_graph_fragment = o3d.io.read_pose_graph(
        pose_graph_data.pose_graph_fragment_path)
    print(pose_graph_fragment)
    o3d.pipelines.registration.global_optimization(pose_graph_fragment, method,
                                                   criteria, option)
    o3d.io.write_pose_graph(
        os.path.join('pose_graph_example_fragment_optimized.json'),
        pose_graph_fragment)
    print("")

    print(
        "Optimizing Global o3d.pipelines.registration.PoseGraph using open3d ..."
    )
    pose_graph_global = o3d.io.read_pose_graph(
        pose_graph_data.pose_graph_global_path)
    print(pose_graph_global)
    o3d.pipelines.registration.global_optimization(pose_graph_global, method,
                                                   criteria, option)
    o3d.io.write_pose_graph(
        os.path.join('pose_graph_example_global_optimized.json'),
        pose_graph_global)
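
The script runs with a default GlobalOptimizationOption. The same knobs shown in multiway_registration.py above (correspondence distance, edge pruning, reference node), plus a loop-closure preference weight, can be set explicitly. A minimal sketch with illustrative values; preference_loop_closure is assumed to be the keyword exposed by the installed Open3D version:

# Sketch: a non-default optimization option (values are illustrative only).
option = o3d.pipelines.registration.GlobalOptimizationOption(
    max_correspondence_distance=0.075,
    edge_prune_threshold=0.25,
    preference_loop_closure=2.0,
    reference_node=0)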

registration_fgr.py

# ----------------------------------------------------------------------------
# -                        Open3D: www.open3d.org                            -
# ----------------------------------------------------------------------------
# The MIT License (MIT)
# Copyright (c) 2018-2021 www.open3d.org
# (Same license header as in colored_pointcloud_registration.py above.)
# ----------------------------------------------------------------------------

import open3d as o3d

import argparse


def preprocess_point_cloud(pcd, voxel_size):
    pcd_down = pcd.voxel_down_sample(voxel_size)
    pcd_down.estimate_normals(
        o3d.geometry.KDTreeSearchParamHybrid(radius=voxel_size * 2.0,
                                             max_nn=30))
    pcd_fpfh = o3d.pipelines.registration.compute_fpfh_feature(
        pcd_down,
        o3d.geometry.KDTreeSearchParamHybrid(radius=voxel_size * 5.0,
                                             max_nn=100))
    return (pcd_down, pcd_fpfh)


if __name__ == '__main__':
    pcd_data = o3d.data.DemoICPPointClouds()
    parser = argparse.ArgumentParser(
        'Global point cloud registration example with FGR')
    parser.add_argument('src',
                        type=str,
                        default=pcd_data.paths[0],
                        nargs='?',
                        help='path to src point cloud')
    parser.add_argument('dst',
                        type=str,
                        default=pcd_data.paths[1],
                        nargs='?',
                        help='path to dst point cloud')
    parser.add_argument('--voxel_size',
                        type=float,
                        default=0.05,
                        help='voxel size in meter used to downsample inputs')
    parser.add_argument(
        '--distance_multiplier',
        type=float,
        default=1.5,
        help='multiplier used to compute the distance threshold '
        'between correspondences. '
        'Threshold is computed by voxel_size * distance_multiplier.')
    parser.add_argument('--max_iterations',
                        type=int,
                        default=64,
                        help='number of max FGR iterations')
    parser.add_argument(
        '--max_tuples',
        type=int,
        default=1000,
        help='max number of accepted tuples for correspondence filtering')

    args = parser.parse_args()

    voxel_size = args.voxel_size
    distance_threshold = args.distance_multiplier * voxel_size

    o3d.utility.set_verbosity_level(o3d.utility.VerbosityLevel.Debug)
    print('Reading inputs')
    src = o3d.io.read_point_cloud(args.src)
    dst = o3d.io.read_point_cloud(args.dst)

    print('Downsampling inputs')
    src_down, src_fpfh = preprocess_point_cloud(src, voxel_size)
    dst_down, dst_fpfh = preprocess_point_cloud(dst, voxel_size)

    print('Running FGR')
    result = o3d.pipelines.registration.registration_fgr_based_on_feature_matching(
        src_down, dst_down, src_fpfh, dst_fpfh,
        o3d.pipelines.registration.FastGlobalRegistrationOption(
            maximum_correspondence_distance=distance_threshold,
            iteration_number=args.max_iterations,
            maximum_tuple_count=args.max_tuples))

    src.paint_uniform_color([1, 0, 0])
    dst.paint_uniform_color([0, 1, 0])
    o3d.visualization.draw([src.transform(result.transformation), dst])
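
FGR produces a global, feature-based alignment, which is commonly refined with a local ICP pass started from result.transformation. A minimal sketch using the point-to-plane estimator on the downsampled clouds (normals were already computed in preprocess_point_cloud):

# Sketch: refine the FGR result with point-to-plane ICP.
result_icp = o3d.pipelines.registration.registration_icp(
    src_down, dst_down, distance_threshold, result.transformation,
    o3d.pipelines.registration.TransformationEstimationPointToPlane())
print(result_icp)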

registration_ransac.py

# ----------------------------------------------------------------------------
# -                        Open3D: www.open3d.org                            -
# ----------------------------------------------------------------------------
# The MIT License (MIT)
# Copyright (c) 2018-2021 www.open3d.org
# (Same license header as in colored_pointcloud_registration.py above.)
# ----------------------------------------------------------------------------

import open3d as o3d

import argparse


def preprocess_point_cloud(pcd, voxel_size):
    pcd_down = pcd.voxel_down_sample(voxel_size)
    pcd_down.estimate_normals(
        o3d.geometry.KDTreeSearchParamHybrid(radius=voxel_size * 2.0,
                                             max_nn=30))
    pcd_fpfh = o3d.pipelines.registration.compute_fpfh_feature(
        pcd_down,
        o3d.geometry.KDTreeSearchParamHybrid(radius=voxel_size * 5.0,
                                             max_nn=100))
    return (pcd_down, pcd_fpfh)


if __name__ == '__main__':
    pcd_data = o3d.data.DemoICPPointClouds()
    parser = argparse.ArgumentParser(
        'Global point cloud registration example with RANSAC')
    parser.add_argument('src',
                        type=str,
                        default=pcd_data.paths[0],
                        nargs='?',
                        help='path to src point cloud')
    parser.add_argument('dst',
                        type=str,
                        default=pcd_data.paths[1],
                        nargs='?',
                        help='path to dst point cloud')
    parser.add_argument('--voxel_size',
                        type=float,
                        default=0.05,
                        help='voxel size in meter used to downsample inputs')
    parser.add_argument(
        '--distance_multiplier',
        type=float,
        default=1.5,
        help='multiplier used to compute the distance threshold '
        'between correspondences. '
        'Threshold is computed by voxel_size * distance_multiplier.')
    parser.add_argument('--max_iterations',
                        type=int,
                        default=1000000,
                        help='number of max RANSAC iterations')
    parser.add_argument('--confidence',
                        type=float,
                        default=0.999,
                        help='RANSAC confidence')
    parser.add_argument(
        '--mutual_filter',
        action='store_true',
        help='whether to use mutual filter for putative correspondences')

    args = parser.parse_args()

    voxel_size = args.voxel_size
    distance_threshold = args.distance_multiplier * voxel_size

    o3d.utility.set_verbosity_level(o3d.utility.VerbosityLevel.Debug)
    print('Reading inputs')
    src = o3d.io.read_point_cloud(args.src)
    dst = o3d.io.read_point_cloud(args.dst)

    print('Downsampling inputs')
    src_down, src_fpfh = preprocess_point_cloud(src, voxel_size)
    dst_down, dst_fpfh = preprocess_point_cloud(dst, voxel_size)

    print('Running RANSAC')
    result = o3d.pipelines.registration.registration_ransac_based_on_feature_matching(
        src_down,
        dst_down,
        src_fpfh,
        dst_fpfh,
        mutual_filter=args.mutual_filter,
        max_correspondence_distance=distance_threshold,
        estimation_method=o3d.pipelines.registration.
        TransformationEstimationPointToPoint(False),
        ransac_n=3,
        checkers=[
            o3d.pipelines.registration.CorrespondenceCheckerBasedOnEdgeLength(
                0.9),
            o3d.pipelines.registration.CorrespondenceCheckerBasedOnDistance(
                distance_threshold)
        ],
        criteria=o3d.pipelines.registration.RANSACConvergenceCriteria(
            args.max_iterations, args.confidence))

    src.paint_uniform_color([1, 0, 0])
    dst.paint_uniform_color([0, 1, 0])
    o3d.visualization.draw([src.transform(result.transformation), dst])
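
Before visualizing, the RegistrationResult returned by the RANSAC call can be inspected for fitness, inlier RMSE, and the number of surviving correspondences. A minimal sketch:

# Sketch: report the quality of the RANSAC alignment.
print('Fitness: {:.4f}, inlier RMSE: {:.4f}, correspondences: {:d}'.format(
    result.fitness, result.inlier_rmse, len(result.correspondence_set)))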

rgbd_integration_uniform.py

# ----------------------------------------------------------------------------
# -                        Open3D: www.open3d.org                            -
# ----------------------------------------------------------------------------
# The MIT License (MIT)
# Copyright (c) 2018-2021 www.open3d.org
# (Same license header as in colored_pointcloud_registration.py above.)
# ----------------------------------------------------------------------------

import open3d as o3d
import numpy as np

import os, sys

pyexample_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(pyexample_path)

from open3d_example import read_trajectory

if __name__ == "__main__":
    rgbd_data = o3d.data.SampleRedwoodRGBDImages()
    camera_poses = read_trajectory(rgbd_data.odometry_log_path)
    camera_intrinsics = o3d.camera.PinholeCameraIntrinsic(
        o3d.camera.PinholeCameraIntrinsicParameters.PrimeSenseDefault)
    volume = o3d.pipelines.integration.UniformTSDFVolume(
        length=4.0,
        resolution=512,
        sdf_trunc=0.04,
        color_type=o3d.pipelines.integration.TSDFVolumeColorType.RGB8,
    )

    for i in range(len(camera_poses)):
        print("Integrate {:d}-th image into the volume.".format(i))
        color = o3d.io.read_image(rgbd_data.color_paths[i])
        depth = o3d.io.read_image(rgbd_data.depth_paths[i])

        rgbd = o3d.geometry.RGBDImage.create_from_color_and_depth(
            color, depth, depth_trunc=4.0, convert_rgb_to_intensity=False)
        volume.integrate(
            rgbd,
            camera_intrinsics,
            np.linalg.inv(camera_poses[i].pose),
        )

    print("Extract triangle mesh")
    mesh = volume.extract_triangle_mesh()
    mesh.compute_vertex_normals()
    o3d.visualization.draw_geometries([mesh])

    print("Extract voxel-aligned debugging point cloud")
    voxel_pcd = volume.extract_voxel_point_cloud()
    o3d.visualization.draw_geometries([voxel_pcd])

    print("Extract voxel-aligned debugging voxel grid")
    voxel_grid = volume.extract_voxel_grid()
    # o3d.visualization.draw_geometries([voxel_grid])

    # print("Extract point cloud")
    # pcd = volume.extract_point_cloud()
    # o3d.visualization.draw_geometries([pcd])
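
A UniformTSDFVolume allocates the full 512^3 grid up front. For larger scenes, the scalable variant is usually preferred; a minimal sketch of constructing it with a matching effective voxel size and truncation distance (the integration loop itself stays unchanged):

# Sketch: scalable TSDF volume with the same effective voxel size (4.0 / 512).
volume = o3d.pipelines.integration.ScalableTSDFVolume(
    voxel_length=4.0 / 512.0,
    sdf_trunc=0.04,
    color_type=o3d.pipelines.integration.TSDFVolumeColorType.RGB8)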

rgbd_odometry.py

# ----------------------------------------------------------------------------
# -                        Open3D: www.open3d.org                            -
# ----------------------------------------------------------------------------
# The MIT License (MIT)
# Copyright (c) 2018-2021 www.open3d.org
# (Same license header as in colored_pointcloud_registration.py above.)
# ----------------------------------------------------------------------------
26"""Find camera movement between two consecutive RGBD image pairs"""
27
28import open3d as o3d
29import numpy as np
30
31if __name__ == "__main__":
32    pinhole_camera_intrinsic = o3d.camera.PinholeCameraIntrinsic(
33        o3d.camera.PinholeCameraIntrinsicParameters.PrimeSenseDefault)
34    rgbd_data = o3d.data.SampleRedwoodRGBDImages()
35    source_color = o3d.io.read_image(rgbd_data.color_paths[0])
36    source_depth = o3d.io.read_image(rgbd_data.depth_paths[0])
37    target_color = o3d.io.read_image(rgbd_data.color_paths[1])
38    target_depth = o3d.io.read_image(rgbd_data.depth_paths[1])
39
40    source_rgbd_image = o3d.geometry.RGBDImage.create_from_color_and_depth(
41        source_color, source_depth)
42    target_rgbd_image = o3d.geometry.RGBDImage.create_from_color_and_depth(
43        target_color, target_depth)
44    target_pcd = o3d.geometry.PointCloud.create_from_rgbd_image(
45        target_rgbd_image, pinhole_camera_intrinsic)
46
47    option = o3d.pipelines.odometry.OdometryOption()
48    odo_init = np.identity(4)
49    print(option)
50
51    [success_color_term, trans_color_term,
52     info] = o3d.pipelines.odometry.compute_rgbd_odometry(
53         source_rgbd_image, target_rgbd_image,
54         pinhole_camera_intrinsic, odo_init,
55         o3d.pipelines.odometry.RGBDOdometryJacobianFromColorTerm(), option)
56    [success_hybrid_term, trans_hybrid_term,
57     info] = o3d.pipelines.odometry.compute_rgbd_odometry(
58         source_rgbd_image, target_rgbd_image,
59         pinhole_camera_intrinsic, odo_init,
60         o3d.pipelines.odometry.RGBDOdometryJacobianFromHybridTerm(), option)
61
62    if success_color_term:
63        print("Using RGB-D Odometry")
64        print(trans_color_term)
65        source_pcd_color_term = o3d.geometry.PointCloud.create_from_rgbd_image(
66            source_rgbd_image, pinhole_camera_intrinsic)
67        source_pcd_color_term.transform(trans_color_term)
68        o3d.visualization.draw([target_pcd, source_pcd_color_term])
69
70    if success_hybrid_term:
71        print("Using Hybrid RGB-D Odometry")
72        print(trans_hybrid_term)
73        source_pcd_hybrid_term = o3d.geometry.PointCloud.create_from_rgbd_image(
74            source_rgbd_image, pinhole_camera_intrinsic)
75        source_pcd_hybrid_term.transform(trans_hybrid_term)
76        o3d.visualization.draw([target_pcd, source_pcd_hybrid_term])
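
The color-term and hybrid-term Jacobians generally return slightly different estimates. When both succeed, the difference can be quantified directly from the 4x4 transforms, for example by comparing their translation components; a minimal sketch, reusing the script's variables:

# Sketch: compare the translation components of the two odometry estimates.
if success_color_term and success_hybrid_term:
    t_color = np.asarray(trans_color_term)[:3, 3]
    t_hybrid = np.asarray(trans_hybrid_term)[:3, 3]
    print("Translation difference (m):", np.linalg.norm(t_color - t_hybrid))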

robust_icp.py

# ----------------------------------------------------------------------------
# -                        Open3D: www.open3d.org                            -
# ----------------------------------------------------------------------------
# The MIT License (MIT)
# Copyright (c) 2018-2021 www.open3d.org
# (Same license header as in colored_pointcloud_registration.py above.)
# ----------------------------------------------------------------------------
26"""Outlier rejection using robust kernels for ICP"""
27
28import open3d as o3d
29import numpy as np
30import copy
31
32
33def draw_registration_result(source, target, transformation):
34    source_temp = copy.deepcopy(source)
35    target_temp = copy.deepcopy(target)
36    source_temp.paint_uniform_color([1, 0.706, 0])
37    target_temp.paint_uniform_color([0, 0.651, 0.929])
38    source_temp.transform(transformation)
39    o3d.visualization.draw([source_temp, target_temp])
40
41
42def apply_noise(pcd, mu, sigma):
43    noisy_pcd = copy.deepcopy(pcd)
44    points = np.asarray(noisy_pcd.points)
45    points += np.random.normal(mu, sigma, size=points.shape)
46    noisy_pcd.points = o3d.utility.Vector3dVector(points)
47    return noisy_pcd
48
49
50if __name__ == "__main__":
51    pcd_data = o3d.data.DemoICPPointClouds()
52    source = o3d.io.read_point_cloud(pcd_data.paths[0])
53    target = o3d.io.read_point_cloud(pcd_data.paths[1])
54    trans_init = np.asarray([[0.862, 0.011, -0.507, 0.5],
55                             [-0.139, 0.967, -0.215, 0.7],
56                             [0.487, 0.255, 0.835, -1.4], [0.0, 0.0, 0.0, 1.0]])
57
58    # Mean and standard deviation.
59    mu, sigma = 0, 0.1
60    source_noisy = apply_noise(source, mu, sigma)
61
62    print("Displaying source point cloud + noise:")
63    o3d.visualization.draw([source_noisy])
64
65    print(
66        "Displaying original source and target point cloud with initial transformation:"
67    )
68    draw_registration_result(source, target, trans_init)
69
70    threshold = 1.0
71    print("Using the noisy source pointcloud to perform robust ICP.\n")
72    print("Robust point-to-plane ICP, threshold={}:".format(threshold))
73    loss = o3d.pipelines.registration.TukeyLoss(k=sigma)
74    print("Using robust loss:", loss)
75    p2l = o3d.pipelines.registration.TransformationEstimationPointToPlane(loss)
76    reg_p2l = o3d.pipelines.registration.registration_icp(
77        source_noisy, target, threshold, trans_init, p2l)
78    print(reg_p2l)
79    print("Transformation is:")
80    print(reg_p2l.transformation)
81    draw_registration_result(source, target, reg_p2l.transformation)
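
TukeyLoss is only one of the robust kernels available in o3d.pipelines.registration; others such as HuberLoss and CauchyLoss plug into the same point-to-plane estimator. A minimal sketch comparing them on the same noisy input, reusing the script's variables:

# Sketch: try other robust kernels with the same scale parameter.
for robust_loss in [
        o3d.pipelines.registration.HuberLoss(k=sigma),
        o3d.pipelines.registration.CauchyLoss(k=sigma),
]:
    estimator = o3d.pipelines.registration.TransformationEstimationPointToPlane(
        robust_loss)
    reg = o3d.pipelines.registration.registration_icp(source_noisy, target,
                                                      threshold, trans_init,
                                                      estimator)
    print(robust_loss, "\n", reg)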