Python open3d.PointCloud() Examples
The following are 24 code examples of open3d.PointCloud(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module open3d, or try the search function.
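All of the examples below use the legacy, flat Open3D namespace (open3d.PointCloud, open3d.Vector3dVector, open3d.draw_geometries), which was available up to roughly Open3D 0.7; in later releases the same classes live under open3d.geometry and open3d.utility, and the drawing helpers under open3d.visualization. The snippet below is a minimal sketch of the pattern shared by most of the examples, building a point cloud from an (N, 3) NumPy array and visualizing it; the random input array is only a placeholder.

import numpy as np
import open3d

# Placeholder input: any (N, 3) float array of XYZ coordinates works here.
xyz = np.random.rand(1000, 3)

pcd = open3d.PointCloud()                 # open3d.geometry.PointCloud() in newer releases
pcd.points = open3d.Vector3dVector(xyz)   # open3d.utility.Vector3dVector in newer releases
pcd.paint_uniform_color([0.2, 0.6, 0.2])  # optional: give all points one color

open3d.draw_geometries([pcd])             # open3d.visualization.draw_geometries in newer releases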

Example #1
Source File: visualise.py From differentiable-point-clouds with MIT License | 9 votes |
def vis_pc(xyz, color_axis=-1, rgb=None):
    # TODO move to the other module and do import in the module
    import open3d
    pcd = open3d.PointCloud()
    pcd.points = open3d.Vector3dVector(xyz)

    if color_axis >= 0:
        if color_axis == 3:
            axis_vis = np.arange(0, xyz.shape[0], dtype=np.float32)
        else:
            axis_vis = xyz[:, color_axis]
        min_ = np.min(axis_vis)
        max_ = np.max(axis_vis)
        colors = cm.gist_rainbow((axis_vis - min_) / (max_ - min_))[:, 0:3]
        pcd.colors = open3d.Vector3dVector(colors)
    if rgb is not None:
        pcd.colors = open3d.Vector3dVector(rgb)

    open3d.draw_geometries([pcd])
Example #2
Source File: kittipoles.py From polex with MIT License | 6 votes |
def view_local_maps(seq):
    sequence = dataset.sequence(seq)
    seqdir = os.path.join('kitti', '{:03d}'.format(seq))
    with np.load(os.path.join(seqdir, localmapfile)) as data:
        for i, map in enumerate(data['maps']):
            T_w_m = sequence.poses[map['imid']].dot(T_cam0_mc).dot(T_mc_m)
            mapboundsvis = util.create_wire_box(mapextent, [0.0, 0.0, 1.0])
            mapboundsvis.transform(T_w_m)
            polemap = []
            for poleparams in map['poleparams']:
                x, y, zs, ze, a, _ = poleparams
                pole = util.create_wire_box(
                    [a, a, ze - zs], color=[1.0, 1.0, 0.0])
                T_m_p = np.identity(4)
                T_m_p[:3, 3] = [x - 0.5 * a, y - 0.5 * a, zs]
                pole.transform(T_w_m.dot(T_m_p))
                polemap.append(pole)
            accucloud = o3.PointCloud()
            for j in range(map['istart'], map['iend']):
                velo = sequence.get_velo(j)
                cloud = o3.PointCloud()
                cloud.points = o3.Vector3dVector(velo[:, :3])
                cloud.colors = o3.Vector3dVector(
                    util.intensity2color(velo[:, 3]))
                cloud.transform(
                    sequence.poses[j].dot(sequence.calib.T_cam0_velo))
                accucloud.points.extend(cloud.points)
                accucloud.colors.extend(cloud.colors)
            o3.draw_geometries([accucloud, mapboundsvis] + polemap)
Example #3
Source File: downsample_gt.py From differentiable-point-clouds with MIT License | 6 votes |
def downsample_point_clouds():
    cfg = parse_arguments()
    vox_size = cfg.downsample_voxel_size
    synth_set = cfg.synth_set
    inp_dir = os.path.join(cfg.inp_dir, synth_set)
    files = glob.glob('{}/*.mat'.format(inp_dir))

    out_dir = cfg.out_dir
    out_synthset = os.path.join(out_dir, cfg.synth_set)
    mkdir_if_missing(out_synthset)

    for k, model_file in enumerate(files):
        print("{}/{}".format(k, len(files)))
        file_name = os.path.basename(model_file)
        sample_name, _ = os.path.splitext(file_name)
        obj = scipy.io.loadmat(model_file)

        out_filename = "{}/{}.mat".format(out_synthset, sample_name)
        if os.path.isfile(out_filename):
            print("already exists:", sample_name)
            continue

        Vgt = obj["points"]

        pcd = open3d.PointCloud()
        pcd.points = open3d.Vector3dVector(Vgt)
        downpcd = open3d.voxel_down_sample(pcd, voxel_size=vox_size)
        down_xyz = np.asarray(downpcd.points)
        scipy.io.savemat(out_filename, {"points": down_xyz})
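Example #3 calls the legacy free function open3d.voxel_down_sample(pcd, voxel_size=...). In Open3D 0.8 and later the same operation is a method on the geometry object instead. The following is a minimal sketch of the equivalent call under that newer API, assuming Open3D 0.8+; the random input array is only a placeholder for the points loaded from the .mat file.

import numpy as np
import open3d

# Placeholder input array; in the example above this comes from a .mat file.
points = np.random.rand(10000, 3)

pcd = open3d.geometry.PointCloud()
pcd.points = open3d.utility.Vector3dVector(points)
downpcd = pcd.voxel_down_sample(voxel_size=0.02)  # method form in Open3D 0.8+
down_xyz = np.asarray(downpcd.points)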
Example #4
Source File: util.py From polex with MIT License | 5 votes |
def xyzi2pc(xyz, intensities=None):
    pc = o3.PointCloud()
    pc.points = o3.Vector3dVector(xyz)
    if intensities is not None:
        pc.colors = o3.Vector3dVector(intensity2color(intensities / 255.0))
    return pc
Example #5
Source File: point_cloud.py From pylot with Apache License 2.0 | 5 votes |
def get_pixel_location(self, pixel, camera_setup):
    """ Gets the 3D world location from pixel coordinates.

    Args:
        pixel (:py:class:`~pylot.utils.Vector2D`): Pixel coordinates.
        camera_setup (:py:class:`~pylot.drivers.sensor_setup.CameraSetup`):
            The setup of the camera.

    Returns:
        :py:class:`~pylot.utils.Location`: The 3D world location, or None if
        all the point cloud points are behind.
    """
    # Select only points that are in front.
    fwd_points = self.points[np.where(self.points[:, 2] > 0.0)]
    if len(fwd_points) == 0:
        return None
    intrinsic_mat = camera_setup.get_intrinsic_matrix()
    # Project our 2D pixel location into 3D space, onto the z=1 plane.
    p3d = np.dot(inv(intrinsic_mat), np.array([[pixel.x], [pixel.y], [1.0]]))

    if self._lidar_setup.lidar_type == 'sensor.lidar.ray_cast':
        location = PointCloud.get_closest_point_in_point_cloud(
            fwd_points, Vector2D(p3d[0], p3d[1]), normalized=True)
        # Use the depth from the retrieved location.
        p3d *= np.array([location.z])
        p3d = p3d.transpose()
        # Convert from camera to unreal coordinates if the lidar type is
        # sensor.lidar.ray_cast
        to_world_transform = camera_setup.get_unreal_transform()
        camera_point_cloud = to_world_transform.transform_points(p3d)[0]
        pixel_location = Location(camera_point_cloud[0],
                                  camera_point_cloud[1],
                                  camera_point_cloud[2])
    elif self._lidar_setup.lidar_type == 'velodyne':
        location = PointCloud.get_closest_point_in_point_cloud(
            fwd_points, Vector2D(p3d[0], p3d[1]), normalized=False)
        # Use the depth from the retrieved location.
        p3d[2] = location.z
        p3d = p3d.transpose()
        pixel_location = Location(p3d[0, 0], p3d[0, 1], p3d[0, 2])
    return pixel_location
Example #6
Source File: point_cloud.py From pylot with Apache License 2.0 | 5 votes |
def save(self, timestamp, data_path, file_base):
    """Saves the point cloud to a file.

    Args:
        timestamp (:obj:`int`): Timestamp associated with the point cloud.
        data_path (:obj:`str`): Path where to save the point cloud.
        file_base (:obj:`str`): Base name of the file.
    """
    import open3d as o3d
    file_name = os.path.join(data_path,
                             '{}-{}.ply'.format(file_base, timestamp))
    pcd = o3d.PointCloud()
    pcd.points = o3d.Vector3dVector(self.points)
    o3d.write_point_cloud(file_name, pcd)
Example #7
Source File: registration.py From 3DRegNet with MIT License | 5 votes |
def initializePointCoud(pts):
    pcd = o3.PointCloud()
    pcd.points = o3.Vector3dVector(pts)
    return pcd
Example #8
Source File: setupPly.py From 3DRegNet with MIT License | 5 votes |
def initializePointCoud(pts):
    pcd = o3d.PointCloud()
    pcd.points = o3d.Vector3dVector(pts)
    return pcd
Example #9
Source File: ncltpoles.py From polex with MIT License | 5 votes |
def view_local_maps(sessionname):
    sessiondir = os.path.join('nclt', sessionname)
    session = pynclt.session(sessionname)
    maps = np.load(os.path.join(sessiondir, get_localmapfile()))['maps']
    for i, map in enumerate(maps):
        print('Map #{}'.format(i))
        mapboundsvis = util.create_wire_box(mapextent, [0.0, 0.0, 1.0])
        mapboundsvis.transform(map['T_w_m'])
        polevis = []
        for poleparams in map['poleparams']:
            x, y, zs, ze, a = poleparams[:5]
            pole = util.create_wire_box(
                [a, a, ze - zs], color=[1.0, 1.0, 0.0])
            T_m_p = np.identity(4)
            T_m_p[:3, 3] = [x - 0.5 * a, y - 0.5 * a, zs]
            pole.transform(map['T_w_m'].dot(T_m_p))
            polevis.append(pole)
        accucloud = o3.PointCloud()
        for j in range(map['istart'], map['iend']):
            points, intensities = session.get_velo(j)
            cloud = o3.PointCloud()
            cloud.points = o3.Vector3dVector(points)
            cloud.colors = o3.Vector3dVector(
                util.intensity2color(intensities / 255.0))
            cloud.transform(session.T_w_r_odo_velo[j])
            accucloud.points.extend(cloud.points)
            accucloud.colors.extend(cloud.colors)
        o3.draw_geometries([accucloud, mapboundsvis] + polevis)
Example #10
Source File: point_cloud.py From pylot with Apache License 2.0 | 5 votes |
def __str__(self):
    return 'PointCloud(transform: {}, number of points: {})'.format(
        self.transform, len(self.points))
Example #11
Source File: pynclt.py From polex with MIT License | 5 votes |
def load_snapshot(sessionname):
    cloud = o3.PointCloud()
    trajectory = o3.LineSet()
    with np.load(os.path.join(resultdir, sessionname, snapshotfile)) as data:
        cloud.points = o3.Vector3dVector(data['points'])
        cloud.colors = o3.Vector3dVector(
            util.intensity2color(data['intensities'] / 255.0))
        trajectory.points = o3.Vector3dVector(data['trajectory'])
        lines = np.reshape(range(data['trajectory'].shape[0] - 1), [-1, 1]) \
            + [0, 1]
        trajectory.lines = o3.Vector2iVector(lines)
        trajectory.colors = o3.Vector3dVector(
            np.tile([0.0, 0.5, 0.0], [lines.shape[0], 1]))
    return cloud, trajectory
Example #12
Source File: vis_point_cloud.py From pyrobot with MIT License | 5 votes |
def main():
    bot = Robot("kinect2")
    vis = open3d.Visualizer()
    vis.create_window("3D Map")
    pcd = open3d.PointCloud()
    coord = open3d.create_mesh_coordinate_frame(1, [0, 0, 0])
    vis.add_geometry(pcd)
    vis.add_geometry(coord)
    # We update the viewer every few seconds
    update_time = 4
    while True:
        start_time = time.time()
        pts, colors = bot.camera.get_current_pcd()
        pcd.clear()
        # note that open3d.Vector3dVector is slow
        pcd.points = open3d.Vector3dVector(pts)
        pcd.colors = open3d.Vector3dVector(colors / 255.0)
        vis.update_geometry()
        vis.poll_events()
        vis.update_renderer()
        s_t = time.time()
        while time.time() - s_t < update_time:
            vis.poll_events()
            vis.update_renderer()
            time.sleep(0.1)
        end_time = time.time()
        process_time = end_time - start_time
        print("Updating every {0:.2f}s".format(process_time))
Example #13
Source File: realtime_point_cloud.py From pyrobot with MIT License | 5 votes |
def main():
    parser = argparse.ArgumentParser(description="Argument Parser")
    parser.add_argument(
        "--floor_height", type=float, default=0.03,
        help="the z coordinate of the floor"
    )
    args = parser.parse_args()

    np.set_printoptions(precision=4, suppress=True)

    bot = Robot("locobot")
    bot.camera.set_pan_tilt(0, 0.7, wait=True)
    bot.arm.go_home()
    bot.arm.set_joint_positions([1.96, 0.52, -0.51, 1.67, 0.01], plan=False)
    vis = open3d.Visualizer()
    vis.create_window("3D Map")
    pcd = open3d.PointCloud()
    coord = open3d.create_mesh_coordinate_frame(1, [0, 0, 0])
    vis.add_geometry(pcd)
    vis.add_geometry(coord)
    while True:
        pts, colors = bot.camera.get_current_pcd(in_cam=False)
        pts, colors = filter_points(pts, colors, z_lowest=args.floor_height)
        pcd.clear()
        # note that open3d.Vector3dVector is slow
        pcd.points = open3d.Vector3dVector(pts)
        pcd.colors = open3d.Vector3dVector(colors / 255.0)
        vis.update_geometry()
        vis.poll_events()
        vis.update_renderer()
        time.sleep(0.1)
Example #14
Source File: vis_3d_map.py From pyrobot with MIT License | 5 votes |
def main():
    bot = Robot("locobot")
    vis = open3d.Visualizer()
    vis.create_window("3D Map")
    pcd = open3d.PointCloud()
    coord = open3d.create_mesh_coordinate_frame(1, [0, 0, 0])
    vis.add_geometry(pcd)
    vis.add_geometry(coord)
    # We update the viewer every few seconds
    update_time = 4
    while True:
        start_time = time.time()
        pts, colors = bot.base.base_state.vslam.get_3d_map()
        pcd.clear()
        # note that open3d.Vector3dVector is slow
        pcd.points = open3d.Vector3dVector(pts)
        pcd.colors = open3d.Vector3dVector(colors / 255.0)
        vis.update_geometry()
        vis.poll_events()
        vis.update_renderer()
        s_t = time.time()
        while time.time() - s_t < update_time:
            vis.poll_events()
            vis.update_renderer()
            time.sleep(0.1)
        end_time = time.time()
        process_time = end_time - start_time
        print("Updating every {0:.2f}s".format(process_time))
Example #15
Source File: evaluate_kitti_our.py From D3Feat with MIT License | 5 votes |
def deal_with_one_pair(source_keypts, target_keypts, trans, num_keypts, threshold):
    """Calculate the relative repeatability under {num_keypts} settings for one pair."""
    gtTrans = trans
    pcd = open3d.PointCloud()
    pcd.points = open3d.utility.Vector3dVector(source_keypts)
    pcd.transform(gtTrans)
    source_keypts = np.asarray(pcd.points)
    distance = cdist(source_keypts, target_keypts, metric='euclidean')
    num_repeat = np.sum(distance.min(axis=0) < threshold)
    return num_repeat * 1.0 / num_keypts
Example #16
Source File: compute_alignment.py From differentiable-point-clouds with MIT License | 5 votes |
def draw_registration_result(src, trgt):
    source = open3d.PointCloud()
    source.points = open3d.Vector3dVector(src)
    target = open3d.PointCloud()
    target.points = open3d.Vector3dVector(trgt)
    source.paint_uniform_color([1, 0.706, 0])
    target.paint_uniform_color([0, 0.651, 0.929])
    open3d.draw_geometries([source, target])
Example #17
Source File: evaluate_3dmatch_our.py From D3Feat with MIT License | 5 votes |
def deal_with_one_scene(scene, desc_name, timestr, num_keypts):
    """Calculate the relative repeatability under {num_keypts} settings for {scene}."""
    pcdpath = f"../data/3DMatch/fragments/{scene}/"
    keyptspath = f"../geometric_registration/{desc_name}_{timestr}/keypoints/{scene}"
    gtpath = f'../geometric_registration/gt_result/{scene}-evaluation/'
    gtLog = loadlog(gtpath)

    # register each pair
    num_frag = len([filename for filename in os.listdir(pcdpath) if filename.endswith('ply')])
    num_repeat_list = []
    for id1 in range(num_frag):
        for id2 in range(id1 + 1, num_frag):
            cloud_bin_s = f'cloud_bin_{id1}'
            cloud_bin_t = f'cloud_bin_{id2}'
            key = f'{cloud_bin_s.split("_")[-1]}_{cloud_bin_t.split("_")[-1]}'
            if key not in gtLog.keys():
                continue
            source_keypts = get_keypts(keyptspath, cloud_bin_s)
            target_keypts = get_keypts(keyptspath, cloud_bin_t)
            source_keypts = source_keypts[-num_keypts:, :]
            target_keypts = target_keypts[-num_keypts:, :]
            gtTrans = gtLog[key]
            pcd = open3d.PointCloud()
            pcd.points = open3d.utility.Vector3dVector(target_keypts)
            pcd.transform(gtTrans)
            target_keypts = np.asarray(pcd.points)
            distance = cdist(source_keypts, target_keypts, metric='euclidean')
            num_repeat = np.sum(distance.min(axis=0) < 0.1)
            num_repeat_list.append(num_repeat * 1.0 / num_keypts)
    # print(f"Scene {scene} repeatability: {sum(num_repeat_list) / len(num_repeat_list)}")
    return sum(num_repeat_list) / len(num_repeat_list)
Example #18
Source File: ncltpoles.py From polex with MIT License | 4 votes |
def save_local_maps(sessionname, visualize=False):
    print(sessionname)
    session = pynclt.session(sessionname)
    util.makedirs(session.dir)
    istart, imid, iend = get_map_indices(session)
    maps = []
    with progressbar.ProgressBar(max_value=len(iend)) as bar:
        for i in range(len(iend)):
            scans = []
            for idx, val in enumerate(range(istart[i], iend[i])):
                xyz, _ = session.get_velo(val)
                scan = o3.PointCloud()
                scan.points = o3.Vector3dVector(xyz)
                scans.append(scan)
            T_w_mc = util.project_xy(
                session.T_w_r_odo_velo[imid[i]].dot(T_r_mc))
            T_w_m = T_w_mc.dot(T_mc_m)
            T_m_w = util.invert_ht(T_w_m)
            T_w_r = session.T_w_r_odo_velo[istart[i]:iend[i]]
            T_m_r = np.matmul(T_m_w, T_w_r)
            occupancymap = mapping.occupancymap(scans, T_m_r, mapshape, mapsize)
            poleparams = poles.detect_poles(occupancymap, mapsize)

            if visualize:
                cloud = o3.PointCloud()
                for T, scan in zip(T_w_r, scans):
                    s = copy.copy(scan)
                    s.transform(T)
                    cloud.points.extend(s.points)
                mapboundsvis = util.create_wire_box(mapextent, [0.0, 0.0, 1.0])
                mapboundsvis.transform(T_w_m)
                polevis = []
                for j in range(poleparams.shape[0]):
                    x, y, zs, ze, a = poleparams[j, :5]
                    pole = util.create_wire_box(
                        [a, a, ze - zs], color=[1.0, 1.0, 0.0])
                    T_m_p = np.identity(4)
                    T_m_p[:3, 3] = [x - 0.5 * a, y - 0.5 * a, zs]
                    pole.transform(T_w_m.dot(T_m_p))
                    polevis.append(pole)
                o3.draw_geometries(polevis + [cloud, mapboundsvis])

            map = {'poleparams': poleparams, 'T_w_m': T_w_m,
                   'istart': istart[i], 'imid': imid[i], 'iend': iend[i]}
            maps.append(map)
            bar.update(i)
    np.savez(os.path.join(session.dir, get_localmapfile()), maps=maps)
Example #19
Source File: evaluate_eth.py From D3Feat with MIT License | 4 votes |
def register2Fragments(id1, id2, keyptspath, descpath, resultpath, gtLog,
                       desc_name, inlier_ratio, distance_threshold):
    """Register point cloud {id1} and {id2} using the keypts location and descriptors."""
    # cloud_bin_s = f'Hokuyo_{id1}'
    # cloud_bin_t = f'Hokuyo_{id2}'
    cloud_bin_s = f'cloud_bin_{id1}'
    cloud_bin_t = f'cloud_bin_{id2}'
    write_file = f'{cloud_bin_s}_{cloud_bin_t}.rt.txt'
    if os.path.exists(os.path.join(resultpath, write_file)):
        return 0, 0, 0
    source_keypts = get_keypts(keyptspath, cloud_bin_s)
    target_keypts = get_keypts(keyptspath, cloud_bin_t)
    source_desc = get_desc(descpath, cloud_bin_s, desc_name)
    target_desc = get_desc(descpath, cloud_bin_t, desc_name)
    source_desc = np.nan_to_num(source_desc)
    target_desc = np.nan_to_num(target_desc)
    # Select {num_keypts} points based on the scores. The descriptors and
    # keypts are already sorted based on the detection score.
    num_keypts = 250
    source_keypts = source_keypts[-num_keypts:, :]
    source_desc = source_desc[-num_keypts:, :]
    target_keypts = target_keypts[-num_keypts:, :]
    target_desc = target_desc[-num_keypts:, :]
    # Select {num_keypts} points randomly.
    # num_keypts = 250
    # source_indices = np.random.choice(range(source_keypts.shape[0]), num_keypts)
    # target_indices = np.random.choice(range(target_keypts.shape[0]), num_keypts)
    # source_keypts = source_keypts[source_indices, :]
    # source_desc = source_desc[source_indices, :]
    # target_keypts = target_keypts[target_indices, :]
    # target_desc = target_desc[target_indices, :]
    key = f'{cloud_bin_s.split("_")[-1]}_{cloud_bin_t.split("_")[-1]}'
    if key not in gtLog.keys():
        # skip the pairs that have less than 30% overlap.
        num_inliers = 0
        inlier_ratio = 0
        gt_flag = 0
    else:
        # find mutually closest points.
        corr = build_correspondence(source_desc, target_desc)
        gtTrans = gtLog[key]
        frag1 = source_keypts[corr[:, 0]]
        frag2_pc = open3d.PointCloud()
        frag2_pc.points = open3d.utility.Vector3dVector(target_keypts[corr[:, 1]])
        frag2_pc.transform(gtTrans)
        frag2 = np.asarray(frag2_pc.points)
        distance = np.sqrt(np.sum(np.power(frag1 - frag2, 2), axis=1))
        num_inliers = np.sum(distance < distance_threshold)
        if num_inliers / len(distance) < inlier_ratio:
            print(key)
            print("num_corr:", len(corr), "inlier_ratio:", num_inliers / len(distance))
        inlier_ratio = num_inliers / len(distance)
        gt_flag = 1
    # write the result into resultpath so that it can be re-shown.
    s = f"{cloud_bin_s}\t{cloud_bin_t}\t{num_inliers}\t{inlier_ratio:.8f}\t{gt_flag}"
    with open(os.path.join(resultpath, f'{cloud_bin_s}_{cloud_bin_t}.rt.txt'), 'w+') as f:
        f.write(s)
    return num_inliers, inlier_ratio, gt_flag
Example #20
Source File: point_cloud.py From pylot with Apache License 2.0 | 4 votes |
def from_carla_point_cloud(cls, carla_pc, lidar_setup):
    """Creates a pylot point cloud from a carla point cloud.

    Returns:
        :py:class:`.PointCloud`: A point cloud.
    """
    points = np.frombuffer(carla_pc.raw_data, dtype=np.dtype('f4'))
    points = copy.deepcopy(points)
    points = np.reshape(points, (int(points.shape[0] / 3), 3))
    return cls(points, lidar_setup)
Example #21
Source File: ncltpoles.py From polex with MIT License | 4 votes |
def save_global_map():
    globalmappos = np.empty([0, 2])
    mapfactors = np.full(len(pynclt.sessions), np.nan)
    poleparams = np.empty([0, 6])
    for isession, s in enumerate(pynclt.sessions):
        print(s)
        session = pynclt.session(s)
        istart, imid, iend = get_map_indices(session)
        localmappos = session.T_w_r_gt_velo[imid, :2, 3]
        if globalmappos.size == 0:
            imaps = range(localmappos.shape[0])
        else:
            imaps = []
            for imap in range(localmappos.shape[0]):
                distance = np.linalg.norm(
                    localmappos[imap] - globalmappos, axis=1).min()
                if distance > remapdistance:
                    imaps.append(imap)
        globalmappos = np.vstack([globalmappos, localmappos[imaps]])
        mapfactors[isession] = np.true_divide(len(imaps), len(imid))

        with progressbar.ProgressBar(max_value=len(imaps)) as bar:
            for iimap, imap in enumerate(imaps):
                scans = []
                for iscan in range(istart[imap], iend[imap]):
                    xyz, _ = session.get_velo(iscan)
                    scan = o3.PointCloud()
                    scan.points = o3.Vector3dVector(xyz)
                    scans.append(scan)
                T_w_mc = np.identity(4)
                T_w_mc[:3, 3] = session.T_w_r_gt_velo[imid[imap], :3, 3]
                T_w_m = T_w_mc.dot(T_mc_m)
                T_m_w = util.invert_ht(T_w_m)
                T_m_r = np.matmul(
                    T_m_w, session.T_w_r_gt_velo[istart[imap]:iend[imap]])
                occupancymap = mapping.occupancymap(
                    scans, T_m_r, mapshape, mapsize)
                localpoleparams = poles.detect_poles(occupancymap, mapsize)
                localpoleparams[:, :2] += T_w_m[:2, 3]
                poleparams = np.vstack([poleparams, localpoleparams])
                bar.update(iimap)

    xy = poleparams[:, :2]
    a = poleparams[:, [4]]
    boxes = np.hstack([xy - 0.5 * a, xy + 0.5 * a])
    clustermeans = np.empty([0, 5])
    scores = []
    for ci in cluster.cluster_boxes(boxes):
        ci = list(ci)
        if len(ci) < n_mapdetections:
            continue
        clustermeans = np.vstack([clustermeans, np.average(
            poleparams[ci, :-1], axis=0, weights=poleparams[ci, -1])])
        scores.append(np.mean(poleparams[ci, -1]))
    clustermeans = np.hstack([clustermeans, np.array(scores).reshape([-1, 1])])
    globalmapfile = os.path.join('nclt', get_globalmapname() + '.npz')
    np.savez(globalmapfile, polemeans=clustermeans,
             mapfactors=mapfactors, mappos=globalmappos)
    plot_global_map(globalmapfile)
Example #22
Source File: pcdlib.py From pyrobot with MIT License | 4 votes |
def main():
    home_dir = expanduser("~")
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--img_dir",
        help="path to the directory that saves"
        " depth and rgb images from ORB_SLAM2",
        default="%s/.ros/Imgs" % home_dir,
        type=str,
    )
    parser.add_argument(
        "--depth_max", default=1.5, help="maximum value for the depth", type=float
    )
    parser.add_argument(
        "--depth_min", default=0.2, help="minimum value for the depth", type=float
    )
    parser.add_argument(
        "--cfg_filename",
        default="realsense_d435.yaml",
        help="which config file to use",
        type=str,
    )
    parser.add_argument(
        "--subsample_pixs",
        default=1,
        help="sample every n pixels in row/column"
        " when the point cloud is reconstructed",
        type=int,
    )
    args = parser.parse_args()

    rospy.init_node("reconstruct_world", anonymous=True)
    rgb_dir = os.path.join(args.img_dir, "RGBImgs")
    depth_dir = os.path.join(args.img_dir, "DepthImgs")
    pcd_processor = PointCloudProcessor(
        rgb_dir=rgb_dir,
        depth_dir=depth_dir,
        cfg_filename=args.cfg_filename,
        subsample_pixs=args.subsample_pixs,
        depth_threshold=(args.depth_min, args.depth_max),
    )
    time.sleep(2)
    points, colors = pcd_processor.get_current_pcd()
    if USE_OPEN3D:
        pcd = open3d.PointCloud()
        pcd.points = open3d.Vector3dVector(points)
        pcd.colors = open3d.Vector3dVector(colors / 255.0)
        coord = open3d.create_mesh_coordinate_frame(1, [0, 0, 0])
        open3d.draw_geometries([pcd, coord])
Example #23
Source File: compute_alignment.py From differentiable-point-clouds with MIT License | 4 votes |
def open3d_icp(src, trgt, init_rotation=np.eye(3, 3)):
    source = open3d.PointCloud()
    source.points = open3d.Vector3dVector(src)
    target = open3d.PointCloud()
    target.points = open3d.Vector3dVector(trgt)

    init_rotation_4x4 = np.eye(4, 4)
    init_rotation_4x4[0:3, 0:3] = init_rotation

    threshold = 0.2
    reg_p2p = open3d.registration_icp(
        source, target, threshold, init_rotation_4x4,
        open3d.TransformationEstimationPointToPoint())
    return reg_p2p
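Examples #16 and #23 come from the same file (compute_alignment.py), so a natural way to use them together is sketched below. This is a hypothetical usage example, not part of the original project: src and trgt are placeholder (N, 3) NumPy arrays, and the sketch relies only on the standard attributes of the RegistrationResult returned by registration_icp (transformation, fitness, inlier_rmse).

import numpy as np

# Placeholder inputs: two roughly overlapping point sets.
src = np.random.rand(500, 3)
trgt = src + np.array([0.05, 0.0, 0.0])

reg = open3d_icp(src, trgt)
print(reg.fitness, reg.inlier_rmse)  # alignment quality reported by ICP
print(reg.transformation)            # estimated 4x4 rigid transform

# Apply the estimated transform to the source points and inspect the result.
src_h = np.hstack([src, np.ones((src.shape[0], 1))])
src_aligned = (reg.transformation @ src_h.T).T[:, :3]
draw_registration_result(src_aligned, trgt)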
Example #24
Source File: visualise.py From differentiable-point-clouds with MIT License | 3 votes |
def vis_voxels(cfg, voxels, rgb=None, vis_axis=0):
    # TODO move to the other module and do import in the module
    import open3d

    threshold = cfg.vis_threshold
    xyz, occupancies = voxel2pc(voxels, threshold)

    # Pass xyz to Open3D.PointCloud and visualize
    pcd = open3d.PointCloud()
    pcd.points = open3d.Vector3dVector(xyz)
    if rgb is not None:
        rgbs = np.reshape(rgb, (-1, 3))
        colors = rgbs[occupancies, :]
        colors = np.clip(colors, 0.0, 1.0)
        pcd.colors = open3d.Vector3dVector(colors)
    else:
        voxels = np.squeeze(voxels)
        sh = voxels.shape
        rgb = np.zeros((sh[0], sh[1], sh[2], 3), dtype=np.float32)
        for k in range(sh[0]):
            color = cm.gist_rainbow(float(k) / (sh[0] - 1))[:3]
            if vis_axis == 0:
                rgb[k, :, :, :] = color
            elif vis_axis == 1:
                rgb[:, k, :, :] = color
            elif vis_axis == 2:
                rgb[:, :, k, :] = color
            else:
                assert(False)
        rgbs = np.reshape(rgb, (-1, 3))
        colors = rgbs[occupancies, :]
        pcd.colors = open3d.Vector3dVector(colors)

    if False:
        axis_vis = xyz[:, 0]
        min_ = np.min(axis_vis)
        max_ = np.max(axis_vis)
        colors = cm.gist_rainbow((axis_vis - min_) / (max_ - min_))[:, 0:3]
        pcd.colors = open3d.Vector3dVector(colors)

    # open3d.write_point_cloud("sync.ply", pcd)
    # Load saved point cloud and transform it into NumPy array
    # pcd_load = open3d.read_point_cloud("sync.ply")
    # xyz_load = np.asarray(pcd_load.points)
    # print(xyz_load)

    # visualization
    open3d.draw_geometries([pcd])