diff --git a/core/nbv_dataset.py b/core/nbv_dataset.py
index a3c5e1f..702134e 100644
--- a/core/nbv_dataset.py
+++ b/core/nbv_dataset.py
@@ -162,8 +162,8 @@ class NBVReconstructionDataset(BaseDataset):
             )
 
         combined_scanned_views_pts = np.concatenate(scanned_views_pts, axis=0)
-        voxel_downsampled_combined_scanned_pts_np = (
-            PtsUtil.voxel_downsample_point_cloud(combined_scanned_views_pts, 0.002)
+        voxel_downsampled_combined_scanned_pts_np, _ = (
+            PtsUtil.voxelize_points(combined_scanned_views_pts, 0.002)
         )
         random_downsampled_combined_scanned_pts_np = (
             PtsUtil.random_downsample_point_cloud(
diff --git a/utils/pts.py b/utils/pts.py
index 0551149..4716ce1 100644
--- a/utils/pts.py
+++ b/utils/pts.py
@@ -12,12 +12,6 @@ class PtsUtil:
         downsampled_pc = o3d_pc.voxel_down_sample(voxel_size)
         return np.asarray(downsampled_pc.points)
 
-    @staticmethod
-    def transform_point_cloud(points, pose_mat):
-        points_h = np.concatenate([points, np.ones((points.shape[0], 1))], axis=1)
-        points_h = np.dot(pose_mat, points_h.T).T
-        return points_h[:, :3]
-
     @staticmethod
     def random_downsample_point_cloud(point_cloud, num_points, require_idx=False):
         if point_cloud.shape[0] == 0:
@@ -29,6 +23,28 @@ class PtsUtil:
             return point_cloud[idx], idx
         return point_cloud[idx]
 
+    @staticmethod
+    def fps_downsample_point_cloud(point_cloud, num_points, require_mask=False):
+        N = point_cloud.shape[0]
+        mask = np.zeros(N, dtype=bool)
+
+        sampled_indices = np.zeros(num_points, dtype=int)
+        sampled_indices[0] = np.random.randint(0, N)
+        mask[sampled_indices[0]] = True
+        distances = np.linalg.norm(point_cloud - point_cloud[sampled_indices[0]], axis=1)
+        for i in range(1, num_points):
+            farthest_index = np.argmax(distances)
+            sampled_indices[i] = farthest_index
+            mask[farthest_index] = True
+
+            new_distances = np.linalg.norm(point_cloud - point_cloud[farthest_index], axis=1)
+            distances = np.minimum(distances, new_distances)
+
+        sampled_points = point_cloud[sampled_indices]
+        if require_mask:
+            return sampled_points, mask
+        return sampled_points
+
     @staticmethod
     def random_downsample_point_cloud_tensor(point_cloud, num_points):
         idx = torch.randint(0, len(point_cloud), (num_points,))
@@ -40,6 +56,12 @@ class PtsUtil:
         unique_voxels = np.unique(voxel_indices, axis=0, return_inverse=True)
         return unique_voxels
 
+    @staticmethod
+    def transform_point_cloud(points, pose_mat):
+        points_h = np.concatenate([points, np.ones((points.shape[0], 1))], axis=1)
+        points_h = np.dot(pose_mat, points_h.T).T
+        return points_h[:, :3]
+
     @staticmethod
     def get_overlapping_points(point_cloud_L, point_cloud_R, voxel_size=0.005, require_idx=False):
         voxels_L, indices_L = PtsUtil.voxelize_points(point_cloud_L, voxel_size)
@@ -56,18 +78,6 @@ class PtsUtil:
         if require_idx:
             return overlapping_points, mask_L
         return overlapping_points
 
-    @staticmethod
-    def new_filter_points(points, normals, cam_pose, theta=75, require_idx=False):
-        camera_axis = -cam_pose[:3, 2]
-        normals_normalized = normals / np.linalg.norm(normals, axis=1, keepdims=True)
-        cos_theta = np.dot(normals_normalized, camera_axis)
-        theta_rad = np.deg2rad(theta)
-        idx = cos_theta > np.cos(theta_rad)
-        filtered_points= points[idx]
-        if require_idx:
-            return filtered_points, idx
-        return filtered_points
-
     @staticmethod
     def filter_points(points, points_normals, cam_pose, voxel_size=0.002, theta=45, z_range=(0.2, 0.45)):
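
Reviewer note: a minimal usage sketch for the new `PtsUtil.fps_downsample_point_cloud` helper added in `utils/pts.py`. The input cloud, the 1024-point budget, and the placeholder normals below are illustrative assumptions, not repo data:

```python
import numpy as np

from utils.pts import PtsUtil

# Illustrative input: 10,000 random points in a unit cube (placeholder data).
points = np.random.rand(10000, 3)

# Farthest point sampling: each new sample is the point with the largest
# distance to the set chosen so far, so the kept points cover the cloud
# evenly, unlike random_downsample_point_cloud's uniform draw.
fps_pts = PtsUtil.fps_downsample_point_cloud(points, 1024)
assert fps_pts.shape == (1024, 3)

# require_mask=True also returns a boolean mask over the input rows, so
# aligned per-point arrays (e.g. normals) can be filtered consistently.
# Note the mask follows input row order, not the FPS sampling order.
fps_pts, mask = PtsUtil.fps_downsample_point_cloud(points, 1024, require_mask=True)
normals = np.random.rand(10000, 3)  # placeholder per-point array
sampled_normals = normals[mask]
```

The loop is O(N × num_points), which should be acceptable at the point counts this dataset pipeline works with, but is worth keeping in mind for large clouds.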