Update strategy generation and overlap-rate computation
commit 71676e2f4e (parent: a14bdc2c55)
Application entry for the "generate" app:
@@ -1,10 +1,9 @@
 from PytorchBoot.application import PytorchBootApplication
 from runners.strategy_generator import StrategyGenerator
-from runners.data_generator import DataGenerator

 @PytorchBootApplication("generate")
 class GenerateApp:
     @staticmethod
     def start():
-        #StrategyGenerator("configs\strategy_generate_config.yaml").run()
-        DataGenerator("configs/data_generate_config.yaml").run()
+        StrategyGenerator("configs/strategy_generate_config.yaml").run()
configs/data_generate_config.yaml (deleted):
@@ -1,24 +0,0 @@
-
-runner:
-  general:
-    seed: 0
-    device: cpu
-    cuda_visible_devices: "0,1,2,3,4,5,6,7"
-
-  experiment:
-    name: debug
-    root_dir: "experiments"
-
-  generate:
-    voxel_threshold: 0.005
-    overlap_threshold: 0.3
-    dataset_list:
-      - OmniObject3d
-
-datasets:
-  OmniObject3d:
-    model_dir: "/media/hofee/data/data/object_meshes"
-    output_dir: "/media/hofee/data/data/omni_sample_output"
-
configs/strategy_generate_config.yaml:
@@ -12,12 +12,14 @@ runner:
   generate:
     voxel_threshold: 0.005
-    overlap_threshold: 0.3
+    overlap_threshold: 0.5
+    save_points: True
     dataset_list:
       - OmniObject3d

 datasets:
   OmniObject3d:
-    root_dir: "C:\\Document\\Local Project\\nbv_rec\\sample_dataset"
+    model_dir: "/media/hofee/data/data/scaled_object_meshes"
+    root_dir: "/media/hofee/data/data/nbv_rec/sample"
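The raised overlap_threshold (0.3 to 0.5) demands more redundancy between consecutive views, and save_points toggles the new per-frame point dumps in StrategyGenerator. These nested keys are read with ConfigManager.get("runner", "generate", ...). A minimal stand-in for that lookup, assuming a plain-YAML parse (this is not PytorchBoot's actual ConfigManager implementation):

    import yaml
    from functools import reduce

    def config_get(cfg: dict, *keys):
        # Walk nested dicts: config_get(cfg, "runner", "generate", "save_points")
        return reduce(lambda node, key: node[key], keys, cfg)

    with open("configs/strategy_generate_config.yaml") as f:
        cfg = yaml.safe_load(f)

    print(config_get(cfg, "runner", "generate", "overlap_threshold"))  # 0.5
    print(config_get(cfg, "runner", "generate", "save_points"))        # True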
NBVReconstructionDataset module:
@@ -9,7 +9,7 @@ sys.path.append(r"C:\Document\Local Project\nbv_rec\nbv_reconstruction")
 from utils.data_load import DataLoadUtil
 from utils.pose import PoseUtil

-@stereotype.dataset("nbv_reconstruction_dataset", comment="to be modified")
+@stereotype.dataset("nbv_reconstruction_dataset", comment="not tested")
 class NBVReconstructionDataset(BaseDataset):
     def __init__(self, config):
         super(NBVReconstructionDataset, self).__init__(config)
runners/data_generator.py (deleted):
@@ -1,34 +0,0 @@
-import os
-import json
-from PytorchBoot.runners.runner import Runner
-from PytorchBoot.config import ConfigManager
-from PytorchBoot.utils import Log
-import PytorchBoot.stereotype as stereotype
-
-
-@stereotype.runner("data_generator", comment="unfinished")
-class DataGenerator(Runner):
-    def __init__(self, config):
-        super().__init__(config)
-        self.load_experiment("generate")
-
-    def run(self):
-        dataset_name_list = ConfigManager.get("runner", "generate", "dataset_list")
-        for dataset_name in dataset_name_list:
-            self.generate(dataset_name)
-
-    def generate(self, dataset_name):
-        dataset_config = ConfigManager.get("datasets", dataset_name)
-        model_dir = dataset_config["model_dir"]
-        output_dir = dataset_config["output_dir"]
-        Log.debug(model_dir)
-        Log.debug(output_dir)
-
-    def create_experiment(self, backup_name=None):
-        super().create_experiment(backup_name)
-        output_dir = os.path.join(str(self.experiment_path), "output")
-        os.makedirs(output_dir)
-
-    def load_experiment(self, backup_name=None):
-        super().load_experiment(backup_name)
runners/strategy_generator.py:
@@ -1,11 +1,15 @@
 import os
 import json

+import numpy as np
 from PytorchBoot.runners.runner import Runner
 from PytorchBoot.config import ConfigManager
+from PytorchBoot.utils import Log
 import PytorchBoot.stereotype as stereotype

 from utils.data_load import DataLoadUtil
 from utils.reconstruction import ReconstructionUtil
+from utils.pts import PtsUtil

 @stereotype.runner("strategy_generator")
 class StrategyGenerator(Runner):
@@ -14,17 +18,19 @@ class StrategyGenerator(Runner):
         self.load_experiment("generate")

     def run(self):
-        dataset_name_list = ConfigManager.get("runner", "generate" "dataset_list")
+        dataset_name_list = ConfigManager.get("runner", "generate", "dataset_list")
         voxel_threshold, overlap_threshold = ConfigManager.get("runner", "generate", "voxel_threshold"), ConfigManager.get("runner", "generate", "overlap_threshold")
+        self.save_pts = ConfigManager.get("runner", "generate", "save_points")
         for dataset_name in dataset_name_list:
             root_dir = ConfigManager.get("datasets", dataset_name, "root_dir")
-            output_dir = ConfigManager.get("datasets", dataset_name, "output_dir")
-            if not os.path.exists(output_dir):
-                os.makedirs(output_dir)
-
-            scene_idx_list = DataLoadUtil.get_scene_idx_list(root_dir)
-            for scene_idx in scene_idx_list:
-                self.generate_sequence(root_dir, output_dir, scene_idx, voxel_threshold, overlap_threshold)
+            model_dir = ConfigManager.get("datasets", dataset_name, "model_dir")
+            scene_name_list = os.listdir(root_dir)
+            cnt = 0
+            total = len(scene_name_list)
+            for scene_name in scene_name_list:
+                Log.info(f"({dataset_name})Processing [{cnt}/{total}]: {scene_name}")
+                self.generate_sequence(root_dir, model_dir, scene_name, voxel_threshold, overlap_threshold)
+                cnt += 1

     def create_experiment(self, backup_name=None):
         super().create_experiment(backup_name)
@@ -34,26 +40,40 @@ class StrategyGenerator(Runner):
     def load_experiment(self, backup_name=None):
         super().load_experiment(backup_name)

-    def generate_sequence(self, root, output_dir, seq, voxel_threshold, overlap_threshold):
-        frame_idx_list = DataLoadUtil.get_frame_idx_list(root, seq)
-        model_pts = DataLoadUtil.load_model_points(root, seq)
+    def generate_sequence(self, root, model_dir, scene_name, voxel_threshold, overlap_threshold):
+        frame_num = DataLoadUtil.get_scene_seq_length(root, scene_name)
+        model_pts = DataLoadUtil.load_original_model_points(model_dir, scene_name)
+        down_sampled_model_pts = PtsUtil.voxel_downsample_point_cloud(model_pts, voxel_threshold)
+        obj_pose = DataLoadUtil.load_target_object_pose(root, scene_name)
+        down_sampled_transformed_model_pts = PtsUtil.transform_point_cloud(down_sampled_model_pts, obj_pose)
         pts_list = []
-        for frame_idx in frame_idx_list:
-            path = DataLoadUtil.get_path(root, seq, frame_idx)
+        for frame_idx in range(frame_num):
+            path = DataLoadUtil.get_path(root, scene_name, frame_idx)
             point_cloud = DataLoadUtil.get_point_cloud_world_from_path(path)
-            sampled_point_cloud = ReconstructionUtil.downsample_point_cloud(point_cloud, voxel_threshold)
+            sampled_point_cloud = PtsUtil.voxel_downsample_point_cloud(point_cloud, voxel_threshold)
+            if self.save_pts:
+                pts_dir = os.path.join(root, scene_name, "pts")
+                if not os.path.exists(pts_dir):
+                    os.makedirs(pts_dir)
+                np.savetxt(os.path.join(pts_dir, f"{frame_idx}.txt"), sampled_point_cloud)
             pts_list.append(sampled_point_cloud)
-        limited_useful_view, _ = ReconstructionUtil.compute_next_best_view_sequence_with_overlap(model_pts, pts_list, threshold=voxel_threshold, overlap_threshold=overlap_threshold)
+        limited_useful_view, _ = ReconstructionUtil.compute_next_best_view_sequence_with_overlap(down_sampled_transformed_model_pts, pts_list, threshold=voxel_threshold, overlap_threshold=overlap_threshold)
         data_pairs = self.generate_data_pairs(limited_useful_view)
         seq_save_data = {
             "data_pairs": data_pairs,
             "best_sequence": limited_useful_view,
             "max_coverage_rate": limited_useful_view[-1][1]
         }
-        output_label_path = DataLoadUtil.get_label_path(output_dir, seq)
+        Log.success(f"Scene <{scene_name}> Finished, Max Coverage Rate: {limited_useful_view[-1][1]}, Best Sequence length: {len(limited_useful_view)}")
+        output_label_path = DataLoadUtil.get_label_path(root, scene_name)
         with open(output_label_path, 'w') as f:
             json.dump(seq_save_data, f)
+        DataLoadUtil.save_downsampled_world_model_points(root, scene_name, down_sampled_transformed_model_pts)

     def generate_data_pairs(self, useful_view):
         data_pairs = []
         for next_view_idx in range(len(useful_view)):
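The label JSON written by generate_sequence can be read straight back by downstream code. A small sketch (the scene name is a hypothetical placeholder; get_label_path is the helper used above):

    import json
    from utils.data_load import DataLoadUtil

    root = "/media/hofee/data/data/nbv_rec/sample"   # root_dir from the config above
    scene_name = "example_scene"                     # hypothetical scene folder

    with open(DataLoadUtil.get_label_path(root, scene_name)) as f:
        label = json.load(f)

    # Keys mirror seq_save_data above; best_sequence holds (view_idx, coverage) pairs.
    print(label["max_coverage_rate"], len(label["best_sequence"]), len(label["data_pairs"]))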
utils/data_load.py:
@@ -17,11 +17,50 @@ class DataLoadUtil:
         return path

     @staticmethod
-    def load_model_points(root, scene_name):
-        model_path = os.path.join(root, scene_name, "sampled_model_points.txt")
+    def get_sampled_model_points_path(root, scene_name):
+        path = os.path.join(root, scene_name, "sampled_model_points.txt")
+        return path
+
+    @staticmethod
+    def get_scene_seq_length(root, scene_name):
+        camera_params_path = os.path.join(root, scene_name, "camera_params")
+        return len(os.listdir(camera_params_path))
+
+    @staticmethod
+    def load_downsampled_world_model_points(root, scene_name):
+        model_path = DataLoadUtil.get_sampled_model_points_path(root, scene_name)
+        model_points = np.loadtxt(model_path)
+        return model_points
+
+    @staticmethod
+    def save_downsampled_world_model_points(root, scene_name, model_points):
+        model_path = DataLoadUtil.get_sampled_model_points_path(root, scene_name)
+        np.savetxt(model_path, model_points)
+
+    @staticmethod
+    def load_original_model_points(model_dir, object_name):
+        model_path = os.path.join(model_dir, object_name, "mesh.obj")
         mesh = trimesh.load(model_path)
         return mesh.vertices

+    @staticmethod
+    def load_scene_info(root, scene_name):
+        scene_info_path = os.path.join(root, scene_name, "scene_info.json")
+        with open(scene_info_path, "r") as f:
+            scene_info = json.load(f)
+        return scene_info
+
+    @staticmethod
+    def load_target_object_pose(root, scene_name):
+        scene_info = DataLoadUtil.load_scene_info(root, scene_name)
+        target_name = scene_info["target_name"]
+        transformation = scene_info[target_name]
+        location = transformation["location"]
+        rotation_euler = transformation["rotation_euler"]
+        pose_mat = trimesh.transformations.euler_matrix(*rotation_euler)
+        pose_mat[:3, 3] = location
+        return pose_mat
+
     @staticmethod
     def load_depth(path):
         depth_path = os.path.join(os.path.dirname(path), "depth", os.path.basename(path) + ".png")
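load_target_object_pose assembles a 4x4 homogeneous pose from the Euler angles and location stored in scene_info.json. A self-contained sketch of the same construction (toy values; trimesh's default sxyz axis convention is assumed):

    import numpy as np
    import trimesh

    rotation_euler = [0.0, 0.0, np.pi / 2]   # toy value: 90 degrees about z
    location = [0.1, 0.2, 0.3]               # toy value

    pose_mat = trimesh.transformations.euler_matrix(*rotation_euler)  # 4x4, rotation part only
    pose_mat[:3, 3] = location                                        # write the translation column

    point_h = np.array([[1.0, 0.0, 0.0, 1.0]])   # homogeneous point, as in PtsUtil
    print((pose_mat @ point_h.T).T[:, :3])       # -> approx [[0.1, 1.2, 0.3]]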
@@ -83,8 +122,9 @@ class DataLoadUtil:
         y = (j - cam_intrinsic[1, 2]) * z / cam_intrinsic[1, 1]

         points_camera = np.stack((x, y, z), axis=-1).reshape(-1, 3)
-        mask = mask.reshape(-1, 3)
-        target_mask = np.all(mask == target_mask_label)
+        mask = mask.reshape(-1)
+
+        target_mask = mask == target_mask_label
         target_points_camera = points_camera[target_mask]
         target_points_camera_aug = np.concatenate([target_points_camera, np.ones((target_points_camera.shape[0], 1))], axis=-1)
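The mask change here fixes a real bug: np.all(mask == target_mask_label) reduces to a single scalar, so it could never act as a per-pixel index; the element-wise comparison produces one boolean per point. A tiny demonstration with toy labels:

    import numpy as np

    mask = np.array([7, 7, 3, 7])       # toy per-pixel labels
    broken = np.all(mask == 7)          # scalar np.True_/np.False_, not a mask
    fixed = mask == 7                   # array([ True,  True, False,  True])

    points = np.arange(12).reshape(4, 3)
    print(points[fixed].shape)          # (3, 3): keeps only the matching points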
@@ -104,10 +144,10 @@ class DataLoadUtil:
         return point_cloud['points_world']

     @staticmethod
-    def get_point_cloud_list_from_seq(root, seq_idx, num_frames):
+    def get_point_cloud_list_from_seq(root, scene_name, num_frames):
         point_cloud_list = []
-        for idx in range(num_frames):
-            path = DataLoadUtil.get_path(root, seq_idx, idx)
+        for frame_idx in range(num_frames):
+            path = DataLoadUtil.get_path(root, scene_name, frame_idx)
             point_cloud = DataLoadUtil.get_point_cloud_world_from_path(path)
             point_cloud_list.append(point_cloud)
         return point_cloud_list
utils/pts.py (new file, 17 lines):
@@ -0,0 +1,17 @@
+import numpy as np
+import open3d as o3d
+
+class PtsUtil:
+
+    @staticmethod
+    def voxel_downsample_point_cloud(point_cloud, voxel_size=0.005):
+        o3d_pc = o3d.geometry.PointCloud()
+        o3d_pc.points = o3d.utility.Vector3dVector(point_cloud)
+        downsampled_pc = o3d_pc.voxel_down_sample(voxel_size)
+        return np.asarray(downsampled_pc.points)
+
+    @staticmethod
+    def transform_point_cloud(points, pose_mat):
+        points_h = np.concatenate([points, np.ones((points.shape[0], 1))], axis=1)
+        points_h = np.dot(pose_mat, points_h.T).T
+        return points_h[:, :3]
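A quick usage sketch for the new helpers (random input, only to show the shapes):

    import numpy as np
    from utils.pts import PtsUtil

    pts = np.random.rand(10000, 3)                            # toy cloud in the unit cube
    down = PtsUtil.voxel_downsample_point_cloud(pts, 0.05)    # roughly one point per 5 cm voxel
    print(pts.shape, down.shape)                              # the second count is far smaller

    # transform_point_cloud applies a 4x4 pose; the identity leaves points unchanged.
    assert np.allclose(PtsUtil.transform_point_cloud(down, np.eye(4)), down)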
utils/reconstruction.py:
@@ -1,6 +1,6 @@
 import numpy as np
-import open3d as o3d
 from scipy.spatial import cKDTree
+from utils.pts import PtsUtil

 class ReconstructionUtil:
@@ -13,18 +13,12 @@ class ReconstructionUtil:
         return coverage_rate

     @staticmethod
-    def compute_overlap_rate(point_cloud1, point_cloud2, threshold=0.01):
-        kdtree1 = cKDTree(point_cloud1)
-        kdtree2 = cKDTree(point_cloud2)
-        distances1, _ = kdtree2.query(point_cloud1)
-        distances2, _ = kdtree1.query(point_cloud2)
-        overlapping_points1 = np.sum(distances1 < threshold)
-        overlapping_points2 = np.sum(distances2 < threshold)
-        overlap_rate1 = overlapping_points1 / point_cloud1.shape[0]
-        overlap_rate2 = overlapping_points2 / point_cloud2.shape[0]
-        return (overlap_rate1 + overlap_rate2) / 2
+    def compute_overlap_rate(new_point_cloud, combined_point_cloud, threshold=0.01):
+        kdtree = cKDTree(combined_point_cloud)
+        distances, _ = kdtree.query(new_point_cloud)
+        overlapping_points = np.sum(distances < threshold)
+        overlap_rate = overlapping_points / new_point_cloud.shape[0]
+        return overlap_rate

     @staticmethod
     def combine_point_with_view_sequence(point_list, view_sequence):
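The rewritten metric is now directional: it measures what fraction of the new view already lies within threshold of the combined cloud, rather than averaging two symmetric rates. A worked example:

    import numpy as np
    from scipy.spatial import cKDTree

    combined = np.array([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0]])
    new = np.array([[0.0, 0.0, 0.005], [5.0, 5.0, 5.0]])

    distances, _ = cKDTree(combined).query(new)              # [0.005, ~8.12]
    overlap_rate = np.sum(distances < 0.01) / new.shape[0]
    print(overlap_rate)   # 0.5: half of the new view re-observes known surface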
@@ -41,46 +35,14 @@ class ReconstructionUtil:

         for view_index, view in enumerate(views):
             candidate_views = combined_point_cloud + [view]
-            down_sampled_combined_point_cloud = ReconstructionUtil.downsample_point_cloud(candidate_views, threshold)
+            down_sampled_combined_point_cloud = PtsUtil.voxel_downsample_point_cloud(candidate_views, threshold)
             new_coverage = ReconstructionUtil.compute_coverage_rate(target_point_cloud, down_sampled_combined_point_cloud, threshold)
             coverage_increase = new_coverage - current_coverage
             if coverage_increase > best_coverage_increase:
                 best_coverage_increase = coverage_increase
                 best_view = view_index
         return best_view, best_coverage_increase

-    @staticmethod
-    def compute_next_best_view_sequence(target_point_cloud, point_cloud_list, threshold=0.01):
-        selected_views = []
-        current_coverage = 0.0
-        remaining_views = list(range(len(point_cloud_list)))
-        view_sequence = []
-        target_point_cloud = ReconstructionUtil.downsample_point_cloud(target_point_cloud, threshold)
-        while remaining_views:
-            best_view = None
-            best_coverage_increase = -1
-
-            for view_index in remaining_views:
-                candidate_views = selected_views + [point_cloud_list[view_index]]
-                combined_point_cloud = np.vstack(candidate_views)
-
-                down_sampled_combined_point_cloud = ReconstructionUtil.downsample_point_cloud(combined_point_cloud, threshold)
-                new_coverage = ReconstructionUtil.compute_coverage_rate(target_point_cloud, down_sampled_combined_point_cloud, threshold)
-                coverage_increase = new_coverage - current_coverage
-
-                if coverage_increase > best_coverage_increase:
-                    best_coverage_increase = coverage_increase
-                    best_view = view_index
-
-            if best_view is not None:
-                if best_coverage_increase <= 1e-3:
-                    break
-                selected_views.append(point_cloud_list[best_view])
-                current_coverage += best_coverage_increase
-                view_sequence.append((best_view, current_coverage))
-                remaining_views.remove(best_view)
-        return view_sequence, remaining_views
-
     @staticmethod
     def compute_next_best_view_sequence_with_overlap(target_point_cloud, point_cloud_list, threshold=0.01, overlap_threshold=0.3):
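With the plain greedy variant deleted above, the overlap-aware method is the single remaining entry point; StrategyGenerator passes threshold=voxel_threshold (0.005) and the configured overlap_threshold (0.5). A toy call with two synthetic, strongly overlapping views (random data, just to show the call shape and return value):

    import numpy as np
    from utils.reconstruction import ReconstructionUtil

    target = np.random.rand(2000, 3)              # toy target surface
    views = [target[:1000], target[200:1200]]     # ~80% mutual overlap

    seq, leftover = ReconstructionUtil.compute_next_best_view_sequence_with_overlap(
        target, views, threshold=0.005, overlap_threshold=0.5)
    print(seq)   # [(view_idx, cumulative_coverage), ...]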
@@ -88,7 +50,6 @@ class ReconstructionUtil:
         current_coverage = 0.0
         remaining_views = list(range(len(point_cloud_list)))
         view_sequence = []
-        target_point_cloud = ReconstructionUtil.downsample_point_cloud(target_point_cloud, threshold)

         while remaining_views:
             best_view = None
@@ -98,15 +59,15 @@ class ReconstructionUtil:

             if selected_views:
                 combined_old_point_cloud = np.vstack(selected_views)
-                down_sampled_old_point_cloud = ReconstructionUtil.downsample_point_cloud(combined_old_point_cloud, threshold)
-                down_sampled_new_view_point_cloud = ReconstructionUtil.downsample_point_cloud(point_cloud_list[view_index], threshold)
-                overlap_rate = ReconstructionUtil.compute_overlap_rate(down_sampled_old_point_cloud, down_sampled_new_view_point_cloud, threshold)
+                down_sampled_old_point_cloud = PtsUtil.voxel_downsample_point_cloud(combined_old_point_cloud, threshold)
+                down_sampled_new_view_point_cloud = PtsUtil.voxel_downsample_point_cloud(point_cloud_list[view_index], threshold)
+                overlap_rate = ReconstructionUtil.compute_overlap_rate(down_sampled_new_view_point_cloud, down_sampled_old_point_cloud, threshold)
                 if overlap_rate < overlap_threshold:
                     continue

             candidate_views = selected_views + [point_cloud_list[view_index]]
             combined_point_cloud = np.vstack(candidate_views)
-            down_sampled_combined_point_cloud = ReconstructionUtil.downsample_point_cloud(combined_point_cloud, threshold)
+            down_sampled_combined_point_cloud = PtsUtil.voxel_downsample_point_cloud(combined_point_cloud, threshold)
             new_coverage = ReconstructionUtil.compute_coverage_rate(target_point_cloud, down_sampled_combined_point_cloud, threshold)
             coverage_increase = new_coverage - current_coverage
             #print(f"view_index: {view_index}, coverage_increase: {coverage_increase}")
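Taken together, the selection loop now gates each candidate twice: it must first share at least overlap_threshold of its (downsampled) points with the already-selected cloud, so consecutive views stay registrable, and only then competes on coverage gain. A condensed sketch of that gate with the same semantics as the loop above:

    import numpy as np
    from utils.pts import PtsUtil
    from utils.reconstruction import ReconstructionUtil

    def view_is_admissible(selected_views, candidate, threshold, overlap_threshold):
        # The first view is always admissible; nothing exists to overlap with yet.
        if not selected_views:
            return True
        old = PtsUtil.voxel_downsample_point_cloud(np.vstack(selected_views), threshold)
        new = PtsUtil.voxel_downsample_point_cloud(candidate, threshold)
        # Fraction of the candidate that re-observes already-covered surface.
        return ReconstructionUtil.compute_overlap_rate(new, old, threshold) >= overlap_threshold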
@@ -128,12 +89,5 @@ class ReconstructionUtil:
                 break

         return view_sequence, remaining_views
-
-    def downsample_point_cloud(point_cloud, voxel_size=0.005):
-        o3d_pc = o3d.geometry.PointCloud()
-        o3d_pc.points = o3d.utility.Vector3dVector(point_cloud)
-        downsampled_pc = o3d_pc.voxel_down_sample(voxel_size)
-        return np.asarray(downsampled_pc.points)