update scan_points strategy

parent 551282a0ec
commit c8b8a44252
@@ -12,7 +12,8 @@ runner:
 
   generate:
     voxel_threshold: 0.01
-    overlap_threshold: 0.5
+    soft_overlap_threshold: 0.3
+    hard_overlap_threshold: 0.6
     filter_degree: 75
     to_specified_dir: True # if True, output_dir is used, otherwise, root_dir is used
     save_points: True
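The single overlap_threshold is split into a soft/hard pair. As the selection logic later in this commit shows, a candidate view that already shares enough scan points with previously selected views only needs to clear the looser soft bound on point-cloud overlap, while a view with no shared scan points must clear the hard bound. A minimal sketch of that rule (the helper name is illustrative, not from the repo):

def pick_overlap_threshold(shares_scan_points, soft=0.3, hard=0.6):
    # defaults mirror the config above; the real decision is made inline in
    # ReconstructionUtil.compute_next_best_view_sequence_with_overlap
    return soft if shares_scan_points else hard

print(pick_overlap_threshold(True), pick_overlap_threshold(False))   # 0.3 0.6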
@@ -20,17 +21,17 @@ runner:
     save_best_combined_points: False
     save_mesh: True
     overwrite: False
-    seq_num: 50
+    seq_num: 10
     dataset_list:
       - OmniObject3d
 
 datasets:
   OmniObject3d:
     #"/media/hofee/data/data/temp_output"
-    root_dir: "/media/hofee/repository/new_sample"
+    root_dir: "/media/hofee/repository/new_full_box_data"
    model_dir: "/media/hofee/data/data/scaled_object_meshes"
     from: 0
-    to: -1 # -1 means all
+    to: -1 # -1 means end
     #output_dir: "/media/hofee/data/data/label_output"
 
 
@@ -7,9 +7,12 @@ runner:
   name: debug
   root_dir: experiments
   generate:
+    port: 5003
+    from: 3000
+    to: -1 # -1 means all
     object_dir: /media/hofee/data/data/scaled_object_meshes
     table_model_path: /media/hofee/data/data/others/table.obj
-    output_dir: /media/hofee/repository/new_nbv_reconstruction_data_512
+    output_dir: /media/hofee/repository/new_full_data
     binocular_vision: true
     plane_size: 10
     max_views: 512
@@ -35,7 +35,7 @@ class StrategyGenerator(Runner):
 
     def run(self):
         dataset_name_list = ConfigManager.get("runner", "generate", "dataset_list")
-        voxel_threshold, overlap_threshold = ConfigManager.get("runner","generate","voxel_threshold"), ConfigManager.get("runner","generate","overlap_threshold")
+        voxel_threshold, soft_overlap_threshold, hard_overlap_threshold = ConfigManager.get("runner","generate","voxel_threshold"), ConfigManager.get("runner","generate","soft_overlap_threshold"), ConfigManager.get("runner","generate","hard_overlap_threshold")
         for dataset_idx in range(len(dataset_name_list)):
             dataset_name = dataset_name_list[dataset_idx]
             status_manager.set_progress("generate_strategy", "strategy_generator", "dataset", dataset_idx, len(dataset_name_list))
@@ -47,23 +47,23 @@ class StrategyGenerator(Runner):
             if to_idx == -1:
                 to_idx = len(scene_name_list)
             cnt = 0
-            total = len(scene_name_list)
+            total = len(scene_name_list[from_idx:to_idx])
             Log.info(f"Processing Dataset: {dataset_name}, From: {from_idx}, To: {to_idx}")
             for scene_name in scene_name_list[from_idx:to_idx]:
                 Log.info(f"({dataset_name})Processing [{cnt}/{total}]: {scene_name}")
                 status_manager.set_progress("generate_strategy", "strategy_generator", "scene", cnt, total)
-                diag = DataLoadUtil.get_bbox_diag(model_dir, scene_name)
-                voxel_threshold = diag*0.02
+                #diag = DataLoadUtil.get_bbox_diag(model_dir, scene_name)
+                voxel_threshold = 0.002
                 status_manager.set_status("generate_strategy", "strategy_generator", "voxel_threshold", voxel_threshold)
                 output_label_path = DataLoadUtil.get_label_path(root_dir, scene_name,0)
                 if os.path.exists(output_label_path) and not self.overwrite:
                     Log.info(f"Scene <{scene_name}> Already Exists, Skip")
                     cnt += 1
                     continue
-                try:
-                    self.generate_sequence(root_dir, model_dir, scene_name,voxel_threshold, overlap_threshold)
-                except Exception as e:
-                    Log.error(f"Scene <{scene_name}> Failed, Error: {e}")
+
+                self.generate_sequence(root_dir, model_dir, scene_name,voxel_threshold, soft_overlap_threshold, hard_overlap_threshold)
+                # except Exception as e:
+                #     Log.error(f"Scene <{scene_name}> Failed, Error: {e}")
                 cnt += 1
             status_manager.set_progress("generate_strategy", "strategy_generator", "scene", total, total)
         status_manager.set_progress("generate_strategy", "strategy_generator", "dataset", len(dataset_name_list), len(dataset_name_list))
@@ -76,7 +76,7 @@ class StrategyGenerator(Runner):
     def load_experiment(self, backup_name=None):
         super().load_experiment(backup_name)
 
-    def generate_sequence(self, root, model_dir, scene_name, voxel_threshold, overlap_threshold):
+    def generate_sequence(self, root, model_dir, scene_name, voxel_threshold, soft_overlap_threshold, hard_overlap_threshold):
         status_manager.set_status("generate_strategy", "strategy_generator", "scene", scene_name)
         frame_num = DataLoadUtil.get_scene_seq_length(root, scene_name)
         model_points_normals = DataLoadUtil.load_points_normals(root, scene_name)
@@ -84,47 +84,83 @@ class StrategyGenerator(Runner):
         down_sampled_model_pts = PtsUtil.voxel_downsample_point_cloud(model_pts, voxel_threshold)
         display_table_info = DataLoadUtil.get_display_table_info(root, scene_name)
         radius = display_table_info["radius"]
-        top = DataLoadUtil.get_display_table_top(root, scene_name)
-        scan_points = ReconstructionUtil.generate_scan_points(display_table_top=top,display_table_radius=radius)
+        scan_points_path = os.path.join(root,scene_name, "scan_points.txt")
+        if os.path.exists(scan_points_path):
+            scan_points = np.loadtxt(scan_points_path)
+        else:
+            scan_points = ReconstructionUtil.generate_scan_points(display_table_top=0,display_table_radius=radius)
+            np.savetxt(scan_points_path, scan_points)
         pts_list = []
         scan_points_indices_list = []
+        non_zero_cnt = 0
         for frame_idx in range(frame_num):
-            if self.load_pts and os.path.exists(os.path.join(root,scene_name, "pts", f"{frame_idx}.txt")):
-                sampled_point_cloud = np.loadtxt(os.path.join(root,scene_name, "pts", f"{frame_idx}.txt"))
-                indices = np.loadtxt(os.path.join(root,scene_name, "pts", f"{frame_idx}_indices.txt")).astype(np.int32).tolist()
             status_manager.set_progress("generate_strategy", "strategy_generator", "loading frame", frame_idx, frame_num)
+            pts_path = os.path.join(root,scene_name, "pts", f"{frame_idx}.txt")
+            if self.load_pts and pts_path:
+                with open(pts_path, 'r') as f:
+                    pts_str = f.read()
+                if pts_str == "":
+                    sampled_point_cloud = np.asarray([])
+                else:
+                    sampled_point_cloud = np.loadtxt(pts_path)
+                indices_path = os.path.join(root,scene_name, "covered_scan_pts", f"{frame_idx}_indices.txt")
+                with open(indices_path, 'r') as f:
+                    indices_str = f.read()
+                if indices_str == "":
+                    indices = []
+                else:
+                    indices = np.loadtxt(indices_path).astype(np.int32).tolist()
+                    if isinstance(indices, int):
+                        indices = [indices]
 
                 pts_list.append(sampled_point_cloud)
+                if sampled_point_cloud.shape[0] != 0:
+                    non_zero_cnt += 1
                 scan_points_indices_list.append(indices)
 
             else:
                 path = DataLoadUtil.get_path(root, scene_name, frame_idx)
                 cam_params = DataLoadUtil.load_cam_info(path, binocular=True)
-                status_manager.set_progress("generate_strategy", "strategy_generator", "loading frame", frame_idx, frame_num)
                 point_cloud, display_table_pts = DataLoadUtil.get_target_point_cloud_world_from_path(path, binocular=True, get_display_table_pts=True)
 
+                if point_cloud.shape[0] != 0:
                     sampled_point_cloud = ReconstructionUtil.filter_points(point_cloud, model_points_normals, cam_pose=cam_params["cam_to_world"], voxel_size=voxel_threshold, theta=self.filter_degree)
+                    non_zero_cnt += 1
+                else:
+                    sampled_point_cloud = point_cloud
 
                 covered_pts, indices = ReconstructionUtil.compute_covered_scan_points(scan_points, display_table_pts)
 
                 if self.save_pts:
                     pts_dir = os.path.join(root,scene_name, "pts")
-                    covered_pts_dir = os.path.join(pts_dir, "covered_scan_pts")
+                    #display_dir = os.path.join(root,scene_name, "display_pts")
+                    covered_pts_dir = os.path.join(root,scene_name, "covered_scan_pts")
                     if not os.path.exists(pts_dir):
                         os.makedirs(pts_dir)
                     if not os.path.exists(covered_pts_dir):
                         os.makedirs(covered_pts_dir)
+                    # if not os.path.exists(display_dir):
+                    #     os.makedirs(display_dir)
                     np.savetxt(os.path.join(pts_dir, f"{frame_idx}.txt"), sampled_point_cloud)
+                    #np.savetxt(os.path.join(display_dir, f"{frame_idx}.txt"), display_table_pts)
                     np.savetxt(os.path.join(covered_pts_dir, f"{frame_idx}.txt"), covered_pts)
-                    np.savetxt(os.path.join(pts_dir, f"{frame_idx}_indices.txt"), indices)
+                    np.savetxt(os.path.join(covered_pts_dir, f"{frame_idx}_indices.txt"), indices)
                 pts_list.append(sampled_point_cloud)
                 scan_points_indices_list.append(indices)
         status_manager.set_progress("generate_strategy", "strategy_generator", "loading frame", frame_num, frame_num)
 
-        seq_num = min(self.seq_num, len(pts_list))
-        init_view_list = range(seq_num)
+        seq_num = min(self.seq_num, non_zero_cnt)
+        init_view_list = []
+        for i in range(seq_num):
+            if pts_list[i].shape[0] < 100:
+                continue
+            init_view_list.append(i)
 
         seq_idx = 0
         for init_view in init_view_list:
            status_manager.set_progress("generate_strategy", "strategy_generator", "computing sequence", seq_idx, len(init_view_list))
-            limited_useful_view, _, _ = ReconstructionUtil.compute_next_best_view_sequence_with_overlap(down_sampled_model_pts, pts_list,init_view=init_view, threshold=voxel_threshold, overlap_threshold=overlap_threshold, status_info=self.status_info)
+            limited_useful_view, _, _ = ReconstructionUtil.compute_next_best_view_sequence_with_overlap(down_sampled_model_pts, pts_list, scan_points_indices_list = scan_points_indices_list,init_view=init_view,
+                                                                                                        threshold=voxel_threshold, soft_overlap_threshold=soft_overlap_threshold, hard_overlap_threshold= hard_overlap_threshold, scan_points_threshold=10, status_info=self.status_info)
             data_pairs = self.generate_data_pairs(limited_useful_view)
             seq_save_data = {
                 "data_pairs": data_pairs,
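Two details in this hunk are worth spelling out. Scan points are now cached per scene in scan_points.txt, so reruns reuse the same table scan grid instead of regenerating it. And cached point clouds are read as raw text before calling np.loadtxt, because np.loadtxt emits a warning on an empty file; checking the string first returns a clean empty array instead. A self-contained sketch of that load pattern (the helper name is illustrative):

import numpy as np

def loadtxt_or_empty(path):
    # mirrors the empty-file handling above: np.loadtxt warns on an empty file,
    # so read the raw text first and return an explicit empty array instead
    with open(path, "r") as f:
        if f.read() == "":
            return np.asarray([])
    return np.loadtxt(path)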
@@ -322,7 +322,7 @@ class DataLoadUtil:
         random_downsample_N=65536,
         voxel_size=0.005,
         target_mask_label=(0, 255, 0, 255),
-        display_table_mask_label=(255, 0, 0, 255),
+        display_table_mask_label=(0, 0, 255, 255),
         get_display_table_pts=False
     ):
         cam_info = DataLoadUtil.load_cam_info(path, binocular=binocular)
@@ -369,6 +369,12 @@ class DataLoadUtil:
                 mask_R,
                 display_table_mask_label,
             )["points_world"]
+            display_pts_L = PtsUtil.random_downsample_point_cloud(
+                display_pts_L, random_downsample_N
+            )
+            point_cloud_R = PtsUtil.random_downsample_point_cloud(
+                display_pts_R, random_downsample_N
+            )
             display_pts_overlap = DataLoadUtil.get_overlapping_points(
                 display_pts_L, display_pts_R, voxel_size
             )
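Both binocular crops of the display table are now pushed through random_downsample_point_cloud before the left/right overlap check, which keeps the nearest-neighbour matching cheap at random_downsample_N = 65536 points (note that the right-eye result is stored in point_cloud_R while get_overlapping_points still receives display_pts_R). A standalone illustration of why the downsampling matters, not the repo's get_overlapping_points itself:

import numpy as np
from scipy.spatial import cKDTree

rng = np.random.default_rng(0)
left = rng.random((200000, 3))               # stand-ins for the two display-table crops
right = rng.random((200000, 3))

def downsample(points, n=65536):
    # same idea as PtsUtil.random_downsample_point_cloud: indices drawn with replacement
    return points[np.random.choice(len(points), n, replace=True)]

left_s, right_s = downsample(left), downsample(right)
dists, _ = cKDTree(right_s).query(left_s)    # overlap test on 65k points instead of 200k
print(np.sum(dists < 0.005))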
@@ -19,6 +19,8 @@ class PtsUtil:
 
     @staticmethod
     def random_downsample_point_cloud(point_cloud, num_points):
+        if point_cloud.shape[0] == 0:
+            return point_cloud
         idx = np.random.choice(len(point_cloud), num_points, replace=True)
         return point_cloud[idx]
 
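The new guard matters because np.random.choice raises a ValueError when asked to sample from zero items, so an empty frame (now possible with the empty-file handling above) would crash the downsampler. A quick demonstration:

import numpy as np

empty = np.empty((0, 3))
try:
    np.random.choice(len(empty), 10, replace=True)   # len(empty) == 0
except ValueError:
    print("cannot sample from an empty point cloud")  # the guard returns it unchanged instead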
@@ -8,7 +8,7 @@ class ReconstructionUtil:
     def compute_coverage_rate(target_point_cloud, combined_point_cloud, threshold=0.01):
         kdtree = cKDTree(combined_point_cloud)
         distances, _ = kdtree.query(target_point_cloud)
-        covered_points = np.sum(distances < threshold)
+        covered_points = np.sum(distances < threshold*2)
         coverage_rate = covered_points / target_point_cloud.shape[0]
         return coverage_rate
 
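Relaxing the coverage test to twice the voxel threshold means that with the fixed voxel_threshold of 0.002 used above, a target point counts as covered when the reconstruction comes within 0.004 of it. A worked example:

import numpy as np
from scipy.spatial import cKDTree

target = np.array([[0.0, 0.0, 0.003]])       # 3 mm away from the nearest reconstructed point
combined = np.array([[0.0, 0.0, 0.0]])
d, _ = cKDTree(combined).query(target)
print(np.sum(d < 0.002), np.sum(d < 0.002 * 2))   # 0 1 -> covered only under the relaxed test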
@@ -17,6 +17,9 @@ class ReconstructionUtil:
         kdtree = cKDTree(combined_point_cloud)
         distances, _ = kdtree.query(new_point_cloud)
         overlapping_points = np.sum(distances < threshold)
+        if new_point_cloud.shape[0] == 0:
+            overlap_rate = 0
+        else:
             overlap_rate = overlapping_points / new_point_cloud.shape[0]
         return overlap_rate
 
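Guarding on new_point_cloud.shape[0] avoids a 0/0 division (a nan with a RuntimeWarning) when a candidate view contributes no points, which can now happen since empty frames are kept in pts_list. A compact paraphrase of the patched compute_overlap_rate:

import numpy as np
from scipy.spatial import cKDTree

def overlap_rate(new_pts, combined_pts, threshold=0.01):
    # paraphrase of the patched function: empty candidate views score 0 instead of nan
    if new_pts.shape[0] == 0:
        return 0
    distances, _ = cKDTree(combined_pts).query(new_pts)
    return np.sum(distances < threshold) / new_pts.shape[0]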
@@ -43,12 +46,23 @@ class ReconstructionUtil:
                 best_view = view_index
         return best_view, best_coverage_increase
 
+    @staticmethod
+    def get_new_added_points(old_combined_pts, new_pts, threshold=0.005):
+        if old_combined_pts.size == 0:
+            return new_pts
+        if new_pts.size == 0:
+            return np.array([])
+
+        tree = cKDTree(old_combined_pts)
+        distances, _ = tree.query(new_pts, k=1)
+        new_added_points = new_pts[distances > threshold]
+        return new_added_points
+
     @staticmethod
-    def compute_next_best_view_sequence_with_overlap(target_point_cloud, point_cloud_list, scan_points_indices_list, threshold=0.01, overlap_threshold=0.3, init_view = 0, status_info=None):
+    def compute_next_best_view_sequence_with_overlap(target_point_cloud, point_cloud_list, scan_points_indices_list, threshold=0.01, soft_overlap_threshold=0.5, hard_overlap_threshold=0.7, init_view = 0, scan_points_threshold=5, status_info=None):
         selected_views = [point_cloud_list[init_view]]
         combined_point_cloud = np.vstack(selected_views)
-        combined_scan_points_indices = scan_points_indices_list[init_view]
+        history_indices = [scan_points_indices_list[init_view]]
         down_sampled_combined_point_cloud = PtsUtil.voxel_downsample_point_cloud(combined_point_cloud,threshold)
         new_coverage = ReconstructionUtil.compute_coverage_rate(target_point_cloud, down_sampled_combined_point_cloud, threshold)
         current_coverage = new_coverage
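The new get_new_added_points helper keeps only the points of a new view that lie farther than threshold from everything already accumulated. A self-contained usage sketch with toy coordinates:

import numpy as np
from scipy.spatial import cKDTree

old_combined_pts = np.array([[0.0, 0.0, 0.0], [0.01, 0.0, 0.0]])
new_pts = np.array([[0.001, 0.0, 0.0], [0.5, 0.0, 0.0]])

distances, _ = cKDTree(old_combined_pts).query(new_pts, k=1)
print(new_pts[distances > 0.005])   # only [[0.5, 0.0, 0.0]] is genuinely new at threshold=0.005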
@@ -62,10 +76,17 @@ class ReconstructionUtil:
            best_coverage_increase = -1
 
            for view_index in remaining_views:
+                if point_cloud_list[view_index].shape[0] == 0:
+                    continue
+
                if selected_views:
                    new_scan_points_indices = scan_points_indices_list[view_index]
-                    if not ReconstructionUtil.check_scan_points_overlap(combined_scan_points_indices, new_scan_points_indices):
+                    if not ReconstructionUtil.check_scan_points_overlap(history_indices, new_scan_points_indices, scan_points_threshold):
+                        overlap_threshold = hard_overlap_threshold
+                    else:
+                        overlap_threshold = soft_overlap_threshold
 
                    combined_old_point_cloud = np.vstack(selected_views)
                    down_sampled_old_point_cloud = PtsUtil.voxel_downsample_point_cloud(combined_old_point_cloud,threshold)
                    down_sampled_new_view_point_cloud = PtsUtil.voxel_downsample_point_cloud(point_cloud_list[view_index],threshold)
@@ -88,7 +109,7 @@ class ReconstructionUtil:
                    break
            selected_views.append(point_cloud_list[best_view])
            remaining_views.remove(best_view)
-            combined_scan_points_indices = ReconstructionUtil.combine_scan_points_indices(combined_scan_points_indices, scan_points_indices_list[best_view])
+            history_indices.append(scan_points_indices_list[best_view])
            current_coverage += best_coverage_increase
            cnt_processed_view += 1
            if status_info is not None:
@@ -110,7 +131,7 @@ class ReconstructionUtil:
        return view_sequence, remaining_views, down_sampled_combined_point_cloud
 
    @staticmethod
-    def filter_points(points, points_normals, cam_pose, voxel_size=0.005, theta=45):
+    def filter_points(points, points_normals, cam_pose, voxel_size=0.005, theta=75):
        sampled_points = PtsUtil.voxel_downsample_point_cloud(points, voxel_size)
        kdtree = cKDTree(points_normals[:,:3])
        _, indices = kdtree.query(sampled_points)
@@ -143,6 +164,7 @@ class ReconstructionUtil:
 
    @staticmethod
    def compute_covered_scan_points(scan_points, point_cloud, threshold=0.01):
+
        tree = cKDTree(point_cloud)
        covered_points = []
        indices = []
@@ -153,10 +175,10 @@ class ReconstructionUtil:
        return covered_points, indices
 
    @staticmethod
-    def check_scan_points_overlap(indices1, indices2, threshold=5):
-        return len(set(indices1).intersection(set(indices2))) > threshold
+    def check_scan_points_overlap(history_indices, indices2, threshold=5):
+        for indices1 in history_indices:
+            if len(set(indices1).intersection(set(indices2))) >= threshold:
+                return True
+        return False
 
 
-    @staticmethod
-    def combine_scan_points_indices(indices1, indices2):
-        combined_indices = set(indices1) | set(indices2)
-        return sorted(combined_indices)
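check_scan_points_overlap now compares the candidate's scan-point indices against every previously selected view's indices instead of one merged set (which is why combine_scan_points_indices goes away), and the comparison is >= threshold rather than >. A standalone usage example with the patched logic copied inline:

def check_scan_points_overlap(history_indices, indices2, threshold=5):
    # copied from the patched staticmethod so the example runs standalone
    for indices1 in history_indices:
        if len(set(indices1).intersection(set(indices2))) >= threshold:
            return True
    return False

history = [[1, 2, 3, 4, 5], [10, 11, 12]]
print(check_scan_points_overlap(history, [3, 4, 5, 6, 7]))   # False: only 3 indices shared
print(check_scan_points_overlap(history, [1, 2, 3, 4, 5]))   # True: 5 shared >= threshold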