update reconstruction

parent d098c9f951
commit bb9b3f81c3
@@ -28,8 +28,8 @@ runner:
   datasets:
     OmniObject3d:
       #"/media/hofee/data/data/temp_output"
-      root_dir: "/media/hofee/repository/new_full_box_data"
-      model_dir: "/media/hofee/data/data/scaled_object_meshes"
+      root_dir: "C:\\Document\\Local Project\\nbv_rec\\nbv_reconstruction\\test\\test_sample"
+      model_dir: "H:\\AI\\Datasets\\scaled_object_meshes"
       from: 0
       to: -1 # -1 means end
       #output_dir: "/media/hofee/data/data/label_output"
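The new root_dir and model_dir values are Windows paths, so the backslashes are doubled inside the double-quoted YAML strings. For context, a minimal sketch of how this dataset block and its from/to indices might be consumed is shown below; the loader function, the exact key path into the YAML, and the use of PyYAML are assumptions rather than code from this repository, while the slicing mirrors the scene_list[from_idx:to_idx] usage in the preprocessing script further down.

import os
import yaml  # assumes PyYAML is available

def load_omniobject3d_config(config_path):
    # Hypothetical helper: the key path below is a guess at the YAML layout.
    with open(config_path, "r") as f:
        cfg = yaml.safe_load(f)
    return cfg["runner"]["generate"]["datasets"]["OmniObject3d"]

def list_scenes(dataset_cfg):
    scene_list = sorted(os.listdir(dataset_cfg["root_dir"]))
    from_idx = dataset_cfg["from"]
    to_idx = dataset_cfg["to"]
    if to_idx == -1:  # -1 means "until the end", per the config comment
        to_idx = len(scene_list)
    return scene_list[from_idx:to_idx]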
@@ -31,22 +31,6 @@ def save_scan_points(root, scene, scan_points: np.ndarray):
     scan_points_path = os.path.join(root,scene, "scan_points.txt")
     save_np_pts(scan_points_path, scan_points)
 
-
-def old_get_world_points(depth, cam_intrinsic, cam_extrinsic):
-    h, w = depth.shape
-    i, j = np.meshgrid(np.arange(w), np.arange(h), indexing="xy")
-    # ----- Debug Trace ----- #
-    import ipdb; ipdb.set_trace()
-    # ------------------------ #
-    z = depth
-    x = (i - cam_intrinsic[0, 2]) * z / cam_intrinsic[0, 0]
-    y = (j - cam_intrinsic[1, 2]) * z / cam_intrinsic[1, 1]
-    points_camera = np.stack((x, y, z), axis=-1).reshape(-1, 3)
-    points_camera_aug = np.concatenate((points_camera, np.ones((points_camera.shape[0], 1))), axis=-1)
-    points_camera_world = np.dot(cam_extrinsic, points_camera_aug.T).T[:, :3]
-
-    return points_camera_world
-
 def get_world_points(depth, mask, cam_intrinsic, cam_extrinsic):
     z = depth[mask]
     i, j = np.nonzero(mask)
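The surviving get_world_points back-projects only the pixels selected by mask, but its body is cut off by the hunk boundary above. A plausible completion is sketched here, reusing the pinhole math from the deleted old_get_world_points (note that np.nonzero returns row indices first, so the column index j drives x and the row index i drives y); treat it as an illustration, not the exact code in this file.

import numpy as np

def get_world_points_sketch(depth, mask, cam_intrinsic, cam_extrinsic):
    # Back-project only the masked pixels, same math as the deleted function.
    z = depth[mask]
    i, j = np.nonzero(mask)  # i: row (v), j: column (u)
    x = (j - cam_intrinsic[0, 2]) * z / cam_intrinsic[0, 0]
    y = (i - cam_intrinsic[1, 2]) * z / cam_intrinsic[1, 1]
    points_camera = np.stack((x, y, z), axis=-1).reshape(-1, 3)
    # Homogeneous transform from camera frame to world frame.
    points_camera_aug = np.concatenate(
        (points_camera, np.ones((points_camera.shape[0], 1))), axis=-1)
    return (cam_extrinsic @ points_camera_aug.T).T[:, :3]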
@@ -74,7 +58,7 @@ def get_scan_points_indices(scan_points, mask, display_table_mask_label, cam_int
     return selected_points_indices
 
 
-def save_scene_data(root, scene, scene_idx=0, scene_total=1):
+def save_scene_data(root, scene, scene_idx=0, scene_total=1,file_type="txt"):
 
     ''' configuration '''
     target_mask_label = (0, 255, 0, 255)
@@ -128,8 +112,9 @@ def save_scene_data(root, scene, scene_idx=0, scene_total=1):
             sampled_target_points_L, sampled_target_points_R, voxel_size
         )
 
+        if has_points:
         has_points = target_points.shape[0] > 0
 
         if has_points:
             points_normals = DataLoadUtil.load_points_normals(root, scene, display_table_as_world_space_origin=True)
             target_points = PtsUtil.filter_points(
@@ -145,8 +130,8 @@ def save_scene_data(root, scene, scene_idx=0, scene_total=1):
         if not has_points:
             target_points = np.zeros((0, 3))
 
-        save_target_points(root, scene, frame_id, target_points)
-        save_scan_points_indices(root, scene, frame_id, scan_points_indices)
+        save_target_points(root, scene, frame_id, target_points, file_type=file_type)
+        save_scan_points_indices(root, scene, frame_id, scan_points_indices, file_type=file_type)
 
     save_scan_points(root, scene, scan_points) # The "done" flag of scene preprocess
 
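save_target_points and save_scan_points_indices are defined outside this diff, so the handling of the new file_type argument is not visible here. The sketch below shows one way such helpers could branch between the old text output and the new binary output; the function bodies and directory names are assumptions, chosen to be consistent with the "pts" and "scan_points_indices" folders that the strategy generator reads as .npy files further down.

import os
import numpy as np

def save_np_pts_sketch(file_path_base, pts, file_type="txt"):
    # Hypothetical helper: "txt" keeps the old behaviour, "npy" is the new fast path.
    if file_type == "txt":
        np.savetxt(file_path_base + ".txt", pts)
    elif file_type == "npy":
        np.save(file_path_base + ".npy", pts)
    else:
        raise ValueError(f"unsupported file_type: {file_type}")

def save_target_points_sketch(root, scene, frame_id, target_points, file_type="txt"):
    # The "pts" directory name matches what the strategy generator loads below;
    # everything else here is illustrative.
    out_dir = os.path.join(root, scene, "pts")
    os.makedirs(out_dir, exist_ok=True)
    save_np_pts_sketch(os.path.join(out_dir, str(frame_id)), target_points, file_type)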
@@ -168,7 +153,7 @@ if __name__ == "__main__":
     total = to_idx - from_idx
     for scene in scene_list[from_idx:to_idx]:
         start = time.time()
-        save_scene_data(root, scene, cnt, total)
+        save_scene_data(root, scene, cnt, total, file_type="npy")
         cnt+=1
         end = time.time()
         print(f"Time cost: {end-start}")
@@ -84,27 +84,38 @@ class StrategyGenerator(Runner):
         pts_list = []
         scan_points_indices_list = []
         non_zero_cnt = 0
 
         for frame_idx in range(frame_num):
             status_manager.set_progress("generate_strategy", "strategy_generator", "loading frame", frame_idx, frame_num)
-            pts_path = os.path.join(root,scene_name, "target_pts", f"{frame_idx}.txt")
-            sampled_point_cloud = np.loadtxt(pts_path)
-            indices = None # ReconstructionUtil.compute_covered_scan_points(scan_points, display_table_pts)
+            pts_path = os.path.join(root,scene_name, "pts", f"{frame_idx}.npy")
+            idx_path = os.path.join(root,scene_name, "scan_points_indices", f"{frame_idx}.npy")
+            point_cloud = np.load(pts_path)
+            sampled_point_cloud = PtsUtil.voxel_downsample_point_cloud(point_cloud, voxel_threshold)
+            indices = np.load(idx_path)
             pts_list.append(sampled_point_cloud)
 
             scan_points_indices_list.append(indices)
+            if sampled_point_cloud.shape[0] > 0:
+                non_zero_cnt += 1
         status_manager.set_progress("generate_strategy", "strategy_generator", "loading frame", frame_num, frame_num)
 
         seq_num = min(self.seq_num, non_zero_cnt)
         init_view_list = []
-        for i in range(seq_num):
-            if pts_list[i].shape[0] < 100:
-                continue
-            init_view_list.append(i)
+        idx = 0
+        while len(init_view_list) < seq_num:
+            if pts_list[idx].shape[0] > 100:
+                init_view_list.append(idx)
+            idx += 1
 
         seq_idx = 0
+        import time
         for init_view in init_view_list:
             status_manager.set_progress("generate_strategy", "strategy_generator", "computing sequence", seq_idx, len(init_view_list))
+            start = time.time()
             limited_useful_view, _, _ = ReconstructionUtil.compute_next_best_view_sequence_with_overlap(down_sampled_model_pts, pts_list, scan_points_indices_list = scan_points_indices_list,init_view=init_view,
                                                                 threshold=voxel_threshold, soft_overlap_threshold=soft_overlap_threshold, hard_overlap_threshold= hard_overlap_threshold, scan_points_threshold=10, status_info=self.status_info)
+            end = time.time()
+            print(f"Time: {end-start}")
             data_pairs = self.generate_data_pairs(limited_useful_view)
             seq_save_data = {
                 "data_pairs": data_pairs,
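PtsUtil.voxel_downsample_point_cloud is now called on every loaded frame here (and throughout ReconstructionUtil below), but its implementation is outside this diff. A common implementation, shown as an assumed sketch, quantizes points to voxel indices and keeps one representative point per occupied voxel; the real PtsUtil method may differ in how it picks the representative.

import numpy as np

def voxel_downsample_point_cloud_sketch(points, voxel_size):
    # Assumed implementation: keep the first point that falls into each occupied voxel.
    if points.shape[0] == 0:
        return points
    voxel_indices = np.floor(points / voxel_size).astype(np.int64)
    _, unique_idx = np.unique(voxel_indices, axis=0, return_index=True)
    return points[np.sort(unique_idx)]

Downsampling each frame once at load time, as this hunk does, avoids repeating the reduction inside the per-candidate scoring loop of the sequence computation.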
@@ -23,28 +23,6 @@ class ReconstructionUtil:
         overlap_rate = overlapping_points / new_point_cloud.shape[0]
         return overlap_rate
 
-    @staticmethod
-    def combine_point_with_view_sequence(point_list, view_sequence):
-        selected_views = []
-        for view_index, _ in view_sequence:
-            selected_views.append(point_list[view_index])
-        return np.vstack(selected_views)
-
-    @staticmethod
-    def compute_next_view_coverage_list(views, combined_point_cloud, target_point_cloud, threshold=0.01):
-        best_view = None
-        best_coverage_increase = -1
-        current_coverage = ReconstructionUtil.compute_coverage_rate(target_point_cloud, combined_point_cloud, threshold)
-
-        for view_index, view in enumerate(views):
-            candidate_views = combined_point_cloud + [view]
-            down_sampled_combined_point_cloud = PtsUtil.voxel_downsample_point_cloud(candidate_views, threshold)
-            new_coverage = ReconstructionUtil.compute_coverage_rate(target_point_cloud, down_sampled_combined_point_cloud, threshold)
-            coverage_increase = new_coverage - current_coverage
-            if coverage_increase > best_coverage_increase:
-                best_coverage_increase = coverage_increase
-                best_view = view_index
-        return best_view, best_coverage_increase
-
     @staticmethod
     def get_new_added_points(old_combined_pts, new_pts, threshold=0.005):
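compute_coverage_rate and compute_overlap_rate are only partially visible in this diff; the kept context above is the tail of compute_overlap_rate (overlapping points divided by the size of the new cloud). A typical nearest-neighbour formulation is sketched below using scipy's cKDTree; the metric actually used in ReconstructionUtil may differ, so treat this as an assumed illustration.

import numpy as np
from scipy.spatial import cKDTree

def compute_coverage_rate_sketch(target_pts, combined_pts, threshold):
    # Fraction of target points that have at least one reconstructed point within threshold.
    if combined_pts.shape[0] == 0:
        return 0.0
    dists, _ = cKDTree(combined_pts).query(target_pts, k=1)
    return float(np.mean(dists < threshold))

def compute_overlap_rate_sketch(new_pts, combined_pts, threshold):
    # Fraction of the new view's points that land near the already-combined cloud,
    # matching "overlapping_points / new_point_cloud.shape[0]" in the kept context.
    if new_pts.shape[0] == 0 or combined_pts.shape[0] == 0:
        return 0.0
    dists, _ = cKDTree(combined_pts).query(new_pts, k=1)
    return float(np.mean(dists < threshold))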
@@ -60,54 +38,70 @@ class ReconstructionUtil:
 
     @staticmethod
     def compute_next_best_view_sequence_with_overlap(target_point_cloud, point_cloud_list, scan_points_indices_list, threshold=0.01, soft_overlap_threshold=0.5, hard_overlap_threshold=0.7, init_view = 0, scan_points_threshold=5, status_info=None):
-        selected_views = [point_cloud_list[init_view]]
-        combined_point_cloud = np.vstack(selected_views)
+        selected_views = [init_view]
+        combined_point_cloud = point_cloud_list[init_view]
         history_indices = [scan_points_indices_list[init_view]]
-        down_sampled_combined_point_cloud = PtsUtil.voxel_downsample_point_cloud(combined_point_cloud,threshold)
-        new_coverage = ReconstructionUtil.compute_coverage_rate(target_point_cloud, down_sampled_combined_point_cloud, threshold)
+        max_rec_pts = np.vstack(point_cloud_list)
+        downsampled_max_rec_pts = PtsUtil.voxel_downsample_point_cloud(max_rec_pts, threshold)
+
+        max_rec_pts_num = downsampled_max_rec_pts.shape[0]
+        max_rec_pts_coverage = ReconstructionUtil.compute_coverage_rate(target_point_cloud, downsampled_max_rec_pts, threshold)
+
+        new_coverage = ReconstructionUtil.compute_coverage_rate(downsampled_max_rec_pts, combined_point_cloud, threshold)
         current_coverage = new_coverage
         remaining_views = list(range(len(point_cloud_list)))
         view_sequence = [(init_view, current_coverage)]
         cnt_processed_view = 0
         remaining_views.remove(init_view)
+        curr_rec_pts_num = combined_point_cloud.shape[0]
 
+        import time
         while remaining_views:
             best_view = None
             best_coverage_increase = -1
+            best_combined_point_cloud = None
 
             for view_index in remaining_views:
                 if point_cloud_list[view_index].shape[0] == 0:
                     continue
 
                 if selected_views:
                     new_scan_points_indices = scan_points_indices_list[view_index]
 
                     if not ReconstructionUtil.check_scan_points_overlap(history_indices, new_scan_points_indices, scan_points_threshold):
                         overlap_threshold = hard_overlap_threshold
                     else:
                         overlap_threshold = soft_overlap_threshold
-                    combined_old_point_cloud = np.vstack(selected_views)
-                    down_sampled_old_point_cloud = PtsUtil.voxel_downsample_point_cloud(combined_old_point_cloud,threshold)
-                    down_sampled_new_view_point_cloud = PtsUtil.voxel_downsample_point_cloud(point_cloud_list[view_index],threshold)
-                    overlap_rate = ReconstructionUtil.compute_overlap_rate(down_sampled_new_view_point_cloud,down_sampled_old_point_cloud, threshold)
+                    start = time.time()
+                    overlap_rate = ReconstructionUtil.compute_overlap_rate(point_cloud_list[view_index],combined_point_cloud, threshold)
+                    end = time.time()
+                    # print(f"overlap_rate Time: {end-start}")
                     if overlap_rate < overlap_threshold:
                         continue
 
-                candidate_views = selected_views + [point_cloud_list[view_index]]
-                combined_point_cloud = np.vstack(candidate_views)
-                down_sampled_combined_point_cloud = PtsUtil.voxel_downsample_point_cloud(combined_point_cloud,threshold)
-                new_coverage = ReconstructionUtil.compute_coverage_rate(target_point_cloud, down_sampled_combined_point_cloud, threshold)
+                start = time.time()
+                new_combined_point_cloud = np.vstack([combined_point_cloud, point_cloud_list[view_index]])
+                new_downsampled_combined_point_cloud = PtsUtil.voxel_downsample_point_cloud(new_combined_point_cloud,threshold)
+                new_coverage = ReconstructionUtil.compute_coverage_rate(downsampled_max_rec_pts, new_downsampled_combined_point_cloud, threshold)
+                end = time.time()
+                #print(f"compute_coverage_rate Time: {end-start}")
                 coverage_increase = new_coverage - current_coverage
                 if coverage_increase > best_coverage_increase:
                     best_coverage_increase = coverage_increase
                     best_view = view_index
+                    best_combined_point_cloud = new_downsampled_combined_point_cloud
 
 
             if best_view is not None:
-                if best_coverage_increase <=3e-3:
+                if best_coverage_increase <=1e-3:
                     break
-                selected_views.append(point_cloud_list[best_view])
+                selected_views.append(best_view)
+                best_rec_pts_num = best_combined_point_cloud.shape[0]
+                print(f"Current rec pts num: {curr_rec_pts_num}, Best rec pts num: {best_rec_pts_num}, Max rec pts num: {max_rec_pts_num}")
+                print(f"Current coverage: {current_coverage}, Best coverage increase: {best_coverage_increase}, Max coverage: {max_rec_pts_coverage}")
 
+                curr_rec_pts_num = best_rec_pts_num
+                combined_point_cloud = best_combined_point_cloud
                 remaining_views.remove(best_view)
                 history_indices.append(scan_points_indices_list[best_view])
                 current_coverage += best_coverage_increase
@@ -123,12 +117,15 @@ class ReconstructionUtil:
 
             else:
                 break
+        # ----- Debug Trace ----- #
+        import ipdb; ipdb.set_trace()
+        # ------------------------ #
         if status_info is not None:
             sm = status_info["status_manager"]
             app_name = status_info["app_name"]
             runner_name = status_info["runner_name"]
             sm.set_progress(app_name, runner_name, "processed view", len(point_cloud_list), len(point_cloud_list))
-        return view_sequence, remaining_views, down_sampled_combined_point_cloud
+        return view_sequence, remaining_views, combined_point_cloud
 
 
     @staticmethod
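Taken together, the rewritten compute_next_best_view_sequence_with_overlap now builds a fixed reference cloud (downsampled_max_rec_pts) from all candidate views, scores every candidate combination against that reference, and caches the best candidate's downsampled cloud so the accepted view's cloud is reused instead of re-stacking and re-downsampling every selected view each iteration. A condensed, standalone sketch of that greedy loop follows; the downsample and coverage_rate callables stand in for the PtsUtil/ReconstructionUtil helpers, and the scan-point overlap bookkeeping is omitted.

import numpy as np

def greedy_view_sequence_sketch(point_cloud_list, init_view, threshold,
                                downsample, coverage_rate, min_gain=1e-3):
    # Condensed version of the selection loop in this commit: coverage is measured
    # against the union of all views, and the best candidate's downsampled cloud
    # is cached and carried forward.
    max_rec_pts = downsample(np.vstack(point_cloud_list), threshold)
    combined = point_cloud_list[init_view]
    current_coverage = coverage_rate(max_rec_pts, combined, threshold)
    sequence = [(init_view, current_coverage)]
    remaining = [i for i in range(len(point_cloud_list)) if i != init_view]

    while remaining:
        best_view, best_gain, best_cloud = None, -1.0, None
        for view_index in remaining:
            pts = point_cloud_list[view_index]
            if pts.shape[0] == 0:
                continue
            candidate = downsample(np.vstack([combined, pts]), threshold)
            gain = coverage_rate(max_rec_pts, candidate, threshold) - current_coverage
            if gain > best_gain:
                best_view, best_gain, best_cloud = view_index, gain, candidate
        if best_view is None or best_gain <= min_gain:
            break
        combined = best_cloud  # reuse the cached downsampled cloud
        current_coverage += best_gain
        remaining.remove(best_view)
        sequence.append((best_view, current_coverage))
    return sequence, remaining, combined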