server merge

hofee 2024-10-04 16:25:24 +00:00
parent a358dd98a9
commit 41c8c060ca
2 changed files with 98 additions and 5 deletions

View File

@@ -0,0 +1,92 @@
runner:
  general:
    seed: 1
    device: cuda
    cuda_visible_devices: "0,1,2,3,4,5,6,7"

  experiment:
    name: w_gf_wo_lf_full_debug
    root_dir: "experiments"
    epoch: 1 # -1 stands for last epoch

  test:
    dataset_list:
      - OmniObject3d_train
    blender_script_path: ""
    output_dir: ""
    pipeline: nbv_reconstruction_global_pts_pipeline

dataset:
  OmniObject3d_train:
    root_dir: "/home/data/hofee/project/nbv_rec/data/nbv_rec_data_512_preproc_npy"
    model_dir: "/home/data/hofee/project/nbv_rec/data/scaled_object_meshes"
    source: seq_nbv_reconstruction_dataset
    split_file: "/home/data/hofee/project/nbv_rec/data/OmniObject3d_train.txt"
    type: test
    filter_degree: 75
    ratio: 1
    batch_size: 1
    num_workers: 12
    pts_num: 4096
    load_from_preprocess: True

pipeline:
  nbv_reconstruction_local_pts_pipeline:
    modules:
      pts_encoder: pointnet_encoder
      seq_encoder: transformer_seq_encoder
      pose_encoder: pose_encoder
      view_finder: gf_view_finder
    eps: 1e-5
    global_scanned_feat: False

  nbv_reconstruction_global_pts_pipeline:
    modules:
      pts_encoder: pointnet_encoder
      pose_seq_encoder: transformer_pose_seq_encoder
      pose_encoder: pose_encoder
      view_finder: gf_view_finder
    eps: 1e-5
    global_scanned_feat: True

module:
  pointnet_encoder:
    in_dim: 3
    out_dim: 1024
    global_feat: True
    feature_transform: False

  transformer_seq_encoder:
    pts_embed_dim: 1024
    pose_embed_dim: 256
    num_heads: 4
    ffn_dim: 256
    num_layers: 3
    output_dim: 2048

  transformer_pose_seq_encoder:
    pose_embed_dim: 256
    num_heads: 4
    ffn_dim: 256
    num_layers: 3
    output_dim: 1024

  gf_view_finder:
    t_feat_dim: 128
    pose_feat_dim: 256
    main_feat_dim: 2048
    regression_head: Rx_Ry_and_T
    pose_mode: rot_matrix
    per_point_feature: False
    sample_mode: ode
    sampling_steps: 500
    sde_mode: ve

  pose_encoder:
    pose_dim: 9
    out_dim: 256
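
For context, a minimal sketch of how a config in this shape could be read and the active test pipeline resolved. It uses plain PyYAML rather than the project's own config loader, and the file name nbv_test_config.yaml is a placeholder:

import yaml  # assumes PyYAML is available

# Load the config and look up which pipeline the test runner selects,
# along with the per-module settings that pipeline refers to.
with open("nbv_test_config.yaml", "r") as f:  # placeholder file name
    cfg = yaml.safe_load(f)

pipeline_name = cfg["runner"]["test"]["pipeline"]        # nbv_reconstruction_global_pts_pipeline
pipeline_cfg = cfg["pipeline"][pipeline_name]
module_cfgs = {m: cfg["module"][m] for m in pipeline_cfg["modules"].values()}

print(pipeline_name)
print(list(module_cfgs.keys()))  # pointnet_encoder, transformer_pose_seq_encoder, ...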

View File

@@ -6,7 +6,7 @@ from PytorchBoot.utils.log_util import Log
 import torch
 import os
 import sys
-sys.path.append(r"/media/hofee/data/project/python/nbv_reconstruction/nbv_reconstruction")
+sys.path.append(r"/home/data/hofee/project/nbv_rec/nbv_reconstruction")
 from utils.data_load import DataLoadUtil
 from utils.pose import PoseUtil
@@ -140,15 +140,16 @@ if __name__ == "__main__":
     torch.manual_seed(seed)
     np.random.seed(seed)
     config = {
-        "root_dir": "/media/hofee/data/project/python/nbv_reconstruction/sample_for_training/scenes",
-        "split_file": "/media/hofee/data/project/python/nbv_reconstruction/sample_for_training/OmniObject3d_train.txt",
-        "model_dir": "/media/hofee/data/data/scaled_object_meshes",
-        "ratio": 0.5,
+        "root_dir": "/home/data/hofee/project/nbv_rec/data/nbv_rec_data_512_preproc_npy",
+        "split_file": "/home/data/hofee/project/nbv_rec/data/OmniObject3d_train.txt",
+        "model_dir": "/home/data/hofee/project/nbv_rec/data/scaled_object_meshes",
+        "ratio": 0.005,
         "batch_size": 2,
         "filter_degree": 75,
         "num_workers": 0,
         "pts_num": 32684,
         "type": namespace.Mode.TEST,
+        "load_from_preprocess": True
     }
     ds = SeqNBVReconstructionDataset(config)
     print(len(ds))
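
As a usage note, the smoke test above only prints the dataset length. A hedged extension, assuming SeqNBVReconstructionDataset is an ordinary map-style dataset (len() is shown in the diff; indexing with ds[i] is an assumption), would be to peek at a few samples:

# Sketch only: inspect the first few samples and report tensor/array shapes.
# ds[i] indexing is assumed, not confirmed by this diff.
for i in range(min(3, len(ds))):
    sample = ds[i]
    if isinstance(sample, dict):
        print(i, {k: getattr(v, "shape", type(v).__name__) for k, v in sample.items()})
    else:
        print(i, type(sample).__name__)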