Skip to content
Snippets Groups Projects
Commit 96594ae6 authored by Guillaume Duret's avatar Guillaume Duret
Browse files

some cleaning and comments

parent 688609e4
No related branches found
No related tags found
No related merge requests found
...@@ -9,7 +9,7 @@ from scipy.spatial import distance ...@@ -9,7 +9,7 @@ from scipy.spatial import distance
import argparse import argparse
def generate_folders( dataset_path, name, list_categories, scenario): def generate_folders( dataset_path, name, list_categories):
full_name = dataset_path + '/' + name full_name = dataset_path + '/' + name
is_exist = os.path.exists(full_name) is_exist = os.path.exists(full_name)
if not is_exist: if not is_exist:
...@@ -68,37 +68,30 @@ if __name__ == '__main__': ...@@ -68,37 +68,30 @@ if __name__ == '__main__':
parser.add_argument('--dataset_id', type=str, default='', required=True) parser.add_argument('--dataset_id', type=str, default='', required=True)
parser.add_argument('--occlusion_target_min', type=float, default='', required=True) parser.add_argument('--occlusion_target_min', type=float, default='', required=True)
parser.add_argument('--occlusion_target_max', type=float, default='', required=True) parser.add_argument('--occlusion_target_max', type=float, default='', required=True)
#parser.add_argument('--rearrange', dest='rearrange', default=False, action='store_true')
#parser.add_argument('--compute', dest='compute', default=False, action='store_true')
parser.add_argument('--rearrange', type=str, default='no', required=True) parser.add_argument('--rearrange', type=str, default='no', required=True)
parser.add_argument('--compute', type=str, default='no', required=True) parser.add_argument('--compute', type=str, default='no', required=True)
# Parse the argument # Parse the argument
args = parser.parse_args() args = parser.parse_args()
scenario = "Worlds"
### parameters ### ### parameters ###
Categories = [] # to read Categories = [] # to read
Nb_instance = 1 Nb_instance = 1
occ_target_min = args.occlusion_target_min occ_target_min = args.occlusion_target_min
occ_target_max = args.occlusion_target_max occ_target_max = args.occlusion_target_max
dataset_src = f"/gpfsscratch/rech/uli/ubn15wo/DATA/data{args.dataset_id}" dataset_src = f"/gpfsscratch/rech/uli/ubn15wo/DATA/data{args.dataset_id}" #TODO, path of the raw data to process.
#dataset_src = "/media/gduret/DATA/dataset/s2rg/Fruits_all_medium/data"
choice = "low" # depth of rgb resolution datas choice = "low" # depth of rgb resolution datas #TODO, low is the advised value.
data_options = {"high": "ground_truth_rgb", data_options = {"high": "ground_truth_rgb",
"low": "ground_truth_depth"} "low": "ground_truth_depth"}
dataset_type = data_options[choice] dataset_type = data_options[choice]
dataset_path = f"/gpfsscratch/rech/uli/ubn15wo/FruitBin{args.dataset_id}" #GUIMOD_New_{choice}_{args.dataset_id}" dataset_path = f"/gpfsscratch/rech/uli/ubn15wo/FruitBin{args.dataset_id}" #TODO, path and name of the destination for the processed dataset.
#dataset_path = f"/home/gduret/Documents/FruitBin{args.dataset_id}/" dataset_name = f"FruitBin_{choice}_{Nb_instance}_{occ_target_min}_{occ_target_max}" #TODO, name of the subdataset preprocessed for scenarios.
dataset_name = f"FruitBin_{choice}_{Nb_instance}_{occ_target_min}_{occ_target_max}"
#dataset_name = f"/gpfsscratch/rech/uli/ubn15wo/dataset_new{args.dataset_id}/s2rg/Fruits_all_medium/GUIMOD_{choice}" list_categories = ["banana1", "kiwi1", "pear2", "apricot", "orange2", "peach1", "lemon2", "apple2"] #TODO, to change if different objects
list_categories = ["banana1", "kiwi1", "pear2", "apricot", "orange2", "peach1", "lemon2", "apple2"] Nb_camera = 15 # TODO, to change if different number of cameras.
Nb_camera = 15
#Nb_world = 10000
generate_folders(dataset_path , dataset_name, list_categories, scenario) generate_folders(dataset_path , dataset_name, list_categories)
if choice == 'high': if choice == 'high':
camera = np.matrix([[1386.4138492513919, 0.0, 960.5], camera = np.matrix([[1386.4138492513919, 0.0, 960.5],
...@@ -117,15 +110,14 @@ if __name__ == '__main__': ...@@ -117,15 +110,14 @@ if __name__ == '__main__':
[0.0, (2 / 3), 0.0], [0.0, (2 / 3), 0.0],
[0.0, 0.0, 1.0]]) [0.0, 0.0, 1.0]])
new_size = (640, 480) new_size = (640, 480) # size used for training baseline of 6D pose estimation
new_camera = trans @ camera new_camera = trans @ camera
#np.savetxt(f'{dataset_name}/Generated/camera_{choice}.txt', camera)
print("rearrange", args.rearrange) print("rearrange", args.rearrange)
print("compute", args.compute) print("compute", args.compute)
if args.rearrange == 'yes': if args.rearrange == 'yes': # step needed before processing, to be done only once
reform_data(dataset_src, dataset_path, dataset_type, Nb_camera, args.World_begin, args.Nb_worlds) reform_data(dataset_src, dataset_path, dataset_type, Nb_camera, args.World_begin, args.Nb_worlds)
objs = {"banana1": [ 0.02949700132012367249, 0.1511049866676330566, 0.06059300713241100311 ], objs = {"banana1": [ 0.02949700132012367249, 0.1511049866676330566, 0.06059300713241100311 ],
...@@ -156,6 +148,6 @@ if __name__ == '__main__': ...@@ -156,6 +148,6 @@ if __name__ == '__main__':
bbox = get_3D_bbox(ext) bbox = get_3D_bbox(ext)
np.savetxt(f'{dataset_path}/{dataset_name}/Generated/{categories}/{categories}_bbox_3d.txt', bbox) # save np.savetxt(f'{dataset_path}/{dataset_name}/Generated/{categories}/{categories}_bbox_3d.txt', bbox) # save
if args.compute == 'yes' : if args.compute == 'yes' : # processes a sub-dataset for specific scenarios; can be repeated to generate multiple ready-to-train sub-datasets
process_compute(dataset_path, dataset_path+'/'+dataset_name, camera, new_camera, new_size, Nb_camera, args.World_begin, args.Nb_worlds, list_categories, occ_target_min, occ_target_max, False) process_compute(dataset_path, dataset_path+'/'+dataset_name, camera, new_camera, new_size, Nb_camera, args.World_begin, args.Nb_worlds, list_categories, occ_target_min, occ_target_max, False)
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment