Commit a57649e3 authored by Guillaume Duret

more flexibility, change name and add without split

parent ddfede08
@@ -9,37 +9,42 @@ from scipy.spatial import distance
 import argparse
 
-def generate_folders(name, list_categories, scenario):
-    is_exist = os.path.exists(name)
+def generate_folders(dataset_path, name, list_categories, scenario):
+    full_name = dataset_path + name
+    is_exist = os.path.exists(full_name)
     if not is_exist:
-        os.mkdir(name)
-    folders = ["RGB", "RGB_Gen", "RGB_resized", "Meta_Gen", "Depth", "Depth_Gen", "Depth_resized", "Mask", "Meta", "Pose", "Bbox_2d", "Bbox_2d_loose", "Bbox_3d", "Bbox_3d_Gen", "Instance_Segmentation", "Semantic_Segmentation", "Instance_Mask", "Labels", "Instance_Mask_resized", "Occlusion", "Models", "Pose_transformed", "Bbox", "FPS", "FPS_resized"]
+        os.mkdir(full_name)
+    folders = ["RGB", "RGB_Gen", "RGB_resized", "Meta_Gen", "Depth", "Depth_Gen", "Depth_resized", "Meta", "Pose", "Bbox_2d", "Bbox_2d_loose", "Bbox_3d", "Bbox_3d_Gen", "Instance_Segmentation", "Semantic_Segmentation", "Instance_Mask", "Labels", "Instance_Mask_resized", "Occlusion", "Models", "Pose_transformed", "Bbox", "FPS", "FPS_resized"]
 
     for f in folders:
-        is_exist = os.path.exists(f"{name}/{f}")
+        is_exist = os.path.exists(f"{dataset_path}/{f}")
         if not is_exist:
-            if f not in ["RGB_Gen", "RGB_resized", "Depth", "Depth_Gen", "Depth_resized", "Instance_Mask", "Labels", "Instance_Mask_resized", "Meta_Gen", "Models", "Pose_transformed", "Bbox", "Bbox_3d_Gen", "FPS", "FPS_resized"]:
-                os.mkdir(f"{name}/{f}") # general data not dependent of category
+            if f not in ["RGB_Gen", "RGB_resized", "Depth_Gen", "Depth_resized", "Instance_Mask", "Labels", "Instance_Mask_resized", "Meta_Gen", "Models", "Pose_transformed", "Bbox", "Bbox_3d_Gen", "FPS", "FPS_resized"]:
+                os.mkdir(f"{dataset_path}/{f}") # general data not dependent of category
             else:
                 for cat in list_categories:
-                    is_exist2 = os.path.exists(f"{name}/Generated/{cat}")
+                    is_exist2 = os.path.exists(f"{full_name}/Generated/{cat}")
                     if not is_exist2:
-                        os.makedirs(f"{name}/Generated/{cat}")
-                    is_exist2 = os.path.exists(f"{name}/Generated/{cat}/Pose_transformed")
+                        os.makedirs(f"{full_name}/Generated/{cat}")
+                    is_exist2 = os.path.exists(f"{full_name}/Generated/{cat}/Pose_transformed")
                     if not is_exist2:
-                        os.makedirs(f"{name}/Generated/{cat}/Pose_transformed")
-                    for scenario in ["Worlds", "Cameras", "Mix_all"] :
-                        is_exist2 = os.path.exists(f"{name}/Generated_{scenario}_Training/{cat}/{f}")
+                        os.makedirs(f"{full_name}/Generated/{cat}/Pose_transformed")
+                    for scenario in ["Worlds", "Cameras", "Mix_all", "all"] :
+                    #for scenario in ["all"] :
+                        is_exist2 = os.path.exists(f"{full_name}/Generated_{scenario}_Training/{cat}/{f}")
                         if not is_exist2:
-                            os.makedirs(f"{name}/Generated_{scenario}_Training/{cat}/{f}")
-                        is_exist2 = os.path.exists(f"{name}/Generated_{scenario}_Evaluating/{cat}/{f}")
+                            os.makedirs(f"{full_name}/Generated_{scenario}_Training/{cat}/{f}")
+                        is_exist2 = os.path.exists(f"{full_name}/Generated_{scenario}_Evaluating/{cat}/{f}")
                         if not is_exist2:
-                            os.makedirs(f"{name}/Generated_{scenario}_Evaluating/{cat}/{f}")
-                        is_exist2 = os.path.exists(f"{name}/Generated_{scenario}_Testing/{cat}/{f}")
+                            os.makedirs(f"{full_name}/Generated_{scenario}_Evaluating/{cat}/{f}")
+                        is_exist2 = os.path.exists(f"{full_name}/Generated_{scenario}_Testing/{cat}/{f}")
                         if not is_exist2:
-                            os.makedirs(f"{name}/Generated_{scenario}_Testing/{cat}/{f}")
-                        is_exist2 = os.path.exists(f"{name}/Generated_{scenario}_dont_save/{cat}/{f}")
+                            os.makedirs(f"{full_name}/Generated_{scenario}_Testing/{cat}/{f}")
+                        is_exist2 = os.path.exists(f"{full_name}/Generated_{scenario}_dont_save/{cat}/{f}")
                         if not is_exist2:
-                            os.makedirs(f"{name}/Generated_{scenario}_dont_save/{cat}/{f}")
+                            os.makedirs(f"{full_name}/Generated_{scenario}_dont_save/{cat}/{f}")
+                        is_exist2 = os.path.exists(f"{full_name}/Generated_{scenario}/{cat}/{f}")
+                        if not is_exist2:
+                            os.makedirs(f"{full_name}/Generated_{scenario}/{cat}/{f}")
@@ -60,11 +65,12 @@ if __name__ == '__main__':
     # Add an argument
     parser.add_argument('--Nb_worlds', type=int, required=True)
    parser.add_argument('--World_begin', type=int, required=True)
-    parser.add_argument('--dataset_id', type=str, required=True)
+    parser.add_argument('--dataset_id', type=str, default='', required=True)
+    parser.add_argument('--occlusion_target', type=float, default='', required=True)
     #parser.add_argument('--rearrange', dest='rearrange', default=False, action='store_true')
     #parser.add_argument('--compute', dest='compute', default=False, action='store_true')
-    parser.add_argument('--rearrange', type=str, required=True)
-    parser.add_argument('--compute', type=str, required=True)
+    parser.add_argument('--rearrange', type=str, default='no', required=True)
+    parser.add_argument('--compute', type=str, default='no', required=True)
 
     # Parse the argument
     args = parser.parse_args()
@@ -73,23 +79,24 @@ if __name__ == '__main__':
     ### parameters ###
     Categories = [] # to read
     Nb_instance = 1
-    occ_target = 0.5
-    dataset_src = f"/gpfsscratch/rech/uli/ubn15wo/data{args.dataset_id}"
-    #dataset_src = "/media/mahmoud/E/Fruits_easy/data"
-    #dataset_src = "/media/gduret/DATA/dataset/s2rg/Fruits_all_medium/data"
+    occ_target = args.occlusion_target
+    #dataset_src = f"/gpfsscratch/rech/uli/ubn15wo/data{args.dataset_id}"
+    dataset_src = "/media/gduret/DATA/dataset/s2rg/Fruits_all_medium/data"
     choice = "low" # depth of rgb resolution datas
     data_options = {"high": "ground_truth_rgb",
                     "low": "ground_truth_depth"}
     dataset_type = data_options[choice]
-    dataset_name = f"/gpfsscratch/rech/uli/ubn15wo/GUIMOD_New_{choice}_{args.dataset_id}"
+    #dataset_name = f"/gpfsscratch/rech/uli/ubn15wo/GUIMOD_New_{choice}_{args.dataset_id}"
+    dataset_path = "/home/gduret/Documents/Datasets/"
+    dataset_name = f"FruitsBin_{choice}_{Nb_instance}_{occ_target}"
     #dataset_name = f"/gpfsscratch/rech/uli/ubn15wo/dataset_new{args.dataset_id}/s2rg/Fruits_all_medium/GUIMOD_{choice}"
-    list_categories = ["banana1", "kiwi1", "pear2", "apricot", "orange2", "peach1", "lemon2", "apple2" ]
+    list_categories = ["banana1", "kiwi1", "pear2", "apricot", "orange2", "peach1", "lemon2", "apple2"]
     Nb_camera = 15
     #Nb_world = 10000
-    generate_folders(dataset_name, list_categories, scenario)
+    generate_folders(dataset_path , dataset_name, list_categories, scenario)
     if choice == 'high':
         camera = np.matrix([[1386.4138492513919, 0.0, 960.5],
@@ -117,7 +124,7 @@ if __name__ == '__main__':
     print("compute", args.compute)
 
     if args.rearrange == 'yes':
-        reform_data(dataset_src, dataset_name, dataset_type, Nb_camera, args.World_begin, args.Nb_worlds)
+        reform_data(dataset_src, dataset_path, dataset_type, Nb_camera, args.World_begin, args.Nb_worlds)
 
     objs = {"banana1": [ 0.02949700132012367249, 0.1511049866676330566, 0.06059300713241100311 ],
             "kiwi1": [ 0.04908600077033042908, 0.07206099480390548706, 0.04909799993038177490 ],
@@ -136,17 +143,17 @@ if __name__ == '__main__':
         fps_points = apply_fps(pcd, 8)
-        np.savetxt(f'{dataset_name}/Generated/{categories}/{categories}_fps_3d.txt', fps_points)
+        np.savetxt(f'{dataset_path}/{dataset_name}/Generated/{categories}/{categories}_fps_3d.txt', fps_points)
 
         point_cloud_in_numpy = np.asarray(pcd.points)
         dim = calc_pts_diameter2(point_cloud_in_numpy) * 100
-        np.savetxt(f'{dataset_name}/Generated/{categories}/{categories}_diameter.txt', np.array([dim]))
+        np.savetxt(f'{dataset_path}/{dataset_name}/Generated/{categories}/{categories}_diameter.txt', np.array([dim]))
 
         size_bb = objs[categories]
         ext = [x / 2 for x in size_bb]
         bbox = get_3D_bbox(ext)
-        np.savetxt(f'{dataset_name}/Generated/{categories}/{categories}_bbox_3d.txt', bbox) # save
+        np.savetxt(f'{dataset_path}/{dataset_name}/Generated/{categories}/{categories}_bbox_3d.txt', bbox) # save
 
     if args.compute == 'yes' :
-        process_compute(dataset_name, camera, new_camera, new_size, Nb_camera, args.World_begin, args.Nb_worlds, list_categories, occ_target, False)
+        process_compute(dataset_path, dataset_path+'/'+dataset_name, camera, new_camera, new_size, Nb_camera, args.World_begin, args.Nb_worlds, list_categories, occ_target, False)
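
For orientation, a minimal, purely illustrative call of the updated generate_folders() signature, using the values that appear in this diff (the scenario argument is a placeholder):

    # Hypothetical usage of the new signature; values taken from the diff above.
    dataset_path = "/home/gduret/Documents/Datasets/"
    choice = "low"
    Nb_instance = 1
    occ_target = 0.5  # normally supplied via --occlusion_target
    dataset_name = f"FruitsBin_{choice}_{Nb_instance}_{occ_target}"
    list_categories = ["banana1", "kiwi1", "pear2", "apricot", "orange2", "peach1", "lemon2", "apple2"]
    generate_folders(dataset_path, dataset_name, list_categories, scenario="Worlds")  # "Worlds" is a placeholder

Note that full_name is built as dataset_path + name, so dataset_path is expected to end with a trailing slash, as in the default used here.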
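
apply_fps() (used above to export 8 keypoints per model) is defined elsewhere in the repository and its implementation is not shown in this commit. A minimal greedy farthest-point-sampling sketch of what such a helper typically does, assuming an Open3D point cloud with a .points attribute:

    import numpy as np

    def apply_fps_sketch(pcd, n_points):
        # Greedy farthest-point sampling: repeatedly pick the point farthest
        # from the already-selected set. Illustrative only; the repository's
        # apply_fps() may differ in seeding and return format.
        pts = np.asarray(pcd.points)
        selected = [0]  # arbitrary seed point
        dists = np.linalg.norm(pts - pts[0], axis=1)
        for _ in range(1, n_points):
            idx = int(np.argmax(dists))
            selected.append(idx)
            dists = np.minimum(dists, np.linalg.norm(pts - pts[idx], axis=1))
        return pts[selected]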
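
Similarly, calc_pts_diameter2() is assumed here to compute the model diameter as the largest pairwise distance between points (consistent with the "from scipy.spatial import distance" import at the top of the file); the * 100 factor then presumably converts metres to centimetres. A sketch under that assumption:

    import numpy as np
    from scipy.spatial import distance

    def calc_pts_diameter2_sketch(pts):
        # Largest pairwise Euclidean distance between model points.
        # O(n^2) memory; fine for small fruit models, illustrative only.
        return float(np.max(distance.cdist(pts, pts)))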