diff --git a/compute_features.py b/compute_features.py
index cee73a35acd1f93f49d67e949c93eb27ee43fbc4..958e47f89dca4ea4ebc04d910b9204f8bcd2dca0 100644
--- a/compute_features.py
+++ b/compute_features.py
@@ -21,7 +21,7 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl
 
     destination_folders_list = {}
     for scenario_loop in scenarios:
-        destination_folders_list[scenario_loop] = [f"Generated_{scenario_loop}_Testing", f"Generated_{scenario_loop}_Evaluating", f"Generated_{scenario_loop}_Training" ]
+        destination_folders_list[scenario_loop] = [f"Generated_{scenario_loop}_Testing", f"Generated_{scenario_loop}_Evaluating", f"Generated_{scenario_loop}_Training", f"Generated_{scenario_loop}_dont_save" ]
 
     list_count_categories = {}
     for scenario_loop in scenarios :
@@ -33,10 +33,9 @@
 
     for i in range(World_begin, World_begin + Nb_world): # worlds
-        print(i)
-        if i > 4 :
+        if i > 8000 :
             destination_folders["Worlds"] = f"Generated_Worlds_Testing"
-        elif i > 3 :
+        elif i > 6000 :
             destination_folders["Worlds"] = f"Generated_Worlds_Evaluating"
         else :
             destination_folders["Worlds"] = f"Generated_Worlds_Training"
 
@@ -53,14 +52,14 @@
             else :
                 destination_folders["Cameras"] = f"Generated_Cameras_Training"
 
-            if i > 4 and j > 12 :
+            if i > 8000 and j > 12 :
                 destination_folders["Mix_all"] = f"Generated_Mix_all_Testing"
-            elif i > 3 and i <=4 and j > 9 and j >= 12 :
+            elif i > 6000 and i <= 8000 and j > 9 and j <= 12 :
                 destination_folders["Mix_all"] = f"Generated_Mix_all_Evaluating"
-            elif i <= 3 and j <= 9 :
+            elif i <= 6000 and j <= 9 :
                 destination_folders["Mix_all"] = f"Generated_Mix_all_Training"
             else :
-                destination_folders["Mix_all"] = "dont_save"
+                destination_folders["Mix_all"] = f"Generated_Mix_all_dont_save"
 
             categories_occ_array, categories_array = compute_id_good_occ(data_name, p, categories_instance_array_id_to_cat, categories_instance_array_cat_to_id, occ_target)
@@ -79,10 +78,10 @@
                 raise TypeError("size of datas are differents !!")
 
             for scenario_loop in scenarios:
-                if not destination_folders[scenario_loop] == "dont_save" :
-                    if os.path.isfile(f'{data_name}/{destination_folders[scenario_loop]}/Count_{p-1}.json'):
-                        with open(f'{data_name}/{destination_folders[scenario_loop]}/Count_{p-1}.json') as f:
-                            list_count_categories[scenario_loop][destination_folders[scenario_loop]] = json.load(f)
+                for destination_folder_loop in destination_folders_list[scenario_loop] :
+                    if os.path.isfile(f'{data_name}/{destination_folder_loop}/Count_{p-1}.json'):
+                        with open(f'{data_name}/{destination_folder_loop}/Count_{p-1}.json') as f:
+                            list_count_categories[scenario_loop][destination_folder_loop] = json.load(f)
 
             #res_all = []
             for categories in list_categories:
@@ -112,17 +111,17 @@
                         meta['camera'] = f"grabber_{j}"
                         meta['occlusion'] = occ_target
                         meta['Nb_instance_category'] = Nb_instance
-
-                        if not os.path.isfile(f'{data_name}/{destination_folders[scenario_loop]}/{categories}/Meta_Gen/{categories}.json'):
-                            with open(f'{data_name}/{destination_folders[scenario_loop]}/{categories}/Meta_Gen/{categories}.json', mode='w') as f:
+
+                        if not os.path.isfile(f'{data_name}/{destination_folders[scenario_loop]}/{categories}/Meta_Gen/{categories}_{p}.json'):
+                            with open(f'{data_name}/{destination_folders[scenario_loop]}/{categories}/Meta_Gen/{categories}_{p}.json', mode='w') as f:
                                 feeds = {}
                                 feeds[meta['id_generated']]=meta
                                 f.write(json.dumps(feeds, indent=2))
                         else:
-                            with open(f'{data_name}/{destination_folders[scenario_loop]}/{categories}/Meta_Gen/{categories}.json') as feedsjson:
+                            with open(f'{data_name}/{destination_folders[scenario_loop]}/{categories}/Meta_Gen/{categories}_{p}.json') as feedsjson:
                                 feeds = json.load(feedsjson)
                                 feeds[meta['id_generated']]=meta
-                            with open(f'{data_name}/{destination_folders[scenario_loop]}/{categories}/Meta_Gen/{categories}.json', mode='w') as f:
+                            with open(f'{data_name}/{destination_folders[scenario_loop]}/{categories}/Meta_Gen/{categories}_{p}.json', mode='w') as f:
                                 f.write(json.dumps(feeds, indent=4))
 
                     if (Nb_instance_occ == 1 and Nb_instance == 0 ): # condition of only one instance of occ >= 0.5 and no other < 0.05
@@ -141,24 +140,18 @@
                             for scenario_loop in scenarios:
                                 if not destination_folders[scenario_loop] == "dont_save" :
                                     np.save(f'{data_name}/{destination_folders[scenario_loop]}/{categories}/Pose_transformed/{p}.npy', num_arr) # save
-                                else:
-                                    continue
 
                         if data_Bbox_2d[k]['id'] == categories_occ_array[categories][0]:
                             bbox = bbox_2d(data_Bbox_2d[k])
                             for scenario_loop in scenarios:
                                 if not destination_folders[scenario_loop] == "dont_save" :
                                     np.savetxt(f'{data_name}/{destination_folders[scenario_loop]}/{categories}/Bbox/{p}.txt', np.array(bbox).reshape((1, 4))) # save
-                                else:
-                                    continue
 
                         if data_Bbox_3d[k]['id'] == categories_occ_array[categories][0]:
                             bbox3d_size = data_Bbox_3d[k]['bbox']['size']
                             for scenario_loop in scenarios:
                                 if not destination_folders[scenario_loop] == "dont_save" :
                                     np.savetxt(f'{data_name}/{destination_folders[scenario_loop]}/{categories}/Bbox_3d_Gen/{p}.txt', bbox3d_size) # save
-                                else:
-                                    continue
 
                         id = categories_occ_array[categories][0]
                         img = cv2.imread(f"{data_name}/Instance_Segmentation/{p}.png", cv2.IMREAD_UNCHANGED) # plt.imread(path)
@@ -174,6 +167,8 @@
                             image = cv2.imread(f"{data_name}/{destination_folders[scenario_loop]}/{categories}/Instance_Mask/{p}.png", 0)
                             image = image/255.0
                             contours, _ = cv2.findContours(image.astype(np.uint8), cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)
+                            if len(contours) > 1 :
+                                continue
                             for l in range(len(contours[0])):
                                 x = contours[0][l][0][0]/640.0
                                 res.append(x)
@@ -246,5 +241,6 @@
                         f.write(json.dumps(list_count_categories[scenario_loop][destination_folder_loop], indent=4))
 
     with open(f'{data_name}/Count_{p}.json', mode='w') as f:
         f.write(json.dumps(list_count_categories, indent=4))
-    #print(list_count_categories)
+    print(list_count_categories)
+    print(f'{data_name}/{destination_folder_loop}/Count_{p}.json')
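Note (illustration, not part of the patch): assuming world indices run to 10000, the new thresholds split the worlds 6000/2000/2000 into training/evaluating/testing, and the Mix_all scenario additionally buckets the 15 cameras per world; the Evaluating branch is written with `j <= 12` since the earlier `j >= 12` collapses the intended 9 < j <= 12 camera range. A minimal sketch of that routing as a pure function (the function name is hypothetical, the thresholds are the ones from the hunk above):

    def mix_all_split(i, j):
        """Route world i (1..10000) and camera j (1..15) to a Mix_all split."""
        if i > 8000 and j > 12:
            return "Testing"
        if 6000 < i <= 8000 and 9 < j <= 12:
            return "Evaluating"
        if i <= 6000 and j <= 9:
            return "Training"
        return "dont_save"  # now persisted under Generated_Mix_all_dont_save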
diff --git a/main.py b/main.py
index a746677cadde9540c6372750d97009852a548c6c..8405c9c241a7f9c43bcce957654d0e6a3c6e221c 100644
--- a/main.py
+++ b/main.py
@@ -36,9 +36,9 @@ def generate_folders(name, list_categories, scenario):
                     is_exist2 = os.path.exists(f"{name}/Generated_{scenario}_Testing/{cat}/{f}")
                     if not is_exist2:
                         os.makedirs(f"{name}/Generated_{scenario}_Testing/{cat}/{f}")
-                    is_exist2 = os.path.exists(f"{name}/dont_save/{cat}/{f}")
+                    is_exist2 = os.path.exists(f"{name}/Generated_{scenario}_dont_save/{cat}/{f}")
                     if not is_exist2:
-                        os.makedirs(f"{name}/dont_save/{cat}/{f}")
+                        os.makedirs(f"{name}/Generated_{scenario}_dont_save/{cat}/{f}")
 
 
 
@@ -53,13 +53,15 @@ def calc_pts_diameter2(pts):
     diameter = np.max(dists)
     return diameter
 
-if __name__ == '__main__':
-
+if __name__ == '__main__':
     # Create the parser
     parser = argparse.ArgumentParser()
    # Add an argument
     parser.add_argument('--Nb_worlds', type=int, required=True)
     parser.add_argument('--World_begin', type=int, required=True)
+    parser.add_argument('--dataset_id', type=str, required=True)
+    parser.add_argument('--rearrange', type=lambda s: s.lower() == 'true', required=True)  # type=bool would treat any non-empty string, including "false", as True
+    parser.add_argument('--compute', type=lambda s: s.lower() == 'true', required=True)
     # Parse the argument
     args = parser.parse_args()
 
@@ -69,14 +71,20 @@ if __name__ == '__main__':
     Categories = [] # to read
     Nb_instance = 1
     occ_target = 0.5
-    dataset_src = "/media/gduret/DATA/dataset/s2rg/Fruits_all_medium/data"
+
+    dataset_src = f"/gpfsscratch/rech/uli/ubn15wo/data{args.dataset_id}"
+    #dataset_src = "/media/mahmoud/E/Fruits_easy/data"
+    #dataset_src = "/media/gduret/DATA/dataset/s2rg/Fruits_all_medium/data"
+
     choice = "low" # depth of rgb resolution datas
     data_options = {"high": "ground_truth_rgb",
                     "low": "ground_truth_depth"}
     dataset_type = data_options[choice]
-    dataset_name = f"GUIMOD_{choice}"
-    list_categories = ["banana1", "kiwi1", "pear2", "strawberry1", "apricot", "orange2", "peach1", "lemon2", "apple2" ]
+    dataset_name = f"/gpfsscratch/rech/uli/ubn15wo/GUIMOD_New_{choice}_{args.dataset_id}"
+    #dataset_name = f"/gpfsscratch/rech/uli/ubn15wo/dataset_new{args.dataset_id}/s2rg/Fruits_all_medium/GUIMOD_{choice}"
+    list_categories = ["banana1", "kiwi1", "pear2", "apricot", "orange2", "peach1", "lemon2", "apple2" ]
     Nb_camera = 15
+    #Nb_world = 10000
 
     generate_folders(dataset_name, list_categories, scenario)
 
@@ -103,19 +111,20 @@ if __name__ == '__main__':
 
     #np.savetxt(f'{dataset_name}/Generated/camera_{choice}.txt', camera)
 
-    reform_data(dataset_src, dataset_name, dataset_type, Nb_camera, args.World_begin, args.Nb_worlds)
+    if args.rearrange :
+        reform_data(dataset_src, dataset_name, dataset_type, Nb_camera, args.World_begin, args.Nb_worlds)
 
-    list_categories = ["banana1", "kiwi1", "pear2", "strawberry1", "apricot", "orange2", "peach1", "lemon2", "apple2" ]
     objs = {"banana1": [ 0.02949700132012367249, 0.1511049866676330566, 0.06059300713241100311 ],
             "kiwi1": [ 0.04908600077033042908, 0.07206099480390548706, 0.04909799993038177490 ],
             "pear2": [ 0.06601099669933319092, 0.1287339925765991211, 0.06739201396703720093 ],
-            "strawberry1": [0.01698100194334983826, 0.02203200198709964752, 0.01685700193047523499],
             "apricot": [0.04213499650359153748, 0.05482299625873565674, 0.04333199933171272278],
             "orange2": [ 0.07349500805139541626, 0.07585700601339340210, 0.07458199560642242432 ],
             "peach1": [ 0.07397901266813278198, 0.07111301273107528687, 0.07657301425933837891 ],
             "lemon2": [0.04686100035905838013, 0.04684200137853622437, 0.07244800776243209839],
             "apple2": [0.05203099921345710754, 0.04766000062227249146, 0.05089000239968299866]}
+    # "strawberry1": [0.01698100194334983826, 0.02203200198709964752, 0.01685700193047523499],
+
     for categories in list_categories:
         point_cloud = f"Models/{categories}/{categories.lower()}.ply"
         pcd = o3d.io.read_point_cloud(point_cloud)
 
@@ -133,5 +142,6 @@ if __name__ == '__main__':
         bbox = get_3D_bbox(ext)
         np.savetxt(f'{dataset_name}/Generated/{categories}/{categories}_bbox_3d.txt', bbox) # save
 
-    process_compute(dataset_name, camera, new_camera, new_size, Nb_camera, args.World_begin, args.Nb_worlds, list_categories, occ_target, False)
+    if args.compute :
+        process_compute(dataset_name, camera, new_camera, new_size, Nb_camera, args.World_begin, args.Nb_worlds, list_categories, occ_target, False)
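Note on the new boolean flags (illustration, not part of the patch): argparse's `type=bool` applies bool() to the raw string, and bool("false") is True because any non-empty string is truthy; the lambdas above therefore compare the string itself. A self-contained demonstration:

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('--rearrange', type=lambda s: s.lower() == 'true')
    args = parser.parse_args(['--rearrange=false'])
    print(args.rearrange)  # False -- with type=bool this would have printed True

A typical invocation of the reworked entry point (argument values are hypothetical): python main.py --World_begin=1 --Nb_worlds=250 --dataset_id=0 --rearrange=true --compute=false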
diff --git a/main_rearrange.py b/main_rearrange.py
new file mode 100644
index 0000000000000000000000000000000000000000..65a10dfa10f52e5101a426eca001aeea07311039
--- /dev/null
+++ b/main_rearrange.py
@@ -0,0 +1,142 @@
+import os
+import numpy as np
+from prepare_data import reform_data
+from fps_alg import apply_fps
+from bbox_3d import get_3D_bbox
+from compute_features import process_compute
+import open3d as o3d
+from scipy.spatial import distance
+import argparse
+
+def generate_folders(name, list_categories, scenario):
+    is_exist = os.path.exists(name)
+    if not is_exist:
+        os.mkdir(name)
+    folders = ["RGB", "RGB_Gen", "RGB_resized", "Meta_Gen", "Depth", "Mask", "Meta", "Pose", "Bbox_2d", "Bbox_2d_loose", "Bbox_3d", "Bbox_3d_Gen", "Instance_Segmentation", "Semantic_Segmentation", "Instance_Mask", "Instance_Mask_resized", "Occlusion", "Models", "Pose_transformed", "Bbox", "FPS", "FPS_resized"]
+    for f in folders:
+        is_exist = os.path.exists(f"{name}/{f}")
+        if not is_exist:
+            if f not in ["RGB_Gen", "RGB_resized", "Instance_Mask", "Instance_Mask_resized", "Meta_Gen", "Models", "Pose_transformed", "Bbox", "Bbox_3d_Gen", "FPS" , "FPS_resized"]:
+                os.mkdir(f"{name}/{f}")
+            else:
+                for cat in list_categories:
+                    is_exist2 = os.path.exists(f"{name}/Generated/{cat}")
+                    if not is_exist2:
+                        os.makedirs(f"{name}/Generated/{cat}")
+                    is_exist2 = os.path.exists(f"{name}/Generated/{cat}/Pose_transformed")
+                    if not is_exist2:
+                        os.makedirs(f"{name}/Generated/{cat}/Pose_transformed")
+                    for scenario in ["Worlds", "Cameras", "Mix_all"] :
+                        is_exist2 = os.path.exists(f"{name}/Generated_{scenario}_Training/{cat}/{f}")
+                        if not is_exist2:
+                            os.makedirs(f"{name}/Generated_{scenario}_Training/{cat}/{f}")
+                        is_exist2 = os.path.exists(f"{name}/Generated_{scenario}_Evaluating/{cat}/{f}")
+                        if not is_exist2:
+                            os.makedirs(f"{name}/Generated_{scenario}_Evaluating/{cat}/{f}")
+                        is_exist2 = os.path.exists(f"{name}/Generated_{scenario}_Testing/{cat}/{f}")
+                        if not is_exist2:
+                            os.makedirs(f"{name}/Generated_{scenario}_Testing/{cat}/{f}")
+                        is_exist2 = os.path.exists(f"{name}/dont_save/{cat}/{f}")
+                        if not is_exist2:
+                            os.makedirs(f"{name}/dont_save/{cat}/{f}")
+
+
+
+def calc_pts_diameter2(pts):
+    """Calculates the diameter of a set of 3D points (i.e. the maximum distance
+    between any two points in the set). Faster but requires more memory than
+    calc_pts_diameter.
+    :param pts: nx3 ndarray with 3D points.
+    :return: The calculated diameter.
+    """
+    dists = distance.cdist(pts, pts, 'euclidean')
+    diameter = np.max(dists)
+    return diameter
+
+if __name__ == '__main__':
+    # Create the parser
+    parser = argparse.ArgumentParser()
+    # Add an argument
+    parser.add_argument('--Nb_worlds', type=int, required=True)
+    parser.add_argument('--World_begin', type=int, required=True)
+    parser.add_argument('--dataset_id', type=str, required=True)
+    # Parse the argument
+    args = parser.parse_args()
+
+    scenario = "Worlds"
+
+    ### parameters ###
+    Categories = [] # to read
+    Nb_instance = 1
+    occ_target = 0.5
+
+    dataset_src = f"/gpfsscratch/rech/uli/ubn15wo/data{args.dataset_id}"
+    #dataset_src = "/media/mahmoud/E/Fruits_easy/data"
+    #dataset_src = "/media/gduret/DATA/dataset/s2rg/Fruits_all_medium/data"
+
+    choice = "low" # depth of rgb resolution datas
+    data_options = {"high": "ground_truth_rgb",
+                    "low": "ground_truth_depth"}
+    dataset_type = data_options[choice]
+    dataset_name = f"/gpfsscratch/rech/uli/ubn15wo/dataset{args.dataset_id}/s2rg/Fruits_all_medium/GUIMOD_{choice}"
+    list_categories = ["banana1", "kiwi1", "pear2", "strawberry1", "apricot", "orange2", "peach1", "lemon2", "apple2" ]
+    Nb_camera = 15
+    #Nb_world = 10000
+
+    generate_folders(dataset_name, list_categories, scenario)
+
+    if choice == 'high':
+        camera = np.matrix([[1386.4138492513919, 0.0, 960.5],
+                            [0.0, 1386.4138492513919, 540.5],
+                            [0.0, 0.0, 1.0]])
+        # (640/1920 = 1 / 3), (480/1080 = 4 / 9)
+        trans = np.matrix([[1 / 3, 0.0, 0.0],
+                           [0.0, (4 / 9), 0.0],
+                           [0.0, 0.0, 1.0]])
+    elif choice == 'low':
+        camera = np.matrix([[1086.5054444841007, 0.0, 640.5],
+                            [0.0, 1086.5054444841007, 360.5],
+                            [0.0, 0.0, 1.0]])
+        #
+        trans = np.matrix([[0.5, 0.0, 0.0],
+                           [0.0, (2 / 3), 0.0],
+                           [0.0, 0.0, 1.0]])
+
+    new_size = (640, 480)
+
+    new_camera = trans @ camera
+
+    #np.savetxt(f'{dataset_name}/Generated/camera_{choice}.txt', camera)
+
+    reform_data(dataset_src, dataset_name, dataset_type, Nb_camera, args.World_begin, args.Nb_worlds)
+
+    list_categories = ["banana1", "kiwi1", "pear2", "strawberry1", "apricot", "orange2", "peach1", "lemon2", "apple2" ]
+    objs = {"banana1": [ 0.02949700132012367249, 0.1511049866676330566, 0.06059300713241100311 ],
+            "kiwi1": [ 0.04908600077033042908, 0.07206099480390548706, 0.04909799993038177490 ],
+            "pear2": [ 0.06601099669933319092, 0.1287339925765991211, 0.06739201396703720093 ],
+            "strawberry1": [0.01698100194334983826, 0.02203200198709964752, 0.01685700193047523499],
+            "apricot": [0.04213499650359153748, 0.05482299625873565674, 0.04333199933171272278],
+            "orange2": [ 0.07349500805139541626, 0.07585700601339340210, 0.07458199560642242432 ],
+            "peach1": [ 0.07397901266813278198, 0.07111301273107528687, 0.07657301425933837891 ],
+            "lemon2": [0.04686100035905838013, 0.04684200137853622437, 0.07244800776243209839],
+            "apple2": [0.05203099921345710754, 0.04766000062227249146, 0.05089000239968299866]}
+
+    for categories in list_categories:
+        point_cloud = f"Models/{categories}/{categories.lower()}.ply"
+        pcd = o3d.io.read_point_cloud(point_cloud)
+
+        fps_points = apply_fps(pcd, 8)
+
+        np.savetxt(f'{dataset_name}/Generated/{categories}/{categories}_fps_3d.txt', fps_points)
+
+        point_cloud_in_numpy = np.asarray(pcd.points)
+        dim = calc_pts_diameter2(point_cloud_in_numpy) * 100
+        np.savetxt(f'{dataset_name}/Generated/{categories}/{categories}_diameter.txt', np.array([dim]))
+
+        size_bb = objs[categories]
+        ext = [x / 2 for x in size_bb]
+        bbox = get_3D_bbox(ext)
+        np.savetxt(f'{dataset_name}/Generated/{categories}/{categories}_bbox_3d.txt', bbox) # save
+
+    #process_compute(dataset_name, camera, new_camera, new_size, Nb_camera, args.World_begin, args.Nb_worlds, list_categories, occ_target, False)
+
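As its docstring says, calc_pts_diameter2 is fast but memory-hungry: distance.cdist materialises the full n x n distance matrix. If that ever becomes a problem for dense models, a blocked scan yields the same diameter at a fraction of the memory — a sketch only, not part of the patch; the function name and block size are made up:

    import numpy as np
    from scipy.spatial import distance

    def calc_pts_diameter_blocked(pts, block=1024):
        """Same result as calc_pts_diameter2, one row-block of distances at a time."""
        diameter = 0.0
        for start in range(0, len(pts), block):
            dists = distance.cdist(pts[start:start + block], pts, 'euclidean')
            diameter = max(diameter, float(dists.max()))
        return diameter

For reference, new_camera = trans @ camera rescales the intrinsics to new_size = (640, 480): the 'low' branch halves fx and cx (1280 -> 640) and scales fy and cy by 2/3 (720 -> 480), mirroring the 1/3 and 4/9 factors noted in the 'high' branch.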
diff --git a/prepare_data.py b/prepare_data.py
index 8830d80b2bb81de78255b57eb7ea04cdd77ea24c..4fded51df59985d9d1ebf9e6b5aff7c94386d45e 100644
--- a/prepare_data.py
+++ b/prepare_data.py
@@ -8,6 +8,7 @@ def reform_data(src, data_name, data_option, Nb_camera, World_begin, Nb_world):
         shutil.copy(src_meta, dst_meta)
         for j in range(1, Nb_camera+1): # cameras
             count = ((i-1)*Nb_camera) + j
+            print(count)
             if data_option == "ground_truth_rgb":
                 files_img = os.listdir(f"{src}/{i}/grabber_{j}/color/image/")
                 src_img = f"{src}/{i}/grabber_{j}/color/image/{files_img[0]}"
+ """ + dists = distance.cdist(pts, pts, 'euclidean') + diameter = np.max(dists) + return diameter + +if __name__ == '__main__': + # Create the parser + parser = argparse.ArgumentParser() + # Add an argument + parser.add_argument('--Nb_worlds', type=int, required=True) + parser.add_argument('--World_begin', type=int, required=True) + parser.add_argument('--dataset_id', type=str, required=True) + # Parse the argument + args = parser.parse_args() + + scenario = "Worlds" + + ### parameters ### + Categories = [] # to read + Nb_instance = 1 + occ_target = 0.5 + + dataset_src = f"/gpfsscratch/rech/uli/ubn15wo/data{args.dataset_id}" + #dataset_src = "/media/mahmoud/E/Fruits_easy/data" + #dataset_src = "/media/gduret/DATA/dataset/s2rg/Fruits_all_medium/data" + + choice = "low" # depth of rgb resolution datas + data_options = {"high": "ground_truth_rgb", + "low": "ground_truth_depth"} + dataset_type = data_options[choice] + dataset_name = f"/gpfsscratch/rech/uli/ubn15wo/dataset{args.dataset_id}/s2rg/Fruits_all_medium/GUIMOD_{choice}" + list_categories = ["banana1", "kiwi1", "pear2", "strawberry1", "apricot", "orange2", "peach1", "lemon2", "apple2" ] + Nb_camera = 15 + #Nb_world = 10000 + + generate_folders(dataset_name, list_categories, scenario) + + if choice == 'high': + camera = np.matrix([[1386.4138492513919, 0.0, 960.5], + [0.0, 1386.4138492513919, 540.5], + [0.0, 0.0, 1.0]]) + # (640/1920 = 1 / 3), (480/1080 = 4 / 9) + trans = np.matrix([[1 / 3, 0.0, 0.0], + [0.0, (4 / 9), 0.0], + [0.0, 0.0, 1.0]]) + elif choice == 'low': + camera = np.matrix([[1086.5054444841007, 0.0, 640.5], + [0.0, 1086.5054444841007, 360.5], + [0.0, 0.0, 1.0]]) + # + trans = np.matrix([[0.5, 0.0, 0.0], + [0.0, (2 / 3), 0.0], + [0.0, 0.0, 1.0]]) + + new_size = (640, 480) + + new_camera = trans @ camera + + #np.savetxt(f'{dataset_name}/Generated/camera_{choice}.txt', camera) + + reform_data(dataset_src, dataset_name, dataset_type, Nb_camera, args.World_begin, args.Nb_worlds) + + list_categories = ["banana1", "kiwi1", "pear2", "strawberry1", "apricot", "orange2", "peach1", "lemon2", "apple2" ] + objs = {"banana1": [ 0.02949700132012367249, 0.1511049866676330566, 0.06059300713241100311 ], + "kiwi1": [ 0.04908600077033042908, 0.07206099480390548706, 0.04909799993038177490 ], + "pear2": [ 0.06601099669933319092, 0.1287339925765991211, 0.06739201396703720093 ], + "strawberry1": [0.01698100194334983826, 0.02203200198709964752, 0.01685700193047523499], + "apricot": [0.04213499650359153748, 0.05482299625873565674, 0.04333199933171272278], + "orange2": [ 0.07349500805139541626, 0.07585700601339340210, 0.07458199560642242432 ], + "peach1": [ 0.07397901266813278198, 0.07111301273107528687, 0.07657301425933837891 ], + "lemon2": [0.04686100035905838013, 0.04684200137853622437, 0.07244800776243209839], + "apple2": [0.05203099921345710754, 0.04766000062227249146, 0.05089000239968299866]} + + for categories in list_categories: + point_cloud = f"Models/{categories}/{categories.lower()}.ply" + pcd = o3d.io.read_point_cloud(point_cloud) + + fps_points = apply_fps(pcd, 8) + + np.savetxt(f'{dataset_name}/Generated/{categories}/{categories}_fps_3d.txt', fps_points) + + point_cloud_in_numpy = np.asarray(pcd.points) + dim = calc_pts_diameter2(point_cloud_in_numpy) * 100 + np.savetxt(f'{dataset_name}/Generated/{categories}/{categories}_diameter.txt', np.array([dim])) + + size_bb = objs[categories] + ext = [x / 2 for x in size_bb] + bbox = get_3D_bbox(ext) + np.savetxt(f'{dataset_name}/Generated/{categories}/{categories}_bbox_3d.txt', bbox) # save + 
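The script takes five positional arguments, bound above to id_begin, Nb, id_dataset, rearrange and compute (note the invocation passes --Nb_worlds, matching the flag main.py declares). A hypothetical submission that rearranges worlds 1-250 of dataset 0 without computing features:

    sbatch slurm/process_dataset.slurm 1 250 0 true false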