From 0db2777fb2d6f92af99e98a54009b454dc9ddbfc Mon Sep 17 00:00:00 2001 From: Gduret <guillaume.duret@ec-lyon.fr> Date: Thu, 2 Mar 2023 20:21:51 +0100 Subject: [PATCH] first scenario splitting --- compute_features.py | 46 +++++++++++++++++++++++++++------------------ main.py | 29 +++++++++++++++++++++------- 2 files changed, 50 insertions(+), 25 deletions(-) diff --git a/compute_features.py b/compute_features.py index 7367d1e..e153c5a 100644 --- a/compute_features.py +++ b/compute_features.py @@ -18,8 +18,18 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl [1.0000000, 0.0000000, 0.0000000]]) list_count_categories = {} - + + for i in range(World_begin, World_begin + Nb_world): # worlds + + scenario = "Worlds" + + if i > 4 : + destination_folder = f"Generated_{scenario}_Testing" + elif i > 3 : + destination_folder = f"Generated_{scenario}_Evaluating" + else : + destination_folder = f"Generated_{scenario}_Training" catergories_instance_array_id_to_cat, catergories_instance_array_cat_to_id, catergories_label_to_id = compute_categories_id(data_name, i) @@ -44,8 +54,8 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl if len(data_Bbox_2d) != len(data_3D_pose) : raise TypeError("size of datas are differents !!") - if os.path.isfile(f'{data_name}/Generated/Count_{p-1}.json'): - with open(f'{data_name}/Generated/Count_{p-1}.json') as f: + if os.path.isfile(f'{data_name}/{destination_folder}/Count_{p-1}.json'): + with open(f'{data_name}/{destination_folder}/Count_{p-1}.json') as f: list_count_categories = json.load(f) for categories in list_categories: @@ -74,16 +84,16 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl meta['occlusion'] = occ_target meta['Nb_instance_category'] = Nb_instance - if not os.path.isfile(f'{data_name}/Generated/{categories}/Meta_Gen/{categories}.json'): - with open(f'{data_name}/Generated/{categories}/Meta_Gen/{categories}.json', mode='w') as f: + if 
not os.path.isfile(f'{data_name}/{destination_folder}/{categories}/Meta_Gen/{categories}.json'): + with open(f'{data_name}/{destination_folder}/{categories}/Meta_Gen/{categories}.json', mode='w') as f: feeds = {} feeds[meta['id_generated']]=meta f.write(json.dumps(feeds, indent=2)) else: - with open(f'{data_name}/Generated/{categories}/Meta_Gen/{categories}.json') as feedsjson: + with open(f'{data_name}/{destination_folder}/{categories}/Meta_Gen/{categories}.json') as feedsjson: feeds = json.load(feedsjson) feeds[meta['id_generated']]=meta - with open(f'{data_name}/Generated/{categories}/Meta_Gen/{categories}.json', mode='w') as f: + with open(f'{data_name}/{destination_folder}/{categories}/Meta_Gen/{categories}.json', mode='w') as f: f.write(json.dumps(feeds, indent=4)) if (Nb_instance == 1): @@ -98,19 +108,19 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl T_exp = transformation @ xyz T_exp = np.array(T_exp) num_arr = np.c_[R_exp, T_exp[0]] - np.save(f'{data_name}/Generated/{categories}/Pose_transformed/{p}.npy', num_arr) # save + np.save(f'{data_name}/{destination_folder}/{categories}/Pose_transformed/{p}.npy', num_arr) # save else: continue if data_Bbox_2d[k]['id'] == catergories_occ_array[categories][0]: bbox = bbox_2d(data_Bbox_2d[k]) - np.savetxt(f'{data_name}/Generated/{categories}/Bbox/{p}.txt', np.array(bbox).reshape((1, 4))) # save + np.savetxt(f'{data_name}/{destination_folder}/{categories}/Bbox/{p}.txt', np.array(bbox).reshape((1, 4))) # save else: continue if data_Bbox_3d[k]['id'] == catergories_occ_array[categories][0]: bbox3d_size = data_Bbox_3d[k]['bbox']['size'] - np.savetxt(f'{data_name}/Generated/{categories}/Bbox_3d_Gen/{p}.txt', bbox3d_size) # save + np.savetxt(f'{data_name}/{destination_folder}/{categories}/Bbox_3d_Gen/{p}.txt', bbox3d_size) # save else: continue @@ -118,17 +128,17 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl img = 
cv2.imread(f"{data_name}/Instance_Segmentation/{p}.png", cv2.IMREAD_UNCHANGED) # plt.imread(path) instance_img = instance(img, id) - cv2.imwrite(f"{data_name}/Generated/{categories}/Instance_Mask/{p}.png", 255*instance_img) + cv2.imwrite(f"{data_name}/{destination_folder}/{categories}/Instance_Mask/{p}.png", 255*instance_img) instance_img_resized = cv2.resize(instance_img, new_size) - cv2.imwrite(f"{data_name}/Generated/{categories}/Instance_Mask_resized/{p}.png", 255*instance_img_resized) + cv2.imwrite(f"{data_name}/{destination_folder}/{categories}/Instance_Mask_resized/{p}.png", 255*instance_img_resized) img = cv2.imread(f"{data_name}/RGB/{p}.png") - cv2.imwrite(f"{data_name}/Generated/{categories}/RGB_Gen/{p}.png", img) + cv2.imwrite(f"{data_name}/{destination_folder}/{categories}/RGB_Gen/{p}.png", img) img_resized = cv2.resize(img, new_size) - cv2.imwrite(f"{data_name}/Generated/{categories}/RGB_resized/{p}.png", img_resized) + cv2.imwrite(f"{data_name}/{destination_folder}/{categories}/RGB_resized/{p}.png", img_resized) np.set_printoptions(precision=15) - pose = np.load(f'{data_name}/Generated/{categories}/Pose_transformed/{p}.npy') + pose = np.load(f'{data_name}/{destination_folder}/{categories}/Pose_transformed/{p}.npy') R_exp = pose[0:3, 0:3] tVec = pose[0:3, 3] @@ -144,7 +154,7 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl out.append(x) out.append(y) ind += 2 - np.savetxt(f'{data_name}/Generated/{categories}/FPS/{p}.txt', np.array(out).reshape(1, len(out))) + np.savetxt(f'{data_name}/{destination_folder}/{categories}/FPS/{p}.txt', np.array(out).reshape(1, len(out))) points_resized = process2(fps_points, R_exp, tVec, camera_resized, img_resized, vis) out_resized = [int(catergories_occ_array[categories][0])] #len have to be 1 ! 
@@ -155,9 +165,9 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl out_resized.append(x_resized) out_resized.append(y_resized) ind_resized += 2 - np.savetxt(f'{data_name}/Generated/{categories}/FPS_resized/{p}.txt', np.array(out_resized).reshape(1, len(out_resized))) + np.savetxt(f'{data_name}/{destination_folder}/{categories}/FPS_resized/{p}.txt', np.array(out_resized).reshape(1, len(out_resized))) - with open(f'{data_name}/Generated/Count_{p}.json', mode='w') as f: + with open(f'{data_name}/{destination_folder}/Count_{p}.json', mode='w') as f: f.write(json.dumps(list_count_categories, indent=4)) print(list_count_categories) diff --git a/main.py b/main.py index 70878ad..6541bb5 100644 --- a/main.py +++ b/main.py @@ -8,7 +8,7 @@ import open3d as o3d from scipy.spatial import distance import argparse -def generate_folders(name, list_categories): +def generate_folders(name, list_categories, scenario): is_exist = os.path.exists(name) if not is_exist: os.mkdir(name) @@ -20,9 +20,20 @@ def generate_folders(name, list_categories): os.mkdir(f"{name}/{f}") else: for cat in list_categories: - is_exist2 = os.path.exists(f"{name}/Generated/{cat}/{f}") + is_exist2 = os.path.exists(f"{name}/Generated/{cat}") if not is_exist2: - os.makedirs(f"{name}/Generated/{cat}/{f}") + os.makedirs(f"{name}/Generated/{cat}") + is_exist2 = os.path.exists(f"{name}/Generated_{scenario}_Training/{cat}/{f}") + if not is_exist2: + os.makedirs(f"{name}/Generated_{scenario}_Training/{cat}/{f}") + is_exist2 = os.path.exists(f"{name}/Generated_{scenario}_Evaluating/{cat}/{f}") + if not is_exist2: + os.makedirs(f"{name}/Generated_{scenario}_Evaluating/{cat}/{f}") + is_exist2 = os.path.exists(f"{name}/Generated_{scenario}_Testing/{cat}/{f}") + if not is_exist2: + os.makedirs(f"{name}/Generated_{scenario}_Testing/{cat}/{f}") + + def calc_pts_diameter2(pts): """Calculates the diameter of a set of 3D points (i.e. 
the maximum distance @@ -45,12 +56,14 @@ if __name__ == '__main__': # Parse the argument args = parser.parse_args() + scenario = "Worlds" + ### parameters ### Categories = [] # to read Nb_instance = 1 occ_target = 0.5 dataset_src = "/media/gduret/DATA/dataset/s2rg/Fruits_all_medium/data" - choice = "high" # depth of rgb resolution datas + choice = "low" # depth of rgb resolution datas data_options = {"high": "ground_truth_rgb", "low": "ground_truth_depth"} dataset_type = data_options[choice] @@ -58,7 +71,7 @@ if __name__ == '__main__': list_categories = ["banana1", "kiwi1", "pear2", "strawberry1", "apricot", "orange2", "peach1", "lemon2", "apple2" ] Nb_camera = 15 - generate_folders(dataset_name, list_categories) + generate_folders(dataset_name, list_categories, scenario) if choice == 'high': camera = np.matrix([[1386.4138492513919, 0.0, 960.5], @@ -80,7 +93,8 @@ if __name__ == '__main__': new_size = (640, 480) new_camera = trans @ camera - np.savetxt(f'{dataset_name}/Generated/camera_{choice}.txt', camera) + + #np.savetxt(f'{dataset_name}/Generated/camera_{choice}.txt', camera) reform_data(dataset_src, dataset_name, dataset_type, Nb_camera, args.World_begin, args.Nb_worlds) @@ -100,6 +114,7 @@ if __name__ == '__main__': pcd = o3d.io.read_point_cloud(point_cloud) fps_points = apply_fps(pcd, 8) + np.savetxt(f'{dataset_name}/Generated/{categories}/{categories}_fps_3d.txt', fps_points) point_cloud_in_numpy = np.asarray(pcd.points) @@ -111,5 +126,5 @@ if __name__ == '__main__': bbox = get_3D_bbox(ext) np.savetxt(f'{dataset_name}/Generated/{categories}/{categories}_bbox_3d.txt', bbox) # save - process_compute(dataset_name, camera, new_camera, new_size, Nb_camera, args.World_begin, args.Nb_worlds, list_categories, occ_target, True) + process_compute(dataset_name, camera, new_camera, new_size, Nb_camera, args.World_begin, args.Nb_worlds, list_categories, occ_target, False) -- GitLab