diff --git a/compute_features.py b/compute_features.py
index e153c5a90e9652bf70f98f10196bb7d3b3b637fb..babf84be459ba8f06bbd26e6f90e0cdbb11fbeac 100644
--- a/compute_features.py
+++ b/compute_features.py
@@ -17,32 +17,56 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl
                                 [0.0000000, 0.0000000, -1.0000000],
                                 [1.0000000, 0.0000000, 0.0000000]])
 
+    scenarios = ["Worlds", "Cameras", "Mix_all"]
+
+    destination_folders_list = {}
+    for scenario_loop in scenarios:
+        destination_folders_list[scenario_loop] = [f"Generated_{scenario_loop}_Testing", f"Generated_{scenario_loop}_Evaluating", f"Generated_{scenario_loop}_Training"]
+    list_count_categories = {}
+    for scenario_loop in scenarios :
+        list_count_categories[scenario_loop] = {}
+        for destination_folder_loop in destination_folders_list[scenario_loop] :
+            list_count_categories[scenario_loop][destination_folder_loop] = {}
+    destination_folders = {}
     for i in range(World_begin, World_begin + Nb_world): # worlds
-        scenario = "Worlds"
         if i > 4 :
-            destination_folder = f"Generated_{scenario}_Testing"
+            destination_folders["Worlds"] = f"Generated_Worlds_Testing"
         elif i > 3 :
-            destination_folder = f"Generated_{scenario}_Evaluating"
+            destination_folders["Worlds"] = f"Generated_Worlds_Evaluating"
         else :
-            destination_folder = f"Generated_{scenario}_Training"
+            destination_folders["Worlds"] = f"Generated_Worlds_Training"
 
         catergories_instance_array_id_to_cat, catergories_instance_array_cat_to_id, catergories_label_to_id = compute_categories_id(data_name, i)
 
        for j in range(1, Nb_camera+1): # cameras
             p = ((i-1)*Nb_camera) + j
 
+            if j > 12 :
+                destination_folders["Cameras"] = f"Generated_Cameras_Testing"
+            elif j > 9 :
+                destination_folders["Cameras"] = f"Generated_Cameras_Evaluating"
+            else :
+                destination_folders["Cameras"] = f"Generated_Cameras_Training"
+
+            if i > 4 and j > 12 :
+                destination_folders["Mix_all"] = f"Generated_Mix_all_Testing"
+            elif i > 3 and i <= 4 and j > 9 and j <= 12 :
+                destination_folders["Mix_all"] = f"Generated_Mix_all_Evaluating"
+            elif i <= 3 and j <= 9 :
+                destination_folders["Mix_all"] = f"Generated_Mix_all_Training"
+            else :
+                destination_folders["Mix_all"] = "dont_save"
+
             catergories_occ_array = compute_id_good_occ(data_name, p, catergories_instance_array_id_to_cat, catergories_instance_array_cat_to_id, occ_target)
 
             ### 3D Poses ###
             with open(f'{data_name}/Pose/{p}.json', 'r') as f:
                 data_3D_pose = json.load(f)
-            #print(data)
-            #print("len(data)", len(data_3D_pose))
 
             ### 2D BBox ###
             with open(f"{data_name}/Bbox_2d/{p}.json", 'r') as f:
@@ -53,48 +77,51 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl
 
             if len(data_Bbox_2d) != len(data_3D_pose) :
                 raise TypeError("size of datas are differents !!")
-
-            if os.path.isfile(f'{data_name}/{destination_folder}/Count_{p-1}.json'):
-                with open(f'{data_name}/{destination_folder}/Count_{p-1}.json') as f:
-                    list_count_categories = json.load(f)
+
+            for scenario_loop in scenarios:
+                if not destination_folders[scenario_loop] == "dont_save" :
+                    if os.path.isfile(f'{data_name}/{destination_folders[scenario_loop]}/Count_{p-1}.json'):
+                        with open(f'{data_name}/{destination_folders[scenario_loop]}/Count_{p-1}.json') as f:
+                            list_count_categories[scenario_loop][destination_folders[scenario_loop]] = json.load(f)
 
             for categories in list_categories:
                 if categories in catergories_occ_array.keys():
                     Nb_instance = len(catergories_occ_array[categories])
-                    meta = {}
-                    if categories in list_count_categories.keys():
-                        print("hello")
-                    else:
-                        #list_count_categories[categories] = {categories}
-                        list_count_categories[categories] = {}
-
-                    if Nb_instance in list_count_categories[categories].keys() :
-                        list_count_categories[categories][Nb_instance] += 1
-                    else :
-                        list_count_categories[categories][Nb_instance] = 1
-
-                    meta['id_generated'] = list_count_categories[categories][Nb_instance]
-                    meta['id_original'] = p
-                    meta['id_category'] = catergories_label_to_id[categories]
-                    meta['id_instance'] = catergories_occ_array[categories]
-                    meta['id_dataset'] = 1
-                    meta['world'] = i
-                    meta['camera'] = f"grabber_{j}"
-                    meta['occlusion'] = occ_target
-                    meta['Nb_instance_category'] = Nb_instance
-
-                    if not os.path.isfile(f'{data_name}/{destination_folder}/{categories}/Meta_Gen/{categories}.json'):
-                        with open(f'{data_name}/{destination_folder}/{categories}/Meta_Gen/{categories}.json', mode='w') as f:
-                            feeds = {}
-                            feeds[meta['id_generated']]=meta
-                            f.write(json.dumps(feeds, indent=2))
-                    else:
-                        with open(f'{data_name}/{destination_folder}/{categories}/Meta_Gen/{categories}.json') as feedsjson:
-                            feeds = json.load(feedsjson)
-                            feeds[meta['id_generated']]=meta
-                        with open(f'{data_name}/{destination_folder}/{categories}/Meta_Gen/{categories}.json', mode='w') as f:
-                            f.write(json.dumps(feeds, indent=4))
+                    for scenario_loop in scenarios:
+
+                        meta = {}
+                        #
+                        if not destination_folders[scenario_loop] in list_count_categories[scenario_loop].keys():
+                            list_count_categories[scenario_loop][destination_folders[scenario_loop]] = {}
+                        if not categories in list_count_categories[scenario_loop][destination_folders[scenario_loop]].keys():
+                            list_count_categories[scenario_loop][destination_folders[scenario_loop]][categories] = {}
+                        if f"{Nb_instance}_instances" in list_count_categories[scenario_loop][destination_folders[scenario_loop]][categories].keys() :
+                            list_count_categories[scenario_loop][destination_folders[scenario_loop]][categories][f"{Nb_instance}_instances"] += 1
+                        else :
+                            list_count_categories[scenario_loop][destination_folders[scenario_loop]][categories][f"{Nb_instance}_instances"] = 1
+
+                        meta['id_generated'] = list_count_categories[scenario_loop][destination_folders[scenario_loop]][categories][f"{Nb_instance}_instances"]
+                        meta['id_original'] = p
+                        meta['id_category'] = catergories_label_to_id[categories]
+                        meta['id_instance'] = catergories_occ_array[categories]
+                        meta['id_dataset'] = 1
+                        meta['world'] = i
+                        meta['camera'] = f"grabber_{j}"
+                        meta['occlusion'] = occ_target
+                        meta['Nb_instance_category'] = Nb_instance
+
+                        if not os.path.isfile(f'{data_name}/{destination_folders[scenario_loop]}/{categories}/Meta_Gen/{categories}.json'):
+                            with open(f'{data_name}/{destination_folders[scenario_loop]}/{categories}/Meta_Gen/{categories}.json', mode='w') as f:
+                                feeds = {}
+                                feeds[meta['id_generated']]=meta
+                                f.write(json.dumps(feeds, indent=2))
+                        else:
+                            with open(f'{data_name}/{destination_folders[scenario_loop]}/{categories}/Meta_Gen/{categories}.json') as feedsjson:
+                                feeds = json.load(feedsjson)
+                                feeds[meta['id_generated']]=meta
+                            with open(f'{data_name}/{destination_folders[scenario_loop]}/{categories}/Meta_Gen/{categories}.json', mode='w') as f:
+                                f.write(json.dumps(feeds, indent=4))
 
                     if (Nb_instance == 1):
                         for k in range(len(data_3D_pose)):
@@ -108,19 +135,23 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl
                                 T_exp = transformation @ xyz
                                 T_exp = np.array(T_exp)
                                 num_arr = np.c_[R_exp, T_exp[0]]
-                                np.save(f'{data_name}/{destination_folder}/{categories}/Pose_transformed/{p}.npy', num_arr) # save
+                                np.save(f'{data_name}/Generated/{categories}/Pose_transformed/{p}.npy', num_arr) # save
                             else:
                                 continue
 
                             if data_Bbox_2d[k]['id'] == catergories_occ_array[categories][0]:
                                 bbox = bbox_2d(data_Bbox_2d[k])
-                                np.savetxt(f'{data_name}/{destination_folder}/{categories}/Bbox/{p}.txt', np.array(bbox).reshape((1, 4))) # save
+                                for scenario_loop in scenarios:
+                                    if not destination_folders[scenario_loop] == "dont_save" :
+                                        np.savetxt(f'{data_name}/{destination_folders[scenario_loop]}/{categories}/Bbox/{p}.txt', np.array(bbox).reshape((1, 4))) # save
                             else:
                                 continue
 
                             if data_Bbox_3d[k]['id'] == catergories_occ_array[categories][0]:
                                 bbox3d_size = data_Bbox_3d[k]['bbox']['size']
-                                np.savetxt(f'{data_name}/{destination_folder}/{categories}/Bbox_3d_Gen/{p}.txt', bbox3d_size) # save
+                                for scenario_loop in scenarios:
+                                    if not destination_folders[scenario_loop] == "dont_save" :
+                                        np.savetxt(f'{data_name}/{destination_folders[scenario_loop]}/{categories}/Bbox_3d_Gen/{p}.txt', bbox3d_size) # save
                             else:
                                 continue
 
@@ -128,17 +159,26 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl
                         img = cv2.imread(f"{data_name}/Instance_Segmentation/{p}.png", cv2.IMREAD_UNCHANGED) # plt.imread(path)
                         instance_img = instance(img, id)
-                        cv2.imwrite(f"{data_name}/{destination_folder}/{categories}/Instance_Mask/{p}.png", 255*instance_img)
+                        for scenario_loop in scenarios:
+                            if not destination_folders[scenario_loop] == "dont_save" :
+                                cv2.imwrite(f"{data_name}/{destination_folders[scenario_loop]}/{categories}/Instance_Mask/{p}.png", 255*instance_img)
 
                         instance_img_resized = cv2.resize(instance_img, new_size)
-                        cv2.imwrite(f"{data_name}/{destination_folder}/{categories}/Instance_Mask_resized/{p}.png", 255*instance_img_resized)
+                        for scenario_loop in scenarios:
+                            if not destination_folders[scenario_loop] == "dont_save" :
+                                cv2.imwrite(f"{data_name}/{destination_folders[scenario_loop]}/{categories}/Instance_Mask_resized/{p}.png", 255*instance_img_resized)
 
                         img = cv2.imread(f"{data_name}/RGB/{p}.png")
-                        cv2.imwrite(f"{data_name}/{destination_folder}/{categories}/RGB_Gen/{p}.png", img)
+                        for scenario_loop in scenarios:
+                            if not destination_folders[scenario_loop] == "dont_save" :
+                                cv2.imwrite(f"{data_name}/{destination_folders[scenario_loop]}/{categories}/RGB_Gen/{p}.png", img)
 
                         img_resized = cv2.resize(img, new_size)
-                        cv2.imwrite(f"{data_name}/{destination_folder}/{categories}/RGB_resized/{p}.png", img_resized)
+                        for scenario_loop in scenarios:
+                            if not destination_folders[scenario_loop] == "dont_save" :
+                                cv2.imwrite(f"{data_name}/{destination_folders[scenario_loop]}/{categories}/RGB_resized/{p}.png", img_resized)
 
                         np.set_printoptions(precision=15)
-                        pose = np.load(f'{data_name}/{destination_folder}/{categories}/Pose_transformed/{p}.npy')
+                        #for scenario_loop in scenarios:
+                        pose = np.load(f'{data_name}/Generated/{categories}/Pose_transformed/{p}.npy')
                         R_exp = pose[0:3, 0:3]
                         tVec = pose[0:3, 3]
@@ -154,7 +194,10 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl
                             out.append(x)
                             out.append(y)
                             ind += 2
-                        np.savetxt(f'{data_name}/{destination_folder}/{categories}/FPS/{p}.txt', np.array(out).reshape(1, len(out)))
+
+                        for scenario_loop in scenarios:
+                            if not destination_folders[scenario_loop] == "dont_save" :
+                                np.savetxt(f'{data_name}/{destination_folders[scenario_loop]}/{categories}/FPS/{p}.txt', np.array(out).reshape(1, len(out)))
 
                         points_resized = process2(fps_points, R_exp, tVec, camera_resized, img_resized, vis)
                         out_resized = [int(catergories_occ_array[categories][0])] #len have to be 1 !
@@ -165,10 +208,16 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl
                             out_resized.append(x_resized)
                             out_resized.append(y_resized)
                             ind_resized += 2
-                        np.savetxt(f'{data_name}/{destination_folder}/{categories}/FPS_resized/{p}.txt', np.array(out_resized).reshape(1, len(out_resized)))
-
-            with open(f'{data_name}/{destination_folder}/Count_{p}.json', mode='w') as f:
-                f.write(json.dumps(list_count_categories, indent=4))
-            print(list_count_categories)
+                        for scenario_loop in scenarios:
+                            if not destination_folders[scenario_loop] == "dont_save" :
+                                np.savetxt(f'{data_name}/{destination_folders[scenario_loop]}/{categories}/FPS_resized/{p}.txt', np.array(out_resized).reshape(1, len(out_resized)))
+
+            for scenario_loop in scenarios:
+                for destination_folder_loop in destination_folders_list[scenario_loop] :
+                    with open(f'{data_name}/{destination_folder_loop}/Count_{p}.json', mode='w') as f:
+                        f.write(json.dumps(list_count_categories[scenario_loop][destination_folder_loop], indent=4))
+            with open(f'{data_name}/Count_{p}.json', mode='w') as f:
+                f.write(json.dumps(list_count_categories, indent=4))
+            #print(list_count_categories)
 
 
diff --git a/fps_alg.py b/fps_alg.py
index fa2f93f64d93135cbf2cd3086aa82850a6401336..802a7ea4e01f5b052f20d7248443b470d300a4b8 100644
--- a/fps_alg.py
+++ b/fps_alg.py
@@ -132,8 +132,8 @@ def process(pcd_box, pcd2, R_exp, tVec, camera, img):
     keypoint_2d = cv2.projectPoints(pcd2_in_numpy, R_exp, tVec, camera, np.zeros(shape=[5, 1], dtype='float64'))
     keypoint_2d2 = cv2.projectPoints(pcd2_in_numpy2, R_exp, tVec, camera, np.zeros(shape=[5, 1], dtype='float64'))
 
-    for n in range(len(pcd2_in_numpy)):
-        print(pcd2_in_numpy[n], '==>', keypoint_2d[0][n])
+    # for n in range(len(pcd2_in_numpy)):
+    #     print(pcd2_in_numpy[n], '==>', keypoint_2d[0][n])
 
     points = []
     for n in range(len(pcd2_in_numpy)):
@@ -203,5 +203,5 @@ def generate_fps(data_name, camera, Nb_camera, Nb_world, list_categories, occ_ta
             out[0][ind + 1] = point[0][1] / img.shape[0]
             ind += 2
         np.savetxt(f'{data_name}/Generated/FPS/{categories}/{p}.txt', out)
-        print("stop")
+
 
diff --git a/main.py b/main.py
index d031724b04fce617f5cb677ce263fc5974ac19aa..149e226a57e43762ce47b28d21d38f6d9324645b 100644
--- a/main.py
+++ b/main.py
@@ -23,15 +23,22 @@ def generate_folders(name, list_categories, scenario):
             is_exist2 = os.path.exists(f"{name}/Generated/{cat}")
             if not is_exist2:
                 os.makedirs(f"{name}/Generated/{cat}")
-            is_exist2 = os.path.exists(f"{name}/Generated_{scenario}_Training/{cat}/{f}")
+            is_exist2 = os.path.exists(f"{name}/Generated/{cat}/Pose_transformed")
             if not is_exist2:
-                os.makedirs(f"{name}/Generated_{scenario}_Training/{cat}/{f}")
-            is_exist2 = os.path.exists(f"{name}/Generated_{scenario}_Evaluating/{cat}/{f}")
-            if not is_exist2:
-                os.makedirs(f"{name}/Generated_{scenario}_Evaluating/{cat}/{f}")
-            is_exist2 = os.path.exists(f"{name}/Generated_{scenario}_Testing/{cat}/{f}")
-            if not is_exist2:
-                os.makedirs(f"{name}/Generated_{scenario}_Testing/{cat}/{f}")
+                os.makedirs(f"{name}/Generated/{cat}/Pose_transformed")
+            for scenario in ["Worlds", "Cameras", "Mix_all"] :
+                is_exist2 = os.path.exists(f"{name}/Generated_{scenario}_Training/{cat}/{f}")
+                if not is_exist2:
+                    os.makedirs(f"{name}/Generated_{scenario}_Training/{cat}/{f}")
+                is_exist2 = os.path.exists(f"{name}/Generated_{scenario}_Evaluating/{cat}/{f}")
os.path.exists(f"{name}/Generated_{scenario}_Evaluating/{cat}/{f}") + if not is_exist2: + os.makedirs(f"{name}/Generated_{scenario}_Evaluating/{cat}/{f}") + is_exist2 = os.path.exists(f"{name}/Generated_{scenario}_Testing/{cat}/{f}") + if not is_exist2: + os.makedirs(f"{name}/Generated_{scenario}_Testing/{cat}/{f}") + is_exist2 = os.path.exists(f"{name}/dont_save/{cat}/{f}") + if not is_exist2: + os.makedirs(f"{name}/dont_save/{cat}/{f}") diff --git a/pose.py b/pose.py index 6f8e477f506260722826c2b3d63b9c6fcc6379bc..6f29610c95e3042d8db920b78cb3b239ed3d200c 100644 --- a/pose.py +++ b/pose.py @@ -38,9 +38,6 @@ def compute_categories_id(data_name, world): # id_instances.append(i['id']) # print(i['id']) - print("catergories_instance_array_cat_to_id : ", catergories_instance_array_cat_to_id) - print("catergories_instance_array_id_to_cat : ", catergories_instance_array_id_to_cat) - # Closing file f.close() @@ -63,8 +60,6 @@ def compute_id_good_occ(data_name, count, catergories_instance_array_id_to_cat, if i['occlusion_value'] > 0.5 : catergories_occ_array[catergories_instance_array_id_to_cat[i['id']]].append(i['id']) - print(catergories_occ_array) - # Closing file f2.close() @@ -99,9 +94,6 @@ def transform_pose(data_name, Nb_camera, Nb_world, list_categories, occ_target): with open(f'{data_name}/Pose/{p}.json', 'r') as f: data = json.load(f) - #print(data) - print("len(data)", len(data)) - for k in range(len(data)): for categories in list_categories: diff --git a/test_fps.py b/test_fps.py index 9ed329ec5eb4d8f62ca35cd405e9be0283f43148..0a012a158e560f70568a4930443f957d0bcf1ac0 100644 --- a/test_fps.py +++ b/test_fps.py @@ -214,7 +214,7 @@ new = np.matrix([[0.0000000, -1.0000000, 0.0000000], t_org = pose[0:3, 3] tVec = new @ t_org -print(tVec) +#print(tVec) img = image.imread('/media/mahmoud/F/GUIMOD/data/1/grabber_1/color/image/0_0.png') camera = [[1386.4138492513919, 0.0, 960.5], [0.0, 1386.4138492513919, 540.5], [0.0, 0.0, 1.0]] diff --git a/test_resize.py b/test_resize.py index c014c5975f1ef22a590f3e70f8476481789afea2..84443b9312f564a0f3255e5d5debf643d5db0617 100644 --- a/test_resize.py +++ b/test_resize.py @@ -111,8 +111,8 @@ def process2(pcd, R_exp, tVec, camera, img, vis= True): pcd_fps_numpy = np.asarray(pcd) keypoint_2d = cv2.projectPoints(pcd_fps_numpy, R_exp, tVec, camera, np.zeros(shape=[8, 1], dtype='float64')) - for n in range(len(pcd_fps_numpy)): - print(pcd_fps_numpy[n], '==>', keypoint_2d[0][n]) + # for n in range(len(pcd_fps_numpy)): + # print(pcd_fps_numpy[n], '==>', keypoint_2d[0][n]) if vis: out = np.zeros((img.shape[0], img.shape[1], 16)) @@ -139,8 +139,8 @@ def process(pcd_box, pcd2, R_exp, tVec, camera, img): keypoint_2d = cv2.projectPoints(pcd2_in_numpy, R_exp, tVec, camera, np.zeros(shape=[5, 1], dtype='float64')) keypoint_2d2 = cv2.projectPoints(pcd2_in_numpy2, R_exp, tVec, camera, np.zeros(shape=[5, 1], dtype='float64')) - for n in range(len(pcd2_in_numpy)): - print(pcd2_in_numpy[n], '==>', keypoint_2d[0][n]) + # for n in range(len(pcd2_in_numpy)): + # print(pcd2_in_numpy[n], '==>', keypoint_2d[0][n]) points = [] for n in range(len(pcd2_in_numpy)): diff --git a/utils.py b/utils.py index 2e09900181b6c4ba0515ce9fb69945c4b8af7759..b8f4b334e003e027307742ec8f7f69a73a306112 100644 --- a/utils.py +++ b/utils.py @@ -36,9 +36,6 @@ def compute_categories_id(data_name, world): # id_instances.append(i['id']) # print(i['id']) - print("catergories_instance_array_cat_to_id : ", catergories_instance_array_cat_to_id) - print("catergories_instance_array_id_to_cat : ", 
-
     # Closing file
     f.close()
 
@@ -57,15 +54,10 @@ def compute_id_good_occ(data_name, count, catergories_instance_array_id_to_cat,
         #print(cat)
         catergories_occ_array[cat] = []
 
-    print(data2)
-
     for i in data2:
         if i['occlusion_value'] >= Occ_wanted :
-            print(i['id'])
             catergories_occ_array[catergories_instance_array_id_to_cat[i['id']]].append(i['id'])
 
-    print(catergories_occ_array)
-
     # Closing file
     f2.close()
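
Note for reviewers: the split rule that the patch spreads across `destination_folders` in compute_features.py can be summarized as a small standalone helper. This is an illustrative sketch only, not code from the patch; the function name `split_for` is made up, and it assumes the intended Mix_all Evaluating condition is `i == 4 and 9 < j <= 12` (i.e. both axes land in the same split).

```python
# Illustrative sketch (not part of the patch): the world/camera routing rule
# that compute_features.py encodes in destination_folders.

def split_for(scenario: str, world: int, camera: int) -> str:
    """Return the Generated_* folder for one (world, camera) pair, or 'dont_save'."""
    def world_split(i: int) -> str:
        # worlds 1-3 -> Training, world 4 -> Evaluating, worlds > 4 -> Testing
        return "Testing" if i > 4 else "Evaluating" if i > 3 else "Training"

    def camera_split(j: int) -> str:
        # cameras 1-9 -> Training, 10-12 -> Evaluating, > 12 -> Testing
        return "Testing" if j > 12 else "Evaluating" if j > 9 else "Training"

    if scenario == "Worlds":
        return f"Generated_Worlds_{world_split(world)}"
    if scenario == "Cameras":
        return f"Generated_Cameras_{camera_split(camera)}"
    # Mix_all keeps a pair only when both axes fall into the same split.
    if world_split(world) == camera_split(camera):
        return f"Generated_Mix_all_{world_split(world)}"
    return "dont_save"


if __name__ == "__main__":
    # world 4, camera 11 -> Evaluating in every scenario; world 2, camera 14 -> dropped for Mix_all
    print(split_for("Worlds", 4, 11), split_for("Cameras", 4, 11), split_for("Mix_all", 4, 11))
    print(split_for("Mix_all", 2, 14))
```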