Skip to content
Snippets Groups Projects
Commit 91527d70 authored by Guillaume Duret's avatar Guillaume Duret
Browse files

add labels for yolo, add stronger occlusion filter, minor issues fixed

parent c1fbacf6
No related branches found
No related tags found
No related merge requests found
......@@ -33,7 +33,7 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl
for i in range(World_begin, World_begin + Nb_world): # worlds
print(i)
if i > 4 :
destination_folders["Worlds"] = f"Generated_Worlds_Testing"
elif i > 3 :
......@@ -41,7 +41,7 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl
else :
destination_folders["Worlds"] = f"Generated_Worlds_Training"
catergories_instance_array_id_to_cat, catergories_instance_array_cat_to_id, catergories_label_to_id = compute_categories_id(data_name, i)
categories_instance_array_id_to_cat, categories_instance_array_cat_to_id, categories_label_to_id = compute_categories_id(data_name, i)
for j in range(1, Nb_camera+1): # cameras
p = ((i-1)*Nb_camera) + j
......@@ -62,7 +62,7 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl
else :
destination_folders["Mix_all"] = "dont_save"
catergories_occ_array = compute_id_good_occ(data_name, p, catergories_instance_array_id_to_cat, catergories_instance_array_cat_to_id, occ_target)
categories_occ_array, categories_array = compute_id_good_occ(data_name, p, categories_instance_array_id_to_cat, categories_instance_array_cat_to_id, occ_target)
### 3D Poses ###
with open(f'{data_name}/Pose/{p}.json', 'r') as f:
......@@ -84,9 +84,11 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl
with open(f'{data_name}/{destination_folders[scenario_loop]}/Count_{p-1}.json') as f:
list_count_categories[scenario_loop][destination_folders[scenario_loop]] = json.load(f)
#res_all = []
for categories in list_categories:
if categories in catergories_occ_array.keys():
Nb_instance = len(catergories_occ_array[categories])
if categories in categories_occ_array.keys():
Nb_instance = len(categories_array[categories])
Nb_instance_occ = len(categories_occ_array[categories])
for scenario_loop in scenarios:
......@@ -103,14 +105,14 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl
meta['id_generated'] = list_count_categories[scenario_loop][destination_folders[scenario_loop]][categories][f"{Nb_instance}_instances"]
meta['id_original'] = p
meta['id_category'] = catergories_label_to_id[categories]
meta['id_instance'] = catergories_occ_array[categories]
meta['id_category'] = categories_label_to_id[categories]
meta['id_instance'] = categories_occ_array[categories]
meta['id_dataset'] = 1
meta['world'] = i
meta['camera'] = f"grabber_{j}"
meta['occlusion'] = occ_target
meta['Nb_instance_category'] = Nb_instance
if not os.path.isfile(f'{data_name}/{destination_folders[scenario_loop]}/{categories}/Meta_Gen/{categories}.json'):
with open(f'{data_name}/{destination_folders[scenario_loop]}/{categories}/Meta_Gen/{categories}.json', mode='w') as f:
feeds = {}
......@@ -122,10 +124,11 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl
feeds[meta['id_generated']]=meta
with open(f'{data_name}/{destination_folders[scenario_loop]}/{categories}/Meta_Gen/{categories}.json', mode='w') as f:
f.write(json.dumps(feeds, indent=4))
if (Nb_instance_occ == 1 and Nb_instance == 0 ): # condition of only one instance of occ >= 0.5 and no other < 0.05
if (Nb_instance == 1):
for k in range(len(data_3D_pose)):
if data_3D_pose[k]['id'] == catergories_occ_array[categories][0]:
if data_3D_pose[k]['id'] == categories_occ_array[categories][0]:
rpy = data_3D_pose[k]['pose']['rpy']
rot = convert2(rpy)
R_exp = transformation @ rot
......@@ -135,11 +138,13 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl
T_exp = transformation @ xyz
T_exp = np.array(T_exp)
num_arr = np.c_[R_exp, T_exp[0]]
np.save(f'{data_name}/Generated/{categories}/Pose_transformed/{p}.npy', num_arr) # save
for scenario_loop in scenarios:
if not destination_folders[scenario_loop] == "dont_save" :
np.save(f'{data_name}/{destination_folders[scenario_loop]}/{categories}/Pose_transformed/{p}.npy', num_arr) # save
else:
continue
if data_Bbox_2d[k]['id'] == catergories_occ_array[categories][0]:
if data_Bbox_2d[k]['id'] == categories_occ_array[categories][0]:
bbox = bbox_2d(data_Bbox_2d[k])
for scenario_loop in scenarios:
if not destination_folders[scenario_loop] == "dont_save" :
......@@ -147,7 +152,7 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl
else:
continue
if data_Bbox_3d[k]['id'] == catergories_occ_array[categories][0]:
if data_Bbox_3d[k]['id'] == categories_occ_array[categories][0]:
bbox3d_size = data_Bbox_3d[k]['bbox']['size']
for scenario_loop in scenarios:
if not destination_folders[scenario_loop] == "dont_save" :
......@@ -155,18 +160,38 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl
else:
continue
id = catergories_occ_array[categories][0]
id = categories_occ_array[categories][0]
img = cv2.imread(f"{data_name}/Instance_Segmentation/{p}.png", cv2.IMREAD_UNCHANGED) # plt.imread(path)
instance_img = instance(img, id)
for scenario_loop in scenarios:
if not destination_folders[scenario_loop] == "dont_save" :
cv2.imwrite(f"{data_name}/{destination_folders[scenario_loop]}/{categories}/Instance_Mask/{p}.png", 255*instance_img)
id_obj = 0.0
res = [id_obj]
image = cv2.imread(f"{data_name}/{destination_folders[scenario_loop]}/{categories}/Instance_Mask/{p}.png", 0)
image = image/255.0
contours, _ = cv2.findContours(image.astype(np.uint8), cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)
for l in range(len(contours[0])):
x = contours[0][l][0][0]/640.0
res.append(x)
y = contours[0][l][0][1]/480.0
res.append(y)
#id_obj += 1.0
#res_all.append(res)
a_file = open(f"{data_name}/{destination_folders[scenario_loop]}/{categories}/Labels/{p}.txt", "w")
#for row in res_all:
np.savetxt(a_file, np.array(res).reshape(1, len(res)))
a_file.close()
instance_img_resized = cv2.resize(instance_img, new_size)
for scenario_loop in scenarios:
if not destination_folders[scenario_loop] == "dont_save" :
cv2.imwrite(f"{data_name}/{destination_folders[scenario_loop]}/{categories}/Instance_Mask_resized/{p}.png", 255*instance_img_resized)
img = cv2.imread(f"{data_name}/RGB/{p}.png")
for scenario_loop in scenarios:
if not destination_folders[scenario_loop] == "dont_save" :
......@@ -178,7 +203,9 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl
np.set_printoptions(precision=15)
#for scenario_loop in scenarios:
pose = np.load(f'{data_name}/Generated/{categories}/Pose_transformed/{p}.npy')
for scenario_loop in scenarios:
if not destination_folders[scenario_loop] == "dont_save" :
pose = np.load(f'{data_name}/{destination_folders[scenario_loop]}/{categories}/Pose_transformed/{p}.npy')
R_exp = pose[0:3, 0:3]
tVec = pose[0:3, 3]
......@@ -186,7 +213,7 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl
center = fps_points.mean(0)
fps_points = np.append(fps_points, [center], axis=0)
points = process2(fps_points, R_exp, tVec, camera, img, vis)
out = [int(catergories_occ_array[categories][0])] #len have to be 1 !!
out = [int(categories_occ_array[categories][0])] #len have to be 1 !!
ind = 1
for point in points:
x = point[0][0] / img.shape[1]
......@@ -200,7 +227,7 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl
np.savetxt(f'{data_name}/{destination_folders[scenario_loop]}/{categories}/FPS/{p}.txt', np.array(out).reshape(1, len(out)))
points_resized = process2(fps_points, R_exp, tVec, camera_resized, img_resized, vis)
out_resized = [int(catergories_occ_array[categories][0])] #len have to be 1 !
out_resized = [int(categories_occ_array[categories][0])] #len have to be 1 !
ind_resized = 1
for point_resized in points_resized:
x_resized = point_resized[0][0] / img_resized.shape[1]
......
......@@ -12,11 +12,11 @@ def generate_folders(name, list_categories, scenario):
is_exist = os.path.exists(name)
if not is_exist:
os.mkdir(name)
folders = ["RGB", "RGB_Gen", "RGB_resized", "Meta_Gen", "Depth", "Mask", "Meta", "Pose", "Bbox_2d", "Bbox_2d_loose", "Bbox_3d", "Bbox_3d_Gen", "Instance_Segmentation", "Semantic_Segmentation", "Instance_Mask", "Instance_Mask_resized", "Occlusion", "Models", "Pose_transformed", "Bbox", "FPS", "FPS_resized"]
folders = ["RGB", "RGB_Gen", "RGB_resized", "Meta_Gen", "Depth", "Mask", "Meta", "Pose", "Bbox_2d", "Bbox_2d_loose", "Bbox_3d", "Bbox_3d_Gen", "Instance_Segmentation", "Semantic_Segmentation", "Instance_Mask", "Labels", "Instance_Mask_resized", "Occlusion", "Models", "Pose_transformed", "Bbox", "FPS", "FPS_resized"]
for f in folders:
is_exist = os.path.exists(f"{name}/{f}")
if not is_exist:
if f not in ["RGB_Gen", "RGB_resized", "Instance_Mask", "Instance_Mask_resized", "Meta_Gen", "Models", "Pose_transformed", "Bbox", "Bbox_3d_Gen", "FPS" , "FPS_resized"]:
if f not in ["RGB_Gen", "RGB_resized", "Instance_Mask", "Labels", "Instance_Mask_resized", "Meta_Gen", "Models", "Pose_transformed", "Bbox", "Bbox_3d_Gen", "FPS" , "FPS_resized"]:
os.mkdir(f"{name}/{f}")
else:
for cat in list_categories:
......
......@@ -3,9 +3,7 @@ import json
def compute_categories_id(data_name, world):
#Category = 'banana1'
#Category = 'pear2'
#Category = "orange2"
# Opening JSON file
f = open(f'{data_name}/Meta/{world}.json')
......@@ -16,49 +14,46 @@ def compute_categories_id(data_name, world):
# Iterating through the json
# list
catergories_label_to_id={}
catergories_id_to_label={}
catergories_instance_array_cat_to_id={}
catergories_instance_array_id_to_cat={}
categories_label_to_id={}
categories_id_to_label={}
categories_instance_array_cat_to_id={}
categories_instance_array_id_to_cat={}
for k in data['categories']:
catergories_label_to_id[k['label']]=k['id']
catergories_id_to_label[k['id']]=k['label']
catergories_instance_array_cat_to_id[k['label']]=[]
categories_label_to_id[k['label']]=k['id']
categories_id_to_label[k['id']]=k['label']
categories_instance_array_cat_to_id[k['label']]=[]
for k in data['objects']:
#print(k)
#catergories_instance_array[catergories_id_to_label[i['category_id']]]
catergories_instance_array_id_to_cat[k['id']] = catergories_id_to_label[k['category_id']]
catergories_instance_array_cat_to_id[catergories_id_to_label[k['category_id']]].append(k['id'])
# if i['category_id'] == id_category :
# print("Hello fruits instance")
# id_instances.append(i['id'])
# print(i['id'])
categories_instance_array_id_to_cat[k['id']] = categories_id_to_label[k['category_id']]
categories_instance_array_cat_to_id[categories_id_to_label[k['category_id']]].append(k['id'])
# Closing file
f.close()
return catergories_instance_array_id_to_cat, catergories_instance_array_cat_to_id, catergories_label_to_id
return categories_instance_array_id_to_cat, categories_instance_array_cat_to_id, categories_label_to_id
def compute_id_good_occ(data_name, count, catergories_instance_array_id_to_cat, catergories_instance_array_cat_to_id, Occ_wanted):
def compute_id_good_occ(data_name, count, categories_instance_array_id_to_cat, categories_instance_array_cat_to_id, Occ_wanted):
f2 = open(f'{data_name}/Occlusion/{count}.json')
data2 = json.load(f2)
catergories_occ_array = {}
categories_occ_array = {}
categories_array = {}
for cat in catergories_instance_array_cat_to_id :
#print(cat)
catergories_occ_array[cat] = []
for cat in categories_instance_array_cat_to_id :
categories_occ_array[cat] = []
categories_array[cat] = []
for i in data2:
if i['occlusion_value'] >= Occ_wanted :
catergories_occ_array[catergories_instance_array_id_to_cat[i['id']]].append(i['id'])
categories_occ_array[categories_instance_array_id_to_cat[i['id']]].append(i['id'])
elif i['occlusion_value'] >= 0.05 and i['occlusion_value'] < Occ_wanted:
categories_array[categories_instance_array_id_to_cat[i['id']]].append(i['id'])
# Closing file
f2.close()
return catergories_occ_array
\ No newline at end of file
return categories_occ_array, categories_array
\ No newline at end of file
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment