Commit b01af3a4 authored by Guillaume Duret

flexible evaluation + more epochs

parent e275fe64
@@ -10,8 +10,8 @@ import os
 import sys
 from plyfile import PlyData, PlyElement
 
-def read_diameter(object_name):
-    filename = f'Generated_Worlds_/Generated/{class_name}/{class_name}_diameter.txt'
+def read_diameter(path, object_name):
+    filename = f'{path}/Generated/{object_name}/{object_name}_diameter.txt'
     with open(filename) as f:
         diameter_in_cm = float(f.readline())
         #return diameter_in_cm * 0.01
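Note: read_diameter now takes the dataset root explicitly instead of relying on a hard-coded folder and on a global class_name. A minimal usage sketch, assuming a hypothetical dataset root /data/MyDataset and the kiwi1 class:

    # Hypothetical paths, for illustration only.
    diameter = read_diameter('/data/MyDataset', 'kiwi1')
    # -> reads /data/MyDataset/Generated/kiwi1/kiwi1_diameter.txt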
@@ -246,7 +246,8 @@ def eval_pose(r_est, t_est, r_gt, t_gt, pc, k, diameter, sym=False):
 
 if __name__ == '__main__':
     ap = argparse.ArgumentParser()
-    ap.add_argument("--path_evaluation", type=str, required=True)
+    ap.add_argument("--path_data", type=str, required=True)
+    ap.add_argument("--folder_evaluation", type=str, required=True)
     ap.add_argument("-cls_name", "--class_name", type=str,
                     default='kiwi1',
                     help="[apple2, apricot, banana1, kiwi1, lemon2, orange2, peach1, pear2]")
@@ -257,9 +258,11 @@ if __name__ == '__main__':
     class_name = args["class_name"]
     symmetry = args["symmetry"]
+    path_data=args["path_data"]
+    folder_evaluation= args["folder_evaluation"]
 
-    basePath = args["path_evaluation"] + "/" + args["class_name"]
+    basePath = args["path_data"] + "/" + args["folder_evaluation"] + "/" + args["class_name"]
 
     #basePath = os.path.dirname(
     #    os.path.realpath(__file__)) + '/Generated_Worlds_/Generated_Worlds_Evaluating/' + class_name
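The evaluation root is now assembled from the new CLI arguments instead of a single --path_evaluation flag. A minimal sketch of how the pieces combine, with hypothetical values (the script name and dataset root below are illustrative; the evaluation folder mirrors the Generated_Worlds_Evaluating convention used elsewhere in the repo):

    # Hypothetical invocation:
    #   python evaluate.py --path_data /data/MyDataset \
    #       --folder_evaluation Generated_Worlds_Evaluating -cls_name kiwi1
    path_data = '/data/MyDataset'
    folder_evaluation = 'Generated_Worlds_Evaluating'
    class_name = 'kiwi1'
    basePath = path_data + "/" + folder_evaluation + "/" + class_name
    # -> '/data/MyDataset/Generated_Worlds_Evaluating/kiwi1'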
@@ -278,7 +281,7 @@ if __name__ == '__main__':
     print("len(data)", len(data))
     print("len(pc)", len(pc))
-    diameter = read_diameter(class_name)
+    diameter = read_diameter(path_data, class_name)
     print(diameter)
 
     for i in range(len(data)):
         pc[i][0], pc[i][1], pc[i][2] = data[i][0], data[i][1], data[i][2]
@@ -289,13 +292,15 @@ if __name__ == '__main__':
     count_add = 0
     count_iadd = 0
     count_proj = 0
-    length_data=len(os.listdir(f"{basePath}/Pose_prediction"))
+    dataset = path_data.split('/')[-1]
+    length_data=len(os.listdir(f"{basePath}/Pose_prediction{dataset}_{folder_evaluation}_{class_name}"))
     print("number of evaluating data :", length_data)
 
-    for files in os.listdir(f"{basePath}/Pose_prediction"):
+    for files in os.listdir(f"{basePath}/Pose_prediction{dataset}_{folder_evaluation}_{class_name}"):
         # ============== Loading Pose ===============
-        pose_est = np.load(f'{basePath}/Pose_prediction/{files}')
+        pose_est = np.load(f'{basePath}/Pose_prediction{dataset}_{folder_evaluation}_{class_name}/{files}')
         r_est = pose_est[:3, :3]
         t_est = np.array(pose_est[:3, 3]).reshape(3, 1)
         print("t_est", t_est)
@@ -318,7 +323,7 @@ if __name__ == '__main__':
             count_proj += 1
         if is_adi:
             count_iadd += 1
+    print("results for class : ", class_name)
     print(f"ADD_Res: {count_add / length_data}")
     print(f"ADI_Res: {count_iadd / length_data}")
     print(f"Proj_Res: {count_proj / length_data}")
@@ -675,10 +675,10 @@ class generatorClass: # simulates generator behaviour, unused
 
 modelsDict = {
     'uNet_classes': modelDictVal(uNet, data.classTrainingGenerator, tf.keras.losses.BinaryCrossentropy(), False, True,
-                                 epochs=50, lr=0.0001, augmentation=True),
+                                 epochs=80, lr=0.0001, augmentation=True),
     'stvNet_new_coords': modelDictVal(stvNetNew, data.coordsTrainingGenerator, tf.keras.losses.Huber(), True, False,
-                                      epochs=50, lr=0.0001, metrics=['mae', 'mse'], altLabels=False, augmentation=False)
+                                      epochs=200, lr=0.0001, metrics=['mae', 'mse'], altLabels=False, augmentation=False)
 }
 
 if __name__ == "__main__":
@@ -53,22 +53,13 @@ def getMean(hypDict): # get weighted average of coordinates
     return meanDict
 
-def predict_pose(class_name, image, fps_points):
+def predict_pose(class_name, image, fps_points, vecModel, classModel):
     nnInput = np.array([image])
 
-    # loading our model to predict unit vectors per pixel per keypoint on image
-    vecModel = models.stvNetNew(outVectors=True, outClasses=False)
-    vecModel.load_weights(f'models/stvNet_new_coords_{class_name}') # loading weights for standard labels model
-    vecModel.compile(optimizer=tf.keras.optimizers.Adam(), loss=tf.keras.losses.Huber())
-
-    # loading our class model for image segmentation
-    classModel = models.uNet(outVectors=False, outClasses=True)
-    classModel.load_weights(f'models/uNet_classes_{class_name}')
-    classModel.compile(optimizer=tf.keras.optimizers.Adam(), loss=tf.keras.losses.BinaryCrossentropy())
 
     #with tf.devica("cpu:0")
     vecPred = vecModel.predict(nnInput)[0]
     classPred = classModel.predict(nnInput)[0]
 
     # print("Vector Prediction shape: " + str(vecPred.shape))
     # print("Class Prediction shape: " + str(classPred.shape))
     # showImage(classPred) # let's see our class prediction output
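Design-wise, the weight loading and compile calls move out of predict_pose and into the caller (see the __main__ changes further down), so the two Keras models are presumably built once and reused across the whole evaluation set instead of being rebuilt for every image. A minimal sketch of the resulting call pattern, using names from the repo's own modules (models, data) and treating the image list as a placeholder:

    # Build the models once (as the updated __main__ now does), then reuse them.
    vecModel = models.stvNetNew(outVectors=True, outClasses=False)
    vecModel.load_weights(f'models/stvNet_new_coords_{class_name}')
    classModel = models.uNet(outVectors=False, outClasses=True)
    classModel.load_weights(f'models/uNet_classes_{class_name}')

    for image in images_ls:  # e.g. images loaded by data.getAllValDataFruits
        r_pre, t_pre = predict_pose(class_name, image, fps, vecModel, classModel)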
@@ -77,7 +68,7 @@ def predict_pose(class_name, image, fps_points):
     #print(classPred)
     population = np.where(classPred > 0.1)[:2] # .9
     population = list(zip(population[0], population[1]))
-    print(len(population)) # the number of class pixels found
+    print("Len Population : ", len(population)) # the number of class pixels found
     #print(population)
 
     # ====================
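As an aside, the thresholding step works because np.where on the (H, W, 1) class map returns one index array per axis; keeping the first two and zipping them yields (row, col) pixel coordinates. A tiny self-contained sketch, assuming the segmentation output is a score map in [0, 1]:

    import numpy as np

    # Toy (4, 4, 1) class prediction with a single confident pixel.
    classPred = np.zeros((4, 4, 1))
    classPred[1, 2, 0] = 0.8

    rows, cols = np.where(classPred > 0.1)[:2]   # drop the channel index array
    population = list(zip(rows, cols))           # -> [(1, 2)]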
@@ -144,29 +135,51 @@ def predict_pose(class_name, image, fps_points):
 
 if __name__ == '__main__':
     ap = argparse.ArgumentParser()
     ap.add_argument("-cls_name", "--class_name", type=str,
                     default='kiwi1',
-                    help="[kiwi1, pear2, banana1, orange, peach1]")
+                    help="[kiwi1, pear2, banana1, orange, peach1]", required=True)
+    ap.add_argument("--path_data", type=str, required=True)
+    ap.add_argument("--folder_evaluation", type=str, required=True)
     args = vars(ap.parse_args())
     class_name = args["class_name"]
+    path_data = args["path_data"]
+    folder_evaluation = args["folder_evaluation"]
     # class_name = 'pear'
 
-    basePath = os.path.dirname(os.path.realpath(__file__)) + '/Generated_Worlds_/Generated_Worlds_Evaluating/' + class_name
-    fps = np.loadtxt(f'Generated_Worlds_/Generated/{class_name}/{class_name}_fps_3d.txt')
+    #basePath = os.path.dirname(os.path.realpath(__file__)) + '/Generated_Worlds_/Generated_Worlds_Evaluating/' + class_name
+    basePath = f"{path_data}/{folder_evaluation}/{class_name}"
+    #fps = np.loadtxt(f'Generated_Worlds_/Generated/{class_name}/{class_name}_fps_3d.txt')
+    fps = np.loadtxt(f'{path_data}/Generated/{class_name}/{class_name}_fps_3d.txt')
 
-    images_ls, labels_ls, mask_ls, choice_ls = data.getAllValDataFruits(class_name)
-    print(len(images_ls))
+    #getAllValDataFruits(base_path, training_folder, evaluation_folder, modelClass='cat'):
+    images_ls, labels_ls, mask_ls, choice_ls = data.getAllValDataFruits(path_data, "Generated_Worlds_Training", folder_evaluation, class_name)
+    path_images=f"{path_data}/{folder_evaluation}/{class_name}/RGB_resized"
+    dataset = path_data.split('/')[-1]
 
+    if not os.path.exists(f"{basePath}/Pose_prediction{dataset}_{folder_evaluation}_{class_name}"):
+        os.makedirs(f"{basePath}/Pose_prediction{dataset}_{folder_evaluation}_{class_name}")
-    if not os.path.exists(f"{basePath}/Pose_prediction"):
-        os.makedirs(f"{basePath}/Pose_prediction")
 
+    # loading our model to predict unit vectors per pixel per keypoint on image
+    vecModel = models.stvNetNew(outVectors=True, outClasses=False)
+    vecModel.load_weights(f'models/stvNet_new_coords_{class_name}') # loading weights for standard labels model
+    vecModel.compile(optimizer=tf.keras.optimizers.Adam(), loss=tf.keras.losses.Huber())
+
+    # loading our class model for image segmentation
+    classModel = models.uNet(outVectors=False, outClasses=True)
+    classModel.load_weights(f'models/uNet_classes_{class_name}')
+    classModel.compile(optimizer=tf.keras.optimizers.Adam(), loss=tf.keras.losses.BinaryCrossentropy())
 
+    #for img in os.listdir(path_images):
     for i, img in enumerate(images_ls):
         img_id = choice_ls[i].split('.png')
         #img_id = img.split('.')
         img_id = int(img_id[0])
         print("id : ", img_id)
         try :
-            r_pre, t_pre = predict_pose(class_name, img, fps)
+            r_pre, t_pre = predict_pose(class_name, img, fps, vecModel, classModel)
             r = R.from_rotvec(r_pre.reshape(3, ))
             r_pre_mx = np.array(r.as_matrix())
             t_pre = np.array(t_pre).reshape(3, )
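For clarity, r_pre returned by predict_pose is a 3-element rotation vector (axis-angle, presumably from a PnP-style solver), which scipy converts to a 3x3 rotation matrix. A tiny sketch with illustrative values:

    import numpy as np
    from scipy.spatial.transform import Rotation as R

    # Illustrative rotation vector: 90 degrees about the z-axis.
    r_pre = np.array([[0.0], [0.0], [np.pi / 2]])
    r_pre_mx = R.from_rotvec(r_pre.reshape(3, )).as_matrix()  # 3x3 rotation matrix
    t_pre = np.array([0.1, 0.0, 0.5]).reshape(3, )            # illustrative translation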
@@ -174,10 +187,10 @@ if __name__ == '__main__':
 
             res = np.zeros((3, 4))
             res[:3, :3] = r_pre_mx
             res[:3, 3] = t_pre
-            print(res)
-            np.save(f'{basePath}/Pose_prediction/{img_id}.npy', res) # save
+            #print(res)
+            print("saving : ",img_id)
+            np.save(f'{basePath}/Pose_prediction{dataset}_{folder_evaluation}_{class_name}/{img_id}.npy', res) # save
 
         except :
-            print("The image is not good, mess than 50 pix segmentation")
+            print("The image is not good, less than 50 pix segmentation ? ")