diff --git a/compute_label.py b/compute_label.py
index 559f303b9a93cad0994b1bb815793e64a86eb2c9..60039e39300e52902a068c0ef0291d5c695e53fc 100644
--- a/compute_label.py
+++ b/compute_label.py
@@ -2,6 +2,7 @@ import numpy as np
 import yaml
 import os
 import argparse
+import shutil
 
 
 if __name__ == "__main__":
@@ -10,10 +11,12 @@ if __name__ == "__main__":
     # Add an argument
     parser.add_argument('--path_dataset', type=str, required=True)
     parser.add_argument('--target_folder', type=str, required=True)
+    parser.add_argument('--path_DF_data', type=str, required=True)
     # Parse the argument
     args = parser.parse_args()
 
     target_folder = args.target_folder
+    path_data_df = args.path_DF_data
 
     #objects = ["apple2", "banana1", "pear2"]
     objects = ["apple2", "apricot", "banana1", "kiwi1", "lemon2", "orange2", "peach1", "pear2"]
@@ -25,54 +28,113 @@ if __name__ == "__main__":
     #path_data = "GUIMOD_low/Pose_transformed"
     #path_data = "/gpfsscratch/rech/uli/ubn15wo/dataset1/s2rg/Fruits_all_medium/GUIMOD_low/Generated_Worlds_Training/orange2/Pose_transformed/"
     #path_data = f"/gpfsscratch/rech/uli/ubn15wo/dataset1/s2rg/Fruits_all_medium/GUIMOD_low/{target_folder}/"
-    path_data = f"{args.path_dataset}/{target_folder}"
+
+    for split in ["_Training","_Evaluating"]:
+        path_data = f"{args.path_dataset}/{target_folder}{split}"
+
+        # Create the DenseFusion target root (no-op if it already exists).
+        os.makedirs(path_data_df, exist_ok=True)
+             
+             
+        for obj in objects:
+            print(f"copy {path_data}/{obj} to {path_data_df}")
+
+            # Create the per-object layout expected by DenseFusion/LINEMOD:
+            #   0<obj_id>/{depth,rgb,mask}   (no-op when already present)
+            os.makedirs(f"{path_data_df}/0{obj_ids[obj]}", exist_ok=True)
+            os.makedirs(f"{path_data_df}/0{obj_ids[obj]}/depth/", exist_ok=True)
+            os.makedirs(f"{path_data_df}/0{obj_ids[obj]}/rgb/", exist_ok=True)
+            os.makedirs(f"{path_data_df}/0{obj_ids[obj]}/mask/", exist_ok=True)
+
+
+
+            os.system(f"cp -r {path_data}/{obj}/Depth_resized/* {path_data_df}/0{obj_ids[obj]}/depth/")
+            os.system(f"cp -r {path_data}/{obj}/RGB_resized/* {path_data_df}/0{obj_ids[obj]}/rgb/")
+            os.system(f"cp -r {path_data}/{obj}/Instance_Mask_resized/* {path_data_df}/0{obj_ids[obj]}/mask/")
+
+            #shutil.copytree(f"{path_data}/{obj}/Depth_resized", f"{path_data_df}/0{obj_ids[obj]}/depth", dirs_exist_ok = True,)
+            #shutil.copytree(f"{path_data}/{obj}/RGB_resized", f"{path_data_df}/0{obj_ids[obj]}/rgb", dirs_exist_ok = True)
+            #shutil.copytree(f"{path_data}/{obj}/Instance_Mask_resized", f"{path_data_df}/0{obj_ids[obj]}/mask", dirs_exist_ok = True)
+
+        for obj in objects:
+            file = path_data + "/" + obj
+            #path_gt_obj = f"{obj_ids[obj]}/gt_new.yml"
+            path_gt_obj = f"{path_data_df}/0{obj_ids[obj]}/gt_{target_folder}{split}.yml"
+            #path_bbox = "GUIMOD_low/Bbox/" + obj
+            path_bbox = f"{path_data}/{obj}/Bbox/"
+
+            object_dicts = {}
+            print(file)
+            path_poses = file + "/Pose_transformed"
+            print("poses ", path_poses)
+            print("list : ", os.listdir(path_poses))
+            list_poses = os.listdir(path_poses)
+
+            print("-------------------------------------------------------------------------------------------------------")
+            print("-------------------------------------------------------------------------------------------------------")
+            
+            if split=="_Training" : 
+                with open(f"{path_data_df}/0{obj_ids[obj]}/train.txt", 'w') as f:
+                    for filename2 in os.listdir(path_poses):
+                        id_file2=filename2.split('.')[0]
+                        print(id_file2+".png")
+                        f.write(f'{id_file2}.png')
+                        f.write('\n')
+
+            elif split=="_Evaluating" : 
+                with open(f"{path_data_df}/0{obj_ids[obj]}/test.txt", 'w') as f:
+                    for filename2 in os.listdir(path_poses):
+                        id_file2=filename2.split('.')[0]
+                        print(id_file2+".png")
+                        f.write(f'{id_file2}.png')
+                        f.write('\n')
+            else : 
+                raise ValueError("split not in '_Training' or '_Evaluating'")
+
+            with open(path_gt_obj, 'w') as yaml_file:
+                for filename in os.listdir(path_poses):
+                #for i in range(num_rgb):
+                    data_file = path_poses + "/" + filename
+                    #print("filename : ", filename)
+                    #data_file = file + "/" + str(i) + ".npy"
+                    #print(list_RGB[4].split('.')[0])
+                    id_file=filename.split('.')[0]
+                    #print(id_file)
+                    #print(filename)
+                    #final_path_bbox = path_bbox + "/" + str(i) + ".txt"
+                    final_path_bbox = path_bbox + "/" + id_file + ".txt"
+                    #print(final_path_bbox)
+                    bbox_file = open(final_path_bbox, "r")
+                    line = bbox_file.readline()
+                    bbox_string = line.split(" ")
+                    bbox = [float(val) for val in bbox_string]
+                    bbox2 = [int(bbox[0] * 0.5), int(bbox[1] * (2 / 3)), int(bbox[2] * 0.5), int(bbox[3] * (2 / 3))]
+                    # print(bbox2)
+                    bbox_file.close()
+
+                    data = np.load(data_file)
+                    cam_R_m2c = np.concatenate((data[0, 0:3], data[1, 0:3], data[2, 0:3]), axis=0).tolist()
+                    cam_t_m2c = np.array((float(data[0, 3])*1000.0, float(data[1, 3])*1000.0, float(data[2, 3]*1000.0))).tolist()
+                    object_dicts[id_file] = [{"cam_R_m2c": cam_R_m2c, "cam_t_m2c": cam_t_m2c, "obj_bb": bbox2, "obj_id": obj_ids[obj]}]
+                yaml.dump(object_dicts, yaml_file, default_flow_style=None)
+
+
 
     for obj in objects:
-        file = path_data + "/" + obj
-        #path_gt_obj = f"{obj_ids[obj]}/gt_new.yml"
-        path_gt_obj = f"/gpfsscratch/rech/uli/ubn15wo/DenseFusion/datasets/linemod/Linemod_preprocessed/data/0{obj_ids[obj]}/gt_{target_folder}.yml"
-        #path_bbox = "GUIMOD_low/Bbox/" + obj
-        path_bbox = f"{path_data}/{obj}/Bbox/"
-
-        object_dicts = {}
-        print(file)
-        path_poses = file + "/Pose_transformed"
-        print("poses ", path_poses)
-        print("list : ", os.listdir(path_poses))
-        list_poses = os.listdir(path_poses)
-
-        print("-------------------------------------------------------------------------------------------------------")
-        print("-------------------------------------------------------------------------------------------------------")
-        with open(f"/gpfsscratch/rech/uli/ubn15wo/DenseFusion/datasets/linemod/Linemod_preprocessed/data/0{obj_ids[obj]}/split_file_{target_folder}.txt", 'w') as f:
-            for filename2 in os.listdir(path_poses):
-                id_file2=filename2.split('.')[0]
-                print(id_file2+".png")
-                f.write(f'{id_file2}.png')
-                f.write('\n')
-
-        with open(path_gt_obj, 'w') as yaml_file:
-            for filename in os.listdir(path_poses):
-            #for i in range(num_rgb):
-                data_file = path_poses + "/" + filename
-                print("filename : ", filename)
-                #data_file = file + "/" + str(i) + ".npy"
-                #print(list_RGB[4].split('.')[0])
-                id_file=filename.split('.')[0]
-                print(id_file)
-                print(filename)
-                #final_path_bbox = path_bbox + "/" + str(i) + ".txt"
-                final_path_bbox = path_bbox + "/" + id_file + ".txt"
-                print(final_path_bbox)
-                bbox_file = open(final_path_bbox, "r")
-                line = bbox_file.readline()
-                bbox_string = line.split(" ")
-                bbox = [float(val) for val in bbox_string]
-                bbox2 = [int(bbox[0] * 0.5), int(bbox[1] * (2 / 3)), int(bbox[2] * 0.5), int(bbox[3] * (2 / 3))]
-                # print(bbox2)
-                bbox_file.close()
-
-                data = np.load(data_file)
-                cam_R_m2c = np.concatenate((data[0, 0:3], data[1, 0:3], data[2, 0:3]), axis=0).tolist()
-                cam_t_m2c = np.array((float(data[0, 3])*1000.0, float(data[1, 3])*1000.0, float(data[2, 3]*1000.0))).tolist()
-                object_dicts[id_file] = [{"cam_R_m2c": cam_R_m2c, "cam_t_m2c": cam_t_m2c, "obj_bb": bbox2, "obj_id": obj_ids[obj]}]
-            yaml.dump(object_dicts, yaml_file, default_flow_style=None)
+
+        # Reading data from file1
+        with open(f"{path_data_df}/0{obj_ids[obj]}/gt_{target_folder}_Training.yml") as fp:
+            data_train = fp.read()
+        
+        # Reading data from file2
+        with open(f"{path_data_df}/0{obj_ids[obj]}/gt_{target_folder}_Evaluating.yml") as fp:
+            data_eval = fp.read()
+        
+        # Merge the two YAML dumps by plain text concatenation. This
+        # assumes the training and evaluating splits use disjoint frame
+        # ids; duplicate top-level keys would silently collide on load.
+        data_gt = data_train
+        data_gt += data_eval
+        
+        with open (f"{path_data_df}/0{obj_ids[obj]}/gt.yml", 'w') as fp:
+            fp.write(data_gt)
\ No newline at end of file