diff --git a/compute_features.py b/compute_features.py
index 215648eeb973132b67679c53f2751dc0137bbc34..0fb81d851a4862afdbdabe18c8099b485fc5476c 100644
--- a/compute_features.py
+++ b/compute_features.py
@@ -33,7 +33,6 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl
 
     for i in range(World_begin, World_begin + Nb_world): # worlds
 
-
         if i > 8000 :
             destination_folders["Worlds"] = f"Generated_Worlds_Testing"
         elif i > 6000 :
@@ -41,7 +40,7 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl
         else :
             destination_folders["Worlds"] = f"Generated_Worlds_Training"
         
-        catergories_instance_array_id_to_cat, catergories_instance_array_cat_to_id, catergories_label_to_id = compute_categories_id(data_name, i)
+        categories_instance_array_id_to_cat, categories_instance_array_cat_to_id, categories_label_to_id = compute_categories_id(data_name, i)
         
         for j in range(1, Nb_camera+1): # cameras
             p = ((i-1)*Nb_camera) + j
@@ -62,7 +61,7 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl
             else : 
                 destination_folders["Mix_all"] = f"Generated_Mix_all_dont_save"
 
-            catergories_occ_array = compute_id_good_occ(data_name, p, catergories_instance_array_id_to_cat, catergories_instance_array_cat_to_id, occ_target)
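+            # categories_occ_array: instance ids with occlusion_value >= occ_target; categories_array: ids with 0.05 <= occlusion_value < occ_target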
+            categories_occ_array, categories_array = compute_id_good_occ(data_name, p, categories_instance_array_id_to_cat, categories_instance_array_cat_to_id, occ_target)
 
             ### 3D Poses ###
             with open(f'{data_name}/Pose/{p}.json', 'r') as f:
@@ -84,9 +83,11 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl
                         with open(f'{data_name}/{destination_folder_loop}/Count_{p-1}.json') as f:
                             list_count_categories[scenario_loop][destination_folder_loop] = json.load(f)
 
+            #res_all = []
             for categories in list_categories:
-                if categories in catergories_occ_array.keys():
-                    Nb_instance = len(catergories_occ_array[categories])
+                if categories in categories_occ_array.keys():
+                    Nb_instance = len(categories_array[categories])
+                    Nb_instance_occ = len(categories_occ_array[categories])
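+                    # Nb_instance_occ counts instances with occlusion_value >= occ_target; Nb_instance counts those with 0.05 <= occlusion_value < occ_target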
 
                     for scenario_loop in scenarios:
 
@@ -103,8 +104,8 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl
 
                         meta['id_generated'] = list_count_categories[scenario_loop][destination_folders[scenario_loop]][categories][f"{Nb_instance}_instances"]
                         meta['id_original'] = p
-                        meta['id_category'] = catergories_label_to_id[categories] 
-                        meta['id_instance'] = catergories_occ_array[categories]
+                        meta['id_category'] = categories_label_to_id[categories] 
+                        meta['id_instance'] = categories_occ_array[categories]
                         meta['id_dataset'] = 1
                         meta['world'] = i
                         meta['camera'] = f"grabber_{j}"
@@ -122,10 +123,11 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl
                                 feeds[meta['id_generated']]=meta
                             with open(f'{data_name}/{destination_folders[scenario_loop]}/{categories}/Meta_Gen/{categories}_{p}.json', mode='w') as f:
                                 f.write(json.dumps(feeds, indent=4))    
+                        
+                    if Nb_instance_occ == 1 and Nb_instance == 0: # exactly one instance with occlusion_value >= occ_target and none in the 0.05 to occ_target range
 
-                    if (Nb_instance == 1):
                         for k in range(len(data_3D_pose)):
-                            if data_3D_pose[k]['id'] == catergories_occ_array[categories][0]:
+                            if data_3D_pose[k]['id'] == categories_occ_array[categories][0]:
                                 rpy = data_3D_pose[k]['pose']['rpy']
                                 rot = convert2(rpy)
                                 R_exp = transformation @ rot
@@ -135,11 +137,13 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl
                                 T_exp = transformation @ xyz
                                 T_exp = np.array(T_exp)
                                 num_arr = np.c_[R_exp, T_exp[0]]
-                                np.save(f'{data_name}/Generated/{categories}/Pose_transformed/{p}.npy', num_arr)  # save
+                                for scenario_loop in scenarios:
+                                    if not destination_folders[scenario_loop] == "dont_save" :
+                                        np.save(f'{data_name}/{destination_folders[scenario_loop]}/{categories}/Pose_transformed/{p}.npy', num_arr)  # save
                             else:
                                 continue
 
-                            if data_Bbox_2d[k]['id'] == catergories_occ_array[categories][0]:
+                            if data_Bbox_2d[k]['id'] == categories_occ_array[categories][0]:
                                 bbox = bbox_2d(data_Bbox_2d[k])
                                 for scenario_loop in scenarios:
                                     if not destination_folders[scenario_loop] == "dont_save" :
@@ -147,7 +151,7 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl
                             else:
                                 continue
 
-                            if data_Bbox_3d[k]['id'] == catergories_occ_array[categories][0]:
+                            if data_Bbox_3d[k]['id'] == categories_occ_array[categories][0]:
                                 bbox3d_size = data_Bbox_3d[k]['bbox']['size']
                                 for scenario_loop in scenarios:
                                     if not destination_folders[scenario_loop] == "dont_save" :
@@ -155,18 +159,38 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl
                             else:
                                 continue
 
-                        id = catergories_occ_array[categories][0]
+                        id = categories_occ_array[categories][0]
                         img = cv2.imread(f"{data_name}/Instance_Segmentation/{p}.png", cv2.IMREAD_UNCHANGED) # plt.imread(path)
 
                         instance_img = instance(img, id)
                         for scenario_loop in scenarios:
                             if not destination_folders[scenario_loop] == "dont_save" :
                                 cv2.imwrite(f"{data_name}/{destination_folders[scenario_loop]}/{categories}/Instance_Mask/{p}.png", 255*instance_img)
+
+                                id_obj = 0.0
+                                res = [id_obj]
+                                
+                                image = cv2.imread(f"{data_name}/{destination_folders[scenario_loop]}/{categories}/Instance_Mask/{p}.png", 0)
+                                image = image/255.0
+                                contours, _ = cv2.findContours(image.astype(np.uint8), cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)
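+                                # build one label row [id_obj, x1, y1, x2, y2, ...], normalizing contour points by the fixed 640x480 image size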
+                                for l in range(len(contours[0])):
+                                    x = contours[0][l][0][0]/640.0
+                                    res.append(x)
+                                    y = contours[0][l][0][1]/480.0
+                                    res.append(y)
+                                #id_obj += 1.0
+                                #res_all.append(res)
+
+                                a_file = open(f"{data_name}/{destination_folders[scenario_loop]}/{categories}/Labels/{p}.txt", "w")
+                                #for row in res_all:
+                                np.savetxt(a_file, np.array(res).reshape(1, len(res)))
+
+                                a_file.close()
+
                         instance_img_resized = cv2.resize(instance_img, new_size)
                         for scenario_loop in scenarios:
                             if not destination_folders[scenario_loop] == "dont_save" :
                                 cv2.imwrite(f"{data_name}/{destination_folders[scenario_loop]}/{categories}/Instance_Mask_resized/{p}.png", 255*instance_img_resized)
-
                         img = cv2.imread(f"{data_name}/RGB/{p}.png")
                         for scenario_loop in scenarios:
                             if not destination_folders[scenario_loop] == "dont_save" :
@@ -178,7 +202,9 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl
 
                         np.set_printoptions(precision=15)
                         #for scenario_loop in scenarios:
-                        pose = np.load(f'{data_name}/Generated/{categories}/Pose_transformed/{p}.npy')
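+                        # the same transformed pose was saved to every active split above, so loading any of those copies yields the identical array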
+                        for scenario_loop in scenarios:
+                            if not destination_folders[scenario_loop] == "dont_save" :
+                                pose = np.load(f'{data_name}/{destination_folders[scenario_loop]}/{categories}/Pose_transformed/{p}.npy')
                         R_exp = pose[0:3, 0:3]
                         tVec = pose[0:3, 3]
 
@@ -186,7 +212,7 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl
                         center = fps_points.mean(0)
                         fps_points = np.append(fps_points, [center], axis=0)
                         points = process2(fps_points, R_exp, tVec, camera, img, vis)
-                        out = [int(catergories_occ_array[categories][0])] #len have to be 1 !!
+                        out = [int(categories_occ_array[categories][0])] #len have to be 1 !!
                         ind = 1
                         for point in points:
                             x = point[0][0] / img.shape[1]
@@ -200,7 +226,7 @@ def process_compute(data_name, camera, camera_resized, new_size, Nb_camera, Worl
                                 np.savetxt(f'{data_name}/{destination_folders[scenario_loop]}/{categories}/FPS/{p}.txt',  np.array(out).reshape(1, len(out)))
 
                         points_resized = process2(fps_points, R_exp, tVec, camera_resized, img_resized, vis)
-                        out_resized = [int(catergories_occ_array[categories][0])] #len have to be 1 !
+                        out_resized = [int(categories_occ_array[categories][0])] #len have to be 1 !
                         ind_resized = 1
                         for point_resized in points_resized:
                             x_resized = point_resized[0][0] / img_resized.shape[1]
diff --git a/main.py b/main.py
index 9ccc58d94033046f2911d91937de452af12e2d64..4bc5e6c27db72c4a9c32797a0ee6fdafd2927d20 100644
--- a/main.py
+++ b/main.py
@@ -12,11 +12,11 @@ def generate_folders(name, list_categories, scenario):
     is_exist = os.path.exists(name)
     if not is_exist:
         os.mkdir(name)
-    folders = ["RGB", "RGB_Gen", "RGB_resized", "Meta_Gen", "Depth", "Mask", "Meta", "Pose", "Bbox_2d", "Bbox_2d_loose", "Bbox_3d", "Bbox_3d_Gen",  "Instance_Segmentation", "Semantic_Segmentation", "Instance_Mask", "Instance_Mask_resized", "Occlusion", "Models", "Pose_transformed", "Bbox", "FPS", "FPS_resized"]
+    folders = ["RGB", "RGB_Gen", "RGB_resized", "Meta_Gen", "Depth", "Mask", "Meta", "Pose", "Bbox_2d", "Bbox_2d_loose", "Bbox_3d", "Bbox_3d_Gen",  "Instance_Segmentation", "Semantic_Segmentation", "Instance_Mask", "Labels", "Instance_Mask_resized", "Occlusion", "Models", "Pose_transformed", "Bbox", "FPS", "FPS_resized"]
     for f in folders:
         is_exist = os.path.exists(f"{name}/{f}")
         if not is_exist:
-            if f not in ["RGB_Gen", "RGB_resized",  "Instance_Mask", "Instance_Mask_resized", "Meta_Gen", "Models", "Pose_transformed", "Bbox", "Bbox_3d_Gen", "FPS" , "FPS_resized"]:
+            if f not in ["RGB_Gen", "RGB_resized",  "Instance_Mask", "Labels", "Instance_Mask_resized", "Meta_Gen", "Models", "Pose_transformed", "Bbox", "Bbox_3d_Gen", "FPS" , "FPS_resized"]:
                 os.mkdir(f"{name}/{f}")
             else:
                 for cat in list_categories:
diff --git a/read_count.py b/read_count.py
new file mode 100644
index 0000000000000000000000000000000000000000..11eca90f9c9bdc448257403edbfc333ace3871c9
--- /dev/null
+++ b/read_count.py
@@ -0,0 +1,310 @@
+import os
+import numpy as np
+from prepare_data import reform_data
+from fps_alg import apply_fps
+from bbox_3d import get_3D_bbox
+from compute_features import process_compute
+import open3d as o3d
+from scipy.spatial import distance
+import argparse
+import json
+import pandas as pd
+import altair as alt
+
+
+def generate_folders(name, list_categories, scenario):
+    is_exist = os.path.exists(name)
+    if not is_exist:
+        os.mkdir(name)
+    folders = ["RGB", "RGB_Gen", "RGB_resized", "Meta_Gen", "Depth", "Mask", "Meta", "Pose", "Bbox_2d", "Bbox_2d_loose", "Bbox_3d", "Bbox_3d_Gen",  "Instance_Segmentation", "Semantic_Segmentation", "Instance_Mask", "Instance_Mask_resized", "Occlusion", "Models", "Pose_transformed", "Bbox", "FPS", "FPS_resized"]
+    for f in folders:
+        is_exist = os.path.exists(f"{name}/{f}")
+        if not is_exist:
+            if f not in ["RGB_Gen", "RGB_resized",  "Instance_Mask", "Instance_Mask_resized", "Meta_Gen", "Models", "Pose_transformed", "Bbox", "Bbox_3d_Gen", "FPS" , "FPS_resized"]:
+                os.mkdir(f"{name}/{f}")
+            else:
+                for cat in list_categories:
+                    is_exist2 = os.path.exists(f"{name}/Generated/{cat}")
+                    if not is_exist2:
+                        os.makedirs(f"{name}/Generated/{cat}")
+                    is_exist2 = os.path.exists(f"{name}/Generated/{cat}/Pose_transformed")
+                    if not is_exist2:
+                        os.makedirs(f"{name}/Generated/{cat}/Pose_transformed")
+                    for scenario in ["Worlds", "Cameras", "Mix_all"] :
+                        is_exist2 = os.path.exists(f"{name}/Generated_{scenario}_Training/{cat}/{f}")
+                        if not is_exist2:
+                            os.makedirs(f"{name}/Generated_{scenario}_Training/{cat}/{f}")
+                        is_exist2 = os.path.exists(f"{name}/Generated_{scenario}_Evaluating/{cat}/{f}")
+                        if not is_exist2:
+                            os.makedirs(f"{name}/Generated_{scenario}_Evaluating/{cat}/{f}")
+                        is_exist2 = os.path.exists(f"{name}/Generated_{scenario}_Testing/{cat}/{f}")
+                        if not is_exist2:
+                            os.makedirs(f"{name}/Generated_{scenario}_Testing/{cat}/{f}")
+                        is_exist2 = os.path.exists(f"{name}/Generated_{scenario}_dont_save/{cat}/{f}")
+                        if not is_exist2:
+                            os.makedirs(f"{name}/Generated_{scenario}_dont_save/{cat}/{f}")
+
+
+
+def calc_pts_diameter2(pts):
+    """Calculates the diameter of a set of 3D points (i.e. the maximum distance
+  between any two points in the set). Faster but requires more memory than
+  calc_pts_diameter.
+  :param pts: nx3 ndarray with 3D points.
+  :return: The calculated diameter.
+  """
+    dists = distance.cdist(pts, pts, 'euclidean')
+    diameter = np.max(dists)
+    return diameter
+
+
+def prep_df(df, name):
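+    """Reshape a wide counts table into long form with columns (c1, c2, values) and tag each row with the split name."""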
+    df = df.stack().reset_index()
+    df.columns = ['c1', 'c2', 'values']
+    df['DF'] = name
+    return df
+
+
+
+if __name__ == '__main__':    
+    # Create the parser
+    parser = argparse.ArgumentParser()
+    # Parse the argument
+    args = parser.parse_args()
+
+    scenario = "Worlds"
+
+    ### parameters ###
+    Categories = [] # to read
+    Nb_instance = 1
+    occ_target = 0.5
+
+    dataset_src = "/gpfsscratch/rech/uli/ubn15wo/dataset/s2rg/Fruits_all_medium/data/"
+    #dataset_src = "/media/mahmoud/E/Fruits_easy/data"
+    #dataset_src = "/media/gduret/DATA/dataset/s2rg/Fruits_all_medium/data"
+
+    choice = "low" # depth of rgb resolution datas
+    data_options = {"high": "ground_truth_rgb",
+                    "low": "ground_truth_depth"}
+    dataset_type = data_options[choice]
+    dataset_name = f"/gpfsscratch/rech/uli/ubn15wo/dataset/s2rg/Fruits_all_medium/GUIMOD_{choice}"
+    list_categories = [ "apple2" , "apricot", "banana1", "kiwi1", "lemon2", "orange2", "peach1", "pear2"]
+    #list_Nb_instance = [ "apple2" , "apricot", "banana1", "kiwi1", "lemon2", "orange2", "peach1", "pear2"]
+    
+
+    path_json = "Count_150000.json"
+
+    if os.path.isfile(path_json):
+        with open(path_json) as f:
+            list_count_categories = json.load(f)
+
+    scenarios = ["Worlds", "Cameras", "Mix_all"]
+
+    destination_folders_list = {}
+    for scenario_loop in scenarios:
+        destination_folders_list[scenario_loop] = [f"Generated_{scenario_loop}_Testing", f"Generated_{scenario_loop}_Evaluating", f"Generated_{scenario_loop}_Training", f"Generated_{scenario_loop}_dont_save" ]
+
+
+    print(list_count_categories)
+
+    stat_cat = {}
+
+    worlds_train=[]
+    worlds_eval=[]
+    worlds_test=[]
+    cameras_train=[]
+    cameras_eval=[]
+    cameras_test=[]
+    mix_train=[]
+    mix_eval=[]
+    mix_test=[]
+
+    array_apple=[]
+    array_apricot=[]
+    array_banana=[]
+    array_kiwi=[]
+    array_lemon=[]
+    array_orange=[]
+    array_peach=[]
+    array_pear=[]
+
+    stat_cat_inst = {}
+
+    for scenario_loop in scenarios :
+        stat_cat_inst[scenario_loop] = {}
+        for cat in list_categories :
+            stat_cat_inst[scenario_loop][cat] = {}
+
+
+    for cat in list_categories : 
+        stat_cat[cat] = {}
+        for scenario_loop in scenarios :
+            stat_cat[cat][scenario_loop] = {}
+            
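+            # gather the "1_instances" counts for each split folder and accumulate totals per instance count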
+            for destination_folder_loop in destination_folders_list[scenario_loop] : # [f"Generated_{scenario}_Testing", f"Generated_{scenario}_Evaluating", f"Generated_{scenario}_Training"] :
+                #print("scenario_loop : " , scenario_loop)
+                #print("destination_folder_loop : " , destination_folder_loop)
+                #print("cat : " , cat)
+                #print("list_count_categories[scenario_loop][destination_folder_loop][cat][1_instances] : " , list_count_categories[scenario_loop][destination_folder_loop][cat]["1_instances"])
+                #print(list_count_categories[scenario_loop][destination_folder_loop])
+                
+                if destination_folder_loop == "Generated_Worlds_Testing" :
+                    print("\nGenerated_Worlds_Testing")
+                    worlds_test.append(list_count_categories[scenario_loop][destination_folder_loop][cat]["1_instances"])
+                if destination_folder_loop == "Generated_Cameras_Testing" :
+                    print("\nGenerated_Cameras_Testing")
+                    cameras_test.append(list_count_categories[scenario_loop][destination_folder_loop][cat]["1_instances"])
+                if destination_folder_loop == "Generated_Mix_all_Testing" :
+                    print("\nGenerated_Mix_all_Testing")
+                    mix_test.append(list_count_categories[scenario_loop][destination_folder_loop][cat]["1_instances"])
+                if destination_folder_loop == "Generated_Worlds_Evaluating" :
+                    print("\nGenerated_Worlds_Evaluating")
+                    worlds_eval.append(list_count_categories[scenario_loop][destination_folder_loop][cat]["1_instances"])
+                if destination_folder_loop == "Generated_Cameras_Evaluating" :
+                    print("\nGenerated_Cameras_Evaluating")
+                    cameras_eval.append(list_count_categories[scenario_loop][destination_folder_loop][cat]["1_instances"])
+                if destination_folder_loop == "Generated_Mix_all_Evaluating" :
+                    print("\nGenerated_Mix_all_Evaluating")
+                    mix_eval.append(list_count_categories[scenario_loop][destination_folder_loop][cat]["1_instances"])
+                if destination_folder_loop == "Generated_Worlds_Training" :
+                    print("\nGenerated_Worlds_Training")
+                    worlds_train.append(list_count_categories[scenario_loop][destination_folder_loop][cat]["1_instances"])
+                if destination_folder_loop == "Generated_Cameras_Training" :
+                    print("\nGenerated_Cameras_Training")
+                    cameras_train.append(list_count_categories[scenario_loop][destination_folder_loop][cat]["1_instances"])
+                if destination_folder_loop == "Generated_Mix_all_Training" :
+                    print("\nGenerated_Mix_all_Training")
+                    mix_train.append(list_count_categories[scenario_loop][destination_folder_loop][cat]["1_instances"])
+
+
+                if cat in list_count_categories[scenario_loop][destination_folder_loop].keys() :
+                    stat_cat[cat][scenario_loop][destination_folder_loop] = list_count_categories[scenario_loop][destination_folder_loop][cat]["1_instances"]
+                #print(list_count_categories[scenario_loop][destination_folder_loop].keys())
+                if cat in list_count_categories[scenario_loop][destination_folder_loop].keys() : # guard: the dont_save split can be missing categories for the Worlds and Cameras scenarios
+                    for nb_inst in list_count_categories[scenario_loop][destination_folder_loop][cat].keys() : 
+                        if nb_inst in stat_cat_inst[scenario_loop][cat].keys() :
+                            stat_cat_inst[scenario_loop][cat][nb_inst] = stat_cat_inst[scenario_loop][cat][nb_inst] + list_count_categories[scenario_loop][destination_folder_loop][cat][nb_inst]
+                        else: 
+                            stat_cat_inst[scenario_loop][cat][nb_inst] = list_count_categories[scenario_loop][destination_folder_loop][cat][nb_inst]
+    #print(stat_cat)
+
+    print(stat_cat_inst)
+
+    print(worlds_train)
+    print(worlds_eval)
+    print(worlds_test)
+    print(cameras_train)
+    print(cameras_eval)
+    print(cameras_test)
+    print(mix_train)
+    print(mix_eval)
+    print(mix_test)
+
+
+    # for scenario_loop in scenarios :
+    #     for cat in list_categories :
+    #         for destination_folder_loop in destination_folders_list[scenario_loop] :
+
+    # stat_cat_inst[scenario_loop][cat][nb_inst]
+
+
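+    # reshape the per-split count lists into 3x8 tables (rows: World/Cameras/All, columns: fruit categories)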
+    df1_train=pd.DataFrame(np.resize(np.concatenate((np.array(worlds_train), np.array(cameras_train), np.array(mix_train)) ), (3,8)),index=["World", "Cameras", "All"],columns=[ "apple" , "apricot", "banana", "kiwi", "lemon", "orange", "peach", "pear"])
+    df2_eval=pd.DataFrame(np.resize(np.concatenate((np.array(worlds_eval), np.array(cameras_eval), np.array(mix_eval)) ), (3,8)),index=["World", "Cameras", "All"],columns=[ "apple" , "apricot", "banana", "kiwi", "lemon", "orange", "peach", "pear"])
+    df3_test=pd.DataFrame(np.resize(np.concatenate((np.array(worlds_test), np.array(cameras_test), np.array(mix_test)) ), (3,8)),index=["World", "Cameras", "All"],columns=[ "apple" , "apricot", "banana", "kiwi", "lemon", "orange", "peach", "pear"])
+
+
+    df1 = prep_df(df1_train, 'Train')
+    df2 = prep_df(df2_eval, 'Eval')
+    df3 = prep_df(df3_test, 'Test')
+
+    df = pd.concat([df1, df2, df3])
+
+
+    alt.renderers.enable('altair_viewer')
+
+    chart = alt.Chart(df).mark_bar().encode(
+
+    # tell Altair which field to group columns on
+    x=alt.X('c2:N', title=None),
+
+    # tell Altair which field to use as Y values and how to calculate
+    y=alt.Y('sum(values):Q',
+        axis=alt.Axis(
+            grid=False,
+            title=None)),
+
+    # tell Altair which field to use as the set of columns to be represented in each group
+    column=alt.Column('c1:N', title=None),
+
+    # tell Altair which field to use for color segmentation 
+    color=alt.Color('DF:N',
+            scale=alt.Scale(
+                # make it look pretty with an enjoyable color palette
+                range=['#96ceb4', '#ffcc5c','#ff6f69'],
+            ),
+        ))\
+    .configure_view(
+        # remove grid lines around column clusters
+        strokeOpacity=0    
+    )
+
+    chart.show()
+
+
+    with open(f'Count_Stat.json', mode='w') as f:
+        f.write(json.dumps(stat_cat, indent=4))
+    with open(f'Count_Stat_instance.json', mode='w') as f:
+        f.write(json.dumps(stat_cat_inst, indent=4))
diff --git a/utils.py b/utils.py
index b8f4b334e003e027307742ec8f7f69a73a306112..456fbc856c081d2971253278cf03bcb2783e56a0 100644
--- a/utils.py
+++ b/utils.py
@@ -3,9 +3,7 @@ import json
 
 
 def compute_categories_id(data_name, world):
-    #Category = 'banana1'
-    #Category = 'pear2'
-    #Category = "orange2"
+
     # Opening JSON file
     f = open(f'{data_name}/Meta/{world}.json')
     
@@ -16,49 +14,46 @@ def compute_categories_id(data_name, world):
     # Iterating through the json
     # list
 
-    catergories_label_to_id={}
-    catergories_id_to_label={}
-    catergories_instance_array_cat_to_id={}
-    catergories_instance_array_id_to_cat={}
+    categories_label_to_id={}
+    categories_id_to_label={}
+    categories_instance_array_cat_to_id={}
+    categories_instance_array_id_to_cat={}
 
     for k in data['categories']:
-        catergories_label_to_id[k['label']]=k['id']
-        catergories_id_to_label[k['id']]=k['label']
-        catergories_instance_array_cat_to_id[k['label']]=[]
+        categories_label_to_id[k['label']]=k['id']
+        categories_id_to_label[k['id']]=k['label']
+        categories_instance_array_cat_to_id[k['label']]=[]
 
     for k in data['objects']:
-        #print(k)
-        #catergories_instance_array[catergories_id_to_label[i['category_id']]]
-        catergories_instance_array_id_to_cat[k['id']] = catergories_id_to_label[k['category_id']]
-        catergories_instance_array_cat_to_id[catergories_id_to_label[k['category_id']]].append(k['id'])
-        # if i['category_id'] == id_category :
-        #     print("Hello fruits instance")
-        #     id_instances.append(i['id'])
-        #     print(i['id']) 
+        categories_instance_array_id_to_cat[k['id']] = categories_id_to_label[k['category_id']]
+        categories_instance_array_cat_to_id[categories_id_to_label[k['category_id']]].append(k['id'])
 
     # Closing file
     f.close()
 
-    return catergories_instance_array_id_to_cat, catergories_instance_array_cat_to_id, catergories_label_to_id
+    return categories_instance_array_id_to_cat, categories_instance_array_cat_to_id, categories_label_to_id
 
 
 
-def compute_id_good_occ(data_name, count, catergories_instance_array_id_to_cat, catergories_instance_array_cat_to_id, Occ_wanted):
+def compute_id_good_occ(data_name, count, categories_instance_array_id_to_cat, categories_instance_array_cat_to_id, Occ_wanted):
 
     f2 = open(f'{data_name}/Occlusion/{count}.json')
 
     data2 = json.load(f2)
-    catergories_occ_array = {}
+    categories_occ_array = {}
+    categories_array = {}
 
-    for cat in catergories_instance_array_cat_to_id :
-        #print(cat)
-        catergories_occ_array[cat] = []
+    for cat in categories_instance_array_cat_to_id :
+        categories_occ_array[cat] = []
+        categories_array[cat] = []
 
     for i in data2:
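+        # route each instance id by occlusion_value: >= Occ_wanted goes to categories_occ_array, [0.05, Occ_wanted) goes to categories_array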
         if i['occlusion_value'] >= Occ_wanted :
-            catergories_occ_array[catergories_instance_array_id_to_cat[i['id']]].append(i['id'])
+            categories_occ_array[categories_instance_array_id_to_cat[i['id']]].append(i['id'])
+        elif i['occlusion_value'] >= 0.05 and i['occlusion_value'] < Occ_wanted:
+            categories_array[categories_instance_array_id_to_cat[i['id']]].append(i['id'])
 
     # Closing file
     f2.close()
 
-    return catergories_occ_array
\ No newline at end of file
+    return categories_occ_array, categories_array
\ No newline at end of file