From ce668fb0b481f253dc3f4744f54b0225ed69876c Mon Sep 17 00:00:00 2001
From: George Marchment <georgemarchment@yahoo.fr>
Date: Fri, 21 Feb 2025 09:22:02 +0100
Subject: [PATCH] Added system where it constantly checks the process
 dependency graph after a modification + tried to debug something -> doesn't
 quite work

---
 run_tests.py                             |  2 +-
 src/graph.py                             | 74 ++++------------------
 src/operation.py                         |  4 +-
 src/outils_graph.py                      | 80 +++++++++++++++++++++++-
 src/workflow.py                          | 80 +++++++++++++++++-------
 tests/test_workflows_simple_duplicate.py | 36 +++++------
 6 files changed, 168 insertions(+), 108 deletions(-)

diff --git a/run_tests.py b/run_tests.py
index 9cb454f..5f799f7 100755
--- a/run_tests.py
+++ b/run_tests.py
@@ -29,7 +29,7 @@ class TestWorkflows(unittest.TestCase):
         def test_wf{num}_simple_duplicate(self):
             w = Workflow(f"tests/ressources/workflows/{num}", display_info=False, duplicate=True)
             w.initialise()
-            self.assertTrue(w.check_if_equal('tests/ressources/workflows/{num}/specification_graph.json'))
+            self.assertTrue(w.check_if_json_equal_to_full_structure('tests/ressources/workflows/{num}/specification_graph.json'))
         """
 
         script+=text
diff --git a/src/graph.py b/src/graph.py
index 44b3d19..c04159d 100644
--- a/src/graph.py
+++ b/src/graph.py
@@ -33,6 +33,8 @@ class Graph():
         self.initialised = False
 
 
+    def get_process_dependency_graph(self):
+        return self.dico_process_dependency_graph
     
 
     def initialise(self, processes_2_remove = []):
@@ -80,7 +82,7 @@ class Graph():
 
 
             self.get_dependency_graph()
-            self.get_process_dependency_graph()
+            self.intialise_process_dependency_graph()
 
             
             #self.networkX_wo_operations = self.get_networkx_graph(self.dico_process_dependency_graph, self.networkX_wo_operations)
@@ -151,10 +153,8 @@ class Graph():
     def get_specification_graph_wo_orphan_operations_wo_labels(self, filename = "specification_wo_orphan_operations_wo_labels", render_graphs = True):
         generate_graph(self.get_output_dir()/'graphs'/filename, graph_dico_wo_orphan_operations(self.full_dico), label_edge=False, label_node=False, render_graphs = render_graphs)
 
-    def get_process_dependency_graph_dico(self):
-        return self.dico_process_dependency_graph
 
-    def get_process_dependency_graph(self):
+    def intialise_process_dependency_graph(self):
         self.intia_link_dico()
 
         #Function that replicates the workflow's structure wo the operations in the nodes
@@ -220,7 +220,7 @@ class Graph():
             json.dump(self.dico_process_dependency_graph, output_file, indent=4)
 
     
-    def render_graph_wo_operations(self, filename = "process_dependency_graph", render_graphs = True):
+    def render_process_dependency_graph(self, filename = "process_dependency_graph", render_graphs = True):
         generate_graph(self.get_output_dir()/'graphs'/filename, self.dico_process_dependency_graph, render_graphs = render_graphs, label_edge=False, label_node=False)
     
 
@@ -474,12 +474,7 @@ class Graph():
     #============================
 
     def initialise_flattened_dico(self, dico):
-        self.dico_flattened = {}
-        self.dico_flattened["nodes"] = []
-        self.dico_flattened["edges"] = []
-        #This will stay empty -> it's just so we can use the same function
-        self.dico_flattened["subworkflows"] = []
-        flatten_dico(dico, self.dico_flattened)
+        self.dico_flattened = get_flatten_dico(dico)
         #for node in dico["nodes"]:
         #    self.dico_flattened["nodes"].append(node)
         #for edge in dico["edges"]:
@@ -670,7 +665,7 @@ class Graph():
 
     def get_topogical_order(self, clusters):
         #if(self.get_process_dependency_graph_dico()=={}):
-        #    self.get_process_dependency_graph()  
+        #    self.intialise_process_dependency_graph()  
         link_dico = copy.deepcopy(self.link_dico)
         sorted_nodes = topological_sort(link_dico)
         clusters_sorted = []
@@ -738,60 +733,17 @@ class Graph():
         return elements
     
     #Method that checks if a specified structute is the same than the workflows
-    #WARNING: The method i'm using isn't perfect (i'm not claiming it is)-> but it works well enough for what i want to use it for:)
-    def check_if_equal(self, file, processes_2_remove=[]):
+    def check_if_json_equal_to_full_structure(self, file, processes_2_remove=[]):
         if(not self.initialised):
             self.initialise(processes_2_remove=processes_2_remove)
         spec_graph_wfA = self.full_dico
         with open(file) as json_file:
             spec_graph_wfB = json.load(json_file)
-
-
-        def translate_dico(dico):
-            names_already_given = []
-            def get_ids_2_nodes(dico, ids_2_nodes):
-                for node in dico['nodes']:
-                    already_in, index = True, 0
-                    #We assume the name is not already given 
-                    while(already_in):
-                        if("src.operation.Operation" in node["id"]):
-                            val = f"operation_{node['xlabel']}_{index}"
-                        elif("src.process.Process" in node["id"]):
-                            val = f"process_{node['name']}_{index}"
-                        if(val in names_already_given):
-                            index+=1
-                        else:
-                            already_in = False
-                            names_already_given.append(val)
-                            ids_2_nodes[node["id"]] = val
-                for sub in dico['subworkflows']:
-                    get_ids_2_nodes(dico['subworkflows'][sub], ids_2_nodes)
-            
-            ids_2_nodes={}
-            get_ids_2_nodes(dico, ids_2_nodes=ids_2_nodes)
-
-            def rewrite(dico, rewritten):
-                for node in dico['nodes']:
-                    rewritten["nodes"].append(ids_2_nodes[node['id']])
-                for edge in dico['edges']:
-                    rewritten["edges"].append({"A": ids_2_nodes[edge['A']], "B": ids_2_nodes[edge['B']]})
-                for sub in dico['subworkflows']:
-                    temp = {}
-                    temp["nodes"] = []
-                    temp["edges"] = []
-                    temp["subworkflows"] = {}
-                    rewrite(dico["subworkflows"][sub], temp)
-                    rewritten["subworkflows"][sub] = temp
-
-            translated = {}
-            translated["nodes"] = []
-            translated["edges"] = []
-            translated["subworkflows"] = {}
-            rewrite(dico, translated)
-            return translated
-        
-        #TO do that we rewrite the structure using a commun language (without using the ids) -> then just check if the translated structures are the same
-        return translate_dico(spec_graph_wfA) ==translate_dico(spec_graph_wfB)
+        return check_if_equal(spec_graph_wfA, spec_graph_wfB)
+    
+    def check_if_process_dependendy_is_equivalent_to_other_without_subworkflows(self, dico):
+        A, B = get_flatten_dico(self.dico_process_dependency_graph), get_flatten_dico(dico)
+        return check_if_equal(A, B)
 
 
             
diff --git a/src/operation.py b/src/operation.py
index 3b2254a..b14e178 100644
--- a/src/operation.py
+++ b/src/operation.py
@@ -208,8 +208,8 @@ class Operation(Executor):
                 channels = origin.get_channels_from_name_inside_level(name)
             if(channels==[]):
                 channels = origin.get_channels_from_name_above_level(name)
-            if(channels==[]):
-                channels = origin.get_channels_from_name_other_blocks_on_same_level(name)
+            #if(channels==[]):
+            #    channels = origin.get_channels_from_name_other_blocks_on_same_level(name)
             if(channels==[]):
                 channel = Channel(name=name, origin=self.origin)
                 origin.add_channel(channel)
diff --git a/src/outils_graph.py b/src/outils_graph.py
index 9dad12f..d2a2e00 100644
--- a/src/outils_graph.py
+++ b/src/outils_graph.py
@@ -202,7 +202,7 @@ def get_number_simple_loops(link_dico):
             nb += 1
     return nb
 
-def generate_graph(filename, param_dico, label_node = True, label_edge = True, render_graphs = True, dot = True, mermaid = True, root = False, relevant_nodes = -1):
+def generate_graph(filename, param_dico, label_node = True, label_edge = True, render_graphs = True, dot = True, mermaid = False, root = False, relevant_nodes = -1):
     dico = copy.deepcopy(param_dico)
     if(root):
         outputs = get_output_nodes(dico)
@@ -462,14 +462,20 @@ def get_longest_distance(graph):
 #
 #    return number_paths_source_2_sink, longest_path, smallest_path
 
+def get_flatten_dico(dico):
+    dico_flattened = {}
+    dico_flattened["nodes"] = []
+    dico_flattened["edges"] = []
+    dico_flattened["subworkflows"] = {}
+    return flatten_dico_rec(dico, dico_flattened)
 
-def flatten_dico(dico, dico_flattened):
+def flatten_dico_rec(dico, dico_flattened):
     for node in dico["nodes"]:
         dico_flattened["nodes"].append(node)
     for edge in dico["edges"]:
         dico_flattened["edges"].append(edge)
     for subworkflow in dico["subworkflows"]:
-        flatten_dico(dico["subworkflows"][subworkflow], dico_flattened)
+        flatten_dico_rec(dico["subworkflows"][subworkflow], dico_flattened)
     return dico_flattened
 
 #==================================================
@@ -1007,5 +1013,73 @@ def get_subworkflows_names(dico, val= []):
         val=get_subworkflows_names(dico["subworkflows"][sub], val)
     return val
 
+#WARNING: The method I'm using isn't perfect (I'm not claiming it is) -> but it works well enough for what I want to use it for :)
+def check_if_equal(dicoA, dicoB):
+    def translate_dico(dico):
+        names_already_given = []
+        def get_ids_2_nodes(dico, ids_2_nodes):
+            for node in dico['nodes']:
+                already_in, index = True, 0
+                #We assume the name is not already given 
+                while(already_in):
+                    if("src.operation.Operation" in node["id"]):
+                        val = f"operation_{node['xlabel']}__££__{index}"
+                    elif("src.process.Process" in node["id"]):
+                        val = f"process_{node['name']}__££__{index}"
+                    if(val in names_already_given):
+                        index+=1
+                    else:
+                        already_in = False
+                        names_already_given.append(val)
+                        ids_2_nodes[node["id"]] = val
+            for sub in dico['subworkflows']:
+                get_ids_2_nodes(dico['subworkflows'][sub], ids_2_nodes)
+        
+        ids_2_nodes={}
+        get_ids_2_nodes(dico, ids_2_nodes=ids_2_nodes)
+
+        #We have to sort the lists if they are not in the same order 
+        #I don't want to use "set" cause it would remove the duplicate cases
+        def sort(tab):
+            try:
+                #Case it's a list of words
+                #Nodes
+                tab.sort()
+                return tab
+            except:
+                #Edges
+                temp = []
+                for e in tab:
+                    temp.append(f"{e['A']}__$$__{e['B']}")
+                temp.sort()
+                new_tab = []
+                for e in temp:
+                    A, B = e.split("__$$__")
+                    new_tab.append({"A":A, "B":B})
+                return new_tab
+                
+        def rewrite(dico, rewritten):
+            for node in dico['nodes']:
+                rewritten["nodes"].append(ids_2_nodes[node['id']].split("__££__")[0])
+            rewritten["nodes"] = sort(rewritten["nodes"])
+            for edge in dico['edges']:
+                rewritten["edges"].append({"A": ids_2_nodes[edge['A']].split("__££__")[0], "B": ids_2_nodes[edge['B']].split("__££__")[0]})
+            rewritten["edges"] = sort(rewritten["edges"])
+            for sub in dico['subworkflows']:
+                temp = {}
+                temp["nodes"] = []
+                temp["edges"] = []
+                temp["subworkflows"] = {}
+                rewrite(dico["subworkflows"][sub], temp)
+                rewritten["subworkflows"][sub] = temp
+        translated = {}
+        translated["nodes"] = []
+        translated["edges"] = []
+        translated["subworkflows"] = {}
+        rewrite(dico, translated)
+        return translated
+        
+    #To do that we rewrite the structure using a common language (without using the ids) -> then just check if the translated structures are the same
+    return translate_dico(dicoA) ==translate_dico(dicoB)
 
 
diff --git a/src/workflow.py b/src/workflow.py
index 83906e3..d15e45b 100644
--- a/src/workflow.py
+++ b/src/workflow.py
@@ -4,7 +4,7 @@ from .nextflow_file import Nextflow_File
 from .ro_crate import RO_Crate
 from . import constant
 from .outils import is_git_directory, format_with_tabs, replace_thing_by_call, replace_group1, group_together_ifs, extract_curly, remove_extra_jumps, get_channels_to_add_in_false_conditions, extract_conditions
-from .outils_graph import flatten_dico, initia_link_dico_rec, get_number_cycles
+from .outils_graph import get_flatten_dico, initia_link_dico_rec, get_number_cycles, generate_graph
 from .outils_annotate import get_tools_commands_from_user_for_process
 from .bioflowinsighterror import BioFlowInsightError
 from .graph import Graph
@@ -192,14 +192,20 @@ George Marchment, Bryan Brancotte, Marie Schmit, Frédéric Lemoine, Sarah Cohen
         self.graph.initialise(processes_2_remove = self.processes_2_remove)
         self.graph.get_specification_graph(render_graphs = render_graphs)
 
+    def generate_process_dependency_graph(self, render_graphs = True):
+        self.iniatilise_tab_processes_2_remove()
+        self.graph.initialise(processes_2_remove = self.processes_2_remove)
+        self.graph.render_process_dependency_graph(render_graphs = render_graphs)
+
     #TODO -> update this
     def generate_all_graphs(self, render_graphs = True):
         self.generate_specification_graph(render_graphs = render_graphs)
+        self.generate_process_dependency_graph(render_graphs = render_graphs)
 
     #Method that checks if a given graph sepcification is an isomorphism with the workflows
-    def check_if_equal(self, file):
+    def check_if_json_equal_to_full_structure(self, file):
         self.iniatilise_tab_processes_2_remove()
-        return self.graph.check_if_equal(file, processes_2_remove = self.processes_2_remove)
+        return self.graph.check_if_json_equal_to_full_structure(file, processes_2_remove = self.processes_2_remove)
 
     ###########################
     #    Generate test data
@@ -353,7 +359,7 @@ George Marchment, Bryan Brancotte, Marie Schmit, Frédéric Lemoine, Sarah Cohen
 
             #Putting || back
             code = code.replace("$OR$", "||")
-
+            self.rewrite_and_initialise(code, self.processes_2_remove)
             return code
     
     #This methods generates a random set of processes to consider as relavant 
@@ -363,13 +369,14 @@ George Marchment, Bryan Brancotte, Marie Schmit, Frédéric Lemoine, Sarah Cohen
 
         #Random value between 0 and 1, centered at 0.5
         def get_value():
-            check = True
-            val = -1
-            while(check):
-                check = False
-                val = random.gauss(0.5, 0.1)
-                if(val<0 or val>1):
-                    check = True
+            #check = True
+            #val = -1
+            #while(check):
+            #    check = False
+            #    val = random.gauss(0.5, 0.1)
+            #    if(val<0 or val>1):
+            #        check = True
+            val = random.random()
             return val
 
         if(self.duplicate):
@@ -514,15 +521,28 @@ George Marchment, Bryan Brancotte, Marie Schmit, Frédéric Lemoine, Sarah Cohen
         return subs
 
 
-    def rewrite_and_initialise(self, code):
+    def rewrite_and_initialise(self, code, processes_2_remove):
+        temp_process_dependency_graph = self.graph.get_process_dependency_graph() 
+        temp_spec_graph = self.graph.full_dico    
         #Write new code in temporary file
         temp_file = self.get_output_dir()/f"temp_{str(self)[-7:-2]}.nf"
         with open(temp_file, "w") as file:
             file.write(code)
         
         #Replace old analysis with new analysis (simplified code)
-        self.__init__(str(temp_file), display_info = False, duplicate=True)
+        self.__init__(str(temp_file), display_info = False, duplicate=True, processes_2_remove=processes_2_remove)
         self.initialise()
+        self.graph.initialise(processes_2_remove = self.processes_2_remove)
+        if(not self.graph.check_if_process_dependendy_is_equivalent_to_other_without_subworkflows(temp_process_dependency_graph)):
+            generate_graph(self.get_output_dir()/ "debug" /"spec_graph_OG", temp_spec_graph, render_graphs = True)
+            generate_graph(self.get_output_dir()/ "debug" /"spec_graph", self.graph.full_dico, render_graphs = True)
+            generate_graph(self.get_output_dir()/ "debug" /"process_dependency_graph_OG", temp_process_dependency_graph, render_graphs = True)
+            generate_graph(self.get_output_dir()/ "debug" /"process_dependency_graph", self.graph.get_process_dependency_graph() , render_graphs = True)
+            f = open(self.get_output_dir()/ "debug" / "rewritten.nf", "w")
+            f.write(code)
+            f.close()
+            raise Exception("Something went wrong: The flat dependency graph is not the same!")
+
 
     def check_relevant_processes_in_workflow(self, relevant_processes):
         #Check all relevat processes are in wf
@@ -553,6 +573,7 @@ George Marchment, Bryan Brancotte, Marie Schmit, Frédéric Lemoine, Sarah Cohen
         
         #REPLACE HEADER TAKES
         subworkflow_takes = subworklfow.get_takes()
+        print(subworklfow.get_name(), subworkflow_takes)
         parameters = OG_call.get_parameters()
         if(len(subworkflow_takes)!=len(parameters)):
             raise Exception("This shouldn't happen -> the same number of parameters should be kept")
@@ -563,7 +584,7 @@ George Marchment, Bryan Brancotte, Marie Schmit, Frédéric Lemoine, Sarah Cohen
             takes = subworkflow_takes[i].get_gives()[0]
             #Here we're checking that the input inside and outside the subworkflow are the same
             if(takes.get_code()!=param.get_code(get_OG = True)):
-                new_header+=f"{takes.get_code()} = {param.get_code(get_OG = True)}"
+                new_header+=f"{takes.get_code()} = {param.get_code(get_OG = True)}\n"
 
         temp_code = code
         code = code.replace(OG_call.get_code(get_OG = True), f"{new_header}\n\n{OG_body}", 1)
@@ -624,14 +645,17 @@ George Marchment, Bryan Brancotte, Marie Schmit, Frédéric Lemoine, Sarah Cohen
                 raise Exception("This shouldn't happen")
             
         takes = []
+        names_added = []
         for channel in channels_2_sources:
             if(set(channels_2_sources[channel]).intersection(things_added_in_cluster)!=set(channels_2_sources[channel])):
-                takes.append(channel) 
+                print(channel.get_code())
+                if(channel.get_name() not in names_added):
+                    takes.append(channel)
+                    names_added.append(channel.get_name())
         return takes
     
     #This Function returns the channels the subworkflow (things_added_in_cluster) emits (other things depend on)
     def get_emits(self, things_added_in_cluster):
-        emits = []  
         channel_2_sink = {}
         #Basiccaly this is a deco of channels to opeartions -> when the value is an empty list 
         #This means that the channel is totally definied in the subworkflow -> so we are searching for 
@@ -650,9 +674,13 @@ George Marchment, Bryan Brancotte, Marie Schmit, Frédéric Lemoine, Sarah Cohen
             else:
                 raise Exception("This shouldn't happen")
 
+        emits = []  
+        names_added = []
         for channel in channel_2_sink:
             if(set(channel_2_sink[channel]).intersection(things_added_in_cluster)!=set(channel_2_sink[channel])):
-                emits.append(channel)
+                if(channel.get_name() not in names_added):
+                    emits.append(channel)
+                    names_added.append(channel.get_name())
         return emits
 
 
@@ -662,11 +690,10 @@ George Marchment, Bryan Brancotte, Marie Schmit, Frédéric Lemoine, Sarah Cohen
     def convert_workflow_2_user_view(self, relevant_processes = []):
         if(self.get_DSL()=="DSL1"):
             code = self.convert_to_DSL2()
-            self.rewrite_and_initialise(code)
 
         if(self.duplicate):
             code = self.simplify_workflow_code()
-            self.rewrite_and_initialise(code)
+            self.rewrite_and_initialise(code, self.processes_2_remove)
             
             
             
@@ -736,8 +763,9 @@ George Marchment, Bryan Brancotte, Marie Schmit, Frédéric Lemoine, Sarah Cohen
                 #Rewrite broken subworkflows
                 sub = broken_subworkflows[0]
                 code = self.rewrite_subworkflow_call(code, sub)
-                self.rewrite_and_initialise(code)
+                self.rewrite_and_initialise(code, self.processes_2_remove)
                 #Get the clusters and the code
+                #TODO -> remove the generate_all_graphs call -> it is not necessary 
                 self.generate_all_graphs()
                 self.generate_user_view(relevant_processes = relevant_processes, processes_2_remove =  [])
                 clusters = self.graph.get_clusters_from_user_view()
@@ -903,10 +931,12 @@ George Marchment, Bryan Brancotte, Marie Schmit, Frédéric Lemoine, Sarah Cohen
                                 conditions_in_subworkflow = list(conditions_in_subworkflow.keys())
 
 
-
+                    #The problem is here!!!!
+                    print(name)
                     #TAKE
                     #Adding take parameters on the inside of the subworkflow
                     takes_param = self.get_takes(things_added_in_cluster)
+                    print()
                     new_param_names, index, old_param_names = [], 1, []
                     for param in takes_param:
                         param_name = f"param_{name}_{index}"
@@ -1044,8 +1074,12 @@ George Marchment, Bryan Brancotte, Marie Schmit, Frédéric Lemoine, Sarah Cohen
             
             #Putting || back
             code = code.replace("$OR$", "||")
-            
-            return remove_extra_jumps(format_with_tabs(code))
+            code = remove_extra_jumps(format_with_tabs(code))
+            f = open(self.get_output_dir()/ "debug" / "rewritten.nf", "w")
+            f.write(code)
+            f.close()
+            self.rewrite_and_initialise(code, self.processes_2_remove)
+            return code
             #return code
             #
             ##So basically when retriving a thing (process or subworkflow)
diff --git a/tests/test_workflows_simple_duplicate.py b/tests/test_workflows_simple_duplicate.py
index 730c593..268ebf0 100644
--- a/tests/test_workflows_simple_duplicate.py
+++ b/tests/test_workflows_simple_duplicate.py
@@ -7,90 +7,90 @@ class TestWorkflows(unittest.TestCase):
         def test_wfwf6_simple_duplicate(self):
             w = Workflow(f"tests/ressources/workflows/wf6", display_info=False, duplicate=True)
             w.initialise()
-            self.assertTrue(w.check_if_equal('tests/ressources/workflows/wf6/specification_graph.json'))
+            self.assertTrue(w.check_if_json_equal_to_full_structure('tests/ressources/workflows/wf6/specification_graph.json'))
         
         def test_wfwf1_simple_duplicate(self):
             w = Workflow(f"tests/ressources/workflows/wf1", display_info=False, duplicate=True)
             w.initialise()
-            self.assertTrue(w.check_if_equal('tests/ressources/workflows/wf1/specification_graph.json'))
+            self.assertTrue(w.check_if_json_equal_to_full_structure('tests/ressources/workflows/wf1/specification_graph.json'))
         
         def test_wfwf18_simple_duplicate(self):
             w = Workflow(f"tests/ressources/workflows/wf18", display_info=False, duplicate=True)
             w.initialise()
-            self.assertTrue(w.check_if_equal('tests/ressources/workflows/wf18/specification_graph.json'))
+            self.assertTrue(w.check_if_json_equal_to_full_structure('tests/ressources/workflows/wf18/specification_graph.json'))
         
         def test_wfwf13_simple_duplicate(self):
             w = Workflow(f"tests/ressources/workflows/wf13", display_info=False, duplicate=True)
             w.initialise()
-            self.assertTrue(w.check_if_equal('tests/ressources/workflows/wf13/specification_graph.json'))
+            self.assertTrue(w.check_if_json_equal_to_full_structure('tests/ressources/workflows/wf13/specification_graph.json'))
         
         def test_wfwf8_simple_duplicate(self):
             w = Workflow(f"tests/ressources/workflows/wf8", display_info=False, duplicate=True)
             w.initialise()
-            self.assertTrue(w.check_if_equal('tests/ressources/workflows/wf8/specification_graph.json'))
+            self.assertTrue(w.check_if_json_equal_to_full_structure('tests/ressources/workflows/wf8/specification_graph.json'))
         
         def test_wfwf3_simple_duplicate(self):
             w = Workflow(f"tests/ressources/workflows/wf3", display_info=False, duplicate=True)
             w.initialise()
-            self.assertTrue(w.check_if_equal('tests/ressources/workflows/wf3/specification_graph.json'))
+            self.assertTrue(w.check_if_json_equal_to_full_structure('tests/ressources/workflows/wf3/specification_graph.json'))
         
         def test_wfwf2_simple_duplicate(self):
             w = Workflow(f"tests/ressources/workflows/wf2", display_info=False, duplicate=True)
             w.initialise()
-            self.assertTrue(w.check_if_equal('tests/ressources/workflows/wf2/specification_graph.json'))
+            self.assertTrue(w.check_if_json_equal_to_full_structure('tests/ressources/workflows/wf2/specification_graph.json'))
         
         def test_wfwf5_simple_duplicate(self):
             w = Workflow(f"tests/ressources/workflows/wf5", display_info=False, duplicate=True)
             w.initialise()
-            self.assertTrue(w.check_if_equal('tests/ressources/workflows/wf5/specification_graph.json'))
+            self.assertTrue(w.check_if_json_equal_to_full_structure('tests/ressources/workflows/wf5/specification_graph.json'))
         
         def test_wfwf16_simple_duplicate(self):
             w = Workflow(f"tests/ressources/workflows/wf16", display_info=False, duplicate=True)
             w.initialise()
-            self.assertTrue(w.check_if_equal('tests/ressources/workflows/wf16/specification_graph.json'))
+            self.assertTrue(w.check_if_json_equal_to_full_structure('tests/ressources/workflows/wf16/specification_graph.json'))
         
         def test_wfwf12_simple_duplicate(self):
             w = Workflow(f"tests/ressources/workflows/wf12", display_info=False, duplicate=True)
             w.initialise()
-            self.assertTrue(w.check_if_equal('tests/ressources/workflows/wf12/specification_graph.json'))
+            self.assertTrue(w.check_if_json_equal_to_full_structure('tests/ressources/workflows/wf12/specification_graph.json'))
         
         def test_wfwf10_simple_duplicate(self):
             w = Workflow(f"tests/ressources/workflows/wf10", display_info=False, duplicate=True)
             w.initialise()
-            self.assertTrue(w.check_if_equal('tests/ressources/workflows/wf10/specification_graph.json'))
+            self.assertTrue(w.check_if_json_equal_to_full_structure('tests/ressources/workflows/wf10/specification_graph.json'))
         
         def test_wfwf9_simple_duplicate(self):
             w = Workflow(f"tests/ressources/workflows/wf9", display_info=False, duplicate=True)
             w.initialise()
-            self.assertTrue(w.check_if_equal('tests/ressources/workflows/wf9/specification_graph.json'))
+            self.assertTrue(w.check_if_json_equal_to_full_structure('tests/ressources/workflows/wf9/specification_graph.json'))
         
         def test_wfwf7_simple_duplicate(self):
             w = Workflow(f"tests/ressources/workflows/wf7", display_info=False, duplicate=True)
             w.initialise()
-            self.assertTrue(w.check_if_equal('tests/ressources/workflows/wf7/specification_graph.json'))
+            self.assertTrue(w.check_if_json_equal_to_full_structure('tests/ressources/workflows/wf7/specification_graph.json'))
         
         def test_wfwf4_simple_duplicate(self):
             w = Workflow(f"tests/ressources/workflows/wf4", display_info=False, duplicate=True)
             w.initialise()
-            self.assertTrue(w.check_if_equal('tests/ressources/workflows/wf4/specification_graph.json'))
+            self.assertTrue(w.check_if_json_equal_to_full_structure('tests/ressources/workflows/wf4/specification_graph.json'))
         
         def test_wfwf15_simple_duplicate(self):
             w = Workflow(f"tests/ressources/workflows/wf15", display_info=False, duplicate=True)
             w.initialise()
-            self.assertTrue(w.check_if_equal('tests/ressources/workflows/wf15/specification_graph.json'))
+            self.assertTrue(w.check_if_json_equal_to_full_structure('tests/ressources/workflows/wf15/specification_graph.json'))
         
         def test_wfwf17_simple_duplicate(self):
             w = Workflow(f"tests/ressources/workflows/wf17", display_info=False, duplicate=True)
             w.initialise()
-            self.assertTrue(w.check_if_equal('tests/ressources/workflows/wf17/specification_graph.json'))
+            self.assertTrue(w.check_if_json_equal_to_full_structure('tests/ressources/workflows/wf17/specification_graph.json'))
         
         def test_wfwf14_simple_duplicate(self):
             w = Workflow(f"tests/ressources/workflows/wf14", display_info=False, duplicate=True)
             w.initialise()
-            self.assertTrue(w.check_if_equal('tests/ressources/workflows/wf14/specification_graph.json'))
+            self.assertTrue(w.check_if_json_equal_to_full_structure('tests/ressources/workflows/wf14/specification_graph.json'))
         
         def test_wfwf11_simple_duplicate(self):
             w = Workflow(f"tests/ressources/workflows/wf11", display_info=False, duplicate=True)
             w.initialise()
-            self.assertTrue(w.check_if_equal('tests/ressources/workflows/wf11/specification_graph.json'))
+            self.assertTrue(w.check_if_json_equal_to_full_structure('tests/ressources/workflows/wf11/specification_graph.json'))
         
\ No newline at end of file
-- 
GitLab