diff --git a/src/block.py b/src/block.py
index 77bb9c95ad5fe36cb611e3dbe23fcce1de5cb5a7..5cb9d46ae414e57ac7d62d9d5bb2d9c58d089bfc 100644
--- a/src/block.py
+++ b/src/block.py
@@ -84,6 +84,9 @@ class Block(Root):
             tab+=block.get_calls_same_level()
             tab+=block.get_calls_inside_level()
         return tab
+
+    def get_all_calls_from_root(self):
+        return self.origin.get_all_calls_from_root()
 
     #############
     # CHANNELS
diff --git a/src/executor.py b/src/executor.py
index d276bc0b7a903f0fdf28c8f221e593cdd5c0a768..bb090185d16ea9ce62abd60ca311e1634b69c132 100644
--- a/src/executor.py
+++ b/src/executor.py
@@ -287,6 +287,13 @@ class Executor(Nextflow_Building_Blocks):
             for c in call.get_all_calls():
                 if(c.first_element_called.get_alias()==name):
                     tab.append(c)
+        #If no calls are found then we search in all of root
+        if(len(tab)==0):
+            for call in self.origin.get_all_calls_from_root():
+                #call.initialise()
+                for c in call.get_all_calls():
+                    if(c.first_element_called.get_alias()==name):
+                        tab.append(c)
         return tab
 
     else:
diff --git a/src/operation.py b/src/operation.py
index 28e984bc4250a2fbf40186f66e1ccc922080822e..3cb360b4df00903e3d8b47757a8c27bd65e60bb5 100644
--- a/src/operation.py
+++ b/src/operation.py
@@ -928,6 +928,7 @@ class Operation(Executor):
                 index += 1
 
         temporary_index = 1
+        new_body = ""
         for origin in dico_origin_2_replace:
             if(len(dico_origin_2_replace[origin])<=1):
                 for val in dico_origin_2_replace[origin]:
@@ -937,23 +938,32 @@
                     code = code.replace(origin, val.simplify_code())
             #Case there are mutiple origins then:
             else:
-                #TODO -> fix this problem if you have the courage to -> see at what extend it is used
-                raise BioFlowInsightError("TODO (update): The output of a emit is ambiguous -> there are multiple instance where it exits")
-                #print(dico_origin_2_replace)
-                #for e in dico_origin_2_replace[origin]:
-                #    print(e.get_all_conditions())
-                temporary_index+=1
-
-        #for o in self.origins:
-        #    if(o.get_type()=="Call"):
-        #        #If it's not a function -> then we rewrite it
-        #        if(o.get_first_element_called().get_type()!="Function"):
-        #            to_add.append(add_origin_equals(o, index))
-        #            code = code.replace(o.get_code(get_OG=True), f"operation_{operation_id}_{index}")
-        #        elif(o.get_type()=="Emitted"):
-        #            code = code.replace(o.get_code(get_OG=True), o.simplify_code())
-        #        index += 1
+                #For example
+                #if(){
+                #    p1(ch1)
+                #} else {
+                #    p1(ch2)
+                #}
+                #val = p1.out -> when wanting to rewrite this operation
+                calls = {}
+                temporary_channel = f"temporary_val_{id(self)}_{temporary_index}"
+                for e in dico_origin_2_replace[origin]:
+                    try:
+                        calls[e.emitted_by]
+                    except KeyError:
+                        calls[e.emitted_by] = []
+                    calls[e.emitted_by].append(e.simplify_code())
+                for c in calls:
+                    #This is just one value
+                    if(len(c.get_all_conditions())>1):
+                        raise Exception("This shouldn't happen")
+                    for condition in c.get_all_conditions():
+                        new_body+=f"if({condition.get_value().strip()}) {{\n{temporary_channel} = {calls[c][0]}\n}}\n"
+                    new_body+=temporary_channel
+                code = code.replace(origin, new_body)
+                temporary_index+=1
+
 
         to_add.reverse()
         for c in to_add:
             code = f"{c}\n{code}"
diff --git a/src/root.py b/src/root.py
index 350055e1723d6e7f28bc420def29a79832b58fc2..60c76ee555b957848be74594589eaed4b81ea3b6 100644
--- a/src/root.py
+++ b/src/root.py
@@ -247,6 +247,11 @@ class Root(Nextflow_Building_Blocks):
         all_executors = self.get_executors_same_level()+self.get_inside_executors()
         for e in all_executors:
             calls[e] = ''
+
+    def get_all_calls_from_root(self):
+        dico = {}
+        self.get_all_calls_in_subworkflow(calls=dico)
+        return list(dico.keys())
 
 
     #############
diff --git a/src/workflow.py b/src/workflow.py
index cd71ce95ad994daa7beb4d0d8bf375b261348942..9b2873d424e9f125cd0f648e3c11516877f7a698 100644
--- a/src/workflow.py
+++ b/src/workflow.py
@@ -526,7 +526,7 @@ George Marchment, Bryan Brancotte, Marie Schmit, Frédéric Lemoine, Sarah Cohen
         return subs
 
 
-    def rewrite_and_initialise(self, code, processes_2_remove, render_graphs):
+    def rewrite_and_initialise(self, code, processes_2_remove, render_graphs, def_check_the_same = True):
         temp_process_dependency_graph = self.graph.get_process_dependency_graph()
         temp_spec_graph = self.graph.full_dico
 
@@ -547,7 +547,7 @@ George Marchment, Bryan Brancotte, Marie Schmit, Frédéric Lemoine, Sarah Cohen
         self.initialise()
         os.remove(temp_file)
         self.graph.initialise(processes_2_remove = self.processes_2_remove)
-        if(not self.graph.check_if_process_dependendy_is_equivalent_to_other_without_subworkflows(temp_process_dependency_graph)):
+        if(def_check_the_same and not self.graph.check_if_process_dependendy_is_equivalent_to_other_without_subworkflows(temp_process_dependency_graph)):
             if(render_graphs==True):
                 generate_graph(self.get_output_dir()/ "debug" /"spec_graph_OG", temp_spec_graph, render_graphs = True)
                 generate_graph(self.get_output_dir()/ "debug" /"spec_graph", self.graph.full_dico, render_graphs = True)
@@ -720,11 +720,13 @@ George Marchment, Bryan Brancotte, Marie Schmit, Frédéric Lemoine, Sarah Cohen
 
         if(self.get_DSL()=="DSL1"):
             code = self.convert_to_DSL2()
+            self.rewrite_and_initialise(code, self.processes_2_remove, render_graphs=render_graphs, def_check_the_same = False)
 
-        if(self.duplicate):
+        if(self.get_DSL()=="DSL2"):
             code = self.simplify_workflow_code()
             self.rewrite_and_initialise(code, self.processes_2_remove, render_graphs=render_graphs)
-
+
+        if(self.duplicate):
 
 
             #DETERMING WHICH SUBWORKFLOWS ARE BROKEN WITH THE CLUSTER