Commit 4da8821f authored by George Marchment

Fixed the problem I had with the subworkflows being broken or not

parent 64e13dcd
Pipeline #14271 failed in 2 minutes and 19 seconds
@@ -812,8 +812,8 @@ George Marchment, Bryan Brancotte, Marie Schmit, Frédéric Lemoine, Sarah Cohen
#I do not recommend that the dev uses the same name for the channels inside and outside the subworkflow
#Since the subworkflow's local channels become local channels at the upper level
def rewrite_subworkflow_call(seld, code, subworklfow):
print(subworklfow.get_name())
def rewrite_subworkflow_call(self, code, subworklfow):
#Remove the definition from the code
code = code.replace(subworklfow.get_code(), "")
OG_call = subworklfow.get_call()
@@ -838,35 +838,49 @@ George Marchment, Bryan Brancotte, Marie Schmit, Frédéric Lemoine, Sarah Cohen
code = code.replace(OG_call.get_code(get_OG = True), f"{new_header}\n\n{OG_body}")
#REPLACE THE EMITS
#TODO admittedly this code below is very ugly -> but it's functional -> update it
emits = subworklfow.get_emit()
for o in emits:
origin = o.get_origins()[0]
gives = o.get_gives()
#TODO finish this -> I think that the gives is empty
print(o.get_type())
print(o.get_code(), origin.get_code(), gives)
#print(code)
to_replace = []
all_executors = self.get_all_executors()
for exe in all_executors:
#We don't need to check the case call since the workflow has already been rewritten -> emits only appear in operations
if(exe.get_type()=="Operation"):
emited = exe.get_origins()
if(len(emited)==1):
emited = emited[0]
if(emited.get_type()=="Emitted"):
if(emited.get_emitted_by()==subworklfow):
if(emited.get_emits() not in emits):
raise Exception("This shouldn't happen -> since it is the actual subworkflow")
to_replace.append((exe.get_code(get_OG = True), f"{exe.get_gives()[0].get_code()} = {emited.get_emits().get_origins()[0].get_code()}"))
for r in to_replace:
old, new = r
#Case of channel == channel
if(new.split("=")[0].strip()==new.split("=")[1].strip()):
new = ''
code = code.replace(old, new)
return code
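#Illustrative sketch (not part of this commit) of the replacement step above: an emit
#rewrite of the form "ch = ch" is dropped, since reassigning a channel to itself is redundant.
#The helper name below is hypothetical.
def _drop_self_assignment(line):
    left, right = line.split("=", 1)
    return "" if left.strip() == right.strip() else line

#Example: the self-assignment disappears, any real rewrite is kept as-is.
assert _drop_self_assignment("results = results") == ""
assert _drop_self_assignment("out_ch = sub_results") == "out_ch = sub_results"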
def rewrite_and_initialise(self, code):
#Write new code in temporary file
temp_file = self.get_output_dir()/f"temp_{str(self)[-7:-2]}.nf"
with open(temp_file, "w") as file:
file.write(code)
#Replace old analysis with new analysis (simplified code)
self.__init__(str(temp_file), display_info = False, duplicate=True)
self.initialise(create_rocrate=False)
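#Illustration (assuming the class keeps Python's default repr) of the temp-file naming
#used above: str(self) ends in something like "0x7f3a2b4c5d60>", so str(self)[-7:-2]
#yields a short, quasi-unique suffix for the "temp_<suffix>.nf" file.
class _Dummy:
    pass

print(f"temp_{str(_Dummy())[-7:-2]}.nf")  #e.g. "temp_4c5d6.nf"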
#Converting the workflow to the user view only makes sense when the option duplicate is activated -> otherwise it doesn't make sense + it makes the analysis way more complicated
def convert_workflow_2_user_view(self, relevant_processes = []):
if(self.duplicate):
#Write new code in temporary file
code = self.simplify_workflow_code()
temp_file = self.get_output_dir()/f"temp_{str(self)[-7:-2]}.nf"
with open(temp_file, "w") as file:
file.write(code)
#Replace old analysis with new analysis (simplified code)
self.__init__(str(temp_file), display_info = False, duplicate=True)
self.initialise(create_rocrate=False)
self.rewrite_and_initialise(code)
#Get the clusters and the code
self.nextflow_file.generate_user_view(relevant_processes = relevant_processes, processes_2_remove = [])
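#Hypothetical usage sketch (the class name, constructor arguments and process names are
#assumptions inferred from the calls visible in this diff, not taken from the project docs):
#
#   w = Workflow("main.nf", display_info=False, duplicate=True)
#   w.initialise()
#   w.convert_workflow_2_user_view(relevant_processes=["fastqc", "multiqc"])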
@@ -927,51 +941,18 @@ George Marchment, Bryan Brancotte, Marie Schmit, Frédéric Lemoine, Sarah Cohen
return broken_subworkflows
broken_subworkflows = get_workflows_broken(subworkflow_2_executors, set_clusters_with_calls)
print(broken_subworkflows)
#Rewrite broken subworkflows
for sub in broken_subworkflows:
code = self.rewrite_subworkflow_call(code, sub)
#TODO -> this needs to be optimised
self.rewrite_and_initialise(code)
#Get the clusters and the code
self.nextflow_file.generate_user_view(relevant_processes = relevant_processes, processes_2_remove = [])
clusters = self.nextflow_file.graph.get_clusters_from_user_view()
1/0
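#Minimal sketch (an assumption, in the spirit of the commented-out exploration below) of
#what "broken" means here: a subworkflow is considered broken when its executors end up
#spread across more than one user-view cluster.
def _spans_multiple_clusters(sub_executors, clusters):
    hit = [cluster for cluster in clusters if set(cluster) & set(sub_executors)]
    return len(hit) > 1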
#def check_subworkflow_intact(original_sets, new_sets):
# results = {}
# for sub in original_sets:
# original = original_sets[sub]
# #temp = []
# #for new in new_sets:
# # print(original, new, new.issubset(original))
# # #temp.append(original.issubset(new))
# #print(temp)
# intact = all(new.issubset(original) for new in new_sets)
# results[sub] = intact
# return results
#print(check_subworkflow_intact(subworkflow_2_executors, set_clusters_with_calls))
1/0
#cluster_2_subworkflows = []
#print(clusters)
##Basically here I'm checking if a subworkflow is split
##TODO Check this -> I think in the case something is at the root -> it won't work
#for sub in self.get_subworkflows_called():
# clusters_in_which_elements_are_taken = []
# #TODO do this
# print(sub.get_name(), sub.get_executors())
# for exe in sub.get_executors():
# print(exe, exe.get_subworkflow_origin())
# #If there are multiple clusters and the number of clusters isn't equal to all the clusters -> it means that subworkflow is broken
# if(len(clusters_in_which_elements_are_taken)>1 and len(clusters_in_which_elements_are_taken)!=len(clusters)):
# print(sub.get_name())
#print(clusters)
#for c in clusters:
# for ele in c:
# print(ele.get_type())
#TODO -> need to break clusters here
#And redo analysis
#Get the clusters with the corresponding operations inside
#for i in range(len(clusters)):