Commit 000bebff authored by George Marchment

Got a one-to-one mapping between the output of the user view + rewrite. The only exception is that the user-view graph may sometimes have extra edges, because it is possible that two operations are grouped together (which isn't shown in the process dependency graph).
parent 91379d23
Pipeline #14680 failed in 4 minutes and 38 seconds
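To make the remark about extra edges concrete, here is a minimal, hypothetical sketch (toy node names, not the project's real dico structures): when two operations are grouped into a single user-view node, the merged node inherits the union of both operations' edges, which can produce a connection that has no counterpart in the process dependency graph.

def merge_nodes(edges, group, merged_name):
    #Contract every node in `group` into a single node called `merged_name`
    new_edges = set()
    for a, b in edges:
        a = merged_name if a in group else a
        b = merged_name if b in group else b
        if(a != b):
            new_edges.add((a, b))
    return new_edges

#Toy graph: process P1 feeds operation op1, and operation op2 feeds process P2.
#op1 and op2 are unrelated, so the process dependency graph relates P1 and P2 in no way.
edges = {("P1", "op1"), ("op2", "P2")}

#If the user view groups op1 and op2 together, the merged node is reached from P1 and
#reaches P2 -> a connection the process dependency graph does not show.
print(merge_nodes(edges, {"op1", "op2"}, "op1+op2"))
#{('P1', 'op1+op2'), ('op1+op2', 'P2')} (set order may vary)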
@@ -430,7 +430,7 @@ class Graph():
    #GENERATE USER VIEW
    #============================
    def get_user_view_graph(self, relevant_processes = [], use_process_dependency_graph = False):
    def get_user_view_graph(self, relevant_processes = [], use_process_dependency_graph = False, alias_2_tools = {}):
        #For now i'm only gonna work from the flattened dico
        if(use_process_dependency_graph):
            self.initialise_flattened_dico(self.dico_process_dependency_graph)
@@ -438,7 +438,7 @@ class Graph():
            self.initialise_flattened_dico(self.full_dico)
        dico = remove_artificial_nodes(self.dico_flattened)
        self.user_view, self.new_nodes_user_view = relev_user_view_builder(dico, relevant_modules=relevant_processes)
        self.user_view, self.new_nodes_user_view = relev_user_view_builder(dico, relevant_modules=relevant_processes, alias_2_tools = alias_2_tools)
        with open(self.get_output_dir()/ "graphs/user_view.json", 'w') as output_file :
            json.dump(self.user_view, output_file, indent=4)
@@ -451,9 +451,9 @@ class Graph():
        #return self.user_view, user_view_with_subworkflows
        return self.user_view
    def generate_user_view(self, relevant_processes = [], render_graphs = True, use_process_dependency_graph = False):
    def generate_user_view(self, relevant_processes = [], render_graphs = True, use_process_dependency_graph = False, alias_2_tools = {}):
        #user_view, user_view_with_subworkflows = self.get_user_view_graph(relevant_processes = relevant_processes)
        user_view = self.get_user_view_graph(relevant_processes = relevant_processes, use_process_dependency_graph = use_process_dependency_graph)
        user_view = self.get_user_view_graph(relevant_processes = relevant_processes, use_process_dependency_graph = use_process_dependency_graph, alias_2_tools = alias_2_tools)
        #self.user_view_with_subworkflows = user_view_with_subworkflows
        generate_graph(self.get_output_dir()/'graphs'/"user_view", user_view, label_edge=True, label_node=True, render_graphs = render_graphs, root = False, relevant_nodes = copy.deepcopy(relevant_processes))
        #generate_graph(self.get_output_dir()/'graphs'/"user_view_with_subworkflows", user_view_with_subworkflows, label_edge=True, label_node=True, render_graphs = render_graphs, root = False, relevant_nodes = copy.deepcopy(relevant_processes))
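A minimal, hypothetical usage sketch of how the new alias_2_tools parameter is meant to be threaded through (the Graph instance, process aliases and tool names below are made up for illustration; only the keyword arguments mirror the signatures above):

#`graph` is assumed to be an already-built Graph instance
alias_2_tools = {
    "FASTQC": ["fastqc"],        #process alias -> tools it runs
    "CUSTOM_FILTER": ["python"]  #a custom script, treated as important by the builder
}
graph.generate_user_view(
    relevant_processes = ["FASTQC"],
    render_graphs = True,
    use_process_dependency_graph = False, #work from the full flattened dico
    alias_2_tools = alias_2_tools         #forwarded to relev_user_view_builder via get_user_view_graph
)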
......
import graphviz
import copy
import numpy as np
import os
import json
process_id = "src.process.Process"
operation_id = "<src.operation.Operation"
@@ -808,18 +810,53 @@ def get_names_tab(dico, tab):
        final.append(names)
    return final
def get_name_new_node(new_nodes, relevant_modules):
def get_name_new_node(new_nodes, relevant_modules, tag, alias_2_tools):
    for r in relevant_modules:
        for new in new_nodes:
            if(r in new):
                return r
    #Arbitrary choice of choosing the name with the longest name
    longest_name = new_nodes[0][0]
    for name in new_nodes:
        if(len(longest_name)<len(name[0])):
            longest_name = name[0]
    return longest_name
    if(alias_2_tools=={}):
        #Arbitrary choice of choosing the name with the longest name
        longest_name = new_nodes[0][0]
        for name in new_nodes:
            if(len(longest_name)<len(name[0])):
                longest_name = name[0]
        return longest_name
    else:
        #We choose the process whose tool is the "rarest"
        names = []
        for name in new_nodes:
            process_name = name[0].split(tag)[0]
            if(process_name!=""):
                names.append(process_name)
        if(names==[]):
            return new_nodes[0][0]
        else:
            OG_path = os.getcwd()
            #Change working directory to the one of the file
            os.chdir("/".join((str(__file__).split("/")[:-1])))
            with open("../ressources/tool_2_nb_usage.json", 'r') as file:
                tool_2_nb_usage = json.load(file)
            os.chdir(OG_path)
            min_tool, min_process = np.inf, names[0]
            for alias in names:
                try:
                    tools = alias_2_tools[alias]
                except:
                    tools = [""]
                for t in tools:
                    try:
                        val = tool_2_nb_usage[t]
                        if(t in ['python', 'r', 'perl', 'julia']): #In this case it is a custom script -> assume it is important
                            val = 1
                    except:
                        val = 1
                    if(val<min_tool):
                        min_tool = val
                        min_process = alias
            return min_process
def check_same_elements(list1, list2):
    return set(list1)==set(list2)
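For reference, a minimal, self-contained sketch of the "rarest tool" rule introduced above (the aliases, tools and usage counts are invented; in the real code the counts come from ressources/tool_2_nb_usage.json): among the candidate aliases, the one whose tool has the lowest usage count wins, and script languages such as python or r count as 1 so that custom scripts are preferred.

import math

def rarest_alias(names, alias_2_tools, tool_2_nb_usage):
    #Unknown tools and script languages (i.e. custom scripts) default to a count of 1,
    #which makes them maximally "rare"
    min_count, chosen = math.inf, names[0]
    for alias in names:
        for tool in alias_2_tools.get(alias, [""]):
            if(tool in ['python', 'r', 'perl', 'julia']):
                count = 1
            else:
                count = tool_2_nb_usage.get(tool, 1)
            if(count < min_count):
                min_count, chosen = count, alias
    return chosen

#Invented example data
alias_2_tools = {"ALIGN": ["bwa"], "RARE_STEP": ["some_niche_tool"]}
tool_2_nb_usage = {"bwa": 5000, "some_niche_tool": 3}
print(rarest_alias(["ALIGN", "RARE_STEP"], alias_2_tools, tool_2_nb_usage)) #-> RARE_STEP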
@@ -835,7 +872,7 @@ def get_color_node(node, new_nodes):
    prop = 256- int(127*len(node)/max)
    return rgb_to_hex(prop, prop, prop)
def relev_user_view_builder(dico_param, relevant_modules):
def relev_user_view_builder(dico_param, relevant_modules, alias_2_tools):
    import time
    dico = copy.deepcopy(dico_param)
    tag = str(time.time())
@@ -963,7 +1000,7 @@ def relev_user_view_builder(dico_param, relevant_modules):
    new_dico["subworkflows"] = []
    for i in range(len(new_nodes)):
        new_nodes[i].sort()
        new_name = get_name_new_node(get_names_tab(dico, new_nodes[i]), relevant_modules)
        new_name = get_name_new_node(get_names_tab(dico, new_nodes[i]), relevant_modules, tag, alias_2_tools)
        name_printed = new_name.split(tag)[0]
        shape = "ellipse"
        if(name_printed==""):
......