Compare revisions

Changes are shown as if the source revision were being merged into the target revision.

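For reference, this merge-base comparison corresponds to a three-dot diff on the command line (the branch names target and source below are illustrative):

    git diff target...source   # changes introduced on source since its merge base with target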
Commits on Source (4)
Showing with 971 additions and 0 deletions
# Default ignored files
/shelf/
/workspace.xml
ANN_CLIQUES.py
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="Encoding">
    <file url="file://$PROJECT_DIR$/node2vec/src/graph/test.npy" charset="windows-1252" />
    <file url="file://$PROJECT_DIR$/node2vec/src/graph/test_Cliques.npy" charset="UTF-16" />
  </component>
</project>
\ No newline at end of file
<component name="InspectionProjectProfileManager">
  <settings>
    <option name="USE_PROJECT_PROFILE" value="false" />
    <version value="1.0" />
  </settings>
</component>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.7 (pythonProject2)" project-jdk-type="Python SDK" />
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectModuleManager">
    <modules>
      <module fileurl="file://$PROJECT_DIR$/.idea/walid.iml" filepath="$PROJECT_DIR$/.idea/walid.iml" />
    </modules>
  </component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="VcsDirectoryMappings">
    <mapping directory="$PROJECT_DIR$" vcs="Git" />
  </component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
  <component name="NewModuleRootManager">
    <content url="file://$MODULE_DIR$" />
    <orderEntry type="jdk" jdkName="Python 3.7 (pythonProject2)" jdkType="Python SDK" />
    <orderEntry type="sourceFolder" forTests="false" />
  </component>
</module>
\ No newline at end of file
import random
import os
import time
import numpy as np
import numba as nb
import networkx as nx

def delta(A, B, U, m):
    # One update sweep over the neuron states U of the Hopfield-like network.
    for i in nb.prange(len(U)):
        z = np.zeros(len(U))
        x = np.zeros(len(U))
        c = np.asarray([i + 1], dtype=str)
        if np.count_nonzero(V == 1) > 0:
            if c[0] in O.nodes():
                for j in range(len(U)):
                    m = np.asarray([i + 1, j + 1], dtype=str)
                    if V[j] != 0 and (m[1] in O.nodes() and O.number_of_edges(m[0], m[1]) != 0):
                        x[i] = x[i]
                    else:
                        x[i] = x[i] + V[j]
        U[i] = U[i] + (-x[i] + B * h(i, x[i]))

def h(i, r):
    if r + V[i] == 0:
        return 1
    else:
        return 0

def output(X):
    # Threshold the states X into the binary vector V.
    for i in range(len(X)):
        if X[i] > 0:
            V[i] = 1
        else:
            V[i] = 0

def CHANGE(A):
    # Once active neurons are found, remove the remaining edges of the working graph O.
    N = []
    E = A
    R = O.edges
    x = list(O.nodes())
    for i in range(len(U)):
        if U[i] > 0:
            print("true")
            N.append(i + 1)
    if len(N) > 0:
        for k in x:
            for v in x:
                if v in x and k in x and O.number_of_edges(k, v) > 0:
                    O.remove_edge(k, v)
    A = O.edges
    print("new len A", len(A))
    return A

def Remplire(i):
    # Keep the states of neurons that are both active and labelled as clique nodes.
    x = lab[i]
    for i in range(len(U)):
        if U[i] >= 0 and x[i] > 0:
            Ufin[i] = U[i]

def Arrange(x, i):
    t = 0
    y = lab[i]
    for i in range(len(x)):
        if y[i] == 1:
            x[i] = B
        else:
            x[i] = random.uniform(-400.5, -0.5)

lab = np.load("data/clique_2/labels.npy", allow_pickle=True)
dat = np.load("data/clique_2/sam.npy", allow_pickle=True)
start = time.time()
outputs = []
for i in range(len(lab)):
    print(dat[i])
    O = nx.Graph()
    O.add_edges_from(dat[i])
    m = np.array(O.nodes)
    size = O.number_of_nodes()
    print("====== Increasing embedding step =======")
    adj = np.count_nonzero(lab[i] == 1)
    size = len(lab[i])
    Ufin = np.random.uniform(-19, -1, size) * 0
    x = 1
    U = np.random.uniform(-19, -1, size)
    V = np.random.randint(1, size=size)
    B = (adj / (size * (len(dat[i]) * 2 / (size * (size - 1))))) * 20
    while len(dat[i]) > 0:
        x = x + 1
        U = np.random.uniform(-19, -1, size)
        delta(dat, B, U, m)
        output(U)
        dat[i] = CHANGE(dat[i])
        Remplire(i)
        O = nx.Graph()
        O.add_edges_from(dat[i])
        m = np.array(O.nodes)
    out = np.asarray(Ufin)
    Arrange(Ufin, i)
    output(Ufin)
    outputs.append(out)
    print("results")
    print(np.count_nonzero(Ufin > 0))
    print(np.count_nonzero(V == 1))
    print(np.count_nonzero(lab[i] == 1))
end = time.time()
print("====== End of increasing ======")
print("Time", end - start)
out = np.asarray(outputs)
print(out.shape)
np.save("INoutput_data.npy", out)  # save the outputs
import random
import os
import time
import numpy as np
import numba as nb
import networkx as nx

def delta(A, B, U, m, V, O):
    # One update sweep over the neuron states U of the Hopfield-like network.
    for i in nb.prange(len(U)):
        z = np.zeros(len(U))
        x = np.zeros(len(U))
        c = np.asarray([i + 1], dtype=str)
        if np.count_nonzero(V == 1) > 0:
            if c[0] in O.nodes():
                for j in range(len(U)):
                    m = np.asarray([i + 1, j + 1], dtype=str)
                    if V[j] != 0 and (m[1] in O.nodes() and O.number_of_edges(m[0], m[1]) != 0):
                        x[i] = x[i]
                    else:
                        x[i] = x[i] + V[j]
        U[i] = U[i] + (-x[i] + B * h(i, x[i], V))

def h(i, r, V):
    if r + V[i] == 0:
        return 1
    else:
        return 0

def output(X, V):
    # Threshold the states X into the binary vector V.
    for i in range(len(X)):
        if X[i] > 0:
            V[i] = 1
        else:
            V[i] = 0

def CHANGE(A, O, U):
    N = []
    E = A
    R = O.edges
    x = list(O.nodes())
    for i in range(len(U)):
        if U[i] > 0:
            N.append(i + 1)
    if len(N) > 0:
        for k in x:
            for v in x:
                if v in x and k in x and O.number_of_edges(k, v) > 0:
                    O.remove_edge(k, v)
    A = O.edges
    return A

def Remplire(U, Ufin, lab):
    for i in range(len(U)):
        if U[i] >= 0 and lab[i] > 0:
            Ufin[i] = U[i]
    """
    else:
        if lab[i] == 0:
            Ufin[i] = random.uniform(-400.5, -0.5)
        else:
            Ufin[i] = random.uniform(0.5, 400.5)
    """

def Arrange(lab, x, B, V):
    t = 0
    y = 0
    for i in range(len(x)):
        if lab[i] == 1:
            x[i] = B
            V[i] = 1
        else:
            x[i] = -B
            V[i] = 0

def PatternFinding(dat, lab):
    O = nx.Graph(dat)
    m = np.array(O.nodes)
    size = O.number_of_nodes()
    print("====== Increasing embedding step =======")
    adj = np.count_nonzero(lab == 1)
    size = len(lab)
    for i in range(1):
        Ufin = np.random.uniform(-1, 0, size) * 0
        # print("ufin", Ufin)
        # print(len(dat) * 2 / ((size - 1) * (size - 1)))
        x = 1
        U = np.random.uniform(-1, 0, size)
        V = np.random.randint(1, size=size)
        B = adj / (size * (len(list(O.edges)) * 2 / (size * (size - 1))))
        # print("B", B)
        Arrange(lab, Ufin, B, V)
        # print(np.count_nonzero(V == 1))
        # print(np.count_nonzero(lab == 1))
        """
        while len(dat) > 0:
            x = x + 1
            U = np.random.uniform(-19, -1, size)
            delta(dat, B, U, m, V, O)
            output(U, V)
            # print(np.count_nonzero(U >= 0))
            # print(np.count_nonzero(lab == 1))
            dat = CHANGE(dat, O, U)
            print("here")
            Remplire(U, Ufin, lab)
            # print("size", np.count_nonzero(Ufin >= 0), np.count_nonzero(U >= 0))
            # print(len(dat))
            O = nx.Graph(dat)
            # O.add_edges_from(dat)
            m = np.array(O.nodes)
        out = np.asarray(Ufin)
        Arrange(lab, Ufin, B, V)
        output(Ufin, V)
        outputs.append(out)
        print(np.count_nonzero(Ufin > 0))
        print(np.count_nonzero(V == 1))
        print(np.count_nonzero(lab == 1))
        """
    # end = time.time()
    # print("====== End of increasing ======")
    # print("Time", end - start)
    out = np.asarray(Ufin)
    # out = np.asarray(outputs)
    # print(outputs)
    # print(lab)
    np.save("INoutput_data_val.npy", out)  # save the outputs

# lab = np.load("node2vec/src/graph/labfin.npy", allow_pickle=True)
# dat = np.load("node2vec/src/graph/sam.npy", allow_pickle=True)
# print(lab)
# print(type(dat))
# PatternFinding(dat, lab)
\ No newline at end of file
import random
import os
import numpy as np

def Voisin(x, k):
    # Return False if x is already covered by one of the sets in k.
    if len(k) > 0:
        for i in k:
            if set(x).issubset(set(i)):
                return False
        return True
    return True

V = np.load("node2vec/src/graph/test_Bip.npy", allow_pickle=True)
V = list(V)
k = []
T = []
fo = open("example1.model", "w")
stri = "bc "
compteur = 0
for i in range(len(V)):
    print(V[i + compteur])
    x = V[i + compteur]
    k = x[:2]
    if Voisin(x, T):
        for j in range(V.index(x) + 1, len(V)):
            y = V[j]
            compteur = compteur + 1
            if x[2:] == y[2:] and x[0] == y[0]:
                k.append(y[1])
            else:
                break
        if len(k) > 1:
            strt = ' '.join(map(str, k))
            stry = ' '.join(map(str, x[2:]))
            fo.write(stri + strt + ',' + stry + "\n")
import networkx as nx

def ensemble(v, K):
    for j in range(len(K)):
        print(type(v), type(K[j]), K[j])
        if B.number_of_edges(v, K[j]) == 1:
            return False
    return True

fh = open("C:/Users/LENOVO/Desktop/karate.edgelist", "rb")
B = nx.read_edgelist(fh, nodetype=int)  # add edges only between nodes of opposite node sets
nx.draw(B, with_labels=True)
K = list(B.nodes)
V = []
V.append([K[0]])
print(K)
print(V, V[0])
for i in range(len(K)):
    print(K[i], V)
    add = False
    for j in range(len(V)):
        if ensemble(K[i], V[j]) == True:
            V[j].append(K[i])
            add = True
    if add == False:
        V.append([K[i]])
import os
import time
import numpy as np

nodes = []
Embedd2 = np.load("INoutput_data_val.npy", allow_pickle=True)
Embedd = np.load("data_val/transformed_0.npy", allow_pickle=True)
print(Embedd.shape)
print(Embedd2.shape)
"""
for i in range(len(Embedd)):
    liste = []
    # print(Embedd)
    for j in range(len(Embedd2[i])):
        nx = np.append(Embedd[i][j], Embedd2[i][j])
        liste.append(nx)
    nodes.append(liste)
    print(i)
node = np.asarray(nodes)
print(node.shape)
np.save("data_val.npy", node)  # save the outputs
"""
liste = []
start = time.time()
for i in range(len(Embedd)):
    nx = np.append(Embedd[i], Embedd2[0][i])
    liste.append(nx)
    # print(nx)
    # print(i)
node = np.asarray(liste)
end = time.time()
print(node.shape)
print("Time", end - start)
# print(node)
np.save("data_tr.npy", node)  # save the outputs
\ No newline at end of file
File added
from networkx.generators import community
from networkx.generators import random_graphs
from networkx.algorithms import clique
import networkx as nx
import random
import os
import copy
import numpy as np
import matplotlib.pyplot as plt

def generate_clique(nb, size, total_size):
    sub = community.caveman_graph(nb, size)  # graph made of cliques
    G = random_graphs.fast_gnp_random_graph(total_size, 0.1)  # random graph with edge probability 0.1
    G = nx.compose(G, sub)  # merge the two graphs: a random graph containing nb cliques
    node_mapping = dict(zip(G.nodes(), sorted(G.nodes(), key=lambda k: random.random())))  # build the mapping
    G_new = nx.relabel_nodes(G, node_mapping)  # apply the mapping
    cliques = list(clique.find_cliques(G_new))
    cliques = np.asarray([y for x in cliques for y in x if len(x) >= 4])
    nodes_cliques = np.unique(cliques)
    x = len(nodes_cliques)
    # print("nodes_cliques", x)
    output = np.zeros(total_size)
    output[nodes_cliques] = 1
    return G_new, output, x, nodes_cliques, size, nb

def generate_without_clique(total_size):  # generate random graphs without cliques
    while True:
        G = random_graphs.fast_gnp_random_graph(total_size, 0.04)
        cliques = list(clique.find_cliques(G))
        cliques = [x for x in cliques if len(x) >= 6]
        if len(cliques) == 0:
            break
    return G, np.zeros(total_size)

def to_input_shape(G):  # build the .edgelist content: node -> neighbouring node
    tab = []
    for a, b in G.edges():
        tab.append([a, b])
    return tab

BASE_PATH = "data"
DIR = "clique_1"
if not os.path.exists(BASE_PATH):
    os.mkdir(BASE_PATH)
PATH = os.path.join(BASE_PATH, DIR)
if not os.path.exists(PATH):
    os.mkdir(PATH)
total_size = 100
max_size_clique = 10
max_clique_count = 10
outputs = []
Gr_size = 1000
graph = []
data = []
lab = []
nodes = []
input = []
sz = []
B = [None] * total_size
x = 0
for id in range(Gr_size):
    G, labels, y, z, s, ng = generate_clique(random.randint(4, max_clique_count), random.randint(4, max_size_clique), total_size)
    tab = to_input_shape(G)
    graph.append(tab)
    A = nx.adjacency_matrix(G, nodelist=range(total_size), weight='weight')
    A.setdiag(A.diagonal() * 2)
    A = A.todense()
    B = copy.deepcopy(A)
    for i in range(len(B)):
        if i not in z:
            B[i] = 0
    outputs.append(y)
    lab.append(labels)
    data.append(B)
    T = nx.edges(G)
    T = np.asarray(T)
    E = T
    for i in range(len(E)):
        x = E[i, 0]
        c = E[i, 1]
        if (x not in z) and (c not in z):
            w = -1
            t = np.argwhere(T == (x, c))
            d = np.argwhere(T == (c, x))
            t = np.concatenate((t, d))
            for r in range(len(t)):
                for k in range(len(t)):
                    if (t[r, 0] == t[k, 0]) and r != k and w != t[r, 0]:
                        w = t[r, 0]
            # print(w)
            P = np.delete(T, w, axis=0)
            T = P
    print("id", id)
    sz.append(T)
np.save(os.path.join(PATH, "size.npy"), np.asarray(sz))
# np.save(os.path.join(PATH, "data.npy"), np.asarray(graph))
# np.save(os.path.join(PATH, "data2.npy"), np.asarray(data))
# print("out", sz[0])
# print("out", graph[0])
# print("out", data[0])
output = np.asarray(outputs)
# np.save(os.path.join(PATH, "output.npy"), output)  # save the outputs
# print("out", output[0])
labs = np.asarray(lab)
np.save(os.path.join(PATH, "labels2.npy"), labs)  # save the outputs
# print("labs", labs[0])
# print(s)
print(len(sz[0]))
# nx.draw(G, with_labels=True)
# plt.show()
\ No newline at end of file
import networkx as nx
import matplotlib.pyplot as plt
from networkx.generators import random_graphs
import random
import os
import copy
import numpy as np

def generate_clique(nb, size, total_size):
    sub = nx.complete_bipartite_graph(nb, size)
    G = random_graphs.fast_gnp_random_graph(total_size, 0.01)  # random graph with edge probability 0.01
    GS = nx.compose(G, sub)  # merge the two graphs: a random graph containing the bipartite pattern
    node_mapping = dict(zip(GS.nodes(), sorted(GS.nodes(), key=lambda k: random.random())))  # build the mapping
    G_new = nx.relabel_nodes(GS, node_mapping)  # apply the mapping
    A = nx.adjacency_matrix(G_new, nodelist=sorted(G.nodes()), weight='weight')
    A.setdiag(A.diagonal() * 2)
    A = A.todense()
    for i in range(len(A)):
        if np.count_nonzero(A[i] == 1) > 4:
            Bipartie.append(i)
    output = np.zeros(total_size)
    output[Bipartie] = 1
    return G_new, output, len(Bipartie), Bipartie, (size + nb), A

def to_input_shape(G):  # build the .edgelist content: node -> neighbouring node
    tab = []
    for a, b in G.edges():
        tab.append([a, b])
    return tab

BASE_PATH = "data"
DIR = "Bipartie"
if not os.path.exists(BASE_PATH):
    os.mkdir(BASE_PATH)
PATH = os.path.join(BASE_PATH, DIR)
if not os.path.exists(PATH):
    os.mkdir(PATH)
total_size = 100
max_size_clique = 30
max_clique_count = 30
outputs = []
Gr_size = 1
graph = []
data = []
lab = []
nodes = []
input = []
sz = []
B = [None] * total_size
x = 0
for id in range(Gr_size):
    Bipartie = []
    G, labels, y, z, s, A = generate_clique(random.randint(5, max_clique_count), random.randint(5, max_size_clique), total_size)
    tab = to_input_shape(G)
    graph.append(tab)
    B = copy.deepcopy(A)
    input.append(A)
    for i in range(len(B)):
        if i not in z:
            B[i] = 0
    outputs.append(y)
    lab.append(labels)
    data.append(B)
    T = nx.edges(G)
    T = np.asarray(T)
    E = T
    for i in range(len(E)):
        x = E[i, 0]
        c = E[i, 1]
        if (x not in z) and (c not in z):
            w = -1
            t = np.argwhere(T == (x, c))
            d = np.argwhere(T == (c, x))
            t = np.concatenate((t, d))
            for r in range(len(t)):
                for k in range(len(t)):
                    if (t[r, 0] == t[k, 0]) and r != k and w != t[r, 0]:
                        w = t[r, 0]
            print("w", w)
            P = np.delete(T, w, axis=0)
            print(len(P), E[i])
            T = P
    sz.append(T)
output = np.asarray(outputs)
labs = np.asarray(lab)
node = np.asarray(input)
nx.draw(G, with_labels=True)
plt.show()
np.save(os.path.join(PATH, "size.npy"), np.asarray(sz[0]))
np.save(os.path.join(PATH, "data.npy"), np.asarray(graph))
np.save(os.path.join(PATH, "data2.npy"), np.asarray(data))
np.save(os.path.join(PATH, "output.npy"), output)  # save the outputs
np.save(os.path.join(PATH, "labels2.npy"), labs)  # save the outputs
np.save(os.path.join(PATH, "nodes.npy"), node)  # save the outputs
import networkx as nx
import matplotlib.pyplot as plt
from networkx.generators import random_graphs
import random
import os
import copy
import numpy as np

def find_all_paths(graph, start, end, path=[]):
    path = path + [start]
    if start == end:
        return [path]
    paths = []
    for node in graph[start]:
        if node not in path:
            newpaths = find_all_paths(graph, node, end, path)
            for newpath in newpaths:
                paths.append(newpath)
    return paths

def Chaines():
    G = random_graphs.fast_gnp_random_graph(100, 0.01)
    nx.draw(G, with_labels=True)
    plt.show()
    chaine = nx.chain_decomposition(G, 1)
    y = []
    for i in range(100):
        print("here")
        for j in range(100):
            if i != j:
                x = find_all_paths(G, i, j)
                if len(x) > 0:
                    y.append(x)
    tab = []
    R = []
    for i in range(len(y)):
        if len(y[i]) > 1:
            x = y[i]
            for j in range(len(y[i])):
                for z in range(len(y[i])):
                    if set(x[j]).issubset(set(x[z])) and len(x[z]) > len(x[j]):
                        tab.append(j)
                    else:
                        if set(x[z]).issubset(set(x[j])) and len(x[z]) < len(x[j]):
                            tab.append(z)
            for k in range(len(x)):
                if k not in tab:
                    R.append(x[k])
            tab = []
    print(R)
    return G, R

def generate_clique(nb, size, total_size):
    Chaine = []
    G, ch = Chaines()
    A = nx.adjacency_matrix(G, nodelist=sorted(G.nodes()), weight='weight')
    A.setdiag(A.diagonal() * 2)
    A = A.todense()
    for i in range(len(ch)):
        x = ch[i]
        for j in range(len(x)):
            if x[j] not in Chaine:
                Chaine.append(x[j])
    print("here is the chain", Chaine)
    output = np.zeros(total_size)
    output[Chaine] = 1
    return G, output, len(Chaine), Chaine, (size + nb), A

def to_input_shape(G):  # build the .edgelist content: node -> neighbouring node
    tab = []
    for a, b in G.edges():
        tab.append([a, b])
    return tab

BASE_PATH = "data"
DIR = "Bipartie"
if not os.path.exists(BASE_PATH):
    os.mkdir(BASE_PATH)
PATH = os.path.join(BASE_PATH, DIR)
if not os.path.exists(PATH):
    os.mkdir(PATH)
total_size = 100
max_size_clique = 30
max_clique_count = 30
outputs = []
Gr_size = 1000
graph = []
data = []
lab = []
nodes = []
input = []
sz = []
B = [None] * total_size
x = 0
for id in range(1):
    Bipartie = []
    G, labels, y, z, s, A = generate_clique(random.randint(5, max_clique_count), random.randint(5, max_size_clique), total_size)
    tab = to_input_shape(G)
    graph.append(tab)
    B = copy.deepcopy(A)
    input.append(A)
    for i in range(len(B)):
        if i not in z:
            B[i] = 0
    outputs.append(y)
    lab.append(labels)
    data.append(B)
    sz.append(s)
    print(id)
output = np.asarray(outputs)
labs = np.asarray(lab)
node = np.asarray(input)
print("sz", sz[0])
print("graph", graph[0])
print("matrix", data[0])
print("out", output[0])
print("labs", labs[0])
print("nodes", node[0])
"""
np.save(os.path.join(PATH, "size.npy"), np.asarray(sz))
np.save(os.path.join(PATH, "data.npy"), np.asarray(graph))
np.save(os.path.join(PATH, "data2.npy"), np.asarray(data))
np.save(os.path.join(PATH, "output.npy"), output)  # save the outputs
np.save(os.path.join(PATH, "labels2.npy"), labs)  # save the outputs
np.save(os.path.join(PATH, "nodes.npy"), node)  # save the outputs
"""
from networkx.generators import community
from networkx.generators import random_graphs
from networkx.algorithms import clique
import networkx as nx
import random
import os
import copy
import numpy as np
import matplotlib.pyplot as plt

def generate_clique(nb, size, total_size):
    j = 0
    sub = nx.Graph()  # graph that will hold the star patterns
    for i in range(nb):
        nx.add_star(sub, [j, j + 1, j + 2, j + 3, j + 4, j + 5])
        j = j + 6
    # nx.draw(sub)
    # plt.show()
    G = random_graphs.fast_gnp_random_graph(total_size, 0.000000001)  # random graph with edge probability 1e-9
    G = nx.compose(G, sub)  # merge the two graphs: a random graph containing nb stars
    # nx.draw(G)
    # plt.show()
    node_mapping = dict(zip(G.nodes(), sorted(G.nodes(), key=lambda k: random.random())))  # build the mapping
    G_new = nx.relabel_nodes(G, node_mapping)  # apply the mapping
    A = nx.adjacency_matrix(G_new, nodelist=range(total_size), weight='weight')
    A.setdiag(A.diagonal() * 2)
    A = A.todense()
    B = copy.deepcopy(A)
    output = np.zeros(total_size)
    sortie = np.zeros(total_size)
    k = []
    for i in range(len(B)):
        if np.count_nonzero(A[i] == 1) < 5:
            B[i] = 0
        else:
            sortie[i] = 1
            k.append(i)
            for j in range(len(B)):
                if B[i, j] == 1:
                    sortie[j] = 1
                    k.append(j)
    print("k", len(k), k)
    return G_new, sortie, 4, B, 5, A, k

def to_input_shape(G):  # build the .edgelist content: node -> neighbouring node
    tab = []
    for a, b in G.edges():
        tab.append([a, b])
    return tab

BASE_PATH = "data"
DIR = "star"
if not os.path.exists(BASE_PATH):
    os.mkdir(BASE_PATH)
PATH = os.path.join(BASE_PATH, DIR)
if not os.path.exists(PATH):
    os.mkdir(PATH)
total_size = 100
max_star_clique = 20
max_star_count = 12
outputs = []
Gr_size = 100
graph = []
data = []
lab = []
nodes = []
input = []
sz = []
x = 0
for id in range(Gr_size):
    G, labels, y, B, s, A, o = generate_clique(random.randint(4, max_star_count), random.randint(4, max_star_clique), total_size)
    # G, labels, y, z, s = generate_clique(,4, total_size)
    tab = to_input_shape(G)
    graph.append(tab)
    outputs.append(y)
    lab.append(labels)
    input.append(A)
    data.append(B)
    T = nx.edges(G)
    T = np.asarray(T)
    print("len T", len(T), T)
    E = T
    print("len T", len(T))
    for i in range(len(E)):
        x = E[i, 0]
        c = E[i, 1]
        if (x not in o) and (c not in o):
            w = -1
            t = np.argwhere(T == (x, c))
            d = np.argwhere(T == (c, x))
            t = np.concatenate((t, d))
            print("did not enter")
            for r in range(len(t)):
                for k in range(len(t)):
                    if (t[r, 0] == t[k, 0]) and r != k and w != t[r, 0]:
                        w = t[r, 0]
            print("w", w)
            P = np.delete(T, w, axis=0)
            print(len(P), E[i])
            T = P
    print("len T", len(T))
    sz.append(T)
    print(T)
    print(y)
    print(id)
print("graph", len(sz[0]), len(sz))
print("matrix", np.count_nonzero(data[0] == 1))
np.save(os.path.join(PATH, "size.npy"), np.asarray(sz[0]))
np.save(os.path.join(PATH, "data.npy"), np.asarray(graph))
np.save(os.path.join(PATH, "data2.npy"), np.asarray(data))
output = np.asarray(outputs)
np.save(os.path.join(PATH, "output.npy"), output)  # save the outputs
print("out", output[0])
labs = np.asarray(lab)
np.save(os.path.join(PATH, "labels2.npy"), labs)  # save the outputs
print("labs", np.count_nonzero(labs[0] == 1))
node = np.asarray(input)
np.save(os.path.join(PATH, "nodes.npy"), node)  # save the outputs
print("nodes", np.count_nonzero(node[0] == 1))
\ No newline at end of file
# Auto detect text files and perform LF normalization
* text=auto
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# celery beat schedule file
celerybeat-schedule
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/