From 13d9d2cb5d75a04fad83ea827b7772fa03344c7e Mon Sep 17 00:00:00 2001
From: Schneider Leo <leo.schneider@etu.ec-lyon.fr>
Date: Mon, 21 Oct 2024 09:06:51 +0200
Subject: [PATCH] datasets

---
 main_custom.py  | 2 --
 model_custom.py | 2 ++
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/main_custom.py b/main_custom.py
index 8ddbdea..7c7421b 100644
--- a/main_custom.py
+++ b/main_custom.py
@@ -26,7 +26,6 @@ def train(model, data_train, epoch, optimizer, criterion_rt, criterion_intensity
         param.requires_grad = True
     if forward == 'both':
         for seq, charge, rt, intensity in data_train:
-            print(seq, charge, rt, intensity )
             rt, intensity = rt.float(), intensity.float()
             if torch.cuda.is_available():
                 seq, charge, rt, intensity = seq.cuda(), charge.cuda(), rt.cuda(), intensity.cuda()
@@ -288,7 +287,6 @@ def get_n_params(model):
 
         for s in list(p.size()):
             nn = nn * s
-        print(n, nn)
         pp += nn
     return pp
 
diff --git a/model_custom.py b/model_custom.py
index eddc158..6745da3 100644
--- a/model_custom.py
+++ b/model_custom.py
@@ -102,6 +102,8 @@ class Model_Common_Transformer(nn.Module):
     def forward(self, seq, charge):
         meta_ohe = torch.nn.functional.one_hot(charge - 1, self.charge_max).float()
         seq_emb = torch.nn.functional.one_hot(seq, self.nb_aa).float()
+        print(seq_emb.shape)
+        print(self.nb_aa, self.embedding_dim)
         emb = self.pos_embedding(self.emb(seq_emb))
         meta_enc = self.meta_enc(meta_ohe)
 
-- 
GitLab