From 7d9d2912aa8816d82f6864cbc7ff713c3a1c1e08 Mon Sep 17 00:00:00 2001
From: lschneider <leo.schneider@univ-lyon1.fr>
Date: Thu, 19 Sep 2024 15:19:39 +0200
Subject: [PATCH] test cossim: permute one-hot sequence tensor to (batch, classes, seq) before embedding

---
 model_custom.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/model_custom.py b/model_custom.py
index dc73f4c..d2888b4 100644
--- a/model_custom.py
+++ b/model_custom.py
@@ -106,6 +106,7 @@ class Model_Common_Transformer(nn.Module):
     def forward(self, seq, charge):
         meta_ohe = torch.nn.functional.one_hot(charge - 1, self.charge_max).float()
         seq_emb = torch.nn.functional.one_hot(seq, self.nb_aa).float()
+        seq_emb = torch.permute(seq_emb, (0, 2, 1))
         print(seq_emb.size())
         emb = self.pos_embedding(self.emb(seq_emb))
         meta_enc = self.meta_enc(meta_ohe)
@@ -130,6 +131,7 @@ class Model_Common_Transformer(nn.Module):
     def forward_int(self, seq, charge):
         meta_ohe = torch.nn.functional.one_hot(charge - 1, self.charge_max).float()
         seq_emb = torch.nn.functional.one_hot(seq, self.nb_aa).float()
+        seq_emb = torch.permute(seq_emb, (0, 2, 1))
         emb = self.pos_embedding(self.emb(seq_emb))
         meta_enc = self.meta_enc(meta_ohe)
         enc = self.encoder(emb)
-- 
GitLab