From 2d2e549e62e5324bd5aa1f0c181f6e66da2771b4 Mon Sep 17 00:00:00 2001
From: Schneider Leo <leo.schneider@etu.ec-lyon.fr>
Date: Mon, 21 Oct 2024 10:50:47 +0200
Subject: [PATCH] datasets: disable pad/convert in common dataset loading; add embedding debug print

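Switch the common_dataset.load_data call used for the 'unmod' vocabulary to
pad=False and convert=False, and add a debug print of the projected one-hot
sequence embedding in Model_Common_Transformer, alongside the existing shape
prints.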
---
 main_custom.py  | 2 +-
 model_custom.py | 1 +
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/main_custom.py b/main_custom.py
index 1c9d5a5..7c7421b 100644
--- a/main_custom.py
+++ b/main_custom.py
@@ -223,7 +223,7 @@ def main(args):
         data_train, data_val, data_test = common_dataset.load_data(path_train=args.dataset_train,
                                                                    path_val=args.dataset_val,
                                                                    path_test=args.dataset_test,
-                                                                   batch_size=args.batch_size, length=args.seq_length, pad = True, convert=True, vocab='unmod')
+                                                                   batch_size=args.batch_size, length=args.seq_length, pad=False, convert=False, vocab='unmod')
     elif args.forward == 'rt':
         data_train, data_val, data_test = dataloader.load_data(data_sources=[args.dataset_train,args.dataset_val,args.dataset_test],
                                                                batch_size=args.batch_size, length=args.seq_length)
diff --git a/model_custom.py b/model_custom.py
index 571dd3a..84c6c94 100644
--- a/model_custom.py
+++ b/model_custom.py
@@ -104,6 +104,7 @@ class Model_Common_Transformer(nn.Module):
         seq_emb = torch.nn.functional.one_hot(seq, self.nb_aa).float()
         print(seq_emb.shape)
         print(self.nb_aa, self.embedding_dim)
+        print(self.emb(seq_emb))  # debug: inspect projected one-hot sequence embedding
         emb = self.pos_embedding(self.emb(seq_emb))
         meta_enc = self.meta_enc(meta_ohe)
 
-- 
GitLab
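
For reviewers, a minimal runnable sketch of the embedding path the new debug
print inspects. It assumes self.emb is a linear projection applied to the
one-hot encoded sequence, as the surrounding forward code suggests; the layer
sizes and batch shape below are illustrative assumptions, not taken from the
repository.

    import torch
    import torch.nn as nn

    nb_aa, embedding_dim = 21, 16           # illustrative sizes (assumed)
    emb = nn.Linear(nb_aa, embedding_dim)   # stands in for self.emb

    seq = torch.randint(0, nb_aa, (2, 5))   # batch of 2 integer-encoded sequences
    seq_emb = torch.nn.functional.one_hot(seq, nb_aa).float()
    print(seq_emb.shape)                    # torch.Size([2, 5, 21])
    print(emb(seq_emb).shape)               # torch.Size([2, 5, 16])

Applied to one-hot rows, the linear layer reduces to a table lookup into its
weight matrix (plus bias), i.e. the same role an nn.Embedding lookup on the
integer sequence would play.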