diff --git a/model_custom.py b/model_custom.py
index 96cfc64035ba8c8c40d79b94b81246c05cc54005..4e245e2e28abe67869477f075dda4c7e4da269c2 100644
--- a/model_custom.py
+++ b/model_custom.py
@@ -101,6 +101,7 @@ class Model_Common_Transformer(nn.Module):
                                                 d_model=self.embedding_dim)
 
     def forward(self, seq, charge):
+        # NOTE(review): dropped a debug print(seq.shape) here — presumably seq is an integer-encoded sequence tensor; confirm shape with callers
         meta_ohe = torch.nn.functional.one_hot(charge - 1, self.charge_max).float()
         seq_emb = torch.nn.functional.one_hot(seq, self.nb_aa).float()
         emb = self.pos_embedding(self.emb(seq_emb))