diff --git a/model_custom.py b/model_custom.py
index 84c6c940ed4423d02c442b921c1781bed3e5b6b0..04f087072a349ce211a869ae88526a97525142d3 100644
--- a/model_custom.py
+++ b/model_custom.py
@@ -100,6 +100,9 @@ class Model_Common_Transformer(nn.Module):
                                                 d_model=self.embedding_dim)
 
     def forward(self, seq, charge):
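+        # Debug: log the input tensor shapes and raw values (temporary instrumentation)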
+        print(seq.size(), charge.size())
+        print(seq, charge)
         meta_ohe = torch.nn.functional.one_hot(charge - 1, self.charge_max).float()
         seq_emb = torch.nn.functional.one_hot(seq, self.nb_aa).float()
         print(seq_emb.shape)