diff --git a/model_custom.py b/model_custom.py
index 32fe630af88ce223a5e59d7a8339deb1384529c5..a7947f5080ba6b2f2a26f12fab38ccb1bda9427d 100644
--- a/model_custom.py
+++ b/model_custom.py
@@ -25,7 +25,6 @@ class PositionalEncoding(nn.Module):
         pe = torch.zeros(max_len, 1, d_model)
         pe[:, 0, 0::2] = torch.sin(position * div_term)
         pe[:, 0, 1::2] = torch.cos(position * div_term)
-        print(d_model, max_len)
         self.register_buffer('pe', pe)
 
     def forward(self, x):
@@ -106,7 +105,7 @@ class Model_Common_Transformer(nn.Module):
     def forward(self, seq, charge):
         meta_ohe = torch.nn.functional.one_hot(charge - 1, self.charge_max).float()
         seq_emb = torch.nn.functional.one_hot(seq, self.nb_aa).float()
-        print(self.emb(seq_emb).size())
+        print(seq_emb.size())
         emb = self.pos_embedding(self.emb(seq_emb))
         meta_enc = self.meta_enc(meta_ohe)