Skip to content
Snippets Groups Projects
Commit ad33c605 authored by Léo Schneider's avatar Léo Schneider
Browse files

test cossim

parent b3632673
No related branches found
No related tags found
No related merge requests found
......@@ -25,7 +25,6 @@ class PositionalEncoding(nn.Module):
pe = torch.zeros(max_len, 1, d_model)
pe[:, 0, 0::2] = torch.sin(position * div_term)
pe[:, 0, 1::2] = torch.cos(position * div_term)
print(d_model, max_len)
self.register_buffer('pe', pe)
def forward(self, x):
......@@ -106,7 +105,7 @@ class Model_Common_Transformer(nn.Module):
def forward(self, seq, charge):
meta_ohe = torch.nn.functional.one_hot(charge - 1, self.charge_max).float()
seq_emb = torch.nn.functional.one_hot(seq, self.nb_aa).float()
print(self.emb(seq_emb).size())
print(seq_emb.size())
emb = self.pos_embedding(self.emb(seq_emb))
meta_enc = self.meta_enc(meta_ohe)
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment