From 55782a1e1ae2c860253ed8dd56c578d11f9210a8 Mon Sep 17 00:00:00 2001
From: Schneider Leo <leo.schneider@etu.ec-lyon.fr>
Date: Mon, 21 Oct 2024 15:26:53 +0200
Subject: [PATCH] Move mean inside acos in masked spectral angle; print training progress

---
 loss.py        | 2 +-
 main_custom.py | 2 ++
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/loss.py b/loss.py
index 0021caf..0d82d02 100644
--- a/loss.py
+++ b/loss.py
@@ -37,7 +37,7 @@ def masked_spectral_angle(y_true, y_pred):
     true_masked = F.normalize(true_masked, p=2, dim=1)
     # print(pred_masked.sum(dim=1))
     # print((pred_masked * true_masked).sum(dim=1).shape)
-    return 1 -2 * torch.acos((pred_masked * true_masked).sum(dim=1)).mean() / np.pi
+    return 1 -2 * torch.acos((pred_masked * true_masked).sum(dim=1).mean()) / np.pi
 
 
 def masked_pearson_correlation_distance(y_true, y_pred, reduce='mean'):
diff --git a/main_custom.py b/main_custom.py
index 7048332..ff87467 100644
--- a/main_custom.py
+++ b/main_custom.py
@@ -24,6 +24,7 @@ def train(model, data_train, epoch, optimizer, criterion_rt, criterion_intensity
     for param in model.parameters():
         param.requires_grad = True
     if forward == 'both':
+        i=0
         for seq, charge, rt, intensity in data_train:
             rt, intensity = rt.float(), intensity.float()
             if torch.cuda.is_available():
@@ -41,6 +42,7 @@ def train(model, data_train, epoch, optimizer, criterion_rt, criterion_intensity
             optimizer.zero_grad()
             loss.backward()
             optimizer.step()
+            print(i,'/',len(data_train))
 
         if wandb is not None:
             wdb.log({"train rt loss": losses_rt / len(data_train), "train int loss": losses_int / len(data_train),
-- 
GitLab