From 74312c196a41ccca1c3c46082b88e3cc6023b1b3 Mon Sep 17 00:00:00 2001
From: lcalmettes <leo.calmettes@etu.ec-lyon.fr>
Date: Mon, 12 May 2025 16:42:26 +0200
Subject: [PATCH] modified: config/config.py, modified: dataset/dataset.py
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

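Enable the Random_shift_rt augmentation in load_data_duo and update the
--augment_args defaults accordingly; also turn off pin_memory for both
DataLoaders (train and test).

For reference, the new defaults feed the transform as sketched below. This is
a minimal illustration only; the reading of each augment_args slot as
probability / minimum shift / maximum shift is an assumption, not something
the patch itself states:

    # --augment_args default is now [0, 0, 0.5, 0.1, 0.1, 0., 7.5]
    # slots 2, 5 and 6 are passed positionally to Random_shift_rt
    Random_shift_rt(args.augment_args[2],  # 0.5  (assumed: apply probability)
                    args.augment_args[5],  # 0.   (assumed: minimum rt shift)
                    args.augment_args[6])  # 7.5  (assumed: maximum rt shift)
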
---
 config/config.py   | 2 +-
 dataset/dataset.py | 6 +++---
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/config/config.py b/config/config.py
index 068b2c5..63b5999 100644
--- a/config/config.py
+++ b/config/config.py
@@ -5,7 +5,7 @@ def load_args():
     parser = argparse.ArgumentParser()
     parser.add_argument('--epoches', type=int, default=20)
     parser.add_argument('--eval_inter', type=int, default=1)
-    parser.add_argument('--augment_args', nargs = '+', type = float, default = [0,0,0,0.1,0.1,0.1,0.1])
+    parser.add_argument('--augment_args', nargs = '+', type = float, default = [0,0,0.5,0.1,0.1,0.,7.5])
     parser.add_argument('--noise_threshold', type=int, default=0)
     parser.add_argument('--lr', type=float, default=0.001)
     parser.add_argument('--optim', type = str, default = "Adam")
diff --git a/dataset/dataset.py b/dataset/dataset.py
index 24dd482..7d881e2 100644
--- a/dataset/dataset.py
+++ b/dataset/dataset.py
@@ -221,7 +221,7 @@ def load_data_duo(base_dir, batch_size, args, shuffle=True):
     train_transform = transforms.Compose(
         [#Random_erasing(args.augment_args[0], args.augment_args[3]),
          #Random_int_noise(args.augment_args[1], args.augment_args[4]),
-         #Random_shift_rt(args.augment_args[2], args.augment_args[5], args.augment_args[6]),
+         Random_shift_rt(args.augment_args[2], args.augment_args[5], args.augment_args[6]),
          transforms.Resize((224, 224)),
          Threshold_noise(args.noise_threshold),
          Log_normalisation(),
@@ -254,7 +254,7 @@ def load_data_duo(base_dir, batch_size, args, shuffle=True):
         shuffle=shuffle,
         num_workers=8,
         collate_fn=None,
-        pin_memory=True,
+        pin_memory=False,
     )
     data_loader_test = data.DataLoader(
         dataset=val_dataset,
@@ -262,7 +262,7 @@ def load_data_duo(base_dir, batch_size, args, shuffle=True):
         shuffle=shuffle,
         num_workers=8,
         collate_fn=None,
-        pin_memory=True,
+        pin_memory=False,
     )
 
     return data_loader_train, data_loader_test
-- 
GitLab