Commit efe36191 authored by Alexandre Chapin

Deactivate wandb

parent 96ec6825
@@ -60,7 +60,6 @@ def main():
     #### Create model
     model = LitSlotAttentionAutoEncoder(resolution, num_slots, num_iterations, cfg=cfg)
-    wandb_logger = WandbLogger(project="slot-attention")
     checkpoint_callback = ModelCheckpoint(
         save_top_k=10,
@@ -71,7 +70,7 @@ def main():
     )
     trainer = pl.Trainer(accelerator="gpu", devices=num_gpus, profiler="simple",
-                         default_root_dir="./logs", logger=wandb_logger,
+                         default_root_dir="./logs", logger=WandbLogger(project="slot-attention") if args.wandb else None,
                          strategy="ddp" if num_gpus > 1 else "default", callbacks=[checkpoint_callback], deterministic=True,
                          log_every_n_steps=100, max_steps=num_train_steps)
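The new code gates the logger on an `args.wandb` flag whose definition is not part of this hunk. A minimal sketch of how such a flag could be wired up with argparse is shown below; the `--wandb` flag name, its default, and the `parse_args` helper are assumptions for illustration, not code from this repository. Only the `WandbLogger(...) if args.wandb else None` pattern comes from the commit itself.

```python
import argparse

import pytorch_lightning as pl
from pytorch_lightning.loggers import WandbLogger


def parse_args():
    # Hypothetical CLI setup; the commit does not show where args.wandb is defined.
    parser = argparse.ArgumentParser()
    # Off by default, matching the commit's intent to deactivate wandb
    # unless logging is explicitly requested.
    parser.add_argument("--wandb", action="store_true",
                        help="Enable Weights & Biases logging")
    return parser.parse_args()


args = parse_args()
# Mirrors the commit: the logger is only constructed when requested, so no
# wandb run is created (and no wandb login is needed) in the default case.
logger = WandbLogger(project="slot-attention") if args.wandb else None
trainer = pl.Trainer(accelerator="gpu", devices=1, logger=logger)
```

Constructing the `WandbLogger` inline in the `Trainer` call, as the commit does, has the same effect: when the flag is off, the logger object is never instantiated at all.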