backup wip

Alexander Soare 2024-06-06 08:44:59 +01:00
parent 09b83d6584
commit 53b36dcaab
2 changed files with 1 addition and 5 deletions


@@ -47,7 +47,6 @@ training:
  # faster data loading with datasets small enough to fit in memory. If you wish to use dataloader workers,
  # remember to set `dataloader_persistent_workers` to True.
  dataset_use_cache: false
  dataloader_num_workers: 4
  dataloader_persistent_workers: false
eval:
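For context on the options in this hunk: dataloader_num_workers and dataloader_persistent_workers map onto the standard torch.utils.data.DataLoader arguments of the same name, and persistent workers only take effect when at least one worker process is used. A minimal sketch of how such config values are typically passed through (the make_dataloader helper and the plain-dict config here are illustrative assumptions, not the repo's actual wiring):

import torch

def make_dataloader(dataset, training_cfg: dict) -> torch.utils.data.DataLoader:
    # Illustrative glue code: forwards the YAML options above to DataLoader.
    # persistent_workers=True is only valid when num_workers > 0.
    num_workers = training_cfg["dataloader_num_workers"]
    return torch.utils.data.DataLoader(
        dataset,
        batch_size=training_cfg.get("batch_size", 8),
        num_workers=num_workers,
        persistent_workers=training_cfg["dataloader_persistent_workers"] and num_workers > 0,
        shuffle=True,
    )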


@@ -353,9 +353,6 @@ def train(cfg: DictConfig, out_dir: str | None = None, job_name: str | None = No
    # log metrics to terminal and wandb
    logger = Logger(cfg, out_dir, wandb_job_name=job_name)
-   if cfg.training.online_steps > 0:
-       raise NotImplementedError("Online training is not implemented yet.")
    set_global_seed(cfg.seed)
    # Check device is available
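For reference on set_global_seed(cfg.seed) above: this is the usual reproducibility helper pattern that seeds every RNG the training loop may touch. A generic sketch of what such a helper typically does (an assumption for illustration, not necessarily this repo's exact implementation):

import random

import numpy as np
import torch

def set_global_seed(seed: int) -> None:
    # Seed Python, NumPy and PyTorch (CPU and all CUDA devices) so that
    # data sampling and weight initialization are reproducible across runs.
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    if torch.cuda.is_available():
        torch.cuda.manual_seed_all(seed)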
@@ -451,7 +448,7 @@ def train(cfg: DictConfig, out_dir: str | None = None, job_name: str | None = No
            shuffle=True,
        )
    else:
-       shuffle = True
+       shuffle = False # TODO(now)
        sampler = None
    dataloader = torch.utils.data.DataLoader(
        offline_dataset,
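On the shuffle change above: torch.utils.data.DataLoader raises a ValueError if shuffle=True is combined with a custom sampler, which is why the sampler branch keeps shuffle = False while the plain branch controls shuffling through the flag; the WIP edit additionally turns shuffling off in the no-sampler branch, presumably temporarily given the TODO(now) marker. A minimal sketch of the mutually exclusive pattern (the dataset and RandomSampler here are placeholders, not the project's own sampler):

from torch.utils.data import DataLoader, RandomSampler

def build_dataloader(dataset, use_custom_sampler: bool) -> DataLoader:
    # DataLoader forbids shuffle=True together with a sampler, so the two
    # options are kept mutually exclusive, mirroring the branches in the diff.
    if use_custom_sampler:
        sampler = RandomSampler(dataset)  # placeholder for a task-specific sampler
        shuffle = False
    else:
        sampler = None
        shuffle = True
    return DataLoader(dataset, batch_size=8, shuffle=shuffle, sampler=sampler)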