Add num_workers and batch_size to default.yaml

This commit is contained in:
Remi Cadene
2024-05-29 11:09:16 +00:00
parent 8c1dd0e263
commit 63e61385fc
2 changed files with 3 additions and 1 deletion

View File

@@ -26,6 +26,8 @@ training:
save_freq: ???
log_freq: 250
save_model: true
num_workers: 4
batch_size: ???
eval:
n_episodes: 1

View File

@@ -386,7 +386,7 @@ def train(cfg: DictConfig, out_dir: str | None = None, job_name: str | None = No
# create dataloader for offline training
dataloader = torch.utils.data.DataLoader(
offline_dataset,
num_workers=4,
num_workers=cfg.training.num_workers,
batch_size=cfg.training.batch_size,
shuffle=True,
pin_memory=cfg.device != "cpu",