try lowering number of workers

Cadene 2024-05-02 20:59:53 +00:00
parent 7cbd1a617c
commit e84db37680
1 changed file with 1 addition and 1 deletion


@@ -115,7 +115,7 @@ def test_compute_stats_on_xarm():
     # get all frames from the dataset in the same dtype and range as during compute_stats
     dataloader = torch.utils.data.DataLoader(
         dataset,
-        num_workers=8,
+        num_workers=1,
         batch_size=len(dataset),
         shuffle=False,
     )
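
For context, a minimal runnable sketch of the loading pattern this hunk changes, assuming a plain torch Dataset. The DummyDataset class and the observation.image key are hypothetical stand-ins for the xarm dataset fixture used by test_compute_stats_on_xarm; only the DataLoader arguments mirror the diff. The whole dataset is fetched as a single batch with one worker and no shuffling, so every frame is visited exactly once.

import torch
from torch.utils.data import DataLoader, Dataset


class DummyDataset(Dataset):
    # Hypothetical stand-in for the xarm dataset fixture used in the test.
    def __init__(self, num_frames=16):
        # Illustrative float32 frames in [0, 1].
        self.frames = torch.rand(num_frames, 3, 84, 84)

    def __len__(self):
        return len(self.frames)

    def __getitem__(self, idx):
        return {"observation.image": self.frames[idx]}


dataset = DummyDataset()

# Same pattern as in the diff: the whole dataset in a single batch,
# one worker, and no shuffling.
dataloader = DataLoader(
    dataset,
    num_workers=1,
    batch_size=len(dataset),
    shuffle=False,
)

batch = next(iter(dataloader))
assert batch["observation.image"].shape == (len(dataset), 3, 84, 84)

Since batch_size=len(dataset) means the loader only ever yields one batch, extra workers add process and memory overhead without much benefit; presumably that motivates the reduction, though the commit message only says "try lowering number of workers".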