set num_workers to 0 in compute_stats; temporary fix for code that hangs when iterating over the dataloader
parent acc433d25d
commit 6cc2cc896a
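In short, the change forces the statistics dataloader to run in the main process: with num_workers=0, torch.utils.data.DataLoader never spawns worker subprocesses, which sidesteps the hang seen while iterating during compute_stats. A minimal sketch of the setting being relied on; the DummyDataset below is purely illustrative and not part of this repo:

import torch


class DummyDataset(torch.utils.data.Dataset):
    # Stand-in for LeRobotDataset, only to show the num_workers setting in isolation.
    def __len__(self):
        return 8

    def __getitem__(self, idx):
        return {"observation": torch.zeros(3, 96, 96), "index": torch.tensor(idx)}


# num_workers=0 means batches are produced in the calling process; no multiprocessing,
# so the iteration below cannot dead-lock on worker startup or shared-memory transfer.
dataloader = torch.utils.data.DataLoader(DummyDataset(), num_workers=0, batch_size=2)

for batch in dataloader:
    pass  # stats accumulation (mean/std/min/max per key) would happen here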
@@ -160,9 +160,8 @@ def to_lerobotdataset_with_save_episode(raw_dir: Path, repo_id: str, push_to_hub
    for key in other_keys:
        if "language_instruction" in key:
            continue
            # Some openx dataset have multiple language commands
            # episode_data[key] = episode[key].numpy()[0].decode("utf-8")
            episode_data[key] = episode[key].numpy()[0].decode("utf-8")
        else:
            if key == "is_last":
                episode_data["next.done"] = tf_to_torch(episode[key])
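For reference, the .numpy()[0].decode("utf-8") pattern in this hunk turns an RLDS/TensorFlow string tensor into a Python string before it is stored in episode_data (tf_to_torch handles the numeric fields and is defined elsewhere in this repo). A minimal illustration of the decode step, assuming TensorFlow is installed; the example instruction text is made up:

import tensorflow as tf

# A language-instruction field in an RLDS episode is a string tensor;
# .numpy() yields bytes, which must be decoded before storing as a str.
instruction = tf.constant(["pick up the red block"])
decoded = instruction.numpy()[0].decode("utf-8")
assert decoded == "pick up the red block"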
@@ -26,7 +26,6 @@ def get_stats_einops_patterns(dataset, num_workers=0):
    Note: We assume the images are in channel first format
    """

    dataloader = torch.utils.data.DataLoader(
        dataset,
        num_workers=num_workers,
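The dataloader here is only used to pull a batch so per-key shapes can be inspected and turned into einops reduction patterns (hence the channel-first note in the docstring). A hedged sketch of how such patterns are typically applied; the exact pattern strings and key names below are assumptions, not read from this repo:

import einops
import torch

# Hypothetical per-key reduction patterns of the kind such a helper returns.
patterns = {
    "observation.image": "b c h w -> c 1 1",  # reduce over batch and spatial dims
    "action": "b c -> c",
}

batch = {
    "observation.image": torch.rand(2, 3, 96, 96),
    "action": torch.rand(2, 7),
}

for key, pattern in patterns.items():
    mean = einops.reduce(batch[key], pattern, "mean")
    print(key, tuple(mean.shape))  # (3, 1, 1) for the image, (7,) for the action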
@@ -895,7 +895,7 @@ class LeRobotDataset(torch.utils.data.Dataset):
        if run_compute_stats:
            self.stop_image_writer()
            # TODO(aliberts): refactor stats in save_episodes
-            self.meta.stats = compute_stats(self)
+            self.meta.stats = compute_stats(self, num_workers=0)
            serialized_stats = serialize_dict(self.meta.stats)
            write_json(serialized_stats, self.root / STATS_PATH)
            self.consolidated = True
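After the stats are computed, the two following lines persist them to disk. A hedged sketch of that serialization step; _serialize below is a hand-rolled stand-in for serialize_dict, and the meta/stats.json path only approximates what STATS_PATH resolves to in this repo:

import json
from pathlib import Path

import torch


def _serialize(obj):
    # Illustrative stand-in for serialize_dict: make nested tensor stats JSON-friendly.
    if isinstance(obj, dict):
        return {k: _serialize(v) for k, v in obj.items()}
    if isinstance(obj, torch.Tensor):
        return obj.tolist()
    return obj


stats = {"observation.image": {"mean": torch.zeros(3, 1, 1), "std": torch.ones(3, 1, 1)}}

out_path = Path("meta") / "stats.json"  # assumption: STATS_PATH points at a similar location
out_path.parent.mkdir(parents=True, exist_ok=True)
out_path.write_text(json.dumps(_serialize(stats), indent=4))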