Use sampler always (temp fix)

Pepijn 2025-03-11 12:23:51 +01:00
parent e3c3c165aa
commit 841d54c050
1 changed file with 16 additions and 8 deletions

@@ -25,7 +25,7 @@ from torch.amp import GradScaler
 from torch.optim import Optimizer
 from lerobot.common.datasets.factory import make_dataset
-from lerobot.common.datasets.sampler import PrioritizedSampler
+from lerobot.common.datasets.sampler import EpisodeAwareSampler, PrioritizedSampler
 from lerobot.common.datasets.utils import cycle
 from lerobot.common.envs.factory import make_env
 from lerobot.common.optim.factory import make_optimizer_and_scheduler
@@ -165,6 +165,17 @@ def train(cfg: TrainPipelineConfig):
     # create dataloader for offline training
     if hasattr(cfg.policy, "drop_n_last_frames"):
+        shuffle = False
+        sampler = EpisodeAwareSampler(
+            dataset.episode_data_index,
+            drop_n_last_frames=cfg.policy.drop_n_last_frames,
+            shuffle=True,
+        )
+    else:
+        shuffle = True
+        sampler = None
+    # TODO(pepijn): If experiment works integrate this
     shuffle = False
     sampler = PrioritizedSampler(
         data_len=data_len,
@@ -174,9 +185,6 @@
         replacement=True,
         num_samples_per_epoch=data_len,
     )
-    else:
-        shuffle = True
-        sampler = None
     dataloader = torch.utils.data.DataLoader(
         dataset,
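
For reference, both branches set shuffle together with sampler because torch.utils.data.DataLoader rejects shuffle=True when an explicit sampler is passed, which is why the temp fix forces shuffle = False whenever the sampler is wired in unconditionally. PrioritizedSampler is this repository's own experimental class; the sketch below is a minimal, hypothetical stand-in built on torch.utils.data.WeightedRandomSampler (which exposes comparable num_samples/replacement arguments) and a toy TensorDataset, only to show how the sampler/shuffle pair plugs into the DataLoader.

import torch
from torch.utils.data import DataLoader, TensorDataset, WeightedRandomSampler

# Toy stand-in for the dataset built by make_dataset() in train().
dataset = TensorDataset(torch.randn(100, 3), torch.randint(0, 2, (100,)))
data_len = len(dataset)

# Hypothetical stand-in for PrioritizedSampler: draw data_len indices per epoch,
# with replacement, weighted by per-sample priorities (uniform here).
priorities = torch.ones(data_len, dtype=torch.double)
sampler = WeightedRandomSampler(priorities, num_samples=data_len, replacement=True)
shuffle = False  # DataLoader raises a ValueError if shuffle=True is combined with a sampler.

dataloader = DataLoader(dataset, batch_size=8, shuffle=shuffle, sampler=sampler)
batch = next(iter(dataloader))  # batches now follow the sampler's weighted draw order

In the real experiment the priorities would presumably be updated between training steps (e.g., from per-sample loss), which is what distinguishes a prioritized sampler from plain shuffling.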