Use sampler always (temp fix)

Pepijn 2025-03-11 12:23:51 +01:00
parent e3c3c165aa
commit 841d54c050
1 changed file with 16 additions and 8 deletions

@@ -25,7 +25,7 @@ from torch.amp import GradScaler
 from torch.optim import Optimizer
 from lerobot.common.datasets.factory import make_dataset
-from lerobot.common.datasets.sampler import PrioritizedSampler
+from lerobot.common.datasets.sampler import EpisodeAwareSampler, PrioritizedSampler
 from lerobot.common.datasets.utils import cycle
 from lerobot.common.envs.factory import make_env
 from lerobot.common.optim.factory import make_optimizer_and_scheduler

@@ -166,18 +166,26 @@ def train(cfg: TrainPipelineConfig):
     # create dataloader for offline training
     if hasattr(cfg.policy, "drop_n_last_frames"):
         shuffle = False
-        sampler = PrioritizedSampler(
-            data_len=data_len,
-            alpha=0.6,
-            beta=0.1,
-            eps=1e-6,
-            replacement=True,
-            num_samples_per_epoch=data_len,
+        sampler = EpisodeAwareSampler(
+            dataset.episode_data_index,
+            drop_n_last_frames=cfg.policy.drop_n_last_frames,
+            shuffle=True,
         )
     else:
         shuffle = True
         sampler = None
+
+    # TODO(pepijn): If experiment works integrate this
+    shuffle = False
+    sampler = PrioritizedSampler(
+        data_len=data_len,
+        alpha=0.6,
+        beta=0.1,
+        eps=1e-6,
+        replacement=True,
+        num_samples_per_epoch=data_len,
+    )
 
     dataloader = torch.utils.data.DataLoader(
         dataset,
         num_workers=cfg.num_workers,
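
Net effect of the patch: the drop_n_last_frames branch goes back to building an EpisodeAwareSampler, but the result of the whole if/else is then overwritten unconditionally, so every run trains with PrioritizedSampler (hence "temp fix" and the TODO). The fork's PrioritizedSampler itself is not shown in this diff; as a point of reference, below is a hypothetical PER-style sketch of a sampler with the same constructor signature. Everything in it is an assumption inferred from the parameter names (alpha, beta, eps, replacement, num_samples_per_epoch), not the actual implementation in lerobot.common.datasets.sampler.

import torch
from torch.utils.data import Sampler


class PrioritizedSamplerSketch(Sampler[int]):
    """Hypothetical PER-style sampler: draws frame indices with probability
    proportional to priority**alpha (uniform until priorities are updated)."""

    def __init__(self, data_len, alpha=0.6, beta=0.1, eps=1e-6,
                 replacement=True, num_samples_per_epoch=None):
        self.data_len = data_len
        self.alpha = alpha              # how strongly priorities skew sampling
        self.beta = beta                # importance-sampling correction exponent
        self.eps = eps                  # keeps every priority strictly positive
        self.replacement = replacement
        self.num_samples = num_samples_per_epoch or data_len
        self.priorities = torch.ones(data_len, dtype=torch.double)  # start uniform

    def update_priorities(self, indices, errors):
        # Assumed training hook: larger per-frame loss -> sampled more often.
        self.priorities[indices] = errors.detach().abs().double() + self.eps

    def _probs(self):
        scaled = self.priorities ** self.alpha
        return scaled / scaled.sum()

    def weights(self, indices):
        # Importance-sampling weights that would correct the bias of
        # non-uniform sampling when applied to the per-sample loss.
        p = self._probs()[indices]
        w = (self.data_len * p) ** (-self.beta)
        return w / w.max()

    def __iter__(self):
        idx = torch.multinomial(self._probs(), self.num_samples,
                                replacement=self.replacement)
        return iter(idx.tolist())

    def __len__(self):
        return self.num_samples

Under this reading, replacement=True with num_samples_per_epoch=data_len keeps an "epoch" the same nominal length as uniform shuffling, which is why the DataLoader call site after the hunk needs no other change; shuffle just has to stay False, since shuffle=True and an explicit sampler are mutually exclusive in torch.utils.data.DataLoader.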