Merge branch 'main' into main

zhipeng tang 2025-04-09 11:00:07 +08:00 committed by GitHub
commit 3218b712e1
6 changed files with 11 additions and 11 deletions

@@ -36,8 +36,8 @@ repos:
       - id: end-of-file-fixer
       - id: trailing-whitespace
-  - repo: https://github.com/crate-ci/typos
-    rev: v1.30.2
+  - repo: https://github.com/adhtruong/mirrors-typos
+    rev: v1.31.1
     hooks:
       - id: typos
         args: [--force-exclude]
@@ -48,7 +48,7 @@ repos:
       - id: pyupgrade
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.9.10
+    rev: v0.11.4
     hooks:
       - id: ruff
         args: [--fix]
@@ -57,12 +57,12 @@ repos:
   ##### Security #####
   - repo: https://github.com/gitleaks/gitleaks
-    rev: v8.24.0
+    rev: v8.24.2
     hooks:
       - id: gitleaks
   - repo: https://github.com/woodruffw/zizmor-pre-commit
-    rev: v1.4.1
+    rev: v1.5.2
     hooks:
       - id: zizmor
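These four `rev` bumps are the kind of change a plain `pre-commit autoupdate` run produces: the hook IDs and arguments stay the same, so behavior only changes as far as the newer hook versions do. The typos hook additionally moves from `crate-ci/typos` to the `adhtruong/mirrors-typos` mirror of the same tool.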

@@ -119,7 +119,7 @@ print(dataset.features[camera_key]["shape"])
 delta_timestamps = {
     # loads 4 images: 1 second before current frame, 500 ms before, 200 ms before, and current frame
     camera_key: [-1, -0.5, -0.20, 0],
-    # loads 8 state vectors: 1.5 seconds before, 1 second before, ... 200 ms, 100 ms, and current frame
+    # loads 6 state vectors: 1.5 seconds before, 1 second before, ... 200 ms, 100 ms, and current frame
     "observation.state": [-1.5, -1, -0.5, -0.20, -0.10, 0],
     # loads 64 action vectors: current frame, 1 frame in the future, 2 frames, ... 63 frames in the future
     "action": [t / dataset.fps for t in range(64)],
@@ -143,6 +143,6 @@ dataloader = torch.utils.data.DataLoader(
 for batch in dataloader:
     print(f"{batch[camera_key].shape=}")  # (32, 4, c, h, w)
-    print(f"{batch['observation.state'].shape=}")  # (32, 5, c)
+    print(f"{batch['observation.state'].shape=}")  # (32, 6, c)
     print(f"{batch['action'].shape=}")  # (32, 64, c)
     break
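Both fixes in this file are comment corrections: the number of frames loaded per key equals the length of its `delta_timestamps` list, and `[-1.5, -1, -0.5, -0.20, -0.10, 0]` has six entries — not the eight the old comment claimed, nor the five the old shape comment implied — so the batched state tensor is `(32, 6, c)`, with 32 being the DataLoader batch size. A minimal sketch of that invariant with plain tensors; the stacking below is an illustrative stand-in, not LeRobot's actual loading code, and the key names and fps are assumed from the example above:

import torch

# Offsets as in the example above; one frame is loaded per requested offset,
# so the time dimension per key equals len(delta_timestamps[key]).
delta_timestamps = {
    "observation.images.cam": [-1, -0.5, -0.20, 0],          # 4 offsets -> 4 images
    "observation.state": [-1.5, -1, -0.5, -0.20, -0.10, 0],  # 6 offsets -> 6 vectors
    "action": [t / 30 for t in range(64)],                   # 64 offsets -> 64 vectors
}

for key, offsets in delta_timestamps.items():
    # Stand-in for one loaded sample: one 8-dim feature vector per offset.
    sample = torch.stack([torch.zeros(8) for _ in offsets])
    assert sample.shape[0] == len(offsets)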

@@ -1053,7 +1053,7 @@ class MultiLeRobotDataset(torch.utils.data.Dataset):
         super().__init__()
         self.repo_ids = repo_ids
         self.root = Path(root) if root else HF_LEROBOT_HOME
-        self.tolerances_s = tolerances_s if tolerances_s else {repo_id: 1e-4 for repo_id in repo_ids}
+        self.tolerances_s = tolerances_s if tolerances_s else dict.fromkeys(repo_ids, 0.0001)
         # Construct the underlying datasets passing everything but `transform` and `delta_timestamps` which
         # are handled by this class.
         self._datasets = [
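This file and the three below apply one and the same refactor: a dict comprehension that maps every key to a single constant becomes the equivalent, more idiomatic `dict.fromkeys` (likely prompted by the ruff bump above — this is the rewrite Ruff's C420 "unnecessary dict comprehension" rule suggests). Note that `0.0001` is the same float as `1e-4`, so the default tolerances are unchanged. A quick sketch of the equivalence, with hypothetical repo IDs:

repo_ids = ["lerobot/aloha_static_coffee", "lerobot/pusht"]  # hypothetical IDs

old = {repo_id: 1e-4 for repo_id in repo_ids}
new = dict.fromkeys(repo_ids, 0.0001)

assert old == new  # 1e-4 and 0.0001 are the identical float value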

@@ -240,7 +240,7 @@ def load_episodes_stats(local_dir: Path) -> dict:
 def backward_compatible_episodes_stats(
     stats: dict[str, dict[str, np.ndarray]], episodes: list[int]
 ) -> dict[str, dict[str, np.ndarray]]:
-    return {ep_idx: stats for ep_idx in episodes}
+    return dict.fromkeys(episodes, stats)


 def load_image_as_numpy(
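One thing to keep in mind about `dict.fromkeys` is that its value argument is not copied: every key is bound to the same object. The old comprehension here behaved identically — each episode index got a reference to the one `stats` dict — so the rewrite preserves behavior; the sharing only matters if the value is later mutated. A small demonstration:

import numpy as np

stats = {"observation.state": {"mean": np.zeros(2), "std": np.ones(2)}}
episodes = [0, 1, 2]

per_episode = dict.fromkeys(episodes, stats)

# All episodes share the single stats object, exactly as the replaced
# comprehension did; a mutation through one key is visible through all.
assert all(value is stats for value in per_episode.values())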

@@ -481,7 +481,7 @@ def convert_dataset(
     # Tasks
     if single_task:
-        tasks_by_episodes = {ep_idx: single_task for ep_idx in episode_indices}
+        tasks_by_episodes = dict.fromkeys(episode_indices, single_task)
         dataset, tasks = add_task_index_by_episodes(dataset, tasks_by_episodes)
         tasks_by_episodes = {ep_idx: [task] for ep_idx, task in tasks_by_episodes.items()}
     elif tasks_path:
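The same shared-value property is harmless here, because the very next line rebuilds the dict with a fresh one-element list per episode; only the immutable task string stays shared. A compressed sketch with hypothetical values:

episode_indices = [0, 1]          # hypothetical episode indices
single_task = "pick up the cube"  # hypothetical task description

tasks_by_episodes = dict.fromkeys(episode_indices, single_task)
tasks_by_episodes = {ep_idx: [task] for ep_idx, task in tasks_by_episodes.items()}

# Each episode now owns its own list, so mutating one leaves the other intact.
tasks_by_episodes[0].append("then place it in the bin")
assert tasks_by_episodes[1] == ["pick up the cube"]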

@@ -94,7 +94,7 @@ class MetricsTracker:
         metrics: dict[str, AverageMeter],
         initial_step: int = 0,
     ):
-        self.__dict__.update({k: None for k in self.__keys__})
+        self.__dict__.update(dict.fromkeys(self.__keys__))
         self._batch_size = batch_size
         self._num_frames = num_frames
         self._avg_samples_per_ep = num_frames / num_episodes
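Finally, when the value argument is omitted, `dict.fromkeys` fills in `None`, which is exactly what the replaced `{k: None for k in self.__keys__}` produced: the tracker pre-seeds its instance attributes as `None` placeholders before real values are assigned. A toy version (the key names are illustrative, not the real `__keys__`):

class Tracker:
    __keys__ = ("steps", "samples", "episodes", "epochs")  # illustrative names

    def __init__(self):
        # Identical to self.__dict__.update({k: None for k in self.__keys__}).
        self.__dict__.update(dict.fromkeys(self.__keys__))

t = Tracker()
assert t.steps is None and t.epochs is None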