Merge branch 'user/aliberts/2025_02_25_refactor_robots' into feat/thread_safe_teleop_keyboard

Simon Alibert 2025-04-04 09:44:35 +02:00 committed by GitHub
commit b722dcd909
102 changed files with 3619 additions and 7295 deletions


@ -98,14 +98,18 @@ conda create -y -n lerobot python=3.10
conda activate lerobot
```
Install 🤗 LeRobot:
When using `miniconda`, if you don't have `ffmpeg` in your environment:
```bash
pip install -e .
conda install ffmpeg
```
> **NOTE:** Depending on your platform, if you encounter any build errors during this step,
you may need to install `cmake` and `build-essential` for building some of our dependencies.
On Linux: `sudo apt-get install cmake build-essential`
Install 🤗 LeRobot:
```bash
pip install --no-binary=av -e .
```
> **NOTE:** If you encounter build errors, you may need to install additional dependencies (`cmake`, `build-essential`, and the FFmpeg development libraries). On Linux, run:
`sudo apt-get install cmake build-essential python-dev pkg-config libavformat-dev libavcodec-dev libavdevice-dev libavutil-dev libswscale-dev libswresample-dev libavfilter-dev`. For other systems, see: [Compiling PyAV](https://pyav.org/docs/develop/overview/installation.html#bring-your-own-ffmpeg)
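As a quick sanity check after this step (an illustrative addition, not part of the upstream instructions), you can confirm that the from-source PyAV build succeeded and that an `ffmpeg` binary is visible:
```python
# Illustrative check only: verify PyAV (built from source via --no-binary=av)
# imports cleanly and that an ffmpeg binary is reachable on the PATH.
import shutil

import av  # PyAV

print("PyAV version:", av.__version__)
print("ffmpeg binary:", shutil.which("ffmpeg") or "not found on PATH")
```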
For simulations, 🤗 LeRobot comes with gymnasium environments that can be installed as extras:
- [aloha](https://github.com/huggingface/gym-aloha)
@ -114,7 +118,7 @@ For simulations, 🤗 LeRobot comes with gymnasium environments that can be inst
For instance, to install 🤗 LeRobot with aloha and pusht, use:
```bash
pip install -e ".[aloha, pusht]"
pip install --no-binary=av -e ".[aloha, pusht]"
```
To use [Weights and Biases](https://docs.wandb.ai/quickstart) for experiment tracking, log in with


@ -18,7 +18,7 @@ training outputs directory. In the latter case, you might want to run examples/3
It requires the installation of the 'gym_pusht' simulation environment. Install it by running:
```bash
pip install -e ".[pusht]"
pip install --no-binary=av -e ".[pusht]"
```
"""


@ -33,7 +33,7 @@ First, install the additional dependencies required for robots built with dynami
Using `pip`:
```bash
pip install -e ".[dynamixel]"
pip install --no-binary=av -e ".[dynamixel]"
```
Using `poetry`:
@ -46,13 +46,6 @@ Using `uv`:
uv sync --extra "dynamixel"
```
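As with the other extras, a quick import check can confirm the installation. This is an illustrative addition and assumes the `dynamixel` extra pulls in the `dynamixel-sdk` package (import name `dynamixel_sdk`):
```python
# Illustrative check that the dynamixel extra installed the Dynamixel SDK.
import dynamixel_sdk

print("dynamixel_sdk imported from:", dynamixel_sdk.__file__)
```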
/!\ For Linux only, ffmpeg and opencv require a conda install for now. Run this exact sequence of commands:
```bash
conda install -c conda-forge ffmpeg
pip uninstall opencv-python
conda install -c conda-forge "opencv>=4.10.0"
```
You are now ready to plug the 5V power supply into the motor bus of the leader arm (the smaller one), since all its motors only require 5V.
Then plug the 12V power supply into the motor bus of the follower arm. It has two motors that need 12V, and the rest will be powered with 5V through the voltage converter.
@ -834,11 +827,6 @@ It contains:
- `dtRphone:33.84 (29.5hz)` which is the delta time of capturing an image from the phone camera in the thread running asynchronously.
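The frequency shown in parentheses is simply the inverse of the delta time in milliseconds, for example:
```python
# Illustrative only: how a delta time in milliseconds maps to the logged frequency.
dt_ms = 33.84            # e.g. dtRphone:33.84
freq_hz = 1000 / dt_ms   # ~29.5 Hz, matching the "(29.5hz)" shown in the log
print(f"{freq_hz:.2f} Hz")
```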
Troubleshooting:
- On Linux, if you encounter a hanging issue when using cameras, uninstall opencv and re-install it with conda:
```bash
pip uninstall opencv-python
conda install -c conda-forge opencv=4.10.0
```
- On Linux, if you encounter any issue during video encoding with `ffmpeg: unknown encoder libsvtav1`, you can:
- install with conda-forge by running `conda install -c conda-forge ffmpeg` (it should be compiled with `libsvtav1`),
- or, install [Homebrew](https://brew.sh) and run `brew install ffmpeg` (it should be compiled with `libsvtav1`),
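Before switching your install, you can first probe whether the `ffmpeg` already on your PATH was compiled with `libsvtav1`. This is an illustrative check, not part of the original tutorial, and assumes `ffmpeg` is on the PATH:
```python
# Illustrative: list ffmpeg's encoders and look for libsvtav1.
import subprocess

result = subprocess.run(
    ["ffmpeg", "-hide_banner", "-encoders"], capture_output=True, text=True
)
print("libsvtav1 available:", "libsvtav1" in result.stdout)
```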


@ -1,243 +0,0 @@
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import shutil
from pathlib import Path
import numpy as np
from huggingface_hub import HfApi
from lerobot.common.constants import HF_LEROBOT_HOME
from lerobot.common.datasets.lerobot_dataset import CODEBASE_VERSION, LeRobotDataset
from lerobot.common.datasets.push_dataset_to_hub._download_raw import download_raw
PUSHT_TASK = "Push the T-shaped blue block onto the T-shaped green target surface."
PUSHT_FEATURES = {
"observation.state": {
"dtype": "float32",
"shape": (2,),
"names": {
"axes": ["x", "y"],
},
},
"action": {
"dtype": "float32",
"shape": (2,),
"names": {
"axes": ["x", "y"],
},
},
"next.reward": {
"dtype": "float32",
"shape": (1,),
"names": None,
},
"next.success": {
"dtype": "bool",
"shape": (1,),
"names": None,
},
"observation.environment_state": {
"dtype": "float32",
"shape": (16,),
"names": [
"keypoints",
],
},
"observation.image": {
"dtype": None,
"shape": (3, 96, 96),
"names": [
"channels",
"height",
"width",
],
},
}
def build_features(mode: str) -> dict:
features = PUSHT_FEATURES
if mode == "keypoints":
features.pop("observation.image")
else:
features.pop("observation.environment_state")
features["observation.image"]["dtype"] = mode
return features
def load_raw_dataset(zarr_path: Path):
try:
from lerobot.common.datasets.push_dataset_to_hub._diffusion_policy_replay_buffer import (
ReplayBuffer as DiffusionPolicyReplayBuffer,
)
except ModuleNotFoundError as e:
print("`gym_pusht` is not installed. Please install it with `pip install 'lerobot[gym_pusht]'`")
raise e
zarr_data = DiffusionPolicyReplayBuffer.copy_from_path(zarr_path)
return zarr_data
def calculate_coverage(zarr_data):
try:
import pymunk
from gym_pusht.envs.pusht import PushTEnv, pymunk_to_shapely
except ModuleNotFoundError as e:
print("`gym_pusht` is not installed. Please install it with `pip install 'lerobot[gym_pusht]'`")
raise e
block_pos = zarr_data["state"][:, 2:4]
block_angle = zarr_data["state"][:, 4]
num_frames = len(block_pos)
coverage = np.zeros((num_frames,), dtype=np.float32)
# 8 keypoints with 2 coords each
keypoints = np.zeros((num_frames, 16), dtype=np.float32)
# Set x, y, theta (in radians)
goal_pos_angle = np.array([256, 256, np.pi / 4])
goal_body = PushTEnv.get_goal_pose_body(goal_pos_angle)
for i in range(num_frames):
space = pymunk.Space()
space.gravity = 0, 0
space.damping = 0
# Add walls.
walls = [
PushTEnv.add_segment(space, (5, 506), (5, 5), 2),
PushTEnv.add_segment(space, (5, 5), (506, 5), 2),
PushTEnv.add_segment(space, (506, 5), (506, 506), 2),
PushTEnv.add_segment(space, (5, 506), (506, 506), 2),
]
space.add(*walls)
block_body, block_shapes = PushTEnv.add_tee(space, block_pos[i].tolist(), block_angle[i].item())
goal_geom = pymunk_to_shapely(goal_body, block_body.shapes)
block_geom = pymunk_to_shapely(block_body, block_body.shapes)
intersection_area = goal_geom.intersection(block_geom).area
goal_area = goal_geom.area
coverage[i] = intersection_area / goal_area
keypoints[i] = PushTEnv.get_keypoints(block_shapes).flatten()
return coverage, keypoints
def calculate_success(coverage: float, success_threshold: float):
return coverage > success_threshold
def calculate_reward(coverage: float, success_threshold: float):
return np.clip(coverage / success_threshold, 0, 1)
def main(raw_dir: Path, repo_id: str, mode: str = "video", push_to_hub: bool = True):
if mode not in ["video", "image", "keypoints"]:
raise ValueError(mode)
if (HF_LEROBOT_HOME / repo_id).exists():
shutil.rmtree(HF_LEROBOT_HOME / repo_id)
if not raw_dir.exists():
download_raw(raw_dir, repo_id="lerobot-raw/pusht_raw")
zarr_data = load_raw_dataset(zarr_path=raw_dir / "pusht_cchi_v7_replay.zarr")
env_state = zarr_data["state"][:]
agent_pos = env_state[:, :2]
action = zarr_data["action"][:]
image = zarr_data["img"] # (b, h, w, c)
if image.dtype == np.float32 and image.max() == np.float32(255):
# HACK: images are loaded as float32 but they actually encode uint8 data
image = image.astype(np.uint8)
episode_data_index = {
"from": np.concatenate(([0], zarr_data.meta["episode_ends"][:-1])),
"to": zarr_data.meta["episode_ends"],
}
# Calculate success and reward based on the overlapping area
# of the T-object and the T-area.
coverage, keypoints = calculate_coverage(zarr_data)
success = calculate_success(coverage, success_threshold=0.95)
reward = calculate_reward(coverage, success_threshold=0.95)
features = build_features(mode)
dataset = LeRobotDataset.create(
repo_id=repo_id,
fps=10,
robot_type="2d pointer",
features=features,
image_writer_threads=4,
)
episodes = range(len(episode_data_index["from"]))
for ep_idx in episodes:
from_idx = episode_data_index["from"][ep_idx]
to_idx = episode_data_index["to"][ep_idx]
num_frames = to_idx - from_idx
for frame_idx in range(num_frames):
i = from_idx + frame_idx
idx = i + (frame_idx < num_frames - 1)
frame = {
"action": action[i],
# Shift reward and success by +1 until the last item of the episode
"next.reward": reward[idx : idx + 1],
"next.success": success[idx : idx + 1],
"task": PUSHT_TASK,
}
frame["observation.state"] = agent_pos[i]
if mode == "keypoints":
frame["observation.environment_state"] = keypoints[i]
else:
frame["observation.image"] = image[i]
dataset.add_frame(frame)
dataset.save_episode()
if push_to_hub:
dataset.push_to_hub()
hub_api = HfApi()
hub_api.create_tag(repo_id, tag=CODEBASE_VERSION, repo_type="dataset")
if __name__ == "__main__":
    # To try this script, modify the repo id with your own HuggingFace user (e.g. cadene/pusht)
repo_id = "lerobot/pusht"
modes = ["video", "image", "keypoints"]
# Uncomment if you want to try with a specific mode
# modes = ["video"]
# modes = ["image"]
# modes = ["keypoints"]
raw_dir = Path("data/lerobot-raw/pusht_raw")
for mode in modes:
if mode in ["image", "keypoints"]:
repo_id += f"_{mode}"
# download and load raw dataset, create LeRobotDataset, populate it, push to hub
main(raw_dir, repo_id=repo_id, mode=mode)
# Uncomment if you want to load the local dataset and explore it
# dataset = LeRobotDataset(repo_id=repo_id)
# breakpoint()
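For reference, the reward and success rules defined in this (now removed) script rescale the goal-coverage ratio by the 0.95 threshold and clip it to [0, 1]. A self-contained illustration of the resulting values:
```python
# Self-contained illustration of the reward/success rule used by the script above.
import numpy as np

def calculate_success(coverage, success_threshold):
    return coverage > success_threshold

def calculate_reward(coverage, success_threshold):
    return np.clip(coverage / success_threshold, 0, 1)

coverage = np.array([0.20, 0.80, 0.97], dtype=np.float32)
print(calculate_reward(coverage, success_threshold=0.95))   # approx. [0.21 0.84 1.  ]
print(calculate_success(coverage, success_threshold=0.95))  # [False False  True]
```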


@ -1,85 +0,0 @@
https://drive.google.com/file/d/1_SOJkgfP5yZyVjMhTt3nwhvyUjcnlI51/view?usp=drive_link
https://drive.google.com/file/d/1rmgN8UUzph1qwJnzG1d-uOafodn-gLvb/view?usp=drive_link
https://drive.google.com/file/d/1NYQ-XxsBVinB6dUoZmVWweT83367P3i2/view?usp=drive_link
https://drive.google.com/file/d/1oAv_j74zxxCJieMG7r5Vl2BeHK1__3s3/view?usp=drive_link
https://drive.google.com/file/d/1wFUJQROsrTJt64YRuIeExhFjr2wnK5uu/view?usp=drive_link
https://drive.google.com/file/d/1KzL3Tt0Le7jVl58XVRUcmigmXjyiuhbK/view?usp=drive_link
https://drive.google.com/file/d/1qy_YBladeHtianSSGtgAPSHtMin7msvf/view?usp=drive_link
https://drive.google.com/file/d/1rA_F0V_qL_nyuC_0aBKCisF4-0TIkF2Y/view?usp=drive_link
https://drive.google.com/file/d/1hw-8qMpz9VgSt62XoASqNRuPECpCwJQP/view?usp=drive_link
https://drive.google.com/file/d/1BpHOl9rKMzdvNGka6js7C0s40hH6vnDA/view?usp=drive_link
https://drive.google.com/file/d/1PazhkhiDnJ-OUMyDVDFxEZNKQQqHiNWS/view?usp=drive_link
https://drive.google.com/file/d/1lZ665R6ATl57dypxH4dGJ2NSt6XYnbuz/view?usp=drive_link
https://drive.google.com/file/d/1V9HzLaf-tlG15wUzT7KrTDCS_z1vi5NV/view?usp=drive_link
https://drive.google.com/file/d/1aKauWiXoKqbNwn_2xs4MrmLlaNYlVNmO/view?usp=drive_link
https://drive.google.com/file/d/1WVD5DFhriO1YmmOgiVHhacR6HWoTPxav/view?usp=drive_link
https://drive.google.com/file/d/1_X43WgeBAsfkhH9EmpyPki8U9joMeAGC/view?usp=drive_link
https://drive.google.com/file/d/1t8x0GqWoNKWtnBsB7_D40Z34nL9ak4kf/view?usp=drive_link
https://drive.google.com/file/d/15V_f26WaKOXjKnq2T3HRWAmtQUi4lbu2/view?usp=drive_link
https://drive.google.com/file/d/11VFIAsiSDsMOBANgrOcZBpKB9AFWnLy7/view?usp=drive_link
https://drive.google.com/file/d/1M0NS7vVaxJv3FHnuRYtdwTFYF7We4LxP/view?usp=drive_link
https://drive.google.com/file/d/1mR0OItTNqFnVLoczcyKYlm6drAy778lO/view?usp=drive_link
https://drive.google.com/file/d/1NbVFWDQAh-z4JJ4D-Zw6Lps9kdvpqh2j/view?usp=drive_link
https://drive.google.com/file/d/1JQoZGBzl4W3QG26-n39tefcGN0fDRMbB/view?usp=drive_link
https://drive.google.com/file/d/1VBjHl-TvZpncopvasIP5G9gecbB2a5f6/view?usp=drive_link
https://drive.google.com/file/d/1VzSf6zaB21nahm7MsPwroXbJ84NIwq0b/view?usp=drive_link
https://drive.google.com/file/d/1OtNnfMEydNtZOcivs4k6E_uJSpf8PkGy/view?usp=drive_link
https://drive.google.com/file/d/14nVvpvsrFr_03Pa_N7MKzwnRwibOUYM6/view?usp=drive_link
https://drive.google.com/file/d/1M8li6duiO2r3lv_9HhF_XJn0oZUIEK5F/view?usp=drive_link
https://drive.google.com/file/d/1Cpzea6fO14lxAaNfSBifqoa4ekhCiLD1/view?usp=drive_link
https://drive.google.com/file/d/1mbxRTm5vlbsY9UJ0jfjM6j9D7kPJjBpG/view?usp=drive_link
https://drive.google.com/file/d/1RXD1i6IfWsHRlCxVmG04h2h5Ycm_WwZN/view?usp=drive_link
https://drive.google.com/file/d/1QFqFSwDGOk1BkgGmqgCcc2BRWnJ6R3MA/view?usp=drive_link
https://drive.google.com/file/d/1bFqWR8DQM0ZUxxtS2bl-RANQvukeFLzp/view?usp=drive_link
https://drive.google.com/file/d/1pR-rH3yNGoyPdD4hJ6-3lXQ-PstBx9du/view?usp=drive_link
https://drive.google.com/file/d/107OAwLY-hva9HeQLIK7VCh-ytdDabVjr/view?usp=drive_link
https://drive.google.com/file/d/1Tpl08QOaSZ37GTO4awFWSdD8wBR9xdlT/view?usp=drive_link
https://drive.google.com/file/d/1MR164AOM-0S1T6RX8xKTV2IHyaCvpqAW/view?usp=drive_link
https://drive.google.com/file/d/1_wknJfVnStIhJ82lU_QtcrwahsqYIsr8/view?usp=drive_link
https://drive.google.com/file/d/1ZuEktWrbYkTx0l5pj3WiZ2CJrfbDOHNo/view?usp=drive_link
https://drive.google.com/file/d/15G_10hkkkq6yxvyI5NGZirlF-RzduR2F/view?usp=drive_link
https://drive.google.com/file/d/1DBKxg3ONqh7dhLuX6oh1Yyo2x383V1Hp/view?usp=drive_link
https://drive.google.com/file/d/1B5iDBkTUr5vopDddV_fHud18SqAHhauS/view?usp=drive_link
https://drive.google.com/file/d/1acwFV0eenRkki1QcjSKH5xqOtys-P3Pr/view?usp=drive_link
https://drive.google.com/file/d/1S47BI83xyrh-FKXsvAQqer98Biu_p8XK/view?usp=drive_link
https://drive.google.com/file/d/1JL6DmBZl3uyq9dyLfgSqtGF06e7E9JwM/view?usp=drive_link
https://drive.google.com/file/d/16WvRS4Kjog8Pxgr0E3sGGnI01YwL9Uql/view?usp=drive_link
https://drive.google.com/file/d/12ttGqL33IPWg0-s1SD44rr22M6LiSQBr/view?usp=drive_link
https://drive.google.com/file/d/1OyZqqnldTU_DliRbr6x0C4a_iWPwIN7j/view?usp=drive_link
https://drive.google.com/file/d/1oYk00IpLnR9fesLfD15Ebe7nVBffEbcS/view?usp=drive_link
https://drive.google.com/file/d/1eyE2-MQduCEqCd-5_kl5zsoOEERAzpZD/view?usp=drive_link
https://drive.google.com/file/d/1ir1Ya-vO0d97pfvbePlUeuKTTRc0qIMU/view?usp=drive_link
https://drive.google.com/file/d/1hOi-JnqlMt47gVnLZHMTqeojyYVErohl/view?usp=drive_link
https://drive.google.com/file/d/1NFFw5_PqigQ7xGqsL-MNq2B1r5yAscCf/view?usp=drive_link
https://drive.google.com/file/d/1uftq1-Zlh8d2sNLWrlVcKYQUwZTD7o24/view?usp=drive_link
https://drive.google.com/file/d/1-ax19dSLPacVgk000T-m3l4flPcg07pM/view?usp=drive_link
https://drive.google.com/file/d/126y-lgn86-ZmCz8hooF1THKJGGObw3OB/view?usp=drive_link
https://drive.google.com/file/d/1JiDniK0VmDIkk92AbBILb8J2Ba59PWML/view?usp=drive_link
https://drive.google.com/file/d/1kr8nPIRljiU0R4J9SMgj80o1FPQxzu9z/view?usp=drive_link
https://drive.google.com/file/d/1bbThWRij1pKBh_kFgV8FwK0sXtTHBoLX/view?usp=drive_link
https://drive.google.com/file/d/1WenzDW6lxk1xkOFm-OiGFfc0ROskAuKU/view?usp=drive_link
https://drive.google.com/file/d/1MiKRzuzUn1yN-k_6kPJJzIGy7dT-nnsD/view?usp=drive_link
https://drive.google.com/file/d/17rRg2tcmB-gNhQ0KoZJQmNfyFeoij1jH/view?usp=drive_link
https://drive.google.com/file/d/11mokBpvrY3ld6sY5WztREtJ1jgqfQV70/view?usp=drive_link
https://drive.google.com/file/d/1Il_6IOx9NDp1bX_KHizJfBwzTufTmn86/view?usp=drive_link
https://drive.google.com/file/d/1KswtJGsxJ7eeBDAmNA_aeLjOxcH6MIxa/view?usp=drive_link
https://drive.google.com/file/d/1gzMhi5uWu4C3Y6WbQ3L-08V96GxTZrRR/view?usp=drive_link
https://drive.google.com/file/d/1nRQFtaBxfUCYc2W90Qibh0kHCt6YQCfc/view?usp=drive_link
https://drive.google.com/file/d/1vs-gyW-KheqHbUATwAhA2mmR9GOGw7f_/view?usp=drive_link
https://drive.google.com/file/d/1MuxzGOA2fgLaHryq82KkQumtuRJGcUOC/view?usp=drive_link
https://drive.google.com/file/d/1IIwxZnGlqrXLUXqG6yMO0r7uhCvhpk9e/view?usp=drive_link
https://drive.google.com/file/d/1vE7XPyaFcXP4DtTY5Y9WKIt7zWgmX-Cr/view?usp=drive_link
https://drive.google.com/file/d/1j-bIV09gr21RC3-x1N_pK4RPLV3fmWKz/view?usp=drive_link
https://drive.google.com/file/d/1t3nW1rD3S-EL0Oymb5U7ZAj5UMkydkln/view?usp=drive_link
https://drive.google.com/file/d/14hbfHCdMKtJZ41F9CQReMec2jeRFTOqR/view?usp=drive_link
https://drive.google.com/file/d/1x-hUyOSne5BW0AzQ3W6_Pf4g5yXQWi9M/view?usp=drive_link
https://drive.google.com/file/d/1sw9JqRg6E-3P84I3ZhzTrJMu0vuiaMmP/view?usp=drive_link
https://drive.google.com/file/d/1LuqhQlL4MGZhB_6THmkovRxrlP26BbdC/view?usp=drive_link
https://drive.google.com/file/d/15C5K6v_lkjnMSmUvVyqHQKwh2N166e7K/view?usp=drive_link
https://drive.google.com/file/d/1ns_9eSsQeeoZ10nlbkLy8tu0GmJFSnkt/view?usp=drive_link
https://drive.google.com/file/d/1NpzWJeK6CqjxzjIMYe6aYdX8xGsQwD4o/view?usp=drive_link
https://drive.google.com/file/d/1NMLezwufKJ9_8xTc9KQThSzVVD71B9Ui/view?usp=drive_link
https://drive.google.com/file/d/1aa71DCUqs6oXlIxX35jgsmsgm-NlDxPV/view?usp=drive_link
https://drive.google.com/file/d/1UJzkIZzAL0j-D5YQBnoq7mHvttASy12O/view?usp=drive_link
https://drive.google.com/file/d/1nPgx36HIJFb7oI94VbRzWjpPP2GANxzG/view?usp=drive_link
https://drive.google.com/file/d/1NovAP-KVJjqcuvWy3d6G4ptGGAIDqcCx/view?usp=drive_link


@ -1,55 +0,0 @@
https://drive.google.com/file/d/11M3Ye0r5agMaaicPbVGD0q2Hb3rGklbb/view?usp=drive_link
https://drive.google.com/file/d/1-tx7SvYYgSvXCvnf_EI2OVdwK-CkFY6S/view?usp=drive_link
https://drive.google.com/file/d/1EWJunmOpMHaU1hE106wwpbkGYcjQXYAF/view?usp=drive_link
https://drive.google.com/file/d/1IDn95Z7FSiCckrSENtGV4u3RyFHNQSDY/view?usp=drive_link
https://drive.google.com/file/d/1CwzvWj1i7QOtqrZvsCZ6BdZaKNDfpN32/view?usp=drive_link
https://drive.google.com/file/d/1HvAvlhm77nAD3Td24QPSeq8lw-Rl_aOh/view?usp=drive_link
https://drive.google.com/file/d/1t-suKYOPhXH666RpAYNRp2QU_DOy3AeM/view?usp=drive_link
https://drive.google.com/file/d/18xpKgWh7RWyjMN5PkLTOo-AxsAadAuRw/view?usp=drive_link
https://drive.google.com/file/d/1oci5Eto-ztv-AQNz8EnwZveBIhxvk-xJ/view?usp=drive_link
https://drive.google.com/file/d/1Y-t_4vxdE6NpHO0DLJR8f3mD0Q-Wj5-c/view?usp=drive_link
https://drive.google.com/file/d/1lylRqbbbB8bgtpsBWMPACmHJreuKmllv/view?usp=drive_link
https://drive.google.com/file/d/1yliSyMig_NXShWfQx6qyW7Ijf2Y5lFK6/view?usp=drive_link
https://drive.google.com/file/d/1XXhwJsJbeb7KXAooGvJapnm9bjnGUmxS/view?usp=drive_link
https://drive.google.com/file/d/1_xs1f3hW2JArKyvfF7UWubWjyROGTLs6/view?usp=drive_link
https://drive.google.com/file/d/1WVEHpr6EqKCZbkHapQSTXJq4xE4SWFT-/view?usp=drive_link
https://drive.google.com/file/d/1RqOHv9pEQGvW8NUA7ynffFmG999TL_Az/view?usp=drive_link
https://drive.google.com/file/d/1cu5AgD2gh-uA3PFJmzxxzNaF3qOSlYY1/view?usp=drive_link
https://drive.google.com/file/d/1SsrXqiPclNrnYToPZ9Uq-k3y0C4qdHT1/view?usp=drive_link
https://drive.google.com/file/d/1-J7EXf0vjkLIfSqT8ICEsP6CTjzSLBop/view?usp=drive_link
https://drive.google.com/file/d/11O7ewUmoZXfyyKjy_6B5RW4DpjICxqBT/view?usp=drive_link
https://drive.google.com/file/d/1iic44kZoCsjNsfAz2cMstZ9-WQvAhblF/view?usp=drive_link
https://drive.google.com/file/d/1yLV1lVX-2WnWQldGlnQZ0x7QBuDiVkL3/view?usp=drive_link
https://drive.google.com/file/d/1Tybp9ru98TTbGn4eyROpUQwDFuALWXmk/view?usp=drive_link
https://drive.google.com/file/d/13E9OTMiipVJByDs5-J19oWwAz7l94LTN/view?usp=drive_link
https://drive.google.com/file/d/1EeTpJQdMSliw4JzSMtJ6CyTvVdexjM4M/view?usp=drive_link
https://drive.google.com/file/d/1NHyNwoFqzeAu-1_PSpq5JfxaiD_xbpn9/view?usp=drive_link
https://drive.google.com/file/d/1fJcS0phDp4xm_FyGaJ5wr9Pe4KqtHaxD/view?usp=drive_link
https://drive.google.com/file/d/12AqrLUaewDPEcFRqPZeZFb_TQ0Lfi3At/view?usp=drive_link
https://drive.google.com/file/d/1x_hd4Qsq1oJS-aj2t3qM7WbbV7KZj05b/view?usp=drive_link
https://drive.google.com/file/d/14OUSUArmsB068hs6BuEIXQhI1Cyz8Sf0/view?usp=drive_link
https://drive.google.com/file/d/16zlzh1T5zeUJQnFf382NXkFEKEnDub4O/view?usp=drive_link
https://drive.google.com/file/d/1IbDltmN-NEFCNtr1TO4ILxEgQ94rtjWv/view?usp=drive_link
https://drive.google.com/file/d/15gmlf8Gx9455pZ1AlqcCSwh3nDPxMzSr/view?usp=drive_link
https://drive.google.com/file/d/1qHpRL1oZfIMo_vxnm8qfwQ-7l0BZIVva/view?usp=drive_link
https://drive.google.com/file/d/1H1xskIgiFZivkYn23rMzH3xePGOh3VTC/view?usp=drive_link
https://drive.google.com/file/d/1avls6Pv0kYiCMNVknbc1zQsgy64MUDMM/view?usp=drive_link
https://drive.google.com/file/d/1MmWVgCj5khc8KMIifmt3EzF1o-CtPyyn/view?usp=drive_link
https://drive.google.com/file/d/1U0kCc_xqW0WNppf4sbnK14euWKdPZtzB/view?usp=drive_link
https://drive.google.com/file/d/16CaEyQscOuhLj23PEGDTL9DeyNkohkMn/view?usp=drive_link
https://drive.google.com/file/d/1Iu8uM6UUJ0zW8tvN-9UiOe_4oSNzEutg/view?usp=drive_link
https://drive.google.com/file/d/1UImqiBaIxCR-1DNJaZhHqeHhaySOtVIr/view?usp=drive_link
https://drive.google.com/file/d/1VpU2V_leIoRIyv_lAvE7eLHBG8DxCTnp/view?usp=drive_link
https://drive.google.com/file/d/1_Q8J27OT3Xby7QY6yHvIJauFRWEMxkRm/view?usp=drive_link
https://drive.google.com/file/d/1bantmVo1L9Xz4tbiNw_a1UC2Z_HPO1wT/view?usp=drive_link
https://drive.google.com/file/d/1IRIXMJMCBDkBjbaHvAlEiBogSvZ1jK_3/view?usp=drive_link
https://drive.google.com/file/d/1mAHXKjiFbjwydypW2t5Lv8_H5x6nHegl/view?usp=drive_link
https://drive.google.com/file/d/1SfyY796fLrBCMY39OcyuxZafqSCRZPZk/view?usp=drive_link
https://drive.google.com/file/d/1X-44sZ8CcfzIskc0dvSx882o1yFhHaZB/view?usp=drive_link
https://drive.google.com/file/d/1BOIWCCCk6DLD4Bmvc75ZbbLi9AQm-1ao/view?usp=drive_link
https://drive.google.com/file/d/1RuyDtRE1kk76sw-wP8vx5SgLoPF3PA_H/view?usp=drive_link
https://drive.google.com/file/d/1c4eoQiBbGuy3CTAQDUSkd84Ponh1roAQ/view?usp=drive_link
https://drive.google.com/file/d/19PXB9z4Ljq6dsbf9TqcOrrP5SRbw2Tc_/view?usp=drive_link
https://drive.google.com/file/d/1nn1VVZVoIXWdYDozR7XHXE4mPLQG80PQ/view?usp=drive_link
https://drive.google.com/file/d/1MBdFGOKPV8GUhwoSsJ_Ky3qAMLM2Bv3K/view?usp=drive_link
https://drive.google.com/file/d/1of3k_M-7Nh3I1TndcWedxK4ca9dn8Sc5/view?usp=drive_link


@ -1,20 +0,0 @@
https://drive.google.com/file/d/12ctkOAdkCNGN1JLbZb5ww3XTBn2LFpGI/view?usp=drive_link
https://drive.google.com/file/d/1G_Vd46_4fq6O64gHHjUbJX5Ld44ZZx0y/view?usp=drive_link
https://drive.google.com/file/d/1uKgUy73B3xBogQAOUhfZjO0X5qZGsi2c/view?usp=drive_link
https://drive.google.com/file/d/1fu9cIrfI-fE2LhdGUxbx7-8Ci_PF8Ypm/view?usp=drive_link
https://drive.google.com/file/d/1Ygk9ZPJzx8xw2A9JF3NHbJ44TqnvSTQR/view?usp=drive_link
https://drive.google.com/file/d/18m5xPuccNsEB20WPshm3zhxmXc6k63ED/view?usp=drive_link
https://drive.google.com/file/d/1DiqqxC44rriviRQpqogcv0-EB-Y6nr9g/view?usp=drive_link
https://drive.google.com/file/d/1qPdaoTVDizJXkfXLioWU7iJ8hqCXSyOQ/view?usp=drive_link
https://drive.google.com/file/d/1Fj9kIA_mG7f67WFfACJEaZ7izcHG7vUm/view?usp=drive_link
https://drive.google.com/file/d/1WpYehZnI2P7dUdJPfkE-ij1rqCnjZEbB/view?usp=drive_link
https://drive.google.com/file/d/1_zwWkT4jPyzB38STWb6whlzsPzXmfA9r/view?usp=drive_link
https://drive.google.com/file/d/1U6-J4I_fPlSFFGfhZPxS5_YzKXwXIZYp/view?usp=drive_link
https://drive.google.com/file/d/1pRhxxcTfZp5tQo_EScvJUwfc3amiS6Vk/view?usp=drive_link
https://drive.google.com/file/d/1lWLntqra83RlYU_gN7Vostnfydf6gutd/view?usp=drive_link
https://drive.google.com/file/d/1vIBKo0x-NYEHV1FvRpco1lQMpRdAWAIL/view?usp=drive_link
https://drive.google.com/file/d/1pdrLV3JTQou_XH0Aap61Ssf60iVKm1jJ/view?usp=drive_link
https://drive.google.com/file/d/1QTsLoQ7SwmKdQHjBGVDaR2uTwfFwtrOf/view?usp=drive_link
https://drive.google.com/file/d/1Gytai8M_12J36GY6L_TulEcOC-035jwS/view?usp=drive_link
https://drive.google.com/file/d/14LJudNc629NT-i8xreXtzl27ce_DxOFJ/view?usp=drive_link
https://drive.google.com/file/d/1sBvPCODbzxGAI0S3lgN5cSG9Go3lRi00/view?usp=drive_link


@ -1,18 +0,0 @@
https://drive.google.com/file/d/1MJn9GbC8p9lN4gC9KDMLEkTkP_gGpXj0/view?usp=drive_link
https://drive.google.com/file/d/1-4LXgjl7ZCOgp-8GCJmFRD8OeqN5Jf7-/view?usp=drive_link
https://drive.google.com/file/d/1Ho06Ce0SPbqU3juaMxNUwAt3zCRLGC8W/view?usp=drive_link
https://drive.google.com/file/d/1ivHoj7_7olBSxH-Y8kqXEW7ttITK-45j/view?usp=drive_link
https://drive.google.com/file/d/1qjY4hM_IvZ8cq2II_n9MeJbvyeuN4oBP/view?usp=drive_link
https://drive.google.com/file/d/1rKVhO_f92-7sw13T8hTVrza3B9oAVgoy/view?usp=drive_link
https://drive.google.com/file/d/1pcLPHO8fBkc1-CRa88tyQtEueE4xiXNi/view?usp=drive_link
https://drive.google.com/file/d/1Vev_chCsIeEdvQ8poEYNsOJFGy_QU8kZ/view?usp=drive_link
https://drive.google.com/file/d/1l5G4zpRkxSLCQjvGPYSN4zfCvVRQuzMz/view?usp=drive_link
https://drive.google.com/file/d/14vgthE1eoakXkr2-DRw50E6lAqYOiUuE/view?usp=drive_link
https://drive.google.com/file/d/17nPSmKKmgQ2B7zkzWrZYiLM3RBuFod82/view?usp=drive_link
https://drive.google.com/file/d/1QcDsxplVvb_ID9BVrihl5FvlC-j7waXi/view?usp=drive_link
https://drive.google.com/file/d/18pEejBpI-eEVaWAAjBCyC0vgbX3T1Esj/view?usp=drive_link
https://drive.google.com/file/d/1H8eH6_IRODtEFT6WoM77ltR5OoOrqXmI/view?usp=drive_link
https://drive.google.com/file/d/1IWlpFRZhoxyG4nS13CWK4leZVk5wbNx4/view?usp=drive_link
https://drive.google.com/file/d/1PbZA8_OCGmMLxNP9xbkLRSChniL4uGxl/view?usp=drive_link
https://drive.google.com/file/d/1p9XAdmG2f_WeflNO4DIJ_tr1rK6M9B4B/view?usp=drive_link
https://drive.google.com/file/d/1nS59Et1cNAvKo3Y4SeSGRuZD5TvBbCF3/view?usp=drive_link


@ -1 +0,0 @@
https://drive.google.com/drive/folders/1S8eFg98IaGAIKVZ8QFWG1bx4mHa-O204


@ -1,4 +0,0 @@
https://drive.google.com/drive/folders/1tC_g1AJ8lglBLY-fjsQrG6DMBa3Ucp-0
https://drive.google.com/file/d/1fG_Yi2MJrFjiUVN3XoiWXLtTxHlwwaDv/view?usp=drive_link
https://drive.google.com/file/d/1WX32VWfzzX3Blmd06DRxLwFbMJfVe7P4/view?usp=drive_link
https://drive.google.com/file/d/18onsX3vXg3xkFwP5bVUCjdV4n9TRn0C9/view?usp=drive_link


@ -1,3 +0,0 @@
https://drive.google.com/drive/folders/1RgyD0JgTX30H4IM5XZn8I3zSV_mr8pyF
https://drive.google.com/file/d/18Cudl6nikDtgRolea7je8iF_gGKzynOP/view?usp=drive_link
https://drive.google.com/file/d/1C1kZYyROzs-PrLc0SkDgUgMi4-L3lauE/view?usp=drive_link


@ -1,3 +0,0 @@
https://drive.google.com/drive/folders/1TsojQQSXtHEoGnqgJ3gmpPQR2DPLtS2N
https://drive.google.com/file/d/1wfMSZ24oOh5KR_0aaP3Cnu_c4ZCveduB/view?usp=drive_link
https://drive.google.com/file/d/17EuCUWS6uCCr6yyNzpXdcdE-_TTNCKtf/view?usp=drive_link


@ -1,3 +0,0 @@
https://drive.google.com/drive/folders/1sc-E4QYW7A0o23m1u2VWNGVq5smAsfCo
https://drive.google.com/file/d/18smMymtr8tIxaNUQ61gW6dG50pt3MvGq/view?usp=drive_link
https://drive.google.com/file/d/1Nk7l53d9sJoGDBKAOnNrExX5nLacATc6/view?usp=drive_link


@ -1,3 +0,0 @@
https://drive.google.com/drive/folders/1aRyoOhQwxhyt1J8XgEig4s6kzaw__LXj
https://drive.google.com/file/d/1pnGIOd-E4-rhz2P3VxpknMKRZCoKt6eI/view?usp=drive_link
https://drive.google.com/file/d/1GKReZHrXU73NMiC5zKCq_UtqPVtYq8eo/view?usp=drive_link


@ -1,2 +0,0 @@
https://drive.google.com/drive/folders/19qS_n7vKgDcPeTMnvDHQ5-n73xEbJz5D
https://drive.google.com/file/d/1oC31By0A2bsBeHyUwBdQw1z4ng6yi9Za/view?usp=drive_link


@ -1,2 +0,0 @@
https://drive.google.com/drive/folders/1m5rQ6UVH8Q9RQp_6c0CxkQ88-L-ScO7q
https://drive.google.com/file/d/1wHz2qcmwcVG0C0CZ9MjQDQcmj4OY9_a3/view?usp=drive_link


@ -1,2 +0,0 @@
https://drive.google.com/drive/folders/1seQGay470nGQ-knBI5TjsTr8iL9Qws5q
https://drive.google.com/file/d/1T89hSX5U99wLGvGTE7yUBaQPOpyj6Sai/view?usp=drive_link


@ -1,2 +0,0 @@
https://drive.google.com/drive/folders/1t3eDc5Rg0DveyRe8oTm6Dia_FYU5mXyf
https://drive.google.com/file/d/1TXFaduTakvS0ZWJqKCX-HIvYglum_5CY/view?usp=drive_link


@ -1,2 +0,0 @@
https://drive.google.com/drive/folders/1Z9X3DNzd6LS0FFjQemNUMoMA5yk5VQOh
https://drive.google.com/file/d/1Wlyc0vTkjXuWB6zbaVOWhEfD7BmPgUV_/view?usp=drive_link


@ -1,53 +0,0 @@
https://drive.google.com/drive/folders/1DYgB4ifX4uIid9m9jnC0Zdz8Nf7ZC0fc
https://drive.google.com/file/d/1Eb-NRNk_FmVleCbU_Ng5Y4dfcjTKN7Rv/view?usp=drive_link
https://drive.google.com/file/d/1dkhjEADakT-44l9jf-nK4x89kr4yG_qb/view?usp=drive_link
https://drive.google.com/file/d/14hDhgcZkVqNExGb4tIXpSjMshhqZETch/view?usp=drive_link
https://drive.google.com/file/d/1zVMEHpHbuNyP5A_lYU7RPSLB-4V0yfZw/view?usp=drive_link
https://drive.google.com/file/d/1JtgDjBvy7FnRpFzrx_foC3quorYQFAR-/view?usp=drive_link
https://drive.google.com/file/d/1EHdneB6F-PP0dQlX8qPaXbxmKoBy_YwO/view?usp=drive_link
https://drive.google.com/file/d/17Z0jjVBy1OPKREPu77_n_rQzorDiapji/view?usp=drive_link
https://drive.google.com/file/d/1F4i23qPJ_qTf5jWjfLo4ARGJChznYWt3/view?usp=drive_link
https://drive.google.com/file/d/1kZtXWM3uS0-rLblydBfJ0mMcVnMMXw9w/view?usp=drive_link
https://drive.google.com/file/d/1mNODox87xFfY5Z_o5mcLsr8SHb39jDik/view?usp=drive_link
https://drive.google.com/file/d/1Ob44VdmEUA93FKDECiRb5Ogz2xQg5IWp/view?usp=drive_link
https://drive.google.com/file/d/1fdQLdjj3Cwv33R1wZhfrLz9Del8mqgHb/view?usp=drive_link
https://drive.google.com/file/d/1Yu3L3ft21zP__XL8pCfhb788ZleuW1n5/view?usp=drive_link
https://drive.google.com/file/d/1ozBBWXVZ9hXDh9ooHUNroHdYm8UDqnhJ/view?usp=drive_link
https://drive.google.com/file/d/1o0TGqvfWw_Lunxb5ubKDS21Lr_WC0h75/view?usp=drive_link
https://drive.google.com/file/d/1jZnd5eP5L6BH5l98BPN6OnoQx3fu8e9n/view?usp=drive_link
https://drive.google.com/file/d/1S5sYbz8wcLYp0V67v13i4PRcBxodn4Hg/view?usp=drive_link
https://drive.google.com/file/d/1rFeg_x6ftJYwPtBv34D3h2L2cpDLeR4G/view?usp=drive_link
https://drive.google.com/file/d/1GvS3lcm4o6nm_scUk0XxKeVFNmzjucDZ/view?usp=drive_link
https://drive.google.com/file/d/1-9i0riphC7NhhDahcQfD1QoBXP5gF90A/view?usp=drive_link
https://drive.google.com/file/d/15p_IqGsMbKuvzMS872THAZr-3SBtb1Fr/view?usp=drive_link
https://drive.google.com/file/d/1ToyYcBfJL8gbQn0q_59zPLsFmm7dmMJo/view?usp=drive_link
https://drive.google.com/file/d/1e_7PNH7CYafE4pAebP7ZdI7XFbmEcy_i/view?usp=drive_link
https://drive.google.com/file/d/1JoabvGVsIQdug2xOhUIhetEIyDM91y_Y/view?usp=drive_link
https://drive.google.com/file/d/1kOMw1y0lmnVaCjwZICfzCsx6e0Z8MNGR/view?usp=drive_link
https://drive.google.com/file/d/16it_wd1JOevUQTK2_CvF_pBACTgpIPgM/view?usp=drive_link
https://drive.google.com/file/d/1IRcCj9HnJSfbyMgr5XEERGlEnWeZQwOc/view?usp=drive_link
https://drive.google.com/file/d/1Z2dIJfq_S3liGmPN9Rphvkmucnmw7tlb/view?usp=drive_link
https://drive.google.com/file/d/1J3NoAjzndGx9yNyaBOJHdNny1epzUoBt/view?usp=drive_link
https://drive.google.com/file/d/18nOvxV1k8FSmBrhT4TPo2sKKSZXougyx/view?usp=drive_link
https://drive.google.com/file/d/1CT8FxclafFMjSd7gCWVw3VSeryeiF04i/view?usp=drive_link
https://drive.google.com/file/d/16M9KVqQMFfSsXfypK0bocFft8Nz3j2Rt/view?usp=drive_link
https://drive.google.com/file/d/18QPVkw6bj6HW8LTPrQLWrrUX4R6RcF42/view?usp=drive_link
https://drive.google.com/file/d/1hQTVtA5hBTE_StXpJafTZJ3tgt2VQQ_t/view?usp=drive_link
https://drive.google.com/file/d/1Dn-d5g69H6EgAWgsFdrcbJKtz7ySsCQ8/view?usp=drive_link
https://drive.google.com/file/d/13hMr16483P7ALYv73yMRUN37fJdVQM62/view?usp=drive_link
https://drive.google.com/file/d/1848yN3XMN5zJMEgApt6KzrWgfRPfimtv/view?usp=drive_link
https://drive.google.com/file/d/1oAD9kSnS0fTgj-CjD4u9VdZ5X67IOIMa/view?usp=drive_link
https://drive.google.com/file/d/1ilzIWLCCG5b_KgF5s0wdN2I5-lFNpwC1/view?usp=drive_link
https://drive.google.com/file/d/1rjsT2YBjnidxod1s9s-myAYz8boHr-WB/view?usp=drive_link
https://drive.google.com/file/d/18Gg48HTub15bd8qzbhiCUufbVy0fbN5G/view?usp=drive_link
https://drive.google.com/file/d/1WsSnQSqmMTVSRwrhT1Y-v782My2zcjLm/view?usp=drive_link
https://drive.google.com/file/d/1ea9ZCvoyc-xqiFXgeDcA_mOWsw7VUuoi/view?usp=drive_link
https://drive.google.com/file/d/1wv1v3-XhPgbNzp62BXbJTDzMPu2tlDUc/view?usp=drive_link
https://drive.google.com/file/d/18-ikzt8LoZ83Gi3goKCELs4U4z8hrRoF/view?usp=drive_link
https://drive.google.com/file/d/16Bjhp7JNCXkGuLvyNcZowAx3W-Y-15DV/view?usp=drive_link
https://drive.google.com/file/d/1Gc-KRI-xwcp1fMR55ugbrLg_5y3SPde-/view?usp=drive_link
https://drive.google.com/file/d/1oP72Q386Z4Sy5MMm-t5yNogIe5Van_9k/view?usp=drive_link
https://drive.google.com/file/d/112T90eDUDVH-SyOV7UnZl5bscAH2hcfq/view?usp=drive_link
https://drive.google.com/file/d/1y-uKOesRRhjgDtFbG_j65f4SGg0v8XDg/view?usp=drive_link
https://drive.google.com/file/d/1LOP05OagoI3km-ZKQBrS204A85UVk7Ok/view?usp=drive_link
https://drive.google.com/file/d/1QkHQKgasVzWsmdPvkXgGhWyQ84d93_Az/view?usp=drive_link


@ -1 +0,0 @@
https://drive.google.com/drive/folders/1Ut2cv6o6Pkfgg46DgwVUM7Z5PkNG8eJ-


@ -1 +0,0 @@
https://drive.google.com/drive/folders/1FqxPV0PgvgIu8XFjtvZSPSExuNcxVVAY


@ -1,2 +0,0 @@
https://drive.google.com/drive/folders/1SKtG0ct9q0nVdYssJNMWSOjikcXliT58
https://drive.google.com/file/d/1nchD21O30B3i3LDoqramo1zgW5YvpJIN/view?usp=drive_link


@ -1,2 +0,0 @@
https://drive.google.com/drive/folders/1_4DHf2cma0xsChLQFghwigX6Ukti5-zQ
https://drive.google.com/file/d/1_8vS4hDNDgUQY-SmekrNaa7dF67QJYU-/view?usp=drive_link


@ -1,2 +0,0 @@
https://drive.google.com/drive/folders/1_4DHf2cma0xsChLQFghwigX6Ukti5-zQ
https://drive.google.com/file/d/1_8vS4hDNDgUQY-SmekrNaa7dF67QJYU-/view?usp=drive_link


@ -1,2 +0,0 @@
https://drive.google.com/drive/folders/1fAD7vkyTGTFB_nGXIKofCU1U05oE3MFv
https://drive.google.com/file/d/1XzyQ2B6LLvcurIonOpEu4nij2qwNWshH/view?usp=drive_link


@ -1,53 +0,0 @@
https://drive.google.com/drive/folders/13EQsVsnxT86K20QAoyE_YpsFbQ7fZQdu
https://drive.google.com/file/d/1-W_JHghZG65FNTVhw1SXhtQrazdLL3Ue/view?usp=drive_link
https://drive.google.com/file/d/1VwRJgdWUo-2nQaNM7Bs77-fsm8iwUxEo/view?usp=drive_link
https://drive.google.com/file/d/1wFzGRo5iYA13WLi6IV1ry64RyahQBFio/view?usp=drive_link
https://drive.google.com/file/d/1IKtQzQ-n-UTv64hYpReu2R4cqUvmNQqD/view?usp=drive_link
https://drive.google.com/file/d/1GicVci9OiuuZZH79i5Mg7AtWod94MzwT/view?usp=drive_link
https://drive.google.com/file/d/1JVnIoR7EIQp70T4eAf9RX65JcTrzsjQc/view?usp=drive_link
https://drive.google.com/file/d/1W2xr4h23ucjPrc-mBEeqnACsfaImpc0p/view?usp=drive_link
https://drive.google.com/file/d/10xj_0V7A07o3uCa7v5omUrTC0YlPW8H3/view?usp=drive_link
https://drive.google.com/file/d/1FOc3EMaCy8Mb0_a7PuXLAwKwvxkbKmwU/view?usp=drive_link
https://drive.google.com/file/d/143PgDXBcf2GQ0Q07ZPMVMfBgZDd5sLJG/view?usp=drive_link
https://drive.google.com/file/d/1pE5Tyj0LlGbGWvUzuhixp86Ibu55Ez3I/view?usp=drive_link
https://drive.google.com/file/d/141668b1VzX80ncrVJPzhkoAeIFB4MEK9/view?usp=drive_link
https://drive.google.com/file/d/1bw12lo37p1ZvRvErHsll7cEYi2OxscvZ/view?usp=drive_link
https://drive.google.com/file/d/1zfnMFvbgBjl6SzYhksbaOzfbwLrCN6tb/view?usp=drive_link
https://drive.google.com/file/d/1-GIszA6mUJMaNB-tdh9r9skc77SWA0VX/view?usp=drive_link
https://drive.google.com/file/d/1fTB0zWFYU6zh4IIUFT2zX_OkwYqmElwY/view?usp=drive_link
https://drive.google.com/file/d/1gPIPNKGmrO9c7gKF7SP0SuUYbIBBq8z1/view?usp=drive_link
https://drive.google.com/file/d/12JeJ-dQd5lYyn6PlDOGdE-ChVeiZ-Uv0/view?usp=drive_link
https://drive.google.com/file/d/100_20cgCqerU6qoh3TfTbwLy9mlDAFEG/view?usp=drive_link
https://drive.google.com/file/d/111oAGJ76ku_pYgbBoIdZAC1_XEQcPI__/view?usp=drive_link
https://drive.google.com/file/d/1UhC8L-354ZQ2gblPFGI35EMsVwfpuKa0/view?usp=drive_link
https://drive.google.com/file/d/1sIXQSgUR_xdrNtGrL6QGBnkLMKErsIp1/view?usp=drive_link
https://drive.google.com/file/d/16Ax77bDSIXnsn4GFL8XYKKT1P6bPpfMd/view?usp=drive_link
https://drive.google.com/file/d/1pgRVYwwVIsWq_qsWqZpe1UBzZfF5Fa9D/view?usp=drive_link
https://drive.google.com/file/d/1jtimaZkWsY1P5gC2bbS64H_WCUU7HXN2/view?usp=drive_link
https://drive.google.com/file/d/1N6Bh02P-RiTEgtx1YH1Db_X3TGpP-X_r/view?usp=drive_link
https://drive.google.com/file/d/14Fy8EwJ8d9Vh97Yt1VOvUChSCrfIjBij/view?usp=drive_link
https://drive.google.com/file/d/1IRuv42dvIMPuKhcMZmuXaBjJ-lPFOmQd/view?usp=drive_link
https://drive.google.com/file/d/16XWzNY2D8ucVVn5geBgsVdhm3ppO4que/view?usp=drive_link
https://drive.google.com/file/d/1xsVOoQgthK_L_SDrmq_JvQgUpAvPEAY8/view?usp=drive_link
https://drive.google.com/file/d/1bZbw66DyEMvnJnzkdUUNbKjvNKg8KFYM/view?usp=drive_link
https://drive.google.com/file/d/1CyTVkdrNGGpouCXr4CfhKbMzE6Ah3oo3/view?usp=drive_link
https://drive.google.com/file/d/1hDRyeM-XEDpHXpptbT8LvNnlQUR3PWOh/view?usp=drive_link
https://drive.google.com/file/d/1XhHWxbra8Iy5irQZ83IvxwaJqHq9x4s1/view?usp=drive_link
https://drive.google.com/file/d/1haZcn6aM1o4JlmP9tJj3x2enrxiPaDSD/view?usp=drive_link
https://drive.google.com/file/d/1ypDyuUTbljaBZ34f-t7lj3O_0bRmyX2n/view?usp=drive_link
https://drive.google.com/file/d/1ILEEZo_tA9_ChIAprr2mPaNVKZi5vXsO/view?usp=drive_link
https://drive.google.com/file/d/1U7nVYFaGE8vVTfLCW33D74xOjDcqfgyJ/view?usp=drive_link
https://drive.google.com/file/d/1rZ93_rmCov5SMDxPkfM3qthcRELZrQX6/view?usp=drive_link
https://drive.google.com/file/d/1mYO1b_csddtyE3qT6cwLiw-m2w2_1Lxh/view?usp=drive_link
https://drive.google.com/file/d/1xz7Q5x2jikY8wJQjMRQpRws6AnfWlHm5/view?usp=drive_link
https://drive.google.com/file/d/1OO8GaO-0FrSZRd1kxMYwBmubyiLOWnbl/view?usp=drive_link
https://drive.google.com/file/d/1EXn4NVDmf-4_HCy34mYwT-vwK2CFI9ev/view?usp=drive_link
https://drive.google.com/file/d/10hH70XhXRL9C5SnAG4toHtfHqfJUJo4H/view?usp=drive_link
https://drive.google.com/file/d/18tiBcxea0guUai4lwsXQvt0q2LZ8ZnnJ/view?usp=drive_link
https://drive.google.com/file/d/1Q8R8qv37vk5PQ5kQ2ibx6BFLOySD0VpX/view?usp=drive_link
https://drive.google.com/file/d/17aNriHzjhdibCyuUjQoMFZqjybJZtggG/view?usp=drive_link
https://drive.google.com/file/d/1LVjEYHSdeKm6CotU1QguIeNEPaIaFl_1/view?usp=drive_link
https://drive.google.com/file/d/1ufAhE_EkgJ85slg2EW8aW_grOzE_Lmxd/view?usp=drive_link
https://drive.google.com/file/d/1wtzLtXrkw9eXRGESTPIOlpl1tInu-b2m/view?usp=drive_link
https://drive.google.com/file/d/1Mk5qvVtD_QHwGOUApRq76TUw2T5THu6f/view?usp=drive_link
https://drive.google.com/file/d/1y1WQ3hboWVJ68KEYQQ3OhreGuaUpSgwc/view?usp=drive_link


@ -1,52 +0,0 @@
https://drive.google.com/drive/folders/1dxWh6YFZUDt6qXIoxgD9bla3CiFjZ11C
https://drive.google.com/file/d/1hNBJN00SCAlOl0ZEgm7RRGbAGDjyBs0p/view?usp=drive_link
https://drive.google.com/file/d/17He0CVwXGeoMmXg4SHKo-osNn7YPKVL7/view?usp=drive_link
https://drive.google.com/file/d/1laNKUVID1x2CV6a2O2WQjwFewKu4lidL/view?usp=drive_link
https://drive.google.com/file/d/1pNf36xbZJGRArYLmNAvRj5y6CoqdC6kB/view?usp=drive_link
https://drive.google.com/file/d/1_4E1-y3JXk5I0ebycLYM70YDPK9g52gZ/view?usp=drive_link
https://drive.google.com/file/d/1PHfzhGPdbolKyOpS3FnR2w7Q8zUlJXSk/view?usp=drive_link
https://drive.google.com/file/d/17ls2PPN-Pi3tEuK059cwV2_iDT8aGhOO/view?usp=drive_link
https://drive.google.com/file/d/1LWsg6PmCT00Kv_N_slrmcwKmQPGoBT3k/view?usp=drive_link
https://drive.google.com/file/d/12LckrchoHTUVH7rxi8J7zD9dA19GXvoW/view?usp=drive_link
https://drive.google.com/file/d/1VqrJKjAIkj5gtFXL69grdSeu9CyaqnSw/view?usp=drive_link
https://drive.google.com/file/d/1g5rQYDBZvW-kUtYPeyF3qmd53v6k7kXu/view?usp=drive_link
https://drive.google.com/file/d/10kUgaSJ0TS7teaG83G3Rf_DG4XGrBt6A/view?usp=drive_link
https://drive.google.com/file/d/1je9XmneZQZvTma5adMJICUPDovW3ppei/view?usp=drive_link
https://drive.google.com/file/d/1v28r6bedwZGbUPVVTVImXhK-42XdtGfj/view?usp=drive_link
https://drive.google.com/file/d/1-TEEx9sGVvzMMaNXYfQMtY2JJ6cvl0dT/view?usp=drive_link
https://drive.google.com/file/d/1YdBKdJFP9rJWBUX7qrOYL_gfUA8o6J9M/view?usp=drive_link
https://drive.google.com/file/d/1X9vffwQHNUSKLXr2RlYNtbWDIFCIDfdF/view?usp=drive_link
https://drive.google.com/file/d/11hqesqa5kvEe5FABUnZRcvmOhR373cYM/view?usp=drive_link
https://drive.google.com/file/d/1ltTTECjEcbQPgS3UPRgMzaE2x9n6H7dC/view?usp=drive_link
https://drive.google.com/file/d/1Zxqfa29JdwT-bfMpivi6IG2vz34d21dD/view?usp=drive_link
https://drive.google.com/file/d/11LQlVxS5hz494dYUJ_PNRPx2NHIJbQns/view?usp=drive_link
https://drive.google.com/file/d/1i1JhNtnZpO_E8rAv8gxBP3ZTZRvcvsZi/view?usp=drive_link
https://drive.google.com/file/d/11jOXAr2EULUO4Qkm748634lg4UUFho5U/view?usp=drive_link
https://drive.google.com/file/d/1rj67wur8DdB_Pipwx24bY43xu4X1eQ5e/view?usp=drive_link
https://drive.google.com/file/d/15ZTm6lO6f_JQy_4SNfrOu3iPYn1Ro8mh/view?usp=drive_link
https://drive.google.com/file/d/1q4gBtqWPJtCwXEvknGgN0WHGp7Vfn1b9/view?usp=drive_link
https://drive.google.com/file/d/1t17keyre47AYqm8GgXiQ7EcvcUkeSiDQ/view?usp=drive_link
https://drive.google.com/file/d/1OYUPGxtZgOF86Ng_BEOTXm_XOYpuQPsO/view?usp=drive_link
https://drive.google.com/file/d/1cBjbGHi3dwWHtx6r9EQJi0JT_CE3LuHt/view?usp=drive_link
https://drive.google.com/file/d/14qaMyF0mcbCB-fCYKNyo5_2NahSC6D5u/view?usp=drive_link
https://drive.google.com/file/d/12FgX86eA7Y5co9ULBVK80XMsiKQSs-Ri/view?usp=drive_link
https://drive.google.com/file/d/1yvoHWidf-jdBVw6qCCXOFfkVwKj_2hPk/view?usp=drive_link
https://drive.google.com/file/d/1a2SugsSDlC8UtUrFzp-_KAwyZckQOvdQ/view?usp=drive_link
https://drive.google.com/file/d/1l8pILBFSAosypWJMza2K09Vm7rug9axm/view?usp=drive_link
https://drive.google.com/file/d/1hfPQ8dBCk97PnOhq6_MIISm3IEzcOxJG/view?usp=drive_link
https://drive.google.com/file/d/1PPAUwlJCFKpms8cqF_k1v2_fCgDBOc3S/view?usp=drive_link
https://drive.google.com/file/d/1lVKQZeqFfK3amEmLuFhYLUFQ2eyE8rOW/view?usp=drive_link
https://drive.google.com/file/d/1K9iPMLfDowcIFoyzpvgn88dQ6x6kVwNG/view?usp=drive_link
https://drive.google.com/file/d/1PNvMqG9tL7QxeLaYBGHiWYR6SYb5iIct/view?usp=drive_link
https://drive.google.com/file/d/1xkRtzbvIkUsylx9hrFLGQsJn0h1EYu-5/view?usp=drive_link
https://drive.google.com/file/d/1nxMRrJlSayjDIfr5CmHO1NzAw3COhsLi/view?usp=drive_link
https://drive.google.com/file/d/1Qs3WEyMGrmagiHIkkFEueWNnJhkUeR1s/view?usp=drive_link
https://drive.google.com/file/d/1D-G2_Q0SS3M8zyJbg_XzkF2ANPw1HTuX/view?usp=drive_link
https://drive.google.com/file/d/1mdmJsDGO-YtJAOF_yPKl6lq4PJOIbQhT/view?usp=drive_link
https://drive.google.com/file/d/11m9bwfop_sPmnQr_8amB6EEsrbAeG_z5/view?usp=drive_link
https://drive.google.com/file/d/19tyYt5FMn5kru0g9o2nMJhKPnsDqkIZv/view?usp=drive_link
https://drive.google.com/file/d/1XvTpUdsVTZ-vydvdYYmynbma--HfUGSl/view?usp=drive_link
https://drive.google.com/file/d/1MO3hFu68J6NohTzr9aB_fY02VA6QSOqj/view?usp=drive_link
https://drive.google.com/file/d/1Lh-UjwAk__04YOTWINF_QGVU8SjetVaY/view?usp=drive_link
https://drive.google.com/file/d/1jkSOUwZV5GJ7rZlVeErjcu0DBQs8Np0d/view?usp=drive_link
https://drive.google.com/file/d/1VIN1eLI-93WrVQwCjsv6XQr353DqqBYA/view?usp=drive_link


@ -1,8 +0,0 @@
https://drive.google.com/drive/folders/1EgKar7rWBmTIRmeJYZciSwjZx3uP2mHO
https://drive.google.com/file/d/12eYWQO15atK2hBjXhynPJd9MKAj_42pz/view?usp=drive_link
https://drive.google.com/file/d/1Ul4oEeICJDjgfYTl4H1uaisTzVYIM6wd/view?usp=drive_link
https://drive.google.com/file/d/1WSF-OG8lKSe2wVYCv5D1aJNipxpgddk-/view?usp=drive_link
https://drive.google.com/file/d/1_ppD5j5sFh26aWW0JmhLzJMeNB-lCArk/view?usp=drive_link
https://drive.google.com/file/d/1WUp846dgWXYhu4oJfhHxiU6YL_7N6s4W/view?usp=drive_link
https://drive.google.com/file/d/1HRZNAIoAQw_uYiPwnBvtBioQoqiqoXdA/view?usp=drive_link
https://drive.google.com/file/d/1hedGq-QDMnIn8GlXXBC3GiEJ_Y-LTxyt/view?usp=drive_link


@ -1,634 +0,0 @@
#!/usr/bin/env python
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper code for loading PushT dataset from Diffusion Policy (https://diffusion-policy.cs.columbia.edu/)
Copied from the original Diffusion Policy repository and used in our `download_and_upload_dataset.py` script.
"""
from __future__ import annotations
import math
import numbers
import os
from functools import cached_property
import numcodecs
import numpy as np
import zarr
def check_chunks_compatible(chunks: tuple, shape: tuple):
assert len(shape) == len(chunks)
for c in chunks:
assert isinstance(c, numbers.Integral)
assert c > 0
def rechunk_recompress_array(group, name, chunks=None, chunk_length=None, compressor=None, tmp_key="_temp"):
old_arr = group[name]
if chunks is None:
chunks = (chunk_length,) + old_arr.chunks[1:] if chunk_length is not None else old_arr.chunks
check_chunks_compatible(chunks, old_arr.shape)
if compressor is None:
compressor = old_arr.compressor
if (chunks == old_arr.chunks) and (compressor == old_arr.compressor):
# no change
return old_arr
# rechunk recompress
group.move(name, tmp_key)
old_arr = group[tmp_key]
n_copied, n_skipped, n_bytes_copied = zarr.copy(
source=old_arr,
dest=group,
name=name,
chunks=chunks,
compressor=compressor,
)
del group[tmp_key]
arr = group[name]
return arr
def get_optimal_chunks(shape, dtype, target_chunk_bytes=2e6, max_chunk_length=None):
"""
Common shapes
T,D
T,N,D
T,H,W,C
T,N,H,W,C
"""
itemsize = np.dtype(dtype).itemsize
# reversed
rshape = list(shape[::-1])
if max_chunk_length is not None:
rshape[-1] = int(max_chunk_length)
split_idx = len(shape) - 1
for i in range(len(shape) - 1):
this_chunk_bytes = itemsize * np.prod(rshape[:i])
next_chunk_bytes = itemsize * np.prod(rshape[: i + 1])
if this_chunk_bytes <= target_chunk_bytes and next_chunk_bytes > target_chunk_bytes:
split_idx = i
rchunks = rshape[:split_idx]
item_chunk_bytes = itemsize * np.prod(rshape[:split_idx])
this_max_chunk_length = rshape[split_idx]
next_chunk_length = min(this_max_chunk_length, math.ceil(target_chunk_bytes / item_chunk_bytes))
rchunks.append(next_chunk_length)
len_diff = len(shape) - len(rchunks)
rchunks.extend([1] * len_diff)
chunks = tuple(rchunks[::-1])
# print(np.prod(chunks) * itemsize / target_chunk_bytes)
return chunks
class ReplayBuffer:
"""
Zarr-based temporal datastructure.
Assumes first dimension to be time. Only chunk in time dimension.
"""
def __init__(self, root: zarr.Group | dict[str, dict]):
"""
Dummy constructor. Use copy_from* and create_from* class methods instead.
"""
assert "data" in root
assert "meta" in root
assert "episode_ends" in root["meta"]
for value in root["data"].values():
assert value.shape[0] == root["meta"]["episode_ends"][-1]
self.root = root
# ============= create constructors ===============
@classmethod
def create_empty_zarr(cls, storage=None, root=None):
if root is None:
if storage is None:
storage = zarr.MemoryStore()
root = zarr.group(store=storage)
root.require_group("data", overwrite=False)
meta = root.require_group("meta", overwrite=False)
if "episode_ends" not in meta:
meta.zeros("episode_ends", shape=(0,), dtype=np.int64, compressor=None, overwrite=False)
return cls(root=root)
@classmethod
def create_empty_numpy(cls):
root = {"data": {}, "meta": {"episode_ends": np.zeros((0,), dtype=np.int64)}}
return cls(root=root)
@classmethod
def create_from_group(cls, group, **kwargs):
if "data" not in group:
            # create from scratch
buffer = cls.create_empty_zarr(root=group, **kwargs)
else:
            # already exists
buffer = cls(root=group, **kwargs)
return buffer
@classmethod
def create_from_path(cls, zarr_path, mode="r", **kwargs):
"""
        Open an on-disk zarr directly (for datasets larger than memory).
Slower.
"""
group = zarr.open(os.path.expanduser(zarr_path), mode)
return cls.create_from_group(group, **kwargs)
# ============= copy constructors ===============
@classmethod
def copy_from_store(
cls,
src_store,
store=None,
keys=None,
chunks: dict[str, tuple] | None = None,
compressors: dict | str | numcodecs.abc.Codec | None = None,
if_exists="replace",
**kwargs,
):
"""
Load to memory.
"""
src_root = zarr.group(src_store)
if chunks is None:
chunks = {}
if compressors is None:
compressors = {}
root = None
if store is None:
# numpy backend
meta = {}
for key, value in src_root["meta"].items():
if len(value.shape) == 0:
meta[key] = np.array(value)
else:
meta[key] = value[:]
if keys is None:
keys = src_root["data"].keys()
data = {}
for key in keys:
arr = src_root["data"][key]
data[key] = arr[:]
root = {"meta": meta, "data": data}
else:
root = zarr.group(store=store)
# copy without recompression
n_copied, n_skipped, n_bytes_copied = zarr.copy_store(
source=src_store, dest=store, source_path="/meta", dest_path="/meta", if_exists=if_exists
)
data_group = root.create_group("data", overwrite=True)
if keys is None:
keys = src_root["data"].keys()
for key in keys:
value = src_root["data"][key]
cks = cls._resolve_array_chunks(chunks=chunks, key=key, array=value)
cpr = cls._resolve_array_compressor(compressors=compressors, key=key, array=value)
if cks == value.chunks and cpr == value.compressor:
# copy without recompression
this_path = "/data/" + key
n_copied, n_skipped, n_bytes_copied = zarr.copy_store(
source=src_store,
dest=store,
source_path=this_path,
dest_path=this_path,
if_exists=if_exists,
)
else:
# copy with recompression
n_copied, n_skipped, n_bytes_copied = zarr.copy(
source=value,
dest=data_group,
name=key,
chunks=cks,
compressor=cpr,
if_exists=if_exists,
)
buffer = cls(root=root)
return buffer
@classmethod
def copy_from_path(
cls,
zarr_path,
backend=None,
store=None,
keys=None,
chunks: dict[str, tuple] | None = None,
compressors: dict | str | numcodecs.abc.Codec | None = None,
if_exists="replace",
**kwargs,
):
"""
        Copy an on-disk zarr into an in-memory, compressed buffer.
        Recommended.
"""
if chunks is None:
chunks = {}
if compressors is None:
compressors = {}
if backend == "numpy":
print("backend argument is deprecated!")
store = None
group = zarr.open(os.path.expanduser(zarr_path), "r")
return cls.copy_from_store(
src_store=group.store,
store=store,
keys=keys,
chunks=chunks,
compressors=compressors,
if_exists=if_exists,
**kwargs,
)
# ============= save methods ===============
def save_to_store(
self,
store,
chunks: dict[str, tuple] | None = None,
compressors: str | numcodecs.abc.Codec | dict | None = None,
if_exists="replace",
**kwargs,
):
root = zarr.group(store)
if chunks is None:
chunks = {}
if compressors is None:
compressors = {}
if self.backend == "zarr":
# recompression free copy
n_copied, n_skipped, n_bytes_copied = zarr.copy_store(
source=self.root.store,
dest=store,
source_path="/meta",
dest_path="/meta",
if_exists=if_exists,
)
else:
meta_group = root.create_group("meta", overwrite=True)
# save meta, no chunking
for key, value in self.root["meta"].items():
_ = meta_group.array(name=key, data=value, shape=value.shape, chunks=value.shape)
# save data, chunk
data_group = root.create_group("data", overwrite=True)
for key, value in self.root["data"].items():
cks = self._resolve_array_chunks(chunks=chunks, key=key, array=value)
cpr = self._resolve_array_compressor(compressors=compressors, key=key, array=value)
if isinstance(value, zarr.Array):
if cks == value.chunks and cpr == value.compressor:
# copy without recompression
this_path = "/data/" + key
n_copied, n_skipped, n_bytes_copied = zarr.copy_store(
source=self.root.store,
dest=store,
source_path=this_path,
dest_path=this_path,
if_exists=if_exists,
)
else:
# copy with recompression
n_copied, n_skipped, n_bytes_copied = zarr.copy(
source=value,
dest=data_group,
name=key,
chunks=cks,
compressor=cpr,
if_exists=if_exists,
)
else:
# numpy
_ = data_group.array(name=key, data=value, chunks=cks, compressor=cpr)
return store
def save_to_path(
self,
zarr_path,
chunks: dict[str, tuple] | None = None,
compressors: str | numcodecs.abc.Codec | dict | None = None,
if_exists="replace",
**kwargs,
):
if chunks is None:
chunks = {}
if compressors is None:
compressors = {}
store = zarr.DirectoryStore(os.path.expanduser(zarr_path))
return self.save_to_store(
store, chunks=chunks, compressors=compressors, if_exists=if_exists, **kwargs
)
@staticmethod
def resolve_compressor(compressor="default"):
if compressor == "default":
compressor = numcodecs.Blosc(cname="lz4", clevel=5, shuffle=numcodecs.Blosc.NOSHUFFLE)
elif compressor == "disk":
compressor = numcodecs.Blosc("zstd", clevel=5, shuffle=numcodecs.Blosc.BITSHUFFLE)
return compressor
@classmethod
def _resolve_array_compressor(cls, compressors: dict | str | numcodecs.abc.Codec, key, array):
# allows compressor to be explicitly set to None
cpr = "nil"
if isinstance(compressors, dict):
if key in compressors:
cpr = cls.resolve_compressor(compressors[key])
elif isinstance(array, zarr.Array):
cpr = array.compressor
else:
cpr = cls.resolve_compressor(compressors)
# backup default
if cpr == "nil":
cpr = cls.resolve_compressor("default")
return cpr
@classmethod
def _resolve_array_chunks(cls, chunks: dict | tuple, key, array):
cks = None
if isinstance(chunks, dict):
if key in chunks:
cks = chunks[key]
elif isinstance(array, zarr.Array):
cks = array.chunks
elif isinstance(chunks, tuple):
cks = chunks
else:
raise TypeError(f"Unsupported chunks type {type(chunks)}")
# backup default
if cks is None:
cks = get_optimal_chunks(shape=array.shape, dtype=array.dtype)
# check
check_chunks_compatible(chunks=cks, shape=array.shape)
return cks
# ============= properties =================
@cached_property
def data(self):
return self.root["data"]
@cached_property
def meta(self):
return self.root["meta"]
def update_meta(self, data):
# sanitize data
np_data = {}
for key, value in data.items():
if isinstance(value, np.ndarray):
np_data[key] = value
else:
arr = np.array(value)
if arr.dtype == object:
raise TypeError(f"Invalid value type {type(value)}")
np_data[key] = arr
meta_group = self.meta
if self.backend == "zarr":
for key, value in np_data.items():
_ = meta_group.array(
name=key, data=value, shape=value.shape, chunks=value.shape, overwrite=True
)
else:
meta_group.update(np_data)
return meta_group
@property
def episode_ends(self):
return self.meta["episode_ends"]
def get_episode_idxs(self):
import numba
numba.jit(nopython=True)
def _get_episode_idxs(episode_ends):
result = np.zeros((episode_ends[-1],), dtype=np.int64)
for i in range(len(episode_ends)):
start = 0
if i > 0:
start = episode_ends[i - 1]
end = episode_ends[i]
for idx in range(start, end):
result[idx] = i
return result
return _get_episode_idxs(self.episode_ends)
@property
def backend(self):
backend = "numpy"
if isinstance(self.root, zarr.Group):
backend = "zarr"
return backend
# =========== dict-like API ==============
def __repr__(self) -> str:
if self.backend == "zarr":
return str(self.root.tree())
else:
return super().__repr__()
def keys(self):
return self.data.keys()
def values(self):
return self.data.values()
def items(self):
return self.data.items()
def __getitem__(self, key):
return self.data[key]
def __contains__(self, key):
return key in self.data
# =========== our API ==============
@property
def n_steps(self):
if len(self.episode_ends) == 0:
return 0
return self.episode_ends[-1]
@property
def n_episodes(self):
return len(self.episode_ends)
@property
def chunk_size(self):
if self.backend == "zarr":
return next(iter(self.data.arrays()))[-1].chunks[0]
return None
@property
def episode_lengths(self):
ends = self.episode_ends[:]
ends = np.insert(ends, 0, 0)
lengths = np.diff(ends)
return lengths
def add_episode(
self,
data: dict[str, np.ndarray],
chunks: dict[str, tuple] | None = None,
compressors: str | numcodecs.abc.Codec | dict | None = None,
):
if chunks is None:
chunks = {}
if compressors is None:
compressors = {}
assert len(data) > 0
is_zarr = self.backend == "zarr"
curr_len = self.n_steps
episode_length = None
for value in data.values():
assert len(value.shape) >= 1
if episode_length is None:
episode_length = len(value)
else:
assert episode_length == len(value)
new_len = curr_len + episode_length
for key, value in data.items():
new_shape = (new_len,) + value.shape[1:]
# create array
if key not in self.data:
if is_zarr:
cks = self._resolve_array_chunks(chunks=chunks, key=key, array=value)
cpr = self._resolve_array_compressor(compressors=compressors, key=key, array=value)
arr = self.data.zeros(
name=key, shape=new_shape, chunks=cks, dtype=value.dtype, compressor=cpr
)
else:
# copy data to prevent modify
arr = np.zeros(shape=new_shape, dtype=value.dtype)
self.data[key] = arr
else:
arr = self.data[key]
assert value.shape[1:] == arr.shape[1:]
# same method for both zarr and numpy
if is_zarr:
arr.resize(new_shape)
else:
arr.resize(new_shape, refcheck=False)
# copy data
arr[-value.shape[0] :] = value
# append to episode ends
episode_ends = self.episode_ends
if is_zarr:
episode_ends.resize(episode_ends.shape[0] + 1)
else:
episode_ends.resize(episode_ends.shape[0] + 1, refcheck=False)
episode_ends[-1] = new_len
# rechunk
if is_zarr and episode_ends.chunks[0] < episode_ends.shape[0]:
rechunk_recompress_array(self.meta, "episode_ends", chunk_length=int(episode_ends.shape[0] * 1.5))
def drop_episode(self):
is_zarr = self.backend == "zarr"
episode_ends = self.episode_ends[:].copy()
assert len(episode_ends) > 0
start_idx = 0
if len(episode_ends) > 1:
start_idx = episode_ends[-2]
for value in self.data.values():
new_shape = (start_idx,) + value.shape[1:]
if is_zarr:
value.resize(new_shape)
else:
value.resize(new_shape, refcheck=False)
if is_zarr:
self.episode_ends.resize(len(episode_ends) - 1)
else:
self.episode_ends.resize(len(episode_ends) - 1, refcheck=False)
def pop_episode(self):
assert self.n_episodes > 0
episode = self.get_episode(self.n_episodes - 1, copy=True)
self.drop_episode()
return episode
def extend(self, data):
self.add_episode(data)
def get_episode(self, idx, copy=False):
idx = list(range(len(self.episode_ends)))[idx]
start_idx = 0
if idx > 0:
start_idx = self.episode_ends[idx - 1]
end_idx = self.episode_ends[idx]
result = self.get_steps_slice(start_idx, end_idx, copy=copy)
return result
def get_episode_slice(self, idx):
start_idx = 0
if idx > 0:
start_idx = self.episode_ends[idx - 1]
end_idx = self.episode_ends[idx]
return slice(start_idx, end_idx)
def get_steps_slice(self, start, stop, step=None, copy=False):
_slice = slice(start, stop, step)
result = {}
for key, value in self.data.items():
x = value[_slice]
if copy and isinstance(value, np.ndarray):
x = x.copy()
result[key] = x
return result
# =========== chunking =============
def get_chunks(self) -> dict:
assert self.backend == "zarr"
chunks = {}
for key, value in self.data.items():
chunks[key] = value.chunks
return chunks
def set_chunks(self, chunks: dict):
assert self.backend == "zarr"
for key, value in chunks.items():
if key in self.data:
arr = self.data[key]
if value != arr.chunks:
check_chunks_compatible(chunks=value, shape=arr.shape)
rechunk_recompress_array(self.data, key, chunks=value)
def get_compressors(self) -> dict:
assert self.backend == "zarr"
compressors = {}
for key, value in self.data.items():
compressors[key] = value.compressor
return compressors
def set_compressors(self, compressors: dict):
assert self.backend == "zarr"
for key, value in compressors.items():
if key in self.data:
arr = self.data[key]
compressor = self.resolve_compressor(value)
if compressor != arr.compressor:
rechunk_recompress_array(self.data, key, compressor=compressor)
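
The replay-buffer methods above are easiest to read next to a small usage example. The sketch below is an editor's addition, not part of the diff: the module path and class name are taken from the import used later in the pusht_zarr loader, while `create_empty_numpy()` is assumed from the upstream diffusion_policy implementation this file was copied from.

```python
import numpy as np

from lerobot.common.datasets.push_dataset_to_hub._diffusion_policy_replay_buffer import ReplayBuffer

# Start from an empty in-memory (numpy-backed) buffer; `create_empty_numpy()` is assumed
# to exist as in the upstream diffusion_policy ReplayBuffer.
buffer = ReplayBuffer.create_empty_numpy()

# Every value in an episode is shaped (num_steps, ...) and all keys must share num_steps.
episode = {
    "state": np.random.rand(10, 5).astype(np.float32),
    "action": np.random.rand(10, 2).astype(np.float32),
}
buffer.add_episode(episode)

print(buffer.n_episodes, buffer.n_steps)  # 1 10
first = buffer.get_episode(0, copy=True)  # dict of per-key numpy slices for episode 0
last = buffer.pop_episode()               # returns the last episode and drops it from the buffer
```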

View File

@ -1,202 +0,0 @@
#!/usr/bin/env python
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This file contains download scripts for raw datasets.
Example of usage:
```
python lerobot/common/datasets/push_dataset_to_hub/_download_raw.py \
--raw-dir data/lerobot-raw/pusht_raw \
--repo-id lerobot-raw/pusht_raw
```
"""
import argparse
import logging
import warnings
from pathlib import Path
from huggingface_hub import snapshot_download
from lerobot.common.datasets.push_dataset_to_hub.utils import check_repo_id
# {raw_repo_id: raw_format}
AVAILABLE_RAW_REPO_IDS = {
"lerobot-raw/aloha_mobile_cabinet_raw": "aloha_hdf5",
"lerobot-raw/aloha_mobile_chair_raw": "aloha_hdf5",
"lerobot-raw/aloha_mobile_elevator_raw": "aloha_hdf5",
"lerobot-raw/aloha_mobile_shrimp_raw": "aloha_hdf5",
"lerobot-raw/aloha_mobile_wash_pan_raw": "aloha_hdf5",
"lerobot-raw/aloha_mobile_wipe_wine_raw": "aloha_hdf5",
"lerobot-raw/aloha_sim_insertion_human_raw": "aloha_hdf5",
"lerobot-raw/aloha_sim_insertion_scripted_raw": "aloha_hdf5",
"lerobot-raw/aloha_sim_transfer_cube_human_raw": "aloha_hdf5",
"lerobot-raw/aloha_sim_transfer_cube_scripted_raw": "aloha_hdf5",
"lerobot-raw/aloha_static_battery_raw": "aloha_hdf5",
"lerobot-raw/aloha_static_candy_raw": "aloha_hdf5",
"lerobot-raw/aloha_static_coffee_new_raw": "aloha_hdf5",
"lerobot-raw/aloha_static_coffee_raw": "aloha_hdf5",
"lerobot-raw/aloha_static_cups_open_raw": "aloha_hdf5",
"lerobot-raw/aloha_static_fork_pick_up_raw": "aloha_hdf5",
"lerobot-raw/aloha_static_pingpong_test_raw": "aloha_hdf5",
"lerobot-raw/aloha_static_pro_pencil_raw": "aloha_hdf5",
"lerobot-raw/aloha_static_screw_driver_raw": "aloha_hdf5",
"lerobot-raw/aloha_static_tape_raw": "aloha_hdf5",
"lerobot-raw/aloha_static_thread_velcro_raw": "aloha_hdf5",
"lerobot-raw/aloha_static_towel_raw": "aloha_hdf5",
"lerobot-raw/aloha_static_vinh_cup_left_raw": "aloha_hdf5",
"lerobot-raw/aloha_static_vinh_cup_raw": "aloha_hdf5",
"lerobot-raw/aloha_static_ziploc_slide_raw": "aloha_hdf5",
"lerobot-raw/umi_cup_in_the_wild_raw": "umi_zarr",
"lerobot-raw/pusht_raw": "pusht_zarr",
"lerobot-raw/unitreeh1_fold_clothes_raw": "aloha_hdf5",
"lerobot-raw/unitreeh1_rearrange_objects_raw": "aloha_hdf5",
"lerobot-raw/unitreeh1_two_robot_greeting_raw": "aloha_hdf5",
"lerobot-raw/unitreeh1_warehouse_raw": "aloha_hdf5",
"lerobot-raw/xarm_lift_medium_raw": "xarm_pkl",
"lerobot-raw/xarm_lift_medium_replay_raw": "xarm_pkl",
"lerobot-raw/xarm_push_medium_raw": "xarm_pkl",
"lerobot-raw/xarm_push_medium_replay_raw": "xarm_pkl",
"lerobot-raw/fractal20220817_data_raw": "openx_rlds.fractal20220817_data",
"lerobot-raw/kuka_raw": "openx_rlds.kuka",
"lerobot-raw/bridge_openx_raw": "openx_rlds.bridge_openx",
"lerobot-raw/taco_play_raw": "openx_rlds.taco_play",
"lerobot-raw/jaco_play_raw": "openx_rlds.jaco_play",
"lerobot-raw/berkeley_cable_routing_raw": "openx_rlds.berkeley_cable_routing",
"lerobot-raw/roboturk_raw": "openx_rlds.roboturk",
"lerobot-raw/nyu_door_opening_surprising_effectiveness_raw": "openx_rlds.nyu_door_opening_surprising_effectiveness",
"lerobot-raw/viola_raw": "openx_rlds.viola",
"lerobot-raw/berkeley_autolab_ur5_raw": "openx_rlds.berkeley_autolab_ur5",
"lerobot-raw/toto_raw": "openx_rlds.toto",
"lerobot-raw/language_table_raw": "openx_rlds.language_table",
"lerobot-raw/columbia_cairlab_pusht_real_raw": "openx_rlds.columbia_cairlab_pusht_real",
"lerobot-raw/stanford_kuka_multimodal_dataset_raw": "openx_rlds.stanford_kuka_multimodal_dataset",
"lerobot-raw/nyu_rot_dataset_raw": "openx_rlds.nyu_rot_dataset",
"lerobot-raw/io_ai_tech_raw": "openx_rlds.io_ai_tech",
"lerobot-raw/stanford_hydra_dataset_raw": "openx_rlds.stanford_hydra_dataset",
"lerobot-raw/austin_buds_dataset_raw": "openx_rlds.austin_buds_dataset",
"lerobot-raw/nyu_franka_play_dataset_raw": "openx_rlds.nyu_franka_play_dataset",
"lerobot-raw/maniskill_dataset_raw": "openx_rlds.maniskill_dataset",
"lerobot-raw/furniture_bench_dataset_raw": "openx_rlds.furniture_bench_dataset",
"lerobot-raw/cmu_franka_exploration_dataset_raw": "openx_rlds.cmu_franka_exploration_dataset",
"lerobot-raw/ucsd_kitchen_dataset_raw": "openx_rlds.ucsd_kitchen_dataset",
"lerobot-raw/ucsd_pick_and_place_dataset_raw": "openx_rlds.ucsd_pick_and_place_dataset",
"lerobot-raw/spoc_raw": "openx_rlds.spoc",
"lerobot-raw/austin_sailor_dataset_raw": "openx_rlds.austin_sailor_dataset",
"lerobot-raw/austin_sirius_dataset_raw": "openx_rlds.austin_sirius_dataset",
"lerobot-raw/bc_z_raw": "openx_rlds.bc_z",
"lerobot-raw/utokyo_pr2_opening_fridge_raw": "openx_rlds.utokyo_pr2_opening_fridge",
"lerobot-raw/utokyo_pr2_tabletop_manipulation_raw": "openx_rlds.utokyo_pr2_tabletop_manipulation",
"lerobot-raw/utokyo_xarm_pick_and_place_raw": "openx_rlds.utokyo_xarm_pick_and_place",
"lerobot-raw/utokyo_xarm_bimanual_raw": "openx_rlds.utokyo_xarm_bimanual",
"lerobot-raw/utokyo_saytap_raw": "openx_rlds.utokyo_saytap",
"lerobot-raw/robo_net_raw": "openx_rlds.robo_net",
"lerobot-raw/robo_set_raw": "openx_rlds.robo_set",
"lerobot-raw/berkeley_mvp_raw": "openx_rlds.berkeley_mvp",
"lerobot-raw/berkeley_rpt_raw": "openx_rlds.berkeley_rpt",
"lerobot-raw/kaist_nonprehensile_raw": "openx_rlds.kaist_nonprehensile",
"lerobot-raw/stanford_mask_vit_raw": "openx_rlds.stanford_mask_vit",
"lerobot-raw/tokyo_u_lsmo_raw": "openx_rlds.tokyo_u_lsmo",
"lerobot-raw/dlr_sara_pour_raw": "openx_rlds.dlr_sara_pour",
"lerobot-raw/dlr_sara_grid_clamp_raw": "openx_rlds.dlr_sara_grid_clamp",
"lerobot-raw/dlr_edan_shared_control_raw": "openx_rlds.dlr_edan_shared_control",
"lerobot-raw/asu_table_top_raw": "openx_rlds.asu_table_top",
"lerobot-raw/stanford_robocook_raw": "openx_rlds.stanford_robocook",
"lerobot-raw/imperialcollege_sawyer_wrist_cam_raw": "openx_rlds.imperialcollege_sawyer_wrist_cam",
"lerobot-raw/iamlab_cmu_pickup_insert_raw": "openx_rlds.iamlab_cmu_pickup_insert",
"lerobot-raw/uiuc_d3field_raw": "openx_rlds.uiuc_d3field",
"lerobot-raw/utaustin_mutex_raw": "openx_rlds.utaustin_mutex",
"lerobot-raw/berkeley_fanuc_manipulation_raw": "openx_rlds.berkeley_fanuc_manipulation",
"lerobot-raw/cmu_playing_with_food_raw": "openx_rlds.cmu_playing_with_food",
"lerobot-raw/cmu_play_fusion_raw": "openx_rlds.cmu_play_fusion",
"lerobot-raw/cmu_stretch_raw": "openx_rlds.cmu_stretch",
"lerobot-raw/berkeley_gnm_recon_raw": "openx_rlds.berkeley_gnm_recon",
"lerobot-raw/berkeley_gnm_cory_hall_raw": "openx_rlds.berkeley_gnm_cory_hall",
"lerobot-raw/berkeley_gnm_sac_son_raw": "openx_rlds.berkeley_gnm_sac_son",
"lerobot-raw/droid_raw": "openx_rlds.droid",
"lerobot-raw/droid_100_raw": "openx_rlds.droid100",
"lerobot-raw/fmb_raw": "openx_rlds.fmb",
"lerobot-raw/dobbe_raw": "openx_rlds.dobbe",
"lerobot-raw/usc_cloth_sim_raw": "openx_rlds.usc_cloth_sim",
"lerobot-raw/plex_robosuite_raw": "openx_rlds.plex_robosuite",
"lerobot-raw/conq_hose_manipulation_raw": "openx_rlds.conq_hose_manipulation",
"lerobot-raw/vima_raw": "openx_rlds.vima",
"lerobot-raw/robot_vqa_raw": "openx_rlds.robot_vqa",
"lerobot-raw/mimic_play_raw": "openx_rlds.mimic_play",
"lerobot-raw/tidybot_raw": "openx_rlds.tidybot",
"lerobot-raw/eth_agent_affordances_raw": "openx_rlds.eth_agent_affordances",
}
def download_raw(raw_dir: Path, repo_id: str):
check_repo_id(repo_id)
user_id, dataset_id = repo_id.split("/")
if not dataset_id.endswith("_raw"):
warnings.warn(
f"""`dataset_id` ({dataset_id}) doesn't end with '_raw' (e.g. 'lerobot/pusht_raw'). Following this
naming convention by renaming your repository is advised, but not mandatory.""",
stacklevel=1,
)
# Send warning if raw_dir isn't well formatted
if raw_dir.parts[-2] != user_id or raw_dir.parts[-1] != dataset_id:
warnings.warn(
f"""`raw_dir` ({raw_dir}) doesn't contain a community or user id `/` the name of the dataset that
match the `repo_id` (e.g. 'data/lerobot/pusht_raw'). Following this naming convention is advised,
but not mandatory.""",
stacklevel=1,
)
raw_dir.mkdir(parents=True, exist_ok=True)
logging.info(f"Start downloading from huggingface.co/{user_id} for {dataset_id}")
snapshot_download(repo_id, repo_type="dataset", local_dir=raw_dir)
logging.info(f"Finish downloading from huggingface.co/{user_id} for {dataset_id}")
def download_all_raw_datasets(data_dir: Path | None = None):
if data_dir is None:
data_dir = Path("data")
for repo_id in AVAILABLE_RAW_REPO_IDS:
raw_dir = data_dir / repo_id
download_raw(raw_dir, repo_id)
def main():
parser = argparse.ArgumentParser(
description=f"""A script to download raw datasets from Hugging Face hub to a local directory. Here is a
        non-exhaustive list of available repositories to use in `--repo-id`: {list(AVAILABLE_RAW_REPO_IDS.keys())}""",
)
parser.add_argument(
"--raw-dir",
type=Path,
required=True,
        help="Directory containing input raw datasets (e.g. `data/aloha_mobile_chair_raw` or `data/pusht_raw`).",
)
parser.add_argument(
"--repo-id",
type=str,
required=True,
        help="""Repository identifier on Hugging Face: a community or a user name `/` the name of
the dataset (e.g. `lerobot/pusht_raw`, `cadene/aloha_sim_insertion_human_raw`).""",
)
args = parser.parse_args()
download_raw(**vars(args))
if __name__ == "__main__":
main()
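
Besides the CLI entry point above, `download_raw` can be called directly; a minimal sketch (editor's addition, with the local path chosen to mirror the module docstring):

```python
from pathlib import Path

from lerobot.common.datasets.push_dataset_to_hub._download_raw import download_raw

# Downloads the raw PushT snapshot into data/lerobot-raw/pusht_raw; keeping the path aligned
# with the repo_id means the naming-convention warnings above stay silent.
download_raw(raw_dir=Path("data/lerobot-raw/pusht_raw"), repo_id="lerobot-raw/pusht_raw")
```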

View File

@ -1,184 +0,0 @@
#!/usr/bin/env python
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Use this script to batch encode lerobot dataset from their raw format to LeRobotDataset and push their updated
version to the hub. Under the hood, this script reuses 'push_dataset_to_hub.py'. It assumes that you already
downloaded raw datasets, which you can do with the related '_download_raw.py' script.
For instance, for codebase_version = 'v1.6', the following command was run, assuming raw datasets from
lerobot-raw were downloaded in 'raw/datasets/directory':
```bash
python lerobot/common/datasets/push_dataset_to_hub/_encode_datasets.py \
--raw-dir raw/datasets/directory \
--raw-repo-ids lerobot-raw \
--local-dir push/datasets/directory \
--tests-data-dir tests/data \
--push-repo lerobot \
--vcodec libsvtav1 \
--pix-fmt yuv420p \
--g 2 \
--crf 30
```
"""
import argparse
from pathlib import Path
from lerobot.common.datasets.lerobot_dataset import CODEBASE_VERSION
from lerobot.common.datasets.push_dataset_to_hub._download_raw import AVAILABLE_RAW_REPO_IDS
from lerobot.common.datasets.push_dataset_to_hub.utils import check_repo_id
from lerobot.scripts.push_dataset_to_hub import push_dataset_to_hub
def get_push_repo_id_from_raw(raw_repo_id: str, push_repo: str) -> str:
dataset_id_raw = raw_repo_id.split("/")[1]
dataset_id = dataset_id_raw.removesuffix("_raw")
return f"{push_repo}/{dataset_id}"
def encode_datasets(
raw_dir: Path,
raw_repo_ids: list[str],
push_repo: str,
vcodec: str,
pix_fmt: str,
g: int,
crf: int,
local_dir: Path | None = None,
tests_data_dir: Path | None = None,
raw_format: str | None = None,
dry_run: bool = False,
) -> None:
if len(raw_repo_ids) == 1 and raw_repo_ids[0].lower() == "lerobot-raw":
raw_repo_ids_format = AVAILABLE_RAW_REPO_IDS
else:
if raw_format is None:
            raise ValueError("`--raw-format` must be specified when `--raw-repo-ids` is not 'lerobot-raw'.")
raw_repo_ids_format = {id_: raw_format for id_ in raw_repo_ids}
for raw_repo_id, repo_raw_format in raw_repo_ids_format.items():
check_repo_id(raw_repo_id)
dataset_repo_id_push = get_push_repo_id_from_raw(raw_repo_id, push_repo)
dataset_raw_dir = raw_dir / raw_repo_id
dataset_dir = local_dir / dataset_repo_id_push if local_dir is not None else None
encoding = {
"vcodec": vcodec,
"pix_fmt": pix_fmt,
"g": g,
"crf": crf,
}
if not (dataset_raw_dir).is_dir():
raise NotADirectoryError(dataset_raw_dir)
if not dry_run:
push_dataset_to_hub(
dataset_raw_dir,
raw_format=repo_raw_format,
repo_id=dataset_repo_id_push,
local_dir=dataset_dir,
resume=True,
encoding=encoding,
tests_data_dir=tests_data_dir,
)
else:
print(
f"DRY RUN: {dataset_raw_dir} --> {dataset_dir} --> {dataset_repo_id_push}@{CODEBASE_VERSION}"
)
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
"--raw-dir",
type=Path,
default=Path("data"),
help="Directory where raw datasets are located.",
)
parser.add_argument(
"--raw-repo-ids",
type=str,
nargs="*",
default=["lerobot-raw"],
        help="""Raw dataset repo ids. If 'lerobot-raw', the keys from `AVAILABLE_RAW_REPO_IDS` will be
used and raw datasets will be fetched from the 'lerobot-raw/' repo and pushed with their
associated format. It is assumed that each dataset is located at `raw_dir / raw_repo_id` """,
)
parser.add_argument(
"--raw-format",
type=str,
default=None,
help="""Raw format to use for the raw repo-ids. Must be specified if --raw-repo-ids is not
'lerobot-raw'""",
)
parser.add_argument(
"--local-dir",
type=Path,
default=None,
help="""When provided, writes the dataset converted to LeRobotDataset format in this directory
(e.g. `data/lerobot/aloha_mobile_chair`).""",
)
parser.add_argument(
"--push-repo",
type=str,
default="lerobot",
help="Repo to upload datasets to",
)
parser.add_argument(
"--vcodec",
type=str,
default="libsvtav1",
help="Codec to use for encoding videos",
)
parser.add_argument(
"--pix-fmt",
type=str,
default="yuv420p",
help="Pixel formats (chroma subsampling) to be used for encoding",
)
parser.add_argument(
"--g",
type=int,
default=2,
help="Group of pictures sizes to be used for encoding.",
)
parser.add_argument(
"--crf",
type=int,
default=30,
help="Constant rate factors to be used for encoding.",
)
parser.add_argument(
"--tests-data-dir",
type=Path,
default=None,
help=(
"When provided, save tests artifacts into the given directory "
"(e.g. `--tests-data-dir tests/data` will save to tests/data/{--repo-id})."
),
)
parser.add_argument(
"--dry-run",
type=int,
default=0,
help="If not set to 0, this script won't download or upload anything.",
)
args = parser.parse_args()
encode_datasets(**vars(args))
if __name__ == "__main__":
main()
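
The batch-encoding entry point can also be exercised programmatically; the sketch below (editor's addition) uses the dry-run path, which only prints the resolved source and target repo ids. Note that the raw datasets are still expected to exist under `raw_dir`, since the directory check runs before the dry-run branch.

```python
from pathlib import Path

from lerobot.common.datasets.push_dataset_to_hub._encode_datasets import encode_datasets

# Dry run over the default lerobot-raw mapping: nothing is encoded or pushed.
# `raw_dir` must already contain the downloaded raw datasets (see _download_raw.py).
encode_datasets(
    raw_dir=Path("raw/datasets/directory"),
    raw_repo_ids=["lerobot-raw"],
    push_repo="lerobot",
    vcodec="libsvtav1",
    pix_fmt="yuv420p",
    g=2,
    crf=30,
    dry_run=True,
)
```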

View File

@ -1,326 +0,0 @@
#!/usr/bin/env python
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# imagecodecs/numcodecs.py
# Copyright (c) 2021-2022, Christoph Gohlke
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# Copied from: https://github.com/real-stanford/universal_manipulation_interface/blob/298776ce251f33b6b3185a98d6e7d1f9ad49168b/diffusion_policy/codecs/imagecodecs_numcodecs.py#L1
"""Additional numcodecs implemented using imagecodecs."""
__version__ = "2022.9.26"
__all__ = ("register_codecs",)
import imagecodecs
import numpy
from numcodecs.abc import Codec
from numcodecs.registry import get_codec, register_codec
# TODO (azouitine): Remove useless codecs
def protective_squeeze(x: numpy.ndarray):
"""
Squeeze dim only if it's not the last dim.
Image dim expected to be *, H, W, C
"""
img_shape = x.shape[-3:]
if len(x.shape) > 3:
n_imgs = numpy.prod(x.shape[:-3])
if n_imgs > 1:
img_shape = (-1,) + img_shape
return x.reshape(img_shape)
def get_default_image_compressor(**kwargs):
if imagecodecs.JPEGXL:
# has JPEGXL
this_kwargs = {
"effort": 3,
"distance": 0.3,
# bug in libjxl, invalid codestream for non-lossless
# when decoding speed > 1
"decodingspeed": 1,
}
this_kwargs.update(kwargs)
return JpegXl(**this_kwargs)
else:
this_kwargs = {"level": 50}
this_kwargs.update(kwargs)
return Jpeg2k(**this_kwargs)
class Jpeg2k(Codec):
"""JPEG 2000 codec for numcodecs."""
codec_id = "imagecodecs_jpeg2k"
def __init__(
self,
level=None,
codecformat=None,
colorspace=None,
tile=None,
reversible=None,
bitspersample=None,
resolutions=None,
numthreads=None,
verbose=0,
):
self.level = level
self.codecformat = codecformat
self.colorspace = colorspace
self.tile = None if tile is None else tuple(tile)
self.reversible = reversible
self.bitspersample = bitspersample
self.resolutions = resolutions
self.numthreads = numthreads
self.verbose = verbose
def encode(self, buf):
buf = protective_squeeze(numpy.asarray(buf))
return imagecodecs.jpeg2k_encode(
buf,
level=self.level,
codecformat=self.codecformat,
colorspace=self.colorspace,
tile=self.tile,
reversible=self.reversible,
bitspersample=self.bitspersample,
resolutions=self.resolutions,
numthreads=self.numthreads,
verbose=self.verbose,
)
def decode(self, buf, out=None):
return imagecodecs.jpeg2k_decode(buf, verbose=self.verbose, numthreads=self.numthreads, out=out)
class JpegXl(Codec):
"""JPEG XL codec for numcodecs."""
codec_id = "imagecodecs_jpegxl"
def __init__(
self,
# encode
level=None,
effort=None,
distance=None,
lossless=None,
decodingspeed=None,
photometric=None,
planar=None,
usecontainer=None,
# decode
index=None,
keeporientation=None,
# both
numthreads=None,
):
"""
Return JPEG XL image from numpy array.
Float must be in nominal range 0..1.
Currently L, LA, RGB, RGBA images are supported in contig mode.
Extra channels are only supported for grayscale images in planar mode.
Parameters
----------
        level : Default to None, i.e. not overwriting lossless and decodingspeed options.
When < 0: Use lossless compression
When in [0,1,2,3,4]: Sets the decoding speed tier for the provided options.
Minimum is 0 (slowest to decode, best quality/density), and maximum
is 4 (fastest to decode, at the cost of some quality/density).
effort : Default to 3.
Sets encoder effort/speed level without affecting decoding speed.
Valid values are, from faster to slower speed: 1:lightning 2:thunder
3:falcon 4:cheetah 5:hare 6:wombat 7:squirrel 8:kitten 9:tortoise.
Speed: lightning, thunder, falcon, cheetah, hare, wombat, squirrel, kitten, tortoise
control the encoder effort in ascending order.
This also affects memory usage: using lower effort will typically reduce memory
consumption during encoding.
lightning and thunder are fast modes useful for lossless mode (modular).
falcon disables all of the following tools.
cheetah enables coefficient reordering, context clustering, and heuristics for selecting DCT sizes and quantization steps.
hare enables Gaborish filtering, chroma from luma, and an initial estimate of quantization steps.
wombat enables error diffusion quantization and full DCT size selection heuristics.
squirrel (default) enables dots, patches, and spline detection, and full context clustering.
kitten optimizes the adaptive quantization for a psychovisual metric.
tortoise enables a more thorough adaptive quantization search.
distance : Default to 1.0
Sets the distance level for lossy compression: target max butteraugli distance,
lower = higher quality. Range: 0 .. 15. 0.0 = mathematically lossless
(however, use JxlEncoderSetFrameLossless instead to use true lossless,
as setting distance to 0 alone is not the only requirement).
1.0 = visually lossless. Recommended range: 0.5 .. 3.0.
        lossless : Default to False.
            Use lossless encoding.
        decodingspeed : Default to 0.
            Duplicate of `level`. Valid range: [0, 4].
photometric : Return JxlColorSpace value.
Default logic is quite complicated but works most of the time.
Accepted value:
int: [-1,3]
str: ['RGB',
'WHITEISZERO', 'MINISWHITE',
'BLACKISZERO', 'MINISBLACK', 'GRAY',
'XYB', 'KNOWN']
planar : Enable multi-channel mode.
Default to false.
usecontainer :
Forces the encoder to use the box-based container format (BMFF)
even when not necessary.
When using JxlEncoderUseBoxes, JxlEncoderStoreJPEGMetadata or
JxlEncoderSetCodestreamLevel with level 10, the encoder will
automatically also use the container format, it is not necessary
to use JxlEncoderUseContainer for those use cases.
By default this setting is disabled.
index : Selectively decode frames for animation.
Default to 0, decode all frames.
When set to > 0, decode that frame index only.
keeporientation :
Enables or disables preserving of as-in-bitstream pixeldata orientation.
Some images are encoded with an Orientation tag indicating that the
decoder must perform a rotation and/or mirroring to the encoded image data.
If skip_reorientation is JXL_FALSE (the default): the decoder will apply
the transformation from the orientation setting, hence rendering the image
according to its specified intent. When producing a JxlBasicInfo, the decoder
will always set the orientation field to JXL_ORIENT_IDENTITY (matching the
returned pixel data) and also align xsize and ysize so that they correspond
to the width and the height of the returned pixel data.
If skip_reorientation is JXL_TRUE: the decoder will skip applying the
transformation from the orientation setting, returning the image in
the as-in-bitstream pixeldata orientation. This may be faster to decode
            since the decoder doesn't have to apply the transformation, but can
cause wrong display of the image if the orientation tag is not correctly
taken into account by the user.
By default, this option is disabled, and the returned pixel data is
            re-oriented according to the image's Orientation setting.
threads : Default to 1.
If <= 0, use all cores.
If > 32, clipped to 32.
"""
self.level = level
self.effort = effort
self.distance = distance
self.lossless = bool(lossless)
self.decodingspeed = decodingspeed
self.photometric = photometric
self.planar = planar
self.usecontainer = usecontainer
self.index = index
self.keeporientation = keeporientation
self.numthreads = numthreads
def encode(self, buf):
# TODO: only squeeze all but last dim
buf = protective_squeeze(numpy.asarray(buf))
return imagecodecs.jpegxl_encode(
buf,
level=self.level,
effort=self.effort,
distance=self.distance,
lossless=self.lossless,
decodingspeed=self.decodingspeed,
photometric=self.photometric,
planar=self.planar,
usecontainer=self.usecontainer,
numthreads=self.numthreads,
)
def decode(self, buf, out=None):
return imagecodecs.jpegxl_decode(
buf,
index=self.index,
keeporientation=self.keeporientation,
numthreads=self.numthreads,
out=out,
)
def _flat(out):
"""Return numpy array as contiguous view of bytes if possible."""
if out is None:
return None
view = memoryview(out)
if view.readonly or not view.contiguous:
return None
return view.cast("B")
def register_codecs(codecs=None, force=False, verbose=True):
"""Register codecs in this module with numcodecs."""
for name, cls in globals().items():
if not hasattr(cls, "codec_id") or name == "Codec":
continue
if codecs is not None and cls.codec_id not in codecs:
continue
try:
try: # noqa: SIM105
get_codec({"id": cls.codec_id})
except TypeError:
# registered, but failed
pass
except ValueError:
# not registered yet
pass
else:
if not force:
if verbose:
log_warning(f"numcodec {cls.codec_id!r} already registered")
continue
if verbose:
log_warning(f"replacing registered numcodec {cls.codec_id!r}")
register_codec(cls)
def log_warning(msg, *args, **kwargs):
"""Log message with level WARNING."""
import logging
logging.getLogger(__name__).warning(msg, *args, **kwargs)
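
These codecs are meant to be consumed through numcodecs/zarr once `register_codecs()` has been called. The sketch below is an editor's addition; it assumes it runs in the same namespace as the module above (its import path is not visible in this diff) and that `imagecodecs` is installed.

```python
import numpy as np
import zarr

# Make "imagecodecs_jpegxl" / "imagecodecs_jpeg2k" resolvable by numcodecs, then store a
# batch of HWC uint8 frames with one image per chunk so each chunk is encoded independently.
register_codecs()

frames = np.zeros((16, 96, 96, 3), dtype=np.uint8)  # illustrative shapes
z = zarr.array(
    frames,
    chunks=(1, 96, 96, 3),
    compressor=get_default_image_compressor(),  # JpegXl when available, Jpeg2k otherwise
)
decoded = z[0]  # transparently decompressed back to a (96, 96, 3) uint8 array
```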

View File

@ -1,233 +0,0 @@
#!/usr/bin/env python
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Contains utilities to process raw data format of HDF5 files like in: https://github.com/tonyzhaozh/act
"""
import gc
import shutil
from pathlib import Path
import h5py
import numpy as np
import torch
import tqdm
from datasets import Dataset, Features, Image, Sequence, Value
from PIL import Image as PILImage
from lerobot.common.datasets.lerobot_dataset import CODEBASE_VERSION
from lerobot.common.datasets.push_dataset_to_hub.utils import (
calculate_episode_data_index,
concatenate_episodes,
get_default_encoding,
save_images_concurrently,
)
from lerobot.common.datasets.utils import (
hf_transform_to_torch,
)
from lerobot.common.datasets.video_utils import VideoFrame, encode_video_frames
def get_cameras(hdf5_data):
# ignore depth channel, not currently handled
# TODO(rcadene): add depth
rgb_cameras = [key for key in hdf5_data["/observations/images"].keys() if "depth" not in key] # noqa: SIM118
return rgb_cameras
def check_format(raw_dir) -> bool:
# only frames from simulation are uncompressed
compressed_images = "sim" not in raw_dir.name
hdf5_paths = list(raw_dir.glob("episode_*.hdf5"))
assert len(hdf5_paths) != 0
for hdf5_path in hdf5_paths:
with h5py.File(hdf5_path, "r") as data:
assert "/action" in data
assert "/observations/qpos" in data
assert data["/action"].ndim == 2
assert data["/observations/qpos"].ndim == 2
num_frames = data["/action"].shape[0]
assert num_frames == data["/observations/qpos"].shape[0]
for camera in get_cameras(data):
assert num_frames == data[f"/observations/images/{camera}"].shape[0]
if compressed_images:
assert data[f"/observations/images/{camera}"].ndim == 2
else:
assert data[f"/observations/images/{camera}"].ndim == 4
b, h, w, c = data[f"/observations/images/{camera}"].shape
assert c < h and c < w, f"Expect (h,w,c) image format but ({h=},{w=},{c=}) provided."
def load_from_raw(
raw_dir: Path,
videos_dir: Path,
fps: int,
video: bool,
episodes: list[int] | None = None,
encoding: dict | None = None,
):
# only frames from simulation are uncompressed
compressed_images = "sim" not in raw_dir.name
hdf5_files = sorted(raw_dir.glob("episode_*.hdf5"))
num_episodes = len(hdf5_files)
ep_dicts = []
ep_ids = episodes if episodes else range(num_episodes)
for ep_idx in tqdm.tqdm(ep_ids):
ep_path = hdf5_files[ep_idx]
with h5py.File(ep_path, "r") as ep:
num_frames = ep["/action"].shape[0]
# last step of demonstration is considered done
done = torch.zeros(num_frames, dtype=torch.bool)
done[-1] = True
state = torch.from_numpy(ep["/observations/qpos"][:])
action = torch.from_numpy(ep["/action"][:])
if "/observations/qvel" in ep:
velocity = torch.from_numpy(ep["/observations/qvel"][:])
if "/observations/effort" in ep:
effort = torch.from_numpy(ep["/observations/effort"][:])
ep_dict = {}
for camera in get_cameras(ep):
img_key = f"observation.images.{camera}"
if compressed_images:
import cv2
# load one compressed image after the other in RAM and uncompress
imgs_array = []
for data in ep[f"/observations/images/{camera}"]:
imgs_array.append(cv2.imdecode(data, 1))
imgs_array = np.array(imgs_array)
else:
# load all images in RAM
imgs_array = ep[f"/observations/images/{camera}"][:]
if video:
# save png images in temporary directory
tmp_imgs_dir = videos_dir / "tmp_images"
save_images_concurrently(imgs_array, tmp_imgs_dir)
# encode images to a mp4 video
fname = f"{img_key}_episode_{ep_idx:06d}.mp4"
video_path = videos_dir / fname
encode_video_frames(tmp_imgs_dir, video_path, fps, **(encoding or {}))
# clean temporary images directory
shutil.rmtree(tmp_imgs_dir)
# store the reference to the video frame
ep_dict[img_key] = [
{"path": f"videos/{fname}", "timestamp": i / fps} for i in range(num_frames)
]
else:
ep_dict[img_key] = [PILImage.fromarray(x) for x in imgs_array]
ep_dict["observation.state"] = state
            if "/observations/qvel" in ep:
                ep_dict["observation.velocity"] = velocity
if "/observations/effort" in ep:
ep_dict["observation.effort"] = effort
ep_dict["action"] = action
ep_dict["episode_index"] = torch.tensor([ep_idx] * num_frames)
ep_dict["frame_index"] = torch.arange(0, num_frames, 1)
ep_dict["timestamp"] = torch.arange(0, num_frames, 1) / fps
ep_dict["next.done"] = done
# TODO(rcadene): add reward and success by computing them in sim
assert isinstance(ep_idx, int)
ep_dicts.append(ep_dict)
gc.collect()
data_dict = concatenate_episodes(ep_dicts)
total_frames = data_dict["frame_index"].shape[0]
data_dict["index"] = torch.arange(0, total_frames, 1)
return data_dict
def to_hf_dataset(data_dict, video) -> Dataset:
features = {}
keys = [key for key in data_dict if "observation.images." in key]
for key in keys:
if video:
features[key] = VideoFrame()
else:
features[key] = Image()
features["observation.state"] = Sequence(
length=data_dict["observation.state"].shape[1], feature=Value(dtype="float32", id=None)
)
if "observation.velocity" in data_dict:
features["observation.velocity"] = Sequence(
length=data_dict["observation.velocity"].shape[1], feature=Value(dtype="float32", id=None)
)
if "observation.effort" in data_dict:
features["observation.effort"] = Sequence(
length=data_dict["observation.effort"].shape[1], feature=Value(dtype="float32", id=None)
)
features["action"] = Sequence(
length=data_dict["action"].shape[1], feature=Value(dtype="float32", id=None)
)
features["episode_index"] = Value(dtype="int64", id=None)
features["frame_index"] = Value(dtype="int64", id=None)
features["timestamp"] = Value(dtype="float32", id=None)
features["next.done"] = Value(dtype="bool", id=None)
features["index"] = Value(dtype="int64", id=None)
hf_dataset = Dataset.from_dict(data_dict, features=Features(features))
hf_dataset.set_transform(hf_transform_to_torch)
return hf_dataset
def from_raw_to_lerobot_format(
raw_dir: Path,
videos_dir: Path,
fps: int | None = None,
video: bool = True,
episodes: list[int] | None = None,
encoding: dict | None = None,
):
# sanity check
check_format(raw_dir)
if fps is None:
fps = 50
data_dict = load_from_raw(raw_dir, videos_dir, fps, video, episodes, encoding)
hf_dataset = to_hf_dataset(data_dict, video)
episode_data_index = calculate_episode_data_index(hf_dataset)
info = {
"codebase_version": CODEBASE_VERSION,
"fps": fps,
"video": video,
}
if video:
info["encoding"] = get_default_encoding()
return hf_dataset, episode_data_index, info
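
For reference, a direct call to this loader looks like the sketch below (editor's addition; paths are illustrative, and the import of `from_raw_to_lerobot_format` from the file above is omitted since its module path is not shown in this diff). In the codebase it is normally invoked through `lerobot/scripts/push_dataset_to_hub.py` with `--raw-format aloha_hdf5`.

```python
from pathlib import Path

# Convert a directory of episode_*.hdf5 files into the intermediate LeRobot outputs.
hf_dataset, episode_data_index, info = from_raw_to_lerobot_format(
    raw_dir=Path("data/lerobot-raw/aloha_mobile_chair_raw"),
    videos_dir=Path("outputs/videos/aloha_mobile_chair"),
    fps=50,      # the ALOHA default used above when fps is None
    video=True,  # camera frames are encoded to mp4 and referenced by timestamp
)
print(info)  # {"codebase_version": ..., "fps": 50, "video": True, "encoding": {...}}
```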

View File

@ -1,107 +0,0 @@
#!/usr/bin/env python
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Contains utilities to process raw data format of png images files recorded with capture_camera_feed.py
"""
from pathlib import Path
import torch
from datasets import Dataset, Features, Image, Value
from PIL import Image as PILImage
from lerobot.common.datasets.lerobot_dataset import CODEBASE_VERSION
from lerobot.common.datasets.push_dataset_to_hub.utils import (
calculate_episode_data_index,
concatenate_episodes,
)
from lerobot.common.datasets.utils import hf_transform_to_torch
from lerobot.common.datasets.video_utils import VideoFrame
def check_format(raw_dir: Path) -> bool:
image_paths = list(raw_dir.glob("frame_*.png"))
if len(image_paths) == 0:
        raise ValueError(f"Missing 'frame_*.png' images in '{raw_dir}'")
def load_from_raw(raw_dir: Path, fps: int, episodes: list[int] | None = None):
if episodes is not None:
# TODO(aliberts): add support for multi-episodes.
raise NotImplementedError()
ep_dict = {}
ep_idx = 0
image_paths = sorted(raw_dir.glob("frame_*.png"))
num_frames = len(image_paths)
ep_dict["observation.image"] = [PILImage.open(x) for x in image_paths]
ep_dict["episode_index"] = torch.tensor([ep_idx] * num_frames)
ep_dict["frame_index"] = torch.arange(0, num_frames, 1)
ep_dict["timestamp"] = torch.arange(0, num_frames, 1) / fps
ep_dicts = [ep_dict]
data_dict = concatenate_episodes(ep_dicts)
total_frames = data_dict["frame_index"].shape[0]
data_dict["index"] = torch.arange(0, total_frames, 1)
return data_dict
def to_hf_dataset(data_dict, video) -> Dataset:
features = {}
if video:
features["observation.image"] = VideoFrame()
else:
features["observation.image"] = Image()
features["episode_index"] = Value(dtype="int64", id=None)
features["frame_index"] = Value(dtype="int64", id=None)
features["timestamp"] = Value(dtype="float32", id=None)
features["index"] = Value(dtype="int64", id=None)
hf_dataset = Dataset.from_dict(data_dict, features=Features(features))
hf_dataset.set_transform(hf_transform_to_torch)
return hf_dataset
def from_raw_to_lerobot_format(
raw_dir: Path,
videos_dir: Path,
fps: int | None = None,
video: bool = True,
episodes: list[int] | None = None,
encoding: dict | None = None,
):
if video or episodes or encoding is not None:
# TODO(aliberts): support this
raise NotImplementedError
# sanity check
check_format(raw_dir)
if fps is None:
fps = 30
    data_dict = load_from_raw(raw_dir, fps, episodes)
hf_dataset = to_hf_dataset(data_dict, video)
episode_data_index = calculate_episode_data_index(hf_dataset)
info = {
"codebase_version": CODEBASE_VERSION,
"fps": fps,
"video": video,
}
return hf_dataset, episode_data_index, info

View File

@ -1,233 +0,0 @@
#!/usr/bin/env python
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Contains utilities to process raw data format from dora-record
"""
import re
import warnings
from pathlib import Path
import pandas as pd
import torch
from datasets import Dataset, Features, Image, Sequence, Value
from lerobot.common.datasets.lerobot_dataset import CODEBASE_VERSION
from lerobot.common.datasets.push_dataset_to_hub.utils import calculate_episode_data_index
from lerobot.common.datasets.utils import (
hf_transform_to_torch,
)
from lerobot.common.datasets.video_utils import VideoFrame
def check_format(raw_dir) -> bool:
assert raw_dir.exists()
leader_file = list(raw_dir.glob("*.parquet"))
if len(leader_file) == 0:
raise ValueError(f"Missing parquet files in '{raw_dir}'")
return True
def load_from_raw(raw_dir: Path, videos_dir: Path, fps: int, video: bool, episodes: list[int] | None = None):
# Load data stream that will be used as reference for the timestamps synchronization
reference_files = list(raw_dir.glob("observation.images.cam_*.parquet"))
if len(reference_files) == 0:
        raise ValueError(f"Missing reference camera files ('observation.images.cam_*.parquet') in '{raw_dir}'")
# select first camera in alphanumeric order
reference_key = sorted(reference_files)[0].stem
reference_df = pd.read_parquet(raw_dir / f"{reference_key}.parquet")
reference_df = reference_df[["timestamp_utc", reference_key]]
# Merge all data stream using nearest backward strategy
df = reference_df
for path in raw_dir.glob("*.parquet"):
key = path.stem # action or observation.state or ...
if key == reference_key:
continue
if "failed_episode_index" in key:
# TODO(rcadene): add support for removing episodes that are tagged as "failed"
continue
modality_df = pd.read_parquet(path)
modality_df = modality_df[["timestamp_utc", key]]
df = pd.merge_asof(
df,
modality_df,
on="timestamp_utc",
            # "nearest" is the best option over "backward", since the latter can desynchronize camera timestamps
            # by matching timestamps that are too far apart in order to fit the backward constraint, which is not
            # the case for "nearest". However, note that "nearest" might synchronize the reference camera with
            # other cameras on slightly future timestamps.
direction="nearest",
tolerance=pd.Timedelta(f"{1 / fps} seconds"),
)
# Remove rows with episode_index -1 which indicates data that correspond to in-between episodes
df = df[df["episode_index"] != -1]
image_keys = [key for key in df if "observation.images." in key]
def get_episode_index(row):
episode_index_per_cam = {}
for key in image_keys:
path = row[key][0]["path"]
match = re.search(r"_(\d{6}).mp4", path)
if not match:
raise ValueError(path)
episode_index = int(match.group(1))
episode_index_per_cam[key] = episode_index
if len(set(episode_index_per_cam.values())) != 1:
raise ValueError(
f"All cameras are expected to belong to the same episode, but getting {episode_index_per_cam}"
)
return episode_index
df["episode_index"] = df.apply(get_episode_index, axis=1)
    # dora only uses arrays, so single values are encapsulated into a list
df["frame_index"] = df.groupby("episode_index").cumcount()
df = df.reset_index()
df["index"] = df.index
# set 'next.done' to True for the last frame of each episode
df["next.done"] = False
df.loc[df.groupby("episode_index").tail(1).index, "next.done"] = True
df["timestamp"] = df["timestamp_utc"].map(lambda x: x.timestamp())
# each episode starts with timestamp 0 to match the ones from the video
df["timestamp"] = df.groupby("episode_index")["timestamp"].transform(lambda x: x - x.iloc[0])
del df["timestamp_utc"]
# sanity check
has_nan = df.isna().any().any()
if has_nan:
raise ValueError("Dataset contains Nan values.")
# sanity check episode indices go from 0 to n-1
ep_ids = [ep_idx for ep_idx, _ in df.groupby("episode_index")]
expected_ep_ids = list(range(df["episode_index"].max() + 1))
if ep_ids != expected_ep_ids:
        raise ValueError(f"Episode indices are {ep_ids} instead of the expected {expected_ep_ids}")
# Create symlink to raw videos directory (that needs to be absolute not relative)
videos_dir.parent.mkdir(parents=True, exist_ok=True)
videos_dir.symlink_to((raw_dir / "videos").absolute())
# sanity check the video paths are well formatted
for key in df:
if "observation.images." not in key:
continue
for ep_idx in ep_ids:
video_path = videos_dir / f"{key}_episode_{ep_idx:06d}.mp4"
if not video_path.exists():
raise ValueError(f"Video file not found in {video_path}")
data_dict = {}
for key in df:
# is video frame
if "observation.images." in key:
            # we need `[0]` because dora only uses arrays, so single values are encapsulated into a list.
            # this is the case for the video_frame dictionary = [{"path": ..., "timestamp": ...}]
data_dict[key] = [video_frame[0] for video_frame in df[key].values]
# sanity check the video path is well formatted
video_path = videos_dir.parent / data_dict[key][0]["path"]
if not video_path.exists():
raise ValueError(f"Video file not found in {video_path}")
# is number
elif df[key].iloc[0].ndim == 0 or df[key].iloc[0].shape[0] == 1:
data_dict[key] = torch.from_numpy(df[key].values)
# is vector
elif df[key].iloc[0].shape[0] > 1:
data_dict[key] = torch.stack([torch.from_numpy(x.copy()) for x in df[key].values])
else:
raise ValueError(key)
return data_dict
def to_hf_dataset(data_dict, video) -> Dataset:
features = {}
keys = [key for key in data_dict if "observation.images." in key]
for key in keys:
if video:
features[key] = VideoFrame()
else:
features[key] = Image()
features["observation.state"] = Sequence(
length=data_dict["observation.state"].shape[1], feature=Value(dtype="float32", id=None)
)
if "observation.velocity" in data_dict:
features["observation.velocity"] = Sequence(
length=data_dict["observation.velocity"].shape[1], feature=Value(dtype="float32", id=None)
)
if "observation.effort" in data_dict:
features["observation.effort"] = Sequence(
length=data_dict["observation.effort"].shape[1], feature=Value(dtype="float32", id=None)
)
features["action"] = Sequence(
length=data_dict["action"].shape[1], feature=Value(dtype="float32", id=None)
)
features["episode_index"] = Value(dtype="int64", id=None)
features["frame_index"] = Value(dtype="int64", id=None)
features["timestamp"] = Value(dtype="float32", id=None)
features["next.done"] = Value(dtype="bool", id=None)
features["index"] = Value(dtype="int64", id=None)
hf_dataset = Dataset.from_dict(data_dict, features=Features(features))
hf_dataset.set_transform(hf_transform_to_torch)
return hf_dataset
def from_raw_to_lerobot_format(
raw_dir: Path,
videos_dir: Path,
fps: int | None = None,
video: bool = True,
episodes: list[int] | None = None,
encoding: dict | None = None,
):
# sanity check
check_format(raw_dir)
if fps is None:
fps = 30
else:
raise NotImplementedError()
if not video:
raise NotImplementedError()
if encoding is not None:
warnings.warn(
"Video encoding is currently done outside of LeRobot for the dora_parquet format.",
stacklevel=1,
)
    data_df = load_from_raw(raw_dir, videos_dir, fps, video, episodes)
hf_dataset = to_hf_dataset(data_df, video)
episode_data_index = calculate_episode_data_index(hf_dataset)
info = {
"codebase_version": CODEBASE_VERSION,
"fps": fps,
"video": video,
}
if video:
info["encoding"] = "unknown"
return hf_dataset, episode_data_index, info
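
The timestamp-synchronization strategy in `load_from_raw` above (merge with `direction="nearest"` and a one-frame-period tolerance) is worth a tiny standalone illustration. The sketch below is an editor's addition with made-up timestamps: the second camera frame has no state sample within 1/fps of it, so it stays NaN instead of being matched to a far-away sample.

```python
import pandas as pd

fps = 30
cam = pd.DataFrame(
    {
        "timestamp_utc": pd.to_datetime(["2024-01-01 00:00:00.000", "2024-01-01 00:00:00.100"]),
        "observation.images.cam_high": [0, 1],
    }
)
state = pd.DataFrame(
    {
        "timestamp_utc": pd.to_datetime(["2024-01-01 00:00:00.001", "2024-01-01 00:00:00.500"]),
        "observation.state": [10.0, 20.0],
    }
)
merged = pd.merge_asof(
    cam,
    state,
    on="timestamp_utc",
    direction="nearest",
    tolerance=pd.Timedelta(f"{1 / fps} seconds"),
)
print(merged["observation.state"].tolist())  # [10.0, nan]: the second frame is left unmatched
```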

View File

@ -1,312 +0,0 @@
#!/usr/bin/env python
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
For all datasets in the RLDS format.
For https://github.com/google-deepmind/open_x_embodiment (OPENX) datasets.
NOTE: You need to install tensorflow and tensorflow_datasets before running this script.
Example:
python lerobot/scripts/push_dataset_to_hub.py \
--raw-dir /path/to/data/bridge_dataset/1.0.0/ \
--repo-id your_hub/sampled_bridge_data_v2 \
--raw-format rlds \
--episodes 3 4 5 8 9
Exact dataset fps defined in openx/config.py, obtained from:
https://docs.google.com/spreadsheets/d/1rPBD77tk60AEIGZrGSODwyyzs5FgCU9Uz3h-3_t2A9g/edit?gid=0#gid=0&range=R:R
"""
import shutil
from pathlib import Path
import numpy as np
import tensorflow as tf
import tensorflow_datasets as tfds
import torch
import tqdm
from datasets import Dataset, Features, Image, Sequence, Value
from PIL import Image as PILImage
from lerobot.common.datasets.lerobot_dataset import CODEBASE_VERSION
from lerobot.common.datasets.push_dataset_to_hub.utils import (
calculate_episode_data_index,
concatenate_episodes,
get_default_encoding,
save_images_concurrently,
)
from lerobot.common.datasets.utils import (
hf_transform_to_torch,
)
from lerobot.common.datasets.video_utils import VideoFrame, encode_video_frames
np.set_printoptions(precision=2)
def tf_to_torch(data):
return torch.from_numpy(data.numpy())
def tf_img_convert(img):
if img.dtype == tf.string:
img = tf.io.decode_image(img, expand_animations=False, dtype=tf.uint8)
elif img.dtype != tf.uint8:
        raise ValueError(f"Unsupported image dtype: {img.dtype}")
return img.numpy()
def _broadcast_metadata_rlds(i: tf.Tensor, traj: dict) -> dict:
"""
In the RLDS format, each trajectory has some top-level metadata that is explicitly separated out, and a "steps"
entry. This function moves the "steps" entry to the top level, broadcasting any metadata to the length of the
trajectory. This function also adds the extra metadata fields `_len`, `_traj_index`, and `_frame_index`.
NOTE: adapted from DLimp library https://github.com/kvablack/dlimp/
"""
steps = traj.pop("steps")
traj_len = tf.shape(tf.nest.flatten(steps)[0])[0]
# broadcast metadata to the length of the trajectory
metadata = tf.nest.map_structure(lambda x: tf.repeat(x, traj_len), traj)
# put steps back in
assert "traj_metadata" not in steps
traj = {**steps, "traj_metadata": metadata}
assert "_len" not in traj
assert "_traj_index" not in traj
assert "_frame_index" not in traj
traj["_len"] = tf.repeat(traj_len, traj_len)
traj["_traj_index"] = tf.repeat(i, traj_len)
traj["_frame_index"] = tf.range(traj_len)
return traj
def load_from_raw(
raw_dir: Path,
videos_dir: Path,
fps: int,
video: bool,
episodes: list[int] | None = None,
encoding: dict | None = None,
):
"""
Args:
        raw_dir (Path): directory containing the RLDS/TFDS-formatted dataset.
        videos_dir (Path): directory where encoded mp4 videos are written when `video` is True.
        fps (int): frame rate used for frame timestamps and video encoding.
        video (bool): if True, store camera frames as references to encoded videos instead of PIL images.
        episodes (list[int] | None, optional): subset of episode indices to convert. Defaults to None (all episodes).
"""
ds_builder = tfds.builder_from_directory(str(raw_dir))
dataset = ds_builder.as_dataset(
split="all",
decoders={"steps": tfds.decode.SkipDecoding()},
)
dataset_info = ds_builder.info
print("dataset_info: ", dataset_info)
ds_length = len(dataset)
dataset = dataset.take(ds_length)
# "flatten" the dataset as such we can apply trajectory level map() easily
# each [obs][key] has a shape of (frame_size, ...)
dataset = dataset.enumerate().map(_broadcast_metadata_rlds)
    # A per-dataset standardization transform would require the dataset name; since the goal here is to
    # convert any RLDS-formatted dataset, we instead inspect the observation spec and search for 'image'
    # keys, treating the remaining keys as vector state observations.
image_keys = []
state_keys = []
observation_info = dataset_info.features["steps"]["observation"]
for key in observation_info:
# check whether the key is for an image or a vector observation
if len(observation_info[key].shape) == 3:
# only adding uint8 images discards depth images
if observation_info[key].dtype == tf.uint8:
image_keys.append(key)
else:
state_keys.append(key)
lang_key = "language_instruction" if "language_instruction" in dataset.element_spec else None
print(" - image_keys: ", image_keys)
print(" - lang_key: ", lang_key)
it = iter(dataset)
ep_dicts = []
# Init temp path to save ep_dicts in case of crash
tmp_ep_dicts_dir = videos_dir.parent.joinpath("ep_dicts")
tmp_ep_dicts_dir.mkdir(parents=True, exist_ok=True)
# check if ep_dicts have already been saved in /tmp
starting_ep_idx = 0
saved_ep_dicts = [ep.__str__() for ep in tmp_ep_dicts_dir.iterdir()]
if len(saved_ep_dicts) > 0:
saved_ep_dicts.sort()
# get last ep_idx number
starting_ep_idx = int(saved_ep_dicts[-1][-13:-3]) + 1
for i in range(starting_ep_idx):
episode = next(it)
ep_dicts.append(torch.load(saved_ep_dicts[i]))
    # if the user specified episodes, skip the ones not in the list
if episodes is not None:
if ds_length == 0:
raise ValueError("No episodes found.")
# convert episodes index to sorted list
episodes = sorted(episodes)
for ep_idx in tqdm.tqdm(range(starting_ep_idx, ds_length)):
episode = next(it)
# if user specified episodes, skip the ones not in the list
if episodes is not None:
if len(episodes) == 0:
break
if ep_idx == episodes[0]:
# process this episode
print(" selecting episode idx: ", ep_idx)
episodes.pop(0)
else:
continue # skip
num_frames = episode["action"].shape[0]
ep_dict = {}
for key in state_keys:
ep_dict[f"observation.{key}"] = tf_to_torch(episode["observation"][key])
ep_dict["action"] = tf_to_torch(episode["action"])
ep_dict["next.reward"] = tf_to_torch(episode["reward"]).float()
ep_dict["next.done"] = tf_to_torch(episode["is_last"])
ep_dict["is_terminal"] = tf_to_torch(episode["is_terminal"])
ep_dict["is_first"] = tf_to_torch(episode["is_first"])
ep_dict["discount"] = tf_to_torch(episode["discount"])
# If lang_key is present, convert the entire tensor at once
if lang_key is not None:
ep_dict["language_instruction"] = [x.numpy().decode("utf-8") for x in episode[lang_key]]
ep_dict["timestamp"] = torch.arange(0, num_frames, 1) / fps
ep_dict["episode_index"] = torch.tensor([ep_idx] * num_frames)
ep_dict["frame_index"] = torch.arange(0, num_frames, 1)
image_array_dict = {key: [] for key in image_keys}
for im_key in image_keys:
imgs = episode["observation"][im_key]
image_array_dict[im_key] = [tf_img_convert(img) for img in imgs]
# loop through all cameras
for im_key in image_keys:
img_key = f"observation.images.{im_key}"
imgs_array = image_array_dict[im_key]
imgs_array = np.array(imgs_array)
if video:
# save png images in temporary directory
tmp_imgs_dir = videos_dir / "tmp_images"
save_images_concurrently(imgs_array, tmp_imgs_dir)
# encode images to a mp4 video
fname = f"{img_key}_episode_{ep_idx:06d}.mp4"
video_path = videos_dir / fname
encode_video_frames(tmp_imgs_dir, video_path, fps, **(encoding or {}))
# clean temporary images directory
shutil.rmtree(tmp_imgs_dir)
# store the reference to the video frame
ep_dict[img_key] = [
{"path": f"videos/{fname}", "timestamp": i / fps} for i in range(num_frames)
]
else:
ep_dict[img_key] = [PILImage.fromarray(x) for x in imgs_array]
path_ep_dict = tmp_ep_dicts_dir.joinpath(
"ep_dict_" + "0" * (10 - len(str(ep_idx))) + str(ep_idx) + ".pt"
)
torch.save(ep_dict, path_ep_dict)
ep_dicts.append(ep_dict)
data_dict = concatenate_episodes(ep_dicts)
total_frames = data_dict["frame_index"].shape[0]
data_dict["index"] = torch.arange(0, total_frames, 1)
return data_dict
def to_hf_dataset(data_dict, video) -> Dataset:
features = {}
for key in data_dict:
# check if vector state obs
if key.startswith("observation.") and "observation.images." not in key:
features[key] = Sequence(length=data_dict[key].shape[1], feature=Value(dtype="float32", id=None))
# check if image obs
elif "observation.images." in key:
if video:
features[key] = VideoFrame()
else:
features[key] = Image()
if "language_instruction" in data_dict:
features["language_instruction"] = Value(dtype="string", id=None)
features["action"] = Sequence(
length=data_dict["action"].shape[1], feature=Value(dtype="float32", id=None)
)
features["is_terminal"] = Value(dtype="bool", id=None)
features["is_first"] = Value(dtype="bool", id=None)
features["discount"] = Value(dtype="float32", id=None)
features["episode_index"] = Value(dtype="int64", id=None)
features["frame_index"] = Value(dtype="int64", id=None)
features["timestamp"] = Value(dtype="float32", id=None)
features["next.reward"] = Value(dtype="float32", id=None)
features["next.done"] = Value(dtype="bool", id=None)
features["index"] = Value(dtype="int64", id=None)
hf_dataset = Dataset.from_dict(data_dict, features=Features(features))
hf_dataset.set_transform(hf_transform_to_torch)
return hf_dataset
def from_raw_to_lerobot_format(
raw_dir: Path,
videos_dir: Path,
fps: int | None = None,
video: bool = True,
episodes: list[int] | None = None,
encoding: dict | None = None,
):
data_dict = load_from_raw(raw_dir, videos_dir, fps, video, episodes, encoding)
hf_dataset = to_hf_dataset(data_dict, video)
episode_data_index = calculate_episode_data_index(hf_dataset)
info = {
"codebase_version": CODEBASE_VERSION,
"fps": fps,
"video": video,
}
if video:
info["encoding"] = get_default_encoding()
return hf_dataset, episode_data_index, info
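
The `_broadcast_metadata_rlds` helper above is the least obvious step of this loader, so here is a toy illustration (editor's addition). It assumes the same namespace as the file above (whose import path is not shown in this diff) and runs eagerly under TensorFlow 2: the top-level metadata is repeated to the trajectory length and the `_len`/`_traj_index`/`_frame_index` fields are added.

```python
import tensorflow as tf

# A 3-step trajectory with one top-level metadata field.
traj = {
    "steps": {"action": tf.zeros((3, 2)), "is_last": tf.constant([False, False, True])},
    "episode_id": tf.constant("episode_0"),
}
out = _broadcast_metadata_rlds(tf.constant(0, dtype=tf.int64), traj)

print(out["action"].shape)                       # (3, 2): "steps" entries moved to the top level
print(out["traj_metadata"]["episode_id"].shape)  # (3,): metadata broadcast to the trajectory length
print(out["_frame_index"].numpy())               # [0 1 2]
```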

View File

@ -1,275 +0,0 @@
#!/usr/bin/env python
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Process zarr files formatted like in: https://github.com/real-stanford/diffusion_policy"""
import shutil
from pathlib import Path
import numpy as np
import torch
import tqdm
import zarr
from datasets import Dataset, Features, Image, Sequence, Value
from PIL import Image as PILImage
from lerobot.common.datasets.lerobot_dataset import CODEBASE_VERSION
from lerobot.common.datasets.push_dataset_to_hub.utils import (
calculate_episode_data_index,
concatenate_episodes,
get_default_encoding,
save_images_concurrently,
)
from lerobot.common.datasets.utils import (
hf_transform_to_torch,
)
from lerobot.common.datasets.video_utils import VideoFrame, encode_video_frames
def check_format(raw_dir):
zarr_path = raw_dir / "pusht_cchi_v7_replay.zarr"
zarr_data = zarr.open(zarr_path, mode="r")
required_datasets = {
"data/action",
"data/img",
"data/keypoint",
"data/n_contacts",
"data/state",
"meta/episode_ends",
}
for dataset in required_datasets:
assert dataset in zarr_data
nb_frames = zarr_data["data/img"].shape[0]
required_datasets.remove("meta/episode_ends")
assert all(nb_frames == zarr_data[dataset].shape[0] for dataset in required_datasets)
def load_from_raw(
raw_dir: Path,
videos_dir: Path,
fps: int,
video: bool,
episodes: list[int] | None = None,
keypoints_instead_of_image: bool = False,
encoding: dict | None = None,
):
try:
import pymunk
from gym_pusht.envs.pusht import PushTEnv, pymunk_to_shapely
from lerobot.common.datasets.push_dataset_to_hub._diffusion_policy_replay_buffer import (
ReplayBuffer as DiffusionPolicyReplayBuffer,
)
except ModuleNotFoundError as e:
print("`gym_pusht` is not installed. Please install it with `pip install 'lerobot[gym_pusht]'`")
raise e
# as defined in the gym-pusht env: https://github.com/huggingface/gym-pusht/blob/e0684ff988d223808c0a9dcfaba9dc4991791370/gym_pusht/envs/pusht.py#L174
success_threshold = 0.95 # 95% coverage,
zarr_path = raw_dir / "pusht_cchi_v7_replay.zarr"
zarr_data = DiffusionPolicyReplayBuffer.copy_from_path(zarr_path)
episode_ids = torch.from_numpy(zarr_data.get_episode_idxs())
assert len(
{zarr_data[key].shape[0] for key in zarr_data.keys()} # noqa: SIM118
), "Some data type dont have the same number of total frames."
# TODO(rcadene): verify that goal pose is expected to be fixed
goal_pos_angle = np.array([256, 256, np.pi / 4]) # x, y, theta (in radians)
goal_body = PushTEnv.get_goal_pose_body(goal_pos_angle)
imgs = torch.from_numpy(zarr_data["img"]) # b h w c
states = torch.from_numpy(zarr_data["state"])
actions = torch.from_numpy(zarr_data["action"])
# load data indices from which each episode starts and ends
from_ids, to_ids = [], []
from_idx = 0
for to_idx in zarr_data.meta["episode_ends"]:
from_ids.append(from_idx)
to_ids.append(to_idx)
from_idx = to_idx
num_episodes = len(from_ids)
ep_dicts = []
ep_ids = episodes if episodes else range(num_episodes)
for ep_idx, selected_ep_idx in tqdm.tqdm(enumerate(ep_ids)):
from_idx = from_ids[selected_ep_idx]
to_idx = to_ids[selected_ep_idx]
num_frames = to_idx - from_idx
# sanity check
assert (episode_ids[from_idx:to_idx] == ep_idx).all()
# get image
if not keypoints_instead_of_image:
image = imgs[from_idx:to_idx]
assert image.min() >= 0.0
assert image.max() <= 255.0
image = image.type(torch.uint8)
# get state
state = states[from_idx:to_idx]
agent_pos = state[:, :2]
block_pos = state[:, 2:4]
block_angle = state[:, 4]
# get reward, success, done, and (maybe) keypoints
reward = torch.zeros(num_frames)
success = torch.zeros(num_frames, dtype=torch.bool)
if keypoints_instead_of_image:
keypoints = torch.zeros(num_frames, 16) # 8 keypoints each with 2 coords
done = torch.zeros(num_frames, dtype=torch.bool)
for i in range(num_frames):
space = pymunk.Space()
space.gravity = 0, 0
space.damping = 0
# Add walls.
walls = [
PushTEnv.add_segment(space, (5, 506), (5, 5), 2),
PushTEnv.add_segment(space, (5, 5), (506, 5), 2),
PushTEnv.add_segment(space, (506, 5), (506, 506), 2),
PushTEnv.add_segment(space, (5, 506), (506, 506), 2),
]
space.add(*walls)
block_body, block_shapes = PushTEnv.add_tee(space, block_pos[i].tolist(), block_angle[i].item())
goal_geom = pymunk_to_shapely(goal_body, block_body.shapes)
block_geom = pymunk_to_shapely(block_body, block_body.shapes)
intersection_area = goal_geom.intersection(block_geom).area
goal_area = goal_geom.area
coverage = intersection_area / goal_area
reward[i] = np.clip(coverage / success_threshold, 0, 1)
success[i] = coverage > success_threshold
if keypoints_instead_of_image:
keypoints[i] = torch.from_numpy(PushTEnv.get_keypoints(block_shapes).flatten())
# last step of demonstration is considered done
done[-1] = True
ep_dict = {}
if not keypoints_instead_of_image:
imgs_array = [x.numpy() for x in image]
img_key = "observation.image"
if video:
# save png images in temporary directory
tmp_imgs_dir = videos_dir / "tmp_images"
save_images_concurrently(imgs_array, tmp_imgs_dir)
# encode images to a mp4 video
fname = f"{img_key}_episode_{ep_idx:06d}.mp4"
video_path = videos_dir / fname
encode_video_frames(tmp_imgs_dir, video_path, fps, **(encoding or {}))
# clean temporary images directory
shutil.rmtree(tmp_imgs_dir)
# store the reference to the video frame
ep_dict[img_key] = [
{"path": f"videos/{fname}", "timestamp": i / fps} for i in range(num_frames)
]
else:
ep_dict[img_key] = [PILImage.fromarray(x) for x in imgs_array]
ep_dict["observation.state"] = agent_pos
if keypoints_instead_of_image:
ep_dict["observation.environment_state"] = keypoints
ep_dict["action"] = actions[from_idx:to_idx]
ep_dict["episode_index"] = torch.tensor([ep_idx] * num_frames, dtype=torch.int64)
ep_dict["frame_index"] = torch.arange(0, num_frames, 1)
ep_dict["timestamp"] = torch.arange(0, num_frames, 1) / fps
# ep_dict["next.observation.image"] = image[1:],
# ep_dict["next.observation.state"] = agent_pos[1:],
# TODO(rcadene): verify that reward and done are aligned with image and agent_pos
ep_dict["next.reward"] = torch.cat([reward[1:], reward[[-1]]])
ep_dict["next.done"] = torch.cat([done[1:], done[[-1]]])
ep_dict["next.success"] = torch.cat([success[1:], success[[-1]]])
ep_dicts.append(ep_dict)
data_dict = concatenate_episodes(ep_dicts)
total_frames = data_dict["frame_index"].shape[0]
data_dict["index"] = torch.arange(0, total_frames, 1)
return data_dict
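To make the coverage-based reward in the loop above concrete, here is a tiny worked example using the same formula (the coverage numbers are made up):

```python
import numpy as np

success_threshold = 0.95  # 95% coverage, as in gym-pusht
for coverage in (0.50, 0.94, 0.97):
    reward = np.clip(coverage / success_threshold, 0, 1)
    success = coverage > success_threshold
    print(f"coverage={coverage:.2f} -> reward={reward:.3f}, success={success}")
# coverage=0.50 -> reward=0.526, success=False
# coverage=0.94 -> reward=0.989, success=False
# coverage=0.97 -> reward=1.000, success=True
```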
def to_hf_dataset(data_dict, video, keypoints_instead_of_image: bool = False):
features = {}
if not keypoints_instead_of_image:
if video:
features["observation.image"] = VideoFrame()
else:
features["observation.image"] = Image()
features["observation.state"] = Sequence(
length=data_dict["observation.state"].shape[1], feature=Value(dtype="float32", id=None)
)
if keypoints_instead_of_image:
features["observation.environment_state"] = Sequence(
length=data_dict["observation.environment_state"].shape[1],
feature=Value(dtype="float32", id=None),
)
features["action"] = Sequence(
length=data_dict["action"].shape[1], feature=Value(dtype="float32", id=None)
)
features["episode_index"] = Value(dtype="int64", id=None)
features["frame_index"] = Value(dtype="int64", id=None)
features["timestamp"] = Value(dtype="float32", id=None)
features["next.reward"] = Value(dtype="float32", id=None)
features["next.done"] = Value(dtype="bool", id=None)
features["next.success"] = Value(dtype="bool", id=None)
features["index"] = Value(dtype="int64", id=None)
hf_dataset = Dataset.from_dict(data_dict, features=Features(features))
hf_dataset.set_transform(hf_transform_to_torch)
return hf_dataset
def from_raw_to_lerobot_format(
raw_dir: Path,
videos_dir: Path,
fps: int | None = None,
video: bool = True,
episodes: list[int] | None = None,
encoding: dict | None = None,
):
# Manually change this to True to use keypoints of the T instead of an image observation (but don't merge with
# this set to True). Also make sure to use video = 0 in the `push_dataset_to_hub.py` script.
keypoints_instead_of_image = False
# sanity check
check_format(raw_dir)
if fps is None:
fps = 10
data_dict = load_from_raw(raw_dir, videos_dir, fps, video, episodes, keypoints_instead_of_image, encoding)
hf_dataset = to_hf_dataset(data_dict, video, keypoints_instead_of_image)
episode_data_index = calculate_episode_data_index(hf_dataset)
info = {
"codebase_version": CODEBASE_VERSION,
"fps": fps,
"video": video if not keypoints_instead_of_image else 0,
}
if video:
info["encoding"] = get_default_encoding()
return hf_dataset, episode_data_index, info
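When `video=True`, each image entry stored by the converter above is a lightweight reference into the encoded mp4 rather than raw pixels. A small sketch of what those references look like for a 3-frame episode at 10 fps, reusing the filename pattern from the code:

```python
fps = 10
img_key = "observation.image"
fname = f"{img_key}_episode_{0:06d}.mp4"  # "observation.image_episode_000000.mp4"
frame_refs = [{"path": f"videos/{fname}", "timestamp": i / fps} for i in range(3)]
print(frame_refs[1])  # {'path': 'videos/observation.image_episode_000000.mp4', 'timestamp': 0.1}
```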

View File

@ -1,234 +0,0 @@
#!/usr/bin/env python
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Process UMI (Universal Manipulation Interface) data stored in Zarr format like in: https://github.com/real-stanford/universal_manipulation_interface"""
import logging
import shutil
from pathlib import Path
import torch
import tqdm
import zarr
from datasets import Dataset, Features, Image, Sequence, Value
from PIL import Image as PILImage
from lerobot.common.datasets.lerobot_dataset import CODEBASE_VERSION
from lerobot.common.datasets.push_dataset_to_hub._umi_imagecodecs_numcodecs import register_codecs
from lerobot.common.datasets.push_dataset_to_hub.utils import (
calculate_episode_data_index,
concatenate_episodes,
get_default_encoding,
save_images_concurrently,
)
from lerobot.common.datasets.utils import (
hf_transform_to_torch,
)
from lerobot.common.datasets.video_utils import VideoFrame, encode_video_frames
def check_format(raw_dir) -> bool:
zarr_path = raw_dir / "cup_in_the_wild.zarr"
zarr_data = zarr.open(zarr_path, mode="r")
required_datasets = {
"data/robot0_demo_end_pose",
"data/robot0_demo_start_pose",
"data/robot0_eef_pos",
"data/robot0_eef_rot_axis_angle",
"data/robot0_gripper_width",
"meta/episode_ends",
"data/camera0_rgb",
}
for dataset in required_datasets:
if dataset not in zarr_data:
return False
# mandatory to access zarr_data
register_codecs()
nb_frames = zarr_data["data/camera0_rgb"].shape[0]
required_datasets.remove("meta/episode_ends")
assert all(nb_frames == zarr_data[dataset].shape[0] for dataset in required_datasets)
def load_from_raw(
raw_dir: Path,
videos_dir: Path,
fps: int,
video: bool,
episodes: list[int] | None = None,
encoding: dict | None = None,
):
zarr_path = raw_dir / "cup_in_the_wild.zarr"
zarr_data = zarr.open(zarr_path, mode="r")
# We process the image data separately because it is too large to fit in memory
end_pose = torch.from_numpy(zarr_data["data/robot0_demo_end_pose"][:])
start_pos = torch.from_numpy(zarr_data["data/robot0_demo_start_pose"][:])
eff_pos = torch.from_numpy(zarr_data["data/robot0_eef_pos"][:])
eff_rot_axis_angle = torch.from_numpy(zarr_data["data/robot0_eef_rot_axis_angle"][:])
gripper_width = torch.from_numpy(zarr_data["data/robot0_gripper_width"][:])
states_pos = torch.cat([eff_pos, eff_rot_axis_angle], dim=1)
states = torch.cat([states_pos, gripper_width], dim=1)
episode_ends = zarr_data["meta/episode_ends"][:]
num_episodes = episode_ends.shape[0]
# We convert it to a torch tensor later because the jit function does not support torch tensors
episode_ends = torch.from_numpy(episode_ends)
# load data indices from which each episode starts and ends
from_ids, to_ids = [], []
from_idx = 0
for to_idx in episode_ends:
from_ids.append(from_idx)
to_ids.append(to_idx)
from_idx = to_idx
ep_dicts_dir = videos_dir / "ep_dicts"
ep_dicts_dir.mkdir(exist_ok=True, parents=True)
ep_dicts = []
ep_ids = episodes if episodes else range(num_episodes)
for ep_idx, selected_ep_idx in tqdm.tqdm(enumerate(ep_ids)):
ep_dict_path = ep_dicts_dir / f"{ep_idx}"
if not ep_dict_path.is_file():
from_idx = from_ids[selected_ep_idx]
to_idx = to_ids[selected_ep_idx]
num_frames = to_idx - from_idx
# TODO(rcadene): save temporary images of the episode?
state = states[from_idx:to_idx]
ep_dict = {}
# load 57MB of images in RAM (400x224x224x3 uint8)
imgs_array = zarr_data["data/camera0_rgb"][from_idx:to_idx]
img_key = "observation.image"
if video:
fname = f"{img_key}_episode_{ep_idx:06d}.mp4"
video_path = videos_dir / fname
if not video_path.is_file():
# save png images in temporary directory
tmp_imgs_dir = videos_dir / "tmp_images"
save_images_concurrently(imgs_array, tmp_imgs_dir)
# encode images to a mp4 video
encode_video_frames(tmp_imgs_dir, video_path, fps, **(encoding or {}))
# clean temporary images directory
shutil.rmtree(tmp_imgs_dir)
# store the reference to the video frame
ep_dict[img_key] = [
{"path": f"videos/{fname}", "timestamp": i / fps} for i in range(num_frames)
]
else:
ep_dict[img_key] = [PILImage.fromarray(x) for x in imgs_array]
ep_dict["observation.state"] = state
ep_dict["episode_index"] = torch.tensor([ep_idx] * num_frames, dtype=torch.int64)
ep_dict["frame_index"] = torch.arange(0, num_frames, 1)
ep_dict["timestamp"] = torch.arange(0, num_frames, 1) / fps
ep_dict["episode_data_index_from"] = torch.tensor([from_idx] * num_frames)
ep_dict["episode_data_index_to"] = torch.tensor([from_idx + num_frames] * num_frames)
ep_dict["end_pose"] = end_pose[from_idx:to_idx]
ep_dict["start_pos"] = start_pos[from_idx:to_idx]
ep_dict["gripper_width"] = gripper_width[from_idx:to_idx]
torch.save(ep_dict, ep_dict_path)
else:
ep_dict = torch.load(ep_dict_path)
ep_dicts.append(ep_dict)
data_dict = concatenate_episodes(ep_dicts)
total_frames = data_dict["frame_index"].shape[0]
data_dict["index"] = torch.arange(0, total_frames, 1)
return data_dict
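The UMI state vector built above concatenates the end-effector position (3), its axis-angle rotation (3), and the gripper width (1), assuming the gripper width is stored as a (num_frames, 1) array as the `dim=1` concatenation requires. A quick shape check with dummy tensors (the frame count is arbitrary):

```python
import torch

num_frames = 5
eff_pos = torch.zeros(num_frames, 3)             # x, y, z
eff_rot_axis_angle = torch.zeros(num_frames, 3)  # axis-angle rotation
gripper_width = torch.zeros(num_frames, 1)       # one scalar per frame

states_pos = torch.cat([eff_pos, eff_rot_axis_angle], dim=1)
states = torch.cat([states_pos, gripper_width], dim=1)
print(states.shape)  # torch.Size([5, 7])
```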
def to_hf_dataset(data_dict, video):
features = {}
if video:
features["observation.image"] = VideoFrame()
else:
features["observation.image"] = Image()
features["observation.state"] = Sequence(
length=data_dict["observation.state"].shape[1], feature=Value(dtype="float32", id=None)
)
features["episode_index"] = Value(dtype="int64", id=None)
features["frame_index"] = Value(dtype="int64", id=None)
features["timestamp"] = Value(dtype="float32", id=None)
features["index"] = Value(dtype="int64", id=None)
features["episode_data_index_from"] = Value(dtype="int64", id=None)
features["episode_data_index_to"] = Value(dtype="int64", id=None)
# `start_pos` and `end_pos` respectively represent the positions of the end-effector
# at the beginning and the end of the episode.
# `gripper_width` indicates the distance between the grippers, and this value is included
# in the state vector, which comprises the concatenation of the end-effector position
# and gripper width.
features["end_pose"] = Sequence(
length=data_dict["end_pose"].shape[1], feature=Value(dtype="float32", id=None)
)
features["start_pos"] = Sequence(
length=data_dict["start_pos"].shape[1], feature=Value(dtype="float32", id=None)
)
features["gripper_width"] = Sequence(
length=data_dict["gripper_width"].shape[1], feature=Value(dtype="float32", id=None)
)
hf_dataset = Dataset.from_dict(data_dict, features=Features(features))
hf_dataset.set_transform(hf_transform_to_torch)
return hf_dataset
def from_raw_to_lerobot_format(
raw_dir: Path,
videos_dir: Path,
fps: int | None = None,
video: bool = True,
episodes: list[int] | None = None,
encoding: dict | None = None,
):
# sanity check
check_format(raw_dir)
if fps is None:
# For umi cup in the wild: https://arxiv.org/pdf/2402.10329#table.caption.16
fps = 10
if not video:
logging.warning(
"Generating UMI dataset without `video=True` creates ~150GB on disk and requires ~80GB in RAM."
)
data_dict = load_from_raw(raw_dir, videos_dir, fps, video, episodes, encoding)
hf_dataset = to_hf_dataset(data_dict, video)
episode_data_index = calculate_episode_data_index(hf_dataset)
info = {
"codebase_version": CODEBASE_VERSION,
"fps": fps,
"video": video,
}
if video:
info["encoding"] = get_default_encoding()
return hf_dataset, episode_data_index, info

View File

@ -1,200 +0,0 @@
#!/usr/bin/env python
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Process pickle files formatted like in: https://github.com/fyhMer/fowm"""
import pickle
import shutil
from pathlib import Path
import einops
import torch
import tqdm
from datasets import Dataset, Features, Image, Sequence, Value
from PIL import Image as PILImage
from lerobot.common.datasets.lerobot_dataset import CODEBASE_VERSION
from lerobot.common.datasets.push_dataset_to_hub.utils import (
calculate_episode_data_index,
concatenate_episodes,
get_default_encoding,
save_images_concurrently,
)
from lerobot.common.datasets.utils import (
hf_transform_to_torch,
)
from lerobot.common.datasets.video_utils import VideoFrame, encode_video_frames
def check_format(raw_dir):
keys = {"actions", "rewards", "dones"}
nested_keys = {"observations": {"rgb", "state"}, "next_observations": {"rgb", "state"}}
xarm_files = list(raw_dir.glob("*.pkl"))
assert len(xarm_files) > 0
with open(xarm_files[0], "rb") as f:
dataset_dict = pickle.load(f)
assert isinstance(dataset_dict, dict)
assert all(k in dataset_dict for k in keys)
# Check for consistent lengths in nested keys
expected_len = len(dataset_dict["actions"])
assert all(len(dataset_dict[key]) == expected_len for key in keys if key in dataset_dict)
for key, subkeys in nested_keys.items():
nested_dict = dataset_dict.get(key, {})
assert all(len(nested_dict[subkey]) == expected_len for subkey in subkeys if subkey in nested_dict)
def load_from_raw(
raw_dir: Path,
videos_dir: Path,
fps: int,
video: bool,
episodes: list[int] | None = None,
encoding: dict | None = None,
):
pkl_path = raw_dir / "buffer.pkl"
with open(pkl_path, "rb") as f:
pkl_data = pickle.load(f)
# load data indices from which each episode starts and ends
from_ids, to_ids = [], []
from_idx, to_idx = 0, 0
for done in pkl_data["dones"]:
to_idx += 1
if not done:
continue
from_ids.append(from_idx)
to_ids.append(to_idx)
from_idx = to_idx
num_episodes = len(from_ids)
ep_dicts = []
ep_ids = episodes if episodes else range(num_episodes)
for ep_idx, selected_ep_idx in tqdm.tqdm(enumerate(ep_ids)):
from_idx = from_ids[selected_ep_idx]
to_idx = to_ids[selected_ep_idx]
num_frames = to_idx - from_idx
image = torch.tensor(pkl_data["observations"]["rgb"][from_idx:to_idx])
image = einops.rearrange(image, "b c h w -> b h w c")
state = torch.tensor(pkl_data["observations"]["state"][from_idx:to_idx])
action = torch.tensor(pkl_data["actions"][from_idx:to_idx])
# TODO(rcadene): we have a missing last frame which is the observation when the env is done
# it is critical to have this frame for tdmpc to predict a "done observation/state"
# next_image = torch.tensor(pkl_data["next_observations"]["rgb"][from_idx:to_idx])
# next_state = torch.tensor(pkl_data["next_observations"]["state"][from_idx:to_idx])
next_reward = torch.tensor(pkl_data["rewards"][from_idx:to_idx])
next_done = torch.tensor(pkl_data["dones"][from_idx:to_idx])
ep_dict = {}
imgs_array = [x.numpy() for x in image]
img_key = "observation.image"
if video:
# save png images in temporary directory
tmp_imgs_dir = videos_dir / "tmp_images"
save_images_concurrently(imgs_array, tmp_imgs_dir)
# encode images to a mp4 video
fname = f"{img_key}_episode_{ep_idx:06d}.mp4"
video_path = videos_dir / fname
encode_video_frames(tmp_imgs_dir, video_path, fps, **(encoding or {}))
# clean temporary images directory
shutil.rmtree(tmp_imgs_dir)
# store the reference to the video frame
ep_dict[img_key] = [{"path": f"videos/{fname}", "timestamp": i / fps} for i in range(num_frames)]
else:
ep_dict[img_key] = [PILImage.fromarray(x) for x in imgs_array]
ep_dict["observation.state"] = state
ep_dict["action"] = action
ep_dict["episode_index"] = torch.tensor([ep_idx] * num_frames, dtype=torch.int64)
ep_dict["frame_index"] = torch.arange(0, num_frames, 1)
ep_dict["timestamp"] = torch.arange(0, num_frames, 1) / fps
# ep_dict["next.observation.image"] = next_image
# ep_dict["next.observation.state"] = next_state
ep_dict["next.reward"] = next_reward
ep_dict["next.done"] = next_done
ep_dicts.append(ep_dict)
data_dict = concatenate_episodes(ep_dicts)
total_frames = data_dict["frame_index"].shape[0]
data_dict["index"] = torch.arange(0, total_frames, 1)
return data_dict
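The xarm loader above derives episode boundaries from the per-frame `dones` flags. A tiny sketch of that bookkeeping on made-up data:

```python
dones = [False, False, True, False, True]  # two episodes: frames 0-2 and 3-4

from_ids, to_ids = [], []
from_idx, to_idx = 0, 0
for done in dones:
    to_idx += 1
    if not done:
        continue
    from_ids.append(from_idx)
    to_ids.append(to_idx)
    from_idx = to_idx

print(from_ids, to_ids)  # [0, 3] [3, 5]
```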
def to_hf_dataset(data_dict, video):
features = {}
if video:
features["observation.image"] = VideoFrame()
else:
features["observation.image"] = Image()
features["observation.state"] = Sequence(
length=data_dict["observation.state"].shape[1], feature=Value(dtype="float32", id=None)
)
features["action"] = Sequence(
length=data_dict["action"].shape[1], feature=Value(dtype="float32", id=None)
)
features["episode_index"] = Value(dtype="int64", id=None)
features["frame_index"] = Value(dtype="int64", id=None)
features["timestamp"] = Value(dtype="float32", id=None)
features["next.reward"] = Value(dtype="float32", id=None)
features["next.done"] = Value(dtype="bool", id=None)
features["index"] = Value(dtype="int64", id=None)
# TODO(rcadene): add success
# features["next.success"] = Value(dtype='bool', id=None)
hf_dataset = Dataset.from_dict(data_dict, features=Features(features))
hf_dataset.set_transform(hf_transform_to_torch)
return hf_dataset
def from_raw_to_lerobot_format(
raw_dir: Path,
videos_dir: Path,
fps: int | None = None,
video: bool = True,
episodes: list[int] | None = None,
encoding: dict | None = None,
):
# sanity check
check_format(raw_dir)
if fps is None:
fps = 15
data_dict = load_from_raw(raw_dir, videos_dir, fps, video, episodes, encoding)
hf_dataset = to_hf_dataset(data_dict, video)
episode_data_index = calculate_episode_data_index(hf_dataset)
info = {
"codebase_version": CODEBASE_VERSION,
"fps": fps,
"video": video,
}
if video:
info["encoding"] = get_default_encoding()
return hf_dataset, episode_data_index, info

View File

@ -257,6 +257,7 @@ def encode_video_frames(
) -> None:
"""More info on ffmpeg arguments tuning on `benchmark/video/README.md`"""
video_path = Path(video_path)
imgs_dir = Path(imgs_dir)
video_path.parent.mkdir(parents=True, exist_ok=True)
ffmpeg_args = OrderedDict(

View File

@ -1,3 +0,0 @@
from .motors_bus import visualize_motors_bus
__all__ = ["visualize_motors_bus"]

View File

@ -1,82 +0,0 @@
"""
Usage example
```python
from lerobot.common.debugging.motors_bus import visualize_motors_bus
from lerobot.common.robots.so100 import SO100Robot, SO100RobotConfig
cfg = SO100RobotConfig(port="/dev/tty.usbmodem58760430541")
so100 = SO100Robot(cfg)
visualize_motors_bus(so100.arm)
```
"""
import time
from lerobot.common.motors import MotorsBus
from lerobot.common.motors.feetech.feetech_calibration import (
adjusted_to_homing_ticks,
adjusted_to_motor_ticks,
convert_degrees_to_ticks,
convert_ticks_to_degrees,
)
def visualize_motors_bus(motors_bus: MotorsBus):
"""
Reads each joint's (1) raw ticks, (2) homed ticks, (3) degrees, and (4) invert-adjusted ticks.
"""
if not motors_bus.is_connected:
motors_bus.connect()
# Disable torque on all motors so you can move them freely by hand
values_dict = {idx: 0 for idx in motors_bus.motor_ids}
motors_bus.write("Torque_Enable", values_dict)
print("Torque disabled on all joints.")
try:
print("\nPress Ctrl+C to quit.\n")
while True:
# Read *raw* positions (no calibration).
raw_positions = motors_bus.read("Present_Position")
# # Read *already-homed* positions
# homed_positions = motor_bus.read("Present_Position")
for name, raw_ticks in raw_positions.items():
idx = motors_bus.motors[name][0]
model = motors_bus.motors[name][1]
# homed_val = homed_positions[i] # degrees or % if linear
# Manually compute "adjusted ticks" from raw ticks
manual_adjusted = adjusted_to_homing_ticks(raw_ticks, model, motors_bus, idx)
# Convert to degrees
manual_degs = convert_ticks_to_degrees(manual_adjusted, model)
# Convert that deg back to ticks
manual_ticks = convert_degrees_to_ticks(manual_degs, model)
# Then invert them using offset & bus drive mode
inv_ticks = adjusted_to_motor_ticks(manual_ticks, model, motors_bus, idx)
print(
f"{name:15s} | "
f"RAW={raw_ticks:4d} | "
# f"HOMED_FROM_READ={homed_val:7.2f} | "
f"HOMED_TICKS={manual_adjusted:6d} | "
f"MANUAL_ADJ_DEG={manual_degs:7.2f} | "
f"MANUAL_ADJ_TICKS={manual_ticks:6d} | "
f"INV_TICKS={inv_ticks:4d} "
)
print("----------------------------------------------------")
time.sleep(0.25)
except KeyboardInterrupt:
pass
finally:
print("\nExiting. Disconnecting bus...")
motors_bus.disconnect()
if __name__ == "__main__":
visualize_motors_bus()

View File

@ -1 +1 @@
from .motors_bus import CalibrationMode, DriveMode, Motor, MotorsBus, TorqueMode
from .motors_bus import Motor, MotorCalibration, MotorNormMode, MotorsBus

View File

@ -1,3 +1,3 @@
from .dynamixel import DynamixelMotorsBus
from .dynamixel import DriveMode, DynamixelMotorsBus, OperatingMode, TorqueMode
from .dynamixel_calibration import run_arm_calibration
from .tables import *

View File

@ -18,19 +18,70 @@
# https://emanual.robotis.com/docs/en/dxl/protocol2/#fast-sync-read-0x8a
# -> Need to check compatibility across models
import logging
from copy import deepcopy
from enum import Enum
from ..motors_bus import MotorsBus
from .tables import MODEL_BAUDRATE_TABLE, MODEL_CONTROL_TABLE, MODEL_RESOLUTION
from lerobot.common.utils.encoding_utils import decode_twos_complement, encode_twos_complement
from ..motors_bus import Motor, MotorCalibration, MotorsBus, NameOrID, Value
from .tables import (
AVAILABLE_BAUDRATES,
MODEL_BAUDRATE_TABLE,
MODEL_CONTROL_TABLE,
MODEL_NUMBER,
MODEL_RESOLUTION,
)
PROTOCOL_VERSION = 2.0
BAUDRATE = 1_000_000
DEFAULT_TIMEOUT_MS = 1000
MAX_ID_RANGE = 252
CALIBRATION_REQUIRED = ["Goal_Position", "Present_Position"]
NORMALIZATION_REQUIRED = ["Goal_Position", "Present_Position"]
CONVERT_UINT32_TO_INT32_REQUIRED = ["Goal_Position", "Present_Position"]
logger = logging.getLogger(__name__)
class OperatingMode(Enum):
# DYNAMIXEL only controls current(torque) regardless of speed and position. This mode is ideal for a
# gripper or a system that only uses current(torque) control or a system that has additional
# velocity/position controllers.
CURRENT = 0
# This mode controls velocity. This mode is identical to the Wheel Mode(endless) from existing DYNAMIXEL.
# This mode is ideal for wheel-type robots.
VELOCITY = 1
# This mode controls position. This mode is identical to the Joint Mode from existing DYNAMIXEL. Operating
# position range is limited by the Max Position Limit(48) and the Min Position Limit(52). This mode is
# ideal for articulated robots that each joint rotates less than 360 degrees.
POSITION = 3
# This mode controls position. This mode is identical to the Multi-turn Position Control from existing
# DYNAMIXEL. 512 turns are supported(-256[rev] ~ 256[rev]). This mode is ideal for multi-turn wrists or
# conveyer systems or a system that requires an additional reduction gear. Note that Max Position
# Limit(48), Min Position Limit(52) are not used on Extended Position Control Mode.
EXTENDED_POSITION = 4
# This mode controls both position and current(torque). Up to 512 turns are supported (-256[rev] ~
# 256[rev]). This mode is ideal for a system that requires both position and current control such as
# articulated robots or grippers.
CURRENT_POSITION = 5
# This mode directly controls PWM output. (Voltage Control Mode)
PWM = 16
class DriveMode(Enum):
NON_INVERTED = 0
INVERTED = 1
class TorqueMode(Enum):
ENABLED = 1
DISABLED = 0
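As a usage sketch only (assuming a connected `DynamixelMotorsBus` named `bus` with a motor called "gripper"), these enums are meant to be written by value to the corresponding control-table entries:

```python
# Hypothetical, for illustration: `bus` is a connected DynamixelMotorsBus instance.
bus.write("Torque_Enable", "gripper", TorqueMode.DISABLED.value)  # torque off before reconfiguring
bus.write("Operating_Mode", "gripper", OperatingMode.CURRENT_POSITION.value)
bus.write("Drive_Mode", "gripper", DriveMode.NON_INVERTED.value)
bus.write("Torque_Enable", "gripper", TorqueMode.ENABLED.value)
```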
class DynamixelMotorsBus(MotorsBus):
"""
@ -39,51 +90,65 @@ class DynamixelMotorsBus(MotorsBus):
https://emanual.robotis.com/docs/en/software/dynamixel/dynamixel_sdk/sample_code/python_read_write_protocol_2_0/#python-read-write-protocol-20
"""
model_ctrl_table = deepcopy(MODEL_CONTROL_TABLE)
model_resolution_table = deepcopy(MODEL_RESOLUTION)
model_baudrate_table = deepcopy(MODEL_BAUDRATE_TABLE)
calibration_required = deepcopy(CALIBRATION_REQUIRED)
available_baudrates = deepcopy(AVAILABLE_BAUDRATES)
default_timeout = DEFAULT_TIMEOUT_MS
model_baudrate_table = deepcopy(MODEL_BAUDRATE_TABLE)
model_ctrl_table = deepcopy(MODEL_CONTROL_TABLE)
model_number_table = deepcopy(MODEL_NUMBER)
model_resolution_table = deepcopy(MODEL_RESOLUTION)
normalization_required = deepcopy(NORMALIZATION_REQUIRED)
def __init__(
self,
port: str,
motors: dict[str, tuple[int, str]],
motors: dict[str, Motor],
calibration: dict[str, MotorCalibration] | None = None,
):
super().__init__(port, motors)
super().__init__(port, motors, calibration)
import dynamixel_sdk as dxl
self.port_handler = dxl.PortHandler(self.port)
self.packet_handler = dxl.PacketHandler(PROTOCOL_VERSION)
self.reader = dxl.GroupSyncRead(self.port_handler, self.packet_handler, 0, 0)
self.writer = dxl.GroupSyncWrite(self.port_handler, self.packet_handler, 0, 0)
self.sync_reader = dxl.GroupSyncRead(self.port_handler, self.packet_handler, 0, 0)
self.sync_writer = dxl.GroupSyncWrite(self.port_handler, self.packet_handler, 0, 0)
self._comm_success = dxl.COMM_SUCCESS
self._no_error = 0x00
def broadcast_ping(
self, num_retry: int = 0, raise_on_error: bool = False
) -> dict[int, list[int, int]] | None:
for _ in range(1 + num_retry):
data_list, comm = self.packet_handler.broadcastPing(self.port_handler)
if self._is_comm_success(comm):
return data_list
def configure_motors(self) -> None:
# By default, Dynamixel motors have a 500µs delay response time (corresponding to a value of 250 on
# the 'Return_Delay_Time' address). We ensure this is reduced to the minimum of 2µs (value of 0).
for id_ in self.ids:
self.write("Return_Delay_Time", id_, 0)
if raise_on_error:
raise ConnectionError(f"Broadcast ping returned a {comm} comm error.")
def _disable_torque(self, motors: list[NameOrID]) -> None:
for motor in motors:
self.write("Torque_Enable", motor, TorqueMode.DISABLED.value)
def calibrate_values(self, ids_values: dict[int, int]) -> dict[int, float]:
# TODO
return ids_values
def _enable_torque(self, motors: list[NameOrID]) -> None:
for motor in motors:
self.write("Torque_Enable", motor, TorqueMode.ENABLED.value)
def uncalibrate_values(self, ids_values: dict[int, float]) -> dict[int, int]:
# TODO
return ids_values
def _encode_value(self, value: int, data_name: str | None = None, n_bytes: int | None = None) -> int:
return encode_twos_complement(value, n_bytes)
def _is_comm_success(self, comm: int) -> bool:
import dynamixel_sdk as dxl
def _decode_value(self, value: int, data_name: str | None = None, n_bytes: int | None = None) -> int:
return decode_twos_complement(value, n_bytes)
return comm == dxl.COMM_SUCCESS
def _get_half_turn_homings(self, positions: dict[NameOrID, Value]) -> dict[NameOrID, Value]:
"""
On Dynamixel Motors:
Present_Position = Actual_Position + Homing_Offset
"""
half_turn_homings = {}
for motor, pos in positions.items():
model = self._get_motor_model(motor)
max_res = self.model_resolution_table[model] - 1
half_turn_homings[motor] = int(max_res / 2) - pos
return half_turn_homings
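A quick worked example of the half-turn homing computed above, assuming a 4096-step motor whose raw position at the chosen zero pose reads 3000:

```python
resolution = 4096
pos = 3000                              # raw Present_Position at the chosen zero pose
max_res = resolution - 1                # 4095
homing_offset = int(max_res / 2) - pos  # 2047 - 3000 = -953

# Since Present_Position = Actual_Position + Homing_Offset on Dynamixel motors,
# the same physical pose now reads close to the middle of the range:
print(pos + homing_offset)  # 2047
```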
@staticmethod
def split_int_bytes(value: int, n_bytes: int) -> list[int]:
def _split_int_to_bytes(value: int, n_bytes: int) -> list[int]:
# Validate input
if value < 0:
raise ValueError(f"Negative values are not allowed: {value}")
@ -100,14 +165,9 @@ class DynamixelMotorsBus(MotorsBus):
# Note: No need to convert back into unsigned int, since this byte preprocessing
# already handles it for us.
if n_bytes == 1:
data = [
dxl.DXL_LOBYTE(dxl.DXL_LOWORD(value)),
]
data = [value]
elif n_bytes == 2:
data = [
dxl.DXL_LOBYTE(dxl.DXL_LOWORD(value)),
dxl.DXL_HIBYTE(dxl.DXL_LOWORD(value)),
]
data = [dxl.DXL_LOBYTE(value), dxl.DXL_HIBYTE(value)]
elif n_bytes == 4:
data = [
dxl.DXL_LOBYTE(dxl.DXL_LOWORD(value)),
@ -116,3 +176,19 @@ class DynamixelMotorsBus(MotorsBus):
dxl.DXL_HIBYTE(dxl.DXL_HIWORD(value)),
]
return data
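For reference, a hedged stand-in for the byte split performed above, without the SDK's DXL_LOBYTE/DXL_HIBYTE helpers (same little-endian result for non-negative values):

```python
def split_int_to_bytes(value: int, n_bytes: int) -> list[int]:
    # Little-endian split, matching the chained DXL_LOBYTE/DXL_HIBYTE on LOWORD/HIWORD.
    return [(value >> (8 * i)) & 0xFF for i in range(n_bytes)]

print(split_int_to_bytes(2048, 4))  # [0, 8, 0, 0]
```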
def broadcast_ping(self, num_retry: int = 0, raise_on_error: bool = False) -> dict[int, int] | None:
for n_try in range(1 + num_retry):
data_list, comm = self.packet_handler.broadcastPing(self.port_handler)
if self._is_comm_success(comm):
break
logger.debug(f"Broadcast failed on port '{self.port}' ({n_try=})")
logger.debug(self.packet_handler.getTxRxResult(comm))
if not self._is_comm_success(comm):
if raise_on_error:
raise ConnectionError(self.packet_handler.getTxRxResult(comm))
return data_list if data_list else None
return {id_: data[0] for id_, data in data_list.items()}

View File

@ -17,7 +17,8 @@
import numpy as np
from ..motors_bus import CalibrationMode, MotorsBus, TorqueMode
from ..motors_bus import MotorNormMode, MotorsBus
from .dynamixel import TorqueMode
from .tables import MODEL_RESOLUTION
URL_TEMPLATE = (
@ -132,13 +133,13 @@ def run_arm_calibration(arm: MotorsBus, robot_type: str, arm_name: str, arm_type
print()
# Joints with rotational motions are expressed in degrees in nominal range of [-180, 180]
calib_mode = [CalibrationMode.DEGREE.name] * len(arm.names)
calib_mode = [MotorNormMode.DEGREE.name] * len(arm.names)
# TODO(rcadene): make type of joints (DEGREE or LINEAR) configurable from yaml?
if robot_type in ["aloha"] and "gripper" in arm.names:
# Joints with linear motions (like gripper of Aloha) are expressed in nominal range of [0, 100]
calib_idx = arm.names.index("gripper")
calib_mode[calib_idx] = CalibrationMode.LINEAR.name
calib_mode[calib_idx] = MotorNormMode.LINEAR.name
calib_data = {
"homing_offset": homing_offset.tolist(),

View File

@ -1,895 +0,0 @@
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import enum
import logging
import math
import time
import traceback
from copy import deepcopy
import numpy as np
import tqdm
from lerobot.common.errors import DeviceAlreadyConnectedError, DeviceNotConnectedError
from lerobot.common.utils.utils import capture_timestamp_utc
PROTOCOL_VERSION = 2.0
BAUDRATE = 1_000_000
TIMEOUT_MS = 1000
MAX_ID_RANGE = 252
# The following bounds define the lower and upper joints range (after calibration).
# For joints in degree (i.e. revolute joints), their nominal range is [-180, 180] degrees
# which corresponds to a half rotation on the left and half rotation on the right.
# Some joints might require higher range, so we allow up to [-270, 270] degrees until
# an error is raised.
LOWER_BOUND_DEGREE = -270
UPPER_BOUND_DEGREE = 270
# For joints in percentage (i.e. joints that move linearly like the prismatic joint of a gripper),
# their nominal range is [0, 100] %. For instance, for Aloha gripper, 0% is fully
# closed, and 100% is fully open. To account for slight calibration issue, we allow up to
# [-10, 110] until an error is raised.
LOWER_BOUND_LINEAR = -10
UPPER_BOUND_LINEAR = 110
HALF_TURN_DEGREE = 180
# https://emanual.robotis.com/docs/en/dxl/x/xl330-m077
# https://emanual.robotis.com/docs/en/dxl/x/xl330-m288
# https://emanual.robotis.com/docs/en/dxl/x/xl430-w250
# https://emanual.robotis.com/docs/en/dxl/x/xm430-w350
# https://emanual.robotis.com/docs/en/dxl/x/xm540-w270
# https://emanual.robotis.com/docs/en/dxl/x/xc430-w150
# data_name: (address, size_byte)
X_SERIES_CONTROL_TABLE = {
"Model_Number": (0, 2),
"Model_Information": (2, 4),
"Firmware_Version": (6, 1),
"ID": (7, 1),
"Baud_Rate": (8, 1),
"Return_Delay_Time": (9, 1),
"Drive_Mode": (10, 1),
"Operating_Mode": (11, 1),
"Secondary_ID": (12, 1),
"Protocol_Type": (13, 1),
"Homing_Offset": (20, 4),
"Moving_Threshold": (24, 4),
"Temperature_Limit": (31, 1),
"Max_Voltage_Limit": (32, 2),
"Min_Voltage_Limit": (34, 2),
"PWM_Limit": (36, 2),
"Current_Limit": (38, 2),
"Acceleration_Limit": (40, 4),
"Velocity_Limit": (44, 4),
"Max_Position_Limit": (48, 4),
"Min_Position_Limit": (52, 4),
"Shutdown": (63, 1),
"Torque_Enable": (64, 1),
"LED": (65, 1),
"Status_Return_Level": (68, 1),
"Registered_Instruction": (69, 1),
"Hardware_Error_Status": (70, 1),
"Velocity_I_Gain": (76, 2),
"Velocity_P_Gain": (78, 2),
"Position_D_Gain": (80, 2),
"Position_I_Gain": (82, 2),
"Position_P_Gain": (84, 2),
"Feedforward_2nd_Gain": (88, 2),
"Feedforward_1st_Gain": (90, 2),
"Bus_Watchdog": (98, 1),
"Goal_PWM": (100, 2),
"Goal_Current": (102, 2),
"Goal_Velocity": (104, 4),
"Profile_Acceleration": (108, 4),
"Profile_Velocity": (112, 4),
"Goal_Position": (116, 4),
"Realtime_Tick": (120, 2),
"Moving": (122, 1),
"Moving_Status": (123, 1),
"Present_PWM": (124, 2),
"Present_Current": (126, 2),
"Present_Velocity": (128, 4),
"Present_Position": (132, 4),
"Velocity_Trajectory": (136, 4),
"Position_Trajectory": (140, 4),
"Present_Input_Voltage": (144, 2),
"Present_Temperature": (146, 1),
}
X_SERIES_BAUDRATE_TABLE = {
0: 9_600,
1: 57_600,
2: 115_200,
3: 1_000_000,
4: 2_000_000,
5: 3_000_000,
6: 4_000_000,
}
CALIBRATION_REQUIRED = ["Goal_Position", "Present_Position"]
CONVERT_UINT32_TO_INT32_REQUIRED = ["Goal_Position", "Present_Position"]
MODEL_CONTROL_TABLE = {
"x_series": X_SERIES_CONTROL_TABLE,
"xl330-m077": X_SERIES_CONTROL_TABLE,
"xl330-m288": X_SERIES_CONTROL_TABLE,
"xl430-w250": X_SERIES_CONTROL_TABLE,
"xm430-w350": X_SERIES_CONTROL_TABLE,
"xm540-w270": X_SERIES_CONTROL_TABLE,
"xc430-w150": X_SERIES_CONTROL_TABLE,
}
MODEL_RESOLUTION = {
"x_series": 4096,
"xl330-m077": 4096,
"xl330-m288": 4096,
"xl430-w250": 4096,
"xm430-w350": 4096,
"xm540-w270": 4096,
"xc430-w150": 4096,
}
MODEL_BAUDRATE_TABLE = {
"x_series": X_SERIES_BAUDRATE_TABLE,
"xl330-m077": X_SERIES_BAUDRATE_TABLE,
"xl330-m288": X_SERIES_BAUDRATE_TABLE,
"xl430-w250": X_SERIES_BAUDRATE_TABLE,
"xm430-w350": X_SERIES_BAUDRATE_TABLE,
"xm540-w270": X_SERIES_BAUDRATE_TABLE,
"xc430-w150": X_SERIES_BAUDRATE_TABLE,
}
NUM_READ_RETRY = 10
NUM_WRITE_RETRY = 10
def convert_degrees_to_steps(degrees: float | np.ndarray, models: str | list[str]) -> np.ndarray:
"""This function converts the degree range to the step range for indicating motors rotation.
It assumes a motor achieves a full rotation by going from -180 degree position to +180.
The motor resolution (e.g. 4096) corresponds to the number of steps needed to achieve a full rotation.
"""
resolutions = [MODEL_RESOLUTION[model] for model in models]
steps = degrees / 180 * np.array(resolutions) / 2
steps = steps.astype(int)
return steps
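A quick numeric check of `convert_degrees_to_steps` as defined above, using the x-series resolution of 4096:

```python
import numpy as np

degrees = np.array([90.0, -180.0])
resolution = 4096
steps = (degrees / 180 * resolution / 2).astype(int)
print(steps)  # [ 1024 -2048]
```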
def convert_to_bytes(value, bytes, mock=False):
if mock:
return value
import dynamixel_sdk as dxl
# Note: No need to convert back into unsigned int, since this byte preprocessing
# already handles it for us.
if bytes == 1:
data = [
dxl.DXL_LOBYTE(dxl.DXL_LOWORD(value)),
]
elif bytes == 2:
data = [
dxl.DXL_LOBYTE(dxl.DXL_LOWORD(value)),
dxl.DXL_HIBYTE(dxl.DXL_LOWORD(value)),
]
elif bytes == 4:
data = [
dxl.DXL_LOBYTE(dxl.DXL_LOWORD(value)),
dxl.DXL_HIBYTE(dxl.DXL_LOWORD(value)),
dxl.DXL_LOBYTE(dxl.DXL_HIWORD(value)),
dxl.DXL_HIBYTE(dxl.DXL_HIWORD(value)),
]
else:
raise NotImplementedError(
f"Value of the number of bytes to be sent is expected to be in [1, 2, 4], but "
f"{bytes} is provided instead."
)
return data
def get_group_sync_key(data_name, motor_names):
group_key = f"{data_name}_" + "_".join(motor_names)
return group_key
def get_result_name(fn_name, data_name, motor_names):
group_key = get_group_sync_key(data_name, motor_names)
rslt_name = f"{fn_name}_{group_key}"
return rslt_name
def get_queue_name(fn_name, data_name, motor_names):
group_key = get_group_sync_key(data_name, motor_names)
queue_name = f"{fn_name}_{group_key}"
return queue_name
def get_log_name(var_name, fn_name, data_name, motor_names):
group_key = get_group_sync_key(data_name, motor_names)
log_name = f"{var_name}_{fn_name}_{group_key}"
return log_name
def assert_same_address(model_ctrl_table, motor_models, data_name):
all_addr = []
all_bytes = []
for model in motor_models:
addr, bytes = model_ctrl_table[model][data_name]
all_addr.append(addr)
all_bytes.append(bytes)
if len(set(all_addr)) != 1:
raise NotImplementedError(
f"At least two motor models use a different address for `data_name`='{data_name}' ({list(zip(motor_models, all_addr, strict=False))}). Contact a LeRobot maintainer."
)
if len(set(all_bytes)) != 1:
raise NotImplementedError(
f"At least two motor models use a different bytes representation for `data_name`='{data_name}' ({list(zip(motor_models, all_bytes, strict=False))}). Contact a LeRobot maintainer."
)
class TorqueMode(enum.Enum):
ENABLED = 1
DISABLED = 0
class DriveMode(enum.Enum):
NON_INVERTED = 0
INVERTED = 1
class CalibrationMode(enum.Enum):
# Joints with rotational motions are expressed in degrees in nominal range of [-180, 180]
DEGREE = 0
# Joints with linear motions (like gripper of Aloha) are expressed in nominal range of [0, 100]
LINEAR = 1
class JointOutOfRangeError(Exception):
def __init__(self, message="Joint is out of range"):
self.message = message
super().__init__(self.message)
class DynamixelMotorsBus:
"""
The DynamixelMotorsBus class allows efficient reading from and writing to the attached motors. It relies on
the python dynamixel sdk to communicate with the motors. For more info, see the [Dynamixel SDK Documentation](https://emanual.robotis.com/docs/en/software/dynamixel/dynamixel_sdk/sample_code/python_read_write_protocol_2_0/#python-read-write-protocol-20).
A DynamixelMotorsBus instance requires a port (e.g. `DynamixelMotorsBus(port="/dev/tty.usbmodem575E0031751"`)).
To find the port, you can run our utility script:
```bash
python lerobot/scripts/find_motors_bus_port.py
>>> Finding all available ports for the MotorBus.
>>> ['/dev/tty.usbmodem575E0032081', '/dev/tty.usbmodem575E0031751']
>>> Remove the usb cable from your DynamixelMotorsBus and press Enter when done.
>>> The port of this DynamixelMotorsBus is /dev/tty.usbmodem575E0031751.
>>> Reconnect the usb cable.
```
Example of usage for 1 motor connected to the bus:
```python
motor_name = "gripper"
motor_index = 6
motor_model = "xl330-m288"
motors_bus = DynamixelMotorsBus(
port="/dev/tty.usbmodem575E0031751",
motors={motor_name: (motor_index, motor_model)},
)
motors_bus.connect()
position = motors_bus.read("Present_Position")
# move from a few motor steps as an example
few_steps = 30
motors_bus.write("Goal_Position", position + few_steps)
# when done, consider disconnecting
motors_bus.disconnect()
```
"""
def __init__(
self,
port: str,
motors: dict[str, tuple[int, str]],
mock: bool = False,
):
self.port = port
self.motors = motors
self.mock = mock
self.model_ctrl_table = deepcopy(MODEL_CONTROL_TABLE)
self.model_resolution = deepcopy(MODEL_RESOLUTION)
self.port_handler = None
self.packet_handler = None
self.calibration = None
self.is_connected = False
self.group_readers = {}
self.group_writers = {}
self.logs = {}
def connect(self):
if self.is_connected:
raise DeviceAlreadyConnectedError(
f"DynamixelMotorsBus({self.port}) is already connected. Do not call `motors_bus.connect()` twice."
)
if self.mock:
import tests.mock_dynamixel_sdk as dxl
else:
import dynamixel_sdk as dxl
self.port_handler = dxl.PortHandler(self.port)
self.packet_handler = dxl.PacketHandler(PROTOCOL_VERSION)
try:
if not self.port_handler.openPort():
raise OSError(f"Failed to open port '{self.port}'.")
except Exception:
traceback.print_exc()
print(
"\nTry running `python lerobot/scripts/find_motors_bus_port.py` to make sure you are using the correct port.\n"
)
raise
# Allow to read and write
self.is_connected = True
self.port_handler.setPacketTimeoutMillis(TIMEOUT_MS)
def reconnect(self):
if self.mock:
import tests.mock_dynamixel_sdk as dxl
else:
import dynamixel_sdk as dxl
self.port_handler = dxl.PortHandler(self.port)
self.packet_handler = dxl.PacketHandler(PROTOCOL_VERSION)
if not self.port_handler.openPort():
raise OSError(f"Failed to open port '{self.port}'.")
self.is_connected = True
def are_motors_configured(self):
# Only check the motor indices and not baudrate, since if the motor baudrates are incorrect,
# a ConnectionError will be raised anyway.
try:
return (self.motor_indices == self.read("ID")).all()
except ConnectionError as e:
print(e)
return False
def find_motor_indices(self, possible_ids=None, num_retry=2):
if possible_ids is None:
possible_ids = range(MAX_ID_RANGE)
indices = []
for idx in tqdm.tqdm(possible_ids):
try:
present_idx = self.read_with_motor_ids(self.motor_models, [idx], "ID", num_retry=num_retry)[0]
except ConnectionError:
continue
if idx != present_idx:
# sanity check
raise OSError(
"Motor index used to communicate through the bus is not the same as the one present in the motor memory. The motor memory might be damaged."
)
indices.append(idx)
return indices
def set_bus_baudrate(self, baudrate):
present_bus_baudrate = self.port_handler.getBaudRate()
if present_bus_baudrate != baudrate:
print(f"Setting bus baud rate to {baudrate}. Previously {present_bus_baudrate}.")
self.port_handler.setBaudRate(baudrate)
if self.port_handler.getBaudRate() != baudrate:
raise OSError("Failed to write bus baud rate.")
@property
def motor_names(self) -> list[str]:
return list(self.motors.keys())
@property
def motor_models(self) -> list[str]:
return [model for _, model in self.motors.values()]
@property
def motor_indices(self) -> list[int]:
return [idx for idx, _ in self.motors.values()]
def set_calibration(self, calibration: dict[str, list]):
self.calibration = calibration
def apply_calibration_autocorrect(self, values: np.ndarray | list, motor_names: list[str] | None):
"""This function applies the calibration, automatically detects out of range errors for motors values and attempts to correct.
For more info, see docstring of `apply_calibration` and `autocorrect_calibration`.
"""
try:
values = self.apply_calibration(values, motor_names)
except JointOutOfRangeError as e:
print(e)
self.autocorrect_calibration(values, motor_names)
values = self.apply_calibration(values, motor_names)
return values
def apply_calibration(self, values: np.ndarray | list, motor_names: list[str] | None):
"""Convert from unsigned int32 joint position range [0, 2**32[ to the universal float32 nominal degree range ]-180.0, 180.0[ with
a "zero position" at 0 degree.
Note: We say "nominal degree range" since the motors can take values outside this range. For instance, 190 degrees, if the motor
rotates more than half a turn from the zero position. However, most motors can't rotate more than 180 degrees and will stay in this range.
Joint values are originally in [0, 2**32[ (unsigned int32). Each motor is expected to complete a full rotation
when given a goal position that is + or - its resolution. For instance, the dynamixel xl330-m077 has a resolution of 4096, and
at any position in their original range, let's say the position 56734, they complete a full rotation clockwise by moving to 60830,
or anticlockwise by moving to 52638. The position in the original range is arbitrary and might change a lot between each motor.
To harmonize between motors of the same model, different robots, or even models of different brands, we propose to work
in the centered nominal degree range ]-180, 180[.
"""
if motor_names is None:
motor_names = self.motor_names
# Convert from unsigned int32 original range [0, 2**32] to signed float32 range
values = values.astype(np.float32)
for i, name in enumerate(motor_names):
calib_idx = self.calibration["motor_names"].index(name)
calib_mode = self.calibration["calib_mode"][calib_idx]
if CalibrationMode[calib_mode] == CalibrationMode.DEGREE:
drive_mode = self.calibration["drive_mode"][calib_idx]
homing_offset = self.calibration["homing_offset"][calib_idx]
_, model = self.motors[name]
resolution = self.model_resolution[model]
# Update direction of rotation of the motor to match between leader and follower.
# In fact, the motor of the leader for a given joint can be assembled in an
# opposite rotational direction compared to the motor of the follower on the same joint.
if drive_mode:
values[i] *= -1
# Convert from range [-2**31, 2**31] to
# nominal range [-resolution//2, resolution//2] (e.g. [-2048, 2048])
values[i] += homing_offset
# Convert from range [-resolution//2, resolution//2] to
# universal float32 centered degree range [-180, 180]
# (e.g. 2048 / (4096 // 2) * 180 = 180)
values[i] = values[i] / (resolution // 2) * HALF_TURN_DEGREE
if (values[i] < LOWER_BOUND_DEGREE) or (values[i] > UPPER_BOUND_DEGREE):
raise JointOutOfRangeError(
f"Wrong motor position range detected for {name}. "
f"Expected to be in nominal range of [-{HALF_TURN_DEGREE}, {HALF_TURN_DEGREE}] degrees (a full rotation), "
f"with a maximum range of [{LOWER_BOUND_DEGREE}, {UPPER_BOUND_DEGREE}] degrees to account for joints that can rotate a bit more, "
f"but present value is {values[i]} degree. "
"This might be due to a cable connection issue creating an artificial 360 degrees jump in motor values. "
"You need to recalibrate by running: `python lerobot/scripts/control_robot.py calibrate`"
)
elif CalibrationMode[calib_mode] == CalibrationMode.LINEAR:
start_pos = self.calibration["start_pos"][calib_idx]
end_pos = self.calibration["end_pos"][calib_idx]
# Rescale the present position to a nominal range [0, 100] %,
# useful for joints with linear motions like Aloha gripper
values[i] = (values[i] - start_pos) / (end_pos - start_pos) * 100
if (values[i] < LOWER_BOUND_LINEAR) or (values[i] > UPPER_BOUND_LINEAR):
raise JointOutOfRangeError(
f"Wrong motor position range detected for {name}. "
f"Expected to be in nominal range of [0, 100] % (a full linear translation), "
f"with a maximum range of [{LOWER_BOUND_LINEAR}, {UPPER_BOUND_LINEAR}] % to account for some imprecision during calibration, "
f"but present value is {values[i]} %. "
"This might be due to a cable connection issue creating an artificial jump in motor values. "
"You need to recalibrate by running: `python lerobot/scripts/control_robot.py calibrate`"
)
return values
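To make the DEGREE branch above concrete, a small worked example with a hypothetical homing offset and no drive-mode inversion (resolution 4096):

```python
HALF_TURN_DEGREE = 180
resolution = 4096

raw = 3072             # position read from the motor
homing_offset = -2048  # hypothetical calibration offset
homed = raw + homing_offset                            # 1024 steps from the zero pose
degrees = homed / (resolution // 2) * HALF_TURN_DEGREE
print(degrees)  # 90.0
```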
def autocorrect_calibration(self, values: np.ndarray | list, motor_names: list[str] | None):
"""This function automatically detects issues with values of motors after calibration, and correct for these issues.
Some motors might have values outside of expected maximum bounds after calibration.
For instance, for a joint in degree, its value can be outside [-270, 270] degrees, which is totally unexpected given
a nominal range of [-180, 180] degrees, which represents half a turn to the left or right starting from zero position.
Known issues:
#1: Motor value randomly shifts by a full turn, caused by hardware/connection errors.
#2: Motor internal homing offset is shifted by a full turn, caused by using default calibration (e.g. Aloha).
#3: Motor internal homing offset is shifted by less or more than a full turn, caused by using default calibration
or by human error during manual calibration.
Issues #1 and #2 can be solved by shifting the calibration homing offset by a full turn.
Issue #3 will be visually detected by the user and potentially caught by the safety feature `max_relative_target`,
which will slow down the motor and raise an error asking to recalibrate. Recalibrating manually will solve the issue.
Note: A full turn corresponds to 360 degrees but also to 4096 steps for a motor resolution of 4096.
"""
if motor_names is None:
motor_names = self.motor_names
# Convert from unsigned int32 original range [0, 2**32] to signed float32 range
values = values.astype(np.float32)
for i, name in enumerate(motor_names):
calib_idx = self.calibration["motor_names"].index(name)
calib_mode = self.calibration["calib_mode"][calib_idx]
if CalibrationMode[calib_mode] == CalibrationMode.DEGREE:
drive_mode = self.calibration["drive_mode"][calib_idx]
homing_offset = self.calibration["homing_offset"][calib_idx]
_, model = self.motors[name]
resolution = self.model_resolution[model]
# Update direction of rotation of the motor to match between leader and follower.
# In fact, the motor of the leader for a given joint can be assembled in an
# opposite rotational direction compared to the motor of the follower on the same joint.
if drive_mode:
values[i] *= -1
# Convert from initial range to range [-180, 180] degrees
calib_val = (values[i] + homing_offset) / (resolution // 2) * HALF_TURN_DEGREE
in_range = (calib_val > LOWER_BOUND_DEGREE) and (calib_val < UPPER_BOUND_DEGREE)
# Solve this inequality to find the factor to shift the range into [-180, 180] degrees
# values[i] = (values[i] + homing_offset + resolution * factor) / (resolution // 2) * HALF_TURN_DEGREE
# - HALF_TURN_DEGREE <= (values[i] + homing_offset + resolution * factor) / (resolution // 2) * HALF_TURN_DEGREE <= HALF_TURN_DEGREE
# (- (resolution // 2) - values[i] - homing_offset) / resolution <= factor <= ((resolution // 2) - values[i] - homing_offset) / resolution
low_factor = (-(resolution // 2) - values[i] - homing_offset) / resolution
upp_factor = ((resolution // 2) - values[i] - homing_offset) / resolution
elif CalibrationMode[calib_mode] == CalibrationMode.LINEAR:
start_pos = self.calibration["start_pos"][calib_idx]
end_pos = self.calibration["end_pos"][calib_idx]
# Convert from initial range to range [0, 100] in %
calib_val = (values[i] - start_pos) / (end_pos - start_pos) * 100
in_range = (calib_val > LOWER_BOUND_LINEAR) and (calib_val < UPPER_BOUND_LINEAR)
# Solve this inequality to find the factor to shift the range into [0, 100] %
# values[i] = (values[i] - start_pos + resolution * factor) / (end_pos + resolution * factor - start_pos - resolution * factor) * 100
# values[i] = (values[i] - start_pos + resolution * factor) / (end_pos - start_pos) * 100
# 0 <= (values[i] - start_pos + resolution * factor) / (end_pos - start_pos) * 100 <= 100
# (start_pos - values[i]) / resolution <= factor <= (end_pos - values[i]) / resolution
low_factor = (start_pos - values[i]) / resolution
upp_factor = (end_pos - values[i]) / resolution
if not in_range:
# Get first integer between the two bounds
if low_factor < upp_factor:
factor = math.ceil(low_factor)
if factor > upp_factor:
raise ValueError(f"No integer found between bounds [{low_factor=}, {upp_factor=}]")
else:
factor = math.ceil(upp_factor)
if factor > low_factor:
raise ValueError(f"No integer found between bounds [{low_factor=}, {upp_factor=}]")
if CalibrationMode[calib_mode] == CalibrationMode.DEGREE:
out_of_range_str = f"{LOWER_BOUND_DEGREE} < {calib_val} < {UPPER_BOUND_DEGREE} degrees"
in_range_str = f"{LOWER_BOUND_DEGREE} < {calib_val} < {UPPER_BOUND_DEGREE} degrees"
elif CalibrationMode[calib_mode] == CalibrationMode.LINEAR:
out_of_range_str = f"{LOWER_BOUND_LINEAR} < {calib_val} < {UPPER_BOUND_LINEAR} %"
in_range_str = f"{LOWER_BOUND_LINEAR} < {calib_val} < {UPPER_BOUND_LINEAR} %"
logging.warning(
f"Auto-correct calibration of motor '{name}' by shifting value by {abs(factor)} full turns, "
f"from '{out_of_range_str}' to '{in_range_str}'."
)
# A full turn corresponds to 360 degrees but also to 4096 steps for a motor resolution of 4096.
self.calibration["homing_offset"][calib_idx] += resolution * factor
def revert_calibration(self, values: np.ndarray | list, motor_names: list[str] | None):
"""Inverse of `apply_calibration`."""
if motor_names is None:
motor_names = self.motor_names
for i, name in enumerate(motor_names):
calib_idx = self.calibration["motor_names"].index(name)
calib_mode = self.calibration["calib_mode"][calib_idx]
if CalibrationMode[calib_mode] == CalibrationMode.DEGREE:
drive_mode = self.calibration["drive_mode"][calib_idx]
homing_offset = self.calibration["homing_offset"][calib_idx]
_, model = self.motors[name]
resolution = self.model_resolution[model]
# Convert from nominal 0-centered degree range [-180, 180] to
# 0-centered resolution range (e.g. [-2048, 2048] for resolution=4096)
values[i] = values[i] / HALF_TURN_DEGREE * (resolution // 2)
# Subtract the homing offsets to come back to actual motor range of values
# which can be arbitrary.
values[i] -= homing_offset
# Remove drive mode, which is the rotation direction of the motor, to come back to
# actual motor rotation direction which can be arbitrary.
if drive_mode:
values[i] *= -1
elif CalibrationMode[calib_mode] == CalibrationMode.LINEAR:
start_pos = self.calibration["start_pos"][calib_idx]
end_pos = self.calibration["end_pos"][calib_idx]
# Convert from nominal linear range of [0, 100] % to
# actual motor range of values which can be arbitrary.
values[i] = values[i] / 100 * (end_pos - start_pos) + start_pos
values = np.round(values).astype(np.int32)
return values
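And the reverse direction, matching `revert_calibration` above for the same hypothetical DEGREE-mode example (no inversion):

```python
HALF_TURN_DEGREE = 180
resolution = 4096
homing_offset = -2048

degrees = 90.0
steps = degrees / HALF_TURN_DEGREE * (resolution // 2)  # 1024.0
raw = steps - homing_offset                             # back to the raw motor value
print(int(round(raw)))  # 3072
```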
def read_with_motor_ids(self, motor_models, motor_ids, data_name, num_retry=NUM_READ_RETRY):
if self.mock:
import tests.mock_dynamixel_sdk as dxl
else:
import dynamixel_sdk as dxl
return_list = True
if not isinstance(motor_ids, list):
return_list = False
motor_ids = [motor_ids]
assert_same_address(self.model_ctrl_table, self.motor_models, data_name)
addr, bytes = self.model_ctrl_table[motor_models[0]][data_name]
group = dxl.GroupSyncRead(self.port_handler, self.packet_handler, addr, bytes)
for idx in motor_ids:
group.addParam(idx)
for _ in range(num_retry):
comm = group.txRxPacket()
if comm == dxl.COMM_SUCCESS:
break
if comm != dxl.COMM_SUCCESS:
raise ConnectionError(
f"Read failed due to communication error on port {self.port_handler.port_name} for indices {motor_ids}: "
f"{self.packet_handler.getTxRxResult(comm)}"
)
values = []
for idx in motor_ids:
value = group.getData(idx, addr, bytes)
values.append(value)
if return_list:
return values
else:
return values[0]
def read(self, data_name, motor_names: str | list[str] | None = None):
if not self.is_connected:
raise DeviceNotConnectedError(
f"DynamixelMotorsBus({self.port}) is not connected. You need to run `motors_bus.connect()`."
)
start_time = time.perf_counter()
if self.mock:
import tests.mock_dynamixel_sdk as dxl
else:
import dynamixel_sdk as dxl
if motor_names is None:
motor_names = self.motor_names
if isinstance(motor_names, str):
motor_names = [motor_names]
motor_ids = []
models = []
for name in motor_names:
motor_idx, model = self.motors[name]
motor_ids.append(motor_idx)
models.append(model)
assert_same_address(self.model_ctrl_table, models, data_name)
addr, bytes = self.model_ctrl_table[model][data_name]
group_key = get_group_sync_key(data_name, motor_names)
if data_name not in self.group_readers:
# create new group reader
self.group_readers[group_key] = dxl.GroupSyncRead(
self.port_handler, self.packet_handler, addr, bytes
)
for idx in motor_ids:
self.group_readers[group_key].addParam(idx)
for _ in range(NUM_READ_RETRY):
comm = self.group_readers[group_key].txRxPacket()
if comm == dxl.COMM_SUCCESS:
break
if comm != dxl.COMM_SUCCESS:
raise ConnectionError(
f"Read failed due to communication error on port {self.port} for group_key {group_key}: "
f"{self.packet_handler.getTxRxResult(comm)}"
)
values = []
for idx in motor_ids:
value = self.group_readers[group_key].getData(idx, addr, bytes)
values.append(value)
values = np.array(values)
# Convert to signed int to use range [-2048, 2048] for our motor positions.
if data_name in CONVERT_UINT32_TO_INT32_REQUIRED:
values = values.astype(np.int32)
if data_name in CALIBRATION_REQUIRED and self.calibration is not None:
values = self.apply_calibration_autocorrect(values, motor_names)
# log the number of seconds it took to read the data from the motors
delta_ts_name = get_log_name("delta_timestamp_s", "read", data_name, motor_names)
self.logs[delta_ts_name] = time.perf_counter() - start_time
# log the utc time at which the data was received
ts_utc_name = get_log_name("timestamp_utc", "read", data_name, motor_names)
self.logs[ts_utc_name] = capture_timestamp_utc()
return values
def write_with_motor_ids(self, motor_models, motor_ids, data_name, values, num_retry=NUM_WRITE_RETRY):
if self.mock:
import tests.mock_dynamixel_sdk as dxl
else:
import dynamixel_sdk as dxl
if not isinstance(motor_ids, list):
motor_ids = [motor_ids]
if not isinstance(values, list):
values = [values]
assert_same_address(self.model_ctrl_table, motor_models, data_name)
addr, bytes = self.model_ctrl_table[motor_models[0]][data_name]
group = dxl.GroupSyncWrite(self.port_handler, self.packet_handler, addr, bytes)
for idx, value in zip(motor_ids, values, strict=True):
data = convert_to_bytes(value, bytes, self.mock)
group.addParam(idx, data)
for _ in range(num_retry):
comm = group.txPacket()
if comm == dxl.COMM_SUCCESS:
break
if comm != dxl.COMM_SUCCESS:
raise ConnectionError(
f"Write failed due to communication error on port {self.port_handler.port_name} for indices {motor_ids}: "
f"{self.packet_handler.getTxRxResult(comm)}"
)
def write(self, data_name, values: int | float | np.ndarray, motor_names: str | list[str] | None = None):
if not self.is_connected:
raise DeviceNotConnectedError(
f"DynamixelMotorsBus({self.port}) is not connected. You need to run `motors_bus.connect()`."
)
start_time = time.perf_counter()
if self.mock:
import tests.mock_dynamixel_sdk as dxl
else:
import dynamixel_sdk as dxl
if motor_names is None:
motor_names = self.motor_names
if isinstance(motor_names, str):
motor_names = [motor_names]
if isinstance(values, (int, float, np.integer)):
values = [int(values)] * len(motor_names)
values = np.array(values)
motor_ids = []
models = []
for name in motor_names:
motor_idx, model = self.motors[name]
motor_ids.append(motor_idx)
models.append(model)
if data_name in CALIBRATION_REQUIRED and self.calibration is not None:
values = self.revert_calibration(values, motor_names)
values = values.tolist()
assert_same_address(self.model_ctrl_table, models, data_name)
addr, bytes = self.model_ctrl_table[model][data_name]
group_key = get_group_sync_key(data_name, motor_names)
init_group = data_name not in self.group_readers
if init_group:
self.group_writers[group_key] = dxl.GroupSyncWrite(
self.port_handler, self.packet_handler, addr, bytes
)
for idx, value in zip(motor_ids, values, strict=True):
data = convert_to_bytes(value, bytes, self.mock)
if init_group:
self.group_writers[group_key].addParam(idx, data)
else:
self.group_writers[group_key].changeParam(idx, data)
comm = self.group_writers[group_key].txPacket()
if comm != dxl.COMM_SUCCESS:
raise ConnectionError(
f"Write failed due to communication error on port {self.port} for group_key {group_key}: "
f"{self.packet_handler.getTxRxResult(comm)}"
)
# log the number of seconds it took to write the data to the motors
delta_ts_name = get_log_name("delta_timestamp_s", "write", data_name, motor_names)
self.logs[delta_ts_name] = time.perf_counter() - start_time
# TODO(rcadene): should we log the time before sending the write command?
# log the utc time when the write has been completed
ts_utc_name = get_log_name("timestamp_utc", "write", data_name, motor_names)
self.logs[ts_utc_name] = capture_timestamp_utc()
def disconnect(self):
if not self.is_connected:
raise DeviceNotConnectedError(
f"DynamixelMotorsBus({self.port}) is not connected. Try running `motors_bus.connect()` first."
)
if self.port_handler is not None:
self.port_handler.closePort()
self.port_handler = None
self.packet_handler = None
self.group_readers = {}
self.group_writers = {}
self.is_connected = False
def __del__(self):
if getattr(self, "is_connected", False):
self.disconnect()
def set_operating_mode(arm: DynamixelMotorsBus):
if (arm.read("Torque_Enable") != TorqueMode.DISABLED.value).any():
raise ValueError("To run set robot preset, the torque must be disabled on all motors.")
# Use 'extended position mode' for all motors except the gripper, because in joint mode the servos can't
# rotate more than 360 degrees (from 0 to 4095). Mistakes can happen while assembling the arm, and you
# could end up with a servo at position 0 or 4095 at a crucial point. See
# https://emanual.robotis.com/docs/en/dxl/x/x_series/#operating-mode11
all_motors_except_gripper = [name for name in arm.motor_names if name != "gripper"]
if len(all_motors_except_gripper) > 0:
# 4 corresponds to Extended Position on Koch motors
arm.write("Operating_Mode", 4, all_motors_except_gripper)
# Use 'current-based position control' for the gripper so that it is limited by its current.
# For the follower gripper, this means it can grasp an object without forcing too much, even though
# its goal position is a complete grasp (both gripper fingers are commanded to close and touch).
# For the leader gripper, this means we can use it as a physical trigger: we can push it with a finger
# to make it move, and it will move back to its original target position when we release the force.
# 5 corresponds to Current Controlled Position on Koch gripper motors "xl330-m077, xl330-m288"
arm.write("Operating_Mode", 5, "gripper")

View File

@ -80,7 +80,7 @@ MODEL_RESOLUTION = {
# {model: model_number}
# https://emanual.robotis.com/docs/en/dxl/x/{MODEL}/#control-table-of-eeprom-area
MODELS_TABLE = {
MODEL_NUMBER = {
"xl330-m077": 1190,
"xl330-m288": 1200,
"xl430-w250": 1060,
@ -89,6 +89,17 @@ MODELS_TABLE = {
"xc430-w150": 1070,
}
# {model: available_operating_modes}
# https://emanual.robotis.com/docs/en/dxl/x/{MODEL}/#operating-mode11
MODEL_OPERATING_MODES = {
"xl330-m077": [0, 1, 3, 4, 5, 16],
"xl330-m288": [0, 1, 3, 4, 5, 16],
"xl430-w250": [1, 3, 4, 16],
"xm430-w350": [0, 1, 3, 4, 5, 16],
"xm540-w270": [0, 1, 3, 4, 5, 16],
"xc430-w150": [1, 3, 4, 16],
}
MODEL_CONTROL_TABLE = {
"x_series": X_SERIES_CONTROL_TABLE,
"xl330-m077": X_SERIES_CONTROL_TABLE,
@ -108,3 +119,23 @@ MODEL_BAUDRATE_TABLE = {
"xm540-w270": X_SERIES_BAUDRATE_TABLE,
"xc430-w150": X_SERIES_BAUDRATE_TABLE,
}
AVAILABLE_BAUDRATES = [
9_600,
19_200,
38_400,
57_600,
115_200,
230_400,
460_800,
500_000,
576_000,
921_600,
1_000_000,
1_152_000,
2_000_000,
2_500_000,
3_000_000,
3_500_000,
4_000_000,
]

View File

@ -1,3 +1,2 @@
from .feetech import FeetechMotorsBus
from .feetech_calibration import apply_feetech_offsets_from_calibration, run_full_arm_calibration
from .feetech import DriveMode, FeetechMotorsBus, OperatingMode, TorqueMode
from .tables import *

View File

@ -12,21 +12,66 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from copy import deepcopy
from enum import Enum
from pprint import pformat
from ..motors_bus import MotorsBus
from lerobot.common.utils.encoding_utils import decode_sign_magnitude, encode_sign_magnitude
from ..motors_bus import Motor, MotorCalibration, MotorsBus, NameOrID, Value
from .tables import (
CALIBRATION_REQUIRED,
AVAILABLE_BAUDRATES,
ENCODINGS,
MODEL_BAUDRATE_TABLE,
MODEL_CONTROL_TABLE,
MODEL_NUMBER,
MODEL_RESOLUTION,
NORMALIZATION_REQUIRED,
)
PROTOCOL_VERSION = 0
BAUDRATE = 1_000_000
DEFAULT_TIMEOUT_MS = 1000
MAX_ID_RANGE = 252
logger = logging.getLogger(__name__)
class OperatingMode(Enum):
# position servo mode
POSITION = 0
# Constant speed mode, controlled by parameter 0x2e; the highest bit (15) is the direction bit
VELOCITY = 1
# PWM open-loop speed regulation mode; the running time is controlled by parameter 0x2c, with bit 11 as
# the direction bit
PWM = 2
# Step servo mode: the number of steps to run is given by parameter 0x2a; the highest bit (15) is the
# direction bit
STEP = 3
class DriveMode(Enum):
NON_INVERTED = 0
INVERTED = 1
class TorqueMode(Enum):
ENABLED = 1
DISABLED = 0
def patch_setPacketTimeout(self, packet_length): # noqa: N802
"""
HACK: This patches the PortHandler behavior to set the correct packet timeouts.
It fixes https://gitee.com/ftservo/SCServoSDK/issues/IBY2S6
The bug is fixed on the official Feetech SDK repo (https://gitee.com/ftservo/FTServo_Python)
but because that version is not published on PyPI, we rely on the (unofficial) one that is, which needs
patching.
"""
self.packet_start_time = self.getCurrentTime()
self.packet_timeout = (self.tx_time_per_byte * packet_length) + (self.tx_time_per_byte * 3.0) + 50
class FeetechMotorsBus(MotorsBus):
@ -35,43 +80,76 @@ class FeetechMotorsBus(MotorsBus):
python feetech sdk to communicate with the motors, which is itself based on the dynamixel sdk.
"""
model_ctrl_table = deepcopy(MODEL_CONTROL_TABLE)
model_resolution_table = deepcopy(MODEL_RESOLUTION)
model_baudrate_table = deepcopy(MODEL_BAUDRATE_TABLE)
calibration_required = deepcopy(CALIBRATION_REQUIRED)
available_baudrates = deepcopy(AVAILABLE_BAUDRATES)
default_timeout = DEFAULT_TIMEOUT_MS
model_baudrate_table = deepcopy(MODEL_BAUDRATE_TABLE)
model_ctrl_table = deepcopy(MODEL_CONTROL_TABLE)
model_number_table = deepcopy(MODEL_NUMBER)
model_resolution_table = deepcopy(MODEL_RESOLUTION)
normalization_required = deepcopy(NORMALIZATION_REQUIRED)
# Feetech specific
encodings = deepcopy(ENCODINGS)
def __init__(
self,
port: str,
motors: dict[str, tuple[int, str]],
motors: dict[str, Motor],
calibration: dict[str, MotorCalibration] | None = None,
):
super().__init__(port, motors)
super().__init__(port, motors, calibration)
import scservo_sdk as scs
self.port_handler = scs.PortHandler(self.port)
# HACK: monkeypatch
self.port_handler.setPacketTimeout = patch_setPacketTimeout.__get__(
self.port_handler, scs.PortHandler
)
self.packet_handler = scs.PacketHandler(PROTOCOL_VERSION)
self.reader = scs.GroupSyncRead(self.port_handler, self.packet_handler, 0, 0)
self.writer = scs.GroupSyncWrite(self.port_handler, self.packet_handler, 0, 0)
self.sync_reader = scs.GroupSyncRead(self.port_handler, self.packet_handler, 0, 0)
self.sync_writer = scs.GroupSyncWrite(self.port_handler, self.packet_handler, 0, 0)
self._comm_success = scs.COMM_SUCCESS
self._no_error = 0x00
def broadcast_ping(self, num_retry: int | None = None):
raise NotImplementedError # TODO
def configure_motors(self) -> None:
# By default, Feetech motors have a 500µs response delay (corresponding to a value of 250 at the
# 'Return_Delay_Time' address). We ensure this is reduced to the minimum of 2µs (value of 0).
for id_ in self.ids:
self.write("Return_Delay_Time", id_, 0)
def calibrate_values(self, ids_values: dict[int, int]) -> dict[int, float]:
# TODO
return ids_values
def _get_half_turn_homings(self, positions: dict[NameOrID, Value]) -> dict[NameOrID, Value]:
"""
On Feetech Motors:
Present_Position = Actual_Position - Homing_Offset
"""
half_turn_homings = {}
for motor, pos in positions.items():
model = self._get_motor_model(motor)
max_res = self.model_resolution_table[model] - 1
half_turn_homings[motor] = pos - int(max_res / 2)
def uncalibrate_values(self, ids_values: dict[int, float]) -> dict[int, int]:
# TODO
return ids_values
return half_turn_homings
def _is_comm_success(self, comm: int) -> bool:
import scservo_sdk as scs
def _disable_torque(self, motors: list[NameOrID]) -> None:
for motor in motors:
self.write("Torque_Enable", motor, TorqueMode.DISABLED.value)
self.write("Lock", motor, 0)
return comm == scs.COMM_SUCCESS
def _enable_torque(self, motors: list[NameOrID]) -> None:
for motor in motors:
self.write("Torque_Enable", motor, TorqueMode.ENABLED.value)
self.write("Lock", motor, 1)
def _encode_value(self, value: int, data_name: str | None = None, n_bytes: int | None = None) -> int:
sign_bit = self.encodings.get(data_name)
return encode_sign_magnitude(value, sign_bit) if sign_bit is not None else value
def _decode_value(self, value: int, data_name: str | None = None, n_bytes: int | None = None) -> int:
sign_bit = self.encodings.get(data_name)
return decode_sign_magnitude(value, sign_bit) if sign_bit is not None else value
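The `encodings` table maps register names to the bit that carries the sign. A minimal sketch of the sign-magnitude convention, assuming the semantics of `encode_sign_magnitude`/`decode_sign_magnitude` from `lerobot.common.utils.encoding_utils`:

```python
def encode_sm(value: int, sign_bit: int) -> int:
    # Negative values set the sign bit and keep the magnitude in the lower bits.
    return (abs(value) | (1 << sign_bit)) if value < 0 else value

def decode_sm(encoded: int, sign_bit: int) -> int:
    magnitude = encoded & ~(1 << sign_bit)
    return -magnitude if encoded & (1 << sign_bit) else magnitude

# 'Homing_Offset' uses bit 11 as its sign bit on these motors:
assert encode_sm(-547, 11) == 2595
assert decode_sm(2595, 11) == -547
```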
@staticmethod
def split_int_bytes(value: int, n_bytes: int) -> list[int]:
def _split_int_to_bytes(value: int, n_bytes: int) -> list[int]:
# Validate input
if value < 0:
raise ValueError(f"Negative values are not allowed: {value}")
@ -85,17 +163,10 @@ class FeetechMotorsBus(MotorsBus):
import scservo_sdk as scs
# Note: No need to convert back into unsigned int, since this byte preprocessing
# already handles it for us.
if n_bytes == 1:
data = [
scs.SCS_LOBYTE(scs.SCS_LOWORD(value)),
]
data = [value]
elif n_bytes == 2:
data = [
scs.SCS_LOBYTE(scs.SCS_LOWORD(value)),
scs.SCS_HIBYTE(scs.SCS_LOWORD(value)),
]
data = [scs.SCS_LOBYTE(value), scs.SCS_HIBYTE(value)]
elif n_bytes == 4:
data = [
scs.SCS_LOBYTE(scs.SCS_LOWORD(value)),
@ -104,3 +175,106 @@ class FeetechMotorsBus(MotorsBus):
scs.SCS_HIBYTE(scs.SCS_HIWORD(value)),
]
return data
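For reference, the byte splitting above is plain little-endian; a dependency-free sketch equivalent to the `SCS_LOBYTE`/`SCS_HIBYTE`/`SCS_LOWORD`/`SCS_HIWORD` macros:

```python
def split_le(value: int, n_bytes: int) -> list[int]:
    # Least-significant byte first, as expected by the sync-write packets.
    return [(value >> (8 * i)) & 0xFF for i in range(n_bytes)]

assert split_le(0x1234, 2) == [0x34, 0x12]
assert split_le(0x00012345, 4) == [0x45, 0x23, 0x01, 0x00]
```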
def _broadcast_ping(self) -> tuple[dict[int, int], int]:
import scservo_sdk as scs
data_list = {}
status_length = 6
rx_length = 0
wait_length = status_length * scs.MAX_ID
txpacket = [0] * 6
tx_time_per_byte = (1000.0 / self.port_handler.getBaudRate()) * 10.0
txpacket[scs.PKT_ID] = scs.BROADCAST_ID
txpacket[scs.PKT_LENGTH] = 2
txpacket[scs.PKT_INSTRUCTION] = scs.INST_PING
result = self.packet_handler.txPacket(self.port_handler, txpacket)
if result != scs.COMM_SUCCESS:
self.port_handler.is_using = False
return data_list, result
# set rx timeout
self.port_handler.setPacketTimeoutMillis((wait_length * tx_time_per_byte) + (3.0 * scs.MAX_ID) + 16.0)
rxpacket = []
while True:
rxpacket += self.port_handler.readPort(wait_length - rx_length)
rx_length = len(rxpacket)
if self.port_handler.isPacketTimeout(): # or rx_length >= wait_length
break
self.port_handler.is_using = False
if rx_length == 0:
return data_list, scs.COMM_RX_TIMEOUT
while True:
if rx_length < status_length:
return data_list, scs.COMM_RX_CORRUPT
# find packet header
for id_ in range(0, (rx_length - 1)):
if (rxpacket[id_] == 0xFF) and (rxpacket[id_ + 1] == 0xFF):
break
if id_ == 0: # found at the beginning of the packet
# calculate checksum
checksum = 0
for id_ in range(2, status_length - 1): # except header & checksum
checksum += rxpacket[id_]
checksum = scs.SCS_LOBYTE(~checksum)
if rxpacket[status_length - 1] == checksum:
result = scs.COMM_SUCCESS
data_list[rxpacket[scs.PKT_ID]] = rxpacket[scs.PKT_ERROR]
del rxpacket[0:status_length]
rx_length = rx_length - status_length
if rx_length == 0:
return data_list, result
else:
result = scs.COMM_RX_CORRUPT
# remove header (0xFF 0xFF)
del rxpacket[0:2]
rx_length = rx_length - 2
else:
# remove unnecessary packets
del rxpacket[0:id_]
rx_length = rx_length - id_
def broadcast_ping(self, num_retry: int = 0, raise_on_error: bool = False) -> dict[int, int] | None:
for n_try in range(1 + num_retry):
ids_status, comm = self._broadcast_ping()
if self._is_comm_success(comm):
break
logger.debug(f"Broadcast failed on port '{self.port}' ({n_try=})")
logger.debug(self.packet_handler.getRxPacketError(comm))
if not self._is_comm_success(comm):
if raise_on_error:
raise ConnectionError(self.packet_handler.getRxPacketError(comm))
return ids_status if ids_status else None
ids_errors = {id_: status for id_, status in ids_status.items() if self._is_error(status)}
if ids_errors:
display_dict = {id_: self.packet_handler.getRxPacketError(err) for id_, err in ids_errors.items()}
logger.error(f"Some motors found returned an error status:\n{pformat(display_dict, indent=4)}")
comm, model_numbers = self._sync_read(
"Model_Number", list(ids_status), model="scs_series", num_retry=num_retry
)
if not self._is_comm_success(comm):
if raise_on_error:
raise ConnectionError(self.packet_handler.getRxPacketError(comm))
return model_numbers if model_numbers else None
return model_numbers

View File

@ -1,314 +0,0 @@
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
from ..motors_bus import (
CalibrationMode,
MotorsBus,
TorqueMode,
)
from .feetech import FeetechMotorsBus
from .tables import MODEL_RESOLUTION
URL_TEMPLATE = (
"https://raw.githubusercontent.com/huggingface/lerobot/main/media/{robot}/{arm}_{position}.webp"
)
def disable_torque(arm: MotorsBus):
if (arm.read("Torque_Enable") != TorqueMode.DISABLED.value).any():
raise ValueError("To run calibration, the torque must be disabled on all motors.")
def get_calibration_modes(arm: MotorsBus):
"""Returns calibration modes for each motor (DEGREE for rotational, LINEAR for gripper)."""
return [
CalibrationMode.LINEAR.name if name == "gripper" else CalibrationMode.DEGREE.name
for name in arm.names
]
def reset_offset(motor_id, motor_bus):
# Open the write lock, changes to EEPROM do NOT persist yet
motor_bus.write("Lock", 1)
# Set offset to 0
motor_name = motor_bus.motor_names[motor_id - 1]
motor_bus.write("Offset", 0, motor_names=[motor_name])
# Close the write lock, changes to EEPROM do persist
motor_bus.write("Lock", 0)
# Confirm that the offset is zero by reading it back
confirmed_offset = motor_bus.read("Offset")[motor_id - 1]
print(f"Offset for motor {motor_id} reset to: {confirmed_offset}")
return confirmed_offset
def calibrate_homing_motor(motor_id, motor_bus):
reset_offset(motor_id, motor_bus)
home_ticks = motor_bus.read("Present_Position")[motor_id - 1] # Read index starts at 0
print(f"Encoder offset (present position in homing position): {home_ticks}")
return home_ticks
def calibrate_linear_motor(motor_id, motor_bus):
motor_names = motor_bus.motor_names
motor_name = motor_names[motor_id - 1]
reset_offset(motor_id, motor_bus)
input(f"Close the {motor_name}, then press Enter...")
start_pos = motor_bus.read("Present_Position")[motor_id - 1] # Read index starts at 0
print(f" [Motor {motor_id}] start position recorded: {start_pos}")
input(f"Open the {motor_name} fully, then press Enter...")
end_pos = motor_bus.read("Present_Position")[motor_id - 1] # Read index starts at 0
print(f" [Motor {motor_id}] end position recorded: {end_pos}")
return start_pos, end_pos
def single_motor_calibration(arm: MotorsBus, motor_id: int):
"""Calibrates a single motor and returns its calibration data for updating the calibration file."""
disable_torque(arm)
print(f"\n--- Calibrating Motor {motor_id} ---")
start_pos = 0
end_pos = 0
encoder_offset = 0
if motor_id == 6:
start_pos, end_pos = calibrate_linear_motor(motor_id, arm)
else:
input("Move the motor to (zero) position, then press Enter...")
encoder_offset = calibrate_homing_motor(motor_id, arm)
print(f"Calibration for motor ID:{motor_id} done.")
# Create a calibration dictionary for the single motor
calib_dict = {
"homing_offset": int(encoder_offset),
"drive_mode": 0,
"start_pos": int(start_pos),
"end_pos": int(end_pos),
"calib_mode": get_calibration_modes(arm)[motor_id - 1],
"motor_name": arm.names[motor_id - 1],
}
return calib_dict
def run_full_arm_calibration(arm: MotorsBus, robot_type: str, arm_name: str, arm_type: str):
"""
Runs a full calibration process for all motors in a robotic arm.
This function calibrates each motor in the arm, determining encoder offsets and
start/end positions for linear and rotational motors. The calibration data is then
stored in a dictionary for later use.
**Calibration Process:**
- The user is prompted to move the arm to its homing position before starting.
- Motors with rotational motion are calibrated using a homing method.
- Linear actuators (e.g., grippers) are calibrated separately.
- Encoder offsets, start positions, and end positions are recorded.
**Example Usage:**
```python
run_full_arm_calibration(arm, "so100", "left", "follower")
```
"""
disable_torque(arm)
print(f"\nRunning calibration of {robot_type} {arm_name} {arm_type}...")
print("\nMove arm to homing position (middle)")
print(
"See: " + URL_TEMPLATE.format(robot=robot_type, arm=arm_type, position="zero")
) # TODO(pepijn): replace with new instruction homing pos (all motors in middle) in tutorial
input("Press Enter to continue...")
start_positions = np.zeros(len(arm.ids))
end_positions = np.zeros(len(arm.ids))
encoder_offsets = np.zeros(len(arm.ids))
modes = get_calibration_modes(arm)
for i, motor_id in enumerate(arm.ids):
if modes[i] == CalibrationMode.DEGREE.name:
encoder_offsets[i] = calibrate_homing_motor(motor_id, arm)
start_positions[i] = 0
end_positions[i] = 0
for i, motor_id in enumerate(arm.ids):
if modes[i] == CalibrationMode.LINEAR.name:
start_positions[i], end_positions[i] = calibrate_linear_motor(motor_id, arm)
encoder_offsets[i] = 0
print("\nMove arm to rest position")
input("Press Enter to continue...")
print(f"\n calibration of {robot_type} {arm_name} {arm_type} done!")
# Force drive_mode values (can be static)
drive_modes = [0, 1, 0, 0, 1, 0]
calib_dict = {
"homing_offset": encoder_offsets.astype(int).tolist(),
"drive_mode": drive_modes,
"start_pos": start_positions.astype(int).tolist(),
"end_pos": end_positions.astype(int).tolist(),
"calib_mode": get_calibration_modes(arm),
"motor_names": arm.names,
}
return calib_dict
def run_full_auto_arm_calibration(arm: MotorsBus, robot_type: str, arm_name: str, arm_type: str):
"""TODO(pepijn): Add this method later as extra
Example of usage:
```python
run_full_auto_arm_calibration(arm, "so100", "left", "follower")
```
"""
print(f"\nRunning calibration of {robot_type} {arm_name} {arm_type}...")
def apply_feetech_offsets_from_calibration(motorsbus: FeetechMotorsBus, calibration_dict: dict):
"""
Reads 'calibration_dict' containing 'homing_offset' and 'motor_names',
then writes each motor's offset to the servo's internal Offset register (0x1F) in EEPROM.
This version is modified so each homed position (originally 0) will now read
2047, i.e. 180° away from 0 in the 4096-count circle. Offsets are permanently
stored in EEPROM, so the servo's Present_Position is hardware-shifted even
after power cycling.
Steps:
1) Subtract 2047 from the old offset (so 0 -> 2047).
2) Clamp to [-2047..+2047].
3) Encode sign bit and magnitude into a 12-bit number.
"""
homing_offsets = calibration_dict["homing_offset"]
motor_names = calibration_dict["motor_names"]
start_pos = calibration_dict["start_pos"]
# Open the write lock, changes to EEPROM do NOT persist yet
motorsbus.write("Lock", 1)
# For each motor, set the 'Offset' parameter
for m_name, old_offset in zip(motor_names, homing_offsets, strict=False):
# If the bus doesn't have a motor named m_name, skip it
if m_name not in motorsbus.motors:
print(f"Warning: '{m_name}' not found in motorsbus.motors; skipping offset.")
continue
if m_name == "gripper":
old_offset = start_pos # If gripper, use the start position of the gripper as the offset
continue
# Shift the offset so the homed position reads 2047
new_offset = old_offset - 2047
# Clamp to [-2047..+2047]
if new_offset > 2047:
new_offset = 2047
print(
f"Warning: '{new_offset}' is getting clamped because its larger then 2047; This should not happen!"
)
elif new_offset < -2047:
new_offset = -2047
print(
f"Warning: '{new_offset}' is getting clamped because its smaller then -2047; This should not happen!"
)
# Determine the direction (sign) bit and magnitude
direction_bit = 1 if new_offset < 0 else 0
magnitude = abs(new_offset)
# Combine sign bit (bit 11) with the magnitude (bits 0..10)
servo_offset = (direction_bit << 11) | magnitude
# Write offset to servo
motorsbus.write("Offset", servo_offset, motor_names=m_name)
print(
f"Set offset for {m_name}: "
f"old_offset={old_offset}, new_offset={new_offset}, servo_encoded={magnitude} + direction={direction_bit}"
)
motorsbus.write("Lock", 0)
print("Offsets have been saved to EEPROM successfully.")
def convert_ticks_to_degrees(ticks, model):
resolutions = MODEL_RESOLUTION[model]
# Convert the ticks to degrees
return ticks * (360.0 / resolutions)
def convert_degrees_to_ticks(degrees, model):
resolutions = MODEL_RESOLUTION[model]
# Convert degrees to motor ticks
return int(degrees * (resolutions / 360.0))
def adjusted_to_homing_ticks(raw_motor_ticks: int, model: str, motor_bus: MotorsBus, motor_id: int) -> int:
"""
Takes a raw reading [0..(res-1)] (e.g. 0..4095) and shifts it so that '2048'
becomes 0 in the homed coordinate system ([-2048..+2047] for 4096 resolution).
"""
resolutions = MODEL_RESOLUTION[model]
# Shift raw ticks by half-resolution so 2048 -> 0, then wrap [0..res-1].
ticks = (raw_motor_ticks - (resolutions // 2)) % resolutions
# If above halfway, fold it into negative territory => [-2048..+2047].
if ticks > (resolutions // 2):
ticks -= resolutions
# Flip sign if drive_mode is set.
drive_mode = 0
if motor_bus.calibration is not None:
drive_mode = motor_bus.calibration["drive_mode"][motor_id - 1]
if drive_mode:
ticks *= -1
return ticks
def adjusted_to_motor_ticks(adjusted_pos: int, model: str, motor_bus: MotorsBus, motor_id: int) -> int:
"""
Inverse of adjusted_to_homing_ticks(). Takes a 'homed' position in [-2048..+2047]
and recovers the raw [0..(res-1)] ticks with 2048 as midpoint.
"""
# Flip sign if drive_mode was set.
drive_mode = 0
if motor_bus.calibration is not None:
drive_mode = motor_bus.calibration["drive_mode"][motor_id - 1]
if drive_mode:
adjusted_pos *= -1
resolutions = MODEL_RESOLUTION[model]
# Shift by +half-resolution and wrap into [0..res-1].
# This undoes the earlier shift by -half-resolution.
ticks = (adjusted_pos + (resolutions // 2)) % resolutions
return ticks
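A hedged round-trip for an sts3215 (resolution 4096) with drive_mode left at 0, mirroring the two functions above:

```python
resolution = 4096
raw = 3072                                    # raw reading in [0..4095]

homed = (raw - resolution // 2) % resolution  # 1024
if homed > resolution // 2:
    homed -= resolution                       # stays 1024, i.e. +90 degrees

back = (homed + resolution // 2) % resolution
assert back == raw                            # 3072 recovered
```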

View File

@ -2,13 +2,15 @@
# https://docs.google.com/spreadsheets/d/1GVs7W1VS1PqdhA1nW-abeyAHhTUxKUdR/edit?usp=sharing&ouid=116566590112741600240&rtpof=true&sd=true
# data_name: (address, size_byte)
SCS_SERIES_CONTROL_TABLE = {
"Model": (3, 2),
# EPROM
"Firmware_Version": (0, 2),
"Model_Number": (3, 2),
"ID": (5, 1),
"Baud_Rate": (6, 1),
"Return_Delay": (7, 1),
"Return_Delay_Time": (7, 1),
"Response_Status_Level": (8, 1),
"Min_Angle_Limit": (9, 2),
"Max_Angle_Limit": (11, 2),
"Min_Position_Limit": (9, 2),
"Max_Position_Limit": (11, 2),
"Max_Temperature_Limit": (13, 1),
"Max_Voltage_Limit": (14, 1),
"Min_Voltage_Limit": (15, 1),
@ -24,14 +26,15 @@ SCS_SERIES_CONTROL_TABLE = {
"CCW_Dead_Zone": (27, 1),
"Protection_Current": (28, 2),
"Angular_Resolution": (30, 1),
"Offset": (31, 2),
"Mode": (33, 1),
"Homing_Offset": (31, 2),
"Operating_Mode": (33, 1),
"Protective_Torque": (34, 1),
"Protection_Time": (35, 1),
"Overload_Torque": (36, 1),
"Speed_closed_loop_P_proportional_coefficient": (37, 1),
"Over_Current_Protection_Time": (38, 1),
"Velocity_closed_loop_I_integral_coefficient": (39, 1),
# SRAM
"Torque_Enable": (40, 1),
"Acceleration": (41, 1),
"Goal_Position": (42, 2),
@ -72,9 +75,34 @@ MODEL_RESOLUTION = {
"sts3215": 4096,
}
# {model: model_number}
MODEL_NUMBER = {
"sts3215": 777,
}
MODEL_BAUDRATE_TABLE = {
"scs_series": SCS_SERIES_BAUDRATE_TABLE,
"sts3215": SCS_SERIES_BAUDRATE_TABLE,
}
CALIBRATION_REQUIRED = ["Goal_Position", "Present_Position"]
NORMALIZATION_REQUIRED = ["Goal_Position", "Present_Position"]
# Sign-Magnitude encoding bits
ENCODINGS = {
"Homing_Offset": 11,
"Goal_Speed": 15,
}
AVAILABLE_BAUDRATES = [
4_800,
9_600,
14_400,
19_200,
38_400,
57_600,
115_200,
128_000,
250_000,
500_000,
1_000_000,
]

View File

@ -20,19 +20,19 @@
# https://github.com/astral-sh/ruff/issues/3711
import abc
import json
import logging
from dataclasses import dataclass
from enum import Enum
from functools import cached_property
from pathlib import Path
from pprint import pformat
from typing import Protocol, TypeAlias, overload
import serial
from deepdiff import DeepDiff
from tqdm import tqdm
from lerobot.common.errors import DeviceAlreadyConnectedError, DeviceNotConnectedError
from lerobot.common.utils.utils import enter_pressed, move_cursor_up
NameOrID: TypeAlias = str | int
Value: TypeAlias = int | float
@ -42,42 +42,63 @@ MAX_ID_RANGE = 252
logger = logging.getLogger(__name__)
def get_ctrl_table(model_ctrl_table: dict[str, dict], model: str) -> dict[str, tuple[int, int]]:
ctrl_table = model_ctrl_table.get(model)
if ctrl_table is None:
raise KeyError(f"Control table for {model=} not found.")
return ctrl_table
def get_address(model_ctrl_table: dict[str, dict], model: str, data_name: str) -> tuple[int, int]:
ctrl_table = get_ctrl_table(model_ctrl_table, model)
addr_bytes = ctrl_table.get(data_name)
if addr_bytes is None:
raise KeyError(f"Address for '{data_name}' not found in {model} control table.")
return addr_bytes
def assert_same_address(model_ctrl_table: dict[str, dict], motor_models: list[str], data_name: str) -> None:
all_addr = []
all_bytes = []
for model in motor_models:
addr, bytes = model_ctrl_table[model][data_name]
addr, bytes = get_address(model_ctrl_table, model, data_name)
all_addr.append(addr)
all_bytes.append(bytes)
if len(set(all_addr)) != 1:
raise NotImplementedError(
f"At least two motor models use a different address for `data_name`='{data_name}'"
f"({list(zip(motor_models, all_addr, strict=False))}). Contact a LeRobot maintainer."
f"({list(zip(motor_models, all_addr, strict=False))})."
)
if len(set(all_bytes)) != 1:
raise NotImplementedError(
f"At least two motor models use a different bytes representation for `data_name`='{data_name}'"
f"({list(zip(motor_models, all_bytes, strict=False))}). Contact a LeRobot maintainer."
f"({list(zip(motor_models, all_bytes, strict=False))})."
)
class TorqueMode(Enum):
ENABLED = 1
DISABLED = 0
class DriveMode(Enum):
NON_INVERTED = 0
INVERTED = 1
class CalibrationMode(Enum):
# Joints with rotational motions are expressed in degrees in nominal range of [-180, 180]
class MotorNormMode(Enum):
DEGREE = 0
# Joints with linear motions (like the gripper of Aloha) are expressed in a nominal range of [0, 100]
LINEAR = 1
RANGE_0_100 = 1
RANGE_M100_100 = 2
VELOCITY = 3
@dataclass
class MotorCalibration:
id: int
drive_mode: int
homing_offset: int
range_min: int
range_max: int
@dataclass
class Motor:
id: int
model: str
norm_mode: MotorNormMode
class JointOutOfRangeError(Exception):
@ -190,12 +211,6 @@ class GroupSyncWrite(Protocol):
def txPacket(self): ...
@dataclass
class Motor:
id: int
model: str
class MotorsBus(abc.ABC):
"""The main LeRobot class for implementing motors buses.
@ -238,30 +253,34 @@ class MotorsBus(abc.ABC):
```
"""
model_ctrl_table: dict[str, dict]
model_resolution_table: dict[str, int]
model_baudrate_table: dict[str, dict]
calibration_required: list[str]
available_baudrates: list[int]
default_timeout: int
model_baudrate_table: dict[str, dict]
model_ctrl_table: dict[str, dict]
model_number_table: dict[str, int]
model_resolution_table: dict[str, int]
normalization_required: list[str]
def __init__(
self,
port: str,
motors: dict[str, Motor],
calibration: dict[str, MotorCalibration] | None = None,
):
self.port = port
self.motors = motors
self._validate_motors()
self.calibration = calibration if calibration else {}
self.port_handler: PortHandler
self.packet_handler: PacketHandler
self.reader: GroupSyncRead
self.writer: GroupSyncWrite
self.sync_reader: GroupSyncRead
self.sync_writer: GroupSyncWrite
self._comm_success: int
self._no_error: int
self.calibration = None
self._id_to_model = {m.id: m.model for m in self.motors.values()}
self._id_to_name = {m.id: name for name, m in self.motors.items()}
self._id_to_model_dict = {m.id: m.model for m in self.motors.values()}
self._id_to_name_dict = {m.id: name for name, m in self.motors.items()}
self._model_nb_to_model_dict = {v: k for k, v in self.model_number_table.items()}
def __len__(self):
return len(self.motors)
@ -270,7 +289,7 @@ class MotorsBus(abc.ABC):
return (
f"{self.__class__.__name__}(\n"
f" Port: '{self.port}',\n"
f" Motors: \n{pformat(self.motors, indent=8)},\n"
f" Motors: \n{pformat(self.motors, indent=8, sort_dicts=False)},\n"
")',\n"
)
@ -280,13 +299,9 @@ class MotorsBus(abc.ABC):
return False
first_table = self.model_ctrl_table[self.models[0]]
return any(DeepDiff(first_table, self.model_ctrl_table[model]) for model in self.models[1:])
def id_to_model(self, motor_id: int) -> str:
return self._id_to_model[motor_id]
def id_to_name(self, motor_id: int) -> str:
return self._id_to_name[motor_id]
return any(
DeepDiff(first_table, get_ctrl_table(self.model_ctrl_table, model)) for model in self.models[1:]
)
@cached_property
def names(self) -> list[str]:
@ -300,22 +315,70 @@ class MotorsBus(abc.ABC):
def ids(self) -> list[int]:
return [m.id for m in self.motors.values()]
def _model_nb_to_model(self, motor_nb: int) -> str:
return self._model_nb_to_model_dict[motor_nb]
def _id_to_model(self, motor_id: int) -> str:
return self._id_to_model_dict[motor_id]
def _id_to_name(self, motor_id: int) -> str:
return self._id_to_name_dict[motor_id]
def _get_motor_id(self, motor: NameOrID) -> int:
if isinstance(motor, str):
return self.motors[motor].id
elif isinstance(motor, int):
return motor
else:
raise TypeError(f"'{motor}' should be int, str.")
def _get_motor_model(self, motor: NameOrID) -> int:
if isinstance(motor, str):
return self.motors[motor].model
elif isinstance(motor, int):
return self._id_to_model_dict[motor]
else:
raise TypeError(f"'{motor}' should be int, str.")
def _validate_motors(self) -> None:
# TODO(aliberts): improve error messages for this (display the problematic values)
if len(self.ids) != len(set(self.ids)):
raise ValueError("Some motors have the same id.")
raise ValueError(f"Some motors have the same id!\n{self}")
if len(self.names) != len(set(self.names)):
raise ValueError("Some motors have the same name.")
# Ensure ctrl table available for all models
for model in self.models:
get_ctrl_table(self.model_ctrl_table, model)
if any(model not in self.model_resolution_table for model in self.models):
raise ValueError("Some motors models are not available.")
def _is_comm_success(self, comm: int) -> bool:
return comm == self._comm_success
def _is_error(self, error: int) -> bool:
return error != self._no_error
def _assert_motors_exist(self) -> None:
# TODO(aliberts): collect all wrong ids/models and display them at once
found_models = self.broadcast_ping()
expected_models = {m.id: self.model_number_table[m.model] for m in self.motors.values()}
if not found_models or set(found_models) != set(self.ids):
raise RuntimeError(
f"{self.__class__.__name__} is supposed to have these motors: ({{id: model_nb}})"
f"\n{pformat(expected_models, indent=4, sort_dicts=False)}\n"
f"But it found these motors on port '{self.port}':"
f"\n{pformat(found_models, indent=4, sort_dicts=False)}\n"
)
for id_, model in expected_models.items():
if found_models[id_] != model:
raise RuntimeError(
f"Motor '{self._id_to_name(id_)}' (id={id_}) is supposed to be of model_number={model} "
f"('{self._id_to_model(id_)}') but a model_number={found_models[id_]} "
"was found instead for that id."
)
@property
def is_connected(self) -> bool:
return self.port_handler.is_open
def connect(self) -> None:
def connect(self, assert_motors_exist: bool = True) -> None:
if self.is_connected:
raise DeviceAlreadyConnectedError(
f"{self.__class__.__name__}('{self.port}') is already connected. Do not call `{self.__class__.__name__}.connect()` twice."
@ -324,50 +387,79 @@ class MotorsBus(abc.ABC):
try:
if not self.port_handler.openPort():
raise OSError(f"Failed to open port '{self.port}'.")
elif assert_motors_exist:
self._assert_motors_exist()
except (FileNotFoundError, OSError, serial.SerialException) as e:
logger.error(
raise ConnectionError(
f"\nCould not connect on port '{self.port}'. Make sure you are using the correct port."
"\nTry running `python lerobot/scripts/find_motors_bus_port.py`\n"
)
raise e
) from e
self.set_timeout()
logger.debug(f"{self.__class__.__name__} connected.")
@classmethod
def scan_port(cls, port: str) -> dict[int, list[int]]:
bus = cls(port, {})
try:
bus.port_handler.openPort()
except (FileNotFoundError, OSError, serial.SerialException) as e:
raise ConnectionError(
f"Could not connect to port '{port}'. Make sure you are using the correct port."
"\nTry running `python lerobot/scripts/find_motors_bus_port.py`\n"
) from e
baudrate_ids = {}
for baudrate in tqdm(bus.available_baudrates, desc="Scanning port"):
bus.set_baudrate(baudrate)
ids_models = bus.broadcast_ping()
if ids_models:
tqdm.write(f"Motors found for {baudrate=}: {pformat(ids_models, indent=4)}")
baudrate_ids[baudrate] = list(ids_models)
return baudrate_ids
@abc.abstractmethod
def configure_motors(self) -> None:
pass
def disable_torque(self, motors: NameOrID | list[NameOrID] | None = None) -> None:
pass
if motors is None:
motors = self.names
elif isinstance(motors, (str, int)):
motors = [motors]
elif not isinstance(motors, list):
raise TypeError(motors)
self._disable_torque(motors)
def enable_torque(self, motors: NameOrID | list[NameOrID] | None = None) -> None:
pass
if motors is None:
motors = self.names
elif isinstance(motors, (str, int)):
motors = [motors]
elif not isinstance(motors, list):
raise TypeError(motors)
self._enable_torque(motors)
@abc.abstractmethod
def _enable_torque(self, motors: list[NameOrID]) -> None:
pass
@abc.abstractmethod
def _disable_torque(self, motors: list[NameOrID]) -> None:
pass
def set_timeout(self, timeout_ms: int | None = None):
timeout_ms = timeout_ms if timeout_ms is not None else self.default_timeout
self.port_handler.setPacketTimeoutMillis(timeout_ms)
@property
def are_motors_configured(self) -> bool:
"""
Only check the motor indices and not baudrate, since if the motor baudrates are incorrect, a
ConnectionError will be raised anyway.
"""
try:
# TODO(aliberts): use ping instead
return (self.ids == self.read("ID")).all()
except ConnectionError as e:
logger.error(e)
return False
def get_baudrate(self) -> int:
return self.port_handler.getBaudRate()
def ping(self, motor: NameOrID, num_retry: int = 0, raise_on_error: bool = False) -> int | None:
idx = self.get_motor_id(motor)
for n_try in range(1 + num_retry):
model_number, comm, error = self.packet_handler.ping(self.port_handler, idx)
if self._is_comm_success(comm):
return model_number
logger.debug(f"ping failed for {idx=}: {n_try=} got {comm=} {error=}")
if raise_on_error:
raise ConnectionError(f"Ping motor {motor} returned a {error} error code.")
@abc.abstractmethod
def broadcast_ping(
self, num_retry: int = 0, raise_on_error: bool = False
) -> dict[int, list[int, int]] | None: ...
def set_baudrate(self, baudrate) -> None:
def set_baudrate(self, baudrate: int) -> None:
present_bus_baudrate = self.port_handler.getBaudRate()
if present_bus_baudrate != baudrate:
logger.info(f"Setting bus baud rate to {baudrate}. Previously {present_bus_baudrate}.")
@ -376,27 +468,184 @@ class MotorsBus(abc.ABC):
if self.port_handler.getBaudRate() != baudrate:
raise OSError("Failed to write bus baud rate.")
def set_calibration(self, calibration_fpath: Path) -> None:
with open(calibration_fpath) as f:
calibration = json.load(f)
@property
def is_calibrated(self) -> bool:
return self.calibration == self.read_calibration()
self.calibration = {int(idx): val for idx, val in calibration.items()}
def read_calibration(self) -> dict[str, MotorCalibration]:
offsets = self.sync_read("Homing_Offset", normalize=False)
mins = self.sync_read("Min_Position_Limit", normalize=False)
maxes = self.sync_read("Max_Position_Limit", normalize=False)
try:
drive_modes = self.sync_read("Drive_Mode", normalize=False)
except KeyError:
drive_modes = {name: 0 for name in self.names}
calibration = {}
for name, motor in self.motors.items():
calibration[name] = MotorCalibration(
id=motor.id,
drive_mode=drive_modes[name],
homing_offset=offsets[name],
range_min=mins[name],
range_max=maxes[name],
)
return calibration
def write_calibration(self, calibration_dict: dict[str, MotorCalibration]) -> None:
for motor, calibration in calibration_dict.items():
self.write("Homing_Offset", motor, calibration.homing_offset)
self.write("Min_Position_Limit", motor, calibration.range_min)
self.write("Max_Position_Limit", motor, calibration.range_max)
self.calibration = calibration_dict
def reset_calibration(self, motors: NameOrID | list[NameOrID] | None = None) -> None:
if motors is None:
motors = self.names
elif isinstance(motors, (str, int)):
motors = [motors]
elif not isinstance(motors, list):
raise TypeError(motors)
for motor in motors:
model = self._get_motor_model(motor)
max_res = self.model_resolution_table[model] - 1
self.write("Homing_Offset", motor, 0, normalize=False)
self.write("Min_Position_Limit", motor, 0, normalize=False)
self.write("Max_Position_Limit", motor, max_res, normalize=False)
self.calibration = {}
def set_half_turn_homings(self, motors: NameOrID | list[NameOrID] | None = None) -> dict[NameOrID, Value]:
"""
This assumes the motors' present positions are roughly in the middle of their desired range.
Step 1: Set homing and min max to 0
Step 2: Read Present_Position which will be Actual_Position since
Present_Position = Actual_Position ± Homing_Offset (1)
and Homing_Offset = 0 from step 1
Step 3: We want to set the Homing_Offset such that the current Present_Position becomes half of one
revolution. For instance, if 1 revolution corresponds to 4095 (4096 steps), this means we want the
current Present_Position to be 2047.
In that example:
Present_Position = 2047 (2)
Actual_Position = X (read in step 2)
From (1) and (2):
=> Homing_Offset = ±(X - 2047)
"""
if motors is None:
motors = self.names
elif isinstance(motors, (str, int)):
motors = [motors]
elif not isinstance(motors, list):
raise TypeError(motors)
self.reset_calibration(motors)
actual_positions = self.sync_read("Present_Position", motors, normalize=False)
homing_offsets = self._get_half_turn_homings(actual_positions)
for motor, offset in homing_offsets.items():
self.write("Homing_Offset", motor, offset)
return homing_offsets
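Numeric illustration of the steps above, assuming a 4096-step motor and the Feetech sign convention (Present_Position = Actual_Position - Homing_Offset); the reading is illustrative:

```python
actual_position = 1300                    # step 2: raw reading after resetting the homing
homing_offset = actual_position - 2047    # step 3: -747
present_position = actual_position - homing_offset
assert present_position == 2047           # the joint now reads mid-turn
```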
@abc.abstractmethod
def calibrate_values(self, ids_values: dict[int, int]) -> dict[int, float]:
def _get_half_turn_homings(self, positions: dict[NameOrID, Value]) -> dict[NameOrID, Value]:
pass
def record_ranges_of_motion(
self, motors: NameOrID | list[NameOrID] | None = None, display_values: bool = True
) -> tuple[dict[NameOrID, Value], dict[NameOrID, Value]]:
"""
This assumes that the homing offsets have been set such that all possible values in the range of
motion are positive and that the zero is not crossed. To that end, `set_half_turn_homings` should
typically be called prior to this.
"""
if motors is None:
motors = self.names
elif isinstance(motors, (str, int)):
motors = [motors]
elif not isinstance(motors, list):
raise TypeError(motors)
start_positions = self.sync_read("Present_Position", motors, normalize=False)
mins = start_positions.copy()
maxes = start_positions.copy()
while True:
positions = self.sync_read("Present_Position", motors, normalize=False)
mins = {motor: min(positions[motor], min_) for motor, min_ in mins.items()}
maxes = {motor: max(positions[motor], max_) for motor, max_ in maxes.items()}
if display_values:
print("\n-------------------------------------------")
print(f"{'NAME':<15} | {'MIN':>6} | {'POS':>6} | {'MAX':>6}")
for name in motors:
print(f"{name:<15} | {mins[name]:>6} | {positions[name]:>6} | {maxes[name]:>6}")
if enter_pressed():
break
if display_values:
# Move cursor up to overwrite the previous output
move_cursor_up(len(motors) + 3)
return mins, maxes
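Putting the two helpers together, a hypothetical calibration flow could look like the sketch below. `bus` is any connected `MotorsBus` subclass, `drive_mode` is assumed to be 0, and nothing here is prescribed by the library:

```python
bus.disable_torque()                          # joints must move freely
homing_offsets = bus.set_half_turn_homings()  # center the readable range
mins, maxes = bus.record_ranges_of_motion()   # user moves each joint to its limits

calibration = {
    name: MotorCalibration(
        id=bus.motors[name].id,
        drive_mode=0,
        homing_offset=homing_offsets[name],
        range_min=mins[name],
        range_max=maxes[name],
    )
    for name in bus.names
}
bus.write_calibration(calibration)
```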
def _normalize(self, data_name: str, ids_values: dict[int, int]) -> dict[int, float]:
normalized_values = {}
for id_, val in ids_values.items():
name = self._id_to_name(id_)
min_ = self.calibration[name].range_min
max_ = self.calibration[name].range_max
bounded_val = min(max_, max(min_, val))
if self.motors[name].norm_mode is MotorNormMode.RANGE_M100_100:
normalized_values[id_] = (((bounded_val - min_) / (max_ - min_)) * 200) - 100
elif self.motors[name].norm_mode is MotorNormMode.RANGE_0_100:
normalized_values[id_] = ((bounded_val - min_) / (max_ - min_)) * 100
else:
# TODO(aliberts): velocity and degree modes
raise NotImplementedError
return normalized_values
def _unnormalize(self, data_name: str, ids_values: dict[int, float]) -> dict[int, int]:
unnormalized_values = {}
for id_, val in ids_values.items():
name = self._id_to_name(id_)
min_ = self.calibration[name].range_min
max_ = self.calibration[name].range_max
if self.motors[name].norm_mode is MotorNormMode.RANGE_M100_100:
bounded_val = min(100.0, max(-100.0, val))
unnormalized_values[id_] = int(((bounded_val + 100) / 200) * (max_ - min_) + min_)
elif self.motors[name].norm_mode is MotorNormMode.RANGE_0_100:
bounded_val = min(100.0, max(0.0, val))
unnormalized_values[id_] = int((bounded_val / 100) * (max_ - min_) + min_)
else:
# TODO(aliberts): velocity and degree modes
raise NotImplementedError
return unnormalized_values
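A quick numeric check of the two mappings above, for a motor calibrated with range [500, 3500] (illustrative values):

```python
min_, max_ = 500, 3500
raw = 2000

# RANGE_M100_100: the middle of the range maps to 0.
norm = (((raw - min_) / (max_ - min_)) * 200) - 100
assert norm == 0.0

# ...and maps back to the original raw value.
assert int(((norm + 100) / 200) * (max_ - min_) + min_) == raw
```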
@abc.abstractmethod
def _encode_value(
self, value: int, data_name: str | None = None, n_bytes: int | None = None
) -> dict[int, int]:
pass
@abc.abstractmethod
def uncalibrate_values(self, ids_values: dict[int, float]) -> dict[int, int]:
pass
@abc.abstractmethod
def _is_comm_success(self, comm: int) -> bool:
def _decode_value(
self, value: int, data_name: str | None = None, n_bytes: int | None = None
) -> dict[int, int]:
pass
@staticmethod
@abc.abstractmethod
def split_int_bytes(value: int, n_bytes: int) -> list[int]:
def _split_int_to_bytes(value: int, n_bytes: int) -> list[int]:
"""
Splits an unsigned integer into a list of bytes in little-endian order.
@ -432,22 +681,53 @@ class MotorsBus(abc.ABC):
"""
pass
def get_motor_id(self, motor: NameOrID) -> int:
if isinstance(motor, str):
return self.motors[motor].id
elif isinstance(motor, int):
return motor
else:
raise TypeError(f"'{motor}' should be int, str.")
def ping(self, motor: NameOrID, num_retry: int = 0, raise_on_error: bool = False) -> int | None:
id_ = self._get_motor_id(motor)
for n_try in range(1 + num_retry):
model_number, comm, error = self.packet_handler.ping(self.port_handler, id_)
if self._is_comm_success(comm):
break
logger.debug(f"ping failed for {id_=}: {n_try=} got {comm=} {error=}")
if not self._is_comm_success(comm):
if raise_on_error:
raise ConnectionError(self.packet_handler.getRxPacketError(comm))
else:
return
if self._is_error(error):
if raise_on_error:
raise RuntimeError(self.packet_handler.getTxRxResult(comm))
else:
return
return model_number
@abc.abstractmethod
def broadcast_ping(
self, num_retry: int = 0, raise_on_error: bool = False
) -> dict[int, list[int, str]] | None:
pass
@overload
def read(self, data_name: str, motors: None = ..., num_retry: int = ...) -> dict[str, Value]: ...
def sync_read(
self, data_name: str, motors: None = ..., *, normalize: bool = ..., num_retry: int = ...
) -> dict[str, Value]: ...
@overload
def read(
self, data_name: str, motors: NameOrID | list[NameOrID], num_retry: int = ...
def sync_read(
self,
data_name: str,
motors: NameOrID | list[NameOrID],
*,
normalize: bool = ...,
num_retry: int = ...,
) -> dict[NameOrID, Value]: ...
def read(
self, data_name: str, motors: NameOrID | list[NameOrID] | None = None, num_retry: int = 0
def sync_read(
self,
data_name: str,
motors: NameOrID | list[NameOrID] | None = None,
*,
normalize: bool = True,
num_retry: int = 0,
) -> dict[NameOrID, Value]:
if not self.is_connected:
raise DeviceNotConnectedError(
@ -458,66 +738,85 @@ class MotorsBus(abc.ABC):
if motors is None:
id_key_map = {m.id: name for name, m in self.motors.items()}
elif isinstance(motors, (str, int)):
id_key_map = {self.get_motor_id(motors): motors}
id_key_map = {self._get_motor_id(motors): motors}
elif isinstance(motors, list):
id_key_map = {self.get_motor_id(m): m for m in motors}
id_key_map = {self._get_motor_id(m): m for m in motors}
else:
raise TypeError(motors)
motor_ids = list(id_key_map)
if self._has_different_ctrl_tables:
models = [self.id_to_model(idx) for idx in motor_ids]
assert_same_address(self.model_ctrl_table, models, data_name)
model = self.id_to_model(next(iter(motor_ids)))
addr, n_bytes = self.model_ctrl_table[model][data_name]
comm, ids_values = self._read(motor_ids, addr, n_bytes, num_retry)
comm, ids_values = self._sync_read(data_name, motor_ids, num_retry=num_retry)
if not self._is_comm_success(comm):
raise ConnectionError(
f"Failed to read {data_name} on port {self.port} for ids {motor_ids}:"
f"Failed to sync read '{data_name}' on {motor_ids=} after {num_retry + 1} tries."
f"{self.packet_handler.getTxRxResult(comm)}"
)
if data_name in self.calibration_required and self.calibration is not None:
ids_values = self.calibrate_values(ids_values)
if normalize and data_name in self.normalization_required:
ids_values = self._normalize(data_name, ids_values)
return {id_key_map[idx]: val for idx, val in ids_values.items()}
return {id_key_map[id_]: val for id_, val in ids_values.items()}
def _read(
self, motor_ids: list[str], address: int, n_bytes: int, num_retry: int = 0
def _sync_read(
self, data_name: str, motor_ids: list[str], model: str | None = None, num_retry: int = 0
) -> tuple[int, dict[int, int]]:
self.reader.clearParam()
self.reader.start_address = address
self.reader.data_length = n_bytes
if self._has_different_ctrl_tables:
models = [self._id_to_model(id_) for id_ in motor_ids]
assert_same_address(self.model_ctrl_table, models, data_name)
model = self._id_to_model(next(iter(motor_ids))) if model is None else model
addr, n_bytes = get_address(self.model_ctrl_table, model, data_name)
self._setup_sync_reader(motor_ids, addr, n_bytes)
# FIXME(aliberts, pkooij): We should probably not have to do this.
# Let's try to see if we can do with better comm status handling instead.
# self.port_handler.ser.reset_output_buffer()
# self.port_handler.ser.reset_input_buffer()
for idx in motor_ids:
self.reader.addParam(idx)
for n_try in range(1 + num_retry):
comm = self.reader.txRxPacket()
comm = self.sync_reader.txRxPacket()
if self._is_comm_success(comm):
break
logger.debug(f"ids={list(motor_ids)} @{address} ({n_bytes} bytes) {n_try=} got {comm=}")
logger.debug(f"Failed to sync read '{data_name}' ({addr=} {n_bytes=}) on {motor_ids=} ({n_try=})")
logger.debug(self.packet_handler.getRxPacketError(comm))
values = {}
for id_ in motor_ids:
val = self.sync_reader.getData(id_, addr, n_bytes)
values[id_] = self._decode_value(val, data_name, n_bytes)
values = {idx: self.reader.getData(idx, address, n_bytes) for idx in motor_ids}
return comm, values
# TODO(aliberts, pkooij): Implementing something like this could get much faster read times.
# Note: this could be at the cost of increased latency between the moment the data is produced by the
# motors and the moment it is used by a policy
# def _async_read(self, motor_ids: list[str], address: int, n_bytes: int):
# self.reader.rxPacket()
# self.reader.txPacket()
# for idx in motor_ids:
# value = self.reader.getData(idx, address, n_bytes)
def _setup_sync_reader(self, motor_ids: list[str], addr: int, n_bytes: int) -> None:
self.sync_reader.clearParam()
self.sync_reader.start_address = addr
self.sync_reader.data_length = n_bytes
for id_ in motor_ids:
self.sync_reader.addParam(id_)
def write(self, data_name: str, values: Value | dict[NameOrID, Value], num_retry: int = 0) -> None:
# TODO(aliberts, pkooij): Implementing something like this could give even faster read times if need be.
# We would have to handle the logic of checking whether a packet has been sent previously, but it is doable.
# This could come at the cost of increased latency between the moment the data is produced by the motors and
# the moment it is used by a policy.
# def _async_read(self, motor_ids: list[str], address: int, n_bytes: int):
# if self.sync_reader.start_address != address or self.sync_reader.data_length != n_bytes or ...:
# self._setup_sync_reader(motor_ids, address, n_bytes)
# else:
# self.sync_reader.rxPacket()
# self.sync_reader.txPacket()
# for id_ in motor_ids:
# value = self.sync_reader.getData(id_, address, n_bytes)
def sync_write(
self,
data_name: str,
values: Value | dict[NameOrID, Value],
*,
normalize: bool = True,
num_retry: int = 0,
) -> None:
if not self.is_connected:
raise DeviceNotConnectedError(
f"{self.__class__.__name__}('{self.port}') is not connected. You need to run `{self.__class__.__name__}.connect()`."
@ -526,53 +825,102 @@ class MotorsBus(abc.ABC):
if isinstance(values, int):
ids_values = {id_: values for id_ in self.ids}
elif isinstance(values, dict):
ids_values = {self.get_motor_id(motor): val for motor, val in values.items()}
ids_values = {self._get_motor_id(motor): val for motor, val in values.items()}
else:
raise ValueError(f"'values' is expected to be a single value or a dict. Got {values}")
raise TypeError(f"'values' is expected to be a single value or a dict. Got {values}")
if self._has_different_ctrl_tables:
models = [self.id_to_model(idx) for idx in ids_values]
assert_same_address(self.model_ctrl_table, models, data_name)
if normalize and data_name in self.normalization_required and self.calibration is not None:
ids_values = self._unnormalize(data_name, ids_values)
if data_name in self.calibration_required and self.calibration is not None:
ids_values = self.uncalibrate_values(ids_values)
model = self.id_to_model(next(iter(ids_values)))
addr, n_bytes = self.model_ctrl_table[model][data_name]
comm = self._write(ids_values, addr, n_bytes, num_retry)
comm = self._sync_write(data_name, ids_values, num_retry=num_retry)
if not self._is_comm_success(comm):
raise ConnectionError(
f"Failed to write {data_name} on port {self.port} for ids {list(ids_values)}:"
f"{self.packet_handler.getTxRxResult(comm)}"
f"Failed to sync write '{data_name}' with {ids_values=} after {num_retry + 1} tries."
f"\n{self.packet_handler.getTxRxResult(comm)}"
)
def _write(self, ids_values: dict[int, int], address: int, n_bytes: int, num_retry: int = 0) -> int:
self.writer.clearParam()
self.writer.start_address = address
self.writer.data_length = n_bytes
def _sync_write(self, data_name: str, ids_values: dict[int, int], num_retry: int = 0) -> int:
if self._has_different_ctrl_tables:
models = [self._id_to_model(id_) for id_ in ids_values]
assert_same_address(self.model_ctrl_table, models, data_name)
for idx, value in ids_values.items():
data = self.split_int_bytes(value, n_bytes)
self.writer.addParam(idx, data)
model = self._id_to_model(next(iter(ids_values)))
addr, n_bytes = get_address(self.model_ctrl_table, model, data_name)
ids_values = {id_: self._encode_value(value, data_name, n_bytes) for id_, value in ids_values.items()}
self._setup_sync_writer(ids_values, addr, n_bytes)
for n_try in range(1 + num_retry):
comm = self.writer.txPacket()
comm = self.sync_writer.txPacket()
if self._is_comm_success(comm):
break
logger.debug(f"ids={list(ids_values)} @{address} ({n_bytes} bytes) {n_try=} got {comm=}")
logger.debug(
f"Failed to sync write '{data_name}' ({addr=} {n_bytes=}) with {ids_values=} ({n_try=})"
)
logger.debug(self.packet_handler.getRxPacketError(comm))
return comm
def disconnect(self) -> None:
def _setup_sync_writer(self, ids_values: dict[int, int], addr: int, n_bytes: int) -> None:
self.sync_writer.clearParam()
self.sync_writer.start_address = addr
self.sync_writer.data_length = n_bytes
for id_, value in ids_values.items():
data = self._split_int_to_bytes(value, n_bytes)
self.sync_writer.addParam(id_, data)
def write(
self, data_name: str, motor: NameOrID, value: Value, *, normalize: bool = True, num_retry: int = 0
) -> None:
if not self.is_connected:
raise DeviceNotConnectedError(
f"{self.__class__.__name__}('{self.port}') is not connected. You need to run `{self.__class__.__name__}.connect()`."
)
id_ = self._get_motor_id(motor)
if normalize and data_name in self.normalization_required and self.calibration is not None:
id_value = self._unnormalize(data_name, {id_: value})
value = id_value[id_]
comm, error = self._write(data_name, id_, value, num_retry=num_retry)
if not self._is_comm_success(comm):
raise ConnectionError(
f"Failed to write '{data_name}' on {id_=} with '{value}' after {num_retry + 1} tries."
f"\n{self.packet_handler.getTxRxResult(comm)}"
)
elif self._is_error(error):
raise RuntimeError(
f"Failed to write '{data_name}' on {id_=} with '{value}' after {num_retry + 1} tries."
f"\n{self.packet_handler.getRxPacketError(error)}"
)
def _write(self, data_name: str, motor_id: int, value: int, num_retry: int = 0) -> tuple[int, int]:
model = self._id_to_model(motor_id)
addr, n_bytes = get_address(self.model_ctrl_table, model, data_name)
value = self._encode_value(value, data_name, n_bytes)
data = self._split_int_to_bytes(value, n_bytes)
for n_try in range(1 + num_retry):
comm, error = self.packet_handler.writeTxRx(self.port_handler, motor_id, addr, n_bytes, data)
if self._is_comm_success(comm):
break
logger.debug(
f"Failed to write '{data_name}' ({addr=} {n_bytes=}) on {motor_id=} with '{value}' ({n_try=})"
)
logger.debug(self.packet_handler.getRxPacketError(comm))
return comm, error
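# Illustrative usage of the single-motor variant (mirroring calls made by the robot classes further
# down in this diff; names and values are examples, not a spec):
#
#     bus.write("Position_P_Gain", "elbow_flex", 1500)  # raw register value
#     bus.write("Goal_Position", "gripper", 50)         # normalized if calibration is loaded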
def disconnect(self, disable_torque: bool = True) -> None:
if not self.is_connected:
raise DeviceNotConnectedError(
f"{self.__class__.__name__}('{self.port}') is not connected. Try running `{self.__class__.__name__}.connect()` first."
)
if disable_torque:
self.port_handler.clearPort()
self.port_handler.is_using = False
self.disable_torque()
self.port_handler.closePort()
logger.debug(f"{self.__class__.__name__} disconnected.")
def __del__(self):
if self.is_connected:
self.disconnect()

View File

@ -24,7 +24,7 @@ Designed by Physical Intelligence. Ported from Jax by Hugging Face.
Install pi0 extra dependencies:
```bash
pip install -e ".[pi0]"
pip install --no-binary=av -e ".[pi0]"
```
Example of finetuning the pi0 pretrained model (`pi0_base` in `openpi`):
@ -313,7 +313,7 @@ class PI0Policy(PreTrainedPolicy):
state = self.prepare_state(batch)
lang_tokens, lang_masks = self.prepare_language(batch)
actions = self.prepare_action(batch)
actions_is_pad = batch.get("actions_is_pad")
actions_is_pad = batch.get("action_is_pad")
loss_dict = {}
losses = self.model.forward(images, img_masks, lang_tokens, lang_masks, state, actions, noise, time)

View File

@ -1,4 +1,2 @@
from .configuration_koch import KochRobotConfig
from .robot_koch import KochRobot
__all__ = ["KochRobotConfig", "KochRobot"]
from .config_koch_follower import KochFollowerConfig
from .koch_follower import KochFollower

View File

@ -5,12 +5,14 @@ from lerobot.common.cameras import CameraConfig
from ..config import RobotConfig
@RobotConfig.register_subclass("koch")
@RobotConfig.register_subclass("koch_follower")
@dataclass
class KochRobotConfig(RobotConfig):
# Port to connect to the robot
class KochFollowerConfig(RobotConfig):
# Port to connect to the arm
port: str
disable_torque_on_disconnect: bool = True
# `max_relative_target` limits the magnitude of the relative positional target vector for safety purposes.
# Set this to a positive scalar to have the same value for all motors, or a list that is the same length as
# the number of motors in your follower arms.

View File

@ -0,0 +1,231 @@
#!/usr/bin/env python
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import time
from typing import Any
from lerobot.common.cameras.utils import make_cameras_from_configs
from lerobot.common.constants import OBS_IMAGES, OBS_STATE
from lerobot.common.errors import DeviceAlreadyConnectedError, DeviceNotConnectedError
from lerobot.common.motors import Motor, MotorCalibration, MotorNormMode
from lerobot.common.motors.dynamixel import (
DynamixelMotorsBus,
OperatingMode,
)
from ..robot import Robot
from ..utils import ensure_safe_goal_position
from .config_koch_follower import KochFollowerConfig
logger = logging.getLogger(__name__)
class KochFollower(Robot):
"""
- [Koch v1.0](https://github.com/AlexanderKoch-Koch/low_cost_robot), with and without the wrist-to-elbow
expansion, developed by Alexander Koch from [Tau Robotics](https://tau-robotics.com)
- [Koch v1.1](https://github.com/jess-moss/koch-v1-1) developed by Jess Moss
"""
config_class = KochFollowerConfig
name = "koch_follower"
def __init__(self, config: KochFollowerConfig):
super().__init__(config)
self.config = config
self.arm = DynamixelMotorsBus(
port=self.config.port,
motors={
"shoulder_pan": Motor(1, "xl430-w250", MotorNormMode.RANGE_M100_100),
"shoulder_lift": Motor(2, "xl430-w250", MotorNormMode.RANGE_M100_100),
"elbow_flex": Motor(3, "xl330-m288", MotorNormMode.RANGE_M100_100),
"wrist_flex": Motor(4, "xl330-m288", MotorNormMode.RANGE_M100_100),
"wrist_roll": Motor(5, "xl330-m288", MotorNormMode.RANGE_M100_100),
"gripper": Motor(6, "xl330-m288", MotorNormMode.RANGE_0_100),
},
calibration=self.calibration,
)
self.cameras = make_cameras_from_configs(config.cameras)
@property
def state_feature(self) -> dict:
return {
"dtype": "float32",
"shape": (len(self.arm),),
"names": {"motors": list(self.arm.motors)},
}
@property
def action_feature(self) -> dict:
return self.state_feature
@property
def camera_features(self) -> dict[str, dict]:
cam_ft = {}
for cam_key, cam in self.cameras.items():
cam_ft[cam_key] = {
"shape": (cam.height, cam.width, cam.channels),
"names": ["height", "width", "channels"],
"info": None,
}
return cam_ft
@property
def is_connected(self) -> bool:
# TODO(aliberts): add cam.is_connected for cam in self.cameras
return self.arm.is_connected
def connect(self) -> None:
"""
We assume that at connection time, arm is in a rest position,
and torque can be safely disabled to run calibration.
"""
if self.is_connected:
raise DeviceAlreadyConnectedError(f"{self} already connected")
self.arm.connect()
if not self.is_calibrated:
self.calibrate()
for cam in self.cameras.values():
cam.connect()
self.configure()
logger.info(f"{self} connected.")
@property
def is_calibrated(self) -> bool:
return self.arm.is_calibrated
def calibrate(self) -> None:
logger.info(f"\nRunning calibration of {self}")
self.arm.disable_torque()
for name in self.arm.names:
self.arm.write("Operating_Mode", name, OperatingMode.EXTENDED_POSITION.value)
input("Move robot to the middle of its range of motion and press ENTER....")
homing_offsets = self.arm.set_half_turn_homings()
full_turn_motors = ["shoulder_pan", "wrist_roll"]
unknown_range_motors = [name for name in self.arm.names if name not in full_turn_motors]
logger.info(
f"Move all joints except {full_turn_motors} sequentially through their entire "
"ranges of motion.\nRecording positions. Press ENTER to stop..."
)
range_mins, range_maxes = self.arm.record_ranges_of_motion(unknown_range_motors)
for name in full_turn_motors:
range_mins[name] = 0
range_maxes[name] = 4095
self.calibration = {}
for name, motor in self.arm.motors.items():
self.calibration[name] = MotorCalibration(
id=motor.id,
drive_mode=0,
homing_offset=homing_offsets[name],
range_min=range_mins[name],
range_max=range_maxes[name],
)
self.arm.write_calibration(self.calibration)
self._save_calibration()
logger.info(f"Calibration saved to {self.calibration_fpath}")
def configure(self) -> None:
self.arm.disable_torque()
self.arm.configure_motors()
# Use 'extended position mode' for all motors except the gripper, because in joint mode the servos
# can't rotate more than 360 degrees (from 0 to 4095). Mistakes can happen while assembling the arm,
# and you could end up with a servo positioned at 0 or 4095 at a crucial point.
for name in self.arm.names:
if name != "gripper":
self.arm.write("Operating_Mode", name, OperatingMode.EXTENDED_POSITION.value)
# Use 'current-based position control' for the gripper so that it is limited by the current limit.
# For the follower gripper, this means it can grasp an object without applying excessive force, even
# though its goal position is a complete grasp (both gripper fingers are commanded to close until they touch).
# For the leader gripper, this means we can use it as a physical trigger: we can push it with a finger
# to make it move, and it will move back to its original target position when we release the force.
self.arm.write("Operating_Mode", "gripper", OperatingMode.CURRENT_POSITION.value)
# Set better PID values to close the gap between recorded states and actions
# TODO(rcadene): Implement an automatic procedure to set optimal PID values for each motor
self.arm.write("Position_P_Gain", "elbow_flex", 1500)
self.arm.write("Position_I_Gain", "elbow_flex", 0)
self.arm.write("Position_D_Gain", "elbow_flex", 600)
self.arm.enable_torque()
def get_observation(self) -> dict[str, Any]:
if not self.is_connected:
raise DeviceNotConnectedError(f"{self} is not connected.")
obs_dict = {}
# Read arm position
start = time.perf_counter()
obs_dict[OBS_STATE] = self.arm.sync_read("Present_Position")
dt_ms = (time.perf_counter() - start) * 1e3
logger.debug(f"{self} read state: {dt_ms:.1f}ms")
# Capture images from cameras
for cam_key, cam in self.cameras.items():
start = time.perf_counter()
obs_dict[f"{OBS_IMAGES}.{cam_key}"] = cam.async_read()
dt_ms = (time.perf_counter() - start) * 1e3
logger.debug(f"{self} read {cam_key}: {dt_ms:.1f}ms")
return obs_dict
def send_action(self, action: dict[str, float]) -> dict[str, float]:
"""Command arm to move to a target joint configuration.
The relative action magnitude may be clipped depending on the configuration parameter
`max_relative_target`. In this case, the action sent differs from the original action.
Thus, this function always returns the action actually sent.
Args:
action (dict[str, float]): The goal positions for the motors.
Returns:
dict[str, float]: The action sent to the motors, potentially clipped.
"""
if not self.is_connected:
raise DeviceNotConnectedError(f"{self} is not connected.")
goal_pos = action
# Cap goal position when too far away from present position.
# /!\ Slower fps expected due to reading from the follower.
if self.config.max_relative_target is not None:
present_pos = self.arm.sync_read("Present_Position")
goal_present_pos = {key: (g_pos, present_pos[key]) for key, g_pos in goal_pos.items()}
goal_pos = ensure_safe_goal_position(goal_present_pos, self.config.max_relative_target)
# Send goal position to the arm
self.arm.sync_write("Goal_Position", goal_pos)
return goal_pos
def disconnect(self):
if not self.is_connected:
raise DeviceNotConnectedError(f"{self} is not connected.")
self.arm.disconnect(self.config.disable_torque_on_disconnect)
for cam in self.cameras.values():
cam.disconnect()
logger.info(f"{self} disconnected.")

View File

@ -1,231 +0,0 @@
#!/usr/bin/env python
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
import time
import numpy as np
from lerobot.common.cameras.utils import make_cameras_from_configs
from lerobot.common.constants import OBS_IMAGES, OBS_STATE
from lerobot.common.errors import DeviceAlreadyConnectedError, DeviceNotConnectedError
from lerobot.common.motors import TorqueMode
from lerobot.common.motors.dynamixel import (
DynamixelMotorsBus,
run_arm_calibration,
)
from ..robot import Robot
from ..utils import ensure_safe_goal_position
from .configuration_koch import KochRobotConfig
class KochRobot(Robot):
"""
- [Koch v1.0](https://github.com/AlexanderKoch-Koch/low_cost_robot), with and without the wrist-to-elbow
expansion, developed by Alexander Koch from [Tau Robotics](https://tau-robotics.com)
- [Koch v1.1](https://github.com/jess-moss/koch-v1-1) developed by Jess Moss
"""
config_class = KochRobotConfig
name = "koch"
def __init__(self, config: KochRobotConfig):
super().__init__(config)
self.config = config
self.robot_type = config.type
self.arm = DynamixelMotorsBus(
port=self.config.port,
motors={
"shoulder_pan": (1, "xl430-w250"),
"shoulder_lift": (2, "xl430-w250"),
"elbow_flex": (3, "xl330-m288"),
"wrist_flex": (4, "xl330-m288"),
"wrist_roll": (5, "xl330-m288"),
"gripper": (6, "xl330-m288"),
},
)
self.cameras = make_cameras_from_configs(config.cameras)
self.is_connected = False
self.logs = {}
@property
def state_feature(self) -> dict:
return {
"dtype": "float32",
"shape": (len(self.arm),),
"names": {"motors": list(self.arm.motors)},
}
@property
def action_feature(self) -> dict:
return self.state_feature
@property
def camera_features(self) -> dict[str, dict]:
cam_ft = {}
for cam_key, cam in self.cameras.items():
cam_ft[cam_key] = {
"shape": (cam.height, cam.width, cam.channels),
"names": ["height", "width", "channels"],
"info": None,
}
return cam_ft
def connect(self) -> None:
if self.is_connected:
raise DeviceAlreadyConnectedError(
"ManipulatorRobot is already connected. Do not run `robot.connect()` twice."
)
logging.info("Connecting arm.")
self.arm.connect()
# We assume that at connection time, arm is in a rest position,
# and torque can be safely disabled to run calibration.
self.arm.write("Torque_Enable", TorqueMode.DISABLED.value)
self.calibrate()
# Use 'extended position mode' for all motors except gripper, because in joint mode the servos can't
# rotate more than 360 degrees (from 0 to 4095) And some mistake can happen while assembling the arm,
# you could end up with a servo with a position 0 or 4095 at a crucial point See [
# https://emanual.robotis.com/docs/en/dxl/x/x_series/#operating-mode11]
all_motors_except_gripper = [name for name in self.arm.motor_names if name != "gripper"]
if len(all_motors_except_gripper) > 0:
# 4 corresponds to Extended Position on Koch motors
self.arm.write("Operating_Mode", 4, all_motors_except_gripper)
# Use 'position control current based' for gripper to be limited by the limit of the current.
# For the follower gripper, it means it can grasp an object without forcing too much even tho,
# it's goal position is a complete grasp (both gripper fingers are ordered to join and reach a touch).
# For the leader gripper, it means we can use it as a physical trigger, since we can force with our finger
# to make it move, and it will move back to its original target position when we release the force.
# 5 corresponds to Current Controlled Position on Koch gripper motors "xl330-m077, xl330-m288"
self.arm.write("Operating_Mode", 5, "gripper")
# Set better PID values to close the gap between recorded states and actions
# TODO(rcadene): Implement an automatic procedure to set optimal PID values for each motor
self.arm.write("Position_P_Gain", 1500, "elbow_flex")
self.arm.write("Position_I_Gain", 0, "elbow_flex")
self.arm.write("Position_D_Gain", 600, "elbow_flex")
logging.info("Activating torque.")
self.arm.write("Torque_Enable", TorqueMode.ENABLED.value)
# Check arm can be read
self.arm.read("Present_Position")
# Connect the cameras
for cam in self.cameras.values():
cam.connect()
self.is_connected = True
def calibrate(self) -> None:
"""After calibration all motors function in human interpretable ranges.
Rotations are expressed in degrees in nominal range of [-180, 180],
and linear motions (like gripper of Aloha) in nominal range of [0, 100].
"""
if self.calibration_fpath.exists():
with open(self.calibration_fpath) as f:
calibration = json.load(f)
else:
# TODO(rcadene): display a warning in __init__ if calibration file not available
logging.info(f"Missing calibration file '{self.calibration_fpath}'")
calibration = run_arm_calibration(self.arm, self.robot_type, self.name, "follower")
logging.info(f"Calibration is done! Saving calibration file '{self.calibration_fpath}'")
self.calibration_fpath.parent.mkdir(parents=True, exist_ok=True)
with open(self.calibration_fpath, "w") as f:
json.dump(calibration, f)
self.arm.set_calibration(calibration)
def get_observation(self) -> dict[str, np.ndarray]:
"""The returned observations do not have a batch dimension."""
if not self.is_connected:
raise DeviceNotConnectedError(
"ManipulatorRobot is not connected. You need to run `robot.connect()`."
)
obs_dict = {}
# Read arm position
before_read_t = time.perf_counter()
obs_dict[OBS_STATE] = self.arm.read("Present_Position")
self.logs["read_pos_dt_s"] = time.perf_counter() - before_read_t
# Capture images from cameras
for cam_key, cam in self.cameras.items():
before_camread_t = time.perf_counter()
obs_dict[f"{OBS_IMAGES}.{cam_key}"] = cam.async_read()
self.logs[f"read_camera_{cam_key}_dt_s"] = cam.logs["delta_timestamp_s"]
self.logs[f"async_read_camera_{cam_key}_dt_s"] = time.perf_counter() - before_camread_t
return obs_dict
def send_action(self, action: np.ndarray) -> np.ndarray:
"""Command arm to move to a target joint configuration.
The relative action magnitude may be clipped depending on the configuration parameter
`max_relative_target`. In this case, the action sent differs from original action.
Thus, this function always returns the action actually sent.
Args:
action (np.ndarray): array containing the goal positions for the motors.
Raises:
RobotDeviceNotConnectedError: if robot is not connected.
Returns:
np.ndarray: the action sent to the motors, potentially clipped.
"""
if not self.is_connected:
raise DeviceNotConnectedError(
"ManipulatorRobot is not connected. You need to run `robot.connect()`."
)
goal_pos = action
# Cap goal position when too far away from present position.
# /!\ Slower fps expected due to reading from the follower.
if self.config.max_relative_target is not None:
present_pos = self.arm.read("Present_Position")
goal_pos = ensure_safe_goal_position(goal_pos, present_pos, self.config.max_relative_target)
# Send goal position to the arm
self.arm.write("Goal_Position", goal_pos.astype(np.int32))
return goal_pos
def print_logs(self):
# TODO(aliberts): move robot-specific logs logic here
pass
def disconnect(self):
if not self.is_connected:
raise DeviceNotConnectedError(
"ManipulatorRobot is not connected. You need to run `robot.connect()` before disconnecting."
)
self.arm.disconnect()
for cam in self.cameras.values():
cam.disconnect()
self.is_connected = False

View File

@ -69,7 +69,7 @@ git clone https://github.com/huggingface/lerobot.git ~/lerobot
#### 5. Install LeRobot with dependencies for the feetech motors:
```bash
cd ~/lerobot && pip install -e ".[feetech]"
cd ~/lerobot && pip install --no-binary=av -e ".[feetech]"
```
## C. Install LeRobot on laptop
@ -110,15 +110,9 @@ git clone https://github.com/huggingface/lerobot.git ~/lerobot
#### 5. Install LeRobot with dependencies for the feetech motors:
```bash
cd ~/lerobot && pip install -e ".[feetech]"
cd ~/lerobot && pip install --no-binary=av -e ".[feetech]"
```
*EXTRA: For Linux only (not Mac)*: install extra dependencies for recording datasets:
```bash
conda install -y -c conda-forge ffmpeg
pip uninstall -y opencv-python
conda install -y -c conda-forge "opencv>=4.10.0"
```
Great :hugs:! You are now done installing LeRobot and we can begin assembling the SO100 arms and Mobile base :robot:.
Every time you now want to use LeRobot you can go to the `~/lerobot` folder where we installed LeRobot and run one of the commands.
@ -399,6 +393,10 @@ python lerobot/scripts/control_robot.py \
```
# F. Teleoperate
> [!TIP]
> If you're using a Mac, you might need to give Terminal permission to access your keyboard. Go to System Preferences > Security & Privacy > Input Monitoring and check the box for Terminal.
To teleoperate, SSH into your Raspberry Pi, run `conda activate lerobot`, and launch this script:
```bash
python lerobot/scripts/control_robot.py \

View File

@ -18,7 +18,6 @@ and send orders to its motors.
# TODO(rcadene, aliberts): reorganize the codebase into one file per robot, with the associated
# calibration procedure, to make it easy for people to add their own robot.
import json
import time
import warnings
from dataclasses import dataclass, field
@ -81,73 +80,6 @@ class ManipulatorRobotConfig(RobotConfig):
)
def apply_feetech_offsets_from_calibration(motorsbus, calibration_dict: dict):
"""
Reads 'calibration_dict' containing 'homing_offset' and 'motor_names',
then writes each motor's offset to the servo's internal Offset (0x1F) in EPROM.
This version is modified so each homed position (originally 0) will now read
2047, i.e. 180° away from 0 in the 4096-count circle. Offsets are permanently
stored in EEPROM, so the servo's Present_Position is hardware-shifted even
after power cycling.
Steps:
1) Subtract 2047 from the old offset (so 0 -> 2047).
2) Clamp to [-2047..+2047].
3) Encode sign bit and magnitude into a 12-bit number.
"""
homing_offsets = calibration_dict["homing_offset"]
motor_names = calibration_dict["motor_names"]
start_pos = calibration_dict["start_pos"]
# Open the write lock, changes to EEPROM do NOT persist yet
motorsbus.write("Lock", 1)
# For each motor, set the 'Offset' parameter
for m_name, old_offset in zip(motor_names, homing_offsets, strict=False):
# If bus doesnt have a motor named m_name, skip
if m_name not in motorsbus.motors:
print(f"Warning: '{m_name}' not found in motorsbus.motors; skipping offset.")
continue
if m_name == "gripper":
old_offset = start_pos # If gripper set the offset to the start position of the gripper
continue
# Shift the offset so the homed position reads 2047
new_offset = old_offset - 2047
# Clamp to [-2047..+2047]
if new_offset > 2047:
new_offset = 2047
print(
f"Warning: '{new_offset}' is getting clamped because its larger then 2047; This should not happen!"
)
elif new_offset < -2047:
new_offset = -2047
print(
f"Warning: '{new_offset}' is getting clamped because its smaller then -2047; This should not happen!"
)
# Determine the direction (sign) bit and magnitude
direction_bit = 1 if new_offset < 0 else 0
magnitude = abs(new_offset)
# Combine sign bit (bit 11) with the magnitude (bits 0..10)
servo_offset = (direction_bit << 11) | magnitude
# Write offset to servo
motorsbus.write("Offset", servo_offset, motor_names=m_name)
print(
f"Set offset for {m_name}: "
f"old_offset={old_offset}, new_offset={new_offset}, servo_encoded={magnitude} + direction={direction_bit}"
)
motorsbus.write("Lock", 0)
print("Offsets have been saved to EEPROM successfully.")
class ManipulatorRobot:
# TODO(rcadene): Implement force feedback
"""This class allows to control any manipulator robot of various number of motors.
@ -347,8 +279,6 @@ class ManipulatorRobot:
for name in self.leader_arms:
self.leader_arms[name].write("Torque_Enable", TorqueMode.DISABLED.value)
self.activate_calibration()
# Set robot preset (e.g. torque in leader gripper for Koch v1.1)
if self.robot_type in ["koch", "koch_bimanual"]:
self.set_koch_robot_preset()
@ -385,61 +315,6 @@ class ManipulatorRobot:
self.is_connected = True
def activate_calibration(self):
"""After calibration all motors function in human interpretable ranges.
Rotations are expressed in degrees in nominal range of [-180, 180],
and linear motions (like gripper of Aloha) in nominal range of [0, 100].
"""
def load_or_run_calibration_(name, arm, arm_type):
arm_id = get_arm_id(name, arm_type)
arm_calib_path = self.calibration_dir / f"{arm_id}.json"
if arm_calib_path.exists():
with open(arm_calib_path) as f:
calibration = json.load(f)
else:
# TODO(rcadene): display a warning in __init__ if calibration file not available
print(f"Missing calibration file '{arm_calib_path}'")
if self.robot_type in ["koch", "koch_bimanual", "aloha"]:
from lerobot.common.motors.dynamixel.dynamixel_calibration import run_arm_calibration
calibration = run_arm_calibration(arm, self.robot_type, name, arm_type)
elif self.robot_type in ["so100", "moss", "lekiwi"]:
from lerobot.common.motors.feetech.feetech_calibration import (
run_full_arm_calibration,
)
calibration = run_full_arm_calibration(arm, self.robot_type, name, arm_type)
print(f"Calibration is done! Saving calibration file '{arm_calib_path}'")
arm_calib_path.parent.mkdir(parents=True, exist_ok=True)
with open(arm_calib_path, "w") as f:
json.dump(calibration, f)
return calibration
# For each follower arm
for name, arm_bus in self.follower_arms.items():
calibration = load_or_run_calibration_(name, arm_bus, "follower")
arm_bus.set_calibration(calibration)
# If this is a Feetech robot, also set the servo offset into EEPROM
if self.robot_type in ["so100", "lekiwi"]:
apply_feetech_offsets_from_calibration(arm_bus, calibration)
# For each leader arm
for name, arm_bus in self.leader_arms.items():
calibration = load_or_run_calibration_(name, arm_bus, "leader")
arm_bus.set_calibration(calibration)
# Optionally also set offset for leader if you want the servo offsets as well
if self.robot_type in ["so100", "lekiwi"]:
apply_feetech_offsets_from_calibration(arm_bus, calibration)
def set_koch_robot_preset(self):
def set_operating_mode_(arm):
from lerobot.common.motors.dynamixel.dynamixel import TorqueMode
@ -540,9 +415,6 @@ class ManipulatorRobot:
# Set I_Coefficient and D_Coefficient to default value 0 and 32
self.follower_arms[name].write("I_Coefficient", 0)
self.follower_arms[name].write("D_Coefficient", 32)
# Close the write lock so that Maximum_Acceleration gets written to EPROM address,
# which is mandatory for Maximum_Acceleration to take effect after rebooting.
self.follower_arms[name].write("Lock", 0)
# Set Maximum_Acceleration to 254 to speedup acceleration and deceleration of
# the motors. Note: this configuration is not in the official STS3215 Memory Table
self.follower_arms[name].write("Maximum_Acceleration", 254)

View File

@ -33,14 +33,7 @@ git clone https://github.com/huggingface/lerobot.git ~/lerobot
5. Install LeRobot with dependencies for the feetech motors:
```bash
cd ~/lerobot && pip install -e ".[feetech]"
```
For Linux only (not Mac), install extra dependencies for recording datasets:
```bash
conda install -y -c conda-forge ffmpeg
pip uninstall -y opencv-python
conda install -y -c conda-forge "opencv>=4.10.0"
cd ~/lerobot && pip install --no-binary=av -e ".[feetech]"
```
## Configure the motors

View File

@ -1,7 +1,11 @@
import abc
from pathlib import Path
from typing import Any
import draccus
from lerobot.common.constants import HF_LEROBOT_CALIBRATION, ROBOTS
from lerobot.common.motors import MotorCalibration
from .config import RobotConfig
@ -23,6 +27,12 @@ class Robot(abc.ABC):
)
self.calibration_dir.mkdir(parents=True, exist_ok=True)
self.calibration_fpath = self.calibration_dir / f"{self.id}.json"
self.calibration: dict[str, MotorCalibration] = {}
if self.calibration_fpath.is_file():
self._load_calibration()
def __str__(self) -> str:
return f"{self.id} {self.__class__.__name__}"
# TODO(aliberts): create a proper Feature class for this that links with datasets
@abc.abstractproperty
@ -37,16 +47,38 @@ class Robot(abc.ABC):
def camera_features(self) -> dict[str, dict]:
pass
@abc.abstractproperty
def is_connected(self) -> bool:
pass
@abc.abstractmethod
def connect(self) -> None:
"""Connects to the robot."""
pass
@abc.abstractproperty
def is_calibrated(self) -> bool:
pass
@abc.abstractmethod
def calibrate(self) -> None:
"""Calibrates the robot."""
pass
def _load_calibration(self, fpath: Path | None = None) -> None:
fpath = self.calibration_fpath if fpath is None else fpath
with open(fpath) as f, draccus.config_type("json"):
self.calibration = draccus.load(dict[str, MotorCalibration], f)
def _save_calibration(self, fpath: Path | None = None) -> None:
fpath = self.calibration_fpath if fpath is None else fpath
with open(fpath, "w") as f, draccus.config_type("json"):
draccus.dump(self.calibration, f, indent=4)
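# For reference, with the MotorCalibration fields used elsewhere in this diff, the saved JSON is
# expected to look roughly like the example below. The exact serialization is up to draccus and the
# values are made up; treat this as an illustration, not a spec.
#
#     {
#         "shoulder_pan": {"id": 1, "drive_mode": 0, "homing_offset": -1024, "range_min": 0, "range_max": 4095},
#         "gripper": {"id": 6, "drive_mode": 0, "homing_offset": 0, "range_min": 1100, "range_max": 2300}
#     }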
@abc.abstractmethod
def configure(self) -> None:
pass
@abc.abstractmethod
def get_observation(self) -> dict[str, Any]:
"""Gets observation from the robot."""
@ -61,7 +93,3 @@ class Robot(abc.ABC):
def disconnect(self) -> None:
"""Disconnects from the robot."""
pass
def __del__(self):
if getattr(self, "is_connected", False):
self.disconnect()

View File

@ -59,15 +59,9 @@ git clone https://github.com/huggingface/lerobot.git ~/lerobot
#### 5. Install LeRobot with dependencies for the feetech motors:
```bash
cd ~/lerobot && pip install -e ".[feetech]"
cd ~/lerobot && pip install --no-binary=av -e ".[feetech]"
```
*EXTRA: For Linux only (not Mac)*: install extra dependencies for recording datasets:
```bash
conda install -y -c conda-forge ffmpeg
pip uninstall -y opencv-python
conda install -y -c conda-forge "opencv>=4.10.0"
```
Great :hugs:! You are now done installing LeRobot and we can begin assembling the SO100 arms :robot:.
Every time you now want to use LeRobot you can go to the `~/lerobot` folder where we installed LeRobot and run one of the commands.

View File

@ -1,4 +1,2 @@
from .configuration_so100 import SO100RobotConfig
from .robot_so100 import SO100Robot
__all__ = ["SO100RobotConfig", "SO100Robot"]
from .config_so100_follower import SO100FollowerConfig
from .so100_follower import SO100Follower

View File

@ -5,12 +5,14 @@ from lerobot.common.cameras import CameraConfig
from ..config import RobotConfig
@RobotConfig.register_subclass("so100")
@RobotConfig.register_subclass("so100_follower")
@dataclass
class SO100RobotConfig(RobotConfig):
# Port to connect to the robot
class SO100FollowerConfig(RobotConfig):
# Port to connect to the arm
port: str
disable_torque_on_disconnect: bool = True
# `max_relative_target` limits the magnitude of the relative positional target vector for safety purposes.
# Set this to a positive scalar to have the same value for all motors, or a list that is the same length as
# the number of motors in your follower arms.

View File

@ -1,223 +0,0 @@
#!/usr/bin/env python
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
import time
import numpy as np
from lerobot.common.cameras.utils import make_cameras_from_configs
from lerobot.common.constants import OBS_IMAGES, OBS_STATE
from lerobot.common.errors import DeviceAlreadyConnectedError, DeviceNotConnectedError
from lerobot.common.motors import TorqueMode
from lerobot.common.motors.feetech import (
FeetechMotorsBus,
apply_feetech_offsets_from_calibration,
run_full_arm_calibration,
)
from ..robot import Robot
from ..utils import ensure_safe_goal_position
from .configuration_so100 import SO100RobotConfig
class SO100Robot(Robot):
"""
[SO-100 Follower Arm](https://github.com/TheRobotStudio/SO-ARM100) designed by TheRobotStudio
"""
config_class = SO100RobotConfig
name = "so100"
def __init__(self, config: SO100RobotConfig):
super().__init__(config)
self.config = config
self.robot_type = config.type
self.arm = FeetechMotorsBus(
port=self.config.port,
motors={
"shoulder_pan": (1, "sts3215"),
"shoulder_lift": (2, "sts3215"),
"elbow_flex": (3, "sts3215"),
"wrist_flex": (4, "sts3215"),
"wrist_roll": (5, "sts3215"),
"gripper": (6, "sts3215"),
},
)
self.cameras = make_cameras_from_configs(config.cameras)
self.is_connected = False
self.logs = {}
@property
def state_feature(self) -> dict:
return {
"dtype": "float32",
"shape": (len(self.arm),),
"names": {"motors": list(self.arm.motors)},
}
@property
def action_feature(self) -> dict:
return self.state_feature
@property
def camera_features(self) -> dict[str, dict]:
cam_ft = {}
for cam_key, cam in self.cameras.items():
cam_ft[cam_key] = {
"shape": (cam.height, cam.width, cam.channels),
"names": ["height", "width", "channels"],
"info": None,
}
return cam_ft
def connect(self) -> None:
if self.is_connected:
raise DeviceAlreadyConnectedError(
"ManipulatorRobot is already connected. Do not run `robot.connect()` twice."
)
logging.info("Connecting arm.")
self.arm.connect()
# We assume that at connection time, arm is in a rest position,
# and torque can be safely disabled to run calibration.
self.arm.write("Torque_Enable", TorqueMode.DISABLED.value)
self.calibrate()
# Mode=0 for Position Control
self.arm.write("Mode", 0)
# Set P_Coefficient to lower value to avoid shakiness (Default is 32)
self.arm.write("P_Coefficient", 16)
# Set I_Coefficient and D_Coefficient to default value 0 and 32
self.arm.write("I_Coefficient", 0)
self.arm.write("D_Coefficient", 32)
# Close the write lock so that Maximum_Acceleration gets written to EPROM address,
# which is mandatory for Maximum_Acceleration to take effect after rebooting.
self.arm.write("Lock", 0)
# Set Maximum_Acceleration to 254 to speedup acceleration and deceleration of
# the motors. Note: this configuration is not in the official STS3215 Memory Table
self.arm.write("Maximum_Acceleration", 254)
self.arm.write("Acceleration", 254)
logging.info("Activating torque.")
self.arm.write("Torque_Enable", TorqueMode.ENABLED.value)
# Check arm can be read
self.arm.read("Present_Position")
# Connect the cameras
for cam in self.cameras.values():
cam.connect()
self.is_connected = True
def calibrate(self) -> None:
"""After calibration all motors function in human interpretable ranges.
Rotations are expressed in degrees in nominal range of [-180, 180],
and linear motions (like gripper of Aloha) in nominal range of [0, 100].
"""
if self.calibration_fpath.exists():
with open(self.calibration_fpath) as f:
calibration = json.load(f)
else:
# TODO(rcadene): display a warning in __init__ if calibration file not available
logging.info(f"Missing calibration file '{self.calibration_fpath}'")
calibration = run_full_arm_calibration(self.arm, self.robot_type, self.name, "follower")
logging.info(f"Calibration is done! Saving calibration file '{self.calibration_fpath}'")
self.calibration_fpath.parent.mkdir(parents=True, exist_ok=True)
with open(self.calibration_fpath, "w") as f:
json.dump(calibration, f)
self.arm.set_calibration(calibration)
apply_feetech_offsets_from_calibration(self.arm, calibration)
def get_observation(self) -> dict[str, np.ndarray]:
"""The returned observations do not have a batch dimension."""
if not self.is_connected:
raise DeviceNotConnectedError(
"ManipulatorRobot is not connected. You need to run `robot.connect()`."
)
obs_dict = {}
# Read arm position
before_read_t = time.perf_counter()
obs_dict[OBS_STATE] = self.arm.read("Present_Position")
self.logs["read_pos_dt_s"] = time.perf_counter() - before_read_t
# Capture images from cameras
for cam_key, cam in self.cameras.items():
before_camread_t = time.perf_counter()
obs_dict[f"{OBS_IMAGES}.{cam_key}"] = cam.async_read()
self.logs[f"read_camera_{cam_key}_dt_s"] = cam.logs["delta_timestamp_s"]
self.logs[f"async_read_camera_{cam_key}_dt_s"] = time.perf_counter() - before_camread_t
return obs_dict
def send_action(self, action: np.ndarray) -> np.ndarray:
"""Command arm to move to a target joint configuration.
The relative action magnitude may be clipped depending on the configuration parameter
`max_relative_target`. In this case, the action sent differs from original action.
Thus, this function always returns the action actually sent.
Args:
action (np.ndarray): array containing the goal positions for the motors.
Raises:
RobotDeviceNotConnectedError: if robot is not connected.
Returns:
np.ndarray: the action sent to the motors, potentially clipped.
"""
if not self.is_connected:
raise DeviceNotConnectedError(
"ManipulatorRobot is not connected. You need to run `robot.connect()`."
)
goal_pos = action
# Cap goal position when too far away from present position.
# /!\ Slower fps expected due to reading from the follower.
if self.config.max_relative_target is not None:
present_pos = self.arm.read("Present_Position")
goal_pos = ensure_safe_goal_position(goal_pos, present_pos, self.config.max_relative_target)
# Send goal position to the arm
self.arm.write("Goal_Position", goal_pos.astype(np.int32))
return goal_pos
def print_logs(self):
# TODO(aliberts): move robot-specific logs logic here
pass
def disconnect(self):
if not self.is_connected:
raise DeviceNotConnectedError(
"ManipulatorRobot is not connected. You need to run `robot.connect()` before disconnecting."
)
self.arm.disconnect()
for cam in self.cameras.values():
cam.disconnect()
self.is_connected = False

View File

@ -0,0 +1,220 @@
#!/usr/bin/env python
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import time
from typing import Any
from lerobot.common.cameras.utils import make_cameras_from_configs
from lerobot.common.constants import OBS_IMAGES, OBS_STATE
from lerobot.common.errors import DeviceAlreadyConnectedError, DeviceNotConnectedError
from lerobot.common.motors import Motor, MotorCalibration, MotorNormMode
from lerobot.common.motors.feetech import (
FeetechMotorsBus,
OperatingMode,
)
from ..robot import Robot
from ..utils import ensure_safe_goal_position
from .config_so100_follower import SO100FollowerConfig
logger = logging.getLogger(__name__)
class SO100Follower(Robot):
"""
[SO-100 Follower Arm](https://github.com/TheRobotStudio/SO-ARM100) designed by TheRobotStudio
"""
config_class = SO100FollowerConfig
name = "so100_follower"
def __init__(self, config: SO100FollowerConfig):
super().__init__(config)
self.config = config
self.arm = FeetechMotorsBus(
port=self.config.port,
motors={
"shoulder_pan": Motor(1, "sts3215", MotorNormMode.RANGE_M100_100),
"shoulder_lift": Motor(2, "sts3215", MotorNormMode.RANGE_M100_100),
"elbow_flex": Motor(3, "sts3215", MotorNormMode.RANGE_M100_100),
"wrist_flex": Motor(4, "sts3215", MotorNormMode.RANGE_M100_100),
"wrist_roll": Motor(5, "sts3215", MotorNormMode.RANGE_M100_100),
"gripper": Motor(6, "sts3215", MotorNormMode.RANGE_0_100),
},
)
self.cameras = make_cameras_from_configs(config.cameras)
@property
def state_feature(self) -> dict:
return {
"dtype": "float32",
"shape": (len(self.arm),),
"names": {"motors": list(self.arm.motors)},
}
@property
def action_feature(self) -> dict:
return self.state_feature
@property
def camera_features(self) -> dict[str, dict]:
cam_ft = {}
for cam_key, cam in self.cameras.items():
cam_ft[cam_key] = {
"shape": (cam.height, cam.width, cam.channels),
"names": ["height", "width", "channels"],
"info": None,
}
return cam_ft
@property
def is_connected(self) -> bool:
# TODO(aliberts): add cam.is_connected for cam in self.cameras
return self.arm.is_connected
def connect(self) -> None:
"""
We assume that at connection time, arm is in a rest position,
and torque can be safely disabled to run calibration.
"""
if self.is_connected:
raise DeviceAlreadyConnectedError(f"{self} already connected")
self.arm.connect()
if not self.is_calibrated:
self.calibrate()
# Connect the cameras
for cam in self.cameras.values():
cam.connect()
self.configure()
logger.info(f"{self} connected.")
@property
def is_calibrated(self) -> bool:
return self.arm.is_calibrated
def calibrate(self) -> None:
logger.info(f"\nRunning calibration of {self}")
self.arm.disable_torque()
for name in self.arm.names:
self.arm.write("Operating_Mode", name, OperatingMode.POSITION.value)
input("Move robot to the middle of its range of motion and press ENTER....")
homing_offsets = self.arm.set_half_turn_homings()
full_turn_motor = "wrist_roll"
unknown_range_motors = [name for name in self.arm.names if name != full_turn_motor]
logger.info(
f"Move all joints except '{full_turn_motor}' sequentially through their "
"entire ranges of motion.\nRecording positions. Press ENTER to stop..."
)
range_mins, range_maxes = self.arm.record_ranges_of_motion(unknown_range_motors)
range_mins[full_turn_motor] = 0
range_maxes[full_turn_motor] = 4095
self.calibration = {}
for name, motor in self.arm.motors.items():
self.calibration[name] = MotorCalibration(
id=motor.id,
drive_mode=0,
homing_offset=homing_offsets[name],
range_min=range_mins[name],
range_max=range_maxes[name],
)
self.arm.write_calibration(self.calibration)
self._save_calibration()
print("Calibration saved to", self.calibration_fpath)
def configure(self) -> None:
self.arm.disable_torque()
self.arm.configure_motors()
for name in self.arm.names:
self.arm.write("Operating_Mode", name, OperatingMode.POSITION.value)
# Set P_Coefficient to lower value to avoid shakiness (Default is 32)
self.arm.write("P_Coefficient", name, 16)
# Set I_Coefficient and D_Coefficient to default value 0 and 32
self.arm.write("I_Coefficient", name, 0)
self.arm.write("D_Coefficient", name, 32)
# Set Maximum_Acceleration to 254 to speedup acceleration and deceleration of
# the motors. Note: this address is not in the official STS3215 Memory Table
self.arm.write("Maximum_Acceleration", name, 254)
self.arm.write("Acceleration", name, 254)
self.arm.enable_torque()
def get_observation(self) -> dict[str, Any]:
if not self.is_connected:
raise DeviceNotConnectedError(f"{self} is not connected.")
obs_dict = {}
# Read arm position
start = time.perf_counter()
obs_dict[OBS_STATE] = self.arm.sync_read("Present_Position")
dt_ms = (time.perf_counter() - start) * 1e3
logger.debug(f"{self} read state: {dt_ms:.1f}ms")
# Capture images from cameras
for cam_key, cam in self.cameras.items():
start = time.perf_counter()
obs_dict[f"{OBS_IMAGES}.{cam_key}"] = cam.async_read()
dt_ms = (time.perf_counter() - start) * 1e3
logger.debug(f"{self} read {cam_key}: {dt_ms:.1f}ms")
return obs_dict
def send_action(self, action: dict[str, Any]) -> dict[str, Any]:
"""Command arm to move to a target joint configuration.
The relative action magnitude may be clipped depending on the configuration parameter
`max_relative_target`. In this case, the action sent differs from the original action.
Thus, this function always returns the action actually sent.
Raises:
RobotDeviceNotConnectedError: if robot is not connected.
Returns:
the action sent to the motors, potentially clipped.
"""
if not self.is_connected:
raise DeviceNotConnectedError(f"{self} is not connected.")
goal_pos = action
# Cap goal position when too far away from present position.
# /!\ Slower fps expected due to reading from the follower.
if self.config.max_relative_target is not None:
present_pos = self.arm.sync_read("Present_Position")
goal_present_pos = {key: (g_pos, present_pos[key]) for key, g_pos in goal_pos.items()}
goal_pos = ensure_safe_goal_position(goal_present_pos, self.config.max_relative_target)
# Send goal position to the arm
self.arm.sync_write("Goal_Position", goal_pos)
return goal_pos
def disconnect(self):
if not self.is_connected:
raise DeviceNotConnectedError(f"{self} is not connected.")
self.arm.disconnect(self.config.disable_torque_on_disconnect)
for cam in self.cameras.values():
cam.disconnect()
logger.info(f"{self} disconnected.")

View File

@ -45,18 +45,11 @@ git clone https://github.com/huggingface/lerobot.git ~/lerobot
6. Install LeRobot with stretch dependencies:
```bash
cd ~/lerobot && pip install -e ".[stretch]"
cd ~/lerobot && pip install --no-binary=av -e ".[stretch]"
```
> **Note:** If you get this message, you can ignore it: `ERROR: pip's dependency resolver does not currently take into account all the packages that are installed.`
For Linux only (not Mac), install extra dependencies for recording datasets:
```bash
conda install -y -c conda-forge ffmpeg
pip uninstall -y opencv-python
conda install -y -c conda-forge "opencv>=4.10.0"
```
7. Run a [system check](https://docs.hello-robot.com/0.3/getting_started/stretch_hardware_overview/#system-check) to make sure your robot is ready:
```bash
stretch_system_check.py

View File

@ -1,8 +1,7 @@
import logging
from pprint import pformat
from typing import Protocol
import numpy as np
from lerobot.common.robots import RobotConfig
@ -31,20 +30,20 @@ def make_robot_config(robot_type: str, **kwargs) -> RobotConfig:
from .aloha.configuration_aloha import AlohaRobotConfig
return AlohaRobotConfig(**kwargs)
elif robot_type == "koch":
from .koch.configuration_koch import KochRobotConfig
elif robot_type == "koch_follower":
from .koch.config_koch_follower import KochFollowerConfig
return KochRobotConfig(**kwargs)
return KochFollowerConfig(**kwargs)
# elif robot_type == "koch_bimanual":
# return KochBimanualRobotConfig(**kwargs)
elif robot_type == "moss":
from .moss.configuration_moss import MossRobotConfig
return MossRobotConfig(**kwargs)
elif robot_type == "so100":
from .so100.configuration_so100 import SO100RobotConfig
elif robot_type == "so100_leader":
from .so100.config_so100_follower import SO100FollowerConfig
return SO100RobotConfig(**kwargs)
return SO100FollowerConfig(**kwargs)
elif robot_type == "stretch":
from .stretch3.configuration_stretch3 import Stretch3RobotConfig
@ -81,20 +80,38 @@ def make_robot(robot_type: str, **kwargs) -> Robot:
def ensure_safe_goal_position(
goal_pos: np.ndarray, present_pos: np.ndarray, max_relative_target: float | list[float]
):
# Cap relative action target magnitude for safety.
diff = goal_pos - present_pos
max_relative_target = np.array(max_relative_target)
safe_diff = np.min(diff, max_relative_target)
safe_diff = np.max(safe_diff, -max_relative_target)
safe_goal_pos = present_pos + safe_diff
goal_present_pos: dict[str, tuple[float, float]], max_relative_target: float | dict[str, float]
) -> dict[str, float]:
"""Caps relative action target magnitude for safety."""
if not np.allclose(goal_pos, safe_goal_pos):
if isinstance(max_relative_target, float):
diff_cap = {key: max_relative_target for key in goal_present_pos}
elif isinstance(max_relative_target, dict):
if not set(goal_present_pos) == set(max_relative_target):
raise ValueError("max_relative_target keys must match those of goal_present_pos.")
diff_cap = max_relative_target
else:
raise TypeError(max_relative_target)
warnings_dict = {}
safe_goal_positions = {}
for key, (goal_pos, present_pos) in goal_present_pos.items():
diff = goal_pos - present_pos
max_diff = diff_cap[key]
safe_diff = min(diff, max_diff)
safe_diff = max(safe_diff, -max_diff)
safe_goal_pos = present_pos + safe_diff
safe_goal_positions[key] = safe_goal_pos
if abs(safe_goal_pos - goal_pos) > 1e-4:
warnings_dict[key] = {
"original goal_pos": goal_pos,
"safe goal_pos": safe_goal_pos,
}
if warnings_dict:
logging.warning(
"Relative goal position magnitude had to be clamped to be safe.\n"
f" requested relative goal position target: {diff}\n"
f" clamped relative goal position target: {safe_diff}"
f"{pformat(warnings_dict, indent=4)}"
)
return safe_goal_pos
return safe_goal_positions
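A quick worked example of the new dict-based signature: with a goal of 90.0, a present position of 10.0 and `max_relative_target=30.0`, the requested move of +80.0 is clamped to +30.0, so the safe goal becomes 40.0 (values chosen for illustration; the import path follows this diff's layout):
```python
from lerobot.common.robots.utils import ensure_safe_goal_position

goal_present_pos = {
    "elbow_flex": (90.0, 10.0),  # (goal, present): asks for a +80.0 jump
    "gripper": (55.0, 50.0),     # small move, left untouched
}
safe = ensure_safe_goal_position(goal_present_pos, max_relative_target=30.0)
# safe == {"elbow_flex": 40.0, "gripper": 55.0}; a warning is logged for the clamped joint
```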

View File

@ -32,14 +32,7 @@ git clone https://github.com/huggingface/lerobot.git ~/lerobot
5. Install LeRobot with dependencies for the Aloha motors (dynamixel) and cameras (intelrealsense):
```bash
cd ~/lerobot && pip install -e ".[dynamixel, intelrealsense]"
```
For Linux only (not Mac), install extra dependencies for recording datasets:
```bash
conda install -y -c conda-forge ffmpeg
pip uninstall -y opencv-python
conda install -y -c conda-forge "opencv>=4.10.0"
cd ~/lerobot && pip install --no-binary=av -e ".[dynamixel, intelrealsense]"
```
## Teleoperate

View File

@ -0,0 +1,2 @@
from .config_viperx import ViperXConfig
from .viperx import ViperX

View File

@ -7,7 +7,11 @@ from ..config import RobotConfig
@RobotConfig.register_subclass("viperx")
@dataclass
class ViperXRobotConfig(RobotConfig):
class ViperXConfig(RobotConfig):
port: str # Port to connect to the arm
disable_torque_on_disconnect: bool = True
# /!\ FOR SAFETY, READ THIS /!\
# `max_relative_target` limits the magnitude of the relative positional target vector for safety purposes.
# Set this to a positive scalar to have the same value for all motors, or a list that is the same length as
@ -20,20 +24,8 @@ class ViperXRobotConfig(RobotConfig):
# then to gradually add more motors (by uncommenting), until you can teleoperate both arms fully
max_relative_target: int | None = 5
waist: tuple = (1, "xm540-w270")
shoulder: tuple = (2, "xm540-w270")
shoulder_shadow: tuple = (3, "xm540-w270")
elbow: tuple = (4, "xm540-w270")
elbow_shadow: tuple = (5, "xm540-w270")
forearm_roll: tuple = (6, "xm540-w270")
wrist_angle: tuple = (7, "xm540-w270")
wrist_rotate: tuple = (8, "xm430-w350")
gripper: tuple = (9, "xm430-w350")
# cameras
cameras: dict[str, CameraConfig] = field(default_factory=dict)
# Troubleshooting: If one of your IntelRealSense cameras freeze during
# data recording due to bandwidth limit, you might need to plug the camera
# on another USB hub or PCIe card.
mock: bool = False
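For reference, a minimal instantiation of the renamed config could look like the sketch below (the port value is a placeholder; the other fields keep the defaults shown above):
```python
from lerobot.common.robots.viperx import ViperXConfig

config = ViperXConfig(port="/dev/ttyDXL")  # placeholder port name
assert config.disable_torque_on_disconnect  # defaults to True
assert config.max_relative_target == 5
```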

View File

@ -1,238 +0,0 @@
"""Contains logic to instantiate a robot, read information from its motors and cameras,
and send orders to its motors.
"""
# TODO(rcadene, aliberts): reorganize the codebase into one file per robot, with the associated
# calibration procedure, to make it easy for people to add their own robot.
import json
import logging
import time
import numpy as np
from lerobot.common.cameras.utils import make_cameras_from_configs
from lerobot.common.constants import OBS_IMAGES, OBS_STATE
from lerobot.common.errors import DeviceAlreadyConnectedError, DeviceNotConnectedError
from lerobot.common.motors import TorqueMode
from lerobot.common.motors.dynamixel import (
DynamixelMotorsBus,
run_arm_calibration,
)
from ..robot import Robot
from ..utils import ensure_safe_goal_position
from .configuration_viperx import ViperXRobotConfig
class ViperXRobot(Robot):
"""
[ViperX](https://www.trossenrobotics.com/viperx-300) developed by Trossen Robotics
"""
config_class = ViperXRobotConfig
name = "viperx"
def __init__(
self,
config: ViperXRobotConfig,
):
super().__init__(config)
self.config = config
self.robot_type = config.type
self.arm = DynamixelMotorsBus(
port=self.config.port,
motors={
"waist": config.waist,
"shoulder": config.shoulder,
"shoulder_shadow": config.shoulder_shadow,
"elbow": config.elbow,
"elbow_shadow": config.elbow_shadow,
"forearm_roll": config.forearm_roll,
"wrist_angle": config.wrist_angle,
"wrist_rotate": config.wrist_rotate,
"gripper": config.gripper,
},
)
self.cameras = make_cameras_from_configs(config.cameras)
self.is_connected = False
self.logs = {}
@property
def state_feature(self) -> dict:
return {
"dtype": "float32",
"shape": (len(self.arm),),
"names": {"motors": list(self.arm.motors)},
}
@property
def action_feature(self) -> dict:
return self.state_feature
@property
def camera_features(self) -> dict[str, dict]:
cam_ft = {}
for cam_key, cam in self.cameras.items():
key = f"observation.images.{cam_key}"
cam_ft[key] = {
"shape": (cam.height, cam.width, cam.channels),
"names": ["height", "width", "channels"],
"info": None,
}
return cam_ft
def _set_shadow_motors(self):
"""
Set secondary/shadow ID for shoulder and elbow. These joints have two motors.
As a result, if only one of them is required to move to a certain position,
the other will follow. This is to avoid breaking the motors.
"""
shoulder_idx = self.config.shoulder[0]
self.arm.write("Secondary_ID", shoulder_idx, "shoulder_shadow")
elbow_idx = self.config.elbow[0]
self.arm.write("Secondary_ID", elbow_idx, "elbow_shadow")
def connect(self):
if self.is_connected:
raise DeviceAlreadyConnectedError(
"ManipulatorRobot is already connected. Do not run `robot.connect()` twice."
)
logging.info("Connecting arm.")
self.arm.connect()
# We assume that at connection time, arm is in a rest position,
# and torque can be safely disabled to run calibration.
self.arm.write("Torque_Enable", TorqueMode.DISABLED.value)
self.calibrate()
self._set_shadow_motors()
# Set a velocity limit of 131 as advised by Trossen Robotics
self.arm.write("Velocity_Limit", 131)
# Use 'extended position mode' for all motors except gripper, because in joint mode the servos can't
# rotate more than 360 degrees (from 0 to 4095) And some mistake can happen while assembling the arm,
# you could end up with a servo with a position 0 or 4095 at a crucial point See [
# https://emanual.robotis.com/docs/en/dxl/x/x_series/#operating-mode11]
all_motors_except_gripper = [name for name in self.arm.motor_names if name != "gripper"]
if len(all_motors_except_gripper) > 0:
# 4 corresponds to Extended Position on Aloha motors
self.arm.write("Operating_Mode", 4, all_motors_except_gripper)
# Use 'position control current based' for follower gripper to be limited by the limit of the current.
# It can grasp an object without forcing too much even tho,
# it's goal position is a complete grasp (both gripper fingers are ordered to join and reach a touch).
# 5 corresponds to Current Controlled Position on Aloha gripper follower "xm430-w350"
self.arm.write("Operating_Mode", 5, "gripper")
# Note: We can't enable torque on the leader gripper since "xc430-w150" doesn't have
# a Current Controlled Position mode.
logging.info("Activating torque.")
self.arm.write("Torque_Enable", TorqueMode.ENABLED.value)
# Check arm can be read
self.arm.read("Present_Position")
# Connect the cameras
for cam in self.cameras.values():
cam.connect()
self.is_connected = True
def calibrate(self):
"""After calibration all motors function in human interpretable ranges.
Rotations are expressed in degrees in nominal range of [-180, 180],
and linear motions (like gripper of Aloha) in nominal range of [0, 100].
"""
if self.calibration_fpath.exists():
with open(self.calibration_fpath) as f:
calibration = json.load(f)
else:
# TODO(rcadene): display a warning in __init__ if calibration file not available
logging.info(f"Missing calibration file '{self.calibration_fpath}'")
calibration = run_arm_calibration(self.arm, self.robot_type, self.name, "follower")
logging.info(f"Calibration is done! Saving calibration file '{self.calibration_fpath}'")
self.calibration_fpath.parent.mkdir(parents=True, exist_ok=True)
with open(self.calibration_fpath, "w") as f:
json.dump(calibration, f)
self.arm.set_calibration(calibration)
def get_observation(self) -> dict[str, np.ndarray]:
"""The returned observations do not have a batch dimension."""
if not self.is_connected:
raise DeviceNotConnectedError(
"ManipulatorRobot is not connected. You need to run `robot.connect()`."
)
obs_dict = {}
# Read arm position
before_read_t = time.perf_counter()
obs_dict[OBS_STATE] = self.arm.read("Present_Position")
self.logs["read_pos_dt_s"] = time.perf_counter() - before_read_t
# Capture images from cameras
for cam_key, cam in self.cameras.items():
before_camread_t = time.perf_counter()
obs_dict[f"{OBS_IMAGES}.{cam_key}"] = cam.async_read()
self.logs[f"read_camera_{cam_key}_dt_s"] = cam.logs["delta_timestamp_s"]
self.logs[f"async_read_camera_{cam_key}_dt_s"] = time.perf_counter() - before_camread_t
return obs_dict
def send_action(self, action: np.ndarray) -> np.ndarray:
"""Command arm to move to a target joint configuration.
The relative action magnitude may be clipped depending on the configuration parameter
`max_relative_target`. In this case, the action sent differs from the original action.
Thus, this function always returns the action actually sent.
Args:
action (np.ndarray): array containing the goal positions for the motors.
Raises:
RobotDeviceNotConnectedError: if robot is not connected.
Returns:
np.ndarray: the action sent to the motors, potentially clipped.
"""
if not self.is_connected:
raise DeviceNotConnectedError(
"ManipulatorRobot is not connected. You need to run `robot.connect()`."
)
goal_pos = action
# Cap goal position when too far away from present position.
# /!\ Slower fps expected due to reading from the follower.
if self.config.max_relative_target is not None:
present_pos = self.arm.read("Present_Position")
goal_pos = ensure_safe_goal_position(goal_pos, present_pos, self.config.max_relative_target)
# Send goal position to the arm
self.arm.write("Goal_Position", goal_pos.astype(np.int32))
return goal_pos
def print_logs(self):
# TODO(aliberts): move robot-specific logs logic here
pass
def disconnect(self):
if not self.is_connected:
raise DeviceNotConnectedError(
"ManipulatorRobot is not connected. You need to run `robot.connect()` before disconnecting."
)
self.arm.disconnect()
for cam in self.cameras.values():
cam.disconnect()
self.is_connected = False

View File

@ -0,0 +1,230 @@
"""Contains logic to instantiate a robot, read information from its motors and cameras,
and send orders to its motors.
"""
# TODO(rcadene, aliberts): reorganize the codebase into one file per robot, with the associated
# calibration procedure, to make it easy for people to add their own robot.
import logging
import time
from typing import Any
from lerobot.common.cameras.utils import make_cameras_from_configs
from lerobot.common.constants import OBS_IMAGES, OBS_STATE
from lerobot.common.errors import DeviceAlreadyConnectedError, DeviceNotConnectedError
from lerobot.common.motors import Motor, MotorCalibration, MotorNormMode
from lerobot.common.motors.dynamixel import (
DynamixelMotorsBus,
OperatingMode,
)
from ..robot import Robot
from ..utils import ensure_safe_goal_position
from .config_viperx import ViperXConfig
logger = logging.getLogger(__name__)
class ViperX(Robot):
"""
[ViperX](https://www.trossenrobotics.com/viperx-300) developed by Trossen Robotics
"""
config_class = ViperXConfig
name = "viperx"
def __init__(
self,
config: ViperXConfig,
):
super().__init__(config)
self.config = config
self.arm = DynamixelMotorsBus(
port=self.config.port,
motors={
"waist": Motor(1, "xm540-w270", MotorNormMode.RANGE_M100_100),
"shoulder": Motor(2, "xm540-w270", MotorNormMode.RANGE_M100_100),
"shoulder_shadow": Motor(3, "xm540-w270", MotorNormMode.RANGE_M100_100),
"elbow": Motor(4, "xm540-w270", MotorNormMode.RANGE_M100_100),
"elbow_shadow": Motor(5, "xm540-w270", MotorNormMode.RANGE_M100_100),
"forearm_roll": Motor(6, "xm540-w270", MotorNormMode.RANGE_M100_100),
"wrist_angle": Motor(7, "xm540-w270", MotorNormMode.RANGE_M100_100),
"wrist_rotate": Motor(8, "xm430-w350", MotorNormMode.RANGE_M100_100),
"gripper": Motor(9, "xm430-w350", MotorNormMode.RANGE_0_100),
},
)
self.cameras = make_cameras_from_configs(config.cameras)
@property
def state_feature(self) -> dict:
return {
"dtype": "float32",
"shape": (len(self.arm),),
"names": {"motors": list(self.arm.motors)},
}
@property
def action_feature(self) -> dict:
return self.state_feature
@property
def camera_features(self) -> dict[str, dict]:
cam_ft = {}
for cam_key, cam in self.cameras.items():
key = f"observation.images.{cam_key}"
cam_ft[key] = {
"shape": (cam.height, cam.width, cam.channels),
"names": ["height", "width", "channels"],
"info": None,
}
return cam_ft
@property
def is_connected(self) -> bool:
# TODO(aliberts): add cam.is_connected for cam in self.cameras
return self.arm.is_connected
def connect(self) -> None:
"""
We assume that at connection time, arm is in a rest position,
and torque can be safely disabled to run calibration.
"""
if self.is_connected:
raise DeviceAlreadyConnectedError(f"{self} already connected")
self.arm.connect()
if not self.is_calibrated:
self.calibrate()
for cam in self.cameras.values():
cam.connect()
self.configure()
logger.info(f"{self} connected.")
@property
def is_calibrated(self) -> bool:
return self.arm.is_calibrated
def calibrate(self) -> None:
raise NotImplementedError  # TODO(aliberts): adapt code below (copied from koch)
logger.info(f"\nRunning calibration of {self}")
self.arm.disable_torque()
for name in self.arm.names:
self.arm.write("Operating_Mode", name, OperatingMode.EXTENDED_POSITION.value)
input("Move robot to the middle of its range of motion and press ENTER....")
homing_offsets = self.arm.set_half_turn_homings()
full_turn_motors = ["shoulder_pan", "wrist_roll"]
unknown_range_motors = [name for name in self.arm.names if name not in full_turn_motors]
logger.info(
f"Move all joints except {full_turn_motors} sequentially through their entire "
"ranges of motion.\nRecording positions. Press ENTER to stop..."
)
range_mins, range_maxes = self.arm.record_ranges_of_motion(unknown_range_motors)
for name in full_turn_motors:
range_mins[name] = 0
range_maxes[name] = 4095
self.calibration = {}
for name, motor in self.arm.motors.items():
self.calibration[name] = MotorCalibration(
id=motor.id,
drive_mode=0,
homing_offset=homing_offsets[name],
range_min=range_mins[name],
range_max=range_maxes[name],
)
self.arm.write_calibration(self.calibration)
self._save_calibration()
logger.info(f"Calibration saved to {self.calibration_fpath}")
def configure(self) -> None:
self.arm.disable_torque()
self.arm.configure_motors()
# Set secondary/shadow ID for shoulder and elbow. These joints have two motors.
# As a result, if only one of them is required to move to a certain position,
# the other will follow. This is to avoid breaking the motors.
self.arm.write("Secondary_ID", "shoulder_shadow", 2)
self.arm.write("Secondary_ID", "elbow_shadow", 4)
# Set a velocity limit of 131 as advised by Trossen Robotics
# TODO(aliberts): remove as it's actually useless in position control
self.arm.write("Velocity_Limit", 131)
# Use 'extended position mode' for all motors except gripper, because in joint mode the servos can't
# rotate more than 360 degrees (from 0 to 4095). Also, some mistakes can happen while assembling the arm,
# and you could end up with a servo at position 0 or 4095 at a crucial point. See:
# https://emanual.robotis.com/docs/en/dxl/x/x_series/#operating-mode11
for name in self.arm.names:
if name != "gripper":
self.arm.write("Operating_Mode", name, OperatingMode.EXTENDED_POSITION.value)
# Use 'position control current based' for the follower gripper so that it is limited by the current limit.
# It can grasp an object without forcing too much, even though its goal position is a complete grasp
# (both gripper fingers are ordered to join and reach a touch).
self.arm.write("Operating_Mode", "gripper", OperatingMode.CURRENT_POSITION.value)
self.arm.enable_torque()
def get_observation(self) -> dict[str, Any]:
"""The returned observations do not have a batch dimension."""
if not self.is_connected:
raise DeviceNotConnectedError(f"{self} is not connected.")
obs_dict = {}
# Read arm position
start = time.perf_counter()
obs_dict[OBS_STATE] = self.arm.sync_read("Present_Position")
dt_ms = (time.perf_counter() - start) * 1e3
logger.debug(f"{self} read state: {dt_ms:.1f}ms")
# Capture images from cameras
for cam_key, cam in self.cameras.items():
start = time.perf_counter()
obs_dict[f"{OBS_IMAGES}.{cam_key}"] = cam.async_read()
dt_ms = (time.perf_counter() - start) * 1e3
logger.debug(f"{self} read {cam_key}: {dt_ms:.1f}ms")
return obs_dict
def send_action(self, action: dict[str, float]) -> dict[str, float]:
"""Command arm to move to a target joint configuration.
The relative action magnitude may be clipped depending on the configuration parameter
`max_relative_target`. In this case, the action sent differs from the original action.
Thus, this function always returns the action actually sent.
Args:
action (dict[str, float]): The goal positions for the motors.
Returns:
dict[str, float]: The action sent to the motors, potentially clipped.
"""
if not self.is_connected:
raise DeviceNotConnectedError(f"{self} is not connected.")
goal_pos = action
# Cap goal position when too far away from present position.
# /!\ Slower fps expected due to reading from the follower.
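# Illustrative example (values are made up): with max_relative_target=5 and a present position
# of 30 for a joint, a requested goal of 50 would be clipped to 35 before being sent.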
if self.config.max_relative_target is not None:
present_pos = self.arm.sync_read("Present_Position")
goal_present_pos = {key: (g_pos, present_pos[key]) for key, g_pos in goal_pos.items()}
goal_pos = ensure_safe_goal_position(goal_present_pos, self.config.max_relative_target)
# Send goal position to the arm
self.arm.sync_write("Goal_Position", goal_pos)
return goal_pos
def disconnect(self):
if not self.is_connected:
raise DeviceNotConnectedError(f"{self} is not connected.")
self.arm.disconnect(self.config.disable_torque_on_disconnect)
for cam in self.cameras.values():
cam.disconnect()
logger.info(f"{self} disconnected.")

View File

@ -1,4 +1,2 @@
from .configuration_koch import KochTeleopConfig
from .teleop_koch import KochTeleop
__all__ = ["KochTeleopConfig", "KochTeleop"]
from .config_koch_leader import KochLeaderConfig
from .koch_leader import KochLeader

View File

@ -19,12 +19,12 @@ from dataclasses import dataclass
from ..config import TeleoperatorConfig
@TeleoperatorConfig.register_subclass("koch")
@TeleoperatorConfig.register_subclass("koch_leader")
@dataclass
class KochTeleopConfig(TeleoperatorConfig):
# Port to connect to the teleoperator
class KochLeaderConfig(TeleoperatorConfig):
# Port to connect to the arm
port: str
# Sets the arm in torque mode with the gripper motor set to this angle. This makes it possible
# to squeeze the gripper and have it spring back to an open position on its own.
gripper_open_degree: float = 35.156
# Sets the arm in torque mode with the gripper motor set to this value. This makes it possible to squeeze
# the gripper and have it spring back to an open position on its own.
gripper_open_pos: float = 50.0

View File

@ -0,0 +1,168 @@
#!/usr/bin/env python
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import time
from lerobot.common.errors import DeviceAlreadyConnectedError, DeviceNotConnectedError
from lerobot.common.motors import Motor, MotorCalibration, MotorNormMode
from lerobot.common.motors.dynamixel import (
DriveMode,
DynamixelMotorsBus,
OperatingMode,
)
from ..teleoperator import Teleoperator
from .config_koch_leader import KochLeaderConfig
logger = logging.getLogger(__name__)
class KochLeader(Teleoperator):
"""
- [Koch v1.0](https://github.com/AlexanderKoch-Koch/low_cost_robot), with and without the wrist-to-elbow
expansion, developed by Alexander Koch from [Tau Robotics](https://tau-robotics.com)
- [Koch v1.1](https://github.com/jess-moss/koch-v1-1) developed by Jess Moss
"""
config_class = KochLeaderConfig
name = "koch_leader"
def __init__(self, config: KochLeaderConfig):
super().__init__(config)
self.config = config
self.arm = DynamixelMotorsBus(
port=self.config.port,
motors={
"shoulder_pan": Motor(1, "xl330-m077", MotorNormMode.RANGE_M100_100),
"shoulder_lift": Motor(2, "xl330-m077", MotorNormMode.RANGE_M100_100),
"elbow_flex": Motor(3, "xl330-m077", MotorNormMode.RANGE_M100_100),
"wrist_flex": Motor(4, "xl330-m077", MotorNormMode.RANGE_M100_100),
"wrist_roll": Motor(5, "xl330-m077", MotorNormMode.RANGE_M100_100),
"gripper": Motor(6, "xl330-m077", MotorNormMode.RANGE_0_100),
},
calibration=self.calibration,
)
@property
def action_feature(self) -> dict:
return {
"dtype": "float32",
"shape": (len(self.arm),),
"names": {"motors": list(self.arm.motors)},
}
@property
def feedback_feature(self) -> dict:
return {}
@property
def is_connected(self) -> bool:
return self.arm.is_connected
def connect(self) -> None:
if self.is_connected:
raise DeviceAlreadyConnectedError(f"{self} already connected")
self.arm.connect()
if not self.is_calibrated:
self.calibrate()
self.configure()
logger.info(f"{self} connected.")
@property
def is_calibrated(self) -> bool:
return self.arm.is_calibrated
def calibrate(self) -> None:
logger.info(f"\nRunning calibration of {self}")
self.arm.disable_torque()
for name in self.arm.names:
self.arm.write("Operating_Mode", name, OperatingMode.EXTENDED_POSITION.value)
self.arm.write("Drive_Mode", "elbow_flex", DriveMode.INVERTED.value)
drive_modes = {name: 1 if name == "elbow_flex" else 0 for name in self.arm.names}
input("Move robot to the middle of its range of motion and press ENTER....")
homing_offsets = self.arm.set_half_turn_homings()
full_turn_motors = ["shoulder_pan", "wrist_roll"]
unknown_range_motors = [name for name in self.arm.names if name not in full_turn_motors]
logger.info(
f"Move all joints except {full_turn_motors} sequentially through their "
"entire ranges of motion.\nRecording positions. Press ENTER to stop..."
)
range_mins, range_maxes = self.arm.record_ranges_of_motion(unknown_range_motors)
for name in full_turn_motors:
range_mins[name] = 0
range_maxes[name] = 4095
self.calibration = {}
for name, motor in self.arm.motors.items():
self.calibration[name] = MotorCalibration(
id=motor.id,
drive_mode=drive_modes[name],
homing_offset=homing_offsets[name],
range_min=range_mins[name],
range_max=range_maxes[name],
)
self.arm.write_calibration(self.calibration)
self._save_calibration()
logger.info(f"Calibration saved to {self.calibration_fpath}")
def configure(self) -> None:
self.arm.disable_torque()
self.arm.configure_motors()
for name in self.arm.names:
if name != "gripper":
# Use 'extended position mode' for all motors except gripper, because in joint mode the servos
# can't rotate more than 360 degrees (from 0 to 4095). Also, some mistakes can happen while
# assembling the arm, and you could end up with a servo at position 0 or 4095 at a crucial
# point.
self.arm.write("Operating_Mode", name, OperatingMode.EXTENDED_POSITION.value)
# Use 'position control current based' for the gripper so that it is limited by the current limit.
# For the follower gripper, it means it can grasp an object without forcing too much, even though
# its goal position is a complete grasp (both gripper fingers are ordered to join and reach a touch).
# For the leader gripper, it means we can use it as a physical trigger, since we can force with our finger
# to make it move, and it will move back to its original target position when we release the force.
self.arm.write("Operating_Mode", "gripper", OperatingMode.CURRENT_POSITION.value)
# Set gripper's goal pos in current position mode so that we can use it as a trigger.
self.arm.enable_torque("gripper")
self.arm.write("Goal_Position", "gripper", self.config.gripper_open_pos)
def get_action(self) -> dict[str, float]:
if not self.is_connected:
raise DeviceNotConnectedError(f"{self} is not connected.")
start = time.perf_counter()
action = self.arm.sync_read("Present_Position")
dt_ms = (time.perf_counter() - start) * 1e3
logger.debug(f"{self} read action: {dt_ms:.1f}ms")
return action
def send_feedback(self, feedback: dict[str, float]) -> None:
# TODO(rcadene, aliberts): Implement force feedback
raise NotImplementedError
def disconnect(self) -> None:
if not self.is_connected:
raise DeviceNotConnectedError(f"{self} is not connected.")
self.arm.disconnect()
logger.info(f"{self} disconnected.")

View File

@ -1,157 +0,0 @@
#!/usr/bin/env python
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
import time
import numpy as np
from lerobot.common.errors import DeviceAlreadyConnectedError, DeviceNotConnectedError
from lerobot.common.motors import TorqueMode
from lerobot.common.motors.dynamixel import (
DynamixelMotorsBus,
run_arm_calibration,
)
from ..teleoperator import Teleoperator
from .configuration_koch import KochTeleopConfig
class KochTeleop(Teleoperator):
"""
- [Koch v1.0](https://github.com/AlexanderKoch-Koch/low_cost_robot), with and without the wrist-to-elbow
expansion, developed by Alexander Koch from [Tau Robotics](https://tau-robotics.com)
- [Koch v1.1](https://github.com/jess-moss/koch-v1-1) developed by Jess Moss
"""
config_class = KochTeleopConfig
name = "koch"
def __init__(self, config: KochTeleopConfig):
super().__init__(config)
self.config = config
self.robot_type = config.type
self.arm = DynamixelMotorsBus(
port=self.config.port,
motors={
"shoulder_pan": (1, "xl330-m077"),
"shoulder_lift": (2, "xl330-m077"),
"elbow_flex": (3, "xl330-m077"),
"wrist_flex": (4, "xl330-m077"),
"wrist_roll": (5, "xl330-m077"),
"gripper": (6, "xl330-m077"),
},
)
self.is_connected = False
self.logs = {}
@property
def action_feature(self) -> dict:
return {
"dtype": "float32",
"shape": (len(self.arm),),
"names": {"motors": list(self.arm.motors)},
}
@property
def feedback_feature(self) -> dict:
return {}
def connect(self) -> None:
if self.is_connected:
raise DeviceAlreadyConnectedError(
"ManipulatorRobot is already connected. Do not run `robot.connect()` twice."
)
logging.info("Connecting arm.")
self.arm.connect()
# We assume that at connection time, arm is in a rest position,
# and torque can be safely disabled to run calibration.
self.arm.write("Torque_Enable", TorqueMode.DISABLED.value)
self.calibrate()
# Use 'extended position mode' for all motors except gripper, because in joint mode the servos can't
# rotate more than 360 degrees (from 0 to 4095). Also, some mistakes can happen while assembling the arm,
# and you could end up with a servo at position 0 or 4095 at a crucial point. See:
# https://emanual.robotis.com/docs/en/dxl/x/x_series/#operating-mode11
all_motors_except_gripper = [name for name in self.arm.motor_names if name != "gripper"]
if len(all_motors_except_gripper) > 0:
# 4 corresponds to Extended Position on Koch motors
self.arm.write("Operating_Mode", 4, all_motors_except_gripper)
# Use 'position control current based' for the gripper so that it is limited by the current limit.
# For the follower gripper, it means it can grasp an object without forcing too much, even though
# its goal position is a complete grasp (both gripper fingers are ordered to join and reach a touch).
# For the leader gripper, it means we can use it as a physical trigger, since we can force with our finger
# to make it move, and it will move back to its original target position when we release the force.
# 5 corresponds to Current Controlled Position on Koch gripper motors "xl330-m077, xl330-m288"
self.arm.write("Operating_Mode", 5, "gripper")
# Enable torque on the gripper and move it to 45 degrees so that we can use it as a trigger.
logging.info("Activating torque.")
self.arm.write("Torque_Enable", TorqueMode.ENABLED.value, "gripper")
self.arm.write("Goal_Position", self.config.gripper_open_degree, "gripper")
# Check arm can be read
self.arm.read("Present_Position")
self.is_connected = True
def calibrate(self) -> None:
"""After calibration all motors function in human interpretable ranges.
Rotations are expressed in degrees in nominal range of [-180, 180],
and linear motions (like gripper of Aloha) in nominal range of [0, 100].
"""
if self.calibration_fpath.exists():
with open(self.calibration_fpath) as f:
calibration = json.load(f)
else:
# TODO(rcadene): display a warning in __init__ if calibration file not available
logging.info(f"Missing calibration file '{self.calibration_fpath}'")
calibration = run_arm_calibration(self.arm, self.robot_type, self.name, "leader")
logging.info(f"Calibration is done! Saving calibration file '{self.calibration_fpath}'")
self.calibration_fpath.parent.mkdir(parents=True, exist_ok=True)
with open(self.calibration_fpath, "w") as f:
json.dump(calibration, f)
self.arm.set_calibration(calibration)
def get_action(self) -> np.ndarray:
"""The returned action does not have a batch dimension."""
# Read arm position
before_read_t = time.perf_counter()
action = self.arm.read("Present_Position")
self.logs["read_pos_dt_s"] = time.perf_counter() - before_read_t
return action
def send_feedback(self, feedback: np.ndarray) -> None:
# TODO(rcadene, aliberts): Implement force feedback
pass
def disconnect(self) -> None:
if not self.is_connected:
raise DeviceNotConnectedError(
"ManipulatorRobot is not connected. You need to run `robot.connect()` before disconnecting."
)
self.arm.disconnect()
self.is_connected = False

View File

@ -1,4 +1,2 @@
from .configuration_so100 import SO100TeleopConfig
from .teleop_so100 import SO100Teleop
__all__ = ["SO100TeleopConfig", "SO100Teleop"]
from .config_so100_leader import SO100LeaderConfig
from .so100_leader import SO100Leader

View File

@ -19,8 +19,8 @@ from dataclasses import dataclass
from ..config import TeleoperatorConfig
@TeleoperatorConfig.register_subclass("so100")
@TeleoperatorConfig.register_subclass("so100_leader")
@dataclass
class SO100TeleopConfig(TeleoperatorConfig):
# Port to connect to the teleoperator
class SO100LeaderConfig(TeleoperatorConfig):
# Port to connect to the arm
port: str

View File

@ -0,0 +1,142 @@
#!/usr/bin/env python
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import time
from lerobot.common.errors import DeviceAlreadyConnectedError, DeviceNotConnectedError
from lerobot.common.motors import Motor, MotorCalibration, MotorNormMode
from lerobot.common.motors.feetech import (
FeetechMotorsBus,
OperatingMode,
)
from ..teleoperator import Teleoperator
from .config_so100_leader import SO100LeaderConfig
logger = logging.getLogger(__name__)
class SO100Leader(Teleoperator):
"""
[SO-100 Leader Arm](https://github.com/TheRobotStudio/SO-ARM100) designed by TheRobotStudio
"""
config_class = SO100LeaderConfig
name = "so100_leader"
def __init__(self, config: SO100LeaderConfig):
super().__init__(config)
self.config = config
self.arm = FeetechMotorsBus(
port=self.config.port,
motors={
"shoulder_pan": Motor(1, "sts3215", MotorNormMode.RANGE_M100_100),
"shoulder_lift": Motor(2, "sts3215", MotorNormMode.RANGE_M100_100),
"elbow_flex": Motor(3, "sts3215", MotorNormMode.RANGE_M100_100),
"wrist_flex": Motor(4, "sts3215", MotorNormMode.RANGE_M100_100),
"wrist_roll": Motor(5, "sts3215", MotorNormMode.RANGE_M100_100),
"gripper": Motor(6, "sts3215", MotorNormMode.RANGE_0_100),
},
)
@property
def action_feature(self) -> dict:
return {
"dtype": "float32",
"shape": (len(self.arm),),
"names": {"motors": list(self.arm.motors)},
}
@property
def feedback_feature(self) -> dict:
return {}
@property
def is_connected(self) -> bool:
return self.arm.is_connected
def connect(self) -> None:
if self.is_connected:
raise DeviceAlreadyConnectedError(f"{self} already connected")
self.arm.connect()
if not self.is_calibrated:
self.calibrate()
self.configure()
logger.info(f"{self} connected.")
@property
def is_calibrated(self) -> bool:
return self.arm.is_calibrated
def calibrate(self) -> None:
logger.info(f"\nRunning calibration of {self}")
self.arm.disable_torque()
for name in self.arm.names:
self.arm.write("Operating_Mode", name, OperatingMode.POSITION.value)
input("Move robot to the middle of its range of motion and press ENTER....")
homing_offsets = self.arm.set_half_turn_homings()
full_turn_motor = "wrist_roll"
unknown_range_motors = [name for name in self.arm.names if name != full_turn_motor]
logger.info(
f"Move all joints except '{full_turn_motor}' sequentially through their "
"entire ranges of motion.\nRecording positions. Press ENTER to stop..."
)
range_mins, range_maxes = self.arm.record_ranges_of_motion(unknown_range_motors)
range_mins[full_turn_motor] = 0
range_maxes[full_turn_motor] = 4095
self.calibration = {}
for name, motor in self.arm.motors.items():
self.calibration[name] = MotorCalibration(
id=motor.id,
drive_mode=0,
homing_offset=homing_offsets[name],
range_min=range_mins[name],
range_max=range_maxes[name],
)
self.arm.write_calibration(self.calibration)
self._save_calibration()
logger.info(f"Calibration saved to {self.calibration_fpath}")
def configure(self) -> None:
self.arm.disable_torque()
self.arm.configure_motors()
for name in self.arm.names:
self.arm.write("Operating_Mode", name, OperatingMode.POSITION.value)
def get_action(self) -> dict[str, float]:
start = time.perf_counter()
action = self.arm.sync_read("Present_Position")
dt_ms = (time.perf_counter() - start) * 1e3
logger.debug(f"{self} read action: {dt_ms:.1f}ms")
return action
def send_feedback(self, feedback: dict[str, float]) -> None:
# TODO(rcadene, aliberts): Implement force feedback
raise NotImplementedError
def disconnect(self) -> None:
if not self.is_connected:
DeviceNotConnectedError(f"{self} is not connected.")
self.arm.disconnect()
logger.info(f"{self} disconnected.")

View File

@ -1,135 +0,0 @@
#!/usr/bin/env python
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
import time
import numpy as np
from lerobot.common.errors import DeviceAlreadyConnectedError, DeviceNotConnectedError
from lerobot.common.motors import TorqueMode
from lerobot.common.motors.feetech import (
FeetechMotorsBus,
apply_feetech_offsets_from_calibration,
run_full_arm_calibration,
)
from ..teleoperator import Teleoperator
from .configuration_so100 import SO100TeleopConfig
class SO100Teleop(Teleoperator):
"""
[SO-100 Leader Arm](https://github.com/TheRobotStudio/SO-ARM100) designed by TheRobotStudio
"""
config_class = SO100TeleopConfig
name = "so100"
def __init__(self, config: SO100TeleopConfig):
super().__init__(config)
self.config = config
self.robot_type = config.type
self.arm = FeetechMotorsBus(
port=self.config.port,
motors={
"shoulder_pan": (1, "sts3215"),
"shoulder_lift": (2, "sts3215"),
"elbow_flex": (3, "sts3215"),
"wrist_flex": (4, "sts3215"),
"wrist_roll": (5, "sts3215"),
"gripper": (6, "sts3215"),
},
)
self.is_connected = False
self.logs = {}
@property
def action_feature(self) -> dict:
return {
"dtype": "float32",
"shape": (len(self.arm),),
"names": {"motors": list(self.arm.motors)},
}
@property
def feedback_feature(self) -> dict:
return {}
def connect(self) -> None:
if self.is_connected:
raise DeviceAlreadyConnectedError(
"ManipulatorRobot is already connected. Do not run `robot.connect()` twice."
)
logging.info("Connecting arm.")
self.arm.connect()
# We assume that at connection time, arm is in a rest position,
# and torque can be safely disabled to run calibration.
self.arm.write("Torque_Enable", TorqueMode.DISABLED.value)
self.calibrate()
# Check arm can be read
self.arm.read("Present_Position")
self.is_connected = True
def calibrate(self) -> None:
"""After calibration all motors function in human interpretable ranges.
Rotations are expressed in degrees in nominal range of [-180, 180],
and linear motions (like gripper of Aloha) in nominal range of [0, 100].
"""
if self.calibration_fpath.exists():
with open(self.calibration_fpath) as f:
calibration = json.load(f)
else:
# TODO(rcadene): display a warning in __init__ if calibration file not available
logging.info(f"Missing calibration file '{self.calibration_fpath}'")
calibration = run_full_arm_calibration(self.arm, self.robot_type, self.name, "leader")
logging.info(f"Calibration is done! Saving calibration file '{self.calibration_fpath}'")
self.calibration_fpath.parent.mkdir(parents=True, exist_ok=True)
with open(self.calibration_fpath, "w") as f:
json.dump(calibration, f)
self.arm.set_calibration(calibration)
apply_feetech_offsets_from_calibration(self.arm, calibration)
def get_action(self) -> np.ndarray:
"""The returned action does not have a batch dimension."""
# Read arm position
before_read_t = time.perf_counter()
action = self.arm.read("Present_Position")
self.logs["read_pos_dt_s"] = time.perf_counter() - before_read_t
return action
def send_feedback(self, feedback: np.ndarray) -> None:
# TODO(rcadene, aliberts): Implement force feedback
pass
def disconnect(self) -> None:
if not self.is_connected:
raise DeviceNotConnectedError(
"ManipulatorRobot is not connected. You need to run `robot.connect()` before disconnecting."
)
self.arm.disconnect()
self.is_connected = False

View File

@ -1,7 +1,11 @@
import abc
from pathlib import Path
from typing import Any
import draccus
from lerobot.common.constants import HF_LEROBOT_CALIBRATION, TELEOPERATORS
from lerobot.common.motors.motors_bus import MotorCalibration
from .config import TeleoperatorConfig
@ -22,6 +26,12 @@ class Teleoperator(abc.ABC):
)
self.calibration_dir.mkdir(parents=True, exist_ok=True)
self.calibration_fpath = self.calibration_dir / f"{self.id}.json"
self.calibration: dict[str, MotorCalibration] = {}
if self.calibration_fpath.is_file():
self._load_calibration()
def __str__(self) -> str:
return f"{self.id} {self.__class__.__name__}"
@abc.abstractproperty
def action_feature(self) -> dict:
@ -31,16 +41,38 @@ class Teleoperator(abc.ABC):
def feedback_feature(self) -> dict:
pass
@abc.abstractproperty
def is_connected(self) -> bool:
pass
@abc.abstractmethod
def connect(self) -> None:
"""Connects to the teleoperator."""
pass
@abc.abstractproperty
def is_calibrated(self) -> bool:
pass
@abc.abstractmethod
def calibrate(self) -> None:
"""Calibrates the teleoperator."""
pass
def _load_calibration(self, fpath: Path | None = None) -> None:
fpath = self.calibration_fpath if fpath is None else fpath
with open(fpath) as f, draccus.config_type("json"):
self.calibration = draccus.load(dict[str, MotorCalibration], f)
def _save_calibration(self, fpath: Path | None = None) -> None:
fpath = self.calibration_fpath if fpath is None else fpath
with open(fpath, "w") as f, draccus.config_type("json"):
draccus.dump(self.calibration, f, indent=4)
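# Note: calibration is (de)serialized with draccus as JSON, one MotorCalibration entry per motor,
# stored at `calibration_dir / f"{id}.json"` (see the constructor above).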
@abc.abstractmethod
def configure(self) -> None:
pass
@abc.abstractmethod
def get_action(self) -> dict[str, Any]:
"""Gets the action to send to a teleoperator."""
@ -55,7 +87,3 @@ class Teleoperator(abc.ABC):
def disconnect(self) -> None:
"""Disconnects from the teleoperator."""
pass
def __del__(self):
if getattr(self, "is_connected", False):
self.disconnect()

View File

@ -1,4 +1,2 @@
from .configuration_widowx import WidowXTeleopConfig
from .teleop_widowx import WidowXTeleop
__all__ = ["WidowXTeleopConfig", "WidowXTeleop"]
from .config_widowx import WidowXConfig
from .widowx import WidowX

View File

@ -0,0 +1,25 @@
#!/usr/bin/env python
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from dataclasses import dataclass
from ..config import TeleoperatorConfig
@TeleoperatorConfig.register_subclass("widowx")
@dataclass
class WidowXConfig(TeleoperatorConfig):
port: str # Port to connect to the arm

View File

@ -1,49 +0,0 @@
#!/usr/bin/env python
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from dataclasses import dataclass
from ..config import TeleoperatorConfig
@TeleoperatorConfig.register_subclass("widowx")
@dataclass
class WidowXTeleopConfig(TeleoperatorConfig):
port: str # Port to connect to the teleoperator
mock: bool = False
# /!\ FOR SAFETY, READ THIS /!\
# `max_relative_target` limits the magnitude of the relative positional target vector for safety purposes.
# Set this to a positive scalar to have the same value for all motors, or a list that is the same length as
# the number of motors in your follower arms.
# For Aloha, for every goal position request, motor rotations are capped at 5 degrees by default.
# When you feel more confident with teleoperation or running the policy, you can extend
# this safety limit and even remove it by setting it to `null`.
# Also, everything is expected to work safely out-of-the-box, but we highly advise to
# first try to teleoperate the grippers only (by commenting out the rest of the motors in this yaml),
# then to gradually add more motors (by uncommenting), until you can teleoperate both arms fully
max_relative_target: int | None = 5
# motors
waist: tuple = (1, "xm430-w350")
shoulder: tuple = (2, "xm430-w350")
shoulder_shadow: tuple = (3, "xm430-w350")
elbow: tuple = (4, "xm430-w350")
elbow_shadow: tuple = (5, "xm430-w350")
forearm_roll: tuple = (6, "xm430-w350")
wrist_angle: tuple = (7, "xm430-w350")
wrist_rotate: tuple = (8, "xl430-w250")
gripper: tuple = (9, "xc430-w150")

View File

@ -1,157 +0,0 @@
#!/usr/bin/env python
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
import time
import numpy as np
from lerobot.common.errors import DeviceAlreadyConnectedError, DeviceNotConnectedError
from lerobot.common.motors import TorqueMode
from lerobot.common.motors.dynamixel import (
DynamixelMotorsBus,
run_arm_calibration,
)
from ..teleoperator import Teleoperator
from .configuration_widowx import WidowXTeleopConfig
class WidowXTeleop(Teleoperator):
"""
[WidowX](https://www.trossenrobotics.com/widowx-250) developed by Trossen Robotics
"""
config_class = WidowXTeleopConfig
name = "widowx"
def __init__(self, config: WidowXTeleopConfig):
super().__init__(config)
self.config = config
self.robot_type = config.type
self.arm = DynamixelMotorsBus(
port=self.config.port,
motors={
"waist": config.waist,
"shoulder": config.shoulder,
"shoulder_shadow": config.shoulder_shadow,
"elbow": config.elbow,
"elbow_shadow": config.elbow_shadow,
"forearm_roll": config.forearm_roll,
"wrist_angle": config.wrist_angle,
"wrist_rotate": config.wrist_rotate,
"gripper": config.gripper,
},
)
self.is_connected = False
self.logs = {}
@property
def action_feature(self) -> dict:
return {
"dtype": "float32",
"shape": (len(self.arm),),
"names": {"motors": list(self.arm.motors)},
}
@property
def feedback_feature(self) -> dict:
return {}
def _set_shadow_motors(self):
"""
Set secondary/shadow ID for shoulder and elbow. These joints have two motors.
As a result, if only one of them is required to move to a certain position,
the other will follow. This is to avoid breaking the motors.
"""
shoulder_idx = self.config.shoulder[0]
self.arm.write("Secondary_ID", shoulder_idx, "shoulder_shadow")
elbow_idx = self.config.elbow[0]
self.arm.write("Secondary_ID", elbow_idx, "elbow_shadow")
def connect(self):
if self.is_connected:
raise DeviceAlreadyConnectedError(
"ManipulatorRobot is already connected. Do not run `robot.connect()` twice."
)
logging.info("Connecting arm.")
self.arm.connect()
# We assume that at connection time, arm is in a rest position,
# and torque can be safely disabled to run calibration.
self.arm.write("Torque_Enable", TorqueMode.DISABLED.value)
self.calibrate()
self._set_shadow_motors()
logging.info("Activating torque.")
self.arm.write("Torque_Enable", TorqueMode.ENABLED.value)
# Check arm can be read
self.arm.read("Present_Position")
# Connect the cameras
for cam in self.cameras.values():
cam.connect()
self.is_connected = True
def calibrate(self) -> None:
"""After calibration all motors function in human interpretable ranges.
Rotations are expressed in degrees in nominal range of [-180, 180],
and linear motions (like gripper of Aloha) in nominal range of [0, 100].
"""
if self.calibration_fpath.exists():
with open(self.calibration_fpath) as f:
calibration = json.load(f)
else:
# TODO(rcadene): display a warning in __init__ if calibration file not available
logging.info(f"Missing calibration file '{self.calibration_fpath}'")
calibration = run_arm_calibration(self.arm, self.robot_type, self.name, "leader")
logging.info(f"Calibration is done! Saving calibration file '{self.calibration_fpath}'")
self.calibration_fpath.parent.mkdir(parents=True, exist_ok=True)
with open(self.calibration_fpath, "w") as f:
json.dump(calibration, f)
self.arm.set_calibration(calibration)
def get_action(self) -> np.ndarray:
"""The returned action does not have a batch dimension."""
# Read arm position
before_read_t = time.perf_counter()
action = self.arm.read("Present_Position")
self.logs["read_pos_dt_s"] = time.perf_counter() - before_read_t
return action
def send_feedback(self, feedback: np.ndarray) -> None:
# TODO(rcadene, aliberts): Implement force feedback
pass
def disconnect(self) -> None:
if not self.is_connected:
raise DeviceNotConnectedError(
"ManipulatorRobot is not connected. You need to run `robot.connect()` before disconnecting."
)
self.arm.disconnect()
self.is_connected = False

View File

@ -0,0 +1,153 @@
#!/usr/bin/env python
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import time
from lerobot.common.errors import DeviceAlreadyConnectedError, DeviceNotConnectedError
from lerobot.common.motors import Motor, MotorCalibration, MotorNormMode
from lerobot.common.motors.dynamixel import (
DriveMode,
DynamixelMotorsBus,
OperatingMode,
)
from ..teleoperator import Teleoperator
from .config_widowx import WidowXConfig
logger = logging.getLogger(__name__)
class WidowX(Teleoperator):
"""
[WidowX](https://www.trossenrobotics.com/widowx-250) developed by Trossen Robotics
"""
config_class = WidowXConfig
name = "widowx"
def __init__(self, config: WidowXConfig):
super().__init__(config)
self.config = config
self.arm = DynamixelMotorsBus(
port=self.config.port,
motors={
"waist": Motor(1, "xm430-w350", MotorNormMode.RANGE_M100_100),
"shoulder": Motor(2, "xm430-w350", MotorNormMode.RANGE_M100_100),
"shoulder_shadow": Motor(3, "xm430-w350", MotorNormMode.RANGE_M100_100),
"elbow": Motor(4, "xm430-w350", MotorNormMode.RANGE_M100_100),
"elbow_shadow": Motor(5, "xm430-w350", MotorNormMode.RANGE_M100_100),
"forearm_roll": Motor(6, "xm430-w350", MotorNormMode.RANGE_M100_100),
"wrist_angle": Motor(7, "xm430-w350", MotorNormMode.RANGE_M100_100),
"wrist_rotate": Motor(8, "xl430-w250", MotorNormMode.RANGE_M100_100),
"gripper": Motor(9, "xc430-w150", MotorNormMode.RANGE_0_100),
},
)
@property
def action_feature(self) -> dict:
return {
"dtype": "float32",
"shape": (len(self.arm),),
"names": {"motors": list(self.arm.motors)},
}
@property
def feedback_feature(self) -> dict:
return {}
@property
def is_connected(self) -> bool:
return self.arm.is_connected
def connect(self):
if self.is_connected:
raise DeviceAlreadyConnectedError(f"{self} already connected")
self.arm.connect()
if not self.is_calibrated:
self.calibrate()
self.configure()
logger.info(f"{self} connected.")
def calibrate(self) -> None:
raise NotImplementedError # TODO(aliberts): adapt code below (copied from koch)
logger.info(f"\nRunning calibration of {self}")
self.arm.disable_torque()
for name in self.arm.names:
self.arm.write("Operating_Mode", name, OperatingMode.EXTENDED_POSITION.value)
self.arm.write("Drive_Mode", "elbow_flex", DriveMode.INVERTED.value)
drive_modes = {name: 1 if name == "elbow_flex" else 0 for name in self.arm.names}
input("Move robot to the middle of its range of motion and press ENTER....")
homing_offsets = self.arm.set_half_turn_homings()
full_turn_motors = ["shoulder_pan", "wrist_roll"]
unknown_range_motors = [name for name in self.arm.names if name not in full_turn_motors]
logger.info(
f"Move all joints except {full_turn_motors} sequentially through their "
"entire ranges of motion.\nRecording positions. Press ENTER to stop..."
)
range_mins, range_maxes = self.arm.record_ranges_of_motion(unknown_range_motors)
for name in full_turn_motors:
range_mins[name] = 0
range_maxes[name] = 4095
self.calibration = {}
for name, motor in self.arm.motors.items():
self.calibration[name] = MotorCalibration(
id=motor.id,
drive_mode=drive_modes[name],
homing_offset=homing_offsets[name],
range_min=range_mins[name],
range_max=range_maxes[name],
)
self.arm.write_calibration(self.calibration)
self._save_calibration()
logger.info(f"Calibration saved to {self.calibration_fpath}")
def configure(self) -> None:
self.arm.disable_torque()
self.arm.configure_motors()
# Set secondary/shadow ID for shoulder and elbow. These joints have two motors.
# As a result, if only one of them is required to move to a certain position,
# the other will follow. This is to avoid breaking the motors.
self.arm.write("Secondary_ID", "shoulder_shadow", 2)
self.arm.write("Secondary_ID", "elbow_shadow", 4)
def get_action(self) -> dict[str, float]:
if not self.is_connected:
raise DeviceNotConnectedError(f"{self} is not connected.")
start = time.perf_counter()
action = self.arm.read("Present_Position")
dt_ms = (time.perf_counter() - start) * 1e3
logger.debug(f"{self} read action: {dt_ms:.1f}ms")
return action
def send_feedback(self, feedback: dict[str, float]) -> None:
raise NotImplementedError
def disconnect(self) -> None:
if not self.is_connected:
raise DeviceNotConnectedError(f"{self} is not connected.")
self.arm.disconnect()
logger.info(f"{self} disconnected.")

View File

@ -0,0 +1,52 @@
def encode_sign_magnitude(value: int, sign_bit_index: int):
"""
https://en.wikipedia.org/wiki/Signed_number_representations#Sign%E2%80%93magnitude
"""
max_magnitude = (1 << sign_bit_index) - 1
magnitude = abs(value)
if magnitude > max_magnitude:
raise ValueError(f"Magnitude {magnitude} exceeds {max_magnitude} (max for {sign_bit_index=})")
direction_bit = 1 if value < 0 else 0
return (direction_bit << sign_bit_index) | magnitude
def decode_sign_magnitude(encoded_value: int, sign_bit_index: int):
"""
https://en.wikipedia.org/wiki/Signed_number_representations#Sign%E2%80%93magnitude
"""
direction_bit = (encoded_value >> sign_bit_index) & 1
magnitude_mask = (1 << sign_bit_index) - 1
magnitude = encoded_value & magnitude_mask
return -magnitude if direction_bit else magnitude
def encode_twos_complement(value: int, n_bytes: int):
"""
https://en.wikipedia.org/wiki/Signed_number_representations#Two%27s_complement
"""
bit_width = n_bytes * 8
min_val = -(1 << (bit_width - 1))
max_val = (1 << (bit_width - 1)) - 1
if not (min_val <= value <= max_val):
raise ValueError(
f"Value {value} out of range for {n_bytes}-byte two's complement: [{min_val}, {max_val}]"
)
if value >= 0:
return value
return (1 << bit_width) + value
def decode_twos_complement(value: int, n_bytes: int) -> int:
"""
https://en.wikipedia.org/wiki/Signed_number_representations#Two%27s_complement
"""
bits = n_bytes * 8
sign_bit = 1 << (bits - 1)
if value & sign_bit:
value -= 1 << bits
return value
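# Illustrative round trips (values can be checked by hand against the definitions above):
#   encode_sign_magnitude(-5, sign_bit_index=10)    # -> 1029  ((1 << 10) | 5)
#   decode_sign_magnitude(1029, sign_bit_index=10)  # -> -5
#   encode_twos_complement(-1, n_bytes=2)           # -> 65535 (0xFFFF)
#   decode_twos_complement(65535, n_bytes=2)        # -> -1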

View File

@ -17,7 +17,9 @@ import logging
import os
import os.path as osp
import platform
import select
import subprocess
import sys
from copy import copy
from datetime import datetime, timezone
from pathlib import Path
@ -228,3 +230,12 @@ def is_valid_numpy_dtype_string(dtype_str: str) -> bool:
except TypeError:
# If a TypeError is raised, the string is not a valid dtype
return False
def enter_pressed() -> bool:
return bool(select.select([sys.stdin], [], [], 0)[0]) and sys.stdin.readline().strip() == ""
def move_cursor_up(lines):
"""Move the cursor up by a specified number of lines."""
print(f"\033[{lines}A", end="")

View File

@ -90,6 +90,7 @@ class WandBLogger:
# TODO(rcadene): split train and eval, and run async eval with job_type="eval"
job_type="train_eval",
resume="must" if cfg.resume else None,
mode=self.cfg.mode if self.cfg.mode in ["online", "offline", "disabled"] else "online",
)
print(colored("Logs will be synced with wandb.", "blue", attrs=["bold"]))
logging.info(f"Track this run --> {colored(wandb.run.get_url(), 'yellow', attrs=['bold'])}")

View File

@ -48,6 +48,7 @@ class WandBConfig:
entity: str | None = None
notes: str | None = None
run_id: str | None = None
mode: str | None = None # Allowed values: 'online', 'offline', 'disabled'. Defaults to 'online'
@dataclass

View File

@ -1,364 +0,0 @@
#!/usr/bin/env python
# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Use this script to convert your dataset into LeRobot dataset format and upload it to the Hugging Face hub,
or store it locally. LeRobot dataset format is lightweight, fast to load from, and does not require any
installation of neural net specific packages like pytorch, tensorflow, jax.
Example of how to download raw datasets, convert them into LeRobotDataset format, and push them to the hub:
```
python lerobot/scripts/push_dataset_to_hub.py \
--raw-dir data/pusht_raw \
--raw-format pusht_zarr \
--repo-id lerobot/pusht
python lerobot/scripts/push_dataset_to_hub.py \
--raw-dir data/xarm_lift_medium_raw \
--raw-format xarm_pkl \
--repo-id lerobot/xarm_lift_medium
python lerobot/scripts/push_dataset_to_hub.py \
--raw-dir data/aloha_sim_insertion_scripted_raw \
--raw-format aloha_hdf5 \
--repo-id lerobot/aloha_sim_insertion_scripted
python lerobot/scripts/push_dataset_to_hub.py \
--raw-dir data/umi_cup_in_the_wild_raw \
--raw-format umi_zarr \
--repo-id lerobot/umi_cup_in_the_wild
```
"""
import argparse
import json
import shutil
import warnings
from pathlib import Path
from typing import Any
import torch
from huggingface_hub import HfApi
from safetensors.torch import save_file
from lerobot.common.datasets.compute_stats import compute_stats
from lerobot.common.datasets.lerobot_dataset import CODEBASE_VERSION, LeRobotDataset
from lerobot.common.datasets.push_dataset_to_hub.utils import check_repo_id
from lerobot.common.datasets.utils import create_branch, create_lerobot_dataset_card, flatten_dict
def get_from_raw_to_lerobot_format_fn(raw_format: str):
if raw_format == "pusht_zarr":
from lerobot.common.datasets.push_dataset_to_hub.pusht_zarr_format import from_raw_to_lerobot_format
elif raw_format == "umi_zarr":
from lerobot.common.datasets.push_dataset_to_hub.umi_zarr_format import from_raw_to_lerobot_format
elif raw_format == "aloha_hdf5":
from lerobot.common.datasets.push_dataset_to_hub.aloha_hdf5_format import from_raw_to_lerobot_format
elif raw_format in ["rlds", "openx"]:
from lerobot.common.datasets.push_dataset_to_hub.openx_rlds_format import from_raw_to_lerobot_format
elif raw_format == "dora_parquet":
from lerobot.common.datasets.push_dataset_to_hub.dora_parquet_format import from_raw_to_lerobot_format
elif raw_format == "xarm_pkl":
from lerobot.common.datasets.push_dataset_to_hub.xarm_pkl_format import from_raw_to_lerobot_format
elif raw_format == "cam_png":
from lerobot.common.datasets.push_dataset_to_hub.cam_png_format import from_raw_to_lerobot_format
else:
raise ValueError(
f"The selected {raw_format} can't be found. Did you add it to `lerobot/scripts/push_dataset_to_hub.py::get_from_raw_to_lerobot_format_fn`?"
)
return from_raw_to_lerobot_format
def save_meta_data(
info: dict[str, Any], stats: dict, episode_data_index: dict[str, list], meta_data_dir: Path
):
meta_data_dir.mkdir(parents=True, exist_ok=True)
# save info
info_path = meta_data_dir / "info.json"
with open(str(info_path), "w") as f:
json.dump(info, f, indent=4)
# save stats
stats_path = meta_data_dir / "stats.safetensors"
save_file(flatten_dict(stats), stats_path)
# save episode_data_index
episode_data_index = {key: torch.tensor(episode_data_index[key]) for key in episode_data_index}
ep_data_idx_path = meta_data_dir / "episode_data_index.safetensors"
save_file(episode_data_index, ep_data_idx_path)
def push_meta_data_to_hub(repo_id: str, meta_data_dir: str | Path, revision: str | None):
"""Expect all meta data files to be all stored in a single "meta_data" directory.
On the hugging face repositery, they will be uploaded in a "meta_data" directory at the root.
"""
api = HfApi()
api.upload_folder(
folder_path=meta_data_dir,
path_in_repo="meta_data",
repo_id=repo_id,
revision=revision,
repo_type="dataset",
)
def push_dataset_card_to_hub(
repo_id: str,
revision: str | None,
tags: list | None = None,
license: str = "apache-2.0",
**card_kwargs,
):
"""Creates and pushes a LeRobotDataset Card with appropriate tags to easily find it on the hub."""
card = create_lerobot_dataset_card(tags=tags, license=license, **card_kwargs)
card.push_to_hub(repo_id=repo_id, repo_type="dataset", revision=revision)
def push_videos_to_hub(repo_id: str, videos_dir: str | Path, revision: str | None):
"""Expect mp4 files to be all stored in a single "videos" directory.
On the hugging face repositery, they will be uploaded in a "videos" directory at the root.
"""
api = HfApi()
api.upload_folder(
folder_path=videos_dir,
path_in_repo="videos",
repo_id=repo_id,
revision=revision,
repo_type="dataset",
allow_patterns="*.mp4",
)
def push_dataset_to_hub(
raw_dir: Path,
raw_format: str,
repo_id: str,
push_to_hub: bool = True,
local_dir: Path | None = None,
fps: int | None = None,
video: bool = True,
batch_size: int = 32,
num_workers: int = 8,
episodes: list[int] | None = None,
force_override: bool = False,
resume: bool = False,
cache_dir: Path = Path("/tmp"),
tests_data_dir: Path | None = None,
encoding: dict | None = None,
):
check_repo_id(repo_id)
user_id, dataset_id = repo_id.split("/")
# Robustify when `raw_dir` is str instead of Path
raw_dir = Path(raw_dir)
if not raw_dir.exists():
raise NotADirectoryError(
f"{raw_dir} does not exists. Check your paths or run this command to download an existing raw dataset on the hub: "
f"`python lerobot/common/datasets/push_dataset_to_hub/_download_raw.py --raw-dir your/raw/dir --repo-id your/repo/id_raw`"
)
if local_dir:
# Robustify when `local_dir` is str instead of Path
local_dir = Path(local_dir)
# Send warning if local_dir isn't well formatted
if local_dir.parts[-2] != user_id or local_dir.parts[-1] != dataset_id:
warnings.warn(
f"`local_dir` ({local_dir}) doesn't contain a community or user id `/` the name of the dataset that match the `repo_id` (e.g. 'data/lerobot/pusht'). Following this naming convention is advised, but not mandatory.",
stacklevel=1,
)
# Check we don't override an existing `local_dir` by mistake
if local_dir.exists():
if force_override:
shutil.rmtree(local_dir)
elif not resume:
raise ValueError(f"`local_dir` already exists ({local_dir}). Use `--force-override 1`.")
meta_data_dir = local_dir / "meta_data"
videos_dir = local_dir / "videos"
else:
# Temporary directory used to store images, videos, meta_data
meta_data_dir = Path(cache_dir) / "meta_data"
videos_dir = Path(cache_dir) / "videos"
if raw_format is None:
# TODO(rcadene, adilzouitine): implement auto_find_raw_format
raise NotImplementedError()
# raw_format = auto_find_raw_format(raw_dir)
# convert dataset from original raw format to LeRobot format
from_raw_to_lerobot_format = get_from_raw_to_lerobot_format_fn(raw_format)
hf_dataset, episode_data_index, info = from_raw_to_lerobot_format(
raw_dir,
videos_dir,
fps,
video,
episodes,
encoding,
)
lerobot_dataset = LeRobotDataset.from_preloaded(
repo_id=repo_id,
hf_dataset=hf_dataset,
episode_data_index=episode_data_index,
info=info,
videos_dir=videos_dir,
)
stats = compute_stats(lerobot_dataset, batch_size, num_workers)
if local_dir:
hf_dataset = hf_dataset.with_format(None) # to remove transforms that cant be saved
hf_dataset.save_to_disk(str(local_dir / "train"))
if push_to_hub or local_dir:
# mandatory for upload
save_meta_data(info, stats, episode_data_index, meta_data_dir)
if push_to_hub:
hf_dataset.push_to_hub(repo_id, revision="main")
push_meta_data_to_hub(repo_id, meta_data_dir, revision="main")
push_dataset_card_to_hub(repo_id, revision="main")
if video:
push_videos_to_hub(repo_id, videos_dir, revision="main")
create_branch(repo_id, repo_type="dataset", branch=CODEBASE_VERSION)
if tests_data_dir:
# get the first episode
num_items_first_ep = episode_data_index["to"][0] - episode_data_index["from"][0]
test_hf_dataset = hf_dataset.select(range(num_items_first_ep))
episode_data_index = {k: v[:1] for k, v in episode_data_index.items()}
test_hf_dataset = test_hf_dataset.with_format(None)
test_hf_dataset.save_to_disk(str(tests_data_dir / repo_id / "train"))
tests_meta_data = tests_data_dir / repo_id / "meta_data"
save_meta_data(info, stats, episode_data_index, tests_meta_data)
# copy videos of first episode to tests directory
episode_index = 0
tests_videos_dir = tests_data_dir / repo_id / "videos"
tests_videos_dir.mkdir(parents=True, exist_ok=True)
for key in lerobot_dataset.camera_keys:
fname = f"{key}_episode_{episode_index:06d}.mp4"
shutil.copy(videos_dir / fname, tests_videos_dir / fname)
if local_dir is None:
# clear cache
shutil.rmtree(meta_data_dir)
shutil.rmtree(videos_dir)
return lerobot_dataset
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
"--raw-dir",
type=Path,
required=True,
help="Directory containing input raw datasets (e.g. `data/aloha_mobile_chair_raw` or `data/pusht_raw).",
)
# TODO(rcadene): add automatic detection of the format
parser.add_argument(
"--raw-format",
type=str,
required=True,
help="Dataset type (e.g. `pusht_zarr`, `umi_zarr`, `aloha_hdf5`, `xarm_pkl`, `dora_parquet`, `rlds`, `openx`).",
)
parser.add_argument(
"--repo-id",
type=str,
required=True,
help="Repositery identifier on Hugging Face: a community or a user name `/` the name of the dataset (e.g. `lerobot/pusht`, `cadene/aloha_sim_insertion_human`).",
)
parser.add_argument(
"--local-dir",
type=Path,
help="When provided, writes the dataset converted to LeRobotDataset format in this directory (e.g. `data/lerobot/aloha_mobile_chair`).",
)
parser.add_argument(
"--push-to-hub",
type=int,
default=1,
help="Upload to hub.",
)
parser.add_argument(
"--fps",
type=int,
help="Frame rate used to collect videos. If not provided, use the default one specified in the code.",
)
parser.add_argument(
"--video",
type=int,
default=1,
help="Convert each episode of the raw dataset to an mp4 video. This option allows 60 times lower disk space consumption and 25 faster loading time during training.",
)
parser.add_argument(
"--batch-size",
type=int,
default=32,
help="Batch size loaded by DataLoader for computing the dataset statistics.",
)
parser.add_argument(
"--num-workers",
type=int,
default=8,
help="Number of processes of Dataloader for computing the dataset statistics.",
)
parser.add_argument(
"--episodes",
type=int,
nargs="*",
help="When provided, only converts the provided episodes (e.g `--episodes 2 3 4`). Useful to test the code on 1 episode.",
)
parser.add_argument(
"--force-override",
type=int,
default=0,
help="When set to 1, removes provided output directory if it already exists. By default, raises a ValueError exception.",
)
parser.add_argument(
"--resume",
type=int,
default=0,
help="When set to 1, resumes a previous run.",
)
parser.add_argument(
"--cache-dir",
type=Path,
required=False,
default="/tmp",
help="Directory to store the temporary videos and images generated while creating the dataset.",
)
parser.add_argument(
"--tests-data-dir",
type=Path,
help=(
"When provided, save tests artifacts into the given directory "
"(e.g. `--tests-data-dir tests/data` will save to tests/data/{--repo-id})."
),
)
args = parser.parse_args()
push_dataset_to_hub(**vars(args))
if __name__ == "__main__":
main()
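For reference, the CLI above maps one-to-one onto keyword arguments of `push_dataset_to_hub`. Below is a minimal programmatic sketch, not part of the diff: the import path is assumed, and the values are placeholders that mirror the argparse defaults.

```python
from pathlib import Path

# Assumed module path; adjust to wherever push_dataset_to_hub is defined in the repo.
from lerobot.scripts.push_dataset_to_hub import push_dataset_to_hub

push_dataset_to_hub(
    raw_dir=Path("data/pusht_raw"),        # raw dataset previously downloaded or recorded
    raw_format="pusht_zarr",               # must match one of the supported raw formats
    repo_id="lerobot/pusht",               # <user or org>/<dataset name>
    local_dir=Path("data/lerobot/pusht"),  # optional local copy in LeRobotDataset format
    push_to_hub=0,                         # keep everything local for this dry run
    fps=None,                              # fall back to the format's default frame rate
    video=1,                               # encode frames as mp4 videos
    batch_size=32,
    num_workers=8,
    episodes=[0],                          # convert a single episode to test the pipeline
    force_override=0,
    resume=0,
    cache_dir=Path("/tmp"),
    tests_data_dir=None,
)
```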

View File

@ -62,14 +62,14 @@ dependencies = [
"omegaconf>=2.3.0",
"opencv-python>=4.9.0",
"packaging>=24.2",
"av>=12.0.5",
"av>=12.0.5,<13.0.0",
"pymunk>=6.6.0",
"pynput>=1.7.7",
"pyzmq>=26.2.1",
"rerun-sdk>=0.21.0",
"termcolor>=2.4.0",
"torch>=2.2.1",
"torchcodec>=0.2.1; sys_platform != 'win32' and (sys_platform != 'linux' or (platform_machine != 'aarch64' and platform_machine != 'arm64' and platform_machine != 'armv7l'))",
"torchcodec>=0.2.1; sys_platform != 'win32' and (sys_platform != 'linux' or (platform_machine != 'aarch64' and platform_machine != 'arm64' and platform_machine != 'armv7l')) and (sys_platform != 'darwin' or platform_machine != 'x86_64')",
"torchvision>=0.21.0",
"wandb>=0.16.3",
"zarr>=2.17.0",

View File

@ -1,15 +1,14 @@
import abc
import random
import threading
import time
from typing import Callable
import dynamixel_sdk as dxl
import serial
from mock_serial.mock_serial import MockSerial, Stub
from mock_serial.mock_serial import MockSerial
from lerobot.common.motors.dynamixel import X_SERIES_CONTROL_TABLE, DynamixelMotorsBus
from .mock_serial_patch import WaitableStub
# https://emanual.robotis.com/docs/en/dxl/crc/
DXL_CRC_TABLE = [
0x0000, 0x8005, 0x800F, 0x000A, 0x801B, 0x001E, 0x0014, 0x8011,
@ -178,7 +177,7 @@ class MockInstructionPacket(MockDynamixelPacketv2):
Helper class to build valid Dynamixel Protocol 2.0 Instruction Packets.
Protocol 2.0 Instruction Packet structure
(from https://emanual.robotis.com/docs/en/dxl/protocol2/#instruction-packet)
https://emanual.robotis.com/docs/en/dxl/protocol2/#instruction-packet
| Header | Packet ID | Length | Instruction | Params | CRC |
| ------------------- | --------- | ----------- | ----------- | ----------------- | ----------- |
@ -206,7 +205,7 @@ class MockInstructionPacket(MockDynamixelPacketv2):
) -> bytes:
"""
Builds a "Ping" broadcast instruction.
(from https://emanual.robotis.com/docs/en/dxl/protocol2/#ping-0x01)
https://emanual.robotis.com/docs/en/dxl/protocol2/#ping-0x01
No parameters required.
"""
@ -222,7 +221,7 @@ class MockInstructionPacket(MockDynamixelPacketv2):
) -> bytes:
"""
Builds a "Sync_Read" broadcast instruction.
(from https://emanual.robotis.com/docs/en/dxl/protocol2/#sync-read-0x82)
https://emanual.robotis.com/docs/en/dxl/protocol2/#sync-read-0x82
The parameters for Sync_Read (Protocol 2.0) are:
param[0] = start_address L
@ -256,7 +255,7 @@ class MockInstructionPacket(MockDynamixelPacketv2):
) -> bytes:
"""
Builds a "Sync_Write" broadcast instruction.
(from https://emanual.robotis.com/docs/en/dxl/protocol2/#sync-write-0x83)
https://emanual.robotis.com/docs/en/dxl/protocol2/#sync-write-0x83
The parameters for Sync_Write (Protocol 2.0) are:
param[0] = start_address L
@ -281,9 +280,9 @@ class MockInstructionPacket(MockDynamixelPacketv2):
+2 is for the CRC at the end.
"""
data = []
for idx, value in ids_values.items():
split_value = DynamixelMotorsBus.split_int_bytes(value, data_length)
data += [idx, *split_value]
for id_, value in ids_values.items():
split_value = DynamixelMotorsBus._split_int_to_bytes(value, data_length)
data += [id_, *split_value]
params = [
dxl.DXL_LOBYTE(start_address),
dxl.DXL_HIBYTE(start_address),
@ -294,13 +293,47 @@ class MockInstructionPacket(MockDynamixelPacketv2):
length = len(ids_values) * (1 + data_length) + 7
return cls.build(dxl_id=dxl.BROADCAST_ID, params=params, length=length, instruct_type="Sync_Write")
@classmethod
def write(
cls,
dxl_id: int,
value: int,
start_address: int,
data_length: int,
) -> bytes:
"""
Builds a "Write" instruction.
https://emanual.robotis.com/docs/en/dxl/protocol2/#write-0x03
The parameters for Write (Protocol 2.0) are:
param[0] = start_address L
param[1] = start_address H
param[2] = 1st Byte
param[3] = 2nd Byte
...
param[1+X] = X-th Byte
And 'length' = data_length + 5, where:
+1 is for instruction byte,
+2 is for the start_address bytes,
+2 is for the CRC at the end.
"""
data = DynamixelMotorsBus._split_int_to_bytes(value, data_length)
params = [
dxl.DXL_LOBYTE(start_address),
dxl.DXL_HIBYTE(start_address),
*data,
]
length = data_length + 5
return cls.build(dxl_id=dxl_id, params=params, length=length, instruct_type="Write")
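As a sanity check on the length arithmetic above, here is a small standalone sketch (not part of the diff) of how the Write params and length come together for a hypothetical 4-byte register write. The address and value are made up, and plain byte operations stand in for `dxl.DXL_LOBYTE`/`DXL_HIBYTE` and `_split_int_to_bytes`.

```python
# Hypothetical 4-byte Write, e.g. a Goal_Position-sized register.
start_address, value, data_length = 116, 2047, 4            # illustrative values only
data = list(value.to_bytes(data_length, "little"))          # [255, 7, 0, 0]
params = [start_address & 0xFF, (start_address >> 8) & 0xFF, *data]
length = data_length + 5          # +1 instruction, +2 start_address, +2 CRC
assert length == len(params) + 3  # params plus instruction (1) and CRC (2)
```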
class MockStatusPacket(MockDynamixelPacketv2):
"""
Helper class to build valid Dynamixel Protocol 2.0 Status Packets.
Protocol 2.0 Status Packet structure
(from https://emanual.robotis.com/docs/en/dxl/protocol2/#status-packet)
https://emanual.robotis.com/docs/en/dxl/protocol2/#status-packet
| Header | Packet ID | Length | Instruction | Error | Params | CRC |
| ------------------- | --------- | ----------- | ----------- | ----- | ----------------- | ----------- |
@ -323,8 +356,8 @@ class MockStatusPacket(MockDynamixelPacketv2):
@classmethod
def ping(cls, dxl_id: int, model_nb: int = 1190, firm_ver: int = 50) -> bytes:
"""Builds a 'Ping' status packet.
"""
Builds a 'Ping' status packet.
https://emanual.robotis.com/docs/en/dxl/protocol2/#ping-0x01
Args:
@ -342,22 +375,22 @@ class MockStatusPacket(MockDynamixelPacketv2):
return cls.build(dxl_id, params=params, length=length)
@classmethod
def present_position(cls, dxl_id: int, pos: int | None = None, min_max_range: tuple = (0, 4095)) -> bytes:
"""Builds a 'Present_Position' status packet.
def read(cls, dxl_id: int, value: int, param_length: int) -> bytes:
"""
Builds a 'Read' status packet (also works for 'Sync Read')
https://emanual.robotis.com/docs/en/dxl/protocol2/#read-0x02
https://emanual.robotis.com/docs/en/dxl/protocol2/#sync-read-0x82
Args:
dxl_id (int): ID of the servo responding.
pos (int | None, optional): Desired 'Present_Position' to be returned in the packet. If None, it
will use a random value in the min_max_range. Defaults to None.
min_max_range (tuple, optional): Min/max range to generate the position values used for when 'pos'
is None. Note that the bounds are included in the range. Defaults to (0, 4095).
value (int): Desired value to be returned in the packet.
param_length (int): The byte length of the data at that address, as reported in the control table.
Returns:
bytes: The raw 'Read' status packet ready to be sent through serial.
"""
pos = random.randint(*min_max_range) if pos is None else pos
params = [dxl.DXL_LOBYTE(pos), dxl.DXL_HIBYTE(pos), 0, 0]
length = 8
params = DynamixelMotorsBus._split_int_to_bytes(value, param_length)
length = param_length + 4
return cls.build(dxl_id, params=params, length=length)
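For illustration only (not part of the diff), the status-packet length follows the Status Packet layout shown above: the params carry the value bytes, and the extra 4 bytes counted in 'length' are the 0x55 instruction, the error byte, and the 2-byte CRC. A quick sketch with made-up values:

```python
# Hypothetical read-back of a 4-byte Present_Position value.
value, param_length = 1337, 4                          # illustrative values only
params = list(value.to_bytes(param_length, "little"))  # stand-in for _split_int_to_bytes
length = param_length + 4        # +1 instruction (0x55), +1 error byte, +2 CRC
assert length == len(params) + 4
```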
@ -386,34 +419,6 @@ class MockPortHandler(dxl.PortHandler):
return True
class WaitableStub(Stub):
"""
In some situations, a test might be checking if a stub has been called before `MockSerial` thread had time
to read, match, and call the stub. In these situations, the test can fail randomly.
Use `wait_called()` or `wait_calls()` to block until the stub is called, avoiding race conditions.
"""
def __init__(self, **kwargs):
super().__init__(**kwargs)
self._event = threading.Event()
def call(self):
self._event.set()
return super().call()
def wait_called(self, timeout: float = 1.0):
return self._event.wait(timeout)
def wait_calls(self, min_calls: int = 1, timeout: float = 1.0):
start = time.perf_counter()
while time.perf_counter() - start < timeout:
if self.calls >= min_calls:
return self.calls
time.sleep(0.005)
raise TimeoutError(f"Stub not called {min_calls} times within {timeout} seconds.")
class MockMotors(MockSerial):
"""
This class will simulate physical motors by responding with valid status packets upon receiving some
@ -435,16 +440,13 @@ class MockMotors(MockSerial):
return new_stub
def build_broadcast_ping_stub(
self, ids_models_firmwares: dict[int, list[int]] | None = None, num_invalid_try: int = 0
self, ids_models: dict[int, int] | None = None, num_invalid_try: int = 0
) -> str:
ping_request = MockInstructionPacket.ping(dxl.BROADCAST_ID)
return_packets = b"".join(
MockStatusPacket.ping(idx, model, firm_ver)
for idx, (model, firm_ver) in ids_models_firmwares.items()
)
return_packets = b"".join(MockStatusPacket.ping(id_, model) for id_, model in ids_models.items())
ping_response = self._build_send_fn(return_packets, num_invalid_try)
stub_name = "Ping_" + "_".join([str(idx) for idx in ids_models_firmwares])
stub_name = "Ping_" + "_".join([str(id_) for id_ in ids_models])
self.stub(
name=stub_name,
receive_bytes=ping_request,
@ -458,7 +460,6 @@ class MockMotors(MockSerial):
ping_request = MockInstructionPacket.ping(dxl_id)
return_packet = MockStatusPacket.ping(dxl_id, model_nb, firm_ver)
ping_response = self._build_send_fn(return_packet, num_invalid_try)
stub_name = f"Ping_{dxl_id}"
self.stub(
name=stub_name,
@ -470,21 +471,34 @@ class MockMotors(MockSerial):
def build_sync_read_stub(
self, data_name: str, ids_values: dict[int, int] | None = None, num_invalid_try: int = 0
) -> str:
"""
'data_name' supported:
- Present_Position
"""
address, length = self.ctrl_table[data_name]
sync_read_request = MockInstructionPacket.sync_read(list(ids_values), address, length)
if data_name != "Present_Position":
raise NotImplementedError
return_packets = b"".join(
MockStatusPacket.present_position(idx, pos) for idx, pos in ids_values.items()
)
return_packets = b"".join(MockStatusPacket.read(id_, pos, length) for id_, pos in ids_values.items())
sync_read_response = self._build_send_fn(return_packets, num_invalid_try)
stub_name = f"Sync_Read_{data_name}_" + "_".join([str(id_) for id_ in ids_values])
self.stub(
name=stub_name,
receive_bytes=sync_read_request,
send_fn=sync_read_response,
)
return stub_name
stub_name = f"Sync_Read_{data_name}_" + "_".join([str(idx) for idx in ids_values])
def build_sequential_sync_read_stub(
self, data_name: str, ids_values: dict[int, list[int]] | None = None
) -> str:
sequence_length = len(next(iter(ids_values.values())))
assert all(len(positions) == sequence_length for positions in ids_values.values())
address, length = self.ctrl_table[data_name]
sync_read_request = MockInstructionPacket.sync_read(list(ids_values), address, length)
sequential_packets = []
for count in range(sequence_length):
return_packets = b"".join(
MockStatusPacket.read(id_, positions[count], length) for id_, positions in ids_values.items()
)
sequential_packets.append(return_packets)
sync_read_response = self._build_sequential_send_fn(sequential_packets)
stub_name = f"Seq_Sync_Read_{data_name}_" + "_".join([str(id_) for id_ in ids_values])
self.stub(
name=stub_name,
receive_bytes=sync_read_request,
@ -497,7 +511,7 @@ class MockMotors(MockSerial):
) -> str:
address, length = self.ctrl_table[data_name]
sync_read_request = MockInstructionPacket.sync_write(ids_values, address, length)
stub_name = f"Sync_Write_{data_name}_" + "_".join([str(idx) for idx in ids_values])
stub_name = f"Sync_Write_{data_name}_" + "_".join([str(id_) for id_ in ids_values])
self.stub(
name=stub_name,
receive_bytes=sync_read_request,
@ -505,6 +519,20 @@ class MockMotors(MockSerial):
)
return stub_name
def build_write_stub(
self, data_name: str, dxl_id: int, value: int, error: str = "Success", num_invalid_try: int = 0
) -> str:
address, length = self.ctrl_table[data_name]
write_request = MockInstructionPacket.write(dxl_id, value, address, length)
return_packet = MockStatusPacket.build(dxl_id, params=[], length=4, error=error)
stub_name = f"Write_{data_name}_{dxl_id}"
self.stub(
name=stub_name,
receive_bytes=write_request,
send_fn=self._build_send_fn(return_packet, num_invalid_try),
)
return stub_name
@staticmethod
def _build_send_fn(packet: bytes, num_invalid_try: int = 0) -> Callable[[int], bytes]:
def send_fn(_call_count: int) -> bytes:
@ -513,3 +541,10 @@ class MockMotors(MockSerial):
return packet
return send_fn
@staticmethod
def _build_sequential_send_fn(packets: list[bytes]) -> Callable[[int], bytes]:
def send_fn(_call_count: int) -> bytes:
return packets[_call_count - 1]
return send_fn

View File

@ -1,14 +1,14 @@
import abc
import random
import threading
import time
from typing import Callable
import scservo_sdk as scs
import serial
from mock_serial.mock_serial import MockSerial, Stub
from mock_serial import MockSerial
from lerobot.common.motors.feetech import SCS_SERIES_CONTROL_TABLE, FeetechMotorsBus
from lerobot.common.motors.feetech.feetech import patch_setPacketTimeout
from .mock_serial_patch import WaitableStub
# https://files.waveshare.com/upload/2/27/Communication_Protocol_User_Manual-EN%28191218-0923%29.pdf
INSTRUCTION_TYPES = {
@ -46,8 +46,8 @@ class MockFeetechPacket(abc.ABC):
@staticmethod
def _add_checksum(packet: list[int]) -> list[int]:
checksum = 0
for idx in range(2, len(packet) - 1): # except header & checksum
checksum += packet[idx]
for id_ in range(2, len(packet) - 1): # except header & checksum
checksum += packet[id_]
packet[-1] = scs.SCS_LOBYTE(~checksum)
@ -89,8 +89,31 @@ class MockInstructionPacket(MockFeetechPacket):
No parameters required.
"""
params, length = [], 2
return cls.build(scs_id=scs_id, params=params, length=length, instruct_type="Ping")
return cls.build(scs_id=scs_id, params=[], length=2, instruct_type="Ping")
@classmethod
def read(
cls,
scs_id: int,
start_address: int,
data_length: int,
) -> bytes:
"""
Builds a "Read" instruction.
The parameters for Read are:
param[0] = start_address
param[1] = data_length
And 'length' = 4, where:
+1 is for instruction byte,
+1 is for the address byte,
+1 is for the data_length byte,
+1 is for the checksum at the end.
"""
params = [start_address, data_length]
length = 4
return cls.build(scs_id=scs_id, params=params, length=length, instruct_type="Read")
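A quick worked example (not part of the diff, values made up) of the fixed-size Read request: the two params are the address and the requested byte count, and 'length' counts them plus the instruction and checksum bytes.

```python
# Hypothetical Read of a 2-byte register at address 56.
start_address, data_length = 56, 2     # illustrative values only
params = [start_address, data_length]
length = 4                             # 2 params + instruction (1) + checksum (1)
assert length == len(params) + 2
```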
@classmethod
def sync_read(
@ -148,13 +171,42 @@ class MockInstructionPacket(MockFeetechPacket):
+1 is for the checksum at the end.
"""
data = []
for idx, value in ids_values.items():
split_value = FeetechMotorsBus.split_int_bytes(value, data_length)
data += [idx, *split_value]
for id_, value in ids_values.items():
split_value = FeetechMotorsBus._split_int_to_bytes(value, data_length)
data += [id_, *split_value]
params = [start_address, data_length, *data]
length = len(ids_values) * (1 + data_length) + 4
return cls.build(scs_id=scs.BROADCAST_ID, params=params, length=length, instruct_type="Sync_Write")
@classmethod
def write(
cls,
scs_id: int,
value: int,
start_address: int,
data_length: int,
) -> bytes:
"""
Builds a "Write" instruction.
The parameters for Write are:
param[0] = start_address
param[1] = 1st Byte
param[2] = 2nd Byte
...
param[X] = X-th Byte
And 'length' = data_length + 3, where:
+1 is for instruction byte,
+1 is for the start_address byte,
+1 is for the checksum at the end.
"""
data = FeetechMotorsBus._split_int_to_bytes(value, data_length)
params = [start_address, *data]
length = data_length + 3
return cls.build(scs_id=scs_id, params=params, length=length, instruct_type="Write")
class MockStatusPacket(MockFeetechPacket):
"""
@ -182,41 +234,32 @@ class MockStatusPacket(MockFeetechPacket):
] # fmt: skip
@classmethod
def ping(cls, scs_id: int, model_nb: int = 1190, firm_ver: int = 50) -> bytes:
def ping(cls, scs_id: int, error: str = "Success") -> bytes:
"""Builds a 'Ping' status packet.
Args:
scs_id (int): ID of the servo responding.
model_nb (int, optional): Desired 'model number' to be returned in the packet. Defaults to 1190
which corresponds to a XL330-M077-T.
firm_ver (int, optional): Desired 'firmware version' to be returned in the packet.
Defaults to 50.
error (str, optional): Error to be returned. Defaults to "Success".
Returns:
bytes: The raw 'Ping' status packet ready to be sent through serial.
"""
# raise NotImplementedError
params = [scs.SCS_LOBYTE(model_nb), scs.SCS_HIBYTE(model_nb), firm_ver]
length = 2
return cls.build(scs_id, params=params, length=length)
return cls.build(scs_id, params=[], length=2, error=error)
@classmethod
def present_position(cls, scs_id: int, pos: int | None = None, min_max_range: tuple = (0, 4095)) -> bytes:
"""Builds a 'Present_Position' status packet.
def read(cls, scs_id: int, value: int, param_length: int) -> bytes:
"""Builds a 'Read' status packet.
Args:
scs_id (int): List of the servos ids
pos (int | None, optional): Desired 'Present_Position' to be returned in the packet. If None, it
will use a random value in the min_max_range. Defaults to None.
min_max_range (tuple, optional): Min/max range to generate the position values used for when 'pos'
is None. Note that the bounds are included in the range. Defaults to (0, 4095).
scs_id (int): ID of the servo responding.
value (int): Desired value to be returned in the packet.
param_length (int): The byte length of the data at that address, as reported in the control table.
Returns:
bytes: The raw 'Present_Position' status packet ready to be sent through serial.
bytes: The raw 'Sync Read' status packet ready to be sent through serial.
"""
pos = random.randint(*min_max_range) if pos is None else pos
params = [scs.SCS_LOBYTE(pos), scs.SCS_HIBYTE(pos)]
length = 4
params = FeetechMotorsBus._split_int_to_bytes(value, param_length)
length = param_length + 2
return cls.build(scs_id, params=params, length=length)
@ -244,33 +287,8 @@ class MockPortHandler(scs.PortHandler):
return True
class WaitableStub(Stub):
"""
In some situations, a test might be checking if a stub has been called before `MockSerial` thread had time
to read, match, and call the stub. In these situations, the test can fail randomly.
Use `wait_called()` or `wait_calls()` to block until the stub is called, avoiding race conditions.
"""
def __init__(self, **kwargs):
super().__init__(**kwargs)
self._event = threading.Event()
def call(self):
self._event.set()
return super().call()
def wait_called(self, timeout: float = 1.0):
return self._event.wait(timeout)
def wait_calls(self, min_calls: int = 1, timeout: float = 1.0):
start = time.perf_counter()
while time.perf_counter() - start < timeout:
if self.calls >= min_calls:
return self.calls
time.sleep(0.005)
raise TimeoutError(f"Stub not called {min_calls} times within {timeout} seconds.")
def setPacketTimeout(self, packet_length): # noqa: N802
return patch_setPacketTimeout(self, packet_length)
class MockMotors(MockSerial):
@ -293,17 +311,11 @@ class MockMotors(MockSerial):
self._MockSerial__stubs[name or new_stub.receive_bytes] = new_stub
return new_stub
def build_broadcast_ping_stub(
self, ids_models_firmwares: dict[int, list[int]] | None = None, num_invalid_try: int = 0
) -> str:
def build_broadcast_ping_stub(self, ids: list[int] | None = None, num_invalid_try: int = 0) -> str:
ping_request = MockInstructionPacket.ping(scs.BROADCAST_ID)
return_packets = b"".join(
MockStatusPacket.ping(idx, model, firm_ver)
for idx, (model, firm_ver) in ids_models_firmwares.items()
)
return_packets = b"".join(MockStatusPacket.ping(id_) for id_ in ids)
ping_response = self._build_send_fn(return_packets, num_invalid_try)
stub_name = "Ping_" + "_".join([str(idx) for idx in ids_models_firmwares])
stub_name = "Ping_" + "_".join([str(id_) for id_ in ids])
self.stub(
name=stub_name,
receive_bytes=ping_request,
@ -311,13 +323,10 @@ class MockMotors(MockSerial):
)
return stub_name
def build_ping_stub(
self, scs_id: int, model_nb: int, firm_ver: int = 50, num_invalid_try: int = 0
) -> str:
def build_ping_stub(self, scs_id: int, num_invalid_try: int = 0) -> str:
ping_request = MockInstructionPacket.ping(scs_id)
return_packet = MockStatusPacket.ping(scs_id, model_nb, firm_ver)
return_packet = MockStatusPacket.ping(scs_id)
ping_response = self._build_send_fn(return_packet, num_invalid_try)
stub_name = f"Ping_{scs_id}"
self.stub(
name=stub_name,
@ -326,24 +335,59 @@ class MockMotors(MockSerial):
)
return stub_name
def build_sync_read_stub(
self, data_name: str, ids_values: dict[int, int] | None = None, num_invalid_try: int = 0
def build_read_stub(
self, data_name: str, scs_id: int, value: int | None = None, num_invalid_try: int = 0
) -> str:
"""
'data_name' supported:
- Present_Position
- Model_Number
"""
if data_name != "Model_Number":
raise NotImplementedError
address, length = self.ctrl_table[data_name]
read_request = MockInstructionPacket.read(scs_id, address, length)
return_packet = MockStatusPacket.read(scs_id, value, length)
read_response = self._build_send_fn(return_packet, num_invalid_try)
stub_name = f"Read_{data_name}_{scs_id}"
self.stub(
name=stub_name,
receive_bytes=read_request,
send_fn=read_response,
)
return stub_name
def build_sync_read_stub(
self, data_name: str, ids_values: dict[int, int] | None = None, num_invalid_try: int = 0
) -> str:
address, length = self.ctrl_table[data_name]
sync_read_request = MockInstructionPacket.sync_read(list(ids_values), address, length)
if data_name != "Present_Position":
raise NotImplementedError
return_packets = b"".join(MockStatusPacket.read(id_, pos, length) for id_, pos in ids_values.items())
return_packets = b"".join(
MockStatusPacket.present_position(idx, pos) for idx, pos in ids_values.items()
)
sync_read_response = self._build_send_fn(return_packets, num_invalid_try)
stub_name = f"Sync_Read_{data_name}_" + "_".join([str(id_) for id_ in ids_values])
self.stub(
name=stub_name,
receive_bytes=sync_read_request,
send_fn=sync_read_response,
)
return stub_name
stub_name = f"Sync_Read_{data_name}_" + "_".join([str(idx) for idx in ids_values])
def build_sequential_sync_read_stub(
self, data_name: str, ids_values: dict[int, list[int]] | None = None
) -> str:
sequence_length = len(next(iter(ids_values.values())))
assert all(len(positions) == sequence_length for positions in ids_values.values())
address, length = self.ctrl_table[data_name]
sync_read_request = MockInstructionPacket.sync_read(list(ids_values), address, length)
sequential_packets = []
for count in range(sequence_length):
return_packets = b"".join(
MockStatusPacket.read(id_, positions[count], length) for id_, positions in ids_values.items()
)
sequential_packets.append(return_packets)
sync_read_response = self._build_sequential_send_fn(sequential_packets)
stub_name = f"Seq_Sync_Read_{data_name}_" + "_".join([str(id_) for id_ in ids_values])
self.stub(
name=stub_name,
receive_bytes=sync_read_request,
@ -356,7 +400,7 @@ class MockMotors(MockSerial):
) -> str:
address, length = self.ctrl_table[data_name]
sync_read_request = MockInstructionPacket.sync_write(ids_values, address, length)
stub_name = f"Sync_Write_{data_name}_" + "_".join([str(idx) for idx in ids_values])
stub_name = f"Sync_Write_{data_name}_" + "_".join([str(id_) for id_ in ids_values])
self.stub(
name=stub_name,
receive_bytes=sync_read_request,
@ -364,6 +408,20 @@ class MockMotors(MockSerial):
)
return stub_name
def build_write_stub(
self, data_name: str, scs_id: int, value: int, error: str = "Success", num_invalid_try: int = 0
) -> str:
address, length = self.ctrl_table[data_name]
write_request = MockInstructionPacket.write(scs_id, value, address, length)
return_packet = MockStatusPacket.build(scs_id, params=[], length=2, error=error)
stub_name = f"Write_{data_name}_{scs_id}"
self.stub(
name=stub_name,
receive_bytes=write_request,
send_fn=self._build_send_fn(return_packet, num_invalid_try),
)
return stub_name
@staticmethod
def _build_send_fn(packet: bytes, num_invalid_try: int = 0) -> Callable[[int], bytes]:
def send_fn(_call_count: int) -> bytes:
@ -372,3 +430,10 @@ class MockMotors(MockSerial):
return packet
return send_fn
@staticmethod
def _build_sequential_send_fn(packets: list[bytes]) -> Callable[[int], bytes]:
def send_fn(_call_count: int) -> bytes:
return packets[_call_count - 1]
return send_fn

View File

@ -0,0 +1,35 @@
import threading
import time
from mock_serial.mock_serial import Stub
class WaitableStub(Stub):
"""
In some situations, a test might check whether a stub has been called before the `MockSerial` thread has had time
to read, match, and call the stub. In these situations, the test can fail randomly.
Use `wait_called()` or `wait_calls()` to block until the stub is called, avoiding race conditions.
Proposed fix:
https://github.com/benthorner/mock_serial/pull/3
"""
def __init__(self, **kwargs):
super().__init__(**kwargs)
self._event = threading.Event()
def call(self):
self._event.set()
return super().call()
def wait_called(self, timeout: float = 1.0):
return self._event.wait(timeout)
def wait_calls(self, min_calls: int = 1, timeout: float = 1.0):
start = time.perf_counter()
while time.perf_counter() - start < timeout:
if self.calls >= min_calls:
return self.calls
time.sleep(0.005)
raise TimeoutError(f"Stub not called {min_calls} times within {timeout} seconds.")

View File

@ -1,12 +1,13 @@
import sys
from typing import Generator
from unittest.mock import patch
from unittest.mock import MagicMock, patch
import dynamixel_sdk as dxl
import pytest
from lerobot.common.motors import Motor
from lerobot.common.motors.dynamixel import DynamixelMotorsBus
from lerobot.common.motors import Motor, MotorCalibration, MotorNormMode
from lerobot.common.motors.dynamixel import MODEL_NUMBER, DynamixelMotorsBus
from lerobot.common.utils.encoding_utils import encode_twos_complement
from tests.mocks.mock_dynamixel import MockMotors, MockPortHandler
@ -30,12 +31,30 @@ def mock_motors() -> Generator[MockMotors, None, None]:
@pytest.fixture
def dummy_motors() -> dict[str, Motor]:
return {
"dummy_1": Motor(id=1, model="xl430-w250"),
"dummy_2": Motor(id=2, model="xm540-w270"),
"dummy_3": Motor(id=3, model="xl330-m077"),
"dummy_1": Motor(1, "xl430-w250", MotorNormMode.RANGE_M100_100),
"dummy_2": Motor(2, "xm540-w270", MotorNormMode.RANGE_M100_100),
"dummy_3": Motor(3, "xl330-m077", MotorNormMode.RANGE_M100_100),
}
@pytest.fixture
def dummy_calibration(dummy_motors) -> dict[str, MotorCalibration]:
drive_modes = [0, 1, 0]
homings = [-709, -2006, 1624]
mins = [43, 27, 145]
maxes = [1335, 3608, 3999]
calibration = {}
for name, motor in dummy_motors.items():
calibration[name] = MotorCalibration(
id=motor.id,
drive_mode=drive_modes[motor.id - 1],
homing_offset=homings[motor.id - 1],
range_min=mins[motor.id - 1],
range_max=maxes[motor.id - 1],
)
return calibration
@pytest.mark.skipif(sys.platform != "darwin", reason=f"No patching needed on {sys.platform=}")
def test_autouse_patch():
"""Ensures that the autouse fixture correctly patches dxl.PortHandler with MockPortHandler."""
@ -67,24 +86,24 @@ def test_autouse_patch():
"max four bytes",
],
) # fmt: skip
def test_split_int_bytes(value, n_bytes, expected):
assert DynamixelMotorsBus.split_int_bytes(value, n_bytes) == expected
def test_split_int_to_bytes(value, n_bytes, expected):
assert DynamixelMotorsBus._split_int_to_bytes(value, n_bytes) == expected
def test_split_int_bytes_invalid_n_bytes():
def test_split_int_to_bytes_invalid_n_bytes():
with pytest.raises(NotImplementedError):
DynamixelMotorsBus.split_int_bytes(100, 3)
DynamixelMotorsBus._split_int_to_bytes(100, 3)
def test_split_int_bytes_negative_numbers():
def test_split_int_to_bytes_negative_numbers():
with pytest.raises(ValueError):
neg = DynamixelMotorsBus.split_int_bytes(-1, 1)
neg = DynamixelMotorsBus._split_int_to_bytes(-1, 1)
print(neg)
def test_split_int_bytes_large_number():
def test_split_int_to_bytes_large_number():
with pytest.raises(ValueError):
DynamixelMotorsBus.split_int_bytes(2**32, 4) # 4-byte max is 0xFFFFFFFF
DynamixelMotorsBus._split_int_to_bytes(2**32, 4) # 4-byte max is 0xFFFFFFFF
def test_abc_implementation(dummy_motors):
@ -92,133 +111,166 @@ def test_abc_implementation(dummy_motors):
DynamixelMotorsBus(port="/dev/dummy-port", motors=dummy_motors)
@pytest.mark.parametrize(
"idx, model_nb",
[
[1, 1190],
[2, 1200],
[3, 1120],
],
)
def test_ping(idx, model_nb, mock_motors, dummy_motors):
stub_name = mock_motors.build_ping_stub(idx, model_nb)
@pytest.mark.parametrize("id_", [1, 2, 3])
def test_ping(id_, mock_motors, dummy_motors):
expected_model_nb = MODEL_NUMBER[dummy_motors[f"dummy_{id_}"].model]
stub_name = mock_motors.build_ping_stub(id_, expected_model_nb)
motors_bus = DynamixelMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect()
motors_bus.connect(assert_motors_exist=False)
ping_model_nb = motors_bus.ping(idx)
ping_model_nb = motors_bus.ping(id_)
assert ping_model_nb == model_nb
assert ping_model_nb == expected_model_nb
assert mock_motors.stubs[stub_name].called
def test_broadcast_ping(mock_motors, dummy_motors):
expected_pings = {
1: [1060, 50],
2: [1120, 30],
3: [1190, 10],
}
stub_name = mock_motors.build_broadcast_ping_stub(expected_pings)
models = {m.id: m.model for m in dummy_motors.values()}
expected_model_nbs = {id_: MODEL_NUMBER[model] for id_, model in models.items()}
stub_name = mock_motors.build_broadcast_ping_stub(expected_model_nbs)
motors_bus = DynamixelMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect()
motors_bus.connect(assert_motors_exist=False)
ping_list = motors_bus.broadcast_ping()
ping_model_nbs = motors_bus.broadcast_ping()
assert ping_list == expected_pings
assert ping_model_nbs == expected_model_nbs
assert mock_motors.stubs[stub_name].called
@pytest.mark.parametrize(
"motors",
[
None,
[1, 2, 3],
["dummy_1", "dummy_2", "dummy_3"],
[1, "dummy_2", 3],
],
ids=["None", "by ids", "by names", "mixed"],
)
def test_read_all_motors(motors, mock_motors, dummy_motors):
def test_sync_read_none(mock_motors, dummy_motors):
expected_positions = {
1: 1337,
2: 42,
3: 4016,
"dummy_1": 1337,
"dummy_2": 42,
"dummy_3": 4016,
}
ids_values = dict(zip([1, 2, 3], expected_positions.values(), strict=True))
stub_name = mock_motors.build_sync_read_stub("Present_Position", ids_values)
motors_bus = DynamixelMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect(assert_motors_exist=False)
read_positions = motors_bus.sync_read("Present_Position", normalize=False)
assert mock_motors.stubs[stub_name].called
assert read_positions == expected_positions
@pytest.mark.parametrize(
"id_, position",
[
(1, 1337),
(2, 42),
(3, 4016),
],
)
def test_sync_read_by_id(id_, position, mock_motors, dummy_motors):
expected_position = {id_: position}
stub_name = mock_motors.build_sync_read_stub("Present_Position", expected_position)
motors_bus = DynamixelMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect(assert_motors_exist=False)
read_position = motors_bus.sync_read("Present_Position", id_, normalize=False)
assert mock_motors.stubs[stub_name].called
assert read_position == expected_position
@pytest.mark.parametrize(
"ids, positions",
[
([1], [1337]),
([1, 2], [1337, 42]),
([1, 2, 3], [1337, 42, 4016]),
],
ids=["1 motor", "2 motors", "3 motors"],
) # fmt: skip
def test_sync_read_by_ids(ids, positions, mock_motors, dummy_motors):
assert len(ids) == len(positions)
expected_positions = dict(zip(ids, positions, strict=True))
stub_name = mock_motors.build_sync_read_stub("Present_Position", expected_positions)
motors_bus = DynamixelMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect()
motors_bus.connect(assert_motors_exist=False)
positions_read = motors_bus.read("Present_Position", motors=motors)
read_positions = motors_bus.sync_read("Present_Position", ids, normalize=False)
motors = ["dummy_1", "dummy_2", "dummy_3"] if motors is None else motors
assert mock_motors.stubs[stub_name].called
assert positions_read == dict(zip(motors, expected_positions.values(), strict=True))
assert read_positions == expected_positions
@pytest.mark.parametrize(
"idx, pos",
"id_, position",
[
[1, 1337],
[2, 42],
[3, 4016],
(1, 1337),
(2, 42),
(3, 4016),
],
)
def test_read_single_motor_by_name(idx, pos, mock_motors, dummy_motors):
expected_position = {idx: pos}
stub_name = mock_motors.build_sync_read_stub("Present_Position", expected_position)
def test_sync_read_by_name(id_, position, mock_motors, dummy_motors):
expected_position = {f"dummy_{id_}": position}
stub_name = mock_motors.build_sync_read_stub("Present_Position", {id_: position})
motors_bus = DynamixelMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect()
motors_bus.connect(assert_motors_exist=False)
pos_dict = motors_bus.read("Present_Position", f"dummy_{idx}")
read_position = motors_bus.sync_read("Present_Position", f"dummy_{id_}", normalize=False)
assert mock_motors.stubs[stub_name].called
assert pos_dict == {f"dummy_{idx}": pos}
assert read_position == expected_position
@pytest.mark.parametrize(
"idx, pos",
"ids, positions",
[
[1, 1337],
[2, 42],
[3, 4016],
([1], [1337]),
([1, 2], [1337, 42]),
([1, 2, 3], [1337, 42, 4016]),
],
)
def test_read_single_motor_by_id(idx, pos, mock_motors, dummy_motors):
expected_position = {idx: pos}
stub_name = mock_motors.build_sync_read_stub("Present_Position", expected_position)
ids=["1 motor", "2 motors", "3 motors"],
) # fmt: skip
def test_sync_read_by_names(ids, positions, mock_motors, dummy_motors):
assert len(ids) == len(positions)
names = [f"dummy_{dxl_id}" for dxl_id in ids]
expected_positions = dict(zip(names, positions, strict=True))
ids_values = dict(zip(ids, positions, strict=True))
stub_name = mock_motors.build_sync_read_stub("Present_Position", ids_values)
motors_bus = DynamixelMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect()
motors_bus.connect(assert_motors_exist=False)
pos_dict = motors_bus.read("Present_Position", idx)
read_positions = motors_bus.sync_read("Present_Position", names, normalize=False)
assert mock_motors.stubs[stub_name].called
assert pos_dict == {idx: pos}
assert read_positions == expected_positions
@pytest.mark.parametrize(
"num_retry, num_invalid_try, pos",
[
[0, 2, 1337],
[2, 3, 42],
[3, 2, 4016],
[2, 1, 999],
(0, 2, 1337),
(2, 3, 42),
(3, 2, 4016),
(2, 1, 999),
],
)
def test_read_num_retry(num_retry, num_invalid_try, pos, mock_motors, dummy_motors):
def test_sync_read_num_retry(num_retry, num_invalid_try, pos, mock_motors, dummy_motors):
expected_position = {1: pos}
stub_name = mock_motors.build_sync_read_stub(
"Present_Position", expected_position, num_invalid_try=num_invalid_try
@ -227,63 +279,289 @@ def test_read_num_retry(num_retry, num_invalid_try, pos, mock_motors, dummy_moto
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect()
motors_bus.connect(assert_motors_exist=False)
if num_retry >= num_invalid_try:
pos_dict = motors_bus.read("Present_Position", 1, num_retry=num_retry)
pos_dict = motors_bus.sync_read("Present_Position", 1, normalize=False, num_retry=num_retry)
assert pos_dict == {1: pos}
else:
with pytest.raises(ConnectionError):
_ = motors_bus.read("Present_Position", 1, num_retry=num_retry)
_ = motors_bus.sync_read("Present_Position", 1, normalize=False, num_retry=num_retry)
expected_calls = min(1 + num_retry, 1 + num_invalid_try)
assert mock_motors.stubs[stub_name].calls == expected_calls
@pytest.mark.parametrize(
"motors",
"data_name, value",
[
[1, 2, 3],
["dummy_1", "dummy_2", "dummy_3"],
[1, "dummy_2", 3],
("Torque_Enable", 0),
("Torque_Enable", 1),
("Goal_Position", 1337),
("Goal_Position", 42),
],
ids=["by ids", "by names", "mixed"],
)
def test_write_all_motors(motors, mock_motors, dummy_motors):
goal_positions = {
1: 1337,
2: 42,
3: 4016,
}
stub_name = mock_motors.build_sync_write_stub("Goal_Position", goal_positions)
def test_sync_write_single_value(data_name, value, mock_motors, dummy_motors):
ids_values = {m.id: value for m in dummy_motors.values()}
stub_name = mock_motors.build_sync_write_stub(data_name, ids_values)
motors_bus = DynamixelMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect()
motors_bus.connect(assert_motors_exist=False)
values = dict(zip(motors, goal_positions.values(), strict=True))
motors_bus.write("Goal_Position", values)
motors_bus.sync_write(data_name, value, normalize=False)
assert mock_motors.stubs[stub_name].wait_called()
@pytest.mark.parametrize(
"data_name, value",
"id_, position",
[
["Torque_Enable", 0],
["Torque_Enable", 1],
(1, 1337),
(2, 42),
(3, 4016),
],
)
def test_write_all_motors_single_value(data_name, value, mock_motors, dummy_motors):
values = {m.id: value for m in dummy_motors.values()}
stub_name = mock_motors.build_sync_write_stub(data_name, values)
def test_sync_write_by_id(id_, position, mock_motors, dummy_motors):
value = {id_: position}
stub_name = mock_motors.build_sync_write_stub("Goal_Position", value)
motors_bus = DynamixelMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect()
motors_bus.connect(assert_motors_exist=False)
motors_bus.write(data_name, value)
motors_bus.sync_write("Goal_Position", value, normalize=False)
assert mock_motors.stubs[stub_name].wait_called()
@pytest.mark.parametrize(
"ids, positions",
[
([1], [1337]),
([1, 2], [1337, 42]),
([1, 2, 3], [1337, 42, 4016]),
],
ids=["1 motor", "2 motors", "3 motors"],
) # fmt: skip
def test_sync_write_by_ids(ids, positions, mock_motors, dummy_motors):
assert len(ids) == len(positions)
values = dict(zip(ids, positions, strict=True))
stub_name = mock_motors.build_sync_write_stub("Goal_Position", values)
motors_bus = DynamixelMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect(assert_motors_exist=False)
motors_bus.sync_write("Goal_Position", values, normalize=False)
assert mock_motors.stubs[stub_name].wait_called()
@pytest.mark.parametrize(
"id_, position",
[
(1, 1337),
(2, 42),
(3, 4016),
],
)
def test_sync_write_by_name(id_, position, mock_motors, dummy_motors):
id_value = {id_: position}
stub_name = mock_motors.build_sync_write_stub("Goal_Position", id_value)
motors_bus = DynamixelMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect(assert_motors_exist=False)
write_value = {f"dummy_{id_}": position}
motors_bus.sync_write("Goal_Position", write_value, normalize=False)
assert mock_motors.stubs[stub_name].wait_called()
@pytest.mark.parametrize(
"ids, positions",
[
([1], [1337]),
([1, 2], [1337, 42]),
([1, 2, 3], [1337, 42, 4016]),
],
ids=["1 motor", "2 motors", "3 motors"],
) # fmt: skip
def test_sync_write_by_names(ids, positions, mock_motors, dummy_motors):
assert len(ids) == len(positions)
ids_values = dict(zip(ids, positions, strict=True))
stub_name = mock_motors.build_sync_write_stub("Goal_Position", ids_values)
motors_bus = DynamixelMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect(assert_motors_exist=False)
write_values = {f"dummy_{id_}": pos for id_, pos in ids_values.items()}
motors_bus.sync_write("Goal_Position", write_values, normalize=False)
assert mock_motors.stubs[stub_name].wait_called()
@pytest.mark.parametrize(
"data_name, dxl_id, value",
[
("Torque_Enable", 1, 0),
("Torque_Enable", 1, 1),
("Goal_Position", 2, 1337),
("Goal_Position", 3, 42),
],
)
def test_write_by_id(data_name, dxl_id, value, mock_motors, dummy_motors):
stub_name = mock_motors.build_write_stub(data_name, dxl_id, value)
motors_bus = DynamixelMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect(assert_motors_exist=False)
motors_bus.write(data_name, dxl_id, value, normalize=False)
assert mock_motors.stubs[stub_name].called
@pytest.mark.parametrize(
"data_name, dxl_id, value",
[
("Torque_Enable", 1, 0),
("Torque_Enable", 1, 1),
("Goal_Position", 2, 1337),
("Goal_Position", 3, 42),
],
)
def test_write_by_name(data_name, dxl_id, value, mock_motors, dummy_motors):
stub_name = mock_motors.build_write_stub(data_name, dxl_id, value)
motors_bus = DynamixelMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect(assert_motors_exist=False)
motors_bus.write(data_name, f"dummy_{dxl_id}", value, normalize=False)
assert mock_motors.stubs[stub_name].called
def test_is_calibrated(mock_motors, dummy_motors, dummy_calibration):
drive_modes = {m.id: m.drive_mode for m in dummy_calibration.values()}
encoded_homings = {m.id: encode_twos_complement(m.homing_offset, 4) for m in dummy_calibration.values()}
mins = {m.id: m.range_min for m in dummy_calibration.values()}
maxes = {m.id: m.range_max for m in dummy_calibration.values()}
drive_modes_stub = mock_motors.build_sync_read_stub("Drive_Mode", drive_modes)
offsets_stub = mock_motors.build_sync_read_stub("Homing_Offset", encoded_homings)
mins_stub = mock_motors.build_sync_read_stub("Min_Position_Limit", mins)
maxes_stub = mock_motors.build_sync_read_stub("Max_Position_Limit", maxes)
motors_bus = DynamixelMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
calibration=dummy_calibration,
)
motors_bus.connect(assert_motors_exist=False)
is_calibrated = motors_bus.is_calibrated
assert is_calibrated
assert mock_motors.stubs[drive_modes_stub].called
assert mock_motors.stubs[offsets_stub].called
assert mock_motors.stubs[mins_stub].called
assert mock_motors.stubs[maxes_stub].called
def test_reset_calibration(mock_motors, dummy_motors):
write_homing_stubs = []
write_mins_stubs = []
write_maxes_stubs = []
for motor in dummy_motors.values():
write_homing_stubs.append(mock_motors.build_write_stub("Homing_Offset", motor.id, 0))
write_mins_stubs.append(mock_motors.build_write_stub("Min_Position_Limit", motor.id, 0))
write_maxes_stubs.append(mock_motors.build_write_stub("Max_Position_Limit", motor.id, 4095))
motors_bus = DynamixelMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect(assert_motors_exist=False)
motors_bus.reset_calibration()
assert all(mock_motors.stubs[stub].called for stub in write_homing_stubs)
assert all(mock_motors.stubs[stub].called for stub in write_mins_stubs)
assert all(mock_motors.stubs[stub].called for stub in write_maxes_stubs)
def test_set_half_turn_homings(mock_motors, dummy_motors):
"""
For this test, we assume that the homing offsets are already 0 such that
Present_Position == Actual_Position
"""
current_positions = {
1: 1337,
2: 42,
3: 3672,
}
expected_homings = {
1: 710, # 2047 - 1337
2: 2005, # 2047 - 42
3: -1625, # 2047 - 3672
}
read_pos_stub = mock_motors.build_sync_read_stub("Present_Position", current_positions)
write_homing_stubs = []
for id_, homing in expected_homings.items():
encoded_homing = encode_twos_complement(homing, 4)
stub = mock_motors.build_write_stub("Homing_Offset", id_, encoded_homing)
write_homing_stubs.append(stub)
motors_bus = DynamixelMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect(assert_motors_exist=False)
motors_bus.reset_calibration = MagicMock()
motors_bus.set_half_turn_homings()
motors_bus.reset_calibration.assert_called_once()
assert mock_motors.stubs[read_pos_stub].called
assert all(mock_motors.stubs[stub].called for stub in write_homing_stubs)
def test_record_ranges_of_motion(mock_motors, dummy_motors):
positions = {
1: [351, 42, 1337],
2: [28, 3600, 2444],
3: [4002, 2999, 146],
}
expected_mins = {
"dummy_1": 42,
"dummy_2": 28,
"dummy_3": 146,
}
expected_maxes = {
"dummy_1": 1337,
"dummy_2": 3600,
"dummy_3": 4002,
}
read_pos_stub = mock_motors.build_sequential_sync_read_stub("Present_Position", positions)
with patch("lerobot.common.motors.motors_bus.enter_pressed", side_effect=[False, True]):
motors_bus = DynamixelMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect(assert_motors_exist=False)
mins, maxes = motors_bus.record_ranges_of_motion(display_values=False)
assert mock_motors.stubs[read_pos_stub].calls == 3
assert mins == expected_mins
assert maxes == expected_maxes

View File

@ -1,12 +1,13 @@
import sys
from typing import Generator
from unittest.mock import patch
from unittest.mock import MagicMock, patch
import pytest
import scservo_sdk as scs
from lerobot.common.motors import Motor
from lerobot.common.motors.feetech import FeetechMotorsBus
from lerobot.common.motors import Motor, MotorCalibration, MotorNormMode
from lerobot.common.motors.feetech import MODEL_NUMBER, FeetechMotorsBus
from lerobot.common.utils.encoding_utils import encode_sign_magnitude
from tests.mocks.mock_feetech import MockMotors, MockPortHandler
@ -30,12 +31,29 @@ def mock_motors() -> Generator[MockMotors, None, None]:
@pytest.fixture
def dummy_motors() -> dict[str, Motor]:
return {
"dummy_1": Motor(id=1, model="sts3215"),
"dummy_2": Motor(id=2, model="sts3215"),
"dummy_3": Motor(id=3, model="sts3215"),
"dummy_1": Motor(1, "sts3215", MotorNormMode.RANGE_M100_100),
"dummy_2": Motor(2, "sts3215", MotorNormMode.RANGE_M100_100),
"dummy_3": Motor(3, "sts3215", MotorNormMode.RANGE_M100_100),
}
@pytest.fixture
def dummy_calibration(dummy_motors) -> dict[str, MotorCalibration]:
homings = [-709, -2006, 1624]
mins = [43, 27, 145]
maxes = [1335, 3608, 3999]
calibration = {}
for name, motor in dummy_motors.items():
calibration[name] = MotorCalibration(
id=motor.id,
drive_mode=0,
homing_offset=homings[motor.id - 1],
range_min=mins[motor.id - 1],
range_max=maxes[motor.id - 1],
)
return calibration
@pytest.mark.skipif(sys.platform != "darwin", reason=f"No patching needed on {sys.platform=}")
def test_autouse_patch():
"""Ensures that the autouse fixture correctly patches scs.PortHandler with MockPortHandler."""
@ -67,24 +85,24 @@ def test_autouse_patch():
"max four bytes",
],
) # fmt: skip
def test_split_int_bytes(value, n_bytes, expected):
assert FeetechMotorsBus.split_int_bytes(value, n_bytes) == expected
def test_split_int_to_bytes(value, n_bytes, expected):
assert FeetechMotorsBus._split_int_to_bytes(value, n_bytes) == expected
def test_split_int_bytes_invalid_n_bytes():
def test_split_int_to_bytes_invalid_n_bytes():
with pytest.raises(NotImplementedError):
FeetechMotorsBus.split_int_bytes(100, 3)
FeetechMotorsBus._split_int_to_bytes(100, 3)
def test_split_int_bytes_negative_numbers():
def test_split_int_to_bytes_negative_numbers():
with pytest.raises(ValueError):
neg = FeetechMotorsBus.split_int_bytes(-1, 1)
neg = FeetechMotorsBus._split_int_to_bytes(-1, 1)
print(neg)
def test_split_int_bytes_large_number():
def test_split_int_to_bytes_large_number():
with pytest.raises(ValueError):
FeetechMotorsBus.split_int_bytes(2**32, 4) # 4-byte max is 0xFFFFFFFF
FeetechMotorsBus._split_int_to_bytes(2**32, 4) # 4-byte max is 0xFFFFFFFF
def test_abc_implementation(dummy_motors):
@ -93,134 +111,186 @@ def test_abc_implementation(dummy_motors):
@pytest.mark.skip("TODO")
@pytest.mark.parametrize(
"idx, model_nb",
[
[1, 1190],
[2, 1200],
[3, 1120],
],
)
def test_ping(idx, model_nb, mock_motors, dummy_motors):
stub_name = mock_motors.build_ping_stub(idx, model_nb)
def test_scan_port(mock_motors):
expected = {
9_600: {1: 777},
57_600: {2: 777},
500_000: {237: 777},
}
expected_model_nbs = {id_: model for d in expected.values() for id_, model in d.items()}
ping_stub = mock_motors.build_broadcast_ping_stub(list(expected_model_nbs))
model_nb_stub = mock_motors.build_sync_read_stub("Model_Number", expected_model_nbs)
found = FeetechMotorsBus.scan_port(mock_motors.port)
assert found == expected
assert mock_motors.stubs[ping_stub].called
assert mock_motors.stubs[model_nb_stub].called
@pytest.mark.parametrize("id_", [1, 2, 3])
def test_ping(id_, mock_motors, dummy_motors):
expected_model_nb = MODEL_NUMBER[dummy_motors[f"dummy_{id_}"].model]
ping_stub = mock_motors.build_ping_stub(id_)
model_nb_stub = mock_motors.build_read_stub("Model_Number", id_, expected_model_nb)
motors_bus = FeetechMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect()
motors_bus.connect(assert_motors_exist=False)
ping_model_nb = motors_bus.ping(idx)
ping_model_nb = motors_bus.ping(id_)
assert ping_model_nb == model_nb
assert mock_motors.stubs[stub_name].called
assert ping_model_nb == expected_model_nb
assert mock_motors.stubs[ping_stub].called
assert mock_motors.stubs[model_nb_stub].called
@pytest.mark.skip("TODO")
def test_broadcast_ping(mock_motors, dummy_motors):
expected_pings = {
1: [1060, 50],
2: [1120, 30],
3: [1190, 10],
}
stub_name = mock_motors.build_broadcast_ping_stub(expected_pings)
models = {m.id: m.model for m in dummy_motors.values()}
expected_model_nbs = {id_: MODEL_NUMBER[model] for id_, model in models.items()}
ping_stub = mock_motors.build_broadcast_ping_stub(list(models))
model_nb_stub = mock_motors.build_sync_read_stub("Model_Number", expected_model_nbs)
motors_bus = FeetechMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect()
motors_bus.connect(assert_motors_exist=False)
ping_list = motors_bus.broadcast_ping()
ping_model_nbs = motors_bus.broadcast_ping()
assert ping_model_nbs == expected_model_nbs
assert mock_motors.stubs[ping_stub].called
assert mock_motors.stubs[model_nb_stub].called
def test_sync_read_none(mock_motors, dummy_motors):
expected_positions = {
"dummy_1": 1337,
"dummy_2": 42,
"dummy_3": 4016,
}
ids_values = dict(zip([1, 2, 3], expected_positions.values(), strict=True))
stub_name = mock_motors.build_sync_read_stub("Present_Position", ids_values)
motors_bus = FeetechMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect(assert_motors_exist=False)
read_positions = motors_bus.sync_read("Present_Position", normalize=False)
assert ping_list == expected_pings
assert mock_motors.stubs[stub_name].called
assert read_positions == expected_positions
@pytest.mark.parametrize(
"motors",
"id_, position",
[
None,
[1, 2, 3],
["dummy_1", "dummy_2", "dummy_3"],
[1, "dummy_2", 3],
(1, 1337),
(2, 42),
(3, 4016),
],
ids=["None", "by ids", "by names", "mixed"],
)
def test_read_all_motors(motors, mock_motors, dummy_motors):
expected_positions = {
1: 1337,
2: 42,
3: 4016,
}
def test_sync_read_by_id(id_, position, mock_motors, dummy_motors):
expected_position = {id_: position}
stub_name = mock_motors.build_sync_read_stub("Present_Position", expected_position)
motors_bus = FeetechMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect(assert_motors_exist=False)
read_position = motors_bus.sync_read("Present_Position", id_, normalize=False)
assert mock_motors.stubs[stub_name].called
assert read_position == expected_position
@pytest.mark.parametrize(
"ids, positions",
[
([1], [1337]),
([1, 2], [1337, 42]),
([1, 2, 3], [1337, 42, 4016]),
],
ids=["1 motor", "2 motors", "3 motors"],
) # fmt: skip
def test_sync_read_by_ids(ids, positions, mock_motors, dummy_motors):
assert len(ids) == len(positions)
expected_positions = dict(zip(ids, positions, strict=True))
stub_name = mock_motors.build_sync_read_stub("Present_Position", expected_positions)
motors_bus = FeetechMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect()
motors_bus.connect(assert_motors_exist=False)
positions_read = motors_bus.read("Present_Position", motors=motors)
read_positions = motors_bus.sync_read("Present_Position", ids, normalize=False)
motors = ["dummy_1", "dummy_2", "dummy_3"] if motors is None else motors
assert mock_motors.stubs[stub_name].called
assert positions_read == dict(zip(motors, expected_positions.values(), strict=True))
assert read_positions == expected_positions
@pytest.mark.parametrize(
"idx, pos",
"id_, position",
[
[1, 1337],
[2, 42],
[3, 4016],
(1, 1337),
(2, 42),
(3, 4016),
],
)
def test_read_single_motor_by_name(idx, pos, mock_motors, dummy_motors):
expected_position = {idx: pos}
stub_name = mock_motors.build_sync_read_stub("Present_Position", expected_position)
def test_sync_read_by_name(id_, position, mock_motors, dummy_motors):
expected_position = {f"dummy_{id_}": position}
stub_name = mock_motors.build_sync_read_stub("Present_Position", {id_: position})
motors_bus = FeetechMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect()
motors_bus.connect(assert_motors_exist=False)
pos_dict = motors_bus.read("Present_Position", f"dummy_{idx}")
read_position = motors_bus.sync_read("Present_Position", f"dummy_{id_}", normalize=False)
assert mock_motors.stubs[stub_name].called
assert pos_dict == {f"dummy_{idx}": pos}
assert read_position == expected_position
@pytest.mark.parametrize(
"idx, pos",
"ids, positions",
[
[1, 1337],
[2, 42],
[3, 4016],
([1], [1337]),
([1, 2], [1337, 42]),
([1, 2, 3], [1337, 42, 4016]),
],
)
def test_read_single_motor_by_id(idx, pos, mock_motors, dummy_motors):
expected_position = {idx: pos}
stub_name = mock_motors.build_sync_read_stub("Present_Position", expected_position)
ids=["1 motor", "2 motors", "3 motors"],
) # fmt: skip
def test_sync_read_by_names(ids, positions, mock_motors, dummy_motors):
assert len(ids) == len(positions)
names = [f"dummy_{dxl_id}" for dxl_id in ids]
expected_positions = dict(zip(names, positions, strict=True))
ids_values = dict(zip(ids, positions, strict=True))
stub_name = mock_motors.build_sync_read_stub("Present_Position", ids_values)
motors_bus = FeetechMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect()
motors_bus.connect(assert_motors_exist=False)
pos_dict = motors_bus.read("Present_Position", idx)
read_positions = motors_bus.sync_read("Present_Position", names, normalize=False)
assert mock_motors.stubs[stub_name].called
assert pos_dict == {idx: pos}
assert read_positions == expected_positions
@pytest.mark.parametrize(
"num_retry, num_invalid_try, pos",
[
[0, 2, 1337],
[2, 3, 42],
[3, 2, 4016],
[2, 1, 999],
(0, 2, 1337),
(2, 3, 42),
(3, 2, 4016),
(2, 1, 999),
],
)
def test_read_num_retry(num_retry, num_invalid_try, pos, mock_motors, dummy_motors):
def test_sync_read_num_retry(num_retry, num_invalid_try, pos, mock_motors, dummy_motors):
expected_position = {1: pos}
stub_name = mock_motors.build_sync_read_stub(
"Present_Position", expected_position, num_invalid_try=num_invalid_try
@ -229,63 +299,286 @@ def test_read_num_retry(num_retry, num_invalid_try, pos, mock_motors, dummy_moto
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect()
motors_bus.connect(assert_motors_exist=False)
if num_retry >= num_invalid_try:
pos_dict = motors_bus.read("Present_Position", 1, num_retry=num_retry)
pos_dict = motors_bus.sync_read("Present_Position", 1, normalize=False, num_retry=num_retry)
assert pos_dict == {1: pos}
else:
with pytest.raises(ConnectionError):
_ = motors_bus.read("Present_Position", 1, num_retry=num_retry)
_ = motors_bus.sync_read("Present_Position", 1, normalize=False, num_retry=num_retry)
expected_calls = min(1 + num_retry, 1 + num_invalid_try)
assert mock_motors.stubs[stub_name].calls == expected_calls
@pytest.mark.parametrize(
"motors",
"data_name, value",
[
[1, 2, 3],
["dummy_1", "dummy_2", "dummy_3"],
[1, "dummy_2", 3],
("Torque_Enable", 0),
("Torque_Enable", 1),
("Goal_Position", 1337),
("Goal_Position", 42),
],
ids=["by ids", "by names", "mixed"],
)
def test_write_all_motors(motors, mock_motors, dummy_motors):
goal_positions = {
1: 1337,
2: 42,
3: 4016,
}
stub_name = mock_motors.build_sync_write_stub("Goal_Position", goal_positions)
def test_sync_write_single_value(data_name, value, mock_motors, dummy_motors):
ids_values = {m.id: value for m in dummy_motors.values()}
stub_name = mock_motors.build_sync_write_stub(data_name, ids_values)
motors_bus = FeetechMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect()
motors_bus.connect(assert_motors_exist=False)
values = dict(zip(motors, goal_positions.values(), strict=True))
motors_bus.write("Goal_Position", values)
motors_bus.sync_write(data_name, value, normalize=False)
assert mock_motors.stubs[stub_name].wait_called()
@pytest.mark.parametrize(
"data_name, value",
"id_, position",
[
["Torque_Enable", 0],
["Torque_Enable", 1],
(1, 1337),
(2, 42),
(3, 4016),
],
)
def test_sync_write_by_id(id_, position, mock_motors, dummy_motors):
value = {id_: position}
stub_name = mock_motors.build_sync_write_stub("Goal_Position", value)
motors_bus = FeetechMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect(assert_motors_exist=False)
motors_bus.sync_write("Goal_Position", value, normalize=False)
assert mock_motors.stubs[stub_name].wait_called()
@pytest.mark.parametrize(
"ids, positions",
[
([1], [1337]),
([1, 2], [1337, 42]),
([1, 2, 3], [1337, 42, 4016]),
],
ids=["1 motor", "2 motors", "3 motors"],
) # fmt: skip
def test_sync_write_by_ids(ids, positions, mock_motors, dummy_motors):
assert len(ids) == len(positions)
values = dict(zip(ids, positions, strict=True))
stub_name = mock_motors.build_sync_write_stub("Goal_Position", values)
motors_bus = FeetechMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect(assert_motors_exist=False)
motors_bus.sync_write("Goal_Position", values, normalize=False)
assert mock_motors.stubs[stub_name].wait_called()
@pytest.mark.parametrize(
"id_, position",
[
(1, 1337),
(2, 42),
(3, 4016),
],
)
def test_sync_write_by_name(id_, position, mock_motors, dummy_motors):
id_value = {id_: position}
stub_name = mock_motors.build_sync_write_stub("Goal_Position", id_value)
motors_bus = FeetechMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect(assert_motors_exist=False)
write_value = {f"dummy_{id_}": position}
motors_bus.sync_write("Goal_Position", write_value, normalize=False)
assert mock_motors.stubs[stub_name].wait_called()
@pytest.mark.parametrize(
"ids, positions",
[
([1], [1337]),
([1, 2], [1337, 42]),
([1, 2, 3], [1337, 42, 4016]),
],
ids=["1 motor", "2 motors", "3 motors"],
) # fmt: skip
def test_sync_write_by_names(ids, positions, mock_motors, dummy_motors):
assert len(ids) == len(positions)
ids_values = dict(zip(ids, positions, strict=True))
stub_name = mock_motors.build_sync_write_stub("Goal_Position", ids_values)
motors_bus = FeetechMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect(assert_motors_exist=False)
write_values = {f"dummy_{id_}": pos for id_, pos in ids_values.items()}
motors_bus.sync_write("Goal_Position", write_values, normalize=False)
assert mock_motors.stubs[stub_name].wait_called()
@pytest.mark.parametrize(
"data_name, dxl_id, value",
[
("Torque_Enable", 1, 0),
("Torque_Enable", 1, 1),
("Goal_Position", 2, 1337),
("Goal_Position", 3, 42),
],
)
def test_write_by_id(data_name, dxl_id, value, mock_motors, dummy_motors):
stub_name = mock_motors.build_write_stub(data_name, dxl_id, value)
motors_bus = FeetechMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect(assert_motors_exist=False)
motors_bus.write(data_name, dxl_id, value, normalize=False)
assert mock_motors.stubs[stub_name].called
@pytest.mark.parametrize(
"data_name, dxl_id, value",
[
("Torque_Enable", 1, 0),
("Torque_Enable", 1, 1),
("Goal_Position", 2, 1337),
("Goal_Position", 3, 42),
],
)
def test_write_by_name(data_name, dxl_id, value, mock_motors, dummy_motors):
stub_name = mock_motors.build_write_stub(data_name, dxl_id, value)
motors_bus = FeetechMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect(assert_motors_exist=False)
motors_bus.write(data_name, f"dummy_{dxl_id}", value, normalize=False)
assert mock_motors.stubs[stub_name].called
def test_is_calibrated(mock_motors, dummy_motors, dummy_calibration):
encoded_homings = {m.id: encode_sign_magnitude(m.homing_offset, 11) for m in dummy_calibration.values()}
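# Note on the encoding above (an assumption, based on how it is used here): homing offsets are stored
# in sign-magnitude form with bit 11 as the sign bit (the second argument to encode_sign_magnitude),
# so e.g. -710 would be returned by the stub as 710 | (1 << 11) = 2758, which the bus is expected to
# decode back to -710 when checking calibration.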
mins = {m.id: m.range_min for m in dummy_calibration.values()}
maxes = {m.id: m.range_max for m in dummy_calibration.values()}
offsets_stub = mock_motors.build_sync_read_stub("Homing_Offset", encoded_homings)
mins_stub = mock_motors.build_sync_read_stub("Min_Position_Limit", mins)
maxes_stub = mock_motors.build_sync_read_stub("Max_Position_Limit", maxes)
motors_bus = FeetechMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
calibration=dummy_calibration,
)
motors_bus.connect(assert_motors_exist=False)
is_calibrated = motors_bus.is_calibrated
assert is_calibrated
assert mock_motors.stubs[offsets_stub].called
assert mock_motors.stubs[mins_stub].called
assert mock_motors.stubs[maxes_stub].called
def test_reset_calibration(mock_motors, dummy_motors):
write_homing_stubs = []
write_mins_stubs = []
write_maxes_stubs = []
for motor in dummy_motors.values():
write_homing_stubs.append(mock_motors.build_write_stub("Homing_Offset", motor.id, 0))
write_mins_stubs.append(mock_motors.build_write_stub("Min_Position_Limit", motor.id, 0))
write_maxes_stubs.append(mock_motors.build_write_stub("Max_Position_Limit", motor.id, 4095))
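# Sketch of the expectation built above: resetting calibration presumably writes a "neutral"
# configuration per motor, i.e. no homing offset (0) and position limits spanning the full
# raw 12-bit range (0..4095).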
motors_bus = FeetechMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect(assert_motors_exist=False)
motors_bus.reset_calibration()
assert all(mock_motors.stubs[stub].called for stub in write_homing_stubs)
assert all(mock_motors.stubs[stub].called for stub in write_mins_stubs)
assert all(mock_motors.stubs[stub].called for stub in write_maxes_stubs)
def test_set_half_turn_homings(mock_motors, dummy_motors):
"""
For this test, we assume that the homing offsets are already 0 such that
Present_Position == Actual_Position
"""
current_positions = {
1: 1337,
2: 42,
3: 3672,
}
expected_homings = {
1: -710, # 1337 - 2047
2: -2005, # 42 - 2047
3: 1625, # 3672 - 2047
}
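# Worked example for the expected values above, assuming a 0..4095 (12-bit) position range with
# mid-point 2047: a "half-turn" homing sets homing_offset = present_position - 2047, e.g.
# 1337 - 2047 = -710, so the current pose reads back as the middle of the range.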
read_pos_stub = mock_motors.build_sync_read_stub("Present_Position", current_positions)
write_homing_stubs = []
for id_, homing in expected_homings.items():
encoded_homing = encode_sign_magnitude(homing, 11)
stub = mock_motors.build_write_stub("Homing_Offset", id_, encoded_homing)
write_homing_stubs.append(stub)
motors_bus = FeetechMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect(assert_motors_exist=False)
motors_bus.reset_calibration = MagicMock()
motors_bus.set_half_turn_homings()
motors_bus.reset_calibration.assert_called_once()
assert mock_motors.stubs[read_pos_stub].called
assert all(mock_motors.stubs[stub].called for stub in write_homing_stubs)
def test_record_ranges_of_motion(mock_motors, dummy_motors):
positions = {
1: [351, 42, 1337],
2: [28, 3600, 2444],
3: [4002, 2999, 146],
}
expected_mins = {
"dummy_1": 42,
"dummy_2": 28,
"dummy_3": 146,
}
expected_maxes = {
"dummy_1": 1337,
"dummy_2": 3600,
"dummy_3": 4002,
}
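# The expected dicts above are simply the running min/max of each motor's sequential readings,
# e.g. motor 1 sees 351, 42, 1337 -> min 42, max 1337, keyed by motor name rather than by id.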
read_pos_stub = mock_motors.build_sequential_sync_read_stub("Present_Position", positions)
with patch("lerobot.common.motors.motors_bus.enter_pressed", side_effect=[False, True]):
motors_bus = FeetechMotorsBus(
port=mock_motors.port,
motors=dummy_motors,
)
motors_bus.connect(assert_motors_exist=False)
mins, maxes = motors_bus.record_ranges_of_motion(display_values=False)
assert mock_motors.stubs[read_pos_stub].calls == 3
assert mins == expected_mins
assert maxes == expected_maxes
