Merge branch 'main' into user/mshukor/2025_03_24_pi0fast

This commit is contained in:
Steven Palma 2025-04-04 10:32:57 +02:00 committed by GitHub
commit ffab2a29b1
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
13 changed files with 29 additions and 63 deletions

View File

@ -98,14 +98,18 @@ conda create -y -n lerobot python=3.10
conda activate lerobot conda activate lerobot
``` ```
Install 🤗 LeRobot: When using `miniconda`, if you don't have `ffmpeg` in your environment:
```bash ```bash
pip install -e . conda install ffmpeg
``` ```
> **NOTE:** Depending on your platform, If you encounter any build errors during this step Install 🤗 LeRobot:
you may need to install `cmake` and `build-essential` for building some of our dependencies. ```bash
On linux: `sudo apt-get install cmake build-essential` pip install --no-binary=av -e .
```
> **NOTE:** If you encounter build errors, you may need to install additional dependencies (`cmake`, `build-essential`, and `ffmpeg libs`). On Linux, run:
`sudo apt-get install cmake build-essential python-dev pkg-config libavformat-dev libavcodec-dev libavdevice-dev libavutil-dev libswscale-dev libswresample-dev libavfilter-dev`. For other systems, see: [Compiling PyAV](https://pyav.org/docs/develop/overview/installation.html#bring-your-own-ffmpeg)
For simulations, 🤗 LeRobot comes with gymnasium environments that can be installed as extras: For simulations, 🤗 LeRobot comes with gymnasium environments that can be installed as extras:
- [aloha](https://github.com/huggingface/gym-aloha) - [aloha](https://github.com/huggingface/gym-aloha)
@ -114,7 +118,7 @@ For simulations, 🤗 LeRobot comes with gymnasium environments that can be inst
For instance, to install 🤗 LeRobot with aloha and pusht, use: For instance, to install 🤗 LeRobot with aloha and pusht, use:
```bash ```bash
pip install -e ".[aloha, pusht]" pip install --no-binary=av -e ".[aloha, pusht]"
``` ```
To use [Weights and Biases](https://docs.wandb.ai/quickstart) for experiment tracking, log in with To use [Weights and Biases](https://docs.wandb.ai/quickstart) for experiment tracking, log in with

View File

@ -59,15 +59,9 @@ git clone https://github.com/huggingface/lerobot.git ~/lerobot
#### 5. Install LeRobot with dependencies for the feetech motors: #### 5. Install LeRobot with dependencies for the feetech motors:
```bash ```bash
cd ~/lerobot && pip install -e ".[feetech]" cd ~/lerobot && pip install --no-binary=av -e ".[feetech]"
``` ```
*EXTRA: For Linux only (not Mac)*: install extra dependencies for recording datasets:
```bash
conda install -y -c conda-forge ffmpeg
pip uninstall -y opencv-python
conda install -y -c conda-forge "opencv>=4.10.0"
```
Great :hugs:! You are now done installing LeRobot and we can begin assembling the SO100 arms :robot:. Great :hugs:! You are now done installing LeRobot and we can begin assembling the SO100 arms :robot:.
Every time you now want to use LeRobot you can go to the `~/lerobot` folder where we installed LeRobot and run one of the commands. Every time you now want to use LeRobot you can go to the `~/lerobot` folder where we installed LeRobot and run one of the commands.

View File

@ -69,7 +69,7 @@ git clone https://github.com/huggingface/lerobot.git ~/lerobot
#### 5. Install LeRobot with dependencies for the feetech motors: #### 5. Install LeRobot with dependencies for the feetech motors:
```bash ```bash
cd ~/lerobot && pip install -e ".[feetech]" cd ~/lerobot && pip install --no-binary=av -e ".[feetech]"
``` ```
## C. Install LeRobot on laptop ## C. Install LeRobot on laptop
@ -110,15 +110,9 @@ git clone https://github.com/huggingface/lerobot.git ~/lerobot
#### 5. Install LeRobot with dependencies for the feetech motors: #### 5. Install LeRobot with dependencies for the feetech motors:
```bash ```bash
cd ~/lerobot && pip install -e ".[feetech]" cd ~/lerobot && pip install --no-binary=av -e ".[feetech]"
``` ```
*EXTRA: For Linux only (not Mac)*: install extra dependencies for recording datasets:
```bash
conda install -y -c conda-forge ffmpeg
pip uninstall -y opencv-python
conda install -y -c conda-forge "opencv>=4.10.0"
```
Great :hugs:! You are now done installing LeRobot and we can begin assembling the SO100 arms and Mobile base :robot:. Great :hugs:! You are now done installing LeRobot and we can begin assembling the SO100 arms and Mobile base :robot:.
Every time you now want to use LeRobot you can go to the `~/lerobot` folder where we installed LeRobot and run one of the commands. Every time you now want to use LeRobot you can go to the `~/lerobot` folder where we installed LeRobot and run one of the commands.
@ -399,6 +393,10 @@ python lerobot/scripts/control_robot.py \
``` ```
# F. Teleoperate # F. Teleoperate
> [!TIP]
> If you're using a Mac, you might need to give Terminal permission to access your keyboard. Go to System Preferences > Security & Privacy > Input Monitoring and check the box for Terminal.
To teleoperate SSH into your Raspberry Pi, and run `conda activate lerobot` and this script: To teleoperate SSH into your Raspberry Pi, and run `conda activate lerobot` and this script:
```bash ```bash
python lerobot/scripts/control_robot.py \ python lerobot/scripts/control_robot.py \

View File

@ -33,14 +33,7 @@ git clone https://github.com/huggingface/lerobot.git ~/lerobot
5. Install LeRobot with dependencies for the feetech motors: 5. Install LeRobot with dependencies for the feetech motors:
```bash ```bash
cd ~/lerobot && pip install -e ".[feetech]" cd ~/lerobot && pip install --no-binary=av -e ".[feetech]"
```
For Linux only (not Mac), install extra dependencies for recording datasets:
```bash
conda install -y -c conda-forge ffmpeg
pip uninstall -y opencv-python
conda install -y -c conda-forge "opencv>=4.10.0"
``` ```
## Configure the motors ## Configure the motors

View File

@ -18,7 +18,7 @@ training outputs directory. In the latter case, you might want to run examples/3
It requires the installation of the 'gym_pusht' simulation environment. Install it by running: It requires the installation of the 'gym_pusht' simulation environment. Install it by running:
```bash ```bash
pip install -e ".[pusht]" pip install --no-binary=av -e ".[pusht]"
``` ```
""" """

View File

@ -33,7 +33,7 @@ First, install the additional dependencies required for robots built with dynami
Using `pip`: Using `pip`:
```bash ```bash
pip install -e ".[dynamixel]" pip install --no-binary=av -e ".[dynamixel]"
``` ```
Using `poetry`: Using `poetry`:
@ -46,13 +46,6 @@ Using `uv`:
uv sync --extra "dynamixel" uv sync --extra "dynamixel"
``` ```
/!\ For Linux only, ffmpeg and opencv require conda install for now. Run this exact sequence of commands:
```bash
conda install -c conda-forge ffmpeg
pip uninstall opencv-python
conda install -c conda-forge "opencv>=4.10.0"
```
You are now ready to plug the 5V power supply to the motor bus of the leader arm (the smaller one) since all its motors only require 5V. You are now ready to plug the 5V power supply to the motor bus of the leader arm (the smaller one) since all its motors only require 5V.
Then plug the 12V power supply to the motor bus of the follower arm. It has two motors that need 12V, and the rest will be powered with 5V through the voltage convertor. Then plug the 12V power supply to the motor bus of the follower arm. It has two motors that need 12V, and the rest will be powered with 5V through the voltage convertor.
@ -834,11 +827,6 @@ It contains:
- `dtRphone:33.84 (29.5hz)` which is the delta time of capturing an image from the phone camera in the thread running asynchronously. - `dtRphone:33.84 (29.5hz)` which is the delta time of capturing an image from the phone camera in the thread running asynchronously.
Troubleshooting: Troubleshooting:
- On Linux, if you encounter a hanging issue when using cameras, uninstall opencv and re-install it with conda:
```bash
pip uninstall opencv-python
conda install -c conda-forge opencv=4.10.0
```
- On Linux, if you encounter any issue during video encoding with `ffmpeg: unknown encoder libsvtav1`, you can: - On Linux, if you encounter any issue during video encoding with `ffmpeg: unknown encoder libsvtav1`, you can:
- install with conda-forge by running `conda install -c conda-forge ffmpeg` (it should be compiled with `libsvtav1`), - install with conda-forge by running `conda install -c conda-forge ffmpeg` (it should be compiled with `libsvtav1`),
- or, install [Homebrew](https://brew.sh) and run `brew install ffmpeg` (it should be compiled with `libsvtav1`), - or, install [Homebrew](https://brew.sh) and run `brew install ffmpeg` (it should be compiled with `libsvtav1`),

View File

@ -45,18 +45,11 @@ git clone https://github.com/huggingface/lerobot.git ~/lerobot
6. Install LeRobot with stretch dependencies: 6. Install LeRobot with stretch dependencies:
```bash ```bash
cd ~/lerobot && pip install -e ".[stretch]" cd ~/lerobot && pip install --no-binary=av -e ".[stretch]"
``` ```
> **Note:** If you get this message, you can ignore it: `ERROR: pip's dependency resolver does not currently take into account all the packages that are installed.` > **Note:** If you get this message, you can ignore it: `ERROR: pip's dependency resolver does not currently take into account all the packages that are installed.`
For Linux only (not Mac), install extra dependencies for recording datasets:
```bash
conda install -y -c conda-forge ffmpeg
pip uninstall -y opencv-python
conda install -y -c conda-forge "opencv>=4.10.0"
```
7. Run a [system check](https://docs.hello-robot.com/0.3/getting_started/stretch_hardware_overview/#system-check) to make sure your robot is ready: 7. Run a [system check](https://docs.hello-robot.com/0.3/getting_started/stretch_hardware_overview/#system-check) to make sure your robot is ready:
```bash ```bash
stretch_system_check.py stretch_system_check.py

View File

@ -32,14 +32,7 @@ git clone https://github.com/huggingface/lerobot.git ~/lerobot
5. Install LeRobot with dependencies for the Aloha motors (dynamixel) and cameras (intelrealsense): 5. Install LeRobot with dependencies for the Aloha motors (dynamixel) and cameras (intelrealsense):
```bash ```bash
cd ~/lerobot && pip install -e ".[dynamixel, intelrealsense]" cd ~/lerobot && pip install --no-binary=av -e ".[dynamixel, intelrealsense]"
```
For Linux only (not Mac), install extra dependencies for recording datasets:
```bash
conda install -y -c conda-forge ffmpeg
pip uninstall -y opencv-python
conda install -y -c conda-forge "opencv>=4.10.0"
``` ```
## Teleoperate ## Teleoperate

View File

@ -257,6 +257,7 @@ def encode_video_frames(
) -> None: ) -> None:
"""More info on ffmpeg arguments tuning on `benchmark/video/README.md`""" """More info on ffmpeg arguments tuning on `benchmark/video/README.md`"""
video_path = Path(video_path) video_path = Path(video_path)
imgs_dir = Path(imgs_dir)
video_path.parent.mkdir(parents=True, exist_ok=True) video_path.parent.mkdir(parents=True, exist_ok=True)
ffmpeg_args = OrderedDict( ffmpeg_args = OrderedDict(

View File

@ -24,7 +24,7 @@ Designed by Physical Intelligence. Ported from Jax by Hugging Face.
Install pi0 extra dependencies: Install pi0 extra dependencies:
```bash ```bash
pip install -e ".[pi0]" pip install --no-binary=av -e ".[pi0]"
``` ```
Example of finetuning the pi0 pretrained model (`pi0_base` in `openpi`): Example of finetuning the pi0 pretrained model (`pi0_base` in `openpi`):
@ -313,7 +313,7 @@ class PI0Policy(PreTrainedPolicy):
state = self.prepare_state(batch) state = self.prepare_state(batch)
lang_tokens, lang_masks = self.prepare_language(batch) lang_tokens, lang_masks = self.prepare_language(batch)
actions = self.prepare_action(batch) actions = self.prepare_action(batch)
actions_is_pad = batch.get("actions_is_pad") actions_is_pad = batch.get("action_is_pad")
loss_dict = {} loss_dict = {}
losses = self.model.forward(images, img_masks, lang_tokens, lang_masks, state, actions, noise, time) losses = self.model.forward(images, img_masks, lang_tokens, lang_masks, state, actions, noise, time)

View File

@ -90,6 +90,7 @@ class WandBLogger:
# TODO(rcadene): split train and eval, and run async eval with job_type="eval" # TODO(rcadene): split train and eval, and run async eval with job_type="eval"
job_type="train_eval", job_type="train_eval",
resume="must" if cfg.resume else None, resume="must" if cfg.resume else None,
mode=self.cfg.mode if self.cfg.mode in ["online", "offline", "disabled"] else "online",
) )
print(colored("Logs will be synced with wandb.", "blue", attrs=["bold"])) print(colored("Logs will be synced with wandb.", "blue", attrs=["bold"]))
logging.info(f"Track this run --> {colored(wandb.run.get_url(), 'yellow', attrs=['bold'])}") logging.info(f"Track this run --> {colored(wandb.run.get_url(), 'yellow', attrs=['bold'])}")

View File

@ -48,6 +48,7 @@ class WandBConfig:
entity: str | None = None entity: str | None = None
notes: str | None = None notes: str | None = None
run_id: str | None = None run_id: str | None = None
mode: str | None = None # Allowed values: 'online', 'offline', 'disabled'. Defaults to 'online' mode: str | None = None # Allowed values: 'online', 'offline', 'disabled'. Defaults to 'online'
@dataclass @dataclass

View File

@ -62,14 +62,14 @@ dependencies = [
"omegaconf>=2.3.0", "omegaconf>=2.3.0",
"opencv-python>=4.9.0", "opencv-python>=4.9.0",
"packaging>=24.2", "packaging>=24.2",
"av>=12.0.5", "av>=12.0.5,<13.0.0",
"pymunk>=6.6.0", "pymunk>=6.6.0",
"pynput>=1.7.7", "pynput>=1.7.7",
"pyzmq>=26.2.1", "pyzmq>=26.2.1",
"rerun-sdk>=0.21.0", "rerun-sdk>=0.21.0",
"termcolor>=2.4.0", "termcolor>=2.4.0",
"torch>=2.2.1", "torch>=2.2.1",
"torchcodec>=0.2.1; sys_platform != 'win32' and (sys_platform != 'linux' or (platform_machine != 'aarch64' and platform_machine != 'arm64' and platform_machine != 'armv7l'))", "torchcodec>=0.2.1; sys_platform != 'win32' and (sys_platform != 'linux' or (platform_machine != 'aarch64' and platform_machine != 'arm64' and platform_machine != 'armv7l')) and (sys_platform != 'darwin' or platform_machine != 'x86_64')",
"torchvision>=0.21.0", "torchvision>=0.21.0",
"wandb>=0.16.3", "wandb>=0.16.3",
"zarr>=2.17.0", "zarr>=2.17.0",