From 376d75f8d3b9d4fdc1e101bc603c26c148a7378f Mon Sep 17 00:00:00 2001
From: Simon Alibert
Date: Tue, 16 Apr 2024 10:35:43 +0200
Subject: [PATCH] Add env info

---
 lerobot/commands/env.py | 43 +++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 43 insertions(+)
 create mode 100644 lerobot/commands/env.py

diff --git a/lerobot/commands/env.py b/lerobot/commands/env.py
new file mode 100644
index 00000000..1a7e9508
--- /dev/null
+++ b/lerobot/commands/env.py
@@ -0,0 +1,43 @@
+import platform
+
+import huggingface_hub
+
+# import dataset
+import numpy as np
+import torch
+
+from lerobot import __version__ as version
+
+pt_version = torch.__version__
+pt_cuda_available = torch.cuda.is_available()
+# CUDA version PyTorch was compiled with ("N/A" on CPU-only builds)
+cuda_version = torch._C._cuda_getCompiledVersion() if torch.version.cuda is not None else "N/A"
+
+
+# TODO(aliberts): refactor into an actual command `lerobot env`
+def get_env_info() -> dict:
+    """Run this to get basic system info to help with tracking issues & bugs."""
+    info = {
+        "`lerobot` version": version,
+        "Platform": platform.platform(),
+        "Python version": platform.python_version(),
+        "Huggingface_hub version": huggingface_hub.__version__,
+        # TODO(aliberts): Add dataset when https://github.com/huggingface/lerobot/pull/73 is merged
+        # "Dataset version": dataset.__version__,
+        "Numpy version": np.__version__,
+        "PyTorch version (GPU?)": f"{pt_version} ({pt_cuda_available})",
+        "Cuda version": cuda_version,
+        "Using GPU in script?": "",
+        "Using distributed or parallel set-up in script?": "",
+    }
+    print("\nCopy and paste the text below into your GitHub issue and FILL OUT the last two points.\n")
+    print(format_dict(info))
+    return info
+
+
+def format_dict(d: dict) -> str:
+    return "\n".join([f"- {prop}: {val}" for prop, val in d.items()]) + "\n"
+
+
+if __name__ == "__main__":
+    get_env_info()
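
For context, a minimal usage sketch of the script this patch adds (illustrative only, not part of the diff). It assumes the file lands at lerobot/commands/env.py as shown above and that `lerobot` is importable (e.g. installed in the current environment); the report can also be produced from the command line with `python lerobot/commands/env.py`, which triggers the `__main__` block.

    # Hypothetical usage example: assumes lerobot.commands.env is importable
    # after applying this patch; printed values depend on the local machine.
    from lerobot.commands.env import format_dict, get_env_info

    info = get_env_info()        # prints the report and returns it as a dict
    report = format_dict(info)   # same data rendered as "- <field>: <value>" lines
    # The last two fields ("Using GPU in script?" and the distributed/parallel
    # question) are intentionally left blank for the user to fill in manually.

The returned dict is what users paste into a GitHub issue, so keeping `format_dict` to one "- key: value" line per entry makes the output readable as a Markdown bullet list without further formatting.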