feat for the GPU poor: Add GPU availability check in evaluate_pretrained_policy (#359)

Co-authored-by: Alexander Soare <alexander.soare159@gmail.com>
This commit is contained in:
Julien Perez
2024-08-13 17:03:05 +02:00
committed by GitHub
parent 03d647269e
commit fab037f78d

View File

@@ -18,7 +18,15 @@ from lerobot.common.policies.diffusion.modeling_diffusion import DiffusionPolicy
# Create the directory where evaluation outputs (videos, stats) will be written.
output_directory = Path("outputs/eval/example_pusht_diffusion")
output_directory.mkdir(parents=True, exist_ok=True)

# Select the compute device: prefer CUDA when available, otherwise fall back
# to CPU so the example still runs (just slower) on GPU-less machines.
# (The previous unconditional `device = torch.device("cuda")` was dead code:
# it was always overwritten by this check, so it has been removed.)
if torch.cuda.is_available():
    device = torch.device("cuda")
    print("GPU is available. Device set to:", device)
else:
    device = torch.device("cpu")
    print(f"GPU is not available. Device set to: {device}. Inference will be slower than on GPU.")

# Decrease the number of reverse-diffusion steps (trades off a bit of quality for 10x speed)
# NOTE(review): `policy` is defined elsewhere in this file (this is a partial
# view of the script) — confirm it is constructed before this line runs.
policy.diffusion.num_inference_steps = 10

# Download the diffusion policy for pusht environment
pretrained_policy_path = Path(snapshot_download("lerobot/diffusion_pusht"))