chore(utils): remove unused utils legacy functions + rename init_rerun (#2031)

This commit is contained in:
Steven Palma
2025-09-24 17:10:27 +02:00
committed by GitHub
parent ec63225dc1
commit 853cc70194
16 changed files with 28 additions and 85 deletions

View File

@@ -122,7 +122,7 @@ from lerobot.utils.utils import (
init_logging,
log_say,
)
from lerobot.utils.visualization_utils import _init_rerun, log_rerun_data
from lerobot.utils.visualization_utils import init_rerun, log_rerun_data
@dataclass
@@ -378,7 +378,7 @@ def record(cfg: RecordConfig) -> LeRobotDataset:
init_logging()
logging.info(pformat(asdict(cfg)))
if cfg.display_data:
_init_rerun(session_name="recording")
init_rerun(session_name="recording")
robot = make_robot_from_config(cfg.robot)
teleop = make_teleoperator_from_config(cfg.teleop) if cfg.teleop is not None else None

View File

@@ -90,7 +90,7 @@ from lerobot.teleoperators import ( # noqa: F401
)
from lerobot.utils.robot_utils import busy_wait
from lerobot.utils.utils import init_logging, move_cursor_up
from lerobot.utils.visualization_utils import _init_rerun, log_rerun_data
from lerobot.utils.visualization_utils import init_rerun, log_rerun_data
@dataclass
@@ -185,7 +185,7 @@ def teleoperate(cfg: TeleoperateConfig):
init_logging()
logging.info(pformat(asdict(cfg)))
if cfg.display_data:
_init_rerun(session_name="teleoperation")
init_rerun(session_name="teleoperation")
teleop = make_teleoperator_from_config(cfg.teleop)
robot = make_robot_from_config(cfg.robot)

View File

@@ -27,17 +27,3 @@ def busy_wait(seconds):
# On Linux time.sleep is accurate
if seconds > 0:
time.sleep(seconds)
def safe_disconnect(func):
    """Decorator for robot-facing entry points: disconnect the robot on any error.

    Wraps ``func(robot, *args, **kwargs)`` so that if the call raises, the robot
    is disconnected (when still connected) before the exception propagates.
    """
    import functools

    # TODO(aliberts): Allow to pass custom exceptions
    # (e.g. ThreadServiceExit, KeyboardInterrupt, SystemExit, UnpluggedError, DynamixelCommError)
    @functools.wraps(func)  # preserve the wrapped function's name/docstring for logging & debugging
    def wrapper(robot, *args, **kwargs):
        try:
            return func(robot, *args, **kwargs)
        except Exception:
            if robot.is_connected:
                robot.disconnect()
            # Bare `raise` re-raises the active exception with its original traceback,
            # rather than restarting the chain from this line.
            raise
    return wrapper

View File

@@ -13,10 +13,8 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from pathlib import Path
from termcolor import colored
from torch.optim import Optimizer
from torch.optim.lr_scheduler import LRScheduler
@@ -36,10 +34,6 @@ from lerobot.utils.constants import (
from lerobot.utils.random_utils import load_rng_state, save_rng_state
def log_output_dir(out_dir):
    """Log the run's output directory with a bold yellow label for visibility."""
    label = colored("Output dir:", "yellow", attrs=["bold"])
    logging.info(f"{label} {out_dir}")
def get_step_identifier(step: int, total_steps: int) -> str:
    """Return *step* zero-padded to at least 6 digits (wider if total_steps needs more)."""
    width = max(6, len(str(total_steps)))
    return str(step).zfill(width)

View File

@@ -15,14 +15,13 @@
# limitations under the License.
import logging
import os
import os.path as osp
import platform
import select
import subprocess
import sys
import time
from copy import copy, deepcopy
from datetime import datetime, timezone
from datetime import datetime
from pathlib import Path
from statistics import mean
@@ -30,12 +29,6 @@ import numpy as np
import torch
def none_or_int(value):
    """Argparse-style converter: the literal string "None" maps to None, anything else to int."""
    return None if value == "None" else int(value)
def inside_slurm():
"""Check whether the python process was launched through slurm"""
# TODO(rcadene): return False for interactive mode `--pty bash`
@@ -165,36 +158,6 @@ def format_big_number(num, precision=0):
return num
def _relative_path_between(path1: Path, path2: Path) -> Path:
"""Returns path1 relative to path2."""
path1 = path1.absolute()
path2 = path2.absolute()
try:
return path1.relative_to(path2)
except ValueError: # most likely because path1 is not a subpath of path2
common_parts = Path(osp.commonpath([path1, path2])).parts
return Path(
"/".join([".."] * (len(path2.parts) - len(common_parts)) + list(path1.parts[len(common_parts) :]))
)
def print_cuda_memory_usage():
    """Print current/peak CUDA memory stats for device 0 — handy for locating leaks."""
    import gc

    gc.collect()
    # Emptying the cache releases cached allocations back to the driver,
    # so the reported numbers reflect live tensors only.
    torch.cuda.empty_cache()
    stats = [
        ("Current GPU Memory Allocated", torch.cuda.memory_allocated(0)),
        ("Maximum GPU Memory Allocated", torch.cuda.max_memory_allocated(0)),
        ("Current GPU Memory Reserved", torch.cuda.memory_reserved(0)),
        ("Maximum GPU Memory Reserved", torch.cuda.max_memory_reserved(0)),
    ]
    for label, nbytes in stats:
        print(f"{label}: {nbytes / 1024**2:.2f} MB")
def capture_timestamp_utc():
    """Return the current wall-clock time as a timezone-aware UTC datetime."""
    return datetime.now(tz=timezone.utc)
def say(text: str, blocking: bool = False):
system = platform.system()

View File

@@ -20,7 +20,7 @@ import numpy as np
import rerun as rr
def _init_rerun(session_name: str = "lerobot_control_loop") -> None:
def init_rerun(session_name: str = "lerobot_control_loop") -> None:
"""Initializes the Rerun SDK for visualizing the control loop."""
batch_size = os.getenv("RERUN_FLUSH_NUM_BYTES", "8000")
os.environ["RERUN_FLUSH_NUM_BYTES"] = batch_size