chore: enable simplify in ruff lint (#2085)

Steven Palma
2025-09-29 15:06:56 +02:00
committed by GitHub
parent c378a325f0
commit bbcf66bd82
13 changed files with 32 additions and 43 deletions


@@ -437,7 +437,9 @@ def concatenate_video_files(
         tmp_concatenate_path, mode="r", format="concat", options={"safe": "0"}
     ) # safe = 0 allows absolute paths as well as relative paths
-    tmp_output_video_path = tempfile.NamedTemporaryFile(suffix=".mp4", delete=False).name
+    with tempfile.NamedTemporaryFile(suffix=".mp4", delete=False) as tmp_named_file:
+        tmp_output_video_path = tmp_named_file.name
     output_container = av.open(
         tmp_output_video_path, mode="w", options={"movflags": "faststart"}
     ) # faststart is to move the metadata to the beginning of the file to speed up loading
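
The rewrite above matches ruff's open-file-with-context-handler check (SIM115, which to my understanding also covers the tempfile factories); a minimal standalone sketch of the same pattern, with an illustrative print at the end:

    import tempfile

    # The with-block closes the file handle as soon as the name is read, while
    # delete=False keeps the file on disk so a later consumer (av.open in the
    # diff above) can still write to that path.
    with tempfile.NamedTemporaryFile(suffix=".mp4", delete=False) as tmp_named_file:
        tmp_output_video_path = tmp_named_file.name
    print(tmp_output_video_path)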


@@ -398,10 +398,7 @@ class ACT(nn.Module):
"actions must be provided when using the variational objective in training mode."
)
if OBS_IMAGES in batch:
batch_size = batch[OBS_IMAGES][0].shape[0]
else:
batch_size = batch[OBS_ENV_STATE].shape[0]
batch_size = batch[OBS_IMAGES][0].shape[0] if OBS_IMAGES in batch else batch[OBS_ENV_STATE].shape[0]
# Prepare the latent for input to the transformer encoder.
if self.config.use_vae and ACTION in batch and self.training:
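
Collapsing the if/else assignment into a conditional expression is ruff's SIM108 (if-else-block-instead-of-if-exp). A toy, self-contained version of the same rewrite, with plain strings standing in for the OBS_IMAGES / OBS_ENV_STATE constants:

    # hypothetical stand-ins for the real batch contents
    batch = {"images": [[0.0, 0.0, 0.0]], "env_state": [0.0, 0.0]}

    # before:
    # if "images" in batch:
    #     batch_size = len(batch["images"][0])
    # else:
    #     batch_size = len(batch["env_state"])
    batch_size = len(batch["images"][0]) if "images" in batch else len(batch["env_state"])
    print(batch_size)  # 3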


@@ -340,7 +340,7 @@ class GripperPenaltyProcessorStep(ComplementaryDataProcessorStep):
"""
action = self.transition.get(TransitionKey.ACTION)
raw_joint_positions = complementary_data.get("raw_joint_positions", None)
raw_joint_positions = complementary_data.get("raw_joint_positions")
if raw_joint_positions is None:
return complementary_data
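
Dropping the explicit None default corresponds to ruff's dict-get-with-none-default check (SIM910, assuming that rule is part of the enabled set): None is already what dict.get returns for a missing key, so behavior is unchanged. A tiny illustration with a made-up dict:

    complementary_data = {"some_other_field": 1.0}

    value = complementary_data.get("raw_joint_positions")
    assert value == complementary_data.get("raw_joint_positions", None)
    print(value)  # None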


@@ -119,13 +119,12 @@ class _NormalizationMixin:
         )
         self.features = reconstructed
-        if self.norm_map:
-            # if keys are strings (JSON), rebuild enum map
-            if all(isinstance(k, str) for k in self.norm_map.keys()):
-                reconstructed = {}
-                for ft_type_str, norm_mode_str in self.norm_map.items():
-                    reconstructed[FeatureType(ft_type_str)] = NormalizationMode(norm_mode_str)
-                self.norm_map = reconstructed
+        # if keys are strings (JSON), rebuild enum map
+        if self.norm_map and all(isinstance(k, str) for k in self.norm_map):
+            reconstructed = {}
+            for ft_type_str, norm_mode_str in self.norm_map.items():
+                reconstructed[FeatureType(ft_type_str)] = NormalizationMode(norm_mode_str)
+            self.norm_map = reconstructed
         # Convert stats to tensors and move to the target device once during initialization.
         self.stats = self.stats or {}
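
Two simplifications combine in this hunk: the nested if blocks collapse into one condition (SIM102, collapsible-if) and iteration over self.norm_map.keys() becomes iteration over the dict itself (SIM118, in-dict-keys). A self-contained sketch with upper-casing standing in for the FeatureType / NormalizationMode conversions:

    norm_map = {"state": "mean_std", "visual": "identity"}  # JSON-style string keys

    # before:
    # if norm_map:
    #     if all(isinstance(k, str) for k in norm_map.keys()):
    #         ... rebuild ...
    if norm_map and all(isinstance(k, str) for k in norm_map):
        # upper-casing stands in for FeatureType(...) / NormalizationMode(...)
        norm_map = {k.upper(): v.upper() for k, v in norm_map.items()}
    print(norm_map)  # {'STATE': 'MEAN_STD', 'VISUAL': 'IDENTITY'}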


@@ -152,7 +152,7 @@ class VanillaObservationProcessorStep(ObservationProcessorStep):
"""
# Build a new features mapping keyed by the same FeatureType buckets
# We assume callers already placed features in the correct FeatureType.
new_features: dict[PipelineFeatureType, dict[str, PolicyFeature]] = {ft: {} for ft in features.keys()}
new_features: dict[PipelineFeatureType, dict[str, PolicyFeature]] = {ft: {} for ft in features}
exact_pairs = {
"pixels": OBS_IMAGE,


@@ -32,11 +32,8 @@ def init_rerun(session_name: str = "lerobot_control_loop") -> None:
 def _is_scalar(x):
-    return (
-        isinstance(x, float)
-        or isinstance(x, numbers.Real)
-        or isinstance(x, (np.integer | np.floating))
-        or (isinstance(x, np.ndarray) and x.ndim == 0)
+    return isinstance(x, (float | numbers.Real | np.integer | np.floating)) or (
+        isinstance(x, np.ndarray) and x.ndim == 0
     )
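
Folding the chained checks into one isinstance call is ruff's SIM101 (duplicate-isinstance-call). A runnable version of the simplified predicate, written with a plain tuple of types instead of the `|` union used in the diff:

    import numbers

    import numpy as np


    def _is_scalar(x):
        # one isinstance call over all accepted scalar types, plus the 0-d ndarray case
        return isinstance(x, (float, numbers.Real, np.integer, np.floating)) or (
            isinstance(x, np.ndarray) and x.ndim == 0
        )


    print(_is_scalar(1.5), _is_scalar(np.float32(2)), _is_scalar(np.array(3)), _is_scalar([4]))
    # expected: True True True False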