forked from tangger/lerobot
chore: enable simplify in ruff lint (#2085)
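This commit enables ruff's flake8-simplify (SIM) rule set and applies the resulting rewrites across the test suite. As a minimal sketch, such a rule set is typically enabled in pyproject.toml roughly like this (the exact selection and any per-rule ignores in this repository may differ):

    [tool.ruff.lint]
    # Extend the existing rule selection with flake8-simplify (SIM).
    extend-select = ["SIM"]

The hunks below correspond to simplify-style fixes such as SIM118 (iterate over a dict directly instead of calling .keys()), SIM117 (combine nested with statements), SIM108 (replace an if/else assignment with a conditional expression), and SIM101 (merge duplicate isinstance() checks).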
@@ -85,7 +85,7 @@ def policy_feature_factory():

 def assert_contract_is_typed(features: dict[PipelineFeatureType, dict[str, PolicyFeature]]) -> None:
     assert isinstance(features, dict)
-    assert all(isinstance(k, PipelineFeatureType) for k in features.keys())
+    assert all(isinstance(k, PipelineFeatureType) for k in features)
     assert all(isinstance(v, dict) for v in features.values())
-    assert all(all(isinstance(nk, str) for nk in v.keys()) for v in features.values())
+    assert all(all(isinstance(nk, str) for nk in v) for v in features.values())
     assert all(all(isinstance(nv, PolicyFeature) for nv in v.values()) for v in features.values())
@@ -949,7 +949,7 @@ def test_statistics_metadata_validation(tmp_path, empty_lerobot_dataset_factory)
     # Check that statistics exist for all features
     assert loaded_dataset.meta.stats is not None, "No statistics found"

-    for feature_name in features.keys():
+    for feature_name in features:
         assert feature_name in loaded_dataset.meta.stats, f"No statistics for feature '{feature_name}'"

         feature_stats = loaded_dataset.meta.stats[feature_name]
@@ -246,7 +246,7 @@ def test_step_through():
     # Ensure all results are dicts (same format as input)
     for result in results:
         assert isinstance(result, dict)
-        assert all(isinstance(k, TransitionKey) for k in result.keys())
+        assert all(isinstance(k, TransitionKey) for k in result)


 def test_step_through_with_dict():
@@ -1623,9 +1623,7 @@ def test_override_with_callables():

     # Define a transform function
     def double_values(x):
-        if isinstance(x, (int | float)):
-            return x * 2
-        elif isinstance(x, torch.Tensor):
+        if isinstance(x, (int | float | torch.Tensor)):
             return x * 2
         return x

@@ -1797,10 +1795,9 @@ def test_from_pretrained_nonexistent_path():
     )

     # Test with a local directory that exists but has no config files
-    with tempfile.TemporaryDirectory() as tmp_dir:
+    with tempfile.TemporaryDirectory() as tmp_dir, pytest.raises(FileNotFoundError):
         # Since the directory exists but has no config, it will raise FileNotFoundError
-        with pytest.raises(FileNotFoundError):
-            DataProcessorPipeline.from_pretrained(tmp_dir, config_filename="processor.json")
+        DataProcessorPipeline.from_pretrained(tmp_dir, config_filename="processor.json")


 def test_save_load_with_custom_converter_functions():
@@ -32,10 +32,7 @@ class MockTokenizer:
         **kwargs,
     ) -> dict[str, torch.Tensor]:
         """Mock tokenization that returns deterministic tokens based on text."""
-        if isinstance(text, str):
-            texts = [text]
-        else:
-            texts = text
+        texts = [text] if isinstance(text, str) else text

         batch_size = len(texts)

@@ -245,14 +245,14 @@ def test_get_observation(reachy2):
     obs = reachy2.get_observation()

     expected_keys = set(reachy2.joints_dict)
-    expected_keys.update(f"{v}" for v in REACHY2_VEL.keys() if reachy2.config.with_mobile_base)
+    expected_keys.update(f"{v}" for v in REACHY2_VEL if reachy2.config.with_mobile_base)
     expected_keys.update(reachy2.cameras.keys())
     assert set(obs.keys()) == expected_keys

-    for motor in reachy2.joints_dict.keys():
+    for motor in reachy2.joints_dict:
         assert obs[motor] == reachy2.reachy.joints[REACHY2_JOINTS[motor]].present_position
     if reachy2.config.with_mobile_base:
-        for vel in REACHY2_VEL.keys():
+        for vel in REACHY2_VEL:
             assert obs[vel] == reachy2.reachy.mobile_base.odometry[REACHY2_VEL[vel]]
     if reachy2.config.with_left_teleop_camera:
         assert obs["teleop_left"].shape == (
@@ -282,7 +282,7 @@ def test_send_action(reachy2):
     action.update({k: i * 0.1 for i, k in enumerate(REACHY2_VEL.keys(), start=1)})

     previous_present_position = {
-        k: reachy2.reachy.joints[REACHY2_JOINTS[k]].present_position for k in reachy2.joints_dict.keys()
+        k: reachy2.reachy.joints[REACHY2_JOINTS[k]].present_position for k in reachy2.joints_dict
     }
     returned = reachy2.send_action(action)

@@ -290,7 +290,7 @@ def test_send_action(reachy2):
     assert returned == action

     assert reachy2.reachy._goal_position_set_total == len(reachy2.joints_dict)
-    for motor in reachy2.joints_dict.keys():
+    for motor in reachy2.joints_dict:
         expected_pos = action[motor]
         real_pos = reachy2.reachy.joints[REACHY2_JOINTS[motor]].goal_position
         if reachy2.config.max_relative_target is None:
@@ -121,20 +121,20 @@ def test_get_action(reachy2):
     action = reachy2.get_action()

     expected_keys = set(reachy2.joints_dict)
-    expected_keys.update(f"{v}" for v in REACHY2_VEL.keys() if reachy2.config.with_mobile_base)
+    expected_keys.update(f"{v}" for v in REACHY2_VEL if reachy2.config.with_mobile_base)
     assert set(action.keys()) == expected_keys

-    for motor in reachy2.joints_dict.keys():
+    for motor in reachy2.joints_dict:
         if reachy2.config.use_present_position:
             assert action[motor] == reachy2.reachy.joints[REACHY2_JOINTS[motor]].present_position
         else:
             assert action[motor] == reachy2.reachy.joints[REACHY2_JOINTS[motor]].goal_position
     if reachy2.config.with_mobile_base:
         if reachy2.config.use_present_position:
-            for vel in REACHY2_VEL.keys():
+            for vel in REACHY2_VEL:
                 assert action[vel] == reachy2.reachy.mobile_base.odometry[REACHY2_VEL[vel]]
         else:
-            for vel in REACHY2_VEL.keys():
+            for vel in REACHY2_VEL:
                 assert action[vel] == reachy2.reachy.mobile_base.last_cmd_vel[REACHY2_VEL[vel]]