Merge remote-tracking branch 'origin/user/aliberts/2024_09_25_reshape_dataset' into user/rcadene/2024_10_24_feetech_dataset_v2
@@ -29,7 +29,6 @@ from unittest.mock import patch
 
 import pytest
 
-from lerobot.common.datasets.populate_dataset import add_frame, init_dataset
 from lerobot.common.logger import Logger
 from lerobot.common.policies.factory import make_policy
 from lerobot.common.utils.utils import init_hydra_config
@@ -93,8 +92,9 @@ def test_record_without_cameras(tmpdir, request, robot_type, mock):
         mock_calibration_dir(calibration_dir)
         overrides.append(f"calibration_dir={calibration_dir}")
 
-    root = Path(tmpdir) / "data"
     repo_id = "lerobot/debug"
+    root = Path(tmpdir) / "data" / repo_id
+    single_task = "Do something."
 
     robot = make_robot(robot_type, overrides=overrides, mock=mock)
     record(
@@ -102,6 +102,7 @@ def test_record_without_cameras(tmpdir, request, robot_type, mock):
         fps=30,
         root=root,
         repo_id=repo_id,
+        single_task=single_task,
         warmup_time_s=1,
         episode_time_s=1,
         num_episodes=2,
@@ -132,17 +133,18 @@ def test_record_and_replay_and_policy(tmpdir, request, robot_type, mock):
     env_name = "koch_real"
     policy_name = "act_koch_real"
 
-    root = tmpdir / "data"
     repo_id = "lerobot/debug"
-    eval_repo_id = "lerobot/eval_debug"
+    root = tmpdir / "data" / repo_id
+    single_task = "Do something."
 
     robot = make_robot(robot_type, overrides=overrides, mock=mock)
     dataset = record(
         robot,
         root,
         repo_id,
-        fps=1,
-        warmup_time_s=1,
+        single_task,
+        fps=5,
+        warmup_time_s=0.5,
         episode_time_s=1,
         reset_time_s=1,
         num_episodes=2,
@@ -153,10 +155,10 @@ def test_record_and_replay_and_policy(tmpdir, request, robot_type, mock):
         display_cameras=False,
         play_sounds=False,
     )
-    assert dataset.num_episodes == 2
-    assert len(dataset) == 2
+    assert dataset.total_episodes == 2
+    assert len(dataset) == 10
 
-    replay(robot, episode=0, fps=1, root=root, repo_id=repo_id, play_sounds=False)
+    replay(robot, episode=0, fps=5, root=root, repo_id=repo_id, play_sounds=False)
 
     # TODO(rcadene, aliberts): rethink this design
     if robot_type == "aloha":
@@ -219,10 +221,14 @@ def test_record_and_replay_and_policy(tmpdir, request, robot_type, mock):
     else:
         num_image_writer_processes = 0
 
-    record(
+    eval_repo_id = "lerobot/eval_debug"
+    eval_root = tmpdir / "data" / eval_repo_id
+
+    dataset = record(
         robot,
-        root,
+        eval_root,
         eval_repo_id,
+        single_task,
         pretrained_policy_name_or_path,
         warmup_time_s=1,
         episode_time_s=1,
@@ -259,13 +265,15 @@ def test_resume_record(tmpdir, request, robot_type, mock):
 
     robot = make_robot(robot_type, overrides=overrides, mock=mock)
 
-    root = Path(tmpdir) / "data"
     repo_id = "lerobot/debug"
+    root = Path(tmpdir) / "data" / repo_id
+    single_task = "Do something."
 
     dataset = record(
         robot,
         root,
         repo_id,
+        single_task,
         fps=1,
         warmup_time_s=0,
         episode_time_s=1,
@@ -278,32 +286,33 @@ def test_resume_record(tmpdir, request, robot_type, mock):
     )
     assert len(dataset) == 1, "`dataset` should contain only 1 frame"
 
-    init_dataset_return_value = {}
+    # init_dataset_return_value = {}
 
-    def wrapped_init_dataset(*args, **kwargs):
-        nonlocal init_dataset_return_value
-        init_dataset_return_value = init_dataset(*args, **kwargs)
-        return init_dataset_return_value
+    # def wrapped_init_dataset(*args, **kwargs):
+    #     nonlocal init_dataset_return_value
+    #     init_dataset_return_value = init_dataset(*args, **kwargs)
+    #     return init_dataset_return_value
 
-    with patch("lerobot.scripts.control_robot.init_dataset", wraps=wrapped_init_dataset):
-        dataset = record(
-            robot,
-            root,
-            repo_id,
-            fps=1,
-            warmup_time_s=0,
-            episode_time_s=1,
-            num_episodes=2,
-            push_to_hub=False,
-            video=False,
-            display_cameras=False,
-            play_sounds=False,
-            run_compute_stats=False,
-        )
-        assert len(dataset) == 2, "`dataset` should contain only 1 frame"
-        assert (
-            init_dataset_return_value["num_episodes"] == 2
-        ), "`init_dataset` should load the previous episode"
+    # with patch("lerobot.scripts.control_robot.init_dataset", wraps=wrapped_init_dataset):
+    dataset = record(
+        robot,
+        root,
+        repo_id,
+        single_task,
+        fps=1,
+        warmup_time_s=0,
+        episode_time_s=1,
+        num_episodes=2,
+        push_to_hub=False,
+        video=False,
+        display_cameras=False,
+        play_sounds=False,
+        run_compute_stats=False,
+    )
+    assert len(dataset) == 2, "`dataset` should contain only 1 frame"
+    # assert (
+    #     init_dataset_return_value["num_episodes"] == 2
+    # ), "`init_dataset` should load the previous episode"
 
 
 @pytest.mark.parametrize("robot_type, mock", [("koch", True)])
@@ -322,23 +331,22 @@ def test_record_with_event_rerecord_episode(tmpdir, request, robot_type, mock):
     overrides = []
 
     robot = make_robot(robot_type, overrides=overrides, mock=mock)
-    with (
-        patch("lerobot.scripts.control_robot.init_keyboard_listener") as mock_listener,
-        patch("lerobot.common.robot_devices.control_utils.add_frame", wraps=add_frame) as mock_add_frame,
-    ):
+    with patch("lerobot.scripts.control_robot.init_keyboard_listener") as mock_listener:
         mock_events = {}
         mock_events["exit_early"] = True
         mock_events["rerecord_episode"] = True
         mock_events["stop_recording"] = False
         mock_listener.return_value = (None, mock_events)
 
-        root = Path(tmpdir) / "data"
         repo_id = "lerobot/debug"
+        root = Path(tmpdir) / "data" / repo_id
+        single_task = "Do something."
 
         dataset = record(
             robot,
             root,
             repo_id,
+            single_task,
             fps=1,
             warmup_time_s=0,
             episode_time_s=1,
@@ -352,7 +360,6 @@ def test_record_with_event_rerecord_episode(tmpdir, request, robot_type, mock):
 
     assert not mock_events["rerecord_episode"], "`rerecord_episode` wasn't properly reset to False"
     assert not mock_events["exit_early"], "`exit_early` wasn't properly reset to False"
-    assert mock_add_frame.call_count == 2, "`add_frame` should have been called 2 times"
     assert len(dataset) == 1, "`dataset` should contain only 1 frame"
 
 
@@ -372,23 +379,22 @@ def test_record_with_event_exit_early(tmpdir, request, robot_type, mock):
     overrides = []
 
     robot = make_robot(robot_type, overrides=overrides, mock=mock)
-    with (
-        patch("lerobot.scripts.control_robot.init_keyboard_listener") as mock_listener,
-        patch("lerobot.common.robot_devices.control_utils.add_frame", wraps=add_frame) as mock_add_frame,
-    ):
+    with patch("lerobot.scripts.control_robot.init_keyboard_listener") as mock_listener:
         mock_events = {}
         mock_events["exit_early"] = True
        mock_events["rerecord_episode"] = False
         mock_events["stop_recording"] = False
         mock_listener.return_value = (None, mock_events)
 
-        root = Path(tmpdir) / "data"
         repo_id = "lerobot/debug"
+        root = Path(tmpdir) / "data" / repo_id
+        single_task = "Do something."
 
         dataset = record(
             robot,
             fps=2,
             root=root,
+            single_task=single_task,
             repo_id=repo_id,
             warmup_time_s=0,
             episode_time_s=1,
@@ -401,7 +407,6 @@ def test_record_with_event_exit_early(tmpdir, request, robot_type, mock):
         )
 
     assert not mock_events["exit_early"], "`exit_early` wasn't properly reset to False"
-    assert mock_add_frame.call_count == 1, "`add_frame` should have been called 1 time"
     assert len(dataset) == 1, "`dataset` should contain only 1 frame"
 
 
@@ -423,23 +428,22 @@ def test_record_with_event_stop_recording(tmpdir, request, robot_type, mock, num
     overrides = []
 
     robot = make_robot(robot_type, overrides=overrides, mock=mock)
-    with (
-        patch("lerobot.scripts.control_robot.init_keyboard_listener") as mock_listener,
-        patch("lerobot.common.robot_devices.control_utils.add_frame", wraps=add_frame) as mock_add_frame,
-    ):
+    with patch("lerobot.scripts.control_robot.init_keyboard_listener") as mock_listener:
         mock_events = {}
         mock_events["exit_early"] = True
         mock_events["rerecord_episode"] = False
         mock_events["stop_recording"] = True
         mock_listener.return_value = (None, mock_events)
 
-        root = Path(tmpdir) / "data"
         repo_id = "lerobot/debug"
+        root = Path(tmpdir) / "data" / repo_id
+        single_task = "Do something."
 
         dataset = record(
             robot,
             root,
             repo_id,
+            single_task=single_task,
             fps=1,
             warmup_time_s=0,
             episode_time_s=1,
@@ -453,5 +457,4 @@ def test_record_with_event_stop_recording(tmpdir, request, robot_type, mock, num
         )
 
     assert not mock_events["exit_early"], "`exit_early` wasn't properly reset to False"
-    assert mock_add_frame.call_count == 1, "`add_frame` should have been called 1 time"
     assert len(dataset) == 1, "`dataset` should contain only 1 frame"
@@ -42,7 +42,28 @@ from lerobot.common.datasets.utils import (
     unflatten_dict,
 )
 from lerobot.common.utils.utils import init_hydra_config, seeded_context
-from tests.utils import DEFAULT_CONFIG_PATH, DEVICE
+from tests.utils import DEFAULT_CONFIG_PATH, DEVICE, make_robot
+
+# TODO(aliberts): create proper test repo
+TEST_REPO_ID = "aliberts/koch_tutorial"
+
+
+def test_same_attributes_defined():
+    # TODO(aliberts): test with keys, shapes, names etc. provided instead of robot
+    robot = make_robot("koch", mock=True)
+
+    # Instantiate both ways
+    dataset_init = LeRobotDataset(repo_id=TEST_REPO_ID)
+    dataset_create = LeRobotDataset.create(repo_id=TEST_REPO_ID, fps=30, robot=robot)
+
+    # Access the '_hub_version' cached_property in both instances to force its creation
+    _ = dataset_init._hub_version
+    _ = dataset_create._hub_version
+
+    init_attr = set(vars(dataset_init).keys())
+    create_attr = set(vars(dataset_create).keys())
+
+    assert init_attr == create_attr, "Attribute sets do not match between __init__ and .create()"
 
 
 @pytest.mark.parametrize(