fixed merge conflicts

This commit is contained in:
Michel Aractingi
2024-12-10 16:59:43 +01:00
parent ceda66e2bc
commit d26a9b1363
4 changed files with 0 additions and 43 deletions

View File

@@ -287,11 +287,6 @@ def control_loop(
if dataset is not None:
frame = {**observation, **action}
<<<<<<< HEAD
if "next.reward" in events:
frame["next.reward"] = events["next.reward"]
=======
>>>>>>> main
dataset.add_frame(frame)
if display_cameras and not is_headless():
@@ -375,11 +370,7 @@ def sanity_check_dataset_robot_compatibility(
mismatches = []
for field, dataset_value, present_value in fields:
<<<<<<< HEAD
diff = DeepDiff(dataset_value, present_value)
=======
diff = DeepDiff(dataset_value, present_value, exclude_regex_paths=[r".*\['info'\]$"])
>>>>>>> main
if diff:
mismatches.append(f"{field}: expected {present_value}, got {dataset_value}")

View File

@@ -200,10 +200,7 @@ def record(
video: bool = True,
run_compute_stats: bool = True,
push_to_hub: bool = True,
<<<<<<< HEAD
=======
tags: list[str] | None = None,
>>>>>>> main
num_image_writer_processes: int = 0,
num_image_writer_threads_per_camera: int = 4,
display_cameras: bool = True,
@@ -266,10 +263,6 @@ def record(
use_videos=video,
image_writer_processes=num_image_writer_processes,
image_writer_threads=num_image_writer_threads_per_camera * len(robot.cameras),
<<<<<<< HEAD
features=extra_features,
=======
>>>>>>> main
)
if not robot.is_connected:
@@ -342,11 +335,7 @@ def record(
dataset.consolidate(run_compute_stats)
if push_to_hub:
<<<<<<< HEAD
dataset.push_to_hub()
=======
dataset.push_to_hub(tags=tags)
>>>>>>> main
log_say("Exiting", play_sounds)
return dataset
@@ -360,11 +349,7 @@ def replay(
episode: int,
fps: int | None = None,
play_sounds: bool = True,
<<<<<<< HEAD
local_files_only: bool = True,
=======
local_files_only: bool = False,
>>>>>>> main
):
# TODO(rcadene, aliberts): refactor with control_loop, once `dataset` is an instance of LeRobotDataset
# TODO(rcadene): Add option to record logs
@@ -519,21 +504,12 @@ if __name__ == "__main__":
"Not enough threads might cause low camera fps." "Not enough threads might cause low camera fps."
), ),
) )
<<<<<<< HEAD
# parser_record.add_argument(
# "--force-override",
# type=int,
# default=0,
# help="By default, data recording is resumed. When set to 1, delete the local directory and start data recording from scratch.",
# )
=======
parser_record.add_argument( parser_record.add_argument(
"--resume", "--resume",
type=int, type=int,
default=0, default=0,
help="Resume recording on an existing dataset.", help="Resume recording on an existing dataset.",
) )
>>>>>>> main
parser_record.add_argument( parser_record.add_argument(
"-p", "-p",
"--pretrained-policy-name-or-path", "--pretrained-policy-name-or-path",

View File

@@ -275,16 +275,10 @@ def main():
kwargs = vars(args)
repo_id = kwargs.pop("repo_id")
root = kwargs.pop("root")
<<<<<<< HEAD
logging.info("Loading dataset")
dataset = LeRobotDataset(repo_id, root=root, local_files_only=True)
=======
local_files_only = kwargs.pop("local_files_only")
logging.info("Loading dataset")
dataset = LeRobotDataset(repo_id, root=root, local_files_only=local_files_only)
>>>>>>> main
visualize_dataset(dataset, **vars(args))

View File

@@ -288,13 +288,9 @@ def main():
kwargs = vars(args)
repo_id = kwargs.pop("repo_id")
root = kwargs.pop("root")
<<<<<<< HEAD
dataset = LeRobotDataset(repo_id, root=root, local_files_only=True)
=======
local_files_only = kwargs.pop("local_files_only")
dataset = LeRobotDataset(repo_id, root=root, local_files_only=local_files_only)
>>>>>>> main
visualize_dataset_html(dataset, **kwargs)