8 changes: 4 additions & 4 deletions .pre-commit-config.yaml

@@ -37,7 +37,7 @@ repos:
       - id: trailing-whitespace

   - repo: https://github.com/crate-ci/typos
-    rev: v1.30.2
+    rev: v1
     hooks:
       - id: typos
         args: [--force-exclude]
@@ -48,7 +48,7 @@ repos:
       - id: pyupgrade

   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.9.10
+    rev: v0.11.4
     hooks:
       - id: ruff
         args: [--fix]
@@ -57,12 +57,12 @@ repos:

   ##### Security #####
   - repo: https://github.com/gitleaks/gitleaks
-    rev: v8.24.0
+    rev: v8.24.2
     hooks:
       - id: gitleaks

   - repo: https://github.com/woodruffw/zizmor-pre-commit
-    rev: v1.4.1
+    rev: v1.5.2
     hooks:
       - id: zizmor

2 changes: 1 addition & 1 deletion lerobot/common/datasets/lerobot_dataset.py

@@ -1053,7 +1053,7 @@ def __init__(
         super().__init__()
         self.repo_ids = repo_ids
         self.root = Path(root) if root else HF_LEROBOT_HOME
-        self.tolerances_s = tolerances_s if tolerances_s else {repo_id: 1e-4 for repo_id in repo_ids}
+        self.tolerances_s = tolerances_s if tolerances_s else dict.fromkeys(repo_ids, 0.0001)
         # Construct the underlying datasets passing everything but `transform` and `delta_timestamps` which
         # are handled by this class.
         self._datasets = [

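For context, the pattern in this and the following files leans on `dict.fromkeys(iterable, value)`, which builds the same mapping as a comprehension assigning one constant value to every key, and defaults the value to `None` when it is omitted. A minimal standalone sketch of that equivalence (example keys and values are made up here, not taken from the repository):

```python
repo_ids = ["example/repo_a", "example/repo_b"]  # hypothetical keys for illustration

# Two-argument form: every key maps to the same constant value.
assert dict.fromkeys(repo_ids, 1e-4) == {repo_id: 1e-4 for repo_id in repo_ids}

# One-argument form: every key maps to None (the form used in logging_utils.py below).
keys = ("loss", "lr", "grad_norm")  # hypothetical metric names
assert dict.fromkeys(keys) == {k: None for k in keys}
```
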
2 changes: 1 addition & 1 deletion lerobot/common/datasets/utils.py

@@ -240,7 +240,7 @@ def load_episodes_stats(local_dir: Path) -> dict:
 def backward_compatible_episodes_stats(
     stats: dict[str, dict[str, np.ndarray]], episodes: list[int]
 ) -> dict[str, dict[str, np.ndarray]]:
-    return {ep_idx: stats for ep_idx in episodes}
+    return dict.fromkeys(episodes, stats)


 def load_image_as_numpy(

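One nuance this change preserves: when the shared value is mutable, as with the `stats` dict here, `dict.fromkeys` stores the same object under every key, exactly as the original comprehension did, so a mutation seen through one key is visible through all of them. A small sketch of that behaviour (hypothetical data, not from the repository):

```python
stats = {"mean": 0.0}  # hypothetical per-episode stats payload
episodes = [0, 1, 2]

by_comprehension = {ep_idx: stats for ep_idx in episodes}
by_fromkeys = dict.fromkeys(episodes, stats)

by_fromkeys[0]["mean"] = 1.0
assert by_fromkeys[2]["mean"] == 1.0          # same object shared across keys
assert by_comprehension[1] is by_fromkeys[1]  # identical sharing in both spellings
```
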
2 changes: 1 addition & 1 deletion lerobot/common/datasets/v2/convert_dataset_v1_to_v2.py

@@ -481,7 +481,7 @@ def convert_dataset(

     # Tasks
     if single_task:
-        tasks_by_episodes = {ep_idx: single_task for ep_idx in episode_indices}
+        tasks_by_episodes = dict.fromkeys(episode_indices, single_task)
         dataset, tasks = add_task_index_by_episodes(dataset, tasks_by_episodes)
         tasks_by_episodes = {ep_idx: [task] for ep_idx, task in tasks_by_episodes.items()}
     elif tasks_path:

2 changes: 1 addition & 1 deletion lerobot/common/utils/logging_utils.py

@@ -94,7 +94,7 @@ def __init__(
         metrics: dict[str, AverageMeter],
         initial_step: int = 0,
     ):
-        self.__dict__.update({k: None for k in self.__keys__})
+        self.__dict__.update(dict.fromkeys(self.__keys__))
         self._batch_size = batch_size
         self._num_frames = num_frames
         self._avg_samples_per_ep = num_frames / num_episodes