Make delayed/chunked export configurable and off by default #5618

Merged · 4 commits · Jan 4, 2024
4 changes: 4 additions & 0 deletions docs/changes/newsfragments/5618.improved
@@ -0,0 +1,4 @@
+The feature of exporting large DataSets to netcdf by writing individual small files and combining them, introduced in QCoDeS 0.41.0, has been made configurable
+and turned off by default due to a number of corner cases where the export did not work correctly. The
+feature can be enabled when required by setting the config variable `qcodes.config.dataset.export_chunked_export_of_large_files_enabled`
+to True, with the threshold controlled using `qcodes.config.dataset.export_chunked_threshold`.
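
As a usage sketch (assuming a QCoDeS version containing this change; the attribute assignments mirror the tests in this PR), opting back in at runtime looks like:

import qcodes

# Opt back in to chunked export of large datasets (off by default after this PR).
qcodes.config.dataset.export_chunked_export_of_large_files_enabled = True

# Estimated dataset size in MB above which the chunked code path is used.
qcodes.config.dataset.export_chunked_threshold = 1000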
2 changes: 2 additions & 0 deletions src/qcodes/configuration/qcodesrc.json
@@ -76,6 +76,8 @@
"export_prefix": "qcodes_",
"export_path": "{db_location}",
"export_name_elements": ["captured_run_id", "guid"],
"export_chunked_export_of_large_files_enabled": false,
"export_chunked_threshold": 1000,
"in_memory_cache": true
},
"telemetry":
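
A hedged sketch of making such an override permanent (this assumes the standard QCoDeS config mechanism with its save_to_home helper; the threshold value is illustrative only):

import qcodes

qcodes.config.dataset.export_chunked_export_of_large_files_enabled = True
qcodes.config.dataset.export_chunked_threshold = 500  # illustrative, in MB
# Write the current config to ~/qcodesrc.json so the override persists.
qcodes.config.save_to_home()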
10 changes: 10 additions & 0 deletions src/qcodes/configuration/qcodesrc_schema.json
@@ -363,6 +363,16 @@
 },
 "default": ["captured_run_id", "guid"]
 },
+"export_chunked_export_of_large_files_enabled": {
+    "type": "boolean",
+    "default": false,
+    "description": "Should large datasets be exported to netcdf in chunks and then recombined into one file? This reduces the memory requirements for exporting the dataset but may be slower and fail in some corner cases."
+},
+"export_chunked_threshold": {
+    "type": "integer",
+    "default": 1000,
+    "description": "Estimated size in MB above which the dataset will be exported in chunks and recombined."
+},
 "in_memory_cache": {
     "type": "boolean",
     "default": true,
9 changes: 6 additions & 3 deletions src/qcodes/dataset/data_set.py
@@ -247,7 +247,6 @@ def __init__(
         self._cache: DataSetCacheWithDBBackend = DataSetCacheWithDBBackend(self)
         self._results: list[dict[str, VALUE]] = []
         self._in_memory_cache = in_memory_cache
-        self._export_limit = 1000

         if run_id is not None:
             if not run_exists(self.conn, run_id):
@@ -1487,15 +1486,19 @@ def _export_as_netcdf(self, path: Path, file_name: str) -> Path:
         import xarray as xr

         file_path = path / file_name
-        if self._estimate_ds_size() > self._export_limit:
+        if (
+            qcodes.config.dataset.export_chunked_export_of_large_files_enabled
+            and self._estimate_ds_size()
+            > qcodes.config.dataset.export_chunked_threshold
+        ):
             log.info(
                 "Dataset is expected to be larger than threshold. Using distributed export.",
                 extra={
                     "file_name": str(file_path),
                     "qcodes_guid": self.guid,
                     "ds_name": self.name,
                     "exp_name": self.exp_name,
-                    "_export_limit": self._export_limit,
+                    "_export_limit": qcodes.config.dataset.export_chunked_threshold,
                     "_estimated_ds_size": self._estimate_ds_size(),
                 },
             )
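
To illustrate the gating logic above (a sketch: `dataset` stands for any completed QCoDeS DataSet; the export call signature matches the tests below):

import logging
import qcodes

qcodes.config.dataset.export_chunked_export_of_large_files_enabled = True
qcodes.config.dataset.export_chunked_threshold = 0  # 0 MB forces the chunked path

logging.basicConfig(level=logging.INFO)
# With both conditions true, the "Using distributed export." branch is logged;
# with the flag left at its default of False, the file is written directly.
dataset.export(export_type="netcdf", path="/tmp", prefix="qcodes_")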
21 changes: 18 additions & 3 deletions tests/dataset/test_dataset_export.py
@@ -826,11 +826,24 @@ def test_export_dataset_small_no_delated(
assert "Writing netcdf file directly" in caplog.records[0].msg


def test_export_dataset_delayed_off_by_default(
tmp_path_factory: TempPathFactory, mock_dataset_grid: DataSet, caplog
) -> None:
tmp_path = tmp_path_factory.mktemp("export_netcdf")
qcodes.config.dataset.export_chunked_threshold = 0
assert qcodes.config.dataset.export_chunked_export_of_large_files_enabled is False
with caplog.at_level(logging.INFO):
mock_dataset_grid.export(export_type="netcdf", path=tmp_path, prefix="qcodes_")

assert "Writing netcdf file directly." in caplog.records[0].msg


def test_export_dataset_delayed_numeric(
tmp_path_factory: TempPathFactory, mock_dataset_grid: DataSet, caplog
) -> None:
tmp_path = tmp_path_factory.mktemp("export_netcdf")
mock_dataset_grid._export_limit = 0
qcodes.config.dataset.export_chunked_threshold = 0
qcodes.config.dataset.export_chunked_export_of_large_files_enabled = True
with caplog.at_level(logging.INFO):
mock_dataset_grid.export(export_type="netcdf", path=tmp_path, prefix="qcodes_")

@@ -863,7 +876,8 @@ def test_export_dataset_delayed(
     tmp_path_factory: TempPathFactory, mock_dataset_numpy: DataSet, caplog
 ) -> None:
     tmp_path = tmp_path_factory.mktemp("export_netcdf")
-    mock_dataset_numpy._export_limit = 0
+    qcodes.config.dataset.export_chunked_threshold = 0
+    qcodes.config.dataset.export_chunked_export_of_large_files_enabled = True
     with caplog.at_level(logging.INFO):
         mock_dataset_numpy.export(export_type="netcdf", path=tmp_path, prefix="qcodes_")

@@ -896,7 +910,8 @@ def test_export_dataset_delayed_complex(
     tmp_path_factory: TempPathFactory, mock_dataset_numpy_complex: DataSet, caplog
 ) -> None:
     tmp_path = tmp_path_factory.mktemp("export_netcdf")
-    mock_dataset_numpy_complex._export_limit = 0
+    qcodes.config.dataset.export_chunked_threshold = 0
+    qcodes.config.dataset.export_chunked_export_of_large_files_enabled = True
     with caplog.at_level(logging.INFO):
         mock_dataset_numpy_complex.export(
             export_type="netcdf", path=tmp_path, prefix="qcodes_"
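
One observation on the tests above (not part of the PR): they mutate qcodes.config.dataset globally and rely on each test setting the values it needs. A minimal sketch of isolating that with a hypothetical pytest fixture, using only the attribute access shown above:

import pytest
import qcodes

@pytest.fixture
def chunked_export_enabled():
    # Hypothetical fixture: enable chunked export for a single test,
    # then restore the previous global config values.
    cfg = qcodes.config.dataset
    old_enabled = cfg.export_chunked_export_of_large_files_enabled
    old_threshold = cfg.export_chunked_threshold
    cfg.export_chunked_export_of_large_files_enabled = True
    cfg.export_chunked_threshold = 0
    yield
    cfg.export_chunked_export_of_large_files_enabled = old_enabled
    cfg.export_chunked_threshold = old_threshold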