Skip to content
Draft
Show file tree
Hide file tree
Changes from 15 commits
Commits
Show all changes
27 commits
Select commit Hold shift + click to select a range
a4f69d6
Added support for layer-specific dimensions
brian-r-calder Jun 17, 2024
afa60bc
Bug-fixes for VR readers
brian-r-calder Jun 18, 2024
ca684a5
Update bag_vrrefinements.cpp
brian-r-calder Jun 25, 2024
78a1205
Adjustment to remove debugging code after initial tests.
brian-r-calder Jun 26, 2024
6fd3fb9
Merge branch 'master' into 109-LayerDescriptor-shape-parameters
selimnairb Nov 22, 2024
b45aecf
Fix tests broken by addressing #109.
selimnairb Nov 22, 2024
a3cbb27
CI: Enable examples building for test reporting workflow
selimnairb Dec 6, 2024
cea1c15
Modification to exception text for consistency
brian-r-calder Dec 21, 2024
c8b6515
Fix single-line if statement error; added braces.
selimnairb Jan 2, 2025
f9e186e
Fix single-line if statement error; added braces.
selimnairb Jan 2, 2025
7c75ebf
CI: test reporting: Make sure test output is reported to the console,…
selimnairb Jan 2, 2025
2cc6302
Support VR elements stored in either 1D or 2D arrays to be compatible…
selimnairb Jan 2, 2025
d49c72d
test: Restructure test 'test vr metadata write read' to only attempt …
selimnairb Jan 2, 2025
3b91e1e
SWIG: Python: first pass updating swig .i files to mirror changes to …
selimnairb Jan 2, 2025
39cee19
Tests: Python: Update tests to reflect layer dimension API fixes
selimnairb Jan 3, 2025
566324e
Store dimensions of layer descriptors as uint64_t internally (rather …
selimnairb Jan 3, 2025
65c33d2
LayerDescriptor: Return dimensions as uint32_t rather than the underl…
selimnairb Jan 3, 2025
98344af
Partial fix to update VR refinements and nodes to use 2D dataspace (a…
selimnairb Jan 4, 2025
72b166d
Merge branch 'master' into 109-LayerDescriptor-shape-parameters
selimnairb Jan 6, 2025
d7cd221
Update VR refinements and nodes to use 2D dataspace (as other impleme…
selimnairb Jan 6, 2025
27189ec
Ensure VR descriptors are consistent with shape of underlying H5 data…
selimnairb Jan 6, 2025
60a7a92
Remove unnecessary casts
selimnairb Jan 6, 2025
065acbd
Add explicit cast to uint32_t in LayerDescriptor::getDims()
selimnairb Jan 6, 2025
728ee2f
Fix wheel build on Windows
selimnairb Jan 7, 2025
772e7c9
VR: Add test_vr_bag to test reading an existing VR BAG file; Add exce…
selimnairb Jan 8, 2025
27a6687
test_vr_bag: Ensure BAG dimensions match VR metadata descriptor dimen…
selimnairb Jan 8, 2025
cd83fda
tests: Add example VR BAG from NBS archive
selimnairb Jan 9, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions .github/workflows/testreporting.yml
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ jobs:
run: |
export CC=${{env.CC}}
export CXX=${{env.CXX}}
cmake -G Ninja -DCMAKE_BUILD_TYPE=${{env.BUILD_TYPE}} -B build -S . -DCMAKE_INSTALL_PREFIX=/usr/local -DBAG_BUILD_TESTS:BOOL=ON -DBAG_CODE_COVERAGE:BOOL=ON
cmake -G Ninja -DCMAKE_BUILD_TYPE=${{env.BUILD_TYPE}} -B build -S . -DCMAKE_INSTALL_PREFIX=/usr/local -DBAG_BUILD_EXAMPLES:BOOL=ON -DBAG_BUILD_TESTS:BOOL=ON -DBAG_CODE_COVERAGE:BOOL=ON

- name: Build
# Build your program with the given configuration
Expand All @@ -83,7 +83,7 @@ jobs:

- name: Run tests
run: |
BAG_SAMPLES_PATH=${{github.workspace}}/examples/sample-data ./build/tests/bag_tests_d -r junit -o build/tests/bag_tests-testreport.xml
BAG_SAMPLES_PATH=${{github.workspace}}/examples/sample-data ./build/tests/bag_tests_d -r junit | tee build/tests/bag_tests-testreport.xml

- name: Test Reporter
uses: mikepenz/action-junit-report@v5
Expand Down
4 changes: 2 additions & 2 deletions api/bag.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -824,7 +824,7 @@ BagError bagGetErrorString(
strncpy(str, "Metadata One or more elements of the requested coverage are missing from the XML file", MAX_STR-1);
break;
case BAG_METADTA_INVLID_DIMENSIONS:
sprintf(str, "Metadata The number of dimensions is incorrect (not equal to %d)", RANK);
snprintf(str, MAX_STR, "Metadata The number of dimensions is incorrect (not equal to %d)", RANK);
break;
case BAG_METADTA_BUFFER_EXCEEDED:
strncpy(str, "Metadata supplied buffer is too large to be stored in the internal array", MAX_STR-1);
Expand Down Expand Up @@ -866,7 +866,7 @@ BagError bagGetErrorString(
strncpy(str, "HDF Bag is not an HDF5 File", MAX_STR-1);
break;
case BAG_HDF_RANK_INCOMPATIBLE:
sprintf(str, "HDF Bag's rank is incompatible with expected Rank of the Datasets: %d", RANK);
snprintf(str, MAX_STR, "HDF Bag's rank is incompatible with expected Rank of the Datasets: %d", RANK);
break;
case BAG_HDF_TYPE_NOT_FOUND:
strncpy(str, "HDF Bag surface Datatype parameter not available", MAX_STR-1);
Expand Down
27 changes: 18 additions & 9 deletions api/bag_dataset.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -502,12 +502,12 @@ void Dataset::createDataset(

// Mandatory Layers
// Elevation
this->addLayer(SimpleLayer::create(*this, Elevation, chunkSize,
compressionLevel));
this->addLayer(SimpleLayer::create(*this, Elevation, m_pMetadata->rows(), m_pMetadata->columns(),
chunkSize, compressionLevel));

// Uncertainty
this->addLayer(SimpleLayer::create(*this, Uncertainty, chunkSize,
compressionLevel));
this->addLayer(SimpleLayer::create(*this, Uncertainty, m_pMetadata->rows(), m_pMetadata->columns(),
chunkSize, compressionLevel));
}

//! Create an optional simple layer.
Expand Down Expand Up @@ -546,8 +546,8 @@ Layer& Dataset::createSimpleLayer(
case Num_Soundings: //[[fallthrough]];
case Average_Elevation: //[[fallthrough]];
case Nominal_Elevation:
return this->addLayer(SimpleLayer::create(*this, type, chunkSize,
compressionLevel));
return this->addLayer(SimpleLayer::create(*this, type,
m_pMetadata->rows(), m_pMetadata->columns(), chunkSize, compressionLevel));
case Surface_Correction: //[[fallthrough]];
case Georef_Metadata: //[[fallthrough]];
default:
Expand Down Expand Up @@ -1103,7 +1103,10 @@ void Dataset::readDataset(

H5Dclose(id);

auto layerDesc = SimpleLayerDescriptor::open(*this, layerType);
// Pre-stage the layer-specific descriptor. Note that we don't need to specify the
// dimensions of the layer here, since they're set from the HDF5 dataset when it
// gets opened with SimpleLayer::open().
auto layerDesc = SimpleLayerDescriptor::open(*this, layerType, 0, 0);
this->addLayer(SimpleLayer::open(*this, *layerDesc));
}

Expand Down Expand Up @@ -1166,7 +1169,10 @@ void Dataset::readDataset(
}

{
auto descriptor = VRRefinementsDescriptor::open(*this);
// Pre-stage the layer-specific descriptor for the refinements; note that this
// doesn't have to have specific dimensions since they're set when the refinements
// layer is read in VRRefinements::open().
auto descriptor = VRRefinementsDescriptor::open(*this, 0, 0);
this->addLayer(VRRefinements::open(*this, *descriptor));
}

Expand All @@ -1176,7 +1182,10 @@ void Dataset::readDataset(
{
H5Dclose(id);

auto descriptor = VRNodeDescriptor::open(*this);
// Pre-stage the layer-specific descriptor for the nodes; note that this doesn't
// have to have specific dimensions since they're set when the nodes layer is
// read in VRNode::open().
auto descriptor = VRNodeDescriptor::open(*this, 0, 0);
this->addLayer(VRNode::open(*this, *descriptor));
}
}
Expand Down
2 changes: 1 addition & 1 deletion api/bag_exceptions.h
Original file line number Diff line number Diff line change
Expand Up @@ -441,7 +441,7 @@ struct BAG_API InvalidVRRefinementDimensions final : virtual std::exception
{
const char* what() const noexcept override
{
return "The variable resolution refinement layer is not 1 dimensional.";
return "The variable resolution refinement layer is inconsistent with specification.";
}
};

Expand Down
24 changes: 20 additions & 4 deletions api/bag_georefmetadatalayer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -104,8 +104,13 @@ std::shared_ptr<GeorefMetadataLayer> GeorefMetadataLayer::create(
keyType != DT_UINT64)
throw InvalidKeyType{};

// The keys array should be the same dimensions as the mandatory elevation layer, so read
// from the file global descriptor, and set.
uint32_t rows = 0, cols = 0;
std::tie<uint32_t, uint32_t>(rows, cols) = dataset.getDescriptor().getDims();
auto pDescriptor = GeorefMetadataLayerDescriptor::create(dataset, name, profile, keyType,
definition, chunkSize, compressionLevel);
definition, rows, cols,
chunkSize, compressionLevel);

// Create the H5 Group to hold keys & values.
const auto& h5file = dataset.getH5file();
Expand All @@ -122,7 +127,8 @@ std::shared_ptr<GeorefMetadataLayer> GeorefMetadataLayer::create(
auto h5valueDataSet = GeorefMetadataLayer::createH5valueDataSet(dataset, *pDescriptor);

auto layer = std::make_shared<GeorefMetadataLayer>(dataset,
*pDescriptor, std::move(h5keyDataSet), std::move(h5vrKeyDataSet),
*pDescriptor, std::move(h5keyDataSet),
std::move(h5vrKeyDataSet),
std::move(h5valueDataSet));

layer->setValueTable(std::unique_ptr<ValueTable>(new ValueTable{*layer}));
Expand Down Expand Up @@ -150,6 +156,12 @@ std::shared_ptr<GeorefMetadataLayer> GeorefMetadataLayer::open(
new ::H5::DataSet{h5file.openDataSet(internalPath + COMPOUND_KEYS)},
DeleteH5dataSet{});

// The keys array has the dimensions of the layer, so we can read and reset the
// descriptor dimensions, in case they were inconsistent (or not set).
std::array<hsize_t, kRank> dims;
h5keyDataSet->getSpace().getSimpleExtentDims(dims.data(), nullptr);
descriptor.setDims(dims[0], dims[1]);

std::unique_ptr<::H5::DataSet, DeleteH5dataSet> h5vrKeyDataSet{};
if (dataset.getVRMetadata())
h5vrKeyDataSet = std::unique_ptr<::H5::DataSet, DeleteH5dataSet>(
Expand All @@ -161,7 +173,9 @@ std::shared_ptr<GeorefMetadataLayer> GeorefMetadataLayer::open(
DeleteH5dataSet{});

auto layer = std::make_shared<GeorefMetadataLayer>(dataset,
descriptor, std::move(h5keyDataSet), std::move(h5vrKeyDataSet),
descriptor,
std::move(h5keyDataSet),
std::move(h5vrKeyDataSet),
std::move(h5valueDataSet));

layer->setValueTable(std::unique_ptr<ValueTable>(new ValueTable{*layer}));
Expand All @@ -188,7 +202,9 @@ GeorefMetadataLayer::createH5keyDataSet(
std::unique_ptr<::H5::DataSet, DeleteH5dataSet> pH5dataSet;

{
// Use the dimensions from the descriptor.
// Use the dimensions from the descriptor. We could do this from the specific
// descriptor for the layer, too, which should mirror the size of the file global
// descriptor used here.
uint32_t dim0 = 0, dim1 = 0;
std::tie(dim0, dim1) = dataset.getDescriptor().getDims();
const std::array<hsize_t, kRank> fileDims{dim0, dim1};
Expand Down
4 changes: 2 additions & 2 deletions api/bag_georefmetadatalayer.h
Original file line number Diff line number Diff line change
Expand Up @@ -64,8 +64,8 @@ class BAG_API GeorefMetadataLayer final : public Layer
protected:
static std::shared_ptr<GeorefMetadataLayer> create(DataType keyType,
const std::string& name, GeorefMetadataProfile profile, Dataset& dataset,
const RecordDefinition& definition, uint64_t chunkSize,
int compressionLevel);
const RecordDefinition& definition,
uint64_t chunkSize, int compressionLevel);
static std::shared_ptr<GeorefMetadataLayer> open(Dataset& dataset,
GeorefMetadataLayerDescriptor& descriptor);

Expand Down
12 changes: 10 additions & 2 deletions api/bag_georefmetadatalayerdescriptor.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -35,10 +35,11 @@ GeorefMetadataLayerDescriptor::GeorefMetadataLayerDescriptor(
GeorefMetadataProfile profile,
DataType keyType,
RecordDefinition definition,
uint32_t rows, uint32_t cols,
uint64_t chunkSize,
int compressionLevel)
: LayerDescriptor(dataset.getNextId(), GEOREF_METADATA_PATH + name, name,
Georef_Metadata, chunkSize, compressionLevel)
Georef_Metadata, rows, cols, chunkSize, compressionLevel)
, m_pBagDataset(dataset.shared_from_this())
, m_profile(profile)
, m_keyType(keyType)
Expand Down Expand Up @@ -72,12 +73,14 @@ std::shared_ptr<GeorefMetadataLayerDescriptor> GeorefMetadataLayerDescriptor::cr
GeorefMetadataProfile profile,
DataType keyType,
RecordDefinition definition,
uint32_t rows, uint32_t cols,
uint64_t chunkSize,
int compressionLevel)
{
return std::shared_ptr<GeorefMetadataLayerDescriptor>(
new GeorefMetadataLayerDescriptor{dataset, name, profile, keyType,
std::move(definition), chunkSize, compressionLevel});
std::move(definition), rows, cols,
chunkSize, compressionLevel});
}

//! Open an existing georeferenced metadata layer descriptor.
Expand Down Expand Up @@ -165,8 +168,13 @@ std::shared_ptr<GeorefMetadataLayerDescriptor> GeorefMetadataLayerDescriptor::op
profile = UNKNOWN_METADATA_PROFILE;
}

std::array<hsize_t, 2> dims;
h5dataSet.getSpace().getSimpleExtentDims(dims.data(), nullptr);

return std::shared_ptr<GeorefMetadataLayerDescriptor>(
new GeorefMetadataLayerDescriptor{dataset, name, profile, keyType, definition,
static_cast<const uint32_t>(dims[0]),
static_cast<const uint32_t>(dims[1]),
chunkSize, compressionLevel});
}

Expand Down
16 changes: 9 additions & 7 deletions api/bag_georefmetadatalayerdescriptor.h
Original file line number Diff line number Diff line change
Expand Up @@ -21,12 +21,13 @@ namespace BAG {
class BAG_API GeorefMetadataLayerDescriptor final : public LayerDescriptor
{
public:
static std::shared_ptr<GeorefMetadataLayerDescriptor> create(Dataset& dataset,
const std::string& name, GeorefMetadataProfile profile, DataType keyType,
RecordDefinition definition, uint64_t chunkSize,
int compressionLevel);
static std::shared_ptr<GeorefMetadataLayerDescriptor> open(Dataset& dataset,
const std::string& name);
static std::shared_ptr<GeorefMetadataLayerDescriptor>
create(Dataset& dataset,
const std::string& name, GeorefMetadataProfile profile, DataType keyType,
RecordDefinition definition, uint32_t rows, uint32_t cols,
uint64_t chunkSize, int compressionLevel);
static std::shared_ptr<GeorefMetadataLayerDescriptor>
open(Dataset& dataset, const std::string& name);

GeorefMetadataLayerDescriptor(const GeorefMetadataLayerDescriptor&) = delete;
GeorefMetadataLayerDescriptor(GeorefMetadataLayerDescriptor&&) = delete;
Expand All @@ -52,7 +53,8 @@ class BAG_API GeorefMetadataLayerDescriptor final : public LayerDescriptor

protected:
GeorefMetadataLayerDescriptor(Dataset& dataset, const std::string& name, GeorefMetadataProfile profile,
DataType keyType, RecordDefinition definition, uint64_t chunkSize,
DataType keyType, RecordDefinition definition,
uint32_t rows, uint32_t cols, uint64_t chunkSize,
int compressionLevel);

private:
Expand Down
4 changes: 4 additions & 0 deletions api/bag_interleavedlegacylayer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,10 @@ std::shared_ptr<InterleavedLegacyLayer> InterleavedLegacyLayer::open(
descriptor.setMinMax(std::get<1>(possibleMinMax),
std::get<2>(possibleMinMax));

std::array<hsize_t, 2> dims;
h5dataSet->getSpace().getSimpleExtentDims(dims.data(), nullptr);
descriptor.setDims(dims[0], dims[1]);

return std::make_shared<InterleavedLegacyLayer>(dataset,
descriptor, std::move(h5dataSet));
}
Expand Down
17 changes: 12 additions & 5 deletions api/bag_interleavedlegacylayerdescriptor.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -20,9 +20,10 @@ namespace BAG {
InterleavedLegacyLayerDescriptor::InterleavedLegacyLayerDescriptor(
uint32_t id,
LayerType layerType,
GroupType groupType)
GroupType groupType,
uint32_t rows, uint32_t cols)
: LayerDescriptor(id, Layer::getInternalPath(layerType),
kLayerTypeMapString.at(layerType), layerType, 0, 0)
kLayerTypeMapString.at(layerType), layerType, rows, cols, 0, 0)
, m_groupType(groupType)
, m_elementSize(Layer::getElementSize(Layer::getDataType(layerType)))
{
Expand All @@ -45,8 +46,10 @@ InterleavedLegacyLayerDescriptor::InterleavedLegacyLayerDescriptor(
InterleavedLegacyLayerDescriptor::InterleavedLegacyLayerDescriptor(
const Dataset& dataset,
LayerType layerType,
GroupType groupType)
GroupType groupType,
uint32_t rows, uint32_t cols)
: LayerDescriptor(dataset, layerType,
rows, cols,
groupType == NODE
? NODE_GROUP_PATH
: groupType == ELEVATION
Expand Down Expand Up @@ -76,9 +79,11 @@ std::shared_ptr<InterleavedLegacyLayerDescriptor> InterleavedLegacyLayerDescript
LayerType layerType,
GroupType groupType)
{
uint32_t rows, cols;
std::tie(rows, cols) = dataset.getDescriptor().getDims();
return std::shared_ptr<InterleavedLegacyLayerDescriptor>(
new InterleavedLegacyLayerDescriptor{dataset.getNextId(), layerType,
groupType});
groupType, rows, cols});
}

//! Open an interleaved layer descriptor.
Expand All @@ -99,8 +104,10 @@ std::shared_ptr<InterleavedLegacyLayerDescriptor> InterleavedLegacyLayerDescript
LayerType layerType,
GroupType groupType)
{
uint32_t rows, cols;
std::tie<uint32_t, uint32_t>(rows, cols) = dataset.getDescriptor().getDims();
return std::shared_ptr<InterleavedLegacyLayerDescriptor>(
new InterleavedLegacyLayerDescriptor{dataset, layerType, groupType});
new InterleavedLegacyLayerDescriptor{dataset, layerType, groupType, rows, cols});
}


Expand Down
4 changes: 2 additions & 2 deletions api/bag_interleavedlegacylayerdescriptor.h
Original file line number Diff line number Diff line change
Expand Up @@ -43,9 +43,9 @@ class BAG_API InterleavedLegacyLayerDescriptor final : public LayerDescriptor

protected:
InterleavedLegacyLayerDescriptor(uint32_t id, LayerType layerType,
GroupType groupType);
GroupType groupType, uint32_t rows, uint32_t cols);
InterleavedLegacyLayerDescriptor(const Dataset& dataset, LayerType layerType,
GroupType groupType);
GroupType groupType, uint32_t rows, uint32_t cols);

private:
static void validateTypes(LayerType layerType, GroupType groupType);
Expand Down
3 changes: 1 addition & 2 deletions api/bag_layer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -208,9 +208,8 @@ UInt8Array Layer::read(
if (m_pBagDataset.expired())
throw DatasetNotFound{};

const auto pDataset = m_pBagDataset.lock();
uint32_t numRows = 0, numColumns = 0;
std::tie(numRows, numColumns) = pDataset->getDescriptor().getDims();
std::tie(numRows, numColumns) = m_pLayerDescriptor->getDims();

if (columnEnd >= numColumns || rowEnd >= numRows)
Copy link
Collaborator

@selimnairb selimnairb Nov 22, 2024

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@brian-r-calder This branch is being taken when running test_bag_vrmetadata.cpp, specifically when the test attempts to read back the VR metadata. Is the test wrong, or is there a mistake in Layer::read()?

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I don't see a branch here, but the code is correct --- we have to read the dimensions specifically from the layer rather than from the dataset, since we need to allow each layer to have its own dimensions.

Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

What I meant was if (columnEnd >= numColumns || rowEnd >= numRows) evaluates to true when Layer::read() is called from test_bag_vrmetadata.cpp:452. Is the VR layer being created by the test incorrect?

throw InvalidReadSize{};
Expand Down
20 changes: 20 additions & 0 deletions api/bag_layerdescriptor.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@ LayerDescriptor::LayerDescriptor(
std::string internalPath,
std::string name,
LayerType type,
uint32_t rows, uint32_t cols,
Copy link
Collaborator

@selimnairb selimnairb Jan 3, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@brian-r-calder We are defining rows and cols as uint32_t but these are always set from hsize_t values. hsize_t is a typedef of uint64_t, so an implicit narrowing is taking place. This doesn't seem to be a problem for GCC on Linux, but it fails by default in Visual Studio, and is a potential source of error.

Any objections to making these uint64_t?

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

No, that should be fine with most modern implementations. It was historically uint32_t because not all systems had moved to first-class uint64_t support at the time, I think.

Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Okay, I'll make this change.

uint64_t chunkSize,
int compressionLevel)
: m_id(id)
Expand All @@ -39,6 +40,7 @@ LayerDescriptor::LayerDescriptor(
, m_compressionLevel(compressionLevel)
, m_chunkSize(chunkSize)
, m_minMax(std::numeric_limits<float>::max(), std::numeric_limits<float>::lowest())
, m_dims({rows, cols})
{
}

Expand All @@ -56,11 +58,13 @@ LayerDescriptor::LayerDescriptor(
LayerDescriptor::LayerDescriptor(
const Dataset& dataset,
LayerType type,
uint32_t rows, uint32_t cols,
std::string internalPath,
std::string name)
: m_id(dataset.getNextId())
, m_layerType(type)
, m_minMax(std::numeric_limits<float>::max(), std::numeric_limits<float>::lowest())
, m_dims({rows, cols})
{
m_internalPath = internalPath.empty()
? Layer::getInternalPath(type)
Expand Down Expand Up @@ -169,6 +173,16 @@ const std::string& LayerDescriptor::getName() const & noexcept
return m_name;
}

//! Retrieve the dimensions (shape) of the layer
/*!
\return
The number of rows and columns of the layer's grid
*/
const std::tuple<uint32_t, uint32_t>& LayerDescriptor::getDims() const & noexcept
{
return m_dims;
}

//! Get the size of a buffer for reading a specified number rows and columns.
/*!
\param rows
Expand Down Expand Up @@ -204,6 +218,12 @@ LayerDescriptor& LayerDescriptor::setMinMax(
return *this;
}

LayerDescriptor& LayerDescriptor::setDims(uint32_t rows, uint32_t cols) & noexcept
{
m_dims = {rows, cols};
return *this;
}

//! Set the HDF5 path of the layer.
/*!
\param inPath
Expand Down
Loading
Loading