From a4f69d6cf824897dd25212566ddb8229ba30a038 Mon Sep 17 00:00:00 2001 From: brian-r-calder Date: Mon, 17 Jun 2024 18:36:28 -0400 Subject: [PATCH 01/25] Added support for layer-specific dimensions In order to allow for layers that have different dimensions from the main grid (specifically the VRRefinements and VRNode layers), added support for dimensions in the LayerDescriptor class, and propagated support to the derived classes. --- api/bag.cpp | 4 +-- api/bag_dataset.cpp | 27 ++++++++++++------- api/bag_georefmetadatalayer.cpp | 24 ++++++++++++++--- api/bag_georefmetadatalayer.h | 4 +-- api/bag_georefmetadatalayerdescriptor.cpp | 12 +++++++-- api/bag_georefmetadatalayerdescriptor.h | 16 ++++++----- api/bag_interleavedlegacylayer.cpp | 4 +++ api/bag_interleavedlegacylayerdescriptor.cpp | 17 ++++++++---- api/bag_interleavedlegacylayerdescriptor.h | 4 +-- api/bag_layer.cpp | 3 +-- api/bag_layerdescriptor.cpp | 20 ++++++++++++++ api/bag_layerdescriptor.h | 8 +++++- api/bag_simplelayer.cpp | 12 ++++++--- api/bag_simplelayer.h | 2 +- api/bag_simplelayerdescriptor.cpp | 15 ++++++----- api/bag_simplelayerdescriptor.h | 11 +++++--- api/bag_surfacecorrections.cpp | 2 +- api/bag_surfacecorrectionsdescriptor.cpp | 5 ++-- api/bag_vrmetadata.cpp | 6 +++++ api/bag_vrmetadatadescriptor.cpp | 28 +++++++++++++++----- api/bag_vrmetadatadescriptor.h | 4 +-- api/bag_vrnode.cpp | 17 +++++++++++- api/bag_vrnodedescriptor.cpp | 18 +++++++------ api/bag_vrnodedescriptor.h | 7 ++--- api/bag_vrrefinements.cpp | 28 +++++++++++++++----- api/bag_vrrefinementsdescriptor.cpp | 16 ++++++----- api/bag_vrrefinementsdescriptor.h | 9 ++++--- 27 files changed, 231 insertions(+), 92 deletions(-) diff --git a/api/bag.cpp b/api/bag.cpp index 66ec97b61b..e2623a58d0 100644 --- a/api/bag.cpp +++ b/api/bag.cpp @@ -824,7 +824,7 @@ BagError bagGetErrorString( strncpy(str, "Metadata One or more elements of the requested coverage are missing from the XML file", MAX_STR-1); break; case 
BAG_METADTA_INVLID_DIMENSIONS: - sprintf(str, "Metadata The number of dimensions is incorrect (not equal to %d)", RANK); + snprintf(str, MAX_STR, "Metadata The number of dimensions is incorrect (not equal to %d)", RANK); break; case BAG_METADTA_BUFFER_EXCEEDED: strncpy(str, "Metadata supplied buffer is too large to be stored in the internal array", MAX_STR-1); @@ -866,7 +866,7 @@ BagError bagGetErrorString( strncpy(str, "HDF Bag is not an HDF5 File", MAX_STR-1); break; case BAG_HDF_RANK_INCOMPATIBLE: - sprintf(str, "HDF Bag's rank is incompatible with expected Rank of the Datasets: %d", RANK); + snprintf(str, MAX_STR, "HDF Bag's rank is incompatible with expected Rank of the Datasets: %d", RANK); break; case BAG_HDF_TYPE_NOT_FOUND: strncpy(str, "HDF Bag surface Datatype parameter not available", MAX_STR-1); diff --git a/api/bag_dataset.cpp b/api/bag_dataset.cpp index ab04c9276b..c92ce2c615 100644 --- a/api/bag_dataset.cpp +++ b/api/bag_dataset.cpp @@ -501,12 +501,12 @@ void Dataset::createDataset( // Mandatory Layers // Elevation - this->addLayer(SimpleLayer::create(*this, Elevation, chunkSize, - compressionLevel)); + this->addLayer(SimpleLayer::create(*this, Elevation, m_pMetadata->rows(), m_pMetadata->columns(), + chunkSize, compressionLevel)); // Uncertainty - this->addLayer(SimpleLayer::create(*this, Uncertainty, chunkSize, - compressionLevel)); + this->addLayer(SimpleLayer::create(*this, Uncertainty, m_pMetadata->rows(), m_pMetadata->columns(), + chunkSize, compressionLevel)); } //! Create an optional simple layer. 
@@ -545,8 +545,8 @@ Layer& Dataset::createSimpleLayer( case Num_Soundings: //[[fallthrough]]; case Average_Elevation: //[[fallthrough]]; case Nominal_Elevation: - return this->addLayer(SimpleLayer::create(*this, type, chunkSize, - compressionLevel)); + return this->addLayer(SimpleLayer::create(*this, type, + m_pMetadata->rows(), m_pMetadata->columns(), chunkSize, compressionLevel)); case Surface_Correction: //[[fallthrough]]; case Georef_Metadata: //[[fallthrough]]; default: @@ -1096,7 +1096,10 @@ void Dataset::readDataset( H5Dclose(id); - auto layerDesc = SimpleLayerDescriptor::open(*this, layerType); + // Pre-stage the layer-specific descriptor. Note that we don't need to specify the + // dimensions of the layer here, since they're set from the HDF5 dataset when it + // gets opened with SimpleLayer::open(). + auto layerDesc = SimpleLayerDescriptor::open(*this, layerType, 0, 0); this->addLayer(SimpleLayer::open(*this, *layerDesc)); } @@ -1159,7 +1162,10 @@ void Dataset::readDataset( } { - auto descriptor = VRRefinementsDescriptor::open(*this); + // Pre-stage the layer-specific descriptor for the refinements; note that this + // doesn't have to have specific dimensions since they're set when the refinements + // layer is read in VRRefinements::open(). + auto descriptor = VRRefinementsDescriptor::open(*this, 0, 0); this->addLayer(VRRefinements::open(*this, *descriptor)); } @@ -1169,7 +1175,10 @@ void Dataset::readDataset( { H5Dclose(id); - auto descriptor = VRNodeDescriptor::open(*this); + // Pre-stage the layer-specific descriptor for the nodes; note that this doesn't + // have to have specific dimensions since they're set when the nodes layer is + // read in VRNode::open(). 
+ auto descriptor = VRNodeDescriptor::open(*this, 0, 0); this->addLayer(VRNode::open(*this, *descriptor)); } } diff --git a/api/bag_georefmetadatalayer.cpp b/api/bag_georefmetadatalayer.cpp index bde3e0e66e..0eefbf499b 100644 --- a/api/bag_georefmetadatalayer.cpp +++ b/api/bag_georefmetadatalayer.cpp @@ -104,8 +104,13 @@ std::shared_ptr GeorefMetadataLayer::create( keyType != DT_UINT64) throw InvalidKeyType{}; + // The keys array should be the same dimensions as the mandatory elevation layer, so read + // from the file global descriptor, and set. + uint32_t rows = 0, cols = 0; + std::tie(rows, cols) = dataset.getDescriptor().getDims(); auto pDescriptor = GeorefMetadataLayerDescriptor::create(dataset, name, profile, keyType, - definition, chunkSize, compressionLevel); + definition, rows, cols, + chunkSize, compressionLevel); // Create the H5 Group to hold keys & values. const auto& h5file = dataset.getH5file(); @@ -122,7 +127,8 @@ std::shared_ptr GeorefMetadataLayer::create( auto h5valueDataSet = GeorefMetadataLayer::createH5valueDataSet(dataset, *pDescriptor); auto layer = std::make_shared(dataset, - *pDescriptor, std::move(h5keyDataSet), std::move(h5vrKeyDataSet), + *pDescriptor, std::move(h5keyDataSet), + std::move(h5vrKeyDataSet), std::move(h5valueDataSet)); layer->setValueTable(std::unique_ptr(new ValueTable{*layer})); @@ -150,6 +156,12 @@ std::shared_ptr GeorefMetadataLayer::open( new ::H5::DataSet{h5file.openDataSet(internalPath + COMPOUND_KEYS)}, DeleteH5dataSet{}); + // The keys array has the dimensions of the layer, so we can read and reset the + // descriptor dimensions, in case they were inconsistent (or not set). 
+ std::array dims; + h5keyDataSet->getSpace().getSimpleExtentDims(dims.data(), nullptr); + descriptor.setDims(dims[0], dims[1]); + std::unique_ptr<::H5::DataSet, DeleteH5dataSet> h5vrKeyDataSet{}; if (dataset.getVRMetadata()) h5vrKeyDataSet = std::unique_ptr<::H5::DataSet, DeleteH5dataSet>( @@ -161,7 +173,9 @@ std::shared_ptr GeorefMetadataLayer::open( DeleteH5dataSet{}); auto layer = std::make_shared(dataset, - descriptor, std::move(h5keyDataSet), std::move(h5vrKeyDataSet), + descriptor, + std::move(h5keyDataSet), + std::move(h5vrKeyDataSet), std::move(h5valueDataSet)); layer->setValueTable(std::unique_ptr(new ValueTable{*layer})); @@ -188,7 +202,9 @@ GeorefMetadataLayer::createH5keyDataSet( std::unique_ptr<::H5::DataSet, DeleteH5dataSet> pH5dataSet; { - // Use the dimensions from the descriptor. + // Use the dimensions from the descriptor. We could do this from the specific + // descriptor for the layer, too, which should mirror the size of the file global + // descriptor used here. uint32_t dim0 = 0, dim1 = 0; std::tie(dim0, dim1) = dataset.getDescriptor().getDims(); const std::array fileDims{dim0, dim1}; diff --git a/api/bag_georefmetadatalayer.h b/api/bag_georefmetadatalayer.h index 32f940f371..faceb66d78 100644 --- a/api/bag_georefmetadatalayer.h +++ b/api/bag_georefmetadatalayer.h @@ -64,8 +64,8 @@ class BAG_API GeorefMetadataLayer final : public Layer protected: static std::shared_ptr create(DataType keyType, const std::string& name, GeorefMetadataProfile profile, Dataset& dataset, - const RecordDefinition& definition, uint64_t chunkSize, - int compressionLevel); + const RecordDefinition& definition, + uint64_t chunkSize, int compressionLevel); static std::shared_ptr open(Dataset& dataset, GeorefMetadataLayerDescriptor& descriptor); diff --git a/api/bag_georefmetadatalayerdescriptor.cpp b/api/bag_georefmetadatalayerdescriptor.cpp index 527d0fd1a9..9a9bd0e69d 100644 --- a/api/bag_georefmetadatalayerdescriptor.cpp +++ 
b/api/bag_georefmetadatalayerdescriptor.cpp @@ -35,10 +35,11 @@ GeorefMetadataLayerDescriptor::GeorefMetadataLayerDescriptor( GeorefMetadataProfile profile, DataType keyType, RecordDefinition definition, + uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel) : LayerDescriptor(dataset.getNextId(), GEOREF_METADATA_PATH + name, name, - Georef_Metadata, chunkSize, compressionLevel) + Georef_Metadata, rows, cols, chunkSize, compressionLevel) , m_pBagDataset(dataset.shared_from_this()) , m_profile(profile) , m_keyType(keyType) @@ -72,12 +73,14 @@ std::shared_ptr GeorefMetadataLayerDescriptor::cr GeorefMetadataProfile profile, DataType keyType, RecordDefinition definition, + uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel) { return std::shared_ptr( new GeorefMetadataLayerDescriptor{dataset, name, profile, keyType, - std::move(definition), chunkSize, compressionLevel}); + std::move(definition), rows, cols, + chunkSize, compressionLevel}); } //! Open an existing georeferenced metadata layer descriptor. 
@@ -165,8 +168,13 @@ std::shared_ptr GeorefMetadataLayerDescriptor::op profile = UNKNOWN_METADATA_PROFILE; } + std::array dims; + h5dataSet.getSpace().getSimpleExtentDims(dims.data(), nullptr); + return std::shared_ptr( new GeorefMetadataLayerDescriptor{dataset, name, profile, keyType, definition, + static_cast(dims[0]), + static_cast(dims[1]), chunkSize, compressionLevel}); } diff --git a/api/bag_georefmetadatalayerdescriptor.h b/api/bag_georefmetadatalayerdescriptor.h index 6749d09568..bc3d029037 100644 --- a/api/bag_georefmetadatalayerdescriptor.h +++ b/api/bag_georefmetadatalayerdescriptor.h @@ -21,12 +21,13 @@ namespace BAG { class BAG_API GeorefMetadataLayerDescriptor final : public LayerDescriptor { public: - static std::shared_ptr create(Dataset& dataset, - const std::string& name, GeorefMetadataProfile profile, DataType keyType, - RecordDefinition definition, uint64_t chunkSize, - int compressionLevel); - static std::shared_ptr open(Dataset& dataset, - const std::string& name); + static std::shared_ptr + create(Dataset& dataset, + const std::string& name, GeorefMetadataProfile profile, DataType keyType, + RecordDefinition definition, uint32_t rows, uint32_t cols, + uint64_t chunkSize, int compressionLevel); + static std::shared_ptr + open(Dataset& dataset, const std::string& name); GeorefMetadataLayerDescriptor(const GeorefMetadataLayerDescriptor&) = delete; GeorefMetadataLayerDescriptor(GeorefMetadataLayerDescriptor&&) = delete; @@ -52,7 +53,8 @@ class BAG_API GeorefMetadataLayerDescriptor final : public LayerDescriptor protected: GeorefMetadataLayerDescriptor(Dataset& dataset, const std::string& name, GeorefMetadataProfile profile, - DataType keyType, RecordDefinition definition, uint64_t chunkSize, + DataType keyType, RecordDefinition definition, + uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel); private: diff --git a/api/bag_interleavedlegacylayer.cpp b/api/bag_interleavedlegacylayer.cpp index d18e6f3fd7..c4e7124197 100644 --- 
a/api/bag_interleavedlegacylayer.cpp +++ b/api/bag_interleavedlegacylayer.cpp @@ -51,6 +51,10 @@ std::shared_ptr InterleavedLegacyLayer::open( descriptor.setMinMax(std::get<1>(possibleMinMax), std::get<2>(possibleMinMax)); + std::array dims; + h5dataSet->getSpace().getSimpleExtentDims(dims.data(), nullptr); + descriptor.setDims(dims[0], dims[1]); + return std::make_shared(dataset, descriptor, std::move(h5dataSet)); } diff --git a/api/bag_interleavedlegacylayerdescriptor.cpp b/api/bag_interleavedlegacylayerdescriptor.cpp index 179d5685f8..1567ad2df0 100644 --- a/api/bag_interleavedlegacylayerdescriptor.cpp +++ b/api/bag_interleavedlegacylayerdescriptor.cpp @@ -20,9 +20,10 @@ namespace BAG { InterleavedLegacyLayerDescriptor::InterleavedLegacyLayerDescriptor( uint32_t id, LayerType layerType, - GroupType groupType) + GroupType groupType, + uint32_t rows, uint32_t cols) : LayerDescriptor(id, Layer::getInternalPath(layerType), - kLayerTypeMapString.at(layerType), layerType, 0, 0) + kLayerTypeMapString.at(layerType), layerType, rows, cols, 0, 0) , m_groupType(groupType) , m_elementSize(Layer::getElementSize(Layer::getDataType(layerType))) { @@ -45,8 +46,10 @@ InterleavedLegacyLayerDescriptor::InterleavedLegacyLayerDescriptor( InterleavedLegacyLayerDescriptor::InterleavedLegacyLayerDescriptor( const Dataset& dataset, LayerType layerType, - GroupType groupType) + GroupType groupType, + uint32_t rows, uint32_t cols) : LayerDescriptor(dataset, layerType, + rows, cols, groupType == NODE ? NODE_GROUP_PATH : groupType == ELEVATION @@ -76,9 +79,11 @@ std::shared_ptr InterleavedLegacyLayerDescript LayerType layerType, GroupType groupType) { + uint32_t rows, cols; + std::tie(rows, cols) = dataset.getDescriptor().getDims(); return std::shared_ptr( new InterleavedLegacyLayerDescriptor{dataset.getNextId(), layerType, - groupType}); + groupType, rows, cols}); } //! Open an interleaved layer descriptor. 
@@ -99,8 +104,10 @@ std::shared_ptr InterleavedLegacyLayerDescript LayerType layerType, GroupType groupType) { + uint32_t rows, cols; + std::tie(rows, cols) = dataset.getDescriptor().getDims(); return std::shared_ptr( - new InterleavedLegacyLayerDescriptor{dataset, layerType, groupType}); + new InterleavedLegacyLayerDescriptor{dataset, layerType, groupType, rows, cols}); } diff --git a/api/bag_interleavedlegacylayerdescriptor.h b/api/bag_interleavedlegacylayerdescriptor.h index 6e5fc9d642..1d5beeeb9f 100644 --- a/api/bag_interleavedlegacylayerdescriptor.h +++ b/api/bag_interleavedlegacylayerdescriptor.h @@ -43,9 +43,9 @@ class BAG_API InterleavedLegacyLayerDescriptor final : public LayerDescriptor protected: InterleavedLegacyLayerDescriptor(uint32_t id, LayerType layerType, - GroupType groupType); + GroupType groupType, uint32_t rows, uint32_t cols); InterleavedLegacyLayerDescriptor(const Dataset& dataset, LayerType layerType, - GroupType groupType); + GroupType groupType, uint32_t rows, uint32_t cols); private: static void validateTypes(LayerType layerType, GroupType groupType); diff --git a/api/bag_layer.cpp b/api/bag_layer.cpp index dbec860765..454ef7d78d 100644 --- a/api/bag_layer.cpp +++ b/api/bag_layer.cpp @@ -208,9 +208,8 @@ UInt8Array Layer::read( if (m_pBagDataset.expired()) throw DatasetNotFound{}; - const auto pDataset = m_pBagDataset.lock(); uint32_t numRows = 0, numColumns = 0; - std::tie(numRows, numColumns) = pDataset->getDescriptor().getDims(); + std::tie(numRows, numColumns) = m_pLayerDescriptor->getDims(); if (columnEnd >= numColumns || rowEnd >= numRows) throw InvalidReadSize{}; diff --git a/api/bag_layerdescriptor.cpp b/api/bag_layerdescriptor.cpp index 931d69e7b7..f67bd59384 100644 --- a/api/bag_layerdescriptor.cpp +++ b/api/bag_layerdescriptor.cpp @@ -30,6 +30,7 @@ LayerDescriptor::LayerDescriptor( std::string internalPath, std::string name, LayerType type, + uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel) : m_id(id) @@ 
-39,6 +40,7 @@ LayerDescriptor::LayerDescriptor( , m_compressionLevel(compressionLevel) , m_chunkSize(chunkSize) , m_minMax(std::numeric_limits::max(), std::numeric_limits::lowest()) + , m_dims({rows, cols}) { } @@ -56,11 +58,13 @@ LayerDescriptor::LayerDescriptor( const Dataset& dataset, LayerType type, + uint32_t rows, uint32_t cols, std::string internalPath, std::string name) : m_id(dataset.getNextId()) , m_layerType(type) , m_minMax(std::numeric_limits::max(), std::numeric_limits::lowest()) + , m_dims({rows, cols}) { m_internalPath = internalPath.empty() ? Layer::getInternalPath(type) @@ -169,6 +173,16 @@ const std::string& LayerDescriptor::getName() const & noexcept { return m_name; } +//! Retrieve the dimensions (shape) of the layer +/*! +\return + The number of rows and columns of the grid +*/ +const std::tuple& LayerDescriptor::getDims() const & noexcept +{ + return m_dims; +} + //! Get the size of a buffer for reading a specified number rows and columns. /*! \param rows @@ -204,6 +218,12 @@ LayerDescriptor& LayerDescriptor::setMinMax( return *this; } +LayerDescriptor& LayerDescriptor::setDims(uint32_t rows, uint32_t cols) & noexcept +{ + m_dims = {rows, cols}; + return *this; +} + //! Set the HDF5 path of the layer. /*! 
\param inPath diff --git a/api/bag_layerdescriptor.h b/api/bag_layerdescriptor.h index 98ea835a8d..eb7e506747 100644 --- a/api/bag_layerdescriptor.h +++ b/api/bag_layerdescriptor.h @@ -53,14 +53,18 @@ class BAG_API LayerDescriptor : public std::enable_shared_from_this getMinMax() const noexcept; const std::string& getName() const & noexcept; + const std::tuple& getDims() const & noexcept; LayerDescriptor& setName(std::string inName) & noexcept; LayerDescriptor& setMinMax(float min, float max) & noexcept; + LayerDescriptor& setDims(uint32_t rows, uint32_t cols) & noexcept; protected: LayerDescriptor(uint32_t id, std::string internalPath, std::string name, - LayerType type, uint64_t chunkSize, int compressionLevel); + LayerType type, uint32_t rows, uint32_t cols, uint64_t chunkSize, + int compressionLevel); LayerDescriptor(const Dataset& dataset, LayerType type, + uint32_t rows, uint32_t cols, std::string internalPath = {}, std::string name = {}); size_t getReadBufferSize(uint32_t rows, uint32_t columns) const noexcept; @@ -85,6 +89,8 @@ class BAG_API LayerDescriptor : public std::enable_shared_from_this m_minMax{}; + //! 
The dimensions of the layer + std::tuple m_dims{}; friend GeorefMetadataLayer; friend InterleavedLegacyLayer; diff --git a/api/bag_simplelayer.cpp b/api/bag_simplelayer.cpp index 56b059d6ce..b5d6d5846b 100644 --- a/api/bag_simplelayer.cpp +++ b/api/bag_simplelayer.cpp @@ -46,11 +46,11 @@ SimpleLayer::SimpleLayer( std::shared_ptr SimpleLayer::create( Dataset& dataset, LayerType type, + uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel) { - auto descriptor = SimpleLayerDescriptor::create(dataset, type, chunkSize, - compressionLevel); + auto descriptor = SimpleLayerDescriptor::create(dataset, type, rows, cols, chunkSize, compressionLevel); auto h5dataSet = SimpleLayer::createH5dataSet(dataset, *descriptor); return std::make_shared(dataset, *descriptor, std::move(h5dataSet)); @@ -74,6 +74,12 @@ std::shared_ptr SimpleLayer::open( auto h5dataSet = std::unique_ptr<::H5::DataSet, DeleteH5dataSet>( new ::H5::DataSet{h5file.openDataSet(descriptor.getInternalPath())}, DeleteH5dataSet{}); + + // Configure the layer dimensions in the descriptor (we implicitly expect the layer + // to be two-dimensional) + hsize_t dims[2]; + h5dataSet->getSpace().getSimpleExtentDims(dims); + descriptor.setDims(dims[0], dims[1]); // Read the min/max attribute values. 
const auto possibleMinMax = dataset.getMinMax(descriptor.getLayerType()); @@ -101,7 +107,7 @@ SimpleLayer::createH5dataSet( const SimpleLayerDescriptor& descriptor) { uint32_t dim0 = 0, dim1 = 0; - std::tie(dim0, dim1) = dataset.getDescriptor().getDims(); + std::tie(dim0, dim1) = descriptor.getDims(); const std::array fileDims{dim0, dim1}; ::H5::DataSpace h5dataSpace{kRank, fileDims.data(), fileDims.data()}; diff --git a/api/bag_simplelayer.h b/api/bag_simplelayer.h index 6925f4eeba..4d280c0420 100644 --- a/api/bag_simplelayer.h +++ b/api/bag_simplelayer.h @@ -46,7 +46,7 @@ class BAG_API SimpleLayer final : public Layer protected: static std::shared_ptr create(Dataset& dataset, - LayerType type, uint64_t chunkSize, int compressionLevel); + LayerType type, uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel); static std::shared_ptr open(Dataset& dataset, SimpleLayerDescriptor& descriptor); diff --git a/api/bag_simplelayerdescriptor.cpp b/api/bag_simplelayerdescriptor.cpp index ff266f1d26..2e36968490 100644 --- a/api/bag_simplelayerdescriptor.cpp +++ b/api/bag_simplelayerdescriptor.cpp @@ -19,10 +19,11 @@ namespace BAG { SimpleLayerDescriptor::SimpleLayerDescriptor( uint32_t id, LayerType type, + uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel) : LayerDescriptor(id, Layer::getInternalPath(type), - kLayerTypeMapString.at(type), type, chunkSize, compressionLevel) + kLayerTypeMapString.at(type), type, rows, cols, chunkSize, compressionLevel) , m_elementSize(Layer::getElementSize(Layer::getDataType(type))) { } @@ -36,8 +37,9 @@ SimpleLayerDescriptor::SimpleLayerDescriptor( */ SimpleLayerDescriptor::SimpleLayerDescriptor( const Dataset& dataset, - LayerType type) - : LayerDescriptor(dataset, type) + LayerType type, + uint32_t rows, uint32_t cols) + : LayerDescriptor(dataset, type, rows, cols) , m_elementSize(Layer::getElementSize(Layer::getDataType(type))) { } @@ -59,11 +61,12 @@ SimpleLayerDescriptor::SimpleLayerDescriptor( 
std::shared_ptr SimpleLayerDescriptor::create( const Dataset& dataset, LayerType type, + uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel) { return std::shared_ptr( - new SimpleLayerDescriptor{dataset.getNextId(), type, chunkSize, + new SimpleLayerDescriptor{dataset.getNextId(), type, rows, cols, chunkSize, compressionLevel}); } @@ -79,10 +82,10 @@ std::shared_ptr SimpleLayerDescriptor::create( */ std::shared_ptr SimpleLayerDescriptor::open( const Dataset& dataset, - LayerType type) + LayerType type, uint32_t rows, uint32_t cols) { return std::shared_ptr( - new SimpleLayerDescriptor{dataset, type}); + new SimpleLayerDescriptor{dataset, type, rows, cols}); } diff --git a/api/bag_simplelayerdescriptor.h b/api/bag_simplelayerdescriptor.h index b784f398fd..ca48599777 100644 --- a/api/bag_simplelayerdescriptor.h +++ b/api/bag_simplelayerdescriptor.h @@ -16,10 +16,11 @@ class BAG_API SimpleLayerDescriptor final : public LayerDescriptor { public: static std::shared_ptr create(const Dataset& dataset, - LayerType type, uint64_t chunkSize, int compressionLevel); + LayerType type, uint32_t rows, uint32_t cols, + uint64_t chunkSize, int compressionLevel); static std::shared_ptr open(const Dataset& dataset, - LayerType type); + LayerType type, uint32_t rows, uint32_t cols); SimpleLayerDescriptor(const SimpleLayerDescriptor&) = delete; SimpleLayerDescriptor(SimpleLayerDescriptor&&) = delete; @@ -36,9 +37,11 @@ class BAG_API SimpleLayerDescriptor final : public LayerDescriptor } protected: - SimpleLayerDescriptor(uint32_t id, LayerType type, uint64_t chunkSize, + SimpleLayerDescriptor(uint32_t id, LayerType type, + uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel); - SimpleLayerDescriptor(const Dataset& dataset, LayerType type); + SimpleLayerDescriptor(const Dataset& dataset, LayerType type, + uint32_t rows, uint32_t cols); private: DataType getDataTypeProxy() const noexcept override; diff --git a/api/bag_surfacecorrections.cpp 
b/api/bag_surfacecorrections.cpp index 7b6f4c912e..10be24b4ac 100644 --- a/api/bag_surfacecorrections.cpp +++ b/api/bag_surfacecorrections.cpp @@ -358,7 +358,7 @@ UInt8Array SurfaceCorrections::readCorrectedRow( --corrector; // This is 0 based when used. auto originalRow = layer.read(row, row, columnStart, columnEnd); - auto* data = reinterpret_cast(originalRow.data()); + auto data = reinterpret_cast(originalRow.data()); // Obtain cell resolution and SW origin (0,1,1,0). double swCornerX = 0., swCornerY = 0.; diff --git a/api/bag_surfacecorrectionsdescriptor.cpp b/api/bag_surfacecorrectionsdescriptor.cpp index 6e1057cb22..2039d299dd 100644 --- a/api/bag_surfacecorrectionsdescriptor.cpp +++ b/api/bag_surfacecorrectionsdescriptor.cpp @@ -54,7 +54,7 @@ SurfaceCorrectionsDescriptor::SurfaceCorrectionsDescriptor( int compressionLevel) : LayerDescriptor(id, Layer::getInternalPath(Surface_Correction), kLayerTypeMapString.at(Surface_Correction), Surface_Correction, - chunkSize, compressionLevel) + 0, 0, chunkSize, compressionLevel) // Dims default to 0,0 like derived type , m_surfaceType(type) , m_elementSize(BAG::getElementSize(type)) , m_numCorrectors(numCorrectors) @@ -68,7 +68,7 @@ SurfaceCorrectionsDescriptor::SurfaceCorrectionsDescriptor( */ SurfaceCorrectionsDescriptor::SurfaceCorrectionsDescriptor( const Dataset& dataset) - : LayerDescriptor(dataset, Surface_Correction) + : LayerDescriptor(dataset, Surface_Correction, 0, 0) // Dims set in body { const auto h5dataSet = dataset.getH5file().openDataSet( Layer::getInternalPath(Surface_Correction)); @@ -279,6 +279,7 @@ SurfaceCorrectionsDescriptor& SurfaceCorrectionsDescriptor::setDims( { m_numRows = numRows; m_numColumns = numColumns; + LayerDescriptor::setDims(numRows, numColumns); return *this; } diff --git a/api/bag_vrmetadata.cpp b/api/bag_vrmetadata.cpp index 3c77c222fb..73dda33d3e 100644 --- a/api/bag_vrmetadata.cpp +++ b/api/bag_vrmetadata.cpp @@ -373,6 +373,12 @@ void VRMetadata::writeProxy( auto pDataset = 
this->getDataset().lock(); pDataset->getDescriptor().setDims(static_cast(newDims[0]), static_cast(newDims[1])); + // The file descriptor is global (and the size of the mandatory layers) and specified + // in the metadata; each layer has its own size, however, which we need to update. In + // this case, the VRMetadataDescriptor has the same dimensions as the mandatory layer + // (since there should be a refinement for each fixed-resolution cell), so it's formally + // redundant. But we want to make sure that it's consistent, so ... + pDescriptor->setDims(fileDims[0], fileDims[1]); } fileDataSpace.selectHyperslab(H5S_SELECT_SET, count.data(), offset.data()); diff --git a/api/bag_vrmetadatadescriptor.cpp b/api/bag_vrmetadatadescriptor.cpp index 20aca27cec..4c64885f7c 100644 --- a/api/bag_vrmetadatadescriptor.cpp +++ b/api/bag_vrmetadatadescriptor.cpp @@ -17,11 +17,12 @@ namespace BAG { */ VRMetadataDescriptor::VRMetadataDescriptor( uint32_t id, + uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel) : LayerDescriptor(id, VR_METADATA_PATH, - kLayerTypeMapString.at(VarRes_Metadata), VarRes_Metadata, chunkSize, - compressionLevel) + kLayerTypeMapString.at(VarRes_Metadata), VarRes_Metadata, + rows, cols, chunkSize, compressionLevel) { } @@ -31,8 +32,9 @@ VRMetadataDescriptor::VRMetadataDescriptor( The BAG Dataset this layer belongs to. */ VRMetadataDescriptor::VRMetadataDescriptor( - const Dataset& dataset) - : LayerDescriptor(dataset, VarRes_Metadata, VR_METADATA_PATH) + const Dataset& dataset, + uint32_t rows, uint32_t cols) + : LayerDescriptor(dataset, VarRes_Metadata, rows, cols, VR_METADATA_PATH) { } @@ -53,9 +55,15 @@ std::shared_ptr VRMetadataDescriptor::create( uint64_t chunkSize, int compressionLevel) { + // The VRMetadataLayer has the same dimensions as the overall BAG file + // (since there should be one element for each cell in the mandatory + // layers). 
Reading this from the dataset layer descriptor enforces this + // and keeps the call signature simpler. + uint32_t rows, cols; + std::tie(rows, cols) = dataset.getDescriptor().getDims(); return std::shared_ptr( - new VRMetadataDescriptor{dataset.getNextId(), chunkSize, - compressionLevel}); + new VRMetadataDescriptor{dataset.getNextId(), rows, cols, + chunkSize, compressionLevel}); } //! Open an existing variable resolution metadata descriptor. @@ -69,8 +77,14 @@ std::shared_ptr VRMetadataDescriptor::create( std::shared_ptr VRMetadataDescriptor::open( const Dataset& dataset) { + // The VRMetadataLayer has the same dimensions as the overall BAG file + // (since there should be one element for each cell in the mandatory + // layers). Reading this from the dataset layer descriptor enforces this + // and keeps the call signature simpler. + uint32_t rows, cols; + std::tie(rows, cols) = dataset.getDescriptor().getDims(); return std::shared_ptr( - new VRMetadataDescriptor{dataset}); + new VRMetadataDescriptor{dataset, rows, cols}); } diff --git a/api/bag_vrmetadatadescriptor.h b/api/bag_vrmetadatadescriptor.h index 98718c0065..e661993b25 100644 --- a/api/bag_vrmetadatadescriptor.h +++ b/api/bag_vrmetadatadescriptor.h @@ -44,9 +44,9 @@ class BAG_API VRMetadataDescriptor final : public LayerDescriptor VRMetadataDescriptor& setMinResolution(float minResX, float minResY) & noexcept; protected: - VRMetadataDescriptor(uint32_t id, uint64_t chunkSize, + VRMetadataDescriptor(uint32_t id, uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel); - explicit VRMetadataDescriptor(const Dataset& dataset); + explicit VRMetadataDescriptor(const Dataset& dataset, uint32_t rows, uint32_t cols); static std::shared_ptr create(const Dataset& dataset, uint64_t chunkSize, int compressionLevel); diff --git a/api/bag_vrnode.cpp b/api/bag_vrnode.cpp index dc9ba27a8c..044c54bd50 100644 --- a/api/bag_vrnode.cpp +++ b/api/bag_vrnode.cpp @@ -166,9 +166,17 @@ std::shared_ptr 
VRNode::open( descriptor.setMinMaxNSamples(minNSamples, maxNSamples); auto h5dataSet = std::unique_ptr<::H5::DataSet, DeleteH5dataSet>( - new ::H5::DataSet{h5file.openDataSet(VR_REFINEMENT_PATH)}, + new ::H5::DataSet{h5file.openDataSet(VR_NODE_PATH)}, DeleteH5dataSet{}); + // We need to know the dimensions of the array on file so that we can update the + // descriptor for the layer. + hsize_t dims[2]; // Should be 1D, but you never know ... + int ndims = h5dataSet->getSpace().getSimpleExtentDims(dims, nullptr); + if (ndims != 1) { + throw InvalidVRRefinementDimensions{}; + } + descriptor.setDims(1, dims[0]); return std::make_unique(dataset, descriptor, std::move(h5dataSet)); } @@ -354,7 +362,14 @@ void VRNode::writeProxy( throw DatasetNotFound{}; auto pDataset = this->getDataset().lock(); + // TODO: Confirm that this is what we want --- this resets the dimensions of the + // overall BAG, rather than the layer, which means that it's going to set the + // metadata size of the mandatory layers to 1xN ... which is odd. pDataset->getDescriptor().setDims(1, static_cast(newMaxLength)); + // So that the read() call checks correctly against the size of the array, rather + // than the dimensions of the mandatory layer, we need to keep track of the size + // of the layer in the layer-specific descriptor. 
+ pDescriptor->setDims(1, newMaxLength); } fileDataSpace.selectHyperslab(H5S_SELECT_SET, &columns, &offset); diff --git a/api/bag_vrnodedescriptor.cpp b/api/bag_vrnodedescriptor.cpp index aecb0f22ff..2f932619b5 100644 --- a/api/bag_vrnodedescriptor.cpp +++ b/api/bag_vrnodedescriptor.cpp @@ -17,11 +17,13 @@ namespace BAG { */ VRNodeDescriptor::VRNodeDescriptor( uint32_t id, + uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel) : LayerDescriptor(id, VR_NODE_PATH, - kLayerTypeMapString.at(VarRes_Node), VarRes_Node, chunkSize, - compressionLevel) + kLayerTypeMapString.at(VarRes_Node), VarRes_Node, + rows, cols, + chunkSize, compressionLevel) { } @@ -31,8 +33,9 @@ VRNodeDescriptor::VRNodeDescriptor( The BAG Dataset this layer belongs to. */ VRNodeDescriptor::VRNodeDescriptor( - const Dataset& dataset) - : LayerDescriptor(dataset, VarRes_Node, VR_NODE_PATH) + const Dataset& dataset, + uint32_t rows, uint32_t cols) + : LayerDescriptor(dataset, VarRes_Node, rows, cols, VR_NODE_PATH) { } @@ -55,7 +58,7 @@ std::shared_ptr VRNodeDescriptor::create( int compressionLevel) { return std::shared_ptr( - new VRNodeDescriptor{dataset.getNextId(), chunkSize, + new VRNodeDescriptor{dataset.getNextId(), 1, 0, chunkSize, compressionLevel}); } @@ -65,13 +68,12 @@ std::shared_ptr VRNodeDescriptor::create( The BAG Dataset this layer belongs to. */ std::shared_ptr VRNodeDescriptor::open( - const Dataset& dataset) + const Dataset& dataset, uint32_t rows, uint32_t cols) { return std::shared_ptr( - new VRNodeDescriptor{dataset}); + new VRNodeDescriptor{dataset, rows, cols}); } - //! 
\copydoc LayerDescriptor::getDataType DataType VRNodeDescriptor::getDataTypeProxy() const noexcept { diff --git a/api/bag_vrnodedescriptor.h b/api/bag_vrnodedescriptor.h index cd8fd526c9..8de0fe7ed5 100644 --- a/api/bag_vrnodedescriptor.h +++ b/api/bag_vrnodedescriptor.h @@ -43,14 +43,15 @@ class BAG_API VRNodeDescriptor final : public LayerDescriptor uint32_t maxNumHypotheses) & noexcept; protected: - VRNodeDescriptor(uint32_t id, uint64_t chunkSize, + VRNodeDescriptor(uint32_t id, uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel); - explicit VRNodeDescriptor(const Dataset& dataset); + explicit VRNodeDescriptor(const Dataset& dataset, uint32_t rows, uint32_t cols); static std::shared_ptr create(const Dataset& dataset, uint64_t chunkSize, int compressionLevel); - static std::shared_ptr open(const Dataset& dataset); + static std::shared_ptr open(const Dataset& dataset, + uint32_t rows, uint32_t cols); private: DataType getDataTypeProxy() const noexcept override; diff --git a/api/bag_vrrefinements.cpp b/api/bag_vrrefinements.cpp index 291412208d..6cc2bb7294 100644 --- a/api/bag_vrrefinements.cpp +++ b/api/bag_vrrefinements.cpp @@ -59,10 +59,10 @@ T readAttribute( The layer's descriptor. Will never be nullptr. */ - std::shared_ptr VRRefinements::getDescriptor() & noexcept - { - return std::dynamic_pointer_cast(Layer::getDescriptor()); - } +std::shared_ptr VRRefinements::getDescriptor() & noexcept +{ + return std::dynamic_pointer_cast(Layer::getDescriptor()); +} //! Retrieve the layer's descriptor. Note: this shadows BAG::Layer.getDescriptor() /*! @@ -70,9 +70,10 @@ T readAttribute( The layer's descriptor. Will never be nullptr. */ - std::shared_ptr VRRefinements::getDescriptor() const & noexcept { - return std::dynamic_pointer_cast(Layer::getDescriptor()); - } +std::shared_ptr VRRefinements::getDescriptor() const & noexcept +{ + return std::dynamic_pointer_cast(Layer::getDescriptor()); +} //! Constructor. /*! 
@@ -151,6 +152,12 @@ std::unique_ptr VRRefinements::open( new ::H5::DataSet{h5file.openDataSet(VR_REFINEMENT_PATH)}, DeleteH5dataSet{}); + hsize_t dims[2]; + int ndims = h5dataSet->getSpace().getSimpleExtentDims(dims, nullptr); + if (ndims != 1) { + throw InvalidVRRefinementDimensions{}; + } + descriptor.setDims(1, dims[0]); return std::unique_ptr(new VRRefinements{dataset, descriptor, std::move(h5dataSet)}); } @@ -321,7 +328,14 @@ void VRRefinements::writeProxy( throw DatasetNotFound{}; auto pDataset = this->getDataset().lock(); + // TODO: Confirm that this is what we want --- this resets the dimensions of the + // overall BAG, rather than the layer, which means that it's going to set the + // metadata size of the mandatory layers to 1xN ... which is odd. pDataset->getDescriptor().setDims(1, static_cast(newMaxLength)); + // So that the read() call checks correctly against the size of the array, rather + // than the dimensions of the mandatory layer, we need to keep track of the size + // of the layer in the layer-specific descriptor. + pDescriptor->setDims(1, newMaxLength); } fileDataSpace.selectHyperslab(H5S_SELECT_SET, &columns, &offset); diff --git a/api/bag_vrrefinementsdescriptor.cpp b/api/bag_vrrefinementsdescriptor.cpp index 8a5630daca..e478e727ed 100644 --- a/api/bag_vrrefinementsdescriptor.cpp +++ b/api/bag_vrrefinementsdescriptor.cpp @@ -17,11 +17,13 @@ namespace BAG { */ VRRefinementsDescriptor::VRRefinementsDescriptor( uint32_t id, + uint32_t rows, uint32_t cols, uint64_t chunkSize, int compressionLevel) : LayerDescriptor(id, VR_REFINEMENT_PATH, - kLayerTypeMapString.at(VarRes_Refinement), VarRes_Refinement, chunkSize, - compressionLevel) + kLayerTypeMapString.at(VarRes_Refinement), VarRes_Refinement, + rows, cols, + chunkSize, compressionLevel) { } @@ -31,8 +33,8 @@ VRRefinementsDescriptor::VRRefinementsDescriptor( The BAG Dataset this layer belongs to. 
*/ VRRefinementsDescriptor::VRRefinementsDescriptor( - const Dataset& dataset) - : LayerDescriptor(dataset, VarRes_Refinement, VR_REFINEMENT_PATH) + const Dataset& dataset, uint32_t rows, uint32_t cols) + : LayerDescriptor(dataset, VarRes_Refinement, rows, cols, VR_REFINEMENT_PATH) { } @@ -54,7 +56,7 @@ std::shared_ptr VRRefinementsDescriptor::create( int compressionLevel) { return std::shared_ptr( - new VRRefinementsDescriptor{dataset.getNextId(), chunkSize, + new VRRefinementsDescriptor{dataset.getNextId(), 1, 0, chunkSize, compressionLevel}); } @@ -67,10 +69,10 @@ std::shared_ptr VRRefinementsDescriptor::create( The existing variable resolution refinements descriptor. */ std::shared_ptr VRRefinementsDescriptor::open( - const Dataset& dataset) + const Dataset& dataset, uint32_t rows, uint32_t cols) { return std::shared_ptr( - new VRRefinementsDescriptor{dataset}); + new VRRefinementsDescriptor{dataset, rows, cols}); } diff --git a/api/bag_vrrefinementsdescriptor.h b/api/bag_vrrefinementsdescriptor.h index 9fa283adc9..0c7190aa93 100644 --- a/api/bag_vrrefinementsdescriptor.h +++ b/api/bag_vrrefinementsdescriptor.h @@ -38,14 +38,15 @@ class BAG_API VRRefinementsDescriptor final : public LayerDescriptor float maxUncertainty) & noexcept; protected: - VRRefinementsDescriptor(uint32_t id, uint64_t chunkSize, - int compressionLevel); - explicit VRRefinementsDescriptor(const Dataset& dataset); + VRRefinementsDescriptor(uint32_t id, uint32_t rows, uint32_t cols, + uint64_t chunkSize, int compressionLevel); + explicit VRRefinementsDescriptor(const Dataset& dataset, uint32_t rows, uint32_t cols); static std::shared_ptr create(const Dataset& dataset, uint64_t chunkSize, int compressionLevel); - static std::shared_ptr open(const Dataset& dataset); + static std::shared_ptr open(const Dataset& dataset, + uint32_t rows, uint32_t cols); private: DataType getDataTypeProxy() const noexcept override; From afa60bc345a6ebf28562d1bf3c5b2cc9f8fce077 Mon Sep 17 00:00:00 2001 From: 
brian-r-calder Date: Tue, 18 Jun 2024 18:15:05 -0400 Subject: [PATCH 02/25] Bug-fixes for VR readers VR data in field BAG files appears to contain 2D arrays rather than the 1D version that they should; modified code to follow this model, rather than the standard so that source data can be read. Replaced deprecated sprintf() with snprintf(). --- api/bag_metadata_export.cpp | 2 +- api/bag_metadata_import.cpp | 2 +- api/bag_vrnode.cpp | 21 +++++++++++---------- api/bag_vrrefinements.cpp | 20 ++++++++++++-------- 4 files changed, 25 insertions(+), 20 deletions(-) diff --git a/api/bag_metadata_export.cpp b/api/bag_metadata_export.cpp index 45fc3ec2f4..50087c87e9 100644 --- a/api/bag_metadata_export.cpp +++ b/api/bag_metadata_export.cpp @@ -993,7 +993,7 @@ bool addSpatialRepresentation(xmlNode &parentNode, const BagSpatialRepresentatio xmlSetProp(pPointNode, XMLCast("gml:id"), XMLCast("id1")); char pointsString[88]; - sprintf(pointsString, "%.12lf,%.12lf %.12lf,%.12lf", spatialRepresentationInfo.llCornerX, spatialRepresentationInfo.llCornerY, spatialRepresentationInfo.urCornerX, spatialRepresentationInfo.urCornerY); + snprintf(pointsString, 88, "%.12lf,%.12lf %.12lf,%.12lf", spatialRepresentationInfo.llCornerX, spatialRepresentationInfo.llCornerY, spatialRepresentationInfo.urCornerX, spatialRepresentationInfo.urCornerY); xmlNode *pCoordNode = xmlNewChild(pPointNode, pGmlNamespace, XMLCast("coordinates"), EncodedString(*parentNode.doc, pointsString)); xmlSetProp(pCoordNode, XMLCast("decimal"), XMLCast(".")); diff --git a/api/bag_metadata_import.cpp b/api/bag_metadata_import.cpp index 69cfa6df8a..20c15875f1 100644 --- a/api/bag_metadata_import.cpp +++ b/api/bag_metadata_import.cpp @@ -1247,7 +1247,7 @@ bool decodeReferenceSystemInfoFromSpatial( { char buffer[2048]; - sprintf(buffer, "%d", epsg); + snprintf(buffer, 2048, "%d", epsg); referenceSystemInfo->definition = copyString(buffer); referenceSystemInfo->type = copyString("EPSG"); diff --git a/api/bag_vrnode.cpp 
b/api/bag_vrnode.cpp index 044c54bd50..0507d97317 100644 --- a/api/bag_vrnode.cpp +++ b/api/bag_vrnode.cpp @@ -173,12 +173,11 @@ std::shared_ptr VRNode::open( // descriptor for the layer. hsize_t dims[2]; // Should be 1D, but you never know ... int ndims = h5dataSet->getSpace().getSimpleExtentDims(dims, nullptr); - if (ndims != 1) { + if (ndims != 2) { throw InvalidVRRefinementDimensions{}; } - descriptor.setDims(1, dims[0]); - return std::make_unique(dataset, - descriptor, std::move(h5dataSet)); + descriptor.setDims(dims[0], dims[1]); + return std::make_unique(dataset, descriptor, std::move(h5dataSet)); } @@ -271,18 +270,20 @@ UInt8Array VRNode::readProxy( const hsize_t columns = (columnEnd - columnStart) + 1; const hsize_t offset = columnStart; - const auto fileDataSpace = m_pH5dataSet->getSpace(); - fileDataSpace.selectHyperslab(H5S_SELECT_SET, &columns, &offset); + const std::array sizes{1, columns}; + const std::array offsets{0, offset}; + + const auto h5fileDataSpace = m_pH5dataSet->getSpace(); + h5fileDataSpace.selectHyperslab(H5S_SELECT_SET, sizes.data(), offsets.data()); - const auto bufferSize = pDescriptor->getReadBufferSize(1, - static_cast(columns)); + const auto bufferSize = pDescriptor->getReadBufferSize(1, columns); UInt8Array buffer{bufferSize}; - const ::H5::DataSpace memDataSpace{1, &columns, &columns}; + const ::H5::DataSpace memDataSpace{kRank, sizes.data(), sizes.data()}; const auto memDataType = makeDataType(); - m_pH5dataSet->read(buffer.data(), memDataType, memDataSpace, fileDataSpace); + m_pH5dataSet->read(buffer.data(), memDataType, memDataSpace, h5fileDataSpace); return buffer; } diff --git a/api/bag_vrrefinements.cpp b/api/bag_vrrefinements.cpp index 6cc2bb7294..0f00285983 100644 --- a/api/bag_vrrefinements.cpp +++ b/api/bag_vrrefinements.cpp @@ -4,6 +4,7 @@ #include "bag_vrrefinements.h" #include "bag_vrrefinementsdescriptor.h" +#include #include #include //memset #include @@ -154,10 +155,10 @@ std::unique_ptr VRRefinements::open( 
hsize_t dims[2]; int ndims = h5dataSet->getSpace().getSimpleExtentDims(dims, nullptr); - if (ndims != 1) { + if (ndims != 2) { throw InvalidVRRefinementDimensions{}; } - descriptor.setDims(1, dims[0]); + descriptor.setDims(dims[0], dims[1]); return std::unique_ptr(new VRRefinements{dataset, descriptor, std::move(h5dataSet)}); } @@ -244,18 +245,20 @@ UInt8Array VRRefinements::readProxy( const hsize_t columns = (columnEnd - columnStart) + 1; const hsize_t offset = columnStart; - const auto fileDataSpace = m_pH5dataSet->getSpace(); - fileDataSpace.selectHyperslab(H5S_SELECT_SET, &columns, &offset); + const std::array sizes{1, columns}; + const std::array offsets{0, offset}; + + const auto h5fileDataSpace = m_pH5dataSet->getSpace(); + h5fileDataSpace.selectHyperslab(H5S_SELECT_SET, sizes.data(), offsets.data()); - const auto bufferSize = pDescriptor->getReadBufferSize(1, - static_cast(columns)); + const auto bufferSize = pDescriptor->getReadBufferSize(1, columns); UInt8Array buffer{bufferSize}; - const ::H5::DataSpace memDataSpace{1, &columns, &columns}; + const ::H5::DataSpace memDataSpace{kRank, sizes.data(), sizes.data()}; const auto memDataType = makeDataType(); - m_pH5dataSet->read(buffer.data(), memDataType, memDataSpace, fileDataSpace); + m_pH5dataSet->read(buffer.data(), memDataType, memDataSpace, h5fileDataSpace); return buffer; } @@ -313,6 +316,7 @@ void VRRefinements::writeProxy( const int numDims = fileDataSpace.getSimpleExtentDims(fileLength.data(), maxFileLength.data()); if (numDims != 1) + std::cout << "Number of dimensions for VRRefinements = " << numDims << std::endl; throw InvalidVRRefinementDimensions{}; if (fileLength[0] < (columnEnd + 1)) From ca684a568bafb89d73d96a9bb93679b964b26673 Mon Sep 17 00:00:00 2001 From: brian-r-calder Date: Tue, 25 Jun 2024 10:30:00 -0400 Subject: [PATCH 03/25] Update bag_vrrefinements.cpp WIP debugging to confirm location of memory leak causing heap-bombs elsewhere (commit purely to allow publication and other support 
activities). --- api/bag_vrrefinements.cpp | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/api/bag_vrrefinements.cpp b/api/bag_vrrefinements.cpp index 0f00285983..225e2d8975 100644 --- a/api/bag_vrrefinements.cpp +++ b/api/bag_vrrefinements.cpp @@ -9,7 +9,6 @@ #include //memset #include - namespace BAG { namespace { @@ -252,7 +251,10 @@ UInt8Array VRRefinements::readProxy( h5fileDataSpace.selectHyperslab(H5S_SELECT_SET, sizes.data(), offsets.data()); const auto bufferSize = pDescriptor->getReadBufferSize(1, columns); - UInt8Array buffer{bufferSize}; + UInt8Array buffer{bufferSize*2}; + { + std::cout << "Reading " << columns << " VRRefinements into array of size " << buffer.size() << "B" << std::endl; + } const ::H5::DataSpace memDataSpace{kRank, sizes.data(), sizes.data()}; From 78a12056ded10cf71682ce1e3a7e4ae95fb37035 Mon Sep 17 00:00:00 2001 From: brian-r-calder Date: Wed, 26 Jun 2024 18:02:27 -0400 Subject: [PATCH 04/25] Adjustment to remove debugging code after initial tests. --- api/bag_vrrefinements.cpp | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/api/bag_vrrefinements.cpp b/api/bag_vrrefinements.cpp index 225e2d8975..3f2c7f211c 100644 --- a/api/bag_vrrefinements.cpp +++ b/api/bag_vrrefinements.cpp @@ -251,11 +251,8 @@ UInt8Array VRRefinements::readProxy( h5fileDataSpace.selectHyperslab(H5S_SELECT_SET, sizes.data(), offsets.data()); const auto bufferSize = pDescriptor->getReadBufferSize(1, columns); - UInt8Array buffer{bufferSize*2}; - { - std::cout << "Reading " << columns << " VRRefinements into array of size " << buffer.size() << "B" << std::endl; - } - + UInt8Array buffer{bufferSize}; + const ::H5::DataSpace memDataSpace{kRank, sizes.data(), sizes.data()}; const auto memDataType = makeDataType(); From b45aecff6cda06882c89b0952e896944ebac715a Mon Sep 17 00:00:00 2001 From: selimnairb Date: Fri, 22 Nov 2024 13:33:12 -0500 Subject: [PATCH 05/25] Fix tests broken by addressing #109. 
--- tests/test_bag_simplelayerdescriptor.cpp | 90 +++++++++++++----------- 1 file changed, 48 insertions(+), 42 deletions(-) diff --git a/tests/test_bag_simplelayerdescriptor.cpp b/tests/test_bag_simplelayerdescriptor.cpp index 3c0797d3c3..4723b07c71 100644 --- a/tests/test_bag_simplelayerdescriptor.cpp +++ b/tests/test_bag_simplelayerdescriptor.cpp @@ -9,6 +9,7 @@ #include #include +#include using BAG::Dataset; @@ -315,6 +316,11 @@ const std::string kMetadataXML{R"( )"}; +constexpr const uint32_t kRows = 30; +constexpr const uint32_t kCols = 40; +constexpr uint64_t kExpectedChunkSize = 100; +constexpr unsigned int kExpectedCompressionLevel = 6; + } // namespace // NOTE The base class is also tested here. @@ -328,26 +334,30 @@ TEST_CASE("test layer descriptor creation", Metadata metadata; metadata.loadFromBuffer(kMetadataXML); - constexpr uint64_t kExpectedChunkSize = 100; - constexpr unsigned int kExpectedCompressionLevel = 6; - auto pDataset = Dataset::create(tmpBagFile, std::move(metadata), kExpectedChunkSize, kExpectedCompressionLevel); REQUIRE(pDataset); UNSCOPED_INFO("Check that creating a simple layer descriptor returns something."); auto pDescriptor = SimpleLayerDescriptor::create(*pDataset, Elevation, - kExpectedChunkSize, kExpectedCompressionLevel); + kRows, kCols, + kExpectedChunkSize, + kExpectedCompressionLevel); REQUIRE(pDescriptor); UNSCOPED_INFO("Check that the layer descriptor type matches that was created."); CHECK(pDescriptor->getLayerType() == Elevation); + auto dims = pDescriptor->getDims(); + UNSCOPED_INFO("Check that the layer descriptor dimensions match that which was created."); + CHECK( (std::get<0>(dims) == kRows && std::get<1>(dims) == kCols) ); + UNSCOPED_INFO("Check the chunk size is read properly."); CHECK(pDescriptor->getChunkSize() == kExpectedChunkSize); UNSCOPED_INFO("Check the compression level is read properly."); CHECK(pDescriptor->getCompressionLevel() == kExpectedCompressionLevel); + } // const std::string& getName() const & 
noexcept; @@ -360,16 +370,15 @@ TEST_CASE("test layer descriptor get/set name", Metadata metadata; metadata.loadFromBuffer(kMetadataXML); - constexpr uint64_t kExpectedChunkSize = 100; - constexpr unsigned int kExpectedCompressionLevel = 6; - auto pDataset = Dataset::create(tmpBagFile, std::move(metadata), kExpectedChunkSize, kExpectedCompressionLevel); REQUIRE(pDataset); UNSCOPED_INFO("Check that creating a simple layer descriptor returns something."); const auto pDescriptor = SimpleLayerDescriptor::create(*pDataset, Elevation, - kExpectedChunkSize, kExpectedCompressionLevel); + kRows, kCols, + kExpectedChunkSize, + kExpectedCompressionLevel); REQUIRE(pDescriptor); const std::string kExpectedName{"Expected Name"}; @@ -389,23 +398,24 @@ TEST_CASE("test layer descriptor get data type", Metadata metadata; metadata.loadFromBuffer(kMetadataXML); - constexpr uint64_t kExpectedChunkSize = 100; - constexpr unsigned int kExpectedCompressionLevel = 6; - auto pDataset = Dataset::create(tmpBagFile, std::move(metadata), kExpectedChunkSize, kExpectedCompressionLevel); REQUIRE(pDataset); UNSCOPED_INFO("Check that creating a simple layer descriptor returns something."); auto pDescriptor = SimpleLayerDescriptor::create(*pDataset, Elevation, - kExpectedChunkSize, kExpectedCompressionLevel); + kRows, kCols, + kExpectedChunkSize, + kExpectedCompressionLevel); REQUIRE(pDescriptor); UNSCOPED_INFO("Verify the data type of an Elevation layer descriptor is correct."); CHECK(pDescriptor->getDataType() == Layer::getDataType(Elevation)); pDescriptor = SimpleLayerDescriptor::create(*pDataset, Num_Hypotheses, - kExpectedChunkSize, kExpectedCompressionLevel); + kRows, kCols, + kExpectedChunkSize, + kExpectedCompressionLevel); UNSCOPED_INFO("Verify the data type of an Num_Hypotheses layer descriptor is correct."); CHECK(pDescriptor->getDataType() == Layer::getDataType(Num_Hypotheses)); @@ -420,23 +430,24 @@ TEST_CASE("test layer descriptor get layer type", Metadata metadata; 
metadata.loadFromBuffer(kMetadataXML); - constexpr uint64_t kExpectedChunkSize = 100; - constexpr unsigned int kExpectedCompressionLevel = 6; - auto pDataset = Dataset::create(tmpBagFile, std::move(metadata), kExpectedChunkSize, kExpectedCompressionLevel); REQUIRE(pDataset); UNSCOPED_INFO("Check that creating a simple layer descriptor returns something."); auto pDescriptor = SimpleLayerDescriptor::create(*pDataset, Elevation, - kExpectedChunkSize, kExpectedCompressionLevel); + kRows, kCols, + kExpectedChunkSize, + kExpectedCompressionLevel); REQUIRE(pDescriptor); UNSCOPED_INFO("Verify the layer type of an Elevation layer descriptor is correct."); CHECK(pDescriptor->getLayerType() == Elevation); pDescriptor = SimpleLayerDescriptor::create(*pDataset, Std_Dev, - kExpectedChunkSize, kExpectedCompressionLevel); + kRows, kCols, + kExpectedChunkSize, + kExpectedCompressionLevel); REQUIRE(pDescriptor); UNSCOPED_INFO("Verify the layer type of an Std_Dev layer descriptor is correct."); @@ -453,16 +464,15 @@ TEST_CASE("test layer descriptor get/set min max", Metadata metadata; metadata.loadFromBuffer(kMetadataXML); - constexpr uint64_t kExpectedChunkSize = 100; - constexpr unsigned int kExpectedCompressionLevel = 6; - auto pDataset = Dataset::create(tmpBagFile, std::move(metadata), kExpectedChunkSize, kExpectedCompressionLevel); REQUIRE(pDataset); UNSCOPED_INFO("Check that creating a simple layer descriptor returns something."); auto pDescriptor = SimpleLayerDescriptor::create(*pDataset, Elevation, - kExpectedChunkSize, kExpectedCompressionLevel); + kRows, kCols, + kExpectedChunkSize, + kExpectedCompressionLevel); REQUIRE(pDescriptor); UNSCOPED_INFO("Verify setting min max does not throw."); @@ -484,16 +494,15 @@ TEST_CASE("test layer descriptor get internal path", Metadata metadata; metadata.loadFromBuffer(kMetadataXML); - constexpr uint64_t kExpectedChunkSize = 100; - constexpr unsigned int kExpectedCompressionLevel = 6; - auto pDataset = Dataset::create(tmpBagFile, 
std::move(metadata), kExpectedChunkSize, kExpectedCompressionLevel); REQUIRE(pDataset); UNSCOPED_INFO("Check that creating a simple layer descriptor returns something."); - auto pDescriptor = SimpleLayerDescriptor::create(*pDataset,Elevation, - kExpectedChunkSize, kExpectedCompressionLevel); + auto pDescriptor = SimpleLayerDescriptor::create(*pDataset, Elevation, + kRows, kCols, + kExpectedChunkSize, + kExpectedCompressionLevel); REQUIRE(pDescriptor); UNSCOPED_INFO("Verify Elevation internal path is as expected."); @@ -501,7 +510,9 @@ TEST_CASE("test layer descriptor get internal path", CHECK(pDescriptor->getInternalPath() == Layer::getInternalPath(Elevation)); pDescriptor = SimpleLayerDescriptor::create(*pDataset, Uncertainty, - kExpectedChunkSize, kExpectedCompressionLevel); + kRows, kCols, + kExpectedChunkSize, + kExpectedCompressionLevel); REQUIRE(pDescriptor); UNSCOPED_INFO("Verify Uncertainty internal path is as expected."); @@ -518,16 +529,15 @@ TEST_CASE("test layer descriptor get element size", Metadata metadata; metadata.loadFromBuffer(kMetadataXML); - constexpr uint64_t kExpectedChunkSize = 100; - constexpr unsigned int kExpectedCompressionLevel = 6; - auto pDataset = Dataset::create(tmpBagFile, std::move(metadata), kExpectedChunkSize, kExpectedCompressionLevel); REQUIRE(pDataset); UNSCOPED_INFO("Check that creating a simple layer descriptor returns something."); auto pDescriptor = SimpleLayerDescriptor::create(*pDataset, Elevation, - kExpectedChunkSize, kExpectedCompressionLevel); + kRows, kCols, + kExpectedChunkSize, + kExpectedCompressionLevel); REQUIRE(pDescriptor); UNSCOPED_INFO("Verify Elevation element size is as expected."); @@ -545,16 +555,15 @@ TEST_CASE("test descriptor get chunk size", Metadata metadata; metadata.loadFromBuffer(kMetadataXML); - constexpr uint64_t kExpectedChunkSize = 100; - constexpr unsigned int kExpectedCompressionLevel = 6; - auto pDataset = Dataset::create(tmpBagFile, std::move(metadata), kExpectedChunkSize, 
kExpectedCompressionLevel); REQUIRE(pDataset); UNSCOPED_INFO("Check that creating a simple layer descriptor returns something."); auto pDescriptor = SimpleLayerDescriptor::create(*pDataset, Elevation, - kExpectedChunkSize, kExpectedCompressionLevel); + kRows, kCols, + kExpectedChunkSize, + kExpectedCompressionLevel); REQUIRE(pDescriptor); UNSCOPED_INFO("Verify getting the chunk size does not throw."); @@ -573,16 +582,15 @@ TEST_CASE("test descriptor get compression level", Metadata metadata; metadata.loadFromBuffer(kMetadataXML); - constexpr uint64_t kExpectedChunkSize = 100; - constexpr unsigned int kExpectedCompressionLevel = 6; - auto pDataset = Dataset::create(tmpBagFile, std::move(metadata), kExpectedChunkSize, kExpectedCompressionLevel); REQUIRE(pDataset); UNSCOPED_INFO("Check that creating a simple layer descriptor returns something."); auto pDescriptor = SimpleLayerDescriptor::create(*pDataset, Elevation, - kExpectedChunkSize, kExpectedCompressionLevel); + kRows, kCols, + kExpectedChunkSize, + kExpectedCompressionLevel); REQUIRE(pDescriptor); UNSCOPED_INFO("Verify getting the compression level does not throw."); @@ -591,5 +599,3 @@ TEST_CASE("test descriptor get compression level", UNSCOPED_INFO("Verify getting the compression level matches the expected."); CHECK(pDescriptor->getCompressionLevel() == kExpectedCompressionLevel); } - - From a3cbb273adfdf52aa7e42da4ef700d31ede064ee Mon Sep 17 00:00:00 2001 From: selimnairb Date: Fri, 6 Dec 2024 10:58:08 -0500 Subject: [PATCH 06/25] CI: Enable examples building for test reporting workflow --- .github/workflows/testreporting.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/testreporting.yml b/.github/workflows/testreporting.yml index 5ee706b64d..6d35909732 100644 --- a/.github/workflows/testreporting.yml +++ b/.github/workflows/testreporting.yml @@ -72,7 +72,7 @@ jobs: run: | export CC=${{env.CC}} export CXX=${{env.CXX}} - cmake -G Ninja 
-DCMAKE_BUILD_TYPE=${{env.BUILD_TYPE}} -B build -S . -DCMAKE_INSTALL_PREFIX=/usr/local -DBAG_BUILD_TESTS:BOOL=ON -DBAG_CODE_COVERAGE:BOOL=ON + cmake -G Ninja -DCMAKE_BUILD_TYPE=${{env.BUILD_TYPE}} -B build -S . -DCMAKE_INSTALL_PREFIX=/usr/local -DBAG_BUILD_EXAMPLES:BOOL=ON -DBAG_BUILD_TESTS:BOOL=ON -DBAG_CODE_COVERAGE:BOOL=ON - name: Build # Build your program with the given configuration From cea1c1511d86f681e6236a23418b8f35aa0ee810 Mon Sep 17 00:00:00 2001 From: Brian Calder Date: Sat, 21 Dec 2024 17:19:47 +0000 Subject: [PATCH 07/25] Modification to exception text for consistency When reporting exceptions for VR refinements not having the right number of dimensions, we have both 1D and 2D arrays that have to be handled (the 2D simply because we're patching around a bug in BAG creation in other software that doesn't use this code that's not following the specification correctly). The reporting of InvalidVRRefinementDimensions therefore has to be neutral on number of dimensions and report that the number is inconsistent. --- api/bag_exceptions.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/bag_exceptions.h b/api/bag_exceptions.h index 6f1979b2c4..31abc59058 100644 --- a/api/bag_exceptions.h +++ b/api/bag_exceptions.h @@ -441,7 +441,7 @@ struct BAG_API InvalidVRRefinementDimensions final : virtual std::exception { const char* what() const noexcept override { - return "The variable resolution refinement layer is not 1 dimensional."; + return "The variable resolution refinement layer is inconsistent with specification."; } }; From c8b6515c760b11f56a38fdeb410e00afff68c73e Mon Sep 17 00:00:00 2001 From: selimnairb Date: Thu, 2 Jan 2025 13:22:41 -0500 Subject: [PATCH 08/25] Fix single-line if statement error; added braces. 
--- scripts/dev-cont-build-bag.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/dev-cont-build-bag.sh b/scripts/dev-cont-build-bag.sh index 5dc4778958..615f321244 100755 --- a/scripts/dev-cont-build-bag.sh +++ b/scripts/dev-cont-build-bag.sh @@ -5,7 +5,7 @@ set -ex # Abort on error. # Configure CMake cmake -G Ninja -DCMAKE_BUILD_TYPE=Debug -B build -S . \ - -DCMAKE_INSTALL_PREFIX=/usr -DBAG_BUILD_TESTS:BOOL=ON + -DCMAKE_INSTALL_PREFIX=/usr -DBAG_BUILD_TESTS:BOOL=ON -DBAG_BUILD_EXAMPLES:BOOL=ON # Build cmake --build build python3 -m pip wheel -w ./wheel/ ./build/api/swig/python From f9e186e2d3f43df27a67cdac4b28402ba972e15e Mon Sep 17 00:00:00 2001 From: selimnairb Date: Thu, 2 Jan 2025 13:42:10 -0500 Subject: [PATCH 09/25] Fix single-line if statement error; added braces. --- api/bag_vrrefinements.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/api/bag_vrrefinements.cpp b/api/bag_vrrefinements.cpp index 3f2c7f211c..fdcded58b2 100644 --- a/api/bag_vrrefinements.cpp +++ b/api/bag_vrrefinements.cpp @@ -314,9 +314,10 @@ void VRRefinements::writeProxy( ::H5::DataSpace fileDataSpace = m_pH5dataSet->getSpace(); const int numDims = fileDataSpace.getSimpleExtentDims(fileLength.data(), maxFileLength.data()); - if (numDims != 1) + if (numDims != 1) { std::cout << "Number of dimensions for VRRefinements = " << numDims << std::endl; throw InvalidVRRefinementDimensions{}; + } if (fileLength[0] < (columnEnd + 1)) { From 7c75ebf2cd152c87468759cabcd3927893b9b8d4 Mon Sep 17 00:00:00 2001 From: selimnairb Date: Thu, 2 Jan 2025 14:09:16 -0500 Subject: [PATCH 10/25] CI: test reporting: Make sure test output is reported to the console, in addition to XML file on filesystem, to provide diagnostic info when tests crash --- .github/workflows/testreporting.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/testreporting.yml b/.github/workflows/testreporting.yml index 6d35909732..dff36d2adc 100644 --- 
a/.github/workflows/testreporting.yml +++ b/.github/workflows/testreporting.yml @@ -83,7 +83,7 @@ jobs: - name: Run tests run: | - BAG_SAMPLES_PATH=${{github.workspace}}/examples/sample-data ./build/tests/bag_tests_d -r junit -o build/tests/bag_tests-testreport.xml + BAG_SAMPLES_PATH=${{github.workspace}}/examples/sample-data ./build/tests/bag_tests_d -r junit | tee build/tests/bag_tests-testreport.xml - name: Test Reporter uses: mikepenz/action-junit-report@v5 From 2cc630231862def3d4b71a6328e6e176427aa24a Mon Sep 17 00:00:00 2001 From: selimnairb Date: Thu, 2 Jan 2025 14:50:06 -0500 Subject: [PATCH 11/25] Support VR elements stored in either 1D or 2D arrays to be compatible with common usage that is counter to the BAG spec (i.e. 1D) --- api/bag_vrnode.cpp | 5 +++-- api/bag_vrrefinements.cpp | 5 ++++- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/api/bag_vrnode.cpp b/api/bag_vrnode.cpp index 0507d97317..63bfad9f87 100644 --- a/api/bag_vrnode.cpp +++ b/api/bag_vrnode.cpp @@ -171,9 +171,10 @@ std::shared_ptr VRNode::open( // We need to know the dimensions of the array on file so that we can update the // descriptor for the layer. - hsize_t dims[2]; // Should be 1D, but you never know ... + hsize_t dims[2]; int ndims = h5dataSet->getSpace().getSimpleExtentDims(dims, nullptr); - if (ndims != 2) { + if (!(ndims == 1 || ndims == 2)) { + // Should be 1D according to BAG spec, but some implementations use a 2D array. throw InvalidVRRefinementDimensions{}; } descriptor.setDims(dims[0], dims[1]); diff --git a/api/bag_vrrefinements.cpp b/api/bag_vrrefinements.cpp index fdcded58b2..2835eccda2 100644 --- a/api/bag_vrrefinements.cpp +++ b/api/bag_vrrefinements.cpp @@ -152,9 +152,12 @@ std::unique_ptr VRRefinements::open( new ::H5::DataSet{h5file.openDataSet(VR_REFINEMENT_PATH)}, DeleteH5dataSet{}); + // We need to know the dimensions of the array on file so that we can update the + // descriptor for the layer.
hsize_t dims[2]; int ndims = h5dataSet->getSpace().getSimpleExtentDims(dims, nullptr); - if (ndims != 2) { + if (!(ndims == 1 || ndims == 2)) { + // Should be 1D according to BAG spec, but some implementations use a 2D array. throw InvalidVRRefinementDimensions{}; } descriptor.setDims(dims[0], dims[1]); From d49c72df8793a69e3a343f5413ea29c27df3246a Mon Sep 17 00:00:00 2001 From: selimnairb Date: Thu, 2 Jan 2025 15:42:19 -0500 Subject: [PATCH 12/25] test: Restructure test 'test vr metadata write read' to only attempt to read-back dta after it was written to disk in accordance with other tests (such as 'test vr metadata create open' in the same test file) --- tests/test_bag_vrmetadata.cpp | 103 ++++++++++++++++++---------------- 1 file changed, 55 insertions(+), 48 deletions(-) diff --git a/tests/test_bag_vrmetadata.cpp b/tests/test_bag_vrmetadata.cpp index 638f64cd67..cb5e710cdf 100644 --- a/tests/test_bag_vrmetadata.cpp +++ b/tests/test_bag_vrmetadata.cpp @@ -411,61 +411,68 @@ TEST_CASE("test vr metadata write read", "[vrmetadata][write][read]") { const TestUtils::RandomFileGuard tmpBagFile; - UNSCOPED_INFO("Check dataset was created successfully."); - constexpr uint64_t kChunkSize = 100; - constexpr unsigned int kCompressionLevel = 6; + constexpr uint32_t kRowStart = 0; + constexpr uint32_t kColumnStart = 0; + constexpr uint32_t kRowEnd = 0; + constexpr uint32_t kColumnEnd = 0; - BAG::Metadata metadata; - metadata.loadFromBuffer(kMetadataXML); + constexpr BAG::VRMetadataItem kExpectedItem0{ + 0, 1, 2, 3.45f, 6.789f, 1001.01f, 4004.004f}; - auto pDataset = Dataset::create(tmpBagFile, std::move(metadata), kChunkSize, - kCompressionLevel); - REQUIRE(pDataset); + { // test create + UNSCOPED_INFO("Check dataset was created successfully."); + constexpr uint64_t kChunkSize = 100; + constexpr unsigned int kCompressionLevel = 6; - UNSCOPED_INFO("Check creating variable resolution layers does not throw."); - REQUIRE_NOTHROW(pDataset->createVR(kChunkSize, kCompressionLevel, 
false)); + BAG::Metadata metadata; + metadata.loadFromBuffer(kMetadataXML); - UNSCOPED_INFO("Check the variable resolution metadata exists."); - auto pVrMetadata = pDataset->getVRMetadata(); - REQUIRE(pVrMetadata); + auto pDataset = Dataset::create(tmpBagFile, std::move(metadata), kChunkSize, + kCompressionLevel); + REQUIRE(pDataset); - UNSCOPED_INFO("Check VRMetadataDescriptor is the default descriptor."); - auto pVrMetadataDescriptor = - std::dynamic_pointer_cast( - pVrMetadata->getDescriptor()); - REQUIRE(pVrMetadataDescriptor); + UNSCOPED_INFO("Check creating variable resolution layers does not throw."); + REQUIRE_NOTHROW(pDataset->createVR(kChunkSize, kCompressionLevel, false)); - UNSCOPED_INFO("Write one record."); - constexpr BAG::VRMetadataItem kExpectedItem0{ - 0, 1, 2, 3.45f, 6.789f, 1001.01f, 4004.004f}; + UNSCOPED_INFO("Check the variable resolution metadata exists."); + auto pVrMetadata = pDataset->getVRMetadata(); + REQUIRE(pVrMetadata); - const auto* buffer = reinterpret_cast(&kExpectedItem0); - constexpr uint32_t kRowStart = 0; - constexpr uint32_t kColumnStart = 0; - constexpr uint32_t kRowEnd = 0; - constexpr uint32_t kColumnEnd = 0; + UNSCOPED_INFO("Check VRMetadataDescriptor is the default descriptor."); + auto pVrMetadataDescriptor = + std::dynamic_pointer_cast( + pVrMetadata->getDescriptor()); + REQUIRE(pVrMetadataDescriptor); + + UNSCOPED_INFO("Write one record."); + const auto *buffer = reinterpret_cast(&kExpectedItem0); + REQUIRE_NOTHROW(pVrMetadata->write(kRowStart, kColumnStart, kRowEnd, + kColumnEnd, buffer)); + } + + { // test open + auto pDataset = Dataset::open(tmpBagFile, BAG_OPEN_READONLY); - REQUIRE_NOTHROW(pVrMetadata->write(kRowStart, kColumnStart, kRowEnd, - kColumnEnd, buffer)); - - UNSCOPED_INFO("Read the record back."); - auto result = pVrMetadata->read(kRowStart, kColumnStart, kRowEnd, kColumnEnd); - CHECK(result); - - const auto* res = reinterpret_cast(result.data()); - UNSCOPED_INFO("Check the expected value of 
VRMetadataItem::index."); - CHECK(res->index == kExpectedItem0.index); - UNSCOPED_INFO("Check the expected value of VRMetadataItem::dimensions_x."); - CHECK(res->dimensions_x == kExpectedItem0.dimensions_x); - UNSCOPED_INFO("Check the expected value of VRMetadataItem::dimensions_y."); - CHECK(res->dimensions_y == kExpectedItem0.dimensions_y); - UNSCOPED_INFO("Check the expected value of VRMetadataItem::resolution_x."); - CHECK(res->resolution_x == kExpectedItem0.resolution_x); - UNSCOPED_INFO("Check the expected value of VRMetadataItem::resolution_y."); - CHECK(res->resolution_y == kExpectedItem0.resolution_y); - UNSCOPED_INFO("Check the expected value of VRMetadataItem::sw_corner_x."); - CHECK(res->sw_corner_x == kExpectedItem0.sw_corner_x); - UNSCOPED_INFO("Check the expected value of VRMetadataItem::sw_corner_y."); - CHECK(res->sw_corner_y == kExpectedItem0.sw_corner_y); + UNSCOPED_INFO("Read the record back."); + auto pVrMetadata = pDataset->getVRMetadata(); + auto result = pVrMetadata->read(kRowStart, kColumnStart, kRowEnd, kColumnEnd); + CHECK(result); + + const auto *res = reinterpret_cast(result.data()); + UNSCOPED_INFO("Check the expected value of VRMetadataItem::index."); + CHECK(res->index == kExpectedItem0.index); + UNSCOPED_INFO("Check the expected value of VRMetadataItem::dimensions_x."); + CHECK(res->dimensions_x == kExpectedItem0.dimensions_x); + UNSCOPED_INFO("Check the expected value of VRMetadataItem::dimensions_y."); + CHECK(res->dimensions_y == kExpectedItem0.dimensions_y); + UNSCOPED_INFO("Check the expected value of VRMetadataItem::resolution_x."); + CHECK(res->resolution_x == kExpectedItem0.resolution_x); + UNSCOPED_INFO("Check the expected value of VRMetadataItem::resolution_y."); + CHECK(res->resolution_y == kExpectedItem0.resolution_y); + UNSCOPED_INFO("Check the expected value of VRMetadataItem::sw_corner_x."); + CHECK(res->sw_corner_x == kExpectedItem0.sw_corner_x); + UNSCOPED_INFO("Check the expected value of 
VRMetadataItem::sw_corner_y."); + CHECK(res->sw_corner_y == kExpectedItem0.sw_corner_y); + } } From 3b91e1ebfe14645cecc9b46a744d748be1e78a46 Mon Sep 17 00:00:00 2001 From: selimnairb Date: Thu, 2 Jan 2025 16:14:12 -0500 Subject: [PATCH 13/25] SWIG: Python: first pass updating swig .i files to mirror changes to C++ header file public interfaces --- api/swig/include/bag_georefmetadatalayerdescriptor.i | 4 ++-- api/swig/include/bag_layerdescriptor.i | 9 +++++++++ api/swig/include/bag_simplelayerdescriptor.i | 7 ++++--- scripts/dev-cont-build-bag.sh | 2 +- 4 files changed, 16 insertions(+), 6 deletions(-) diff --git a/api/swig/include/bag_georefmetadatalayerdescriptor.i b/api/swig/include/bag_georefmetadatalayerdescriptor.i index 44acbc8617..f1fe8c20c2 100644 --- a/api/swig/include/bag_georefmetadatalayerdescriptor.i +++ b/api/swig/include/bag_georefmetadatalayerdescriptor.i @@ -34,8 +34,8 @@ class GeorefMetadataLayerDescriptor final : public LayerDescriptor public: static std::shared_ptr create(Dataset& dataset, const std::string& name, GeorefMetadataProfile profile, DataType indexType, - RecordDefinition definition, uint64_t chunkSize, - int compressionLevel); + RecordDefinition definition, uint32_t rows, uint32_t cols, + uint64_t chunkSize, int compressionLevel); %rename(openDataset) open(Dataset& dataset, const std::string& name); static std::shared_ptr open(Dataset& dataset, const std::string& name); diff --git a/api/swig/include/bag_layerdescriptor.i b/api/swig/include/bag_layerdescriptor.i index cbe5a813c1..f0ccb29960 100644 --- a/api/swig/include/bag_layerdescriptor.i +++ b/api/swig/include/bag_layerdescriptor.i @@ -54,11 +54,13 @@ public: //! Intentionally omit exposing of std::tuple method (unsupported by SWIG), //! so it can be exposed with std::pair below. 
//std::tuple getMinMax() const noexcept; + //const std::tuple& getDims() const & noexcept; const std::string& getName() const & noexcept; LayerDescriptor& setName(std::string inName) & noexcept; LayerDescriptor& setMinMax(float min, float max) & noexcept; + LayerDescriptor& setDims(uint32_t rows, uint32_t cols) & noexcept; }; %extend LayerDescriptor @@ -69,6 +71,13 @@ public: std::tie(min, max) = self->getMinMax(); return std::pair(min, max); } + + std::pair getDims() const noexcept + { + uint32_t rows=0, cols=0; + std::tie(rows, cols) = self->getDims(); + return std::pair(rows, cols); + } } } // namespace BAG diff --git a/api/swig/include/bag_simplelayerdescriptor.i b/api/swig/include/bag_simplelayerdescriptor.i index 84d048852d..e59f086f27 100644 --- a/api/swig/include/bag_simplelayerdescriptor.i +++ b/api/swig/include/bag_simplelayerdescriptor.i @@ -28,11 +28,12 @@ class SimpleLayerDescriptor final : public LayerDescriptor { public: static std::shared_ptr create(const Dataset& dataset, - LayerType type, uint64_t chunkSize, int compressionLevel); + LayerType type, uint32_t rows, uint32_t cols, + uint64_t chunkSize, int compressionLevel); - %rename(openDataset) open(const Dataset&, LayerType); + %rename(openDataset) open(const Dataset&, LayerType, uint32_t rows, uint32_t cols); static std::shared_ptr open( - const Dataset& dataset, LayerType type); + const Dataset& dataset, LayerType type, uint32_t rows, uint32_t cols); SimpleLayerDescriptor(const SimpleLayerDescriptor&) = delete; SimpleLayerDescriptor(SimpleLayerDescriptor&&) = delete; diff --git a/scripts/dev-cont-build-bag.sh b/scripts/dev-cont-build-bag.sh index 615f321244..7bc6b33bd7 100755 --- a/scripts/dev-cont-build-bag.sh +++ b/scripts/dev-cont-build-bag.sh @@ -11,7 +11,7 @@ cmake --build build python3 -m pip wheel -w ./wheel/ ./build/api/swig/python # Install cmake --install build -python3 -m pip install --break-system-packages ./wheel/bagPy-*.whl +python3 -m pip install --break-system-packages 
--force-reinstall ./wheel/bagPy-*.whl # Run tests export BAG_SAMPLES_PATH=/tmp/bag/examples/sample-data ./build/tests/bag_tests_d From 39cee1969ebf09db63b77a33eadccd1e59e9a9e4 Mon Sep 17 00:00:00 2001 From: selimnairb Date: Fri, 3 Jan 2025 10:32:47 -0500 Subject: [PATCH 14/25] Tests: Python: Update tests to reflect layer dimension API fixes --- python/test_simplelayerdescriptor.py | 24 +++++++++++---------- python/test_surfacecorrectionsdescriptor.py | 24 +++++++++++---------- python/test_vrmetadata.py | 7 ++++++ scripts/dev-cont-build-bag.sh | 14 +++++++++--- 4 files changed, 44 insertions(+), 25 deletions(-) diff --git a/python/test_simplelayerdescriptor.py b/python/test_simplelayerdescriptor.py index 2909d1fdcb..ed172bcbf3 100644 --- a/python/test_simplelayerdescriptor.py +++ b/python/test_simplelayerdescriptor.py @@ -12,6 +12,8 @@ datapath = str(pathlib.Path(__file__).parent.absolute()) + "/../examples/sample-data" kExpectedChunkSize = 100 kExpectedCompressionLevel = 6 +kRows = 30 +kCols = 40 class TestSimpleLayerDescriptor(unittest.TestCase): @@ -26,7 +28,7 @@ def testCreation(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) @@ -47,7 +49,7 @@ def testGetSetName(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) @@ -68,7 +70,7 @@ def testGetDataType(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, 
kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) @@ -87,13 +89,13 @@ def testGetLayerType(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) self.assertEqual(descriptor.getLayerType(), Elevation) - descriptor = SimpleLayerDescriptor.create(dataset, Std_Dev, + descriptor = SimpleLayerDescriptor.create(dataset, Std_Dev, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) @@ -112,7 +114,7 @@ def testGetSetMinMax(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) @@ -137,12 +139,12 @@ def testGetInternalPath(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) self.assertEqual(descriptor.getInternalPath(), Layer.getInternalPath(Elevation)) - descriptor = SimpleLayerDescriptor.create(dataset, Uncertainty, + descriptor = SimpleLayerDescriptor.create(dataset, Uncertainty, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) self.assertEqual(descriptor.getInternalPath(), Layer.getInternalPath(Uncertainty)) @@ -160,7 +162,7 @@ def testGetElementSize(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + 
descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) self.assertEqual(descriptor.getElementSize(), @@ -179,7 +181,7 @@ def testGetChunkSize(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) self.assertEqual(descriptor.getChunkSize(), kExpectedChunkSize) @@ -197,7 +199,7 @@ def testGetCompressionLevel(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) self.assertEqual(descriptor.getCompressionLevel(), kExpectedCompressionLevel) diff --git a/python/test_surfacecorrectionsdescriptor.py b/python/test_surfacecorrectionsdescriptor.py index e73d02f07d..e07a3ddf6b 100644 --- a/python/test_surfacecorrectionsdescriptor.py +++ b/python/test_surfacecorrectionsdescriptor.py @@ -12,6 +12,8 @@ datapath = str(pathlib.Path(__file__).parent.absolute()) + "/../examples/sample-data" kExpectedChunkSize = 100 kExpectedCompressionLevel = 6 +kRows = 30 +kCols = 40 class TestSurfaceCorrectionsDescriptor(unittest.TestCase): @@ -27,7 +29,7 @@ def testCreation(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) @@ -48,7 +50,7 @@ def testGetSetName(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - 
descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) @@ -69,7 +71,7 @@ def testGetDataType(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) @@ -88,13 +90,13 @@ def testGetLayerType(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) self.assertEqual(descriptor.getLayerType(), Elevation) - descriptor = SimpleLayerDescriptor.create(dataset, Std_Dev, + descriptor = SimpleLayerDescriptor.create(dataset, Std_Dev, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) @@ -113,7 +115,7 @@ def testGetSetMinMax(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) @@ -138,12 +140,12 @@ def testGetInternalPath(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) self.assertEqual(descriptor.getInternalPath(), Layer.getInternalPath(Elevation)) - descriptor = 
SimpleLayerDescriptor.create(dataset, Uncertainty, + descriptor = SimpleLayerDescriptor.create(dataset, Uncertainty, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) self.assertEqual(descriptor.getInternalPath(), Layer.getInternalPath(Uncertainty)) @@ -161,7 +163,7 @@ def testGetElementSize(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) self.assertEqual(descriptor.getElementSize(), @@ -180,7 +182,7 @@ def testGetChunkSize(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) self.assertEqual(descriptor.getChunkSize(), kExpectedChunkSize) @@ -198,7 +200,7 @@ def testGetCompressionLevel(self): kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(dataset) - descriptor = SimpleLayerDescriptor.create(dataset, Elevation, + descriptor = SimpleLayerDescriptor.create(dataset, Elevation, kRows, kCols, kExpectedChunkSize, kExpectedCompressionLevel) self.assertIsNotNone(descriptor) self.assertEqual(descriptor.getCompressionLevel(), kExpectedCompressionLevel) diff --git a/python/test_vrmetadata.py b/python/test_vrmetadata.py index 66544ad714..6443e1aa43 100644 --- a/python/test_vrmetadata.py +++ b/python/test_vrmetadata.py @@ -142,6 +142,13 @@ def testWriteRead(self): buffer = VRMetadataLayerItems((kExpectedItem0,)) vrMetadata.write(kRowStart, kColumnStart, kRowEnd, kColumnEnd, buffer) + # Force a close. 
+ del vrMetadata + del dataset + + # Re-open read-only + dataset = Dataset.openDataset(tmpBagFile.getName(), BAG_OPEN_READONLY) + vrMetadata = dataset.getVRMetadata() # Read the record back. buffer = vrMetadata.read(kRowStart, kColumnStart, kRowEnd, kColumnEnd) diff --git a/scripts/dev-cont-build-bag.sh b/scripts/dev-cont-build-bag.sh index 7bc6b33bd7..504f27b523 100755 --- a/scripts/dev-cont-build-bag.sh +++ b/scripts/dev-cont-build-bag.sh @@ -3,16 +3,24 @@ set -ex # Abort on error. # Note: This script is meant to be run within the development container defined by ../Dockerfile.dev. +rm -rf venv-docker +python3 -m venv venv-docker +source venv-docker/bin/activate +pip install -r requirements.txt +pip install mypy numpy GDAL==3.9.3 + # Configure CMake cmake -G Ninja -DCMAKE_BUILD_TYPE=Debug -B build -S . \ -DCMAKE_INSTALL_PREFIX=/usr -DBAG_BUILD_TESTS:BOOL=ON -DBAG_BUILD_EXAMPLES:BOOL=ON # Build cmake --build build -python3 -m pip wheel -w ./wheel/ ./build/api/swig/python +pip wheel -w ./wheel/ ./build/api/swig/python # Install cmake --install build -python3 -m pip install --break-system-packages --force-reinstall ./wheel/bagPy-*.whl +pip install --force-reinstall ./wheel/bagPy-*.whl +# Generate PEP484 stub file +stubgen -m bagPy -o ./python # Run tests export BAG_SAMPLES_PATH=/tmp/bag/examples/sample-data ./build/tests/bag_tests_d -python3 -m pytest python/test_*.py +pytest python/test_*.py From 566324edd2bb9bdad5e7081883984b00a768070a Mon Sep 17 00:00:00 2001 From: selimnairb Date: Fri, 3 Jan 2025 14:07:44 -0500 Subject: [PATCH 15/25] Store dimensions of layer descriptors as uint64_t internally (rather than uint32_t like elsewhere in the API) because dimensions are initialized from HDF5 hsize_t values, which are uint64_t. 
--- api/bag_layerdescriptor.cpp | 12 ++++++------ api/bag_layerdescriptor.h | 15 ++++++++------- api/swig/include/bag_layerdescriptor.i | 10 +++++----- api/swig/python/CMakeLists.txt | 2 +- api/swig/python/bagpy.i | 1 + 5 files changed, 21 insertions(+), 19 deletions(-) diff --git a/api/bag_layerdescriptor.cpp b/api/bag_layerdescriptor.cpp index f67bd59384..bdb9cca4ae 100644 --- a/api/bag_layerdescriptor.cpp +++ b/api/bag_layerdescriptor.cpp @@ -30,7 +30,7 @@ LayerDescriptor::LayerDescriptor( std::string internalPath, std::string name, LayerType type, - uint32_t rows, uint32_t cols, + uint64_t rows, uint64_t cols, uint64_t chunkSize, int compressionLevel) : m_id(id) @@ -58,7 +58,7 @@ LayerDescriptor::LayerDescriptor( LayerDescriptor::LayerDescriptor( const Dataset& dataset, LayerType type, - uint32_t rows, uint32_t cols, + uint64_t rows, uint64_t cols, std::string internalPath, std::string name) : m_id(dataset.getNextId()) @@ -178,7 +178,7 @@ const std::string& LayerDescriptor::getName() const & noexcept \return The row and column spacing/resolution of the grid */ -const std::tuple& LayerDescriptor::getDims() const & noexcept +const std::tuple& LayerDescriptor::getDims() const & noexcept { return m_dims; } @@ -194,8 +194,8 @@ const std::tuple& LayerDescriptor::getDims() const & noexcep A buffer that can hold rows x columns of values of this layer. 
*/ size_t LayerDescriptor::getReadBufferSize( - uint32_t rows, - uint32_t columns) const noexcept + uint64_t rows, + uint64_t columns) const noexcept { return rows * columns * this->getElementSize(); } @@ -218,7 +218,7 @@ LayerDescriptor& LayerDescriptor::setMinMax( return *this; } -LayerDescriptor& LayerDescriptor::setDims(uint32_t rows, uint32_t cols) & noexcept +LayerDescriptor& LayerDescriptor::setDims(uint64_t rows, uint64_t cols) & noexcept { m_dims = {rows, cols}; return *this; diff --git a/api/bag_layerdescriptor.h b/api/bag_layerdescriptor.h index eb7e506747..a55b7e37a0 100644 --- a/api/bag_layerdescriptor.h +++ b/api/bag_layerdescriptor.h @@ -53,21 +53,21 @@ class BAG_API LayerDescriptor : public std::enable_shared_from_this getMinMax() const noexcept; const std::string& getName() const & noexcept; - const std::tuple& getDims() const & noexcept; + const std::tuple& getDims() const & noexcept; LayerDescriptor& setName(std::string inName) & noexcept; LayerDescriptor& setMinMax(float min, float max) & noexcept; - LayerDescriptor& setDims(uint32_t rows, uint32_t cols) & noexcept; + LayerDescriptor& setDims(uint64_t rows, uint64_t cols) & noexcept; protected: LayerDescriptor(uint32_t id, std::string internalPath, std::string name, - LayerType type, uint32_t rows, uint32_t cols, uint64_t chunkSize, + LayerType type, uint64_t rows, uint64_t cols, uint64_t chunkSize, int compressionLevel); LayerDescriptor(const Dataset& dataset, LayerType type, - uint32_t rows, uint32_t cols, + uint64_t rows, uint64_t cols, std::string internalPath = {}, std::string name = {}); - size_t getReadBufferSize(uint32_t rows, uint32_t columns) const noexcept; + size_t getReadBufferSize(uint64_t rows, uint64_t columns) const noexcept; LayerDescriptor& setInternalPath(std::string inPath) & noexcept; @@ -89,8 +89,9 @@ class BAG_API LayerDescriptor : public std::enable_shared_from_this m_minMax{}; - //! The dimensions of the layer - std::tuple m_dims{}; + //! The dimensions of the layer. 
These are uint64_t (rather than uint32_t like elsewhere in the API) because + //! dimensions are initialized from HDF5 hsize_t values, which are uint64_t. + std::tuple m_dims{}; friend GeorefMetadataLayer; friend InterleavedLegacyLayer; diff --git a/api/swig/include/bag_layerdescriptor.i b/api/swig/include/bag_layerdescriptor.i index f0ccb29960..473876f0e6 100644 --- a/api/swig/include/bag_layerdescriptor.i +++ b/api/swig/include/bag_layerdescriptor.i @@ -54,13 +54,13 @@ public: //! Intentionally omit exposing of std::tuple method (unsupported by SWIG), //! so it can be exposed with std::pair below. //std::tuple getMinMax() const noexcept; - //const std::tuple& getDims() const & noexcept; + //const std::tuple& getDims() const & noexcept; const std::string& getName() const & noexcept; LayerDescriptor& setName(std::string inName) & noexcept; LayerDescriptor& setMinMax(float min, float max) & noexcept; - LayerDescriptor& setDims(uint32_t rows, uint32_t cols) & noexcept; + LayerDescriptor& setDims(uint64_t rows, uint64_t cols) & noexcept; }; %extend LayerDescriptor @@ -72,11 +72,11 @@ public: return std::pair(min, max); } - std::pair getDims() const noexcept + std::pair getDims() const noexcept { - uint32_t rows=0, cols=0; + uint64_t rows=0, cols=0; std::tie(rows, cols) = self->getDims(); - return std::pair(rows, cols); + return std::pair(rows, cols); } } diff --git a/api/swig/python/CMakeLists.txt b/api/swig/python/CMakeLists.txt index a21208aac9..797fb1f379 100644 --- a/api/swig/python/CMakeLists.txt +++ b/api/swig/python/CMakeLists.txt @@ -31,7 +31,7 @@ endif() set_property(GLOBAL PROPERTY BAGPY_BINARY_DIR_PROP ${CMAKE_CURRENT_BINARY_DIR}) set(BAGPY_MODULE_NAME bagPy.py) -set(CMAKE_SWIG_FLAGS "-Wextra" "-v" "-py3") +set(CMAKE_SWIG_FLAGS "-Wextra" "-v" "-DSWIGWORDSIZE64") set_source_files_properties("bagpy.i" PROPERTIES SWIG_MODULE_NAME bagPy diff --git a/api/swig/python/bagpy.i b/api/swig/python/bagpy.i index 49d2fc6e5e..6956ba64a5 100644 --- 
a/api/swig/python/bagpy.i +++ b/api/swig/python/bagpy.i @@ -35,6 +35,7 @@ namespace std %template(DoublePair) pair; %template(FloatPair) pair; %template(UInt32Pair) pair; + %template(UInt64Pair) pair; %template(Cover) pair, pair >; %template(FloatVector) vector; %template(UInt32Vector) vector; From 65c33d2a5c6ad1a23376a81d77fb5017f781039e Mon Sep 17 00:00:00 2001 From: selimnairb Date: Fri, 3 Jan 2025 15:05:15 -0500 Subject: [PATCH 16/25] LayerDescriptor: Return dimensions as uint32_t rather than the underlying uint64_t to maintain compatibility with the rest of the BAG API, which assumes rows and cols are uint32_t. --- api/bag_layerdescriptor.cpp | 6 ++++-- api/bag_layerdescriptor.h | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/api/bag_layerdescriptor.cpp b/api/bag_layerdescriptor.cpp index bdb9cca4ae..7ad4d977ba 100644 --- a/api/bag_layerdescriptor.cpp +++ b/api/bag_layerdescriptor.cpp @@ -173,12 +173,14 @@ const std::string& LayerDescriptor::getName() const & noexcept return m_name; } -//! Retrieve the dimensions (shape) of the layer +//! Retrieve the dimensions (shape) of the layer. +//! Return dimensions as uint32_t rather than the underlying uint64_t to maintain compatibility with the rest of the +//! BAG API, which assumes rows and cols are uint32_t. /*! 
\return The row and column spacing/resolution of the grid */ -const std::tuple& LayerDescriptor::getDims() const & noexcept +std::tuple LayerDescriptor::getDims() const & noexcept { return m_dims; } diff --git a/api/bag_layerdescriptor.h b/api/bag_layerdescriptor.h index a55b7e37a0..937caf55a8 100644 --- a/api/bag_layerdescriptor.h +++ b/api/bag_layerdescriptor.h @@ -53,7 +53,7 @@ class BAG_API LayerDescriptor : public std::enable_shared_from_this getMinMax() const noexcept; const std::string& getName() const & noexcept; - const std::tuple& getDims() const & noexcept; + std::tuple getDims() const & noexcept; LayerDescriptor& setName(std::string inName) & noexcept; LayerDescriptor& setMinMax(float min, float max) & noexcept; From 98344af0c6b464a7d1bf158821d49dac2381dd0d Mon Sep 17 00:00:00 2001 From: selimnairb Date: Sat, 4 Jan 2025 16:25:08 -0500 Subject: [PATCH 17/25] Partial fix to update VR refinements and nodes to use 2D dataspace (as other implementations do) rather than 1D (as called for in BAG spec) --- api/bag_vrnode.cpp | 17 ++++++++++------- api/bag_vrrefinements.cpp | 17 +++++++++-------- 2 files changed, 19 insertions(+), 15 deletions(-) diff --git a/api/bag_vrnode.cpp b/api/bag_vrnode.cpp index 63bfad9f87..76f5edac4b 100644 --- a/api/bag_vrnode.cpp +++ b/api/bag_vrnode.cpp @@ -173,8 +173,9 @@ std::shared_ptr VRNode::open( // descriptor for the layer. hsize_t dims[2]; int ndims = h5dataSet->getSpace().getSimpleExtentDims(dims, nullptr); - if (!(ndims == 1 || ndims == 2)) { - // Should be 1D according to BAG spec, but some implementations use a 2D array. + if (ndims != 2) { + // Should be 1D according to BAG spec, but some implementations use a 2D array, + // so for compatibility's sake, use 2D. 
throw InvalidVRRefinementDimensions{}; } descriptor.setDims(dims[0], dims[1]); @@ -197,9 +198,9 @@ VRNode::createH5dataSet( const Dataset& dataset, const VRNodeDescriptor& descriptor) { - const hsize_t fileLength = 0; - const hsize_t kMaxFileLength = H5S_UNLIMITED; - const ::H5::DataSpace h5fileDataSpace{1, &fileLength, &kMaxFileLength}; + std::array fileDims{0, 0}; + const std::array kMaxFileDims{H5S_UNLIMITED, H5S_UNLIMITED}; + const ::H5::DataSpace h5fileDataSpace{kRank, fileDims.data(), kMaxFileDims.data()}; // Create the creation property list. const ::H5::DSetCreatPropList h5createPropList{}; @@ -209,7 +210,8 @@ VRNode::createH5dataSet( const auto compressionLevel = descriptor.getCompressionLevel(); if (chunkSize > 0) { - h5createPropList.setChunk(1, &chunkSize); + const std::array chunkDims{chunkSize, chunkSize}; + h5createPropList.setChunk(kRank, chunkDims.data()); if (compressionLevel > 0 && compressionLevel <= kMaxCompressionLevel) h5createPropList.setDeflate(compressionLevel); @@ -348,8 +350,9 @@ void VRNode::writeProxy( ::H5::DataSpace fileDataSpace = m_pH5dataSet->getSpace(); const int numDims = fileDataSpace.getSimpleExtentDims(fileLength.data(), maxFileLength.data()); - if (numDims != 1) + if (numDims != kRank) { throw InvalidVRRefinementDimensions{}; + } if (fileLength[0] < (columnEnd + 1)) { diff --git a/api/bag_vrrefinements.cpp b/api/bag_vrrefinements.cpp index 2835eccda2..04c5233bc6 100644 --- a/api/bag_vrrefinements.cpp +++ b/api/bag_vrrefinements.cpp @@ -156,8 +156,9 @@ std::unique_ptr VRRefinements::open( // descriptor for the layer. hsize_t dims[2]; int ndims = h5dataSet->getSpace().getSimpleExtentDims(dims, nullptr); - if (!(ndims == 1 || ndims == 2)) { - // Should be 1D according to BAG spec, but some implementations use a 2D array. + if (ndims != 2) { + // Should be 1D according to BAG spec, but some implementations use a 2D array, + // so for compatibility's sake, use 2D. 
throw InvalidVRRefinementDimensions{}; } descriptor.setDims(dims[0], dims[1]); @@ -181,9 +182,9 @@ VRRefinements::createH5dataSet( const Dataset& dataset, const VRRefinementsDescriptor& descriptor) { - constexpr hsize_t fileLength = 0; - constexpr hsize_t kMaxFileLength = H5S_UNLIMITED; - const ::H5::DataSpace h5fileDataSpace{1, &fileLength, &kMaxFileLength}; + std::array fileDims{0, 0}; + const std::array kMaxFileDims{H5S_UNLIMITED, H5S_UNLIMITED}; + const ::H5::DataSpace h5fileDataSpace{kRank, fileDims.data(), kMaxFileDims.data()}; // Create the creation property list. const ::H5::DSetCreatPropList h5createPropList{}; @@ -193,7 +194,8 @@ VRRefinements::createH5dataSet( const auto compressionLevel = descriptor.getCompressionLevel(); if (chunkSize > 0) { - h5createPropList.setChunk(1, &chunkSize); + const std::array chunkDims{chunkSize, chunkSize}; + h5createPropList.setChunk(kRank, chunkDims.data()); if (compressionLevel > 0 && compressionLevel <= kMaxCompressionLevel) h5createPropList.setDeflate(compressionLevel); @@ -317,8 +319,7 @@ void VRRefinements::writeProxy( ::H5::DataSpace fileDataSpace = m_pH5dataSet->getSpace(); const int numDims = fileDataSpace.getSimpleExtentDims(fileLength.data(), maxFileLength.data()); - if (numDims != 1) { - std::cout << "Number of dimensions for VRRefinements = " << numDims << std::endl; + if (numDims != kRank) { throw InvalidVRRefinementDimensions{}; } From d7cd22155aa550ef164f852925c770335cc742d7 Mon Sep 17 00:00:00 2001 From: selimnairb Date: Mon, 6 Jan 2025 15:08:15 -0500 Subject: [PATCH 18/25] Update VR refinements and nodes to use 2D dataspace (as other implementations do) rather than 1D (as called for in BAG spec) --- api/bag_vrnode.cpp | 42 ++++++++++++++++------------- api/bag_vrnodedescriptor.cpp | 2 +- api/bag_vrrefinements.cpp | 42 ++++++++++++++++------------- api/bag_vrrefinementsdescriptor.cpp | 2 +- 4 files changed, 48 insertions(+), 40 deletions(-) diff --git a/api/bag_vrnode.cpp b/api/bag_vrnode.cpp index 
76f5edac4b..175b61fda6 100644 --- a/api/bag_vrnode.cpp +++ b/api/bag_vrnode.cpp @@ -326,11 +326,10 @@ void VRNode::writeAttributesProxy() const } //! \copydoc Layer::write -//! Ignore rows since the data is 1 dimensional. void VRNode::writeProxy( - uint32_t /*rowStart*/, + uint32_t rowStart, uint32_t columnStart, - uint32_t /*rowEnd*/, + uint32_t rowEnd, uint32_t columnEnd, const uint8_t* buffer) { @@ -339,26 +338,31 @@ void VRNode::writeProxy( if (!pDescriptor) throw InvalidLayerDescriptor{}; - const hsize_t columns = (columnEnd - columnStart) + 1; - const hsize_t offset = columnStart; - const ::H5::DataSpace memDataSpace{1, &columns, &columns}; + const auto rows = (rowEnd - rowStart) + 1; + const auto columns = (columnEnd - columnStart) + 1; + const std::array count{rows, columns}; + const std::array offset{rowStart, columnStart}; + const ::H5::DataSpace memDataSpace{kRank, count.data(), count.data()}; + + ::H5::DataSpace fileDataSpace = m_pH5dataSet->getSpace(); // Expand the file data space if needed. 
- std::array fileLength{}; - std::array maxFileLength{}; + std::array fileDims{}; + std::array maxFileDims{}; - ::H5::DataSpace fileDataSpace = m_pH5dataSet->getSpace(); - const int numDims = fileDataSpace.getSimpleExtentDims(fileLength.data(), - maxFileLength.data()); + const int numDims = fileDataSpace.getSimpleExtentDims(fileDims.data(), + maxFileDims.data()); if (numDims != kRank) { throw InvalidVRRefinementDimensions{}; } - if (fileLength[0] < (columnEnd + 1)) + if ((fileDims[0] < (rowEnd + 1)) || + (fileDims[1] < (columnEnd + 1))) { - const auto newMaxLength = std::max(fileLength[0], columnEnd + 1); - - m_pH5dataSet->extend(&newMaxLength); + const std::array newDims{ + std::max(fileDims[0], rowEnd + 1), + std::max(fileDims[1], columnEnd + 1)}; + m_pH5dataSet->extend(newDims.data()); fileDataSpace = m_pH5dataSet->getSpace(); @@ -366,18 +370,18 @@ void VRNode::writeProxy( if (this->getDataset().expired()) throw DatasetNotFound{}; - auto pDataset = this->getDataset().lock(); +// auto pDataset = this->getDataset().lock(); // TODO: Confirm that this is what we want --- this resets the dimensions of the // overall BAG, rather than the layer, which means that it's going to set the // metadata size of the mandatory layers to 1xN ... which is odd. - pDataset->getDescriptor().setDims(1, static_cast(newMaxLength)); +// pDataset->getDescriptor().setDims(1, static_cast(newMaxLength)); // So that the read() call checks correctly against the size of the array, rather // than the dimensions of the mandatory layer, we need to keep track of the size // of the layer in the layer-specific descriptor. 
- pDescriptor->setDims(1, newMaxLength); +// pDescriptor->setDims(1, newMaxLength); } - fileDataSpace.selectHyperslab(H5S_SELECT_SET, &columns, &offset); + fileDataSpace.selectHyperslab(H5S_SELECT_SET, count.data(), offset.data()); const auto memDataType = makeDataType(); diff --git a/api/bag_vrnodedescriptor.cpp b/api/bag_vrnodedescriptor.cpp index 2f932619b5..433a9a40e9 100644 --- a/api/bag_vrnodedescriptor.cpp +++ b/api/bag_vrnodedescriptor.cpp @@ -58,7 +58,7 @@ std::shared_ptr VRNodeDescriptor::create( int compressionLevel) { return std::shared_ptr( - new VRNodeDescriptor{dataset.getNextId(), 1, 0, chunkSize, + new VRNodeDescriptor{dataset.getNextId(), 1, 1, chunkSize, compressionLevel}); } diff --git a/api/bag_vrrefinements.cpp b/api/bag_vrrefinements.cpp index 04c5233bc6..25f0d9554a 100644 --- a/api/bag_vrrefinements.cpp +++ b/api/bag_vrrefinements.cpp @@ -295,11 +295,10 @@ void VRRefinements::writeAttributesProxy() const } //! \copydoc Layer::write -//! Ignore rows since the data is 1 dimensional. void VRRefinements::writeProxy( - uint32_t /*rowStart*/, + uint32_t rowStart, uint32_t columnStart, - uint32_t /*rowEnd*/, + uint32_t rowEnd, uint32_t columnEnd, const uint8_t* buffer) { @@ -308,26 +307,31 @@ void VRRefinements::writeProxy( if (!pDescriptor) throw InvalidLayerDescriptor{}; - const hsize_t columns = (columnEnd - columnStart) + 1; - const hsize_t offset = columnStart; - const ::H5::DataSpace memDataSpace{1, &columns, &columns}; + const auto rows = (rowEnd - rowStart) + 1; + const auto columns = (columnEnd - columnStart) + 1; + const std::array count{rows, columns}; + const std::array offset{rowStart, columnStart}; + const ::H5::DataSpace memDataSpace{kRank, count.data(), count.data()}; + + ::H5::DataSpace fileDataSpace = m_pH5dataSet->getSpace(); // Expand the file data space if needed. 
- std::array fileLength{}; - std::array maxFileLength{}; + std::array fileDims{}; + std::array maxFileDims{}; - ::H5::DataSpace fileDataSpace = m_pH5dataSet->getSpace(); - const int numDims = fileDataSpace.getSimpleExtentDims(fileLength.data(), - maxFileLength.data()); + const int numDims = fileDataSpace.getSimpleExtentDims(fileDims.data(), + maxFileDims.data()); if (numDims != kRank) { throw InvalidVRRefinementDimensions{}; } - if (fileLength[0] < (columnEnd + 1)) + if ((fileDims[0] < (rowEnd + 1)) || + (fileDims[1] < (columnEnd + 1))) { - const auto newMaxLength = std::max(fileLength[0], columnEnd + 1); - - m_pH5dataSet->extend(&newMaxLength); + const std::array newDims{ + std::max(fileDims[0], rowEnd + 1), + std::max(fileDims[1], columnEnd + 1)}; + m_pH5dataSet->extend(newDims.data()); fileDataSpace = m_pH5dataSet->getSpace(); @@ -335,18 +339,18 @@ void VRRefinements::writeProxy( if (this->getDataset().expired()) throw DatasetNotFound{}; - auto pDataset = this->getDataset().lock(); +// auto pDataset = this->getDataset().lock(); // TODO: Confirm that this is what we want --- this resets the dimensions of the // overall BAG, rather than the layer, which means that it's going to set the // metadata size of the mandatory layers to 1xN ... which is odd. - pDataset->getDescriptor().setDims(1, static_cast(newMaxLength)); +// pDataset->getDescriptor().setDims(1, static_cast(newMaxLength)); // So that the read() call checks correctly against the size of the array, rather // than the dimensions of the mandatory layer, we need to keep track of the size // of the layer in the layer-specific descriptor. 
- pDescriptor->setDims(1, newMaxLength); +// pDescriptor->setDims(1, newMaxLength); } - fileDataSpace.selectHyperslab(H5S_SELECT_SET, &columns, &offset); + fileDataSpace.selectHyperslab(H5S_SELECT_SET, count.data(), offset.data()); const auto memDataType = makeDataType(); diff --git a/api/bag_vrrefinementsdescriptor.cpp b/api/bag_vrrefinementsdescriptor.cpp index e478e727ed..61bd5e60f3 100644 --- a/api/bag_vrrefinementsdescriptor.cpp +++ b/api/bag_vrrefinementsdescriptor.cpp @@ -56,7 +56,7 @@ std::shared_ptr VRRefinementsDescriptor::create( int compressionLevel) { return std::shared_ptr( - new VRRefinementsDescriptor{dataset.getNextId(), 1, 0, chunkSize, + new VRRefinementsDescriptor{dataset.getNextId(), 1, 1, chunkSize, compressionLevel}); } From 27189ec4cd44ebca30288624e0443822cb1fb588 Mon Sep 17 00:00:00 2001 From: selimnairb Date: Mon, 6 Jan 2025 15:27:00 -0500 Subject: [PATCH 19/25] Ensure VR descriptors are consistent with shape of underlying H5 dataspaces --- api/bag_vrnode.cpp | 3 ++- api/bag_vrnodedescriptor.cpp | 2 +- api/bag_vrrefinements.cpp | 3 ++- api/bag_vrrefinementsdescriptor.cpp | 2 +- 4 files changed, 6 insertions(+), 4 deletions(-) diff --git a/api/bag_vrnode.cpp b/api/bag_vrnode.cpp index 175b61fda6..47adeecf2d 100644 --- a/api/bag_vrnode.cpp +++ b/api/bag_vrnode.cpp @@ -378,7 +378,8 @@ void VRNode::writeProxy( // So that the read() call checks correctly against the size of the array, rather // than the dimensions of the mandatory layer, we need to keep track of the size // of the layer in the layer-specific descriptor. 
-// pDescriptor->setDims(1, newMaxLength); + pDescriptor->setDims(static_cast(newDims[0]), + static_cast(newDims[1])); } fileDataSpace.selectHyperslab(H5S_SELECT_SET, count.data(), offset.data()); diff --git a/api/bag_vrnodedescriptor.cpp b/api/bag_vrnodedescriptor.cpp index 433a9a40e9..1f86d1ec97 100644 --- a/api/bag_vrnodedescriptor.cpp +++ b/api/bag_vrnodedescriptor.cpp @@ -58,7 +58,7 @@ std::shared_ptr VRNodeDescriptor::create( int compressionLevel) { return std::shared_ptr( - new VRNodeDescriptor{dataset.getNextId(), 1, 1, chunkSize, + new VRNodeDescriptor{dataset.getNextId(), 0, 0, chunkSize, compressionLevel}); } diff --git a/api/bag_vrrefinements.cpp b/api/bag_vrrefinements.cpp index 25f0d9554a..2330c960c9 100644 --- a/api/bag_vrrefinements.cpp +++ b/api/bag_vrrefinements.cpp @@ -347,7 +347,8 @@ void VRRefinements::writeProxy( // So that the read() call checks correctly against the size of the array, rather // than the dimensions of the mandatory layer, we need to keep track of the size // of the layer in the layer-specific descriptor. 
-// pDescriptor->setDims(1, newMaxLength); + pDescriptor->setDims(static_cast(newDims[0]), + static_cast(newDims[1])); } fileDataSpace.selectHyperslab(H5S_SELECT_SET, count.data(), offset.data()); diff --git a/api/bag_vrrefinementsdescriptor.cpp b/api/bag_vrrefinementsdescriptor.cpp index 61bd5e60f3..f422a7950e 100644 --- a/api/bag_vrrefinementsdescriptor.cpp +++ b/api/bag_vrrefinementsdescriptor.cpp @@ -56,7 +56,7 @@ std::shared_ptr VRRefinementsDescriptor::create( int compressionLevel) { return std::shared_ptr( - new VRRefinementsDescriptor{dataset.getNextId(), 1, 1, chunkSize, + new VRRefinementsDescriptor{dataset.getNextId(), 0, 0, chunkSize, compressionLevel}); } From 60a7a9253c5eb7a9f1346a0938fa95f71857ad86 Mon Sep 17 00:00:00 2001 From: selimnairb Date: Mon, 6 Jan 2025 15:47:54 -0500 Subject: [PATCH 20/25] Remove unnecessary casts --- api/bag_vrnode.cpp | 3 +-- api/bag_vrrefinements.cpp | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/api/bag_vrnode.cpp b/api/bag_vrnode.cpp index 47adeecf2d..3caabc1aea 100644 --- a/api/bag_vrnode.cpp +++ b/api/bag_vrnode.cpp @@ -378,8 +378,7 @@ void VRNode::writeProxy( // So that the read() call checks correctly against the size of the array, rather // than the dimensions of the mandatory layer, we need to keep track of the size // of the layer in the layer-specific descriptor. - pDescriptor->setDims(static_cast(newDims[0]), - static_cast(newDims[1])); + pDescriptor->setDims(newDims[0], newDims[1]); } fileDataSpace.selectHyperslab(H5S_SELECT_SET, count.data(), offset.data()); diff --git a/api/bag_vrrefinements.cpp b/api/bag_vrrefinements.cpp index 2330c960c9..2897b0d421 100644 --- a/api/bag_vrrefinements.cpp +++ b/api/bag_vrrefinements.cpp @@ -347,8 +347,7 @@ void VRRefinements::writeProxy( // So that the read() call checks correctly against the size of the array, rather // than the dimensions of the mandatory layer, we need to keep track of the size // of the layer in the layer-specific descriptor. 
- pDescriptor->setDims(static_cast(newDims[0]), - static_cast(newDims[1])); + pDescriptor->setDims(newDims[0], newDims[1]); } fileDataSpace.selectHyperslab(H5S_SELECT_SET, count.data(), offset.data()); From 065acbda5ad28640161418085e53c1eb9ca319b8 Mon Sep 17 00:00:00 2001 From: selimnairb Date: Mon, 6 Jan 2025 16:09:40 -0500 Subject: [PATCH 21/25] Add explicit cast to uint32_t in LayerDescriptor::getDims() --- api/bag_layerdescriptor.cpp | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/api/bag_layerdescriptor.cpp b/api/bag_layerdescriptor.cpp index 7ad4d977ba..531a531ced 100644 --- a/api/bag_layerdescriptor.cpp +++ b/api/bag_layerdescriptor.cpp @@ -182,7 +182,10 @@ const std::string& LayerDescriptor::getName() const & noexcept */ std::tuple LayerDescriptor::getDims() const & noexcept { - return m_dims; + return std::tuple{ + static_cast(std::get<0>(m_dims)), + static_cast(std::get<1>(m_dims)) + }; } //! Get the size of a buffer for reading a specified number rows and columns. 
From 728ee2f8fb82f0f86348ad65501c71199a991cc1 Mon Sep 17 00:00:00 2001 From: selimnairb Date: Tue, 7 Jan 2025 14:39:25 -0500 Subject: [PATCH 22/25] Fix wheel build on Windows --- api/swig/python/CMakeLists.txt | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/api/swig/python/CMakeLists.txt b/api/swig/python/CMakeLists.txt index 797fb1f379..e5d5044b92 100644 --- a/api/swig/python/CMakeLists.txt +++ b/api/swig/python/CMakeLists.txt @@ -31,7 +31,11 @@ endif() set_property(GLOBAL PROPERTY BAGPY_BINARY_DIR_PROP ${CMAKE_CURRENT_BINARY_DIR}) set(BAGPY_MODULE_NAME bagPy.py) -set(CMAKE_SWIG_FLAGS "-Wextra" "-v" "-DSWIGWORDSIZE64") +if (MSVC) + set(CMAKE_SWIG_FLAGS "-Wextra" "-v") +else() + set(CMAKE_SWIG_FLAGS "-Wextra" "-v" "-DSWIGWORDSIZE64") +endif() set_source_files_properties("bagpy.i" PROPERTIES SWIG_MODULE_NAME bagPy From 772e7c915e703fd96e945fcf4499875948be2136 Mon Sep 17 00:00:00 2001 From: selimnairb Date: Wed, 8 Jan 2025 12:18:45 -0500 Subject: [PATCH 23/25] VR: Add test_vr_bag to test reading an existing VR BAG file; Add exception handling for HDF5 call in Dataset::readDataset() --- api/bag_dataset.cpp | 15 ++++++++--- examples/sample-data/test_vr.bag | Bin 0 -> 17480 bytes tests/CMakeLists.txt | 1 + tests/test_vr_bag.cpp | 44 +++++++++++++++++++++++++++++++ 4 files changed, 56 insertions(+), 4 deletions(-) create mode 100644 examples/sample-data/test_vr.bag create mode 100644 tests/test_vr_bag.cpp diff --git a/api/bag_dataset.cpp b/api/bag_dataset.cpp index a29fd99474..f564e737df 100644 --- a/api/bag_dataset.cpp +++ b/api/bag_dataset.cpp @@ -1073,10 +1073,17 @@ void Dataset::readDataset( OpenMode openMode) { signal(SIGABRT, handleAbrt); - m_pH5file = std::unique_ptr<::H5::H5File, DeleteH5File>(new ::H5::H5File{ - fileName.c_str(), - (openMode == BAG_OPEN_READONLY) ? 
H5F_ACC_RDONLY : H5F_ACC_RDWR}, - DeleteH5File{}); + try { + m_pH5file = std::unique_ptr<::H5::H5File, DeleteH5File>(new ::H5::H5File{ + fileName.c_str(), + (openMode == BAG_OPEN_READONLY) ? H5F_ACC_RDONLY : H5F_ACC_RDWR}, + DeleteH5File{}); + } + catch( ::H5::FileIException& e ) + { + std::cerr << "Unable to read BAG file, error was: " << e.getCDetailMsg() << std::endl; + e.printErrorStack(); + } m_pMetadata = std::make_unique(*this); diff --git a/examples/sample-data/test_vr.bag b/examples/sample-data/test_vr.bag new file mode 100644 index 0000000000000000000000000000000000000000..679bf1582c94eebaab45be131189cad395c267ea GIT binary patch literal 17480 zcmeHO2|QKX_dl1+WC)eSm1GtX8sths<}o5%T=P7qjFG8?Od&&xP%?*7AxVi)nKM08 zlA$6A|8wrSw>-n^z4w2=|NsB{|7@Rg_ugx*{oU)VJ)C{kIzr@*9j78^A_t&EN(yWO zXs|{6&k`=l9fK+OI-YNYDf8amnAyWs2id5OXk{$h>*2!A(! zX>jwGiv$L;z~TLB{-)w4uESsKe?$ELhXwHSHxG9^`28=Q|6hB58p$Zz2id z{_yKU4p+Cp?XUk@ZT@}H@z1T5_M!yjus4MOC_x*O0!Pv2NNtRhv!%Tqmc9l2MS>UJ z-a-PR0{ejDmNpnWv@Hf{XYYbEvv;*K1(Yzh_D*Mz2!I@x#Wg?xTv!|g0ZiD0tpNZr zE?-$E1fR5!BrTE^e3nBA32Y?=w)$Yp0JwX>&-W2pm|%gUZvu6P+F{%5c9AE=;SeH5 z$Y2p8oFtsl;bs)n=7_nq-cLJh1L#4XlBJ!ct*b3k#s=etb^)z{)JEI5V%F6Q!hsXA z5?_LHc>a>$dINw6+`hY9W_cO^Tu`N3PF-DwtqS6?ujVbMh~p}~9LuSz%kV4!06EUp zyyX=9^9#kdoVvPK2t>*nb3KB$0MS<62a87_WhH|L>&(J+OilZv+I#4thCcMat zBnE0ywQC5RQnLd#X=x^8i;1-F{Gb)9TXesquG8G#B$G5f$@5eF_oe!K!pYs&HoK{& zQ9o_czfmVg84=xbVYw?Kwr7r#>FQ?DfiRN?@vs(lU8f^`{Sm>_sj^n48Ox^X(_VYQ?viLd!?qNPvgQMX}Mc!et$^*W4gRQsp6J2)YsXNVK zxHYqK3^_Mw$l21T6WTCEViV%b)=?QJttlInUuW}hB6exFy~?9^1+~2=?g~CT8HXWM zIb7Vs)_2GIqoq&cmxSD2jNi$cPfWZ;JEyd#Y!?ZwT4zN~SR*Kcq-?SE-%n2#9gcmTM4u*mi3xTCIQU4^1q;EhVQ zHnvC$9=fg?&=Jabw_s1YfRbC)hX4y7FV605wo01=kG6eya8-=ep3_Tf`>}MdSW4%j zg#8IjA&%k)xYUIR@5C3i%LdK~>Lf(=4AFlU4M7H2dL`D2DhTa~N9~OrkkMl~OEPqB zWFDhCQ!Eo+U732hXto;Z@tKT|k^cM_UTvODSx$$0Y?tD1x}UVH*fVkJCecL+tw}^( zSXWQ62ZN#5_$iwtBWDq9dkr0~evR>vro8&GKC|EZ@w zVZV)mNZID>SF;+66AGv(#zL$T6W8V){^*@aC?>8&x0ErO&#$u?r7gpwLa7sVQ@WWyPyc~{s$YpDO;6f* 
zeoPv{Md`fu4~@S~n6cE|pN!<)<}*F=>7nm|L7L$AXHaA6C62jrRSx&Nm0v||X%SD3 z&<$$gDkoncyGd&#ZOD=1Z#8y$xU){VE=)sMNAkD87j)`-ctoSkbWLibnHleWW; ztRd<&R<#8kpUtUqI>w%oh*uYTai*6JV``K*t`ItZs#BX5iyoPws=0TJuC$^5Lc4&? zx5c|*hDL<_CW9v>B-jp$_L-(NMm8C}H|393w33`;8RD)!Y(^yztQekfC{JoL0Ye=3 zJh7yNN1I1S;FV6hbU|>_ndRm)Os7vwy5+{6wn(>k%ng}(xzTn3 zro66;oSrhB4-eIAGMNZdphGT~L zt(1pNG-GS!EvUCz0&^OZ>?KLmNQE8__GflG;u!3Djo3=O`=e@HhSX9Tf+tiYCfHL| zxOO6XPGz+2n0SaO6&P^Q>r)qQUj(|3Uh^HQ=eo2+vdj7G)ZQCN>Zm-*Pvj{!_Y#7L zt%~o)Fp{uMeF`*aFYh^CsL@?4#olIkT9bvqqhnkxU%6V41~bGP(LPW@r|{a0j6%Ko z)rs40>JHE;gkL&meskfh{_9OV19mnZc^}SByz8!LiOo9YF0Cip7{u>pWMv*AF|wUB zK%OtkeVg>De76!e{v)?MZ1+zc|6I>{GkQRrF@ZVfjgzi;Pu&~isofG$6b5W{&aok6 z1v|o95<>@!STtOPUOe(6E2TZEZGCmD!SV9q!EAlq*6nxZZ+ zZi_0=MOuxk5#61OHhK<3dbg4{6gGVS#`B$oIFpF(^7(5C)!LepK7}$_&ueb0s)u)u}| zHY~7Vf&U~6cyx5Dby|Th>|PTmpB>2@Qs+5#JHje2EhEPCPBapIno}sXJ?}np(MxQr zj8NrlLrd}jbF+r}A-S`ncjAg?UTfq#5Y)Qo?>Hu>S`Z}8rnxU%B;CGqrq>`QqQZAu zaD;C1uJJcq=?byJ0`4&%`EKx^KrIpzZlaU1-dYAIF^3IKPK^2|xvMUzWUmhO~8_OXq=35*f7EcbKogV(g z(Co>Oe9&XmP+fa&YU%5uZi_$N)%*ty_HqZ$xtmQz%k_6pzD&M)JHkM^fVQA3)782{ z$bmy$jy@aFPNM2SV}*LNIfcCRyI0%I_*=qXp6k67X6t#S{M`7~SnWW53fZ14OoG?b z7Rg})^m&htYwEZ+@>@xkC2SG#I@I$tagOLM{h~5iF~?5~@Hn=9?71RvlShx_Vi-eb zV`HORo2iJW5hHV~wyz0=q7<*J;?S4Y&KlQTd&9Wu?nVp=preNO+$oFXh`V#ot|H^%m}J<#>&DM@w6yY% z(<*cc%XZ!9$_VEUx;(4mzQrwL+r^7?_Wlt^+;hgyI!8LbAoA-50)tr{n-J~rs*T^Z@#F{7s=Y^6z=*=yLV)petKl+^Nf4+;-~#p546@j;~g!N zYBf@04_`WOlXq+GmX10jowuIP?j3y7j^}4Q3(|JYd^*uy$94oE)#l#k(c!9YYu_(O zxyXBK{xD_NxQN#6W~x^KPvpfaGv>81S=_Xi>Kp=lmloCOQg_N&@y_Qjw0T>F5b>ON z8nIRP@O)guW|uEQ;)Z;e-7VxC_DIsZGj0hD&oK!(^7Y$1|AI>%wX(|viH}Dp(4!n3 zG%-Sty&p}C(Zv`a(!1(vn8?i6V^sNA?X~@B&eI{|tmJbJFSehSle~I+k|X=I_Gm?D zl+nesv+++zKIS@bC+;);R6ox5RvaV8Am3zRj$!z>! 
z`A7Py7df1cLCuLi8AlupCDHq*njAdn>_xU6_LIutt!}E)l@OaqyJLD~_IR}?VHiC} z<>b7mg8wG8s>NNPC-%xwwio9M<1@oD44KCF6f0Csdy4ra_R4D%bpF=g3@tOZJAOFMIq`8}-r{NWOD^>D4!!tD#` zUuzq`j>ctHm~MI)re}XNci`>*MDuIH(&7bXOfEct*wIRN?lcY&mut*j5)A|)Z~Q{E z-zt4#9r1H2gx$UlC@`0Vw^k^})AarFQM&kpddvb~`~D6rqAH5EBJrqVSlaAPowKKo__EBh(C6 zJPP~iM64Ly(tD3U+Gp@2?FuKIRPe8z{b+q#n2h!WQlRcQhpIC1ZcpUgolF$oQ!as z^w0XI;69;VVMht*pTfIV^Oqu#sg==zcBj%#n(q@<>IDojKP0 z*k~?(9H9pI{;-X)by!~=9OIvkC$HyMTtA-ti51q(2(KbGUw!|oSULh=`dvI(>*w6T zMU-KfwdNVbS!oYSUVV3~c`p{vV~a z#@pE)+z_1XFix-69?5Q&xlmFy2cH$npmW2NGr6mV_zIznL?2!Iy5kJwt;8LpfskihcsgzETpg9rb) zAK`G&9)Af3dKg3LXS{(HyRIR9mW}<7DZidw@p?grxi#@Xz5Wg!bWnxyu(06$uW2uM zQhx4iswml=rhxQ!&xdJTyVOr~cv26{((}j}@w(YO>zqK&%-$(hP;O@n73PayTLq+Vq z2!0<0Ko^+)dVGpdP5_36;2^VLAy=+jSOfqt`0uB11Y3*?+7#`A#`Ce~An;sZ2pwub z;>xp?8`=q64u(IP;p$EpGfO)R__%O^k3ay9z7h_5&rn}n_zu^};W*do@3*ULTFax56iz;oj&FuJ6P=k_PL(+uu$F8!TxRetyzS&EA zG(ytP>TM&wVxoHcqJaoq%mV|38gliQ3KwfcGZBgD$@dd-y$R(Pg}$A(+E3QTY1GB) zS5zAFq}wX+m2{I0^-|%aXUfTR#v2pzUgw|sTF)d**76A_<-GSle#vk9@T7lROUApy z5e>zSo&>Qahlxhd8LLp_m_1HuNqKA<$mn&B^#NCFYFo?K66fDodSp+k-TTg(qHjp9 zJ7?gS-Tt0nM`ZSmxNZ^S;ge>34Os{aZ{kGJuOt40V)9Fia&9anfuTJ-A?|%D4x)NN z)_b1}@MUf-(i2Zo%+Ay}?D?uoIPW4!l0)0W%9vR1O%~!o2qk)h0;Q_1#th@n4vhRR zw*yVs4{O*a6$JCjWt`2mzd)Tl-={j?N!p`Q(~+!GZ^WN_$a3@Kn?#+ehXb(+ojD_+ z>4kk%l23-!JjTOZQVLX-awkSa`}|Y4pRcDfynLazC804(REW^&zS8Xj9`sxnX39Sr;Wd{hK;LM3JpH|kw`^0DRXeMi zIrY3o@GXj)V((GUgW@TEYyRw{%+k&|(yvMqIo+(?^`0qV82DImKh^C~;B4y-$69^i zp%L{;_wl#9*B0N5aCnVfvKgCZdl6TzSza!v_b}yivp}%tt*7s7X+@q^Pzcl~J>B*2 z8vPBP(dM%vdR=E(E|YwgR=uYfcHrjrT;9ExA1|_m?`J+O`;>PeK6H?rW!5aa_3oyz ziOjUYb}ssfOR~=|>WH%VXURmy_liZyN=TyWKB5m+zPS?R8!{-tb?V znPRDZ@x-m?Zb*0b=_rnh8s%B;)Gv95$P-mR#XzDbVaDiCt4F6oG%El0gi{fRDUs;; zLTgIm>FPX*mVm`Vvmz~)Al^}*$M;*^T4=Xag(-Ow8}251^f29QtFU|2j+L;;W&d?wCNXq}pGorjmRmUJa`uO^I-jl60ES>fgaT-2D_g;U`}18(hnG2i*wXe*|Fv#|`p#qOkV|{a+>Ye~Gq`-x?RYgcyGUM+%Z# zg#-Gne|j4K3=V{{1`eptU&BFS^+AB4Z|GiqK?fp~ejO)%1;>{_BSSHU|EV~!v#T-q 
z|M>2|^y4k}r$cR^nDnpWNXy&c#}uOdQCe%fSNcqU1ZCF&S_@-kJo;C0BS=qpuC9(p zGp*Avo(#2xhkbQCdW|?U6d#0!9Gh19nSajfZJ@nL0c~~UnxW?9wuXRU-C<1V_rCz{ C!=KOq literal 0 HcmV?d00001 diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index fd293931c2..7e6d0ce859 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -30,6 +30,7 @@ set(TEST_SOURCE_FILES test_bag_vrrefinements.cpp test_bag_vrrefinementsdescriptor.cpp test_bag_vrtrackinglist.cpp + test_vr_bag.cpp ) source_group("Source Files" FILES ${TEST_SOURCE_FILES}) diff --git a/tests/test_vr_bag.cpp b/tests/test_vr_bag.cpp new file mode 100644 index 0000000000..c2ebd15f41 --- /dev/null +++ b/tests/test_vr_bag.cpp @@ -0,0 +1,44 @@ +#include +#include +#include +#include +#include + +#include + + +using BAG::Dataset; +using BAG::VRRefinements; +using BAG::VRRefinementsDescriptor; +using BAG::VRMetadata; +using BAG::VRMetadataDescriptor; + + +// Test basic reading of an existing VR BAG from https://github.com/OSGeo/gdal/blob/master/autotest/gdrivers/data/bag/test_vr.bag +TEST_CASE("test VR BAG reading", "[dataset][open][VR]") +{ + const std::string bagFileName{std::string{std::getenv("BAG_SAMPLES_PATH")} + + "/test_vr.bag"}; + + const size_t kNumExpectedLayers = 4; + const auto dataset = Dataset::open(bagFileName, BAG_OPEN_READONLY); + REQUIRE(dataset); + + CHECK(dataset->getLayerTypes().size() == kNumExpectedLayers); + + CHECK(dataset->getDescriptor().getVersion() == "1.6.2"); + + auto vrMeta = dataset->getVRMetadata(); + REQUIRE(vrMeta); + const auto vrMetaDesc = vrMeta->getDescriptor(); + auto vrMetaDescDims = vrMetaDesc->getDims(); + CHECK(std::get<0>(vrMetaDescDims) == 4); + CHECK(std::get<1>(vrMetaDescDims) == 6); + + auto vrRef = dataset->getVRRefinements(); + REQUIRE(vrRef); + const auto vrRefDesc = vrRef->getDescriptor(); + auto vrRefDescDims = vrRefDesc->getDims(); + CHECK(std::get<0>(vrRefDescDims) == 1); + CHECK(std::get<1>(vrRefDescDims) == 556); +} \ No newline at end of file From 
27a6687cfde28b4dd2a7b8665dae774bd58bc058 Mon Sep 17 00:00:00 2001 From: selimnairb Date: Wed, 8 Jan 2025 13:28:25 -0500 Subject: [PATCH 24/25] test_vr_bag: Ensure BAG dimensions match VR metadata descriptor dimensions --- tests/test_vr_bag.cpp | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/tests/test_vr_bag.cpp b/tests/test_vr_bag.cpp index c2ebd15f41..e6513c9e09 100644 --- a/tests/test_vr_bag.cpp +++ b/tests/test_vr_bag.cpp @@ -26,14 +26,20 @@ TEST_CASE("test VR BAG reading", "[dataset][open][VR]") CHECK(dataset->getLayerTypes().size() == kNumExpectedLayers); + const uint32_t kExpectedRows = 4; + const uint32_t kExpectedCols = 6; CHECK(dataset->getDescriptor().getVersion() == "1.6.2"); + auto dims = dataset->getDescriptor().getDims(); + CHECK(std::get<0>(dims) == kExpectedRows); + CHECK(std::get<1>(dims) == kExpectedCols); auto vrMeta = dataset->getVRMetadata(); REQUIRE(vrMeta); const auto vrMetaDesc = vrMeta->getDescriptor(); auto vrMetaDescDims = vrMetaDesc->getDims(); - CHECK(std::get<0>(vrMetaDescDims) == 4); - CHECK(std::get<1>(vrMetaDescDims) == 6); + // VR metadata descriptor dims should be the same as BAG dataset dims... 
+ CHECK(std::get<0>(vrMetaDescDims) == kExpectedRows); + CHECK(std::get<1>(vrMetaDescDims) == kExpectedCols); auto vrRef = dataset->getVRRefinements(); REQUIRE(vrRef); From cd83fda8a9b337e6c378c0879834a20987020c10 Mon Sep 17 00:00:00 2001 From: selimnairb Date: Thu, 9 Jan 2025 10:53:00 -0500 Subject: [PATCH 25/25] tests: Add example VR BAG from NBS archive --- examples/sample-data/Sample_VR_BAG-gzip.bag | Bin 0 -> 40489 bytes tests/test_vr_bag.cpp | 39 +++++++++++++++++++- 2 files changed, 37 insertions(+), 2 deletions(-) create mode 100644 examples/sample-data/Sample_VR_BAG-gzip.bag diff --git a/examples/sample-data/Sample_VR_BAG-gzip.bag b/examples/sample-data/Sample_VR_BAG-gzip.bag new file mode 100644 index 0000000000000000000000000000000000000000..99430ca884c25fdedb7684f3ad8000f588cbbb9a GIT binary patch literal 40489 zcmeFa1z256vNnta2=49>+%>p6!QFM^F2NxILU4k+YjAf7?hqis-2(xFJA4q}%sFS~ z%-lQQcc15<`)Bd&Rozw9wYuNctJm(@)lGnykO(Xk78KC^@%%Z^Ga!VY$*0rp<3W+~ zzn1bk>-IsB!GeL|A2rn9{Grm_W!J>C@(Awbf@`b|A{^p16l-oNIidI z{G$KALO@KIU-rJis*kOH>Q6n7{p<-00e(>(J8Nr)r~FgD=pXto!ejp> zhj=h#@@Vg0_20C|j%)MZy#6os{~ZWC_21IR<~)sm8^-^$?NgAJ5xO6NpTM6DFFhadm60hECCLBhI$Tq_j=F|x{n2bfyjSq1A{y;o;{ZRD++q|roGoh zB*4FWm){=*Kg&;9!}+an8K@b6{6 z=REWRARu&=-*f*yt@_9cGx~ek?>SHJ3ERZqbN~JH{rl;D9C-R(^+c)ug-HUGKPvp} zA9t)j(?4qc%Kbv%7XrT!_=UhP1b!j#3xQt<{HG$|JZ&ILv2XavTXL9|SCy{(j3C&W zxaDxs`vZ$j^K8E$L<_Se?{gsY3Un;z=_&=OuxWaPrc)JyJ$73j2}zZm9iZz|T!Px52s4?c;P3lxOnV_aYlr7)MxvF{SWWt-Nu?WG{pH*}c9#eB z!gOX!oxyz5(NS7uLzkx!hhyJRB#pTYO|h_N4yeV~lMtxiY|FGB=9JpbNmZr<207{xCC*LCVO*|-Labk?qfo?kd z8BJ($&0zFWaUXlDs<1*)zX+xKM>)4CvC1e=gujq6_XYRi^-dxr%+-+z z{bqltZ~@;BFFpk6rxH=0quBS7xyedH&Yfg*&R8*hAJ6H+l${jYzz-$|wKWgDV%%4s zoVLZ9s%i9R90FO%mE|tRkV;lUv6|x@$9u-j`knymnx^72uSTtU#K@Ah=8 zm2eytVxhH4q`}~MFY`)C0_>fqixi}++vs7gsq#3-DwE@?p}o}8t0 zj$5)MikYY_+xCXqTl>Nx2tG*Cp7QivNUmTWMs4+SrFPP}!7Nh9P(j6F%WPTVYDmJM zMxU0aQFN!8CFFq}r||YzXU7lnNwD3wXlpgw$v=s_g)DDhg0Lv#6*&Pdub>-hrV;7I 
zT@8J;?-uJCJ}$X?Q5h5Grc>=p6Y2<0(PTXqO3YY4+>Pwbp6RF4bKa!tOa&#PKo!?t;H z`c;X^H_9~nbhVu@!m3?*+>s# zQuW()(9|1p#0P&~wcxnI<+BdH0T^?oawa0Tzvx|y|r!spoS-=n5WqaoG zntOui8@qb|$t)?$CT|myby%W`Q7Mc7#|8F5ABPNA|Iz1AFHreV_X+3euX0+F?er+u zRK(4v25j%jP#r)KlVDJENOC4<<5pu{RLyuc^^Sfm1}q1`H#0Ypv2V<}4da!owc5Cr zP@|+(^sCL%XptP64v~~FdF>Ivf(rw1(axySaz^R%WrqS@OuYygE!Y;9HK)a{ zqCEyGj4A2Q6a*cic}Y_}uIgQl^XkXV{(<86tV>Z!y|bRo;RYnMxOV@#VbGVarCG;g z#@LFtIhMg`uyaj)>;1dM83V4LM;qzt4xTLFDF5mN@?fi=}O1V_$j`C z-moFjvg7@*7k+_xUGWO@xh({F>y}pHNCxN)KbOZjKO>19K>Rh^blF9rQT-B-vI1aU zPIBwCJZBmJ;9qc;r`^OxpWSUsu|@=wjn3Aq=w{r!k?wq13Vc#_Qk3E(1`ekqXX|t~ zAa{oAn77fRbhb$JR#aB0n+Kr@2^;ow)bHqt^+puJZ_aT_i< zU0o&3MVtb0NrmqEx`(^3}v(PMUIH371hA4^qP0xfrH&lb;5fJFo%_1*tD{B&tL>S*Q)`g$+n=v*HIY(4A^Fzyw6zN#Rf=i}s6e zbwaPUU=c@bf=Ea&`~o6^t(T<>P`9E~dTgVO4rEEdNM8nb9U9sVZ_@8-ihrA}i?cT; z915elnwaAWL+^KpYb2UnYV0A%R`poT)5E04qE0o69kADOs=A0Ib8%6RR-e2VJ?CClGTlTzAGJo0w27>tM zTYNa(`xzfRfWVKA%N&!3rak$)K=0acU#EM&>^%j0%!7bFI9UJteov6kxDS`O_lez` z{CWJ@?+FB?`zZOJB@cc+?7#E##s5~bKVDz*(Z6T*B!6=3J*usuJ|v`{51{E|CANQen^!(*?)5W zKPvRlJ|uLWW}iOmk)XHC%Nn&<*Mut`Kl+j;#1;*YI8i$wI{af zQ{sUlb3V|*C;OUzl%GAyf9pTGf0So&Kjamk${YSsj>h|t2RyM&o)Qn#jPH+mAd83m zles@he`S9m@C$)o2>gErfqSRTjF}G~!gaKMYNp6@6mj2iKFG>+lb!;L*JMWgP5>vn z?Fpwr5skrYk8eQ(N%8E)v{cZM%!v#LJrg*J%qIsC@1Vxh_+-Vxu+kg-TVyJ-674c& z3jxfA?+xrn~7Y`4z3cUAS2Ystjx)~@wK`7m*L*vK+}^(>opPv%07$ESSAWv z?~aQE%5ILl;H8#s$t*`XOdG&4Z;G=%yDNQykaM;WZNgjh z-p4i8kRK3tS#h=@003xJz5-4|U-KK$SFjS*@(i9e_zvD!u-YkngDT*s`gp$JkTBqe z#a(@29MO3?CLVkUKfDfobnN8z(?#~=i75dljq?+s1H2c1B|yIuC}8$Z!c>!l;++?- z6XaS$jaEeC+4=dDRBRqWCrn04nq!K_X}c2G*mHAOQi+>tC(ZJH?+EGxh zqG%MedMDy7{d<<($wb|V4mERrH2GUcp<-7Tl*F8x&_86%(PT^7^s!Lo z7`M88_P2X@KdSWuVn2QlW!wIdUK@iN+xnd=t$RT*-JfJvRe^4Wv?93q^V!;tOOKTw zl)4Bc&HSgj*FPN8G|v`R(YXnnQk|zmF~N;i?p`_{i9VWsJ<;wk6Jg>Ug&IIBZOorBv#psgft$5?yZ9-(m;mW@T{JiC z3aEY!!KyP?PWc2mM)pTm7CkjqVsKY(3}9~C{4OZ5xzOBtneOUy#L}?go3KVqT}=41 zQ)A8g<3M+$k4M3ApLlq-qyte!^Qz9Bnn$bxo43;68i&dk*aw9fbD`QiJEjg%rXkl9 
zqznF@6uIWB-AE!#tfW=!$vtb4$@}tj#=n4ay1tD^J$${wA5bV(PL%E-V=AyA9y5~y zjGVeURvn5x($+lBwevF461JYTkyEV9G%Y#h!o#TeIE`}Vn|fFZw?J6QVv%xHM=SSx z$!c&yjrk}@05#1dSkqB}Osx!-Vnftbi{|079bP$XAB|NDE*_Vj@x@4WTrRpjN55JR zGkJCG=(^h&m$7gU^9xd)7LId$<>!{Uu-5bNHcoA1E1s8-gZg9|wGhOPsC7k!G5fug z`x{7Dq6W7hpZ2XdoToh^w5+}9QT%QL^=gnY5=F2`rW`b0Rd?huHE2*OT8PMqQ+Qro zR-qtVO@jIIYCQ;yT>YQa0vzD>K9>PA|belH`M4Q3%rtT^0YPw)2rp)ka}QW zZA5*umM|3|3j$*yv6!Hu)-2vdCXplAF_W=D*3U=oGogeBLvNF97mVVKOvMQ$o ztl;kANq54~$q*(+rdR@>&>LPA%_tmNv9M>Ef&|pqZ&y}U=bubHi{yuknvTCC8AVb7 zPhHnAP`UManO;Ph(mCpVb{mv>f4Y5B*(VgMP=y&%)n(JH^;~R{LTepg6KH9@_Cf0a zMFa2RL)Po0t5;a>B)?&!!}P1qYJwQ8kziVveM*0!pYDzTvC?@sI~#Qv_^r$#fj2qc zqr@NL)N|pQPPE|`sIlHA(|Oua!P5H9mw!Wr7I|SB2$s^k*+#PFs*(b)Gc0-CdX6PW1G^HQd1iW>^1H-smAr*CZ$(^+rW<#S!Sqjx)*= z>5F=2rT&KL{tw(rub|u;*j92q-1lGdv@1bsIlJ*Ag%V(op%sHtMT0Dz(3P>EXM*Zu z%DHkRgXp0g^g82^Cy$6@4CR4mQVzDtc1OhMHgMNeLk`tlITUej`8vH)F2$&WgjqEC z7Gec~+e!E7rNkxi=T;keluwNC5yw1rUD?Xj3KhcS4Zhv!bYSgOk$nitzHb-*GweBY{20ZeF$M^gKHyj;Bj4Zn3S0n z2^jNKH72+g%@ zUwddw)>Me2fB=rSH=N%^*tSX&*IjPS*Ee^+yq!9dbmoLLT4=spn3yr0|DGlFQBLL> z-!;^9?9L}8+Yfc`jK&W0=oo+fO}sFWfKrt|tPD0A!O~v?->XF2(&KI5(PGMLzbSwb zNF-68g%*K%Hbx~IIGxbl`y>fJPLskS##h~GO5}&SKUwdP6FDmlzJ!!Y3V7(%Z>iyT{oRC6E+vz}(f#b!r4DWoMa&p#*^a!2{C8bxg03^z4 zaS&!1a{eewK0{9Mo;c~968zF?Dt>c(C%Wdn-9xA5Vb`%Q5;{po>lt6L z)nL?K6!7&5^pIf$uBUE&sCLW^{o#FNz_h-Q1s)wse45SrhG^nk(dDJ$(2F$>g`pSi z677&*UL6q9H6T)Vk5l1{DE7H?&L=g`x;GrnzR4Ps%B|77Dmi|qT+VUrc9~f6?d#VR zGM#Fo_OcrLLNMYePZ3g%EJW9QD!!{ujCz-3MxC97| z8DIAh_wJPb&;q0Qhz(z zLYOCPl6H>s94>7^2WBmm$_ui#&i%q3B43~K`GhO@$}^DUv8U<-`p9Qi-D_og+NYYR201UB3dE2x!+nMRK01HM3 zX*G>6F4-`tJJw_L0Zd8xjb`|KRX>P990weWez*xZ%wgZE^sZ>H=JE{N#BVRGE0f*5 zmOEvoWZL);nl7mZNbT}1pe5wlSl)~ZWqo@eK$taEIa6LXXIZTf?X_2}3XE-qv{4%W za&NHzyMb&cGm1F*3rS4TgyxarVkPz%bDE8tS*=WeyALm*?J3m;{E_CceDqXQL|$a- z2@oO3V;)P5;W#Qm_DW#M%Y&>?jP4%S5B1i5D7ADbdf{As&C*997RYSCThI@6)!A2d zJ;9Q|IZr(L;o~;=w0nu;w-Fl?%KF&WXDyM18BHjpu#{@rh7cIY1Q61Sj2t%a>Px`7 zxr4T?l=$UZp`$-3GAm4&zdq5NOG#)gHn|`JSL~cKnNfoc-J5h9*y^S0kDQrtgwKL1 
zTRA)2dDCLM-%1z%PSRNdKkncn7Y1a42cItk4%;9&*WkNYxJ1N6v$I)U^?UT{l{pxP zHiKhVDBbqWg(W)`kiq?6v{G2WbyGQfs?9z^EJ`)eO(sSQP{o{VhVyh&L_eH2J%X@T zCn_gs+@@c4IRU><4?K#ASmbB~Bg?_5fOxp*4^-QiPTJgc3N~V9hKo1|1U}vd)#Z?q z5$Gu9U`pxB(9hc#zve51)DlrfKcBVPOCcI|Qj&>PF{>SYl_=VP6-(su1&Wq0$aD7K zoj-u>(=iaJyfscZyt3)5R3J2e>Ccxa_z6o4pD@u5tt^T` zLGa-4Ac^Y+78v)a4u=~rE&&}b<*qGc2Y1yOs*e($>nl7P+ai-3{BU8H$ zaB2_$gq){$M1-u$c^C0N|TCdqrngtdE z5vUeVwgYEoGmXq~neKh8u_LrHX(QeU<~?J+aY^c8&l){R^bzl6H# zXmHY%9o2~aew5-X0dRTQWqlRD_7-8JF+~dV+M=U#yl8Y9c#~E#K7^vROISs^`=J)t zR{y(wrcr~iEL<#i!cAw~v_hk>)dw#I6~E zjl-ApvS%{d7vW}{hxvx=E7kO>eoe+>>Ojj>QL~YB>XlI>Q_gW&?W<o?@o}Twup1V8CC8q&E^hP z=BBGy&C1iEQQ);_R(iZ!p!!6n!1r?{xw|ikcV9vLxVKT0`i-JU+TX|5cKXjyeOt_t z`VeuFbg`#GPLMY>gZE~ydE28&o3|rO?1al>gY$?iQHxQTu}GFaOquk#p?A|{Bbvb2 zmnoz*gE#F<^a$$m-|Key@?o8WL-^8T+)y+9GALQVN~dtm3n99-TqhzNvF2n_c5Sy- zB@*;$qE}wpfS?ba#gv7xihrpas63Of?L|$_f0d~pX4p{S%n;V59G4av-6P&JfKY}5 zp;7GoDo;S3Xy)v7S$I=;MWQ1~!VU=k)j+&=G!c6vO810v4 zT3pu%?C~6`c7ZkKUsf50eNba;4*_Ajd9SEi!pQt^3~;uc=%PO!H9nK1;tZu%cp%MgzdVLz*#%&B|F7oNdvG zP_Ll`xJ}O6N$+xR%qC_L2)#(h;awG8AM!EmRGVI8M4%wce(^mmAgb;L&pt_FF?@CP zL)Ze}uG)GADivxc(r0jc?-~@@=iYJv4!Nu|xZF2UpJ{q@cn7&EM>)Vu2-6d(G*DiR zY-UtK4qKX!0TOmUkCGzR=L=@5l){*h#ijs@7G>?hIMiQTE!Z?!XSD*>6a*vU6&g~b z^8$-;z>DC5`*RvgsuwHTLswPoyeU!yLRLjhhwU>|ygwEdv4b*F+bTGl#pM|G1xB=e z^i$n>IZX*^8IS_`Zde=}4009Px8fQ6lwp#ZVm@<^>wb;-Un4WB`1av;udeSFp#3_TW z;ICk-wLf^827f4FfW#Ba=%|z2idQ6=d8t?SIFU={50w+djZq-Mbvk7r65CWI;HFY- zXDtwrO{~63rc@Bj02c7nK>>#_V^*5~geV2S3Lh<^Kv7dXzewk8v*t8R8JtO8n*hQ{ zbs3{@`Re^M^>VE9(mp{rme9kSZacD76V=OYTAfn3Wwz(WAoKNtH|+{VTCG+X!9g0D z-MMy4N(h}3+wmcp%-*3_d>2*v`r@da?+g|`7Ry-Yo#8gZEZ14NX-r(5Oj0^gh0trU zG>gh9%kYmlu$a~B?XBEF7hf-?RZr!K=ManFQbpo zGBg-bNOyRQI91W&bsHyv>a5+DoeU-Uu~)?muhE1}blLQILDEq;MaZVuXC{oHtq)D) z;f$TP-{H ziP9M0ugaA*7()hpIU@uEE)ni$28K$&s|uCJs-$yvB;R(6i69kD=LL=n1pu68C`2O#U(R|F^P+7Mfk~5($V9|w z9c@(?15;BzDn2$;PhU-6nnGy4;{^}fr>|3EBM^npX@S?%6F!E0hx1UR+eBYQkN(8g zW8n|VeHdXcp&&nJhcpIZF2=^e*#PcaTQ7?OCY`XAFw~Pa#dEj4U9cE2v^m8S 
zMFhT8QC(JjIVvtzV2v3HNRC7QF6?A2QpOyF0ikox6G}$=w0Jq)PyMq`+O~&S&Nd$_ zGHK>w&7M_^)@_tGgd}{^;9e8GiwgoM6mm(0_iWav8YgLfW_QXF(qW@Qy6jqJ)0Z~P zD+-_1ylxA4`J7A3cjkN8Ts#3yuHnLNwaE=8<{heC+}N`2uiR^L!)geHfp2LJEPbus zyHxwWGzvrTv|2)TZ(FcPe^q;>+Jr3u)q1l+1V=7`<~~(P0EKdNpaS4JX6d@LJg?^o zB)4G5erx`EQv+)-sPDkRAF_X z3ULwRLfIhHO*-wfL3^_u8Q{#NW#WP>12j)cg{x4&$Vsx^^>XRF%?DFCif)Y%yEa89 zr~Af9gWZ|WoIg4xm{om(L11QQJX2zm=!b_)4+|81=m%38EYr|{X=)>+ zSE*qhg^gi>$*_H)7xi?$EcBSicp*6+CfRW19Q(@T9N$PTIvomzx`JF%;B+DLKU9t@ z{yYe>5WVJWQGhc}+Tm4bbHh9cFM<2kflm@KyE-N@fH} zflDMxVy2;fx4G>!;GL$SE`pcMP_!Ykw^x94jU)LOa&mdleW~}|(xk`=J*xfL%I9H? zdaa?s4aS2*=8m&1I?)s3ZNGFU-Ep7LUB6u1qR*Y_*qZpHX7p0;lfGB*&`u!TdMdR5 zU1A#Ed5t~p-nn;U$u-sTyp+adP+?mR{WFMdjuT4LM#*-4qzJJYua+|wbAwZ&{UkRT z>7gcJ0_ibjWc*k`e3NjH#w~jxsydm^3`2@WunBx*+tpf@>8m78678dsqYUO|J}a!ZqFdc|KuLTLmaWt<2Kv zNy#(<0Kh2rVJ0xts9tY;Fid?+Pq(abYnDdTQHdQml8KD6ND_}ZA`&tN&5r$$xd3+3r zSN$hajcwTAS^Y^!vhwp8*nHgY0`)=$dzoSO0-}eU`?HA$i!z$|?5em^=9~)x6SaNP z9G#8d#U6gzHejRh-rQIShUp51t7xujjC>6!%~_rO?A%VhLT(v|=785rqW~ae(qhu2 z85Lv~OEv7NvO8^mO9tF`mJ0zPRV!2yqGTzZ*8jq-k&zbK5B5uh&JYlSyunz6B|g(P zKKBKM1;2uc8i)P3>>9nZ4>U1jKWKwX99v~VoZNc=XlVi*R_Rys*`mj_eI}hzIIO#r zEXq=?+Fv~_>BJ!}6qv}a!zb{k6)*7JTD8s_ECsPnZa~21JyxBk`v)XzDn?|62P-K* zQj5J~^x$6MJ`9u$l(1Jglt|N$-vr?k24)W+F+CTqlMM$^q?LVZV5etlWNRdDH9Qn-bM2bfIl{mF{<%&KASf(z@=euR#`VCyQPvJm(zimJ85HIo+4)pZ9 z+uy>0OnwFmp@9S4g*O2K*;zY1sr*xVAI0(zDMJ8cVEyo$|3m(-i~YWgA;8ArK9b=1 zU#tI={G(-@j#dT$I|n^eD+iaKIxzo+4F@|t19Ke%Ys1I5hWonxv*TX=5Yq4*$imd# zLB}3oX9}?YSp?~y6}*!n+%@vyb|Cz}2)BB=b@y9V+~2aF$DhNkV1W33@_~U+e=f{b zyl=0ml`+AqyUG*10$3UUg#YKF)2I7*U*OMS8~?$}ap{D9BJkT%+@H<-Gfq#-agCK9 zluIAT-T5E)h`(A!`WQv^Tj@V1jHUm`d3sjeF5cq|_F9d!e z@SlpnZ_7x122!RlY;UUG?i;Hmmros8a3?J&rCSv^uV%pl(VHR1I8R^ReZ%(xXET`$ z3%-RXV*lX(0tLSxVDn}X+zo-&u#HBv*dt8UCJ}Q$+T$|RW|?&?3Ze!FNWRNY{XZ3* zU-kZlz%K-TA@B=rk_W#-OkM6g<)18Cb&I(}nP>sJid;h}FKezGVD}VSk;4ci^vHy{cdu_10 zZ5Z$S-ui;X<|LSt%CqFH`;o}t_`{h#&{_NNN zD?jbehvuo!KcM0N!cTkOo?pi=1b!j#3xQt<{6gRt0>2RWg~0zF1b(-j_k`L*v+k!I 
z_n)6%{r(nLf%r47!qU`A=g;$$`(nSJPu%C9=53hwnY-if?C3fFn|8#0+Pj~x-EU8H zU!UL2J${>V{nhvSr+Rf+J_L;XZF{0edw2Evzq7~pTm9}gi2oB_50jERjsHu${u_4w zDPF(X`G17hf7{;uHv0GXMC|d#{CWHPr~8!y_Y>CN`qfh&6v+Q45AAM#bU){PinDsq z{_FEUYVXgoI(N}lb`C!a{;T=-KeF*OZ(~n*xHj8k1MdIL&VBOdy!?I({>N?j|Eu%C z-#w?XCJ(0X&E3QB)8wD$gAXb|K!B%hyB-;T`^N(hjGu8~cX3iW_d#BFtUt$^z1Fk4 zdmMCrEB2VT0~ncF0q)!t4i6z=K#%-CoBQp4kv~=FFX&&*U(Fu3d-{uFK*M%_l>E8B z`~>HZD!+2S5cq|_F9d!e@C$)o2>e3e7Xtsm2;A)qkfh`g^{u*?u$3f4GSY8xceLk{ zq?UR2*=ARREf$uO(}bT$ghZWNHsyuL+vjkt0^S-wJ|Mh3EJ?Jt9bz-sYdTRk-{Y31 zO$iTg;9}WuZ#kFDSIx}!2}G3>LKH^1KV97hlliJ-Vx7e{pbDhE)CRs);ATDgV81ip zy8K}^{6xcG--p$g?6Mf8#^`m$ON-+R+!O9GH1bvaPZuuaZm&jN5tMVI&Y6NYq5=pKD`s z4qW;w&|NT5jY_RWmtII(LgO|SnKdD@A{Nk+*+F6zf<~3O+stX$AP+MEhUi7->U?C zikO_2X8aX8*n@f_hR{hbj%Fjgapr7XYKYF5_jZh1pLC1eNsZw)B!I*7b{`3(VY{}( z2*WRFtX$s;$I7DwU^k#d%~7~iIUFlTp(0hX4Oz=1BDD}Kbe1H?@OnT?kxud8)dIxX zxUe{C#K>fEnw1Hby9gy)(T{fCx%SzSPw*d`IxpzC@i*#Ai_25iWu&j1RlBoxgyQQt z@Jv7Wh8q%d^s4Ws(IK{!gtnD5((R;1o>yj;)#(a=EI6h%3gfTp=6DU`-#)g^aZ}do zb52R4QEYGEZb@SuEhe-@37dMBrT;^aWv4_x9V>hNi|D?MrhJ)VAMBUyeEsbB;hS%G zN(Gth>t^cgWR8>2{ZY+%nPdEmEK_mswO4}Z5+4(2FmmSw1nDl* zHUh(ZM;rx*g&0e@az1hl5OE}m-eh&_r(iX-FR#C`bE}1|p16j#EDLlS;&Q$@`z+bb zP^b6&i|NOf4c{S^{t77r`W1ga>j|y5!>}wHP7R?Ma}EAPf#exar!y7Xrh;H-G<@W{ zn;l~rWvd{fRCXkase=je{B~9M!pi>^e?zH2?fLe#qTF}Ko}OZ*jU5^SI77BHpP`ifgs!D1S5fY1Ql|5J6V z=0Wjs%hh>DfEY2e!ODt^7|HKcpk8^jRyRV)|*y~s_a-c71lWJlHJSLuitXNQr>8u zpj{YJoeQM;_Ml2|w9L1~mp~asUHCI0qh1kzW1`jiz(l-)TYVDA=U&5!Gg z6|R6@9(e*@wCzVHmibVX9cG8k=x7&`05a2{goOxd&mP?UnQ^F zQ_Wd-^4C+K0{s2u%jW%^XYq=b-$I-A1jsTL&QS#yr)Rls+XpWc=@u>E6II1|d(F89 zf4;WE)A)hPs~u^VA&j}i#O)iD5{1a!W;k#y>mMQkgde{@;U!=LQ*VbP5LN?eWeIw z{n~Kcp>L(;iEm3zw&bW(WU8j#nv)${XwZQ~Q*GDrn~E&Za7bV2`#8DaGBx^h^O~y_ z8A3^aAEkHNc;_{J-O=7z$W=xLoZdj)$R!l;UB<|0uPkj_lgUY#jh=9%<8__Lq*;P$ zeK-=l5mp_!NSL$=j6-LAT7p?xNeyx!nJeo;hH$~cQQ_AyTt}mA21px zGuGB`mTqmrLg>ueoWAxIx0u=1*F@V3D5)Y*oO4z2&GfdNn_HHqFeq3m8dwzv zz|K?Am*fVlYl9y#V5;RO;|_9Q)DdaC??nVp`Cce;xeLAvU7GHvJnUa-gHxPsyt;1# 
zlUic11h&mPYw#g9dAmLul__-E`GtaC5Q(LGjf{^*e<7Fqe4r6NP2qf(@3$c8!gH2q z*u`-!O@&C9%|@Z;W5Mv3Qb~!rZKKPiL!6YNRdXV&SovM;TOzM(W@K%B-b$9wI-g4% zOS8V9$Un=WUjaF0ycWfcv#7a(aRgh-e>h`e}#0~Y1(FD zrXxq;)h9UOXfP=1UIsDGTeh%+8(JQ=(gFf(pghvDl%fH`?Sdh#c0geIDKG9Ce-7sV%tx%zc?KaoUslL zzk$9$IJN)7&6k1714_k{mlf{5-*|Tr z52pu1OEjQ16kl4;9qC@2$l`HH6k4J7i5h6@&!M7+(FC;K@YC`nnetNaL!+OQ3Z@i_5TW>rQS2`FfT48~r&5EK~!^kA*V})B-*u;rJ z8~+Yn-hu6db&d@3q1W`voD019GcUp6Vmw+$k(G>r!8XB-aJ8Yq4&YdV$oB_FeHg*T zxDp2F5Ehr3-*vitT)9cuF-IQ#Z zKO?*QCd30;(d25RkC6-|j(u9fEdm(#7_GSZ?9`1p4_#X6m~+|K1w%utG3>KDe9&Oa z8*WK16y)NSuaYk>sq5aRqSx0oE8}=d!UG~j^vE&r`?k8+2DtW?8AU<1B+qq&Y}3;f zb3wa`sdlUG0o2Nc-<+2P432 zRp|!D-r9d8w>p7>t z(Y%E$mVsYcVsX+NI4G5W%{V0UTI@V#Q{ z1l;$q$gK)`6zgVo3{`fh!EAQ*mRT_Kgygr}w<~o8CMZ%n*$bT-<|s2V{q2F?VxHKE zKD*V=!G}+fR;u4sOh>q1RZ%3#r1FZqzO?D;!CN#D!4Qq{43Lp*XS-RWqITZWOMb0l zYv=b~<&tbgC@V46SgSMc#%da0yY-o$-S-Gpa;$d;y;ZV`8f3?FjTq!59Or_H)THqC z@;UbHh`WmeV7L3Tu%RJF6dIi2x01|SxX@)ja$jgq&|IZnh<3#wdWD89h9oU#Xs$dD zRXak`Y9u~$7DJP-T7rQYZcPbY`eITJ@usH`Im?@%K~brt|-$r@PNz^^l&2k{i%=7V4{a|ONiTLJxd}JUr*l5;Vn@0$as3* z6re@=_?X@O51SE-B`tx>&z1YGe6jTqXXSYC zVaJ6^p&Es!N|t6L6GM0)+FU;U02)*G9mBhPKC|&||I=r5o82S&n`>Of0sF(NLqike z26n;Z;00~JgWWF$KjP8*kTWC7$h$iQ{pcy+wPr`zjuv)oO|@VY%y!Y8s5(kN>Yx&` zM!1#t2f?BCq%#!+EG1^B)#)fPbmMh43HAllI}u~*v}$N_BKLj^l7w#23&(l+G07wa zrS+=W<#`-ttG<%9#Dw?~we}ezFn3%0`YE2Zvfut(v?zwtSi2gmwZP19J6vEx4`V(1 ztStmKR$YAaI+A{JI*uzD7QCw;qpr-CmYKT^9#!#9;86;ky68_g5|+zUV8|?rXtR#} zcZ_cda6)?hwH!O1+cSq{DWZG?WY!VY1aG$R2KuE^LM{x+^L!~Gw;L8_WIM+`@Csbj zsi&J<4qe<$2NScxZXJQJ-*>TO%XM^p8`VJ{W95Z^P^_19vPprpcVOeyDmeRN-^P4w zJFa4QP;dh2jjL74mgoM$(WEC^%$)J@aSIPm%=rvQ5KDyEawp*uD{1Dr)JV`IO&FOC2TTzqVKoKnHDw|K`7P5#|l|D~@TYJOqw-LjPDwV{L zRNYW?R?6bn^70V5A4@!C#ymg>lYQvO$AsB?Hc3O~RSS*F*FxIzdZ7xVPCI?SD5t&I zwT^6^j5q)d6=hS%YIDdu5y0d{H{JmMpcbOL!BBgX)6!L5Z+pMvl0&UddTTCyHHy;+ zxoqD)?n}9WExm3?_#3G(Jr-nD*BQe7R|g-PD#Ir2d1HLuzedJ(>nh(t@c(wK?{f0W zxU5K&TH)4$lrp#Thi+vF2E$!7kE}_%6hoJme&pzX!Z%E5i21xBWWO3Np?`8@LW{4V 
zRI0@cFXTbC?v@cZ-yFR8nLLnc107v2l9=7r3vz^;(R-!eH7K)1s5Hon)inv;ILe7s zdIE04sUI&>M+`&M>&B}E{$-{LkyK>z4V)Ipj3W;vI8UfC|CH6Te1)^LIvN2=OGlaX z=jRL+1Tj;ft^r4AwlKAm5{4E*fNXP`&Zv{Jt9@DYeIi1?_c4`-`<@J3Gjl4;gi~DW zZe@gTG_M+COs>fjJQ}{BAa4jA4jn-03yYv^Ske+VS4CdX6hN{69SetOx-fX7 z!^7M+-Gxgh8jHlJDQIQ20axgn&~{eyv(mHKV899)aK=HBDr@1m+*xjSsoXI*(cQCq z({$bO5uBlrw<4V_pO`MXn5s|m4lr>5ViR$~!tqDygjoOe@q8tr4N$p(A~ogNz5v4s z*moxQ2))fkJn246-eh#^D-~YPM5_k7qUEtX4`I8fby-A@LpmE=&F0Nqpjl!+#+ml; zxlqP)t?@|1$sTm)F&B!<|Cm7jwmGKU>Xyr^7p*XK<*lC~uE4%<74g2DOknc-n#no; zUFk+=jWgDZmGT`xM`bgk)n!#e-XfHdwzASl89zOlg*gG~98AU@U1Dd%Dglpn)6!mZ zi^+B2(oqa8vqo zOcJIIW4$#VFlcp8k$4q5=~GY0p{NQuk6tD!Ux<=Kj6waksssb#v6yG;MVSE{MUJk#%+^*JZbTn0_JE6@Xm6xJP5K-OC<_ zj6MK?E*a$BqRVqyK$DOQUe>t_xV1_}s0!t2WeqOMS#i;Nv3`@j);Niw$W@{@WjbP_ zqt(vePCP;(Hh97^L|hY)01R)n_yZGlyMvc+W||UfOQ$^%OhQ}ng@I_gcj~D1%9qz* zuw)DRqblsz^J|1c`C~B@>~U*QdgteK47GsgB}X3qp0NPMJi9b)f)(0Sq~(u{aWkZ` zD432uQ*OG1T>Jd)2lkHXVb3!%+XT|DsQmLGZ%?l;qzaA-$EE{h=p2|mU_?Laam-6Y zi&ogH?{0r@s{w(n>2Lq+7~pqC18m9j@nfL_Bnw(z9rrSlXG3v@&eXF?vrPSNFi6#0 zr3g3c%*2)mP|C)ResZv?6P3*aAMa#vuC~UtQ5*^@15>N#bYC0a!_EkpxbNe!Q2G6c z<##DPi7ETu0Y90DH&8B=={Cl1OL0YtCM|K{q3Aw`DaEiI(JEARY{M0ifDn*ge?qBQG|*e96RVUfZ}#G0ZS@k@LUYq0@npFww3hr>o>7LnOjKt_}ACXvy~n-&-C ze^1UI>=LXD3UC>qIfH9hfe0{u&HJWiMt&6h%}T98a(;lvZtAqP1*<|bcz zh$xF0je5_L!qu#tUT1`?#Um_JSu_QDc6v~c+@5$ePxLja-uR9BP-JL9wmN?BYDbu* z9m~2i3>t$}8IsN{eMTeW^~}!H)&aWHXYW|Dd9ElRE>p2E*~UOp^+e#zEI%FCfND@M zW6X?x;t^p|Bs-E%t!3Cq45i|IXJC*P`W81CCk?}}+&y22#b+d!f2jOiC>Ek;6&A~nH`AVr#(fK)*VqJkjJ&_YR&9s-df0t!)jHAt@mhy;F+cqW-84xjz|nwU0&p-_9@y=(&h_lfM~-*qE+-(iUGmH;qgX3m z#$L#-NMfdP8L)zsVHu@MwpK_OmUajscXz9SzZZ`4dSmUog~Bg zfW3NrGc~x?Pj|g$ttQH}Rqv$quddM1AF-P&-;O>`s+R-owFHN!7F8jd120Y$SG^hx zp5l-a)g{x|d)s;wK!;zg>O73VX4u+G^<-9&xiV2k<2G;PUb@e!pWHUn>E2uyUV!UE zGIWpC#Ln0b^&V9Sc*M8yd@?}fF5t<^MDMS%Sqq-NJ&s~t{RqbVA$Clu@5J&0XlrP9 z#@JrkSy|-EGV?Q{h=LeY_d$fcM`F-^Uz$6?;617$A=iEd_u~sY1Wq-76%GmN) zwh+4i_1!yoe>eC=&c)?{+H;YuF1m3!F1p$J 
z9ML^6TK2pi!LndUot0IgQ4GD+;)}0rt8%{{^j22D>Y+kF#;2QTeWS^OdB}9i`cVw;!jnuzR;n8D|{$LxI27BeQJx%v~6VIEz@SyxYfes_7}GAD;%bLoJ3|q3$TahXVVMsTGi0e zIf^v9F*N;Vsk3h=HMrc+MoXh%Bj4I4+qy=GL!vT_j!{9js22i9DLSDy(K)IOhxo3t zLNPCyM3=QA9`c_+IBPL=bQXFrvaz#$n01gaUpUOeELM<)V&WMecQM~NlUoEfShg?` zi#^LZG4>ouRun8TfkT&^BsEXz7ru*e0wKiPyZHI6IP{lSPlrib1N_5_%tPur#`Kd3 zgKsn)MC5B6NW>2Lxm694P6Gct7@aBd#@CJ_2C|ZTzj^3*Vi@Fg^BsZyxA@!RZT!q^ zGC*pn+?-QILeQ;`-1wbJJguRy=slpjyAP-|5oq%{z3vv)qrw_Uv?@wjOf@Xnt_Y7f zVI>OAn_9FelGt;IaL;@!e2p)e0%$1el6V+QuPE3@=CN%29cQOnt#96|%a-x(Wmi$r z)7t@l=?%x|^NUY3$5Y(Gvn{S!c*}c}L|B~OHs#QI+_dFiGt-qfGRi@d#~-O_@PAYT$~eHeBMOd z%K_l>OmPaNoaIu?tSs*hzgCQ9SZ7jFxCP5`2 zjoZyqf^LRzf5Yc+;c1zn39GXL%-EGO$!kFv`sv)}&tIa@y^tu2N`Vf?b)J^%@U+0~ zE6*vc*mPDF{%*-!HwZUcHz72j)nw05--NnidxY=U&&f&Dktlvb+%vAmDGCzhbNm2gKH9;AY%=gk@|(g0`LSr zSou}e1v6pTYbF}|yj}}KD2@G%&vpY!R5V8aSJh$tk*uVqMSB;oZGKKO;|~UA!!(7= zc3VX+k1)BFZ`qgI+ZeRAM~P$0B+o@Vr-y48@Wme!Z^Nr>Mebv4pEgd z$+md3YGEEVLzSp1ku7(~)#FeS^pxerI^ZAs1=iolOzw-Un~-zN7{F$OD@F_?T0)zH z;1%P;#M*J;+yubUCS!wl=aRi~?ETSqG0Q-xBYo=y6e+&OB+MyU3cJDprBh#Zc^-Xi zRGwSbVu;Hob4Rg9!~eG{bgF7wE~~jI>)C30Mj2W95#~bneis}*JV+W@L^9rkuvfb{ z>+UDc?ubQY8JpJcF{Y7Mjd|^?E6c16&Q|q*SvQD-N%zaz1{~@QOMaS)ZUX6feYM&_ zEJ9H;Lb+)?pC6VM1~vM%>!Xu;WI(Wxp|V>qDCIP8ztrDneIT}djcIbJiP7MK_e9@_1zAtAGk%3>rhfZ*n^B;57?=MRK?`csB6g4 z>~kQ!#wQM|X#%G5tGl(x^Y5$Z;t7Bg=KDw0CwiM<8upsRFPEj<()qtVceA1-7rRx~>)C zX;NnkG>5_cIZ?1EsLrA%w>+wT$dwAC+OTyf?WFQ~+~)*(Kg8XzzO)0GKfNuvyfpe= zWVWKK{idzsX(Mv`FSoh?C7_dPc= zL9l1l#p{k9D%S@%F3}q~%~S+Ec(zp|)6o#>ddB?;=8g-c(0jwVV~~we;eo;C`8Qz{ zGymeroi6th;J~#|uanJLCUn6$)!3X#&a*W$&3Bt77t#mu25_Sugck;+jwSgKKJ)rl z>R)7o_BSoHcGA@?#k3_MV3KIq!Z=*ltAa~IH`6YCyv}AXQs0ys5`uEKJkPSo(%zav^0{2D~3tv%KSK8fmbhtKi)!#!7F3 zr8X$%B(pT@b^y|KF{0q&MWGb&5XqdLNAr*ib*o@Y)!| z3fp4!_0+)N$%QtXy`3W8LHw!4E7osAPJT=uyle_@n7O(#UG2DEw6QHP9!Qy-e7Va} z8b%=nj-pWQ?wUgKYF_2w<^kMp!y9p|sA+kkdn`|D+@gFS%WVt8u~9_Be0W;@@J+-b zCv4=+Ht+7ybF10Hm>Jd8o-9(j^Ib`>+AC|fkQuTKcsWr|Fn^6-rE4GYbt=<8c01rj 
z?S(*dHj1&NH%~`vyW*5;qLWo}oAUgagsN1#N-BtX(eV}2v<-8$4%obpM%ozU9sEcN zqxdn7@!<)}%szdrr)VXi|Ao;W%Yy`tk=T zRLp^DBiY=?jneok>J?;+nD_{xlYci>l_Yb$XOT>UT zILkBYWNg-fgl+}I$#uXA{&auh3TX5xne15Z7wJ*v|BYILBZ!v9vH zNe=klVcIoBl@SEg43INivpB0*y!a`)q&5`Rf zrB*-wHr?&NkCA=){o@y4^+##{AXpBJ2w$m+KPZ;J$gX355?OzzaDGI|985U;PHI?O zX|6pu|BsvlLhKJ~^1=O|D^-)nM^^Zk1~I6m0^3wV?mkN^Mx literal 0 HcmV?d00001 diff --git a/tests/test_vr_bag.cpp b/tests/test_vr_bag.cpp index e6513c9e09..0cdec4e884 100644 --- a/tests/test_vr_bag.cpp +++ b/tests/test_vr_bag.cpp @@ -15,7 +15,7 @@ using BAG::VRMetadataDescriptor; // Test basic reading of an existing VR BAG from https://github.com/OSGeo/gdal/blob/master/autotest/gdrivers/data/bag/test_vr.bag -TEST_CASE("test VR BAG reading", "[dataset][open][VR]") +TEST_CASE("test VR BAG reading GDAL", "[dataset][open][VR][GDAL]") { const std::string bagFileName{std::string{std::getenv("BAG_SAMPLES_PATH")} + "/test_vr.bag"}; @@ -47,4 +47,39 @@ TEST_CASE("test VR BAG reading", "[dataset][open][VR]") auto vrRefDescDims = vrRefDesc->getDims(); CHECK(std::get<0>(vrRefDescDims) == 1); CHECK(std::get<1>(vrRefDescDims) == 556); -} \ No newline at end of file +} + +// Test basic reading of an existing VR BAG from the National Bathymetric Source archive (https://www.nauticalcharts.noaa.gov/learn/nbs.html) +TEST_CASE("test VR BAG reading NBS", "[dataset][open][VR][NBS]") +{ + const std::string bagFileName{std::string{std::getenv("BAG_SAMPLES_PATH")} + + "/Sample_VR_BAG-gzip.bag"}; + + const size_t kNumExpectedLayers = 4; + const auto dataset = Dataset::open(bagFileName, BAG_OPEN_READONLY); + REQUIRE(dataset); + + CHECK(dataset->getLayerTypes().size() == kNumExpectedLayers); + + const uint32_t kExpectedRows = 4; + const uint32_t kExpectedCols = 4; + CHECK(dataset->getDescriptor().getVersion() == "1.6.0"); + auto dims = dataset->getDescriptor().getDims(); + CHECK(std::get<0>(dims) == kExpectedRows); + CHECK(std::get<1>(dims) == kExpectedCols); + + auto vrMeta = dataset->getVRMetadata(); 
+ REQUIRE(vrMeta); + const auto vrMetaDesc = vrMeta->getDescriptor(); + auto vrMetaDescDims = vrMetaDesc->getDims(); + // VR metadata descriptor dims should be the same as BAG dataset dims... + CHECK(std::get<0>(vrMetaDescDims) == kExpectedRows); + CHECK(std::get<1>(vrMetaDescDims) == kExpectedCols); + + auto vrRef = dataset->getVRRefinements(); + REQUIRE(vrRef); + const auto vrRefDesc = vrRef->getDescriptor(); + auto vrRefDescDims = vrRefDesc->getDims(); + CHECK(std::get<0>(vrRefDescDims) == 1); + CHECK(std::get<1>(vrRefDescDims) == 3750); +}