From aa80f5499649b01d572234362184798a7963b453 Mon Sep 17 00:00:00 2001 From: t-bltg Date: Tue, 5 Jul 2022 15:00:37 +0200 Subject: [PATCH 1/6] add formatter action --- .JuliaFormatter.toml | 10 ++++++++ .github/workflows/CI.yml | 12 ++++----- .github/workflows/Format-check.yml | 39 ++++++++++++++++++++++++++++++ 3 files changed, 55 insertions(+), 6 deletions(-) create mode 100644 .JuliaFormatter.toml create mode 100644 .github/workflows/Format-check.yml diff --git a/.JuliaFormatter.toml b/.JuliaFormatter.toml new file mode 100644 index 000000000..91f762908 --- /dev/null +++ b/.JuliaFormatter.toml @@ -0,0 +1,10 @@ +style = "blue" +annotate_untyped_fields_with_any = false +short_to_long_function_def = false +trailing_comma = "nothing" +always_use_return = false +import_to_using = false +align_struct_field = true +align_conditional = true +align_assignment = true +align_pair_arrow = true diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index ce56392d6..3b10bac87 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -28,7 +28,7 @@ jobs: sudo apt-get install mpich libhdf5-mpich-dev echo "JULIA_HDF5_PATH=/usr/lib/x86_64-linux-gnu/hdf5/mpich/" >> $GITHUB_ENV echo "JULIA_MPI_BINARY=system" >> $GITHUB_ENV - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - uses: julia-actions/setup-julia@latest with: version: ${{ matrix.version }} @@ -68,7 +68,7 @@ jobs: - version: '1.3' os: windows-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - uses: julia-actions/setup-julia@latest with: version: ${{ matrix.version }} @@ -78,7 +78,7 @@ jobs: env: JULIA_DEBUG: Main - uses: julia-actions/julia-processcoverage@latest - - uses: codecov/codecov-action@v1 + - uses: codecov/codecov-action@v3 with: file: lcov.info @@ -99,14 +99,14 @@ jobs: - {user: JuliaIO, repo: MAT.jl} - {user: JuliaIO, repo: JLD.jl} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - uses: julia-actions/setup-julia@latest with: version: ${{ matrix.version }} arch: ${{ matrix.arch }} - uses: julia-actions/julia-buildpkg@latest - name: Clone ${{ matrix.package.repo }} - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: repository: ${{ matrix.package.user }}/${{ matrix.package.repo }} path: downstream @@ -131,7 +131,7 @@ jobs: docs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - uses: julia-actions/julia-buildpkg@latest - uses: julia-actions/julia-docdeploy@latest env: diff --git a/.github/workflows/Format-check.yml b/.github/workflows/Format-check.yml new file mode 100644 index 000000000..c78049d32 --- /dev/null +++ b/.github/workflows/Format-check.yml @@ -0,0 +1,39 @@ +name: format + +on: + push: + branches: + - 'master' + - 'release-' + tags: '*' + pull_request: + +jobs: + check: + runs-on: ${{ matrix.os }} + strategy: + matrix: + julia-version: ['1'] + julia-arch: [x64] + os: [ubuntu-latest] + steps: + - uses: julia-actions/setup-julia@latest + with: + version: ${{ matrix.julia-version }} + + - uses: actions/checkout@v3 + - name: Install JuliaFormatter and format + run: | + julia -e 'using Pkg; Pkg.add(PackageSpec(name="JuliaFormatter"))' + julia -e 'using JuliaFormatter; format(["src", "test", "deps", "filters", "gen"], verbose=true)' + - name: Format check + run: | + julia -e ' + out = Cmd(`git diff --name-only`) |> read |> String + if out == "" + exit(0) + else + @error "Some files have not been formatted !!!" 
+ write(stdout, out) + exit(1) + end' From bbd7fa8fe857176afa3bd21d811dd047d075fc87 Mon Sep 17 00:00:00 2001 From: t-bltg Date: Fri, 5 Aug 2022 23:11:16 +0200 Subject: [PATCH 2/6] turn off formatting for auto-generated files --- gen/api_defs.jl | 1 + gen/gen_wrappers.jl | 1 + src/api/functions.jl | 1 + 3 files changed, 3 insertions(+) diff --git a/gen/api_defs.jl b/gen/api_defs.jl index 36b1a2c07..4bff4b1e5 100644 --- a/gen/api_defs.jl +++ b/gen/api_defs.jl @@ -1,3 +1,4 @@ +#! format: off # The `@bind` macro is used to automatically generate Julia bindings to the low-level # HDF5 library functions. # diff --git a/gen/gen_wrappers.jl b/gen/gen_wrappers.jl index ebc8c7828..11918e82c 100644 --- a/gen/gen_wrappers.jl +++ b/gen/gen_wrappers.jl @@ -22,6 +22,7 @@ append!(bound_api["H5T"], # Now dump the text representation to disk open(joinpath(@__DIR__, "..", "src", "api", "functions.jl"), "w") do fid println(fid, """ +#! format: off # This file is autogenerated by HDF5.jl's `gen/gen_wrappers.jl` and should not be editted. # # To add new bindings, define the binding in `gen/api_defs.jl`, re-run diff --git a/src/api/functions.jl b/src/api/functions.jl index 63f7b56fe..f52c3fc9d 100644 --- a/src/api/functions.jl +++ b/src/api/functions.jl @@ -1,3 +1,4 @@ +#! format: off # This file is autogenerated by HDF5.jl's `gen/gen_wrappers.jl` and should not be editted. # # To add new bindings, define the binding in `gen/api_defs.jl`, re-run From da7eaef91da225b52bc25f0eaebb292b24077fa7 Mon Sep 17 00:00:00 2001 From: t-bltg Date: Fri, 5 Aug 2022 23:53:57 +0200 Subject: [PATCH 3/6] run initial format(["src", "test", "deps", "filters", "gen"]) x2 --- deps/build.jl | 9 +- filters/H5Zbitshuffle/src/H5Zbitshuffle.jl | 276 ++- filters/H5Zblosc/src/H5Zblosc.jl | 93 +- filters/H5Zbzip2/src/H5Zbzip2.jl | 183 +- filters/H5Zlz4/src/H5Zlz4.jl | 272 ++- filters/H5Zzstd/src/H5Zzstd.jl | 116 +- gen/bind_generator.jl | 57 +- gen/gen_wrappers.jl | 95 +- src/HDF5.jl | 84 +- src/api/api.jl | 6 +- src/api/error.jl | 8 +- src/api/functions.jl | 1 - src/api/helpers.jl | 134 +- src/api/types.jl | 134 +- src/api_midlevel.jl | 73 +- src/attributes.jl | 62 +- src/context.jl | 72 +- src/datasets.jl | 239 +- src/dataspaces.jl | 49 +- src/datatypes.jl | 23 +- src/deprecated.jl | 19 +- src/drivers/drivers.jl | 24 +- src/drivers/mpio.jl | 17 +- src/file.jl | 78 +- src/fileio.jl | 36 +- src/filters/builtin.jl | 14 +- src/filters/filters.jl | 91 +- src/groups.jl | 80 +- src/objects.jl | 43 +- src/properties.jl | 569 +++-- src/readwrite.jl | 111 +- src/show.jl | 64 +- src/typeconversions.jl | 410 ++-- src/types.jl | 35 +- test/api.jl | 49 +- test/attributes.jl | 19 +- test/chunkstorage.jl | 299 +-- test/compound.jl | 60 +- test/custom.jl | 4 +- test/dataspace.jl | 47 +- test/drivers.jl | 31 +- test/extend_test.jl | 130 +- test/external.jl | 102 +- test/fileio.jl | 208 +- test/filter.jl | 336 +-- test/filters/FilterTestUtils.jl | 52 +- test/gc.jl | 104 +- test/memtest.jl | 8 +- test/mmap.jl | 104 +- test/mpio.jl | 121 +- test/nonallocating.jl | 20 +- test/objects.jl | 5 +- test/plain.jl | 2425 +++++++++++--------- test/properties.jl | 223 +- test/readremote.jl | 383 ++-- test/runtests.jl | 118 +- test/swmr.jl | 179 +- test/table.jl | 25 +- 58 files changed, 4815 insertions(+), 3814 deletions(-) diff --git a/deps/build.jl b/deps/build.jl index b5142266a..3464f7c49 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -2,14 +2,13 @@ using Libdl const depsfile = joinpath(@__DIR__, "deps.jl") -libpath = get(ENV, "JULIA_HDF5_PATH", - get(ENV, 
"JULIA_HDF5_LIBRARY_PATH", nothing)) # legacy env variable for compatibility +libpath = get(ENV, "JULIA_HDF5_PATH", get(ENV, "JULIA_HDF5_LIBRARY_PATH", nothing)) # legacy env variable for compatibility # We avoid calling Libdl.find_library to avoid possible segfault when calling # dlclose (#929). # The only difference with Libdl.find_library is that we allow custom dlopen # flags via the `flags` argument. -function find_library_alt(libnames, extrapaths=String[]; flags = RTLD_LAZY) +function find_library_alt(libnames, extrapaths=String[]; flags=RTLD_LAZY) for lib in libnames for path in extrapaths l = joinpath(path, lib) @@ -44,8 +43,8 @@ else libpaths = [libpath, joinpath(libpath, "lib"), joinpath(libpath, "lib64")] flags = RTLD_LAZY | RTLD_NODELETE # RTLD_NODELETE may be needed to avoid segfault (#929) - libhdf5 = find_library_alt(["libhdf5"], libpaths; flags = flags) - libhdf5_hl = find_library_alt(["libhdf5_hl"], libpaths; flags = flags) + libhdf5 = find_library_alt(["libhdf5"], libpaths; flags=flags) + libhdf5_hl = find_library_alt(["libhdf5_hl"], libpaths; flags=flags) isempty(libhdf5) && error("libhdf5 could not be found") isempty(libhdf5_hl) && error("libhdf5_hl could not be found") diff --git a/filters/H5Zbitshuffle/src/H5Zbitshuffle.jl b/filters/H5Zbitshuffle/src/H5Zbitshuffle.jl index 153f67082..ca5dc50de 100644 --- a/filters/H5Zbitshuffle/src/H5Zbitshuffle.jl +++ b/filters/H5Zbitshuffle/src/H5Zbitshuffle.jl @@ -12,9 +12,18 @@ module H5Zbitshuffle using bitshuffle_jll using HDF5.API -import HDF5.Filters: Filter, filterid, register_filter, filtername, filter_func, filter_cfunc, set_local_func, set_local_cfunc - -export BSHUF_H5_COMPRESS_LZ4, BSHUF_H5_COMPRESS_ZSTD, BitshuffleFilter, H5Z_filter_bitshuffle +import HDF5.Filters: + Filter, + filterid, + register_filter, + filtername, + filter_func, + filter_cfunc, + set_local_func, + set_local_cfunc + +export BSHUF_H5_COMPRESS_LZ4, + BSHUF_H5_COMPRESS_ZSTD, BitshuffleFilter, H5Z_filter_bitshuffle # From bshuf_h5filter.h @@ -35,21 +44,22 @@ function bitshuffle_set_local(dcpl::API.hid_t, htype::API.hid_t, space::API.hid_ # Sanity check of provided values and set element size bs_flags = Ref{Cuint}() - bs_values = Vector{Cuint}(undef,8) + bs_values = Vector{Cuint}(undef, 8) bs_nelements = Ref{Csize_t}(length(bs_values)) - - API.h5p_get_filter_by_id(dcpl, H5Z_FILTER_BITSHUFFLE, bs_flags, bs_nelements, - bs_values, 0, C_NULL, C_NULL) + + API.h5p_get_filter_by_id( + dcpl, H5Z_FILTER_BITSHUFFLE, bs_flags, bs_nelements, bs_values, 0, C_NULL, C_NULL + ) @debug "Initial filter info" bs_flags bs_values bs_nelements - + flags = bs_flags[] # set values - + bs_values[1] = BSHUF_VERSION_MAJOR bs_values[2] = BSHUF_VERSION_MINOR - + elem_size = API.h5t_get_size(htype) @debug "Element size for $htype reported as $elem_size" @@ -60,15 +70,17 @@ function bitshuffle_set_local(dcpl::API.hid_t, htype::API.hid_t, space::API.hid_ bs_values[3] = elem_size nelements = bs_nelements[] - + # check user-supplied values - + if nelements > 3 - if bs_values[4] % 8 !=0 || bs_values[4] < 0 return API.herr_t(-1) end + if bs_values[4] % 8 != 0 || bs_values[4] < 0 + return API.herr_t(-1) + end end if nelements > 4 - if !(bs_values[5] in (0,BSHUF_H5_COMPRESS_LZ4,BSHUF_H5_COMPRESS_ZSTD)) + if !(bs_values[5] in (0, BSHUF_H5_COMPRESS_LZ4, BSHUF_H5_COMPRESS_ZSTD)) return API.herr_t(-1) end end @@ -76,21 +88,23 @@ function bitshuffle_set_local(dcpl::API.hid_t, htype::API.hid_t, space::API.hid_ @debug "Final values" bs_values API.h5p_modify_filter(dcpl, H5Z_FILTER_BITSHUFFLE, 
bs_flags[], nelements, bs_values) - + return API.herr_t(1) end -function H5Z_filter_bitshuffle(flags::Cuint, cd_nelmts::Csize_t, - cd_values::Ptr{Cuint}, nbytes::Csize_t, - buf_size::Ptr{Csize_t}, buf::Ptr{Ptr{Cvoid}})::Csize_t - - +function H5Z_filter_bitshuffle( + flags::Cuint, + cd_nelmts::Csize_t, + cd_values::Ptr{Cuint}, + nbytes::Csize_t, + buf_size::Ptr{Csize_t}, + buf::Ptr{Ptr{Cvoid}} +)::Csize_t in_buf = unsafe_load(buf) #in_buf is *void out_buf = C_NULL nbytes_out = 0 block_size = 0 - try #mop up errors at end @debug "nelmts" cd_nelmts @@ -100,130 +114,210 @@ function H5Z_filter_bitshuffle(flags::Cuint, cd_nelmts::Csize_t, # Get needed information - major = unsafe_load(cd_values,1) - minor = unsafe_load(cd_values,2) - elem_size = unsafe_load(cd_values,3) - comp_lvl = unsafe_load(cd_values,6) - compress_flag = unsafe_load(cd_values,5) - + major = unsafe_load(cd_values, 1) + minor = unsafe_load(cd_values, 2) + elem_size = unsafe_load(cd_values, 3) + comp_lvl = unsafe_load(cd_values, 6) + compress_flag = unsafe_load(cd_values, 5) + if cd_nelmts > 3 - block_size = unsafe_load(cd_values,4) + block_size = unsafe_load(cd_values, 4) end @debug "Major,minor:" major minor @debug "element size, compress_level, compress_flag" elem_size comp_lvl compress_flag if block_size == 0 - block_size = ccall((:bshuf_default_block_size,libbitshuffle),Csize_t,(Csize_t,),elem_size) + block_size = ccall( + (:bshuf_default_block_size, libbitshuffle), Csize_t, (Csize_t,), elem_size + ) end - # Work out buffer sizes - - if cd_nelmts > 4 && (compress_flag in (BSHUF_H5_COMPRESS_LZ4, BSHUF_H5_COMPRESS_ZSTD)) + + if cd_nelmts > 4 && + (compress_flag in (BSHUF_H5_COMPRESS_LZ4, BSHUF_H5_COMPRESS_ZSTD)) # Use compression - - if(flags & API.H5Z_FLAG_REVERSE) != 0 # unshuffle and decompress + + if (flags & API.H5Z_FLAG_REVERSE) != 0 # unshuffle and decompress # First 8 bytes is number of uncompressed bytes - nbytes_uncomp = ccall((:bshuf_read_uint64_BE,libbitshuffle),UInt64,(Ptr{Cvoid},),in_buf) + nbytes_uncomp = ccall( + (:bshuf_read_uint64_BE, libbitshuffle), UInt64, (Ptr{Cvoid},), in_buf + ) # Next 4 bytes are the block size - - block_size = ccall((:bshuf_read_uint32_BE,libbitshuffle),UInt32,(Ptr{Cvoid},),in_buf+8)÷elem_size + + block_size = + ccall( + (:bshuf_read_uint32_BE, libbitshuffle), + UInt32, + (Ptr{Cvoid},), + in_buf + 8 + ) ÷ elem_size in_buf += 12 buf_size_out = nbytes_uncomp - - else #shuffle and compress + else #shuffle and compress nbytes_uncomp = nbytes if compress_flag == BSHUF_H5_COMPRESS_LZ4 - buf_size_out = ccall((:bshuf_compress_lz4_bound,libbitshuffle),Csize_t,(Csize_t,Csize_t,Csize_t), - nbytes_uncomp÷elem_size,elem_size,block_size) + 12 + buf_size_out = + ccall( + (:bshuf_compress_lz4_bound, libbitshuffle), + Csize_t, + (Csize_t, Csize_t, Csize_t), + nbytes_uncomp ÷ elem_size, + elem_size, + block_size + ) + 12 elseif compress_flag == BSHUF_H5_COMPRESS_ZSTD - buf_size_out = ccall((:bshuf_compress_zstd_bound,libbitshuffle),Csize_t,(Csize_t,Csize_t,Csize_t), - nbytes_uncomp÷elem_size,elem_size,block_size)+12 + buf_size_out = + ccall( + (:bshuf_compress_zstd_bound, libbitshuffle), + Csize_t, + (Csize_t, Csize_t, Csize_t), + nbytes_uncomp ÷ elem_size, + elem_size, + block_size + ) + 12 end end - + else # No compression required nbytes_uncomp = nbytes buf_size_out = nbytes end - + if nbytes_uncomp % elem_size != 0 - error("bitshuffle_h5plugin: Uncompressed size $nbytes_uncomp is not a multiple of $elem_size") + error( + "bitshuffle_h5plugin: Uncompressed size $nbytes_uncomp is not a multiple of 
$elem_size" + ) end - size = nbytes_uncomp÷elem_size + size = nbytes_uncomp ÷ elem_size out_buf = Libc.malloc(buf_size_out) if out_buf == C_NULL - error("bitshuffle_h5plugin: Cannot allocate memory for outbuf during decompression") + error( + "bitshuffle_h5plugin: Cannot allocate memory for outbuf during decompression" + ) end # Now perform the decompression - if cd_nelmts > 4 && (compress_flag in (BSHUF_H5_COMPRESS_LZ4, BSHUF_H5_COMPRESS_ZSTD)) + if cd_nelmts > 4 && + (compress_flag in (BSHUF_H5_COMPRESS_LZ4, BSHUF_H5_COMPRESS_ZSTD)) if flags & API.H5Z_FLAG_REVERSE != 0 #unshuffle and decompress if compress_flag == BSHUF_H5_COMPRESS_LZ4 - err = ccall((:bshuf_decompress_lz4,libbitshuffle),Int64, - (Ptr{Cvoid},Ptr{Cvoid},Csize_t,Csize_t,Csize_t), - in_buf,out_buf,size,elem_size,block_size) + err = ccall( + (:bshuf_decompress_lz4, libbitshuffle), + Int64, + (Ptr{Cvoid}, Ptr{Cvoid}, Csize_t, Csize_t, Csize_t), + in_buf, + out_buf, + size, + elem_size, + block_size + ) elseif compress_flag == BSHUF_H5_COMPRESS_ZSTD - err = ccall((:bshuf_decompress_zstd,libbitshuffle),Int64, - (Ptr{Cvoid},Ptr{Cvoid},Csize_t,Csize_t,Csize_t), - in_buf,out_buf,size,elem_size,block_size) + err = ccall( + (:bshuf_decompress_zstd, libbitshuffle), + Int64, + (Ptr{Cvoid}, Ptr{Cvoid}, Csize_t, Csize_t, Csize_t), + in_buf, + out_buf, + size, + elem_size, + block_size + ) end nbytes_out = nbytes_uncomp - + else #shuffle and compress - - ccall((:bshuf_write_uint64_BE,libbitshuffle),Cvoid,(Ptr{Cvoid},UInt64),out_buf,nbytes_uncomp) - ccall((:bshuf_write_uint32_BE,libbitshuffle),Cvoid,(Ptr{Cvoid},UInt32),out_buf+8,block_size*elem_size) - + ccall( + (:bshuf_write_uint64_BE, libbitshuffle), + Cvoid, + (Ptr{Cvoid}, UInt64), + out_buf, + nbytes_uncomp + ) + ccall( + (:bshuf_write_uint32_BE, libbitshuffle), + Cvoid, + (Ptr{Cvoid}, UInt32), + out_buf + 8, + block_size * elem_size + ) + if compress_flag == BSHUF_H5_COMPRESS_LZ4 - err = ccall((:bshuf_compress_lz4,libbitshuffle),Int64, - (Ptr{Cvoid},Ptr{Cvoid},Csize_t,Csize_t,Csize_t), - in_buf,out_buf+12,size,elem_size,block_size) + err = ccall( + (:bshuf_compress_lz4, libbitshuffle), + Int64, + (Ptr{Cvoid}, Ptr{Cvoid}, Csize_t, Csize_t, Csize_t), + in_buf, + out_buf + 12, + size, + elem_size, + block_size + ) else - err = ccall((:bshuf_compress_zstd,libbitshuffle),Int64, - (Ptr{Cvoid},Ptr{Cvoid},Csize_t,Csize_t,Csize_t), - in_buf,out_buf+12,size,elem_size,block_size) + err = ccall( + (:bshuf_compress_zstd, libbitshuffle), + Int64, + (Ptr{Cvoid}, Ptr{Cvoid}, Csize_t, Csize_t, Csize_t), + in_buf, + out_buf + 12, + size, + elem_size, + block_size + ) end - + nbytes_out = err + 12 end else # just the shuffle thanks - if flags & API.H5Z_FLAG_REVERSE != 0 - err = ccall((:bshuf_bitunshuffle,libbitshuffle),Int64, - (Ptr{Cvoid},Ptr{Cvoid},Csize_t,Csize_t,Csize_t), - in_buf,out_buf,size,elem_size,block_size) + err = ccall( + (:bshuf_bitunshuffle, libbitshuffle), + Int64, + (Ptr{Cvoid}, Ptr{Cvoid}, Csize_t, Csize_t, Csize_t), + in_buf, + out_buf, + size, + elem_size, + block_size + ) else - err = ccall((:bshuf_bitshuffle,libbitshuffle),Int64, - (Ptr{Cvoid},Ptr{Cvoid},Csize_t,Csize_t,Csize_t), - in_buf,out_buf,size,elem_size,block_size) + err = ccall( + (:bshuf_bitshuffle, libbitshuffle), + Int64, + (Ptr{Cvoid}, Ptr{Cvoid}, Csize_t, Csize_t, Csize_t), + in_buf, + out_buf, + size, + elem_size, + block_size + ) end - + nbytes_out = nbytes end - + # And wrap it up if err < 0 error("h5plugin_bitshuffle: Error in bitshuffle with code $err") end - + Libc.free(unsafe_load(buf)) - 
unsafe_store!(buf,out_buf) - unsafe_store!(buf_size,Csize_t(buf_size_out)) + unsafe_store!(buf, out_buf) + unsafe_store!(buf_size, Csize_t(buf_size_out)) out_buf = C_NULL - + catch e # On failure, return 0 and change no arguments - + nbytes_out = Csize_t(0) @error "Non-fatal H5 bitshuffle plugin error: " e display(stacktrace(catch_backtrace())) @@ -257,25 +351,31 @@ The Bitshuffle filter can optionally include compression :lz4 or :zstd. For :zst comp_level can be provided. This is ignored for :lz4 compression. If `blocksize` is zero the default bitshuffle blocksize is used. """ -function BitshuffleFilter(;blocksize = 0, compressor=:none, comp_level=0) - compressor in (:lz4,:zstd,:none) || throw(ArgumentError("Invalid bitshuffle compression $compressor")) +function BitshuffleFilter(; blocksize=0, compressor=:none, comp_level=0) + compressor in (:lz4, :zstd, :none) || + throw(ArgumentError("Invalid bitshuffle compression $compressor")) compcode = 0 if compressor == :lz4 compcode = BSHUF_H5_COMPRESS_LZ4 elseif compressor == :zstd compcode = BSHUF_H5_COMPRESS_ZSTD end - BitshuffleFilter(BSHUF_VERSION_MAJOR,BSHUF_VERSION_MINOR,0,blocksize,compcode,comp_level) + BitshuffleFilter( + BSHUF_VERSION_MAJOR, BSHUF_VERSION_MINOR, 0, blocksize, compcode, comp_level + ) end filterid(::Type{BitshuffleFilter}) = H5Z_FILTER_BITSHUFFLE filtername(::Type{BitshuffleFilter}) = bitshuffle_name set_local_func(::Type{BitshuffleFilter}) = bitshuffle_set_local -set_local_cfunc(::Type{BitshuffleFilter}) = @cfunction(bitshuffle_set_local,API.herr_t,(API.hid_t,API.hid_t,API.hid_t)) +set_local_cfunc(::Type{BitshuffleFilter}) = + @cfunction(bitshuffle_set_local, API.herr_t, (API.hid_t, API.hid_t, API.hid_t)) filterfunc(::Type{BitshuffleFilter}) = H5Z_filter_bitshuffle -filter_cfunc(::Type{BitshuffleFilter}) = @cfunction(H5Z_filter_bitshuffle, Csize_t, - (Cuint, Csize_t, Ptr{Cuint}, Csize_t, - Ptr{Csize_t}, Ptr{Ptr{Cvoid}})) +filter_cfunc(::Type{BitshuffleFilter}) = @cfunction( + H5Z_filter_bitshuffle, + Csize_t, + (Cuint, Csize_t, Ptr{Cuint}, Csize_t, Ptr{Csize_t}, Ptr{Ptr{Cvoid}}) +) function __init__() register_filter(BitshuffleFilter) diff --git a/filters/H5Zblosc/src/H5Zblosc.jl b/filters/H5Zblosc/src/H5Zblosc.jl index 9396cea7a..26b5d1ae4 100644 --- a/filters/H5Zblosc/src/H5Zblosc.jl +++ b/filters/H5Zblosc/src/H5Zblosc.jl @@ -4,7 +4,14 @@ module H5Zblosc import Blosc using HDF5.API import HDF5.Filters: Filter, FilterPipeline -import HDF5.Filters: filterid, register_filter, filtername, filter_func, filter_cfunc, set_local_func, set_local_cfunc +import HDF5.Filters: + filterid, + register_filter, + filtername, + filter_func, + filter_cfunc, + set_local_func, + set_local_cfunc import HDF5.Filters.Shuffle export H5Z_FILTER_BLOSC, blosc_filter, BloscFilter @@ -18,11 +25,20 @@ const blosc_name = "blosc" function blosc_set_local(dcpl::API.hid_t, htype::API.hid_t, space::API.hid_t) blosc_flags = Ref{Cuint}() - blosc_values = Vector{Cuint}(undef,8) + blosc_values = Vector{Cuint}(undef, 8) blosc_nelements = Ref{Csize_t}(length(blosc_values)) - blosc_chunkdims = Vector{API.hsize_t}(undef,32) - - API.h5p_get_filter_by_id(dcpl, H5Z_FILTER_BLOSC, blosc_flags, blosc_nelements, blosc_values, 0, C_NULL, C_NULL) + blosc_chunkdims = Vector{API.hsize_t}(undef, 32) + + API.h5p_get_filter_by_id( + dcpl, + H5Z_FILTER_BLOSC, + blosc_flags, + blosc_nelements, + blosc_values, + 0, + C_NULL, + C_NULL + ) flags = blosc_flags[] nelements = max(blosc_nelements[], 4) # First 4 slots reserved @@ -60,9 +76,14 @@ function 
blosc_set_local(dcpl::API.hid_t, htype::API.hid_t, space::API.hid_t) return API.herr_t(1) end -function blosc_filter(flags::Cuint, cd_nelmts::Csize_t, - cd_values::Ptr{Cuint}, nbytes::Csize_t, - buf_size::Ptr{Csize_t}, buf::Ptr{Ptr{Cvoid}}) +function blosc_filter( + flags::Cuint, + cd_nelmts::Csize_t, + cd_values::Ptr{Cuint}, + nbytes::Csize_t, + buf_size::Ptr{Csize_t}, + buf::Ptr{Ptr{Cvoid}} +) typesize = unsafe_load(cd_values, 3) # The datatype size outbuf_size = unsafe_load(cd_values, 4) # Compression level: @@ -85,8 +106,9 @@ function blosc_filter(flags::Cuint, cd_nelmts::Csize_t, "blosclz" end Blosc.set_compressor(compname) - status = Blosc.blosc_compress(clevel, doshuffle, typesize, nbytes, - unsafe_load(buf), outbuf, nbytes) + status = Blosc.blosc_compress( + clevel, doshuffle, typesize, nbytes, unsafe_load(buf), outbuf, nbytes + ) status < 0 && (Libc.free(outbuf); return Csize_t(0)) else # decompressing # Extract the exact outbuf_size from the buffer header. @@ -139,38 +161,55 @@ struct BloscFilter <: Filter compcode::Cuint end -function BloscFilter(;level=5, shuffle=SHUFFLE, compressor="blosclz") +function BloscFilter(; level=5, shuffle=SHUFFLE, compressor="blosclz") Blosc.isvalidshuffle(shuffle) || throw(ArgumentError("invalid blosc shuffle $shuffle")) compcode = Blosc.compcode(compressor) - BloscFilter(0,0,0,0,level,shuffle,compcode) + BloscFilter(0, 0, 0, 0, level, shuffle, compcode) end filterid(::Type{BloscFilter}) = H5Z_FILTER_BLOSC filtername(::Type{BloscFilter}) = blosc_name set_local_func(::Type{BloscFilter}) = blosc_set_local -set_local_cfunc(::Type{BloscFilter}) = @cfunction(blosc_set_local, API.herr_t, (API.hid_t,API.hid_t,API.hid_t)) +set_local_cfunc(::Type{BloscFilter}) = + @cfunction(blosc_set_local, API.herr_t, (API.hid_t, API.hid_t, API.hid_t)) filter_func(::Type{BloscFilter}) = blosc_filter -filter_cfunc(::Type{BloscFilter}) = @cfunction(blosc_filter, Csize_t, - (Cuint, Csize_t, Ptr{Cuint}, Csize_t, - Ptr{Csize_t}, Ptr{Ptr{Cvoid}})) +filter_cfunc(::Type{BloscFilter}) = @cfunction( + blosc_filter, + Csize_t, + (Cuint, Csize_t, Ptr{Cuint}, Csize_t, Ptr{Csize_t}, Ptr{Ptr{Cvoid}}) +) function Base.show(io::IO, blosc::BloscFilter) - print(io, BloscFilter, - "(level=", Int(blosc.level), - ",shuffle=", blosc.shuffle==NOSHUFFLE ? "NOSHUFFLE" : - blosc.shuffle==SHUFFLE ? "SHUFFLE" : - blosc.shuffle==BITSHUFFLE ? "BITSHUFFLE" : - "UNKNOWN", - ",compressor=", Blosc.compname(blosc.compcode), - ")") + print( + io, + BloscFilter, + "(level=", + Int(blosc.level), + ",shuffle=", + blosc.shuffle == NOSHUFFLE ? "NOSHUFFLE" : + blosc.shuffle == SHUFFLE ? "SHUFFLE" : + blosc.shuffle == BITSHUFFLE ? 
"BITSHUFFLE" : + "UNKNOWN", + ",compressor=", + Blosc.compname(blosc.compcode), + ")" + ) end function Base.push!(f::FilterPipeline, blosc::BloscFilter) - 0 <= blosc.level <= 9 || throw(ArgumentError("blosc compression $(blosc.level) not in [0,9]")) - Blosc.isvalidshuffle(blosc.shuffle) || throw(ArgumentError("invalid blosc shuffle $(blosc.shuffle)")) + 0 <= blosc.level <= 9 || + throw(ArgumentError("blosc compression $(blosc.level) not in [0,9]")) + Blosc.isvalidshuffle(blosc.shuffle) || + throw(ArgumentError("invalid blosc shuffle $(blosc.shuffle)")) ref = Ref(blosc) GC.@preserve ref begin - API.h5p_set_filter(f.plist, filterid(BloscFilter), API.H5Z_FLAG_OPTIONAL, div(sizeof(BloscFilter), sizeof(Cuint)), pointer_from_objref(ref)) + API.h5p_set_filter( + f.plist, + filterid(BloscFilter), + API.H5Z_FLAG_OPTIONAL, + div(sizeof(BloscFilter), sizeof(Cuint)), + pointer_from_objref(ref) + ) end return f end diff --git a/filters/H5Zbzip2/src/H5Zbzip2.jl b/filters/H5Zbzip2/src/H5Zbzip2.jl index cb95e1ff8..65bbe7843 100644 --- a/filters/H5Zbzip2/src/H5Zbzip2.jl +++ b/filters/H5Zbzip2/src/H5Zbzip2.jl @@ -14,113 +14,118 @@ module H5Zbzip2 using CodecBzip2 import CodecBzip2: libbzip2 using HDF5.API -import HDF5.Filters: Filter, filterid, register_filter, filtername, filter_func, filter_cfunc +import HDF5.Filters: + Filter, filterid, register_filter, filtername, filter_func, filter_cfunc export H5Z_FILTER_BZIP2, H5Z_filter_bzip2, Bzip2Filter - const H5Z_FILTER_BZIP2 = API.H5Z_filter_t(307) const bzip2_name = "HDF5 bzip2 filter; see http://www.hdfgroup.org/services/contributions.html" -function H5Z_filter_bzip2(flags::Cuint, cd_nelmts::Csize_t, - cd_values::Ptr{Cuint}, nbytes::Csize_t, - buf_size::Ptr{Csize_t}, buf::Ptr{Ptr{Cvoid}})::Csize_t +function H5Z_filter_bzip2( + flags::Cuint, + cd_nelmts::Csize_t, + cd_values::Ptr{Cuint}, + nbytes::Csize_t, + buf_size::Ptr{Csize_t}, + buf::Ptr{Ptr{Cvoid}} +)::Csize_t outbuf = C_NULL outdatalen = Cuint(0) # Prepare the output buffer try + if flags & API.H5Z_FLAG_REVERSE != 0 + # Decompress - if flags & API.H5Z_FLAG_REVERSE != 0 - # Decompress + outbuflen = nbytes * 3 + 1 + outbuf = Libc.malloc(outbuflen) + if outbuf == C_NULL + error("H5Zbzip2: memory allocation failed for bzip2 decompression.") + end - outbuflen = nbytes * 3 + 1 - outbuf = Libc.malloc(outbuflen) - if outbuf == C_NULL - error("H5Zbzip2: memory allocation failed for bzip2 decompression.") - end + stream = CodecBzip2.BZStream() + # Just use default malloc and free + stream.bzalloc = C_NULL + stream.bzfree = C_NULL + # BZ2_bzDecompressInit + ret = CodecBzip2.decompress_init!(stream, 0, false) + if ret != CodecBzip2.BZ_OK + errror("H5Zbzip2: bzip2 decompress start failed with error $ret.") + end - stream = CodecBzip2.BZStream() - # Just use default malloc and free - stream.bzalloc = C_NULL - stream.bzfree = C_NULL - # BZ2_bzDecompressInit - ret = CodecBzip2.decompress_init!(stream, 0, false) - if ret != CodecBzip2.BZ_OK - errror("H5Zbzip2: bzip2 decompress start failed with error $ret.") - end + stream.next_out = outbuf + stream.avail_out = outbuflen + stream.next_in = unsafe_load(buf) + stream.avail_in = nbytes - stream.next_out = outbuf - stream.avail_out = outbuflen - stream.next_in = unsafe_load(buf) - stream.avail_in = nbytes + cont = true - cont = true + while cont + # BZ2_bzDecompress + ret = CodecBzip2.decompress!(stream) + if ret < 0 + error("H5Zbzip2: bzip2 decompression failed with error $ret.") + end + cont = ret != CodecBzip2.BZ_STREAM_END + if cont && stream.avail_out == 0 + # Grow 
the output buffer + newbuflen = outbuflen * 2 + newbuf = Libc.realloc(outbuf, newbuflen) + if newbuf == C_NULL + error("H5Zbzip2: memory allocation failed for bzip2 decompression.") + end + stream.next_out = newbuf + outbuflen + stream.avail_out = outbuflen + outbuf = newbuf + outbuflen = newbuflen + end + end - while cont - # BZ2_bzDecompress - ret = CodecBzip2.decompress!(stream) - if ret < 0 - error("H5Zbzip2: bzip2 decompression failed with error $ret.") + outdatalen = stream.total_out_lo32 + # BZ2_bzDecompressEnd + ret = CodecBzip2.decompress_end!(stream) + if ret != CodecBzip2.BZ_OK + error("H5Zbzip2: bzip2 compression end failed with error $ret.") end - cont = ret != CodecBzip2.BZ_STREAM_END - if cont && stream.avail_out == 0 - # Grow the output buffer - newbuflen = outbuflen * 2 - newbuf = Libc.realloc(outbuf, newbuflen) - if newbuf == C_NULL - error("H5Zbzip2: memory allocation failed for bzip2 decompression.") + else + # Compress data + + # Maybe not the same size as outdatalen + odatalen = Cuint(0) + blockSize100k = 9 + + # Get compression blocksize if present + if cd_nelmts > 0 + blockSize100k = unsafe_load(cd_values) + if blockSize100k < 1 || blockSize100k > 9 + error("H5Zbzip2: Invalid compression blocksize: $blockSize100k") end - stream.next_out = newbuf + outbuflen - stream.avail_out = outbuflen - outbuf = newbuf - outbuflen = newbuflen end - end - outdatalen = stream.total_out_lo32 - # BZ2_bzDecompressEnd - ret = CodecBzip2.decompress_end!(stream) - if ret != CodecBzip2.BZ_OK - error("H5Zbzip2: bzip2 compression end failed with error $ret.") - end - else - # Compress data - - # Maybe not the same size as outdatalen - odatalen = Cuint(0) - blockSize100k = 9 - - # Get compression blocksize if present - if cd_nelmts > 0 - blockSize100k = unsafe_load(cd_values) - if blockSize100k < 1 || blockSize100k > 9 - error("H5Zbzip2: Invalid compression blocksize: $blockSize100k") + # Prepare the output buffer + outbuflen = nbytes + nbytes ÷ 100 + 600 # worse case (bzip2 docs) + outbuf = Libc.malloc(outbuflen) + @debug "Allocated" outbuflen outbuf + if outbuf == C_NULL + error("H5Zbzip2: Memory allocation failed for bzip2 compression") end - end - # Prepare the output buffer - outbuflen = nbytes + nbytes ÷ 100 + 600 # worse case (bzip2 docs) - outbuf = Libc.malloc(outbuflen) - @debug "Allocated" outbuflen outbuf - if outbuf == C_NULL - error("H5Zbzip2: Memory allocation failed for bzip2 compression") - end - - # Compress data - odatalen = outbuflen - r_odatalen = Ref{Cuint}(odatalen) - ret = BZ2_bzBuffToBuffCompress(outbuf, r_odatalen, unsafe_load(buf), nbytes, - blockSize100k, 0, 0) - outdatalen = r_odatalen[] - if ret != CodecBzip2.BZ_OK - error("H5Zbzip2: bzip2 compression failed with error $ret.") - end - end # if flags & API.H5Z_FLAG_REVERSE != 0 - Libc.free(unsafe_load(buf)) - unsafe_store!(buf, outbuf) - unsafe_store!(buf_size, outbuflen) + # Compress data + odatalen = outbuflen + r_odatalen = Ref{Cuint}(odatalen) + ret = BZ2_bzBuffToBuffCompress( + outbuf, r_odatalen, unsafe_load(buf), nbytes, blockSize100k, 0, 0 + ) + outdatalen = r_odatalen[] + if ret != CodecBzip2.BZ_OK + error("H5Zbzip2: bzip2 compression failed with error $ret.") + end + end # if flags & API.H5Z_FLAG_REVERSE != 0 + Libc.free(unsafe_load(buf)) + unsafe_store!(buf, outbuf) + unsafe_store!(buf_size, outbuflen) catch err # "In the case of failure, the return value is 0 (zero) and all pointer arguments are left unchanged." 
@@ -136,7 +141,9 @@ function H5Z_filter_bzip2(flags::Cuint, cd_nelmts::Csize_t, end # function H5Z_filter_bzip2 # Need stdcall for 32-bit Windows? -function BZ2_bzBuffToBuffCompress(dest, destLen, source, sourceLen, blockSize100k, verbosity, workFactor) +function BZ2_bzBuffToBuffCompress( + dest, destLen, source, sourceLen, blockSize100k, verbosity, workFactor +) @static if CodecBzip2.WIN32 return ccall( ("BZ2_bzBuffToBuffCompress@28", libbzip2), @@ -215,9 +222,11 @@ Bzip2Filter() = Bzip2Filter(9) filterid(::Type{Bzip2Filter}) = H5Z_FILTER_BZIP2 filtername(::Type{Bzip2Filter}) = bzip2_name filter_func(::Type{Bzip2Filter}) = H5Z_filter_bzip2 -filter_cfunc(::Type{Bzip2Filter}) = @cfunction(H5Z_filter_bzip2, Csize_t, - (Cuint, Csize_t, Ptr{Cuint}, Csize_t, - Ptr{Csize_t}, Ptr{Ptr{Cvoid}})) +filter_cfunc(::Type{Bzip2Filter}) = @cfunction( + H5Z_filter_bzip2, + Csize_t, + (Cuint, Csize_t, Ptr{Cuint}, Csize_t, Ptr{Csize_t}, Ptr{Ptr{Cvoid}}) +) function __init__() register_filter(Bzip2Filter) diff --git a/filters/H5Zlz4/src/H5Zlz4.jl b/filters/H5Zlz4/src/H5Zlz4.jl index c2d61dd8e..39de93945 100644 --- a/filters/H5Zlz4/src/H5Zlz4.jl +++ b/filters/H5Zlz4/src/H5Zlz4.jl @@ -13,162 +13,178 @@ module H5Zlz4 using CodecLz4 using HDF5.API -import HDF5.Filters: Filter, filterid, register_filter, filtername, filter_func, filter_cfunc - +import HDF5.Filters: + Filter, filterid, register_filter, filtername, filter_func, filter_cfunc export H5Z_FILTER_LZ4, H5Z_filter_lz4, Lz4Filter const H5Z_FILTER_LZ4 = API.H5Z_filter_t(32004) -const DEFAULT_BLOCK_SIZE = 1 << 30; +const DEFAULT_BLOCK_SIZE = 1 << 30 const lz4_name = "HDF5 lz4 filter; see http://www.hdfgroup.org/services/contributions.html" const LZ4_AGGRESSION = Ref(1) - - # flags H5Z_FLAG_REVERSE or H5Z_FLAG_OPTIONAL # cd_nelmts number of elements in cd_values (0 or 1) # cd_values the first optional element must be the blockSize # nbytes - number of valid bytes of data # buf_size - total size of buffer # buf - pointer to pointer of data -function H5Z_filter_lz4(flags::Cuint, cd_nelmts::Csize_t, - cd_values::Ptr{Cuint}, nbytes::Csize_t, - buf_size::Ptr{Csize_t}, buf::Ptr{Ptr{Cvoid}})::Csize_t - +function H5Z_filter_lz4( + flags::Cuint, + cd_nelmts::Csize_t, + cd_values::Ptr{Cuint}, + nbytes::Csize_t, + buf_size::Ptr{Csize_t}, + buf::Ptr{Ptr{Cvoid}} +)::Csize_t outBuf = C_NULL ret_value = Csize_t(0) try - - if (flags & API.H5Z_FLAG_REVERSE) != 0 # reverse filter, decompressing - #i32Buf = Ref{UInt32}() - blockSize = UInt32(0) - roBuf = Ref{UInt8}() - rpos = Ptr{UInt8}(unsafe_load(buf)) - #i64Buf = Ptr{UInt64}(rpos) - # Load the first 8 bytes from buffer as a big endian UInt64 - # This is the original size of the buffer - origSize = ntoh(unsafe_load(Ptr{UInt64}(rpos))) - rpos += 8 # advance the pointer - - # Next read the next four bytes from the buffer as a big endian UInt32 - # This is the blocksize - #i32Buf[] = rpos - blockSize = ntoh(unsafe_load(Ptr{UInt32}(rpos))) - rpos += 4 - if blockSize > origSize - blockSize = origSize - end - - # malloc a byte buffer of origSize - # outBuf = Vector{UInt8}(undef, origSize) - @debug "OrigSize" origSize - outBuf = Libc.malloc(origSize) - # Julia should throw an error if it cannot allocate this - roBuf = Ptr{UInt8}(outBuf) - decompSize = 0 - # Start with the first blockSize - while decompSize < origSize - # compressedBlockSize = UInt32(0) - if origSize - decompSize < blockSize # the last block can be smaller than block size - blockSize = origSize - decompSize - end - + if (flags & API.H5Z_FLAG_REVERSE) != 0 # reverse 
filter, decompressing + #i32Buf = Ref{UInt32}() + blockSize = UInt32(0) + roBuf = Ref{UInt8}() + rpos = Ptr{UInt8}(unsafe_load(buf)) + #i64Buf = Ptr{UInt64}(rpos) + # Load the first 8 bytes from buffer as a big endian UInt64 + # This is the original size of the buffer + origSize = ntoh(unsafe_load(Ptr{UInt64}(rpos))) + rpos += 8 # advance the pointer + + # Next read the next four bytes from the buffer as a big endian UInt32 + # This is the blocksize #i32Buf[] = rpos - compressedBlockSize = ntoh(unsafe_load(Ptr{UInt32}(rpos))) + blockSize = ntoh(unsafe_load(Ptr{UInt32}(rpos))) rpos += 4 - - if compressedBlockSize == blockSize - # There was no compression - # memcpy(roBuf, rpos, blockSize) - unsafe_copyto!(roBuf, rpos, blockSize) - decompressedBytes = blockSize - else - # do the compression - # LZ4_decompress_fast, version number 10300 ? - @debug "decompress_safe" rpos roBuf compressedBlockSize (origSize - decompSize) - decompressedBytes = CodecLz4.LZ4_decompress_safe(rpos, roBuf, compressedBlockSize, origSize -decompSize) - @debug "decompressedBytes" decompressedBytes + if blockSize > origSize + blockSize = origSize end - rpos += compressedBlockSize - roBuf += blockSize - decompSize += decompressedBytes - end - Libc.free(unsafe_load(buf)) - unsafe_store!(buf, outBuf) - outBuf = C_NULL - ret_value = Csize_t(origSize) - else - # forward filter - # compressing - #i64Buf = Ref{UInt64}() - #i32Buf = Ref{UInt32}() - - if nbytes > typemax(Int32) - error("Can only compress chunks up to 2GB") - end - blockSize = unsafe_load(cd_values) - if cd_nelmts > 0 && blockSize > 0 + # malloc a byte buffer of origSize + # outBuf = Vector{UInt8}(undef, origSize) + @debug "OrigSize" origSize + outBuf = Libc.malloc(origSize) + # Julia should throw an error if it cannot allocate this + roBuf = Ptr{UInt8}(outBuf) + decompSize = 0 + # Start with the first blockSize + while decompSize < origSize + # compressedBlockSize = UInt32(0) + if origSize - decompSize < blockSize # the last block can be smaller than block size + blockSize = origSize - decompSize + end + + #i32Buf[] = rpos + compressedBlockSize = ntoh(unsafe_load(Ptr{UInt32}(rpos))) + rpos += 4 + + if compressedBlockSize == blockSize + # There was no compression + # memcpy(roBuf, rpos, blockSize) + unsafe_copyto!(roBuf, rpos, blockSize) + decompressedBytes = blockSize + else + # do the compression + # LZ4_decompress_fast, version number 10300 ? 
+ @debug "decompress_safe" rpos roBuf compressedBlockSize ( + origSize - decompSize + ) + decompressedBytes = CodecLz4.LZ4_decompress_safe( + rpos, roBuf, compressedBlockSize, origSize - decompSize + ) + @debug "decompressedBytes" decompressedBytes + end + + rpos += compressedBlockSize + roBuf += blockSize + decompSize += decompressedBytes + end + Libc.free(unsafe_load(buf)) + unsafe_store!(buf, outBuf) + outBuf = C_NULL + ret_value = Csize_t(origSize) else - blockSize = DEFAULT_BLOCK_SIZE - end - if blockSize > nbytes - blockSize = nbytes - end - nBlocks = (nbytes-1) ÷ blockSize + 1 - maxDestSize = nBlocks * CodecLz4.LZ4_compressBound(blockSize) + 4 + 8 + nBlocks*4 - outBuf = Libc.malloc(maxDestSize) - - rpos = Ptr{UInt8}(unsafe_load(buf)) - roBuf = Ptr{UInt8}(outBuf) - - # Header - unsafe_store!(Ptr{UInt64}(roBuf), hton(UInt64(nbytes))) - roBuf += 8 + # forward filter + # compressing + #i64Buf = Ref{UInt64}() + #i32Buf = Ref{UInt32}() - unsafe_store!(Ptr{UInt32}(roBuf), hton(UInt32(blockSize))) - roBuf += 4 - - outSize = 12 - - for block = 0:nBlocks-1 - # compBlockSize::UInt32 - origWritten = Csize_t(block*blockSize) - if nbytes - origWritten < blockSize # the last block may be < blockSize - blockSize = nbytes - origWritten + if nbytes > typemax(Int32) + error("Can only compress chunks up to 2GB") end - - # aggression = 1 is the same LZ4_compress_default - @debug "LZ4_compress_fast args" rpos outBuf roBuf roBuf+4 blockSize nBlocks CodecLz4.LZ4_compressBound(blockSize) - compBlockSize = UInt32(CodecLz4.LZ4_compress_fast(rpos, roBuf+4, blockSize, CodecLz4.LZ4_compressBound(blockSize), LZ4_AGGRESSION[])) - @debug "Compressed block size" compBlockSize - - if compBlockSize == 0 - error("Could not compress block $block") + blockSize = unsafe_load(cd_values) + if cd_nelmts > 0 && blockSize > 0 + else + blockSize = DEFAULT_BLOCK_SIZE end - - if compBlockSize >= blockSize # compression did not save any space, do a memcpy instead - compBlockSize = blockSize - unsafe_copyto!(roBuf+4, rpos, blockSize) + if blockSize > nbytes + blockSize = nbytes end + nBlocks = (nbytes - 1) ÷ blockSize + 1 + maxDestSize = + nBlocks * CodecLz4.LZ4_compressBound(blockSize) + 4 + 8 + nBlocks * 4 + outBuf = Libc.malloc(maxDestSize) + + rpos = Ptr{UInt8}(unsafe_load(buf)) + roBuf = Ptr{UInt8}(outBuf) - unsafe_store!(Ptr{UInt32}(roBuf), hton(UInt32(compBlockSize))) # write blocksize + # Header + unsafe_store!(Ptr{UInt64}(roBuf), hton(UInt64(nbytes))) + roBuf += 8 + + unsafe_store!(Ptr{UInt32}(roBuf), hton(UInt32(blockSize))) roBuf += 4 - rpos += blockSize - roBuf += compBlockSize - outSize += compBlockSize + 4 - end + outSize = 12 + + for block in 0:(nBlocks - 1) + # compBlockSize::UInt32 + origWritten = Csize_t(block * blockSize) + if nbytes - origWritten < blockSize # the last block may be < blockSize + blockSize = nbytes - origWritten + end + + # aggression = 1 is the same LZ4_compress_default + @debug "LZ4_compress_fast args" rpos outBuf roBuf roBuf + 4 blockSize nBlocks CodecLz4.LZ4_compressBound( + blockSize + ) + compBlockSize = UInt32( + CodecLz4.LZ4_compress_fast( + rpos, + roBuf + 4, + blockSize, + CodecLz4.LZ4_compressBound(blockSize), + LZ4_AGGRESSION[] + ) + ) + @debug "Compressed block size" compBlockSize + + if compBlockSize == 0 + error("Could not compress block $block") + end + + if compBlockSize >= blockSize # compression did not save any space, do a memcpy instead + compBlockSize = blockSize + unsafe_copyto!(roBuf + 4, rpos, blockSize) + end + + unsafe_store!(Ptr{UInt32}(roBuf), 
hton(UInt32(compBlockSize))) # write blocksize + roBuf += 4 + + rpos += blockSize + roBuf += compBlockSize + outSize += compBlockSize + 4 + end - Libc.free(unsafe_load(buf)) - unsafe_store!(buf, outBuf) - unsafe_store!(buf_size, outSize) - outBuf = C_NULL - ret_value = Csize_t(outSize) - end # (flags & API.H5Z_FLAG_REVERSE) != 0 + Libc.free(unsafe_load(buf)) + unsafe_store!(buf, outBuf) + unsafe_store!(buf_size, outSize) + outBuf = C_NULL + ret_value = Csize_t(outSize) + end # (flags & API.H5Z_FLAG_REVERSE) != 0 catch err # "In the case of failure, the return value is 0 (zero) and all pointer arguments are left unchanged." @@ -202,9 +218,11 @@ Lz4Filter() = Lz4Filter(DEFAULT_BLOCK_SIZE) filterid(::Type{Lz4Filter}) = H5Z_FILTER_LZ4 filtername(::Type{Lz4Filter}) = lz4_name filter_func(::Type{Lz4Filter}) = H5Z_filter_lz4 -filter_cfunc(::Type{Lz4Filter}) = @cfunction(H5Z_filter_lz4, Csize_t, - (Cuint, Csize_t, Ptr{Cuint}, Csize_t, - Ptr{Csize_t}, Ptr{Ptr{Cvoid}})) +filter_cfunc(::Type{Lz4Filter}) = @cfunction( + H5Z_filter_lz4, + Csize_t, + (Cuint, Csize_t, Ptr{Cuint}, Csize_t, Ptr{Csize_t}, Ptr{Ptr{Cvoid}}) +) function __init__() register_filter(Lz4Filter) diff --git a/filters/H5Zzstd/src/H5Zzstd.jl b/filters/H5Zzstd/src/H5Zzstd.jl index 3ea62661c..14bdc3fc2 100644 --- a/filters/H5Zzstd/src/H5Zzstd.jl +++ b/filters/H5Zzstd/src/H5Zzstd.jl @@ -10,79 +10,85 @@ module H5Zzstd using CodecZstd import CodecZstd.LibZstd using HDF5.API -import HDF5.Filters: Filter, filterid, register_filter, filterid, filtername, filter_func, filter_cfunc - +import HDF5.Filters: + Filter, filterid, register_filter, filterid, filtername, filter_func, filter_cfunc const H5Z_FILTER_ZSTD = API.H5Z_filter_t(32015) const zstd_name = "Zstandard compression: http://www.zstd.net" -export H5Z_filter_zstd, H5Z_FILTER_ZSTD, ZstdFilter +export H5Z_filter_zstd, H5Z_FILTER_ZSTD, ZstdFilter # cd_values First optional value is the compressor aggression # Default is CodecZstd.LibZstd.ZSTD_CLEVEL_DEFAULT -function H5Z_filter_zstd(flags::Cuint, cd_nelmts::Csize_t, - cd_values::Ptr{Cuint}, nbytes::Csize_t, - buf_size::Ptr{Csize_t}, buf::Ptr{Ptr{Cvoid}})::Csize_t +function H5Z_filter_zstd( + flags::Cuint, + cd_nelmts::Csize_t, + cd_values::Ptr{Cuint}, + nbytes::Csize_t, + buf_size::Ptr{Csize_t}, + buf::Ptr{Ptr{Cvoid}} +)::Csize_t inbuf = unsafe_load(buf) outbuf = C_NULL origSize = nbytes ret_value = Csize_t(0) try - - if flags & API.H5Z_FLAG_REVERSE != 0 - #decompresssion - - decompSize = LibZstd.ZSTD_getDecompressedSize(inbuf, origSize) - outbuf = Libc.malloc(decompSize) - if outbuf == C_NULL - error("zstd_h5plugin: Cannot allocate memory for outbuf during decompression.") - end - decompSize = LibZstd.ZSTD_decompress(outbuf, decompSize, inbuf, origSize) - Libc.free(inbuf) - unsafe_store!(buf, outbuf) - outbuf = C_NULL - ret_value = Csize_t(decompSize) - else - # compression - - if cd_nelmts > 0 - aggression = Cint(unsafe_load(cd_values)) + if flags & API.H5Z_FLAG_REVERSE != 0 + #decompresssion + + decompSize = LibZstd.ZSTD_getDecompressedSize(inbuf, origSize) + outbuf = Libc.malloc(decompSize) + if outbuf == C_NULL + error( + "zstd_h5plugin: Cannot allocate memory for outbuf during decompression." 
+ ) + end + decompSize = LibZstd.ZSTD_decompress(outbuf, decompSize, inbuf, origSize) + Libc.free(inbuf) + unsafe_store!(buf, outbuf) + outbuf = C_NULL + ret_value = Csize_t(decompSize) else - aggression = CodecZstd.LibZstd.ZSTD_CLEVEL_DEFAULT - end - - if aggression < 1 - aggression = 1 # ZSTD_minCLevel() - elseif aggression > LibZstd.ZSTD_maxCLevel() - aggression = LibZstd.ZSTD_maxCLevel() - end - - compSize = LibZstd.ZSTD_compressBound(origSize) - outbuf = Libc.malloc(compSize) - if outbuf == C_NULL - error("zstd_h5plugin: Cannot allocate memory for outbuf during compression.") + # compression + + if cd_nelmts > 0 + aggression = Cint(unsafe_load(cd_values)) + else + aggression = CodecZstd.LibZstd.ZSTD_CLEVEL_DEFAULT + end + + if aggression < 1 + aggression = 1 # ZSTD_minCLevel() + elseif aggression > LibZstd.ZSTD_maxCLevel() + aggression = LibZstd.ZSTD_maxCLevel() + end + + compSize = LibZstd.ZSTD_compressBound(origSize) + outbuf = Libc.malloc(compSize) + if outbuf == C_NULL + error( + "zstd_h5plugin: Cannot allocate memory for outbuf during compression." + ) + end + + compSize = LibZstd.ZSTD_compress(outbuf, compSize, inbuf, origSize, aggression) + + Libc.free(unsafe_load(buf)) + unsafe_store!(buf, outbuf) + unsafe_store!(buf_size, compSize) + outbuf = C_NULL + ret_value = compSize end - - compSize = LibZstd.ZSTD_compress(outbuf, compSize, inbuf, origSize, aggression) - - Libc.free(unsafe_load(buf)) - unsafe_store!(buf, outbuf) - unsafe_store!(buf_size, compSize) - outbuf = C_NULL - ret_value = compSize - end catch e # "In the case of failure, the return value is 0 (zero) and all pointer arguments are left unchanged." ret_value = Csize_t(0) @error "H5Zzstd Non-Fatal ERROR: " err display(stacktrace(catch_backtrace())) finally - if outbuf != C_NULL free(outbuf) end - end # try catch finally return Csize_t(ret_value) end @@ -102,16 +108,18 @@ struct ZstdFilter <: Filter clevel::Cuint end ZstdFilter() = ZstdFilter(CodecZstd.LibZstd.ZSTD_CLEVEL_DEFAULT) - + filterid(::Type{ZstdFilter}) = H5Z_FILTER_ZSTD filtername(::Type{ZstdFilter}) = zstd_name filter_func(::Type{ZstdFilter}) = H5Z_filter_zstd -filter_cfunc(::Type{ZstdFilter}) = @cfunction(H5Z_filter_zstd, Csize_t, - (Cuint, Csize_t, Ptr{Cuint}, Csize_t, - Ptr{Csize_t}, Ptr{Ptr{Cvoid}})) +filter_cfunc(::Type{ZstdFilter}) = @cfunction( + H5Z_filter_zstd, + Csize_t, + (Cuint, Csize_t, Ptr{Cuint}, Csize_t, Ptr{Csize_t}, Ptr{Ptr{Cvoid}}) +) function __init__() register_filter(ZstdFilter) end -end # module H5Zzstd \ No newline at end of file +end # module H5Zzstd diff --git a/gen/bind_generator.jl b/gen/bind_generator.jl index 1c11adf88..a8b958874 100644 --- a/gen/bind_generator.jl +++ b/gen/bind_generator.jl @@ -10,31 +10,35 @@ bind_exceptions[:h5p_get_fapl_mpio64] = :H5Pget_fapl_mpio bind_exceptions[:h5p_set_fapl_mpio32] = :H5Pset_fapl_mpio bind_exceptions[:h5p_set_fapl_mpio64] = :H5Pset_fapl_mpio # have numbers at the end -bind_exceptions[:h5p_set_fletcher32] = :H5Pset_fletcher32 -bind_exceptions[:h5p_set_fapl_sec2] = :H5Pset_fapl_sec2 +bind_exceptions[:h5p_set_fletcher32] = :H5Pset_fletcher32 +bind_exceptions[:h5p_set_fapl_sec2] = :H5Pset_fapl_sec2 # underscore separator not removed -bind_exceptions[:h5fd_core_init] = :H5FD_core_init -bind_exceptions[:h5fd_family_init] = :H5FD_family_init -bind_exceptions[:h5fd_log_init] = :H5FD_log_init -bind_exceptions[:h5fd_mpio_init] = :H5FD_mpio_init -bind_exceptions[:h5fd_multi_init] = :H5FD_multi_init -bind_exceptions[:h5fd_sec2_init] = :H5FD_sec2_init -bind_exceptions[:h5fd_stdio_init] = 
:H5FD_stdio_init +bind_exceptions[:h5fd_core_init] = :H5FD_core_init +bind_exceptions[:h5fd_family_init] = :H5FD_family_init +bind_exceptions[:h5fd_log_init] = :H5FD_log_init +bind_exceptions[:h5fd_mpio_init] = :H5FD_mpio_init +bind_exceptions[:h5fd_multi_init] = :H5FD_multi_init +bind_exceptions[:h5fd_sec2_init] = :H5FD_sec2_init +bind_exceptions[:h5fd_stdio_init] = :H5FD_stdio_init # An expression which is injected at the beginning of the API defitions to aid in doing # (pre)compile-time conditional compilation based on the libhdf5 version. _libhdf5_build_ver_expr = quote _libhdf5_build_ver = let - majnum, minnum, relnum = Ref{Cuint}(), Ref{Cuint}(), Ref{Cuint}() - r = ccall((:H5get_libversion, libhdf5), herr_t, - (Ref{Cuint}, Ref{Cuint}, Ref{Cuint}), - majnum, minnum, relnum) - r < 0 && error("Error getting HDF5 library version") - VersionNumber(majnum[], minnum[], relnum[]) - end + majnum, minnum, relnum = Ref{Cuint}(), Ref{Cuint}(), Ref{Cuint}() + r = ccall( + (:H5get_libversion, libhdf5), + herr_t, + (Ref{Cuint}, Ref{Cuint}, Ref{Cuint}), + majnum, + minnum, + relnum + ) + r < 0 && error("Error getting HDF5 library version") + VersionNumber(majnum[], minnum[], relnum[]) + end end - # We'll also use this processing pass to automatically generate documentation that simply # lists all of the bound API functions. const bound_api = Dict{String,Vector{String}}() @@ -130,7 +134,8 @@ function _bind(__module__, __source__, sig::Expr, err::Union{String,Expr,Nothing # Pull apart return-type and rest of function declaration rettype = sig.args[2]::Union{Symbol,Expr} funcsig = sig.args[1] - isexpr(funcsig, :call) || error("expected function-like expression, found `", funcsig, "`") + isexpr(funcsig, :call) || + error("expected function-like expression, found `", funcsig, "`") funcsig = funcsig::Expr # Extract function name and argument list @@ -143,13 +148,15 @@ function _bind(__module__, __source__, sig::Expr, err::Union{String,Expr,Nothing for ii in 1:length(funcargs) argex = funcargs[ii] if !isexpr(argex, :(::)) || !(argex.args[1] isa Symbol) - error("expected `name::type` expression in argument ", ii, ", got ", funcargs[ii]) + error( + "expected `name::type` expression in argument ", ii, ", got ", funcargs[ii] + ) end push!(args, argex.args[1]) push!(argt, argex.args[2]) end - prefix, rest = split(string(jlfuncname), "_", limit = 2) + prefix, rest = split(string(jlfuncname), "_"; limit=2) # Translate the C function name to a local equivalent if haskey(bind_exceptions, jlfuncname) cfuncname = bind_exceptions[jlfuncname] @@ -158,7 +165,7 @@ function _bind(__module__, __source__, sig::Expr, err::Union{String,Expr,Nothing cfuncname = Symbol(uppercase(prefix), rest) # Remove the version number if present (excluding match to literal "hdf5" suffix) if occursin(r"\d(?= n + 2 && - ex.args[1] == sym + ismacro(ex, sym, n=0) = + isexpr(ex, :macrocall) && length(ex.args) >= n + 2 && ex.args[1] == sym for funcblock in exprs.args if ismacro(funcblock, Symbol("@doc"), 2) # Pretty print the doc macro as just a juxtaposed doc string and function @@ -46,8 +51,8 @@ open(joinpath(@__DIR__, "..", "src", "api", "functions.jl"), "w") do fid # rely on Julia's parsing behavior. 
print(fid, triplequote(funcblock.args[3]), funcblock.args[4], "\n\n") elseif ismacro(funcblock, Symbol("@static"), 1) && - isexpr(funcblock.args[3], :if, 2) && - ismacro(funcblock.args[3].args[2], Symbol("@doc"), 2) + isexpr(funcblock.args[3], :if, 2) && + ismacro(funcblock.args[3].args[2], Symbol("@doc"), 2) # Within a @static block, we have to keep the @doc prefix, but we can still # switch to triple-quoting and there's special parsing to allow the function # definition to be on the next line. @@ -61,8 +66,11 @@ open(joinpath(@__DIR__, "..", "src", "api", "functions.jl"), "w") do fid buf = replace(buf, r"^\s{4}"m => s"") # deindent buf = replace(buf, r"^(\s{4})\s{4}"m => s"\1") # deindent # Now format the doc string and replace (note need to indent `function`) - buf = replace(buf, r"^\s+@doc \"SENTINEL_DOC\" "m => - triplequote(docstr, " "^4, "@doc ") * " "^4) + buf = replace( + buf, + r"^\s+@doc \"SENTINEL_DOC\" "m => + triplequote(docstr, " "^4, "@doc ") * " "^4 + ) print(fid, buf, "\n\n") else # passthrough @@ -83,7 +91,7 @@ for (mod, desc, urltail) in ( ("H5I", "Identifier Interface", "Identifiers"), ("H5L", "Link Interface", "Links"), ("H5O", "Object Interface", "Objects"), - ("H5PL","Plugin Interface", "Plugins"), + ("H5PL", "Plugin Interface", "Plugins"), ("H5P", "Property Interface", "Property+Lists"), ("H5R", "Reference Interface", "References"), ("H5S", "Dataspace Interface", "Dataspaces"), @@ -93,7 +101,7 @@ for (mod, desc, urltail) in ( ("H5DS", "Dimension Scale Interface", "Dimension+Scales"), ("H5LT", "Lite Interface", "Lite"), ("H5TB", "Table Interface", "Tables"), - ) +) global apidocs funclist = sort!(bound_api[mod]) index = join(["- [`$f`](@ref $f)" for f in funclist], "\n") @@ -111,32 +119,33 @@ for (mod, desc, urltail) in ( end open(joinpath(@__DIR__, "..", "docs", "src", "api_bindings.md"), "w") do fid - write(fid, -""" -```@raw html - -``` -```@meta -CurrentModule = HDF5.API -``` - -# Low-level library bindings - -At the lowest level, `HDF5.jl` operates by calling the public API of the HDF5 shared -library through a set of `ccall` wrapper functions. -This page documents the function names and nominal C argument types of the API which -have bindings in this package. -Note that in many cases, high-level data types are valid arguments through automatic -`ccall` conversions. -For instance, `HDF5.Datatype` objects will be automatically converted to their `hid_t` ID -by Julia's `cconvert`+`unsafe_convert` `ccall` rules. - -There are additional helper wrappers (often for out-argument functions) which are not -documented here. - -$apidocs -""" + write( + fid, + """ + ```@raw html + + ``` + ```@meta + CurrentModule = HDF5.API + ``` + + # Low-level library bindings + + At the lowest level, `HDF5.jl` operates by calling the public API of the HDF5 shared + library through a set of `ccall` wrapper functions. + This page documents the function names and nominal C argument types of the API which + have bindings in this package. + Note that in many cases, high-level data types are valid arguments through automatic + `ccall` conversions. + For instance, `HDF5.Datatype` objects will be automatically converted to their `hid_t` ID + by Julia's `cconvert`+`unsafe_convert` `ccall` rules. + + There are additional helper wrappers (often for out-argument functions) which are not + documented here. 
+ + $apidocs + """ ) end diff --git a/src/HDF5.jl b/src/HDF5.jl index a7d76cca8..6501d0436 100644 --- a/src/HDF5.jl +++ b/src/HDF5.jl @@ -9,18 +9,42 @@ import Mmap ### PUBLIC API ### -export -@read, @write, -h5open, h5read, h5write, h5rewrite, h5writeattr, h5readattr, -create_attribute, open_attribute, read_attribute, write_attribute, delete_attribute, rename_attribute, attributes, attrs, -create_dataset, open_dataset, read_dataset, write_dataset, -create_group, open_group, -copy_object, open_object, delete_object, move_link, -create_datatype, commit_datatype, open_datatype, -create_property, -group_info, object_info, -dataspace, datatype, -Filters, Drivers +export @read, + @write, + h5open, + h5read, + h5write, + h5rewrite, + h5writeattr, + h5readattr, + create_attribute, + open_attribute, + read_attribute, + write_attribute, + delete_attribute, + rename_attribute, + attributes, + attrs, + create_dataset, + open_dataset, + read_dataset, + write_dataset, + create_group, + open_group, + copy_object, + open_object, + delete_object, + move_link, + create_datatype, + commit_datatype, + open_datatype, + create_property, + group_info, + object_info, + dataspace, + datatype, + Filters, + Drivers ### The following require module scoping ### @@ -69,7 +93,7 @@ end function h5read(filename, name::AbstractString; pv...) local dat - fapl = FileAccessProperties(; fclose_degree = :strong) + fapl = FileAccessProperties(; fclose_degree=:strong) pv = setproperties!(fapl; pv...) file = h5open(filename, "r", fapl) try @@ -84,7 +108,7 @@ end function h5read(filename, name_type_pair::Pair{<:AbstractString,DataType}; pv...) local dat - fapl = FileAccessProperties(; fclose_degree = :strong) + fapl = FileAccessProperties(; fclose_degree=:strong) pv = setproperties!(fapl; pv...) file = h5open(filename, "r", fapl) try @@ -97,9 +121,14 @@ function h5read(filename, name_type_pair::Pair{<:AbstractString,DataType}; pv... dat end -function h5read(filename, name::AbstractString, indices::Tuple{Vararg{Union{AbstractRange{Int},Int,Colon}}}; pv...) +function h5read( + filename, + name::AbstractString, + indices::Tuple{Vararg{Union{AbstractRange{Int},Int,Colon}}}; + pv... +) local dat - fapl = FileAccessProperties(; fclose_degree = :strong) + fapl = FileAccessProperties(; fclose_degree=:strong) pv = setproperties!(fapl; pv...) file = h5open(filename, "r", fapl) try @@ -112,8 +141,6 @@ function h5read(filename, name::AbstractString, indices::Tuple{Vararg{Union{Abst dat end - - function Base.getindex(parent::Union{File,Group}, path::AbstractString; pv...) haskey(parent, path) || throw(KeyError(path)) # Faster than below if defaults are OK @@ -136,7 +163,9 @@ end # Assign syntax: obj[path] = value # Create a dataset with properties: obj[path, prop = val, ...] = val -function Base.setindex!(parent::Union{File,Group}, val, path::Union{AbstractString,Nothing}; pv...) +function Base.setindex!( + parent::Union{File,Group}, val, path::Union{AbstractString,Nothing}; pv... +) need_chunks = any(k in keys(chunked_props) for k in keys(pv)) have_chunks = any(k == :chunk for k in keys(pv)) @@ -148,7 +177,7 @@ function Base.setindex!(parent::Union{File,Group}, val, path::Union{AbstractStri pv = pairs(Base.structdiff((; pv...), chunked_props)) else if need_chunks && !have_chunks - pv = pairs((; chunk = chunk, pv...)) + pv = pairs((; chunk=chunk, pv...)) end end write(parent, path, val; pv...) 
@@ -172,7 +201,6 @@ get_create_properties(g::Group) = GroupCreateProperties(API.h5g_get_create_p get_create_properties(f::File) = FileCreateProperties(API.h5f_get_create_plist(f)) get_create_properties(a::Attribute) = AttributeCreateProperties(API.h5a_get_create_plist(a)) - const HAS_PARALLEL = Ref(false) """ @@ -196,12 +224,16 @@ function __init__() ASCII_ATTRIBUTE_PROPERTIES.char_encoding = :ascii UTF8_ATTRIBUTE_PROPERTIES.char_encoding = :utf8 - @require FileIO="5789e2e9-d7fb-5bc7-8068-2c6fae9b9549" begin - @require OrderedCollections="bac558e1-5e72-5ebc-8fee-abe8a469f55d" include("fileio.jl") + @require FileIO = "5789e2e9-d7fb-5bc7-8068-2c6fae9b9549" begin + @require OrderedCollections = "bac558e1-5e72-5ebc-8fee-abe8a469f55d" include( + "fileio.jl" + ) end - @require H5Zblosc="c8ec2601-a99c-407f-b158-e79c03c2f5f7" begin - set_blosc!(p::Properties, val::Bool) = val && push!(Filters.FilterPipeline(p), H5Zblosc.BloscFilter()) - set_blosc!(p::Properties, level::Integer) = push!(Filters.FilterPipeline(p), H5Zblosc.BloscFilter(level=level)) + @require H5Zblosc = "c8ec2601-a99c-407f-b158-e79c03c2f5f7" begin + set_blosc!(p::Properties, val::Bool) = + val && push!(Filters.FilterPipeline(p), H5Zblosc.BloscFilter()) + set_blosc!(p::Properties, level::Integer) = + push!(Filters.FilterPipeline(p), H5Zblosc.BloscFilter(; level=level)) end return nothing diff --git a/src/api/api.jl b/src/api/api.jl index f868a4663..abd5f93c9 100644 --- a/src/api/api.jl +++ b/src/api/api.jl @@ -7,8 +7,10 @@ const depsfile = joinpath(@__DIR__, "..", "..", "deps", "deps.jl") if isfile(depsfile) include(depsfile) else - error("HDF5 is not properly installed. Please run Pkg.build(\"HDF5\") ", - "and restart Julia.") + error( + "HDF5 is not properly installed. Please run Pkg.build(\"HDF5\") ", + "and restart Julia." + ) end include("types.jl") diff --git a/src/api/error.jl b/src/api/error.jl index 7bfae4a0d..5f698c552 100644 --- a/src/api/error.jl +++ b/src/api/error.jl @@ -62,7 +62,13 @@ function Base.showerror(io::IO, err::H5Error) minor = h5e_get_msg(errval.min_num)[2] print(io, major, "/", minor) if errval.desc != C_NULL - printstyled(io, "\n", " "^(4 + ndigits(n_total)), unsafe_string(errval.desc), color=:light_black) + printstyled( + io, + "\n", + " "^(4 + ndigits(n_total)), + unsafe_string(errval.desc); + color=:light_black + ) end if SHORT_ERROR[] if n_total > 1 diff --git a/src/api/functions.jl b/src/api/functions.jl index f52c3fc9d..736fab890 100644 --- a/src/api/functions.jl +++ b/src/api/functions.jl @@ -5403,4 +5403,3 @@ function h5fd_stdio_init() var"#status#" < 0 && @h5error("Error initializing file driver") return var"#status#" end - diff --git a/src/api/helpers.jl b/src/api/helpers.jl index 0e4b96ba6..823ef0097 100644 --- a/src/api/helpers.jl +++ b/src/api/helpers.jl @@ -37,7 +37,7 @@ end function h5a_get_name(attr_id) len = h5a_get_name(attr_id, 0, C_NULL) buf = StringVector(len) - h5a_get_name(attr_id, len+1, buf) + h5a_get_name(attr_id, len + 1, buf) return String(buf) end @@ -52,7 +52,9 @@ end # emulating it with the less desirable form of creating closure handles directly in # `@cfunction` with `$f`. # This helper translates between the two preferred forms for each respective language. 
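# Schematically, the two forms look like this (a sketch; only the second form is
# used in this file):
#
#     fptr = @cfunction($f, herr_t, (hid_t, Ptr{Cchar}, Ptr{H5A_info_t}))  # closure handle
#     fptr = @cfunction(h5a_iterate_helper, herr_t,
#                       (hid_t, Ptr{Cchar}, Ptr{H5A_info_t}, Any))  # plain function + data slot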
-function h5a_iterate_helper(loc_id::hid_t, attr_name::Ptr{Cchar}, ainfo::Ptr{H5A_info_t}, @nospecialize(data::Any))::herr_t +function h5a_iterate_helper( + loc_id::hid_t, attr_name::Ptr{Cchar}, ainfo::Ptr{H5A_info_t}, @nospecialize(data::Any) +)::herr_t f, err_ref = data try return herr_t(f(loc_id, attr_name, ainfo)) @@ -85,7 +87,7 @@ julia> HDF5.API.h5a_iterate(obj, HDF5.API.H5_INDEX_NAME, HDF5.API.H5_ITER_INC) d end ``` """ -function h5a_iterate(@nospecialize(f), obj_id, idx_type, order, idx = 0) +function h5a_iterate(@nospecialize(f), obj_id, idx_type, order, idx=0) err_ref = Ref{Any}(nothing) idxref = Ref{hsize_t}(idx) fptr = @cfunction(h5a_iterate_helper, herr_t, (hid_t, Ptr{Cchar}, Ptr{H5A_info_t}, Any)) @@ -130,9 +132,10 @@ function h5d_get_chunk_info(dataset_id, fspace_id, index) addr = Ref{haddr_t}() size = Ref{hsize_t}() h5d_get_chunk_info(dataset_id, fspace_id, index, offset, filter_mask, addr, size) - return (offset = offset, filter_mask = filter_mask[], addr = addr[], size = size[]) + return (offset=offset, filter_mask=filter_mask[], addr=addr[], size=size[]) end -h5d_get_chunk_info(dataset_id, index; fspace_id = H5S_ALL) = h5d_get_chunk_info(dataset_id, fspace_id, index) +h5d_get_chunk_info(dataset_id, index; fspace_id=H5S_ALL) = + h5d_get_chunk_info(dataset_id, fspace_id, index) """ h5d_get_chunk_info_by_coord(dataset_id, offset) @@ -144,7 +147,7 @@ function h5d_get_chunk_info_by_coord(dataset_id, offset) addr = Ref{haddr_t}() size = Ref{hsize_t}() h5d_get_chunk_info_by_coord(dataset_id, offset, filter_mask, addr, size) - return (filter_mask = filter_mask[], addr = addr[], size = size[]) + return (filter_mask=filter_mask[], addr=addr[], size=size[]) end """ @@ -164,7 +167,7 @@ end Helper method to retrieve the number of chunks. Returns an integer of type `HDF5.API.hsize_t`. """ - function h5d_get_num_chunks(dataset_id, fspace_id = H5S_ALL) + function h5d_get_num_chunks(dataset_id, fspace_id=H5S_ALL) nchunks = Ref{hsize_t}() h5d_get_num_chunks(dataset_id, fspace_id, nchunks) return nchunks[] @@ -183,7 +186,6 @@ function h5d_get_space_status(dataset_id) return r[] end - ### ### Error Interface ### @@ -203,14 +205,15 @@ function h5e_get_msg(mesg_id) mesg_type = Ref{Cint}() mesg_len = h5e_get_msg(mesg_id, mesg_type, C_NULL, 0) buffer = StringVector(mesg_len) - h5e_get_msg(mesg_id, mesg_type, buffer, mesg_len+1) + h5e_get_msg(mesg_id, mesg_type, buffer, mesg_len + 1) resize!(buffer, mesg_len) return mesg_type[], String(buffer) end - # See explanation for h5a_iterate above. 
-function h5e_walk_helper(n::Cuint, err_desc::Ptr{H5E_error2_t}, @nospecialize(data::Any))::herr_t +function h5e_walk_helper( + n::Cuint, err_desc::Ptr{H5E_error2_t}, @nospecialize(data::Any) +)::herr_t f, err_ref = data try return herr_t(f(n, err_desc)) @@ -246,7 +249,7 @@ end function h5f_get_name(loc_id) len = h5f_get_name(loc_id, C_NULL, 0) buf = StringVector(len) - h5f_get_name(loc_id, buf, len+1) + h5f_get_name(loc_id, buf, len + 1) return String(buf) end @@ -289,24 +292,26 @@ function h5p_get_file_locking(fapl) use_file_locking = Ref{API.hbool_t}(0) ignore_when_disabled = Ref{API.hbool_t}(0) h5p_get_file_locking(fapl, use_file_locking, ignore_when_disabled) - return (use_file_locking = Bool(use_file_locking[]), - ignore_when_disabled = Bool(ignore_when_disabled[])) + return ( + use_file_locking = Bool(use_file_locking[]), + ignore_when_disabled = Bool(ignore_when_disabled[]) + ) end # Check to see if h5p_set_file_locking should exist const _has_h5p_set_file_locking = _has_symbol(:H5Pset_file_locking) function has_h5p_set_file_locking() - return _has_h5p_set_file_locking - #= - h5_version = h5_get_libversion() - if (h5_version >= v"1.10" && h5_version < v"1.10.7") || - (h5_version >= v"1.12" && h5_version < v"1.12.1") || - (h5_version < v"1.10") - return false - else - return true - end - =# + return _has_h5p_set_file_locking + #= + h5_version = h5_get_libversion() + if (h5_version >= v"1.10" && h5_version < v"1.10.7") || + (h5_version >= v"1.12" && h5_version < v"1.12.1") || + (h5_version < v"1.10") + return false + else + return true + end + =# end function h5p_get_file_space_strategy(plist_id) @@ -314,7 +319,7 @@ function h5p_get_file_space_strategy(plist_id) persist = Ref{hbool_t}(0) threshold = Ref{hsize_t}() h5p_get_file_space_strategy(plist_id, strategy, persist, threshold) - return (strategy = strategy[], persist = persist[], threshold = threshold[]) + return (strategy=strategy[], persist=persist[], threshold=threshold[]) end function h5p_get_file_space_page_size(plist_id) @@ -323,7 +328,9 @@ function h5p_get_file_space_page_size(plist_id) return fsp_size[] end -function h5p_set_file_space_strategy(plist_id; strategy = nothing, persist = nothing, threshold = nothing) +function h5p_set_file_space_strategy( + plist_id; strategy=nothing, persist=nothing, threshold=nothing +) current = h5p_get_file_space_strategy(plist_id) strategy = isnothing(strategy) ? current[:strategy] : strategy persist = isnothing(persist) ? current[:persist] : persist @@ -354,7 +361,7 @@ end function h5i_get_name(loc_id) len = h5i_get_name(loc_id, C_NULL, 0) buf = StringVector(len) - h5i_get_name(loc_id, buf, len+1) + h5i_get_name(loc_id, buf, len + 1) return String(buf) end @@ -376,7 +383,9 @@ function h5l_get_name_by_idx(loc_id, group_name, idx_type, order, idx, lapl_id) end # See explanation for h5a_iterate above. 
-function h5l_iterate_helper(group::hid_t, name::Ptr{Cchar}, info::Ptr{H5L_info_t}, @nospecialize(data::Any))::herr_t +function h5l_iterate_helper( + group::hid_t, name::Ptr{Cchar}, info::Ptr{H5L_info_t}, @nospecialize(data::Any) +)::herr_t f, err_ref = data try return herr_t(f(group, name, info)) @@ -407,12 +416,12 @@ julia> HDF5.API.h5l_iterate(hfile, HDF5.API.H5_INDEX_NAME, HDF5.API.H5_ITER_INC) end ``` """ -function h5l_iterate(@nospecialize(f), group_id, idx_type, order, idx = 0) +function h5l_iterate(@nospecialize(f), group_id, idx_type, order, idx=0) err_ref = Ref{Any}(nothing) idxref = Ref{hsize_t}(idx) fptr = @cfunction(h5l_iterate_helper, herr_t, (hid_t, Ptr{Cchar}, Ptr{H5L_info_t}, Any)) try - h5l_iterate(group_id, idx_type, order, idxref, fptr, (f,err_ref)) + h5l_iterate(group_id, idx_type, order, idxref, fptr, (f, err_ref)) catch h5err jlerr = err_ref[] if !isnothing(jlerr) @@ -467,7 +476,9 @@ elseif _libhdf5_build_ver >= v"1.10.3" && _libhdf5_build_ver < v"1.12.0" end # H5Oget_info_by_idx2 - function h5o_get_info_by_idx(loc_id, group_name, idx_type, order, n, fields=H5O_INFO_ALL, lapl=H5P_DEFAULT) + function h5o_get_info_by_idx( + loc_id, group_name, idx_type, order, n, fields=H5O_INFO_ALL, lapl=H5P_DEFAULT + ) oinfo = Ref{H5O_info1_t}() h5o_get_info_by_idx(loc_id, group_name, idx_type, order, n, oinfo, fields, lapl) return oinfo[] @@ -490,25 +501,22 @@ else # _libhdf5_build_ver >= v"1.12.0" end # H5Oget_info_by_idx3 - function h5o_get_info_by_idx(loc_id, group_name, idx_type, order, n, fields=H5O_INFO_ALL, lapl=H5P_DEFAULT) + function h5o_get_info_by_idx( + loc_id, group_name, idx_type, order, n, fields=H5O_INFO_ALL, lapl=H5P_DEFAULT + ) oinfo = Ref{H5O_info2_t}() h5o_get_info_by_idx(loc_id, group_name, idx_type, order, n, oinfo, fields, lapl) return oinfo[] end - function h5o_get_native_info( - loc_id, - fields=H5O_NATIVE_INFO_ALL - ) + function h5o_get_native_info(loc_id, fields=H5O_NATIVE_INFO_ALL) oinfo = Ref{H5O_native_info_t}() h5o_get_native_info(loc_id, oinfo, fields) return oinfo[] end function h5o_get_native_info_by_idx( - loc_id, group_name, idx_type, order, n, - fields=H5O_NATIVE_INFO_ALL, - lapl=H5P_DEFAULT + loc_id, group_name, idx_type, order, n, fields=H5O_NATIVE_INFO_ALL, lapl=H5P_DEFAULT ) oinfo = Ref{H5O_native_info_t}() h5o_get_native_info_by_idx( @@ -518,15 +526,12 @@ else # _libhdf5_build_ver >= v"1.12.0" end function h5o_get_native_info_by_name( - loc_id, name, - fields=H5O_NATIVE_INFO_ALL, - lapl=H5P_DEFAULT + loc_id, name, fields=H5O_NATIVE_INFO_ALL, lapl=H5P_DEFAULT ) oinfo = Ref{H5O_native_info_t}() h5o_get_native_info_by_name(loc_id, name, oinfo, fields, lapl) return oinfo[] end - end # @static if _libhdf5_build_ver < v"1.12.0" # Add a default link access property list if not specified @@ -544,7 +549,9 @@ Deprecated HDF5 function. Use [`h5o_get_info`](@ref) or [`h5o_get_native_info`]( See `libhdf5` documentation for [`H5Oget_info1`](https://portal.hdfgroup.org/display/HDF5/H5O_GET_INFO1). 
""" function h5o_get_info1(object_id, buf) - var"#status#" = ccall((:H5Oget_info1, libhdf5), herr_t, (hid_t, Ptr{H5O_info_t}), object_id, buf) + var"#status#" = ccall( + (:H5Oget_info1, libhdf5), herr_t, (hid_t, Ptr{H5O_info_t}), object_id, buf + ) var"#status#" < 0 && @h5error("Error getting object info") return nothing end @@ -589,7 +596,7 @@ function h5p_get_chunk_cache(dapl_id) nbytes = Ref{Csize_t}() w0 = Ref{Cdouble}() h5p_get_chunk_cache(dapl_id, nslots, nbytes, w0) - return (nslots = nslots[], nbytes = nbytes[], w0 = w0[]) + return (nslots=nslots[], nbytes=nbytes[], w0=w0[]) end function h5p_get_create_intermediate_group(plist_id) @@ -607,16 +614,23 @@ end function h5p_get_efile_prefix(plist) efile_len = h5p_get_efile_prefix(plist, C_NULL, 0) buffer = StringVector(efile_len) - prefix_size = h5p_get_efile_prefix(plist, buffer, efile_len+1) + prefix_size = h5p_get_efile_prefix(plist, buffer, efile_len + 1) return String(buffer) end function h5p_set_efile_prefix(plist, sym::Symbol) - sym === :origin ? h5p_set_efile_prefix(plist, raw"$ORIGIN") : - throw(ArgumentError("The only valid `Symbol` argument for `h5p_set_efile_prefix` is `:origin`. Got `$sym`.")) + if sym === :origin + h5p_set_efile_prefix(plist, raw"$ORIGIN") + else + throw( + ArgumentError( + "The only valid `Symbol` argument for `h5p_set_efile_prefix` is `:origin`. Got `$sym`." + ) + ) + end end -function h5p_get_external(plist, idx = 0) +function h5p_get_external(plist, idx=0) offset = Ref{off_t}(0) sz = Ref{hsize_t}(0) name_size = 64 @@ -628,7 +642,7 @@ function h5p_get_external(plist, idx = 0) name_size *= 2 resize!(name, name_size) else - resize!(name, null_id-1) + resize!(name, null_id - 1) break end end @@ -646,7 +660,7 @@ function h5p_get_external(plist, idx = 0) # Scenario 2: The size is in the upper 32 bits, lower 32 bits is 0 as of HDF5 v1.12.1 sz[] = lower == 0 && upper != 0xffffffff ? 
upper : lower end - return (name = String(name), offset = offset[], size = sz[]) + return (name=String(name), offset=offset[], size=sz[]) end function h5p_get_fclose_degree(fapl_id) @@ -695,7 +709,7 @@ end function h5p_get_virtual_prefix(dapl_id) virtual_file_len = h5p_get_virtual_prefix(dapl_id, C_NULL, 0) buffer = StringVector(virtual_file_len) - prefix_size = h5p_get_virtual_prefix(dapl_id, buffer, virtual_file_len+1) + prefix_size = h5p_get_virtual_prefix(dapl_id, buffer, virtual_file_len + 1) return String(buffer) end @@ -757,7 +771,7 @@ function h5pl_get_loading_state() plugin_control_mask[] end -function h5pl_get(index = 0) +function h5pl_get(index=0) buf_size = Csize_t(1024) path_buf = Vector{Cchar}(undef, buf_size) h5pl_get(index, path_buf, buf_size) @@ -770,7 +784,6 @@ function h5pl_size() num_paths[] end - ### ### Reference Interface ### @@ -780,7 +793,7 @@ end ### function h5s_get_regular_hyperslab(space_id) - n = h5s_get_simple_extent_ndims(space_id) + n = h5s_get_simple_extent_ndims(space_id) start = Vector{hsize_t}(undef, n) stride = Vector{hsize_t}(undef, n) count = Vector{hsize_t}(undef, n) @@ -803,7 +816,6 @@ function h5s_get_simple_extent_dims(space_id, ::Nothing) return dims end - ### ### Datatype Interface ### @@ -857,9 +869,7 @@ function h5t_get_tag(type_id) return s end -h5t_get_native_type(type_id) = - h5t_get_native_type(type_id, H5T_DIR_ASCEND) - +h5t_get_native_type(type_id) = h5t_get_native_type(type_id, H5T_DIR_ASCEND) ### ### Optimized Functions Interface @@ -901,7 +911,7 @@ function h5tb_get_field_info(loc_id, table_name) did = h5d_open(loc_id, table_name, H5P_DEFAULT) tid = h5d_get_type(did) h5d_close(did) - field_names = [h5t_get_member_name(tid, i-1) for i in 1:nfields] + field_names = [h5t_get_member_name(tid, i - 1) for i in 1:nfields] h5t_close(tid) return field_names, field_sizes, field_offsets, type_size[] end @@ -916,7 +926,6 @@ function h5z_get_filter_info(filter) ref[] end - ### ### MPIO ### @@ -926,7 +935,6 @@ h5p_set_fapl_mpio(fapl_id, comm::Hmpih32, info::Hmpih32) = h5p_set_fapl_mpio(fapl_id, comm::Hmpih64, info::Hmpih64) = h5p_set_fapl_mpio64(fapl_id, comm, info) - h5p_get_fapl_mpio(fapl_id, comm::Ref{Hmpih32}, info::Ref{Hmpih32}) = h5p_get_fapl_mpio32(fapl_id, comm, info) h5p_get_fapl_mpio(fapl_id, comm::Ref{Hmpih64}, info::Ref{Hmpih64}) = diff --git a/src/api/types.jl b/src/api/types.jl index 245cddf5b..5a5e31521 100644 --- a/src/api/types.jl +++ b/src/api/types.jl @@ -117,7 +117,7 @@ const H5O_NATIVE_INFO_META_SIZE = 0x0010 const H5O_NATIVE_INFO_ALL = H5O_NATIVE_INFO_HDR | H5O_NATIVE_INFO_META_SIZE struct H5O_token_t - __data::NTuple{16, UInt8} + __data::NTuple{16,UInt8} end @enum H5O_type_t::Cint begin H5O_TYPE_UNKNOWN = -1 @@ -162,7 +162,6 @@ const H5O_info_t = H5O_info1_t Base.getproperty(oinfo::H5O_info1_t, field::Symbol) = field == :type ? 
getfield(oinfo, :otype) : getfield(oinfo, field) - struct H5O_info2_t fileno::Culong token::H5O_token_t @@ -208,19 +207,18 @@ struct H5E_error2_t desc::Cstring # optional supplied description end - # MPI communicators required by H5P -abstract type Hmpih end +abstract type Hmpih end primitive type Hmpih32 <: Hmpih 32 end # MPICH C/Fortran, OpenMPI Fortran: 32 bit handles primitive type Hmpih64 <: Hmpih 64 end # OpenMPI C: pointers (mostly 64 bit) # HDFS Drivers struct H5FD_hdfs_fapl_t version::Int32 - namenode_name::NTuple{129, Cchar} + namenode_name::NTuple{129,Cchar} namenode_port::Int32 - user_name::NTuple{129, Cchar} - kerberos_ticket_cache::NTuple{129, Cchar} + user_name::NTuple{129,Cchar} + kerberos_ticket_cache::NTuple{129,Cchar} stream_buffer_size::Int32 end @@ -229,8 +227,8 @@ struct H5FD_splitter_vfd_config_t version::Cuint rw_fapl_id::hid_t wo_fapl_id::hid_t - wo_path::NTuple{4097, Cchar} - log_file_path::NTuple{4097, Cchar} + wo_path::NTuple{4097,Cchar} + log_file_path::NTuple{4097,Cchar} ignore_wo_errs::hbool_t end @@ -259,9 +257,9 @@ _has_symbol(sym::Symbol) = Libdl.dlsym(libhdf5handle[], sym; throw_error=false) #const H5_INDEX_N = 2 # dataset constants -const H5D_COMPACT = 0 -const H5D_CONTIGUOUS = 1 -const H5D_CHUNKED = 2 +const H5D_COMPACT = 0 +const H5D_CONTIGUOUS = 1 +const H5D_CHUNKED = 2 # allocation times (C enum H5D_alloc_time_t) const H5D_ALLOC_TIME_ERROR = -1 @@ -283,7 +281,7 @@ const H5D_SPACE_STATUS_ALLOCATED = Cint(2) const H5D_space_status_t = Cint # error-related constants -const H5E_DEFAULT = 0 +const H5E_DEFAULT = 0 const H5E_WALK_UPWARD = 0 const H5E_WALK_DOWNWARD = 1 @@ -299,12 +297,12 @@ const H5F_ACC_SWMR_READ = 0x0040 # Library versions @enum H5F_libver_t::Int32 begin - H5F_LIBVER_ERROR = -1 + H5F_LIBVER_ERROR = -1 H5F_LIBVER_EARLIEST = 0 - H5F_LIBVER_V18 = 1 - H5F_LIBVER_V110 = 2 - H5F_LIBVER_V112 = 3 - H5F_LIBVER_NBOUNDS = 4 + H5F_LIBVER_V18 = 1 + H5F_LIBVER_V110 = 2 + H5F_LIBVER_V112 = 3 + H5F_LIBVER_NBOUNDS = 4 end # H5F_LIBVER_LATEST defined in helpers.jl @@ -314,7 +312,7 @@ const H5F_OBJ_DATASET = 0x0002 const H5F_OBJ_GROUP = 0x0004 const H5F_OBJ_DATATYPE = 0x0008 const H5F_OBJ_ATTR = 0x0010 -const H5F_OBJ_ALL = (H5F_OBJ_FILE|H5F_OBJ_DATASET|H5F_OBJ_GROUP|H5F_OBJ_DATATYPE|H5F_OBJ_ATTR) +const H5F_OBJ_ALL = (H5F_OBJ_FILE | H5F_OBJ_DATASET | H5F_OBJ_GROUP | H5F_OBJ_DATATYPE | H5F_OBJ_ATTR) const H5F_OBJ_LOCAL = 0x0020 # other file constants @@ -335,19 +333,19 @@ const H5FD_MPIO_COLLECTIVE_IO = 0 const H5FD_MPIO_INDIVIDUAL_IO = 1 # object types (C enum H5Itype_t) -const H5I_FILE = 1 -const H5I_GROUP = 2 -const H5I_DATATYPE = 3 -const H5I_DATASPACE = 4 -const H5I_DATASET = 5 -const H5I_ATTR = 6 -const H5I_REFERENCE = 7 -const H5I_VFL = 8 +const H5I_FILE = 1 +const H5I_GROUP = 2 +const H5I_DATATYPE = 3 +const H5I_DATASPACE = 4 +const H5I_DATASET = 5 +const H5I_ATTR = 6 +const H5I_REFERENCE = 7 +const H5I_VFL = 8 # Link constants -const H5L_TYPE_HARD = 0 -const H5L_TYPE_SOFT = 1 -const H5L_TYPE_EXTERNAL= 2 +const H5L_TYPE_HARD = 0 +const H5L_TYPE_SOFT = 1 +const H5L_TYPE_EXTERNAL = 2 # H5O_INFO constants const H5O_INFO_BASIC = Cuint(0x0001) @@ -355,7 +353,8 @@ const H5O_INFO_TIME = Cuint(0x0002) const H5O_INFO_NUM_ATTRS = Cuint(0x0004) const H5O_INFO_HDR = Cuint(0x0008) const H5O_INFO_META_SIZE = Cuint(0x0010) -const H5O_INFO_ALL = H5O_INFO_BASIC | H5O_INFO_TIME | H5O_INFO_NUM_ATTRS | H5O_INFO_HDR | H5O_INFO_META_SIZE +const H5O_INFO_ALL = + H5O_INFO_BASIC | H5O_INFO_TIME | H5O_INFO_NUM_ATTRS | H5O_INFO_HDR | H5O_INFO_META_SIZE # Object constants # 
Moved to H5O_type_t enum @@ -393,8 +392,8 @@ const H5P_CRT_ORDER_TRACKED = 1 const H5P_CRT_ORDER_INDEXED = 2 # Reference constants -const H5R_OBJECT = 0 -const H5R_DATASET_REGION = 1 +const H5R_OBJECT = 0 +const H5R_DATASET_REGION = 1 const H5R_OBJ_REF_BUF_SIZE = 8 # == sizeof(hobj_ref_t) const H5R_DSET_REG_REF_BUF_SIZE = 12 # == sizeof(hdset_reg_ref_t) @@ -424,18 +423,18 @@ const H5S_SEL_HYPERSLABS = 2 const H5S_SEL_ALL = 3 # type classes (C enum H5T_class_t) -const H5T_NO_CLASS = hid_t(-1) -const H5T_INTEGER = hid_t(0) -const H5T_FLOAT = hid_t(1) -const H5T_TIME = hid_t(2) # not supported by HDF5 library -const H5T_STRING = hid_t(3) -const H5T_BITFIELD = hid_t(4) -const H5T_OPAQUE = hid_t(5) -const H5T_COMPOUND = hid_t(6) -const H5T_REFERENCE = hid_t(7) -const H5T_ENUM = hid_t(8) -const H5T_VLEN = hid_t(9) -const H5T_ARRAY = hid_t(10) +const H5T_NO_CLASS = hid_t(-1) +const H5T_INTEGER = hid_t(0) +const H5T_FLOAT = hid_t(1) +const H5T_TIME = hid_t(2) # not supported by HDF5 library +const H5T_STRING = hid_t(3) +const H5T_BITFIELD = hid_t(4) +const H5T_OPAQUE = hid_t(5) +const H5T_COMPOUND = hid_t(6) +const H5T_REFERENCE = hid_t(7) +const H5T_ENUM = hid_t(8) +const H5T_VLEN = hid_t(9) +const H5T_ARRAY = hid_t(10) # Byte orders (C enum H5T_order_t) const H5T_ORDER_ERROR = -1 # error @@ -452,18 +451,18 @@ const H5T_NORM_MSBSET = 1 # msb of mantissa is always 1 const H5T_NORM_NONE = 2 # not normalized # Character types -const H5T_CSET_ASCII = 0 -const H5T_CSET_UTF8 = 1 +const H5T_CSET_ASCII = 0 +const H5T_CSET_UTF8 = 1 # Sign types (C enum H5T_sign_t) -const H5T_SGN_ERROR = Cint(-1) # error -const H5T_SGN_NONE = Cint(0) # unsigned -const H5T_SGN_2 = Cint(1) # 2's complement -const H5T_NSGN = Cint(2) # sentinel: this must be last! +const H5T_SGN_ERROR = Cint(-1) # error +const H5T_SGN_NONE = Cint(0) # unsigned +const H5T_SGN_2 = Cint(1) # 2's complement +const H5T_NSGN = Cint(2) # sentinel: this must be last! 
# Search directions -const H5T_DIR_ASCEND = 1 -const H5T_DIR_DESCEND = 2 +const H5T_DIR_ASCEND = 1 +const H5T_DIR_DESCEND = 2 # String padding modes const H5T_STR_NULLTERM = 0 @@ -495,17 +494,17 @@ const H5T_C_S1 = _read_const(:H5T_C_S1_g) const H5T_STD_REF_OBJ = _read_const(:H5T_STD_REF_OBJ_g) const H5T_STD_REF_DSETREG = _read_const(:H5T_STD_REF_DSETREG_g) # Native types -const H5T_NATIVE_B8 = _read_const(:H5T_NATIVE_B8_g) -const H5T_NATIVE_INT8 = _read_const(:H5T_NATIVE_INT8_g) -const H5T_NATIVE_UINT8 = _read_const(:H5T_NATIVE_UINT8_g) -const H5T_NATIVE_INT16 = _read_const(:H5T_NATIVE_INT16_g) -const H5T_NATIVE_UINT16 = _read_const(:H5T_NATIVE_UINT16_g) -const H5T_NATIVE_INT32 = _read_const(:H5T_NATIVE_INT32_g) -const H5T_NATIVE_UINT32 = _read_const(:H5T_NATIVE_UINT32_g) -const H5T_NATIVE_INT64 = _read_const(:H5T_NATIVE_INT64_g) -const H5T_NATIVE_UINT64 = _read_const(:H5T_NATIVE_UINT64_g) -const H5T_NATIVE_FLOAT = _read_const(:H5T_NATIVE_FLOAT_g) -const H5T_NATIVE_DOUBLE = _read_const(:H5T_NATIVE_DOUBLE_g) +const H5T_NATIVE_B8 = _read_const(:H5T_NATIVE_B8_g) +const H5T_NATIVE_INT8 = _read_const(:H5T_NATIVE_INT8_g) +const H5T_NATIVE_UINT8 = _read_const(:H5T_NATIVE_UINT8_g) +const H5T_NATIVE_INT16 = _read_const(:H5T_NATIVE_INT16_g) +const H5T_NATIVE_UINT16 = _read_const(:H5T_NATIVE_UINT16_g) +const H5T_NATIVE_INT32 = _read_const(:H5T_NATIVE_INT32_g) +const H5T_NATIVE_UINT32 = _read_const(:H5T_NATIVE_UINT32_g) +const H5T_NATIVE_INT64 = _read_const(:H5T_NATIVE_INT64_g) +const H5T_NATIVE_UINT64 = _read_const(:H5T_NATIVE_UINT64_g) +const H5T_NATIVE_FLOAT = _read_const(:H5T_NATIVE_FLOAT_g) +const H5T_NATIVE_DOUBLE = _read_const(:H5T_NATIVE_DOUBLE_g) # Other type constants const H5T_VARIABLE = reinterpret(UInt, -1) @@ -593,7 +592,6 @@ end H5Z_NO_EDC = 2 end - # Callbacks # typedef herr_t ( * H5P_prp_cb1_t ) ( const char * name , size_t size , void * value ) const H5P_prp_cb1_t = Ptr{Cvoid} @@ -634,7 +632,6 @@ struct H5Z_cb_t op_data::Ptr{Cvoid} end - @enum H5C_cache_incr_mode::UInt32 begin H5C_incr__off = 0 H5C_incr__threshold = 1 @@ -652,13 +649,12 @@ end H5C_decr__age_out_with_threshold = 3 end - struct H5AC_cache_config_t version::Cint rpt_fcn_enabled::hbool_t open_trace_file::hbool_t close_trace_file::hbool_t - trace_file_name::NTuple{1025, Cchar} + trace_file_name::NTuple{1025,Cchar} evictions_enabled::hbool_t set_initial_size::hbool_t initial_size::Csize_t @@ -732,5 +728,5 @@ end struct H5F_retry_info_t nbins::Cuint - retries::NTuple{21, Ptr{UInt32}} + retries::NTuple{21,Ptr{UInt32}} end diff --git a/src/api_midlevel.jl b/src/api_midlevel.jl index 3bf8d4b0c..45d1939c8 100644 --- a/src/api_midlevel.jl +++ b/src/api_midlevel.jl @@ -20,7 +20,9 @@ Change the dimensions of a dataspace `dspace` to `new_dims`, optionally with the dimensions `max_dims` different from the active size `new_dims`. If not given, `max_dims` is set equal to `new_dims`. """ -function set_extent_dims(dspace::Dataspace, size::Dims, max_dims::Union{Dims,Nothing} = nothing) +function set_extent_dims( + dspace::Dataspace, size::Dims, max_dims::Union{Dims,Nothing}=nothing +) checkvalid(dspace) rank = length(size) current_size = API.hsize_t[reverse(size)...] 
@@ -39,8 +41,8 @@ function get_extent_dims(obj::Union{Dataspace,Dataset,Attribute}) h5_dims, h5_maxdims = API.h5s_get_simple_extent_dims(dspace) # reverse dimensions since hdf5 uses C-style order N = length(h5_dims) - dims = ntuple(i -> @inbounds(Int(h5_dims[N-i+1])), N) - maxdims = ntuple(i -> @inbounds(h5_maxdims[N-i+1]) % Int, N) # allows max_dims to be specified as -1 without triggering an overflow + dims = ntuple(i -> @inbounds(Int(h5_dims[N - i + 1])), N) + maxdims = ntuple(i -> @inbounds(h5_maxdims[N - i + 1]) % Int, N) # allows max_dims to be specified as -1 without triggering an overflow obj isa Dataspace || close(dspace) return dims, maxdims end @@ -54,7 +56,9 @@ For a 1-based API, see `HDF5.ChunkStorage`. function get_chunk_offset(dataset_id, index) extent = size(dataset_id) chunk = get_chunk(dataset_id) - chunk_indices = CartesianIndices(ntuple(i -> 0:extent[i]÷chunk[i]-1, length(extent))) + chunk_indices = CartesianIndices( + ntuple(i -> 0:(extent[i] ÷ chunk[i] - 1), length(extent)) + ) offset = API.hsize_t.(chunk_indices[index + 1].I .* chunk) return offset end @@ -68,7 +72,7 @@ For a 1-based API, see `HDF5.ChunkStorage`. function get_chunk_index(dataset_id, offset) extent = size(dataset_id) chunk = get_chunk(dataset_id) - chunk_indices = LinearIndices(ntuple(i->0:extent[i]÷chunk[i]-1, length(extent))) + chunk_indices = LinearIndices(ntuple(i -> 0:(extent[i] ÷ chunk[i] - 1), length(extent))) chunk_indices[(offset .÷ chunk .+ 1)...] - 1 end @@ -126,11 +130,13 @@ Argument `filters` can be retrieved by supplying a `Ref{UInt32}` value via a key This method returns `Vector{UInt8}`. """ -function read_chunk(dataset_id, offset, - buf::Vector{UInt8} = Vector{UInt8}(undef, get_chunk_length(dataset_id)); - dxpl_id = API.H5P_DEFAULT, - filters = Ref{UInt32}() - ) +function read_chunk( + dataset_id, + offset, + buf::Vector{UInt8}=Vector{UInt8}(undef, get_chunk_length(dataset_id)); + dxpl_id=API.H5P_DEFAULT, + filters=Ref{UInt32}() +) API.h5d_read_chunk(dataset_id, dxpl_id, offset, filters, buf) return buf end @@ -146,13 +152,15 @@ Argument `filters` can be retrieved by supplying a `Ref{UInt32}` value via a key This method returns `Vector{UInt8}`. """ -function read_chunk(dataset_id, index::Integer, - buf::Vector{UInt8} = Vector{UInt8}(undef, get_chunk_length(dataset_id)); - dxpl_id = API.H5P_DEFAULT, - filters = Ref{UInt32}() - ) +function read_chunk( + dataset_id, + index::Integer, + buf::Vector{UInt8}=Vector{UInt8}(undef, get_chunk_length(dataset_id)); + dxpl_id=API.H5P_DEFAULT, + filters=Ref{UInt32}() +) offset = [reverse(get_chunk_offset(dataset_id, index))...] - read_chunk(dataset_id, offset, buf; dxpl_id = dxpl_id, filters = filters) + read_chunk(dataset_id, offset, buf; dxpl_id=dxpl_id, filters=filters) end """ @@ -160,13 +168,22 @@ end Helper method to write chunks via 0-based offsets `offset` as a `Tuple`. 
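
For example, to write a buffer into the chunk at the 0-based offset `(0, 0)` of a
two-dimensional chunked dataset (`dset` and `buf` are placeholders):

    write_chunk(dset, (0, 0), buf)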
""" -function write_chunk(dataset_id, offset, buf::AbstractArray; dxpl_id = API.H5P_DEFAULT, filter_mask = 0) +function write_chunk( + dataset_id, offset, buf::AbstractArray; dxpl_id=API.H5P_DEFAULT, filter_mask=0 +) # Borrowed from write_dataset stride detection - stride(buf, 1) == 1 || throw(ArgumentError("Cannot write arrays with a different stride than `Array`")) + stride(buf, 1) == 1 || + throw(ArgumentError("Cannot write arrays with a different stride than `Array`")) API.h5d_write_chunk(dataset_id, dxpl_id, filter_mask, offset, sizeof(buf), buf) end -function write_chunk(dataset_id, offset, buf::Union{DenseArray,Base.FastContiguousSubArray}; dxpl_id = API.H5P_DEFAULT, filter_mask = 0) +function write_chunk( + dataset_id, + offset, + buf::Union{DenseArray,Base.FastContiguousSubArray}; + dxpl_id=API.H5P_DEFAULT, + filter_mask=0 +) # We can bypass the need to check stride with Array and FastContiguousSubArray API.h5d_write_chunk(dataset_id, dxpl_id, filter_mask, offset, sizeof(buf), buf) end @@ -176,19 +193,27 @@ end Helper method to write chunks via 0-based integer `index`. """ -function write_chunk(dataset_id, index::Integer, buf::AbstractArray; dxpl_id = API.H5P_DEFAULT, filter_mask = 0) +function write_chunk( + dataset_id, index::Integer, buf::AbstractArray; dxpl_id=API.H5P_DEFAULT, filter_mask=0 +) offset = [reverse(get_chunk_offset(dataset_id, index))...] - write_chunk(dataset_id, offset, buf; dxpl_id = dxpl_id, filter_mask = filter_mask) + write_chunk(dataset_id, offset, buf; dxpl_id=dxpl_id, filter_mask=filter_mask) end # Avoid ambiguous method with offset based versions -function write_chunk(dataset_id, index::Integer, buf::Union{DenseArray,Base.FastContiguousSubArray}; dxpl_id = API.H5P_DEFAULT, filter_mask = 0) +function write_chunk( + dataset_id, + index::Integer, + buf::Union{DenseArray,Base.FastContiguousSubArray}; + dxpl_id=API.H5P_DEFAULT, + filter_mask=0 +) # We can bypass the need to check stride with Array and FastContiguousSubArray offset = [reverse(get_chunk_offset(dataset_id, index))...] - write_chunk(dataset_id, offset, buf; dxpl_id = dxpl_id, filter_mask = filter_mask) + write_chunk(dataset_id, offset, buf; dxpl_id=dxpl_id, filter_mask=filter_mask) end -function get_fill_value(plist_id, ::Type{T}) where T +function get_fill_value(plist_id, ::Type{T}) where {T} value = Ref{T}() API.h5p_get_fill_value(plist_id, datatype(T), value) return value[] diff --git a/src/attributes.jl b/src/attributes.jl index c2810e2ce..7ee564d0c 100644 --- a/src/attributes.jl +++ b/src/attributes.jl @@ -25,7 +25,6 @@ function Base.close(obj::Attribute) end name(attr::Attribute) = API.h5a_get_name(attr) - datatype(dset::Attribute) = Datatype(API.h5a_get_type(checkvalid(dset)), file(dset)) dataspace(attr::Attribute) = Dataspace(API.h5a_get_space(checkvalid(attr))) @@ -64,8 +63,11 @@ read_attribute(attr::Attribute, memtype::Datatype, buf) = API.h5a_read(attr, mem Open the [`Attribute`](@ref) named `name` on the object `parent`. 
""" -open_attribute(parent::Union{File,Object}, name::AbstractString, aapl::AttributeAccessProperties=AttributeAccessProperties()) = - Attribute(API.h5a_open(checkvalid(parent), name, aapl), file(parent)) +open_attribute( + parent::Union{File,Object}, + name::AbstractString, + aapl::AttributeAccessProperties=AttributeAccessProperties() +) = Attribute(API.h5a_open(checkvalid(parent), name, aapl), file(parent)) """ create_attribute(parent::Union{File,Object}, name::AbstractString, dtype::Datatype, space::Dataspace) @@ -86,8 +88,12 @@ function create_attribute(parent::Union{File,Object}, name::AbstractString, data end return obj, dtype end -function create_attribute(parent::Union{File,Object}, name::AbstractString, dtype::Datatype, dspace::Dataspace) - attrid = API.h5a_create(checkvalid(parent), name, dtype, dspace, _attr_properties(name), API.H5P_DEFAULT) +function create_attribute( + parent::Union{File,Object}, name::AbstractString, dtype::Datatype, dspace::Dataspace +) + attrid = API.h5a_create( + checkvalid(parent), name, dtype, dspace, _attr_properties(name), API.H5P_DEFAULT + ) return Attribute(attrid, file(parent)) end @@ -95,7 +101,11 @@ end write_attribute(attr::Attribute, memtype::Datatype, x) = API.h5a_write(attr, memtype, x) # specific methods function write_attribute(attr::Attribute, memtype::Datatype, x::AbstractArray) - length(x) == length(attr) || throw(ArgumentError("Invalid length: $(length(x)) != $(length(attr)), for attribute \"$(name(attr))\"")) + length(x) == length(attr) || throw( + ArgumentError( + "Invalid length: $(length(x)) != $(length(attr)), for attribute \"$(name(attr))\"" + ) + ) API.h5a_write(attr, memtype, x) end function write_attribute(attr::Attribute, memtype::Datatype, str::AbstractString) @@ -105,7 +115,9 @@ function write_attribute(attr::Attribute, memtype::Datatype, str::AbstractString write_attribute(attr, memtype, buf) end end -function write_attribute(attr::Attribute, memtype::Datatype, x::T) where {T<:Union{ScalarType,Complex{<:ScalarType}}} +function write_attribute( + attr::Attribute, memtype::Datatype, x::T +) where {T<:Union{ScalarType,Complex{<:ScalarType}}} tmp = Ref{T}(x) write_attribute(attr, memtype, tmp) end @@ -139,16 +151,17 @@ end Rename the [`Attribute`](@ref) of the object `parent` named `oldname` to `newname`. """ -rename_attribute(parent::Union{File,Object}, oldname::AbstractString, newname::AbstractString) = - API.h5a_rename(checkvalid(parent), oldname, newname) +rename_attribute( + parent::Union{File,Object}, oldname::AbstractString, newname::AbstractString +) = API.h5a_rename(checkvalid(parent), oldname, newname) """ delete_attribute(parent::Union{File,Object}, name::AbstractString) Delete the [`Attribute`](@ref) named `name` on the object `parent`. 
""" -delete_attribute(parent::Union{File,Object}, path::AbstractString) = API.h5a_delete(checkvalid(parent), path) - +delete_attribute(parent::Union{File,Object}, path::AbstractString) = + API.h5a_delete(checkvalid(parent), path) """ h5writeattr(filename, name::AbstractString, data::Dict) @@ -173,7 +186,7 @@ Read the attributes of the object at `name` in the HDF5 file `filename`, returni """ function h5readattr(filename, name::AbstractString) local dat - file = h5open(filename,"r") + file = h5open(filename, "r") try obj = file[name] dat = Dict(attrs(obj)) @@ -222,7 +235,8 @@ function attrs(parent) return AttributeDict(parent) end -Base.haskey(attrdict::AttributeDict, path::AbstractString) = API.h5a_exists(checkvalid(attrdict.parent), path) +Base.haskey(attrdict::AttributeDict, path::AbstractString) = + API.h5a_exists(checkvalid(attrdict.parent), path) Base.length(attrdict::AttributeDict) = num_attrs(attrdict.parent) function Base.getindex(x::AttributeDict, name::AbstractString) @@ -248,13 +262,16 @@ function Base.setindex!(attrdict::AttributeDict, val, name::AbstractString) write_attribute(attrdict.parent, name, val) end end -Base.delete!(attrdict::AttributeDict, path::AbstractString) = delete_attribute(attrdict.parent, path) +Base.delete!(attrdict::AttributeDict, path::AbstractString) = + delete_attribute(attrdict.parent, path) function Base.keys(attrdict::AttributeDict) # faster than iteratively calling h5a_get_name_by_idx checkvalid(attrdict.parent) keyvec = sizehint!(String[], length(attrdict)) - API.h5a_iterate(attrdict.parent, idx_type(attrdict.parent), order(attrdict.parent)) do _, attr_name, _ + API.h5a_iterate( + attrdict.parent, idx_type(attrdict.parent), order(attrdict.parent) + ) do _, attr_name, _ push!(keyvec, unsafe_string(attr_name)) return false end @@ -275,9 +292,6 @@ function Base.iterate(attrdict::AttributeDict, (keyvec, n)) return (key => attrdict[key]), (keyvec, nn) end - - - struct Attributes parent::Union{File,Object} end @@ -297,8 +311,10 @@ function Base.getindex(x::Attributes, name::AbstractString) haskey(x, name) || throw(KeyError(name)) open_attribute(x.parent, name) end -Base.setindex!(x::Attributes, val, name::AbstractString) = write_attribute(x.parent, name, val) -Base.haskey(attr::Attributes, path::AbstractString) = API.h5a_exists(checkvalid(attr.parent), path) +Base.setindex!(x::Attributes, val, name::AbstractString) = + write_attribute(x.parent, name, val) +Base.haskey(attr::Attributes, path::AbstractString) = + API.h5a_exists(checkvalid(attr.parent), path) Base.length(x::Attributes) = num_attrs(x.parent) function Base.keys(x::Attributes) @@ -313,10 +329,12 @@ end Base.read(attr::Attributes, name::AbstractString) = read_attribute(attr.parent, name) # Dataset methods which act like attributes -Base.write(parent::Dataset, name::AbstractString, data; pv...) = write_attribute(parent, name, data; pv...) +Base.write(parent::Dataset, name::AbstractString, data; pv...) = + write_attribute(parent, name, data; pv...) 
function Base.getindex(dset::Dataset, name::AbstractString) haskey(dset, name) || throw(KeyError(name)) open_attribute(dset, name) end Base.setindex!(dset::Dataset, val, name::AbstractString) = write_attribute(dset, name, val) -Base.haskey(dset::Union{Dataset,Datatype}, path::AbstractString) = API.h5a_exists(checkvalid(dset), path) +Base.haskey(dset::Union{Dataset,Datatype}, path::AbstractString) = + API.h5a_exists(checkvalid(dset), path) diff --git a/src/context.jl b/src/context.jl index 3c432eb80..c02750282 100644 --- a/src/context.jl +++ b/src/context.jl @@ -38,23 +38,23 @@ serves as the default context if the current task does not have a * string_create """ struct HDF5Context - attribute_access::AttributeAccessProperties - attribute_create::AttributeCreateProperties - dataset_access ::DatasetAccessProperties - dataset_create ::DatasetCreateProperties - dataset_transfer::DatasetTransferProperties - datatype_access ::DatatypeAccessProperties - datatype_create ::DatatypeCreateProperties - file_access ::FileAccessProperties - file_create ::FileCreateProperties - file_mount ::FileMountProperties - group_access ::GroupAccessProperties - group_create ::GroupCreateProperties - link_access ::LinkAccessProperties - link_create ::LinkCreateProperties - object_copy ::ObjectCopyProperties - object_create ::ObjectCreateProperties - string_create ::StringCreateProperties + attribute_access :: AttributeAccessProperties + attribute_create :: AttributeCreateProperties + dataset_access :: DatasetAccessProperties + dataset_create :: DatasetCreateProperties + dataset_transfer :: DatasetTransferProperties + datatype_access :: DatatypeAccessProperties + datatype_create :: DatatypeCreateProperties + file_access :: FileAccessProperties + file_create :: FileCreateProperties + file_mount :: FileMountProperties + group_access :: GroupAccessProperties + group_create :: GroupCreateProperties + link_access :: LinkAccessProperties + link_create :: LinkCreateProperties + object_copy :: ObjectCopyProperties + object_create :: ObjectCreateProperties + string_create :: StringCreateProperties end Base.copy(ctx::HDF5Context) = @@ -64,25 +64,25 @@ Base.close(ctx::HDF5Context) = foreach(n -> close(getfield(ctx, n)), fieldnames(HDF5Context)) function HDF5Context() - HDF5Context( - AttributeAccessProperties(), - AttributeCreateProperties(), - DatasetAccessProperties(), - DatasetCreateProperties(), - DatasetTransferProperties(), - DatatypeAccessProperties(), - DatatypeCreateProperties(), - FileAccessProperties(), - FileCreateProperties(), - FileMountProperties(), - GroupAccessProperties(), - GroupCreateProperties(), - LinkAccessProperties(), - LinkCreateProperties(), - ObjectCopyProperties(), - ObjectCreateProperties(), - StringCreateProperties(), - ) + HDF5Context( + AttributeAccessProperties(), + AttributeCreateProperties(), + DatasetAccessProperties(), + DatasetCreateProperties(), + DatasetTransferProperties(), + DatatypeAccessProperties(), + DatatypeCreateProperties(), + FileAccessProperties(), + FileCreateProperties(), + FileMountProperties(), + GroupAccessProperties(), + GroupCreateProperties(), + LinkAccessProperties(), + LinkCreateProperties(), + ObjectCopyProperties(), + ObjectCreateProperties(), + StringCreateProperties(), + ) end """ diff --git a/src/datasets.jl b/src/datasets.jl index aa61d5a0b..d1b1b773c 100644 --- a/src/datasets.jl +++ b/src/datasets.jl @@ -5,7 +5,8 @@ dataspace(dset::Dataset) = Dataspace(API.h5d_get_space(checkvalid(dset))) # Open Dataset -open_dataset(parent::Union{File,Group}, +open_dataset( + 
parent::Union{File,Group}, name::AbstractString, dapl::DatasetAccessProperties=DatasetAccessProperties(), dxpl::DatasetTransferProperties=DatasetTransferProperties() @@ -43,13 +44,17 @@ function create_dataset( path::Union{AbstractString,Nothing}, dtype::Datatype, dspace::Dataspace; - dcpl::DatasetCreateProperties = DatasetCreateProperties(), - dxpl::DatasetTransferProperties = DatasetTransferProperties(), - dapl::DatasetAccessProperties = DatasetAccessProperties(), + dcpl::DatasetCreateProperties=DatasetCreateProperties(), + dxpl::DatasetTransferProperties=DatasetTransferProperties(), + dapl::DatasetAccessProperties=DatasetAccessProperties(), pv... ) - !isnothing(path) && haskey(parent, path) && error("cannot create dataset: object \"", path, "\" already exists at ", name(parent)) - pv = setproperties!(dcpl,dxpl,dapl; pv...) + !isnothing(path) && + haskey(parent, path) && + error( + "cannot create dataset: object \"", path, "\" already exists at ", name(parent) + ) + pv = setproperties!(dcpl, dxpl, dapl; pv...) isempty(pv) || error("invalid keyword options") if isnothing(path) ds = API.h5d_create_anon(parent, dtype, dspace, dcpl, dapl) @@ -58,11 +63,53 @@ function create_dataset( end Dataset(ds, file(parent), dxpl) end -create_dataset(parent::Union{File,Group}, path::Union{AbstractString,Nothing}, dtype::Datatype, dspace_dims::Dims; pv...) = create_dataset(checkvalid(parent), path, dtype, dataspace(dspace_dims); pv...) -create_dataset(parent::Union{File,Group}, path::Union{AbstractString,Nothing}, dtype::Datatype, dspace_dims::Tuple{Dims,Dims}; pv...) = create_dataset(checkvalid(parent), path, dtype, dataspace(dspace_dims[1], max_dims=dspace_dims[2]); pv...) -create_dataset(parent::Union{File,Group}, path::Union{AbstractString,Nothing}, dtype::Type, dspace_dims::Tuple{Dims,Dims}; pv...) = create_dataset(checkvalid(parent), path, datatype(dtype), dataspace(dspace_dims[1], max_dims=dspace_dims[2]); pv...) -create_dataset(parent::Union{File,Group}, path::Union{AbstractString,Nothing}, dtype::Type, dspace_dims::Dims; pv...) = create_dataset(checkvalid(parent), path, datatype(dtype), dataspace(dspace_dims); pv...) -create_dataset(parent::Union{File,Group}, path::Union{AbstractString,Nothing}, dtype::Type, dspace_dims::Int...; pv...) = create_dataset(checkvalid(parent), path, datatype(dtype), dataspace(dspace_dims); pv...) +create_dataset( + parent::Union{File,Group}, + path::Union{AbstractString,Nothing}, + dtype::Datatype, + dspace_dims::Dims; + pv... +) = create_dataset(checkvalid(parent), path, dtype, dataspace(dspace_dims); pv...) +create_dataset( + parent::Union{File,Group}, + path::Union{AbstractString,Nothing}, + dtype::Datatype, + dspace_dims::Tuple{Dims,Dims}; + pv... +) = create_dataset( + checkvalid(parent), + path, + dtype, + dataspace(dspace_dims[1]; max_dims=dspace_dims[2]); + pv... +) +create_dataset( + parent::Union{File,Group}, + path::Union{AbstractString,Nothing}, + dtype::Type, + dspace_dims::Tuple{Dims,Dims}; + pv... +) = create_dataset( + checkvalid(parent), + path, + datatype(dtype), + dataspace(dspace_dims[1]; max_dims=dspace_dims[2]); + pv... +) +create_dataset( + parent::Union{File,Group}, + path::Union{AbstractString,Nothing}, + dtype::Type, + dspace_dims::Dims; + pv... +) = create_dataset(checkvalid(parent), path, datatype(dtype), dataspace(dspace_dims); pv...) +create_dataset( + parent::Union{File,Group}, + path::Union{AbstractString,Nothing}, + dtype::Type, + dspace_dims::Int...; + pv... 
+) = create_dataset(checkvalid(parent), path, datatype(dtype), dataspace(dspace_dims); pv...) # Get the datatype of a dataset datatype(dset::Dataset) = Datatype(API.h5d_get_type(checkvalid(dset)), file(dset)) @@ -168,7 +215,7 @@ function readmmap(obj::Dataset, ::Type{T}) where {T} elseif offset % Base.datatype_alignment(T) == 0 A = Mmap.mmap(fd, Array{T,length(dims)}, dims, offset) else - Aflat = Mmap.mmap(fd, Vector{UInt8}, prod(dims)*sizeof(T), offset) + Aflat = Mmap.mmap(fd, Vector{UInt8}, prod(dims) * sizeof(T), offset) A = reshape(reinterpret(T, Aflat), dims) end @@ -187,18 +234,25 @@ function readmmap(obj::Dataset) end # Generic write -function Base.write(parent::Union{File,Group}, name1::Union{AbstractString,Nothing}, val1, name2::Union{AbstractString,Nothing}, val2, nameval...) # FIXME: remove? +function Base.write( + parent::Union{File,Group}, + name1::Union{AbstractString,Nothing}, + val1, + name2::Union{AbstractString,Nothing}, + val2, + nameval... +) # FIXME: remove? if !iseven(length(nameval)) error("name, value arguments must come in pairs") end write(parent, name1, val1) write(parent, name2, val2) - for i = 1:2:length(nameval) + for i in 1:2:length(nameval) thisname = nameval[i] if !isa(thisname, AbstractString) - error("Argument ", i+5, " should be a string, but it's a ", typeof(thisname)) + error("Argument ", i + 5, " should be a string, but it's a ", typeof(thisname)) end - write(parent, thisname, nameval[i+1]) + write(parent, thisname, nameval[i + 1]) end end @@ -208,7 +262,9 @@ end # Create datasets and attributes with "native" types, but don't write the data. # The return syntax is: dset, dtype = create_dataset(parent, name, data; properties...) -function create_dataset(parent::Union{File,Group}, name::Union{AbstractString,Nothing}, data; pv...) +function create_dataset( + parent::Union{File,Group}, name::Union{AbstractString,Nothing}, data; pv... +) dtype = datatype(data) dspace = dataspace(data) obj = try @@ -220,7 +276,9 @@ function create_dataset(parent::Union{File,Group}, name::Union{AbstractString,No end # Create and write, closing the objects upon exit -function write_dataset(parent::Union{File,Group}, name::Union{AbstractString,Nothing}, data; pv...) +function write_dataset( + parent::Union{File,Group}, name::Union{AbstractString,Nothing}, data; pv... +) obj, dtype = create_dataset(parent, name, data; pv...) try write_dataset(obj, dtype, data) @@ -245,8 +303,8 @@ function Base.write(obj::Dataset, x) end # For plain files and groups, let "write(obj, name, val; properties...)" mean "write_dataset" -Base.write(parent::Union{File,Group}, name::Union{AbstractString,Nothing}, data; pv...) = write_dataset(parent, name, data; pv...) - +Base.write(parent::Union{File,Group}, name::Union{AbstractString,Nothing}, data; pv...) = + write_dataset(parent, name, data; pv...) # Indexing @@ -256,7 +314,7 @@ Base.axes(dset::Dataset) = map(Base.OneTo, size(dset)) # Write to a subset of a dataset using array slices: dataset[:,:,10] = array const IndexType = Union{AbstractRange{Int},Int,Colon} -function Base.setindex!(dset::Dataset, X::Array{T}, I::IndexType...) where T +function Base.setindex!(dset::Dataset, X::Array{T}, I::IndexType...) where {T} !isconcretetype(T) && error("type $T is not concrete") U = get_jl_type(dset) @@ -293,7 +351,7 @@ function Base.setindex!(dset::Dataset, X::Array{T}, I::IndexType...) where T return X end -function Base.setindex!(dset::Dataset, x::T, I::IndexType...) where T <: Number +function Base.setindex!(dset::Dataset, x::T, I::IndexType...) 
where {T<:Number} indices = Base.to_indices(dset, I) X = fill(x, map(length, indices)) Base.setindex!(dset, X, indices...) @@ -317,42 +375,82 @@ Use `API.h5p_set_external` to link to multiple segments. See also [`API.h5p_set_external`](@ref) """ -function create_external_dataset(parent::Union{File,Group}, name::AbstractString, filepath::AbstractString, t, sz::Dims, offset::Integer=0) +function create_external_dataset( + parent::Union{File,Group}, + name::AbstractString, + filepath::AbstractString, + t, + sz::Dims, + offset::Integer=0 +) create_external_dataset(parent, name, filepath, datatype(t), dataspace(sz), offset) end -function create_external_dataset(parent::Union{File,Group}, name::AbstractString, filepath::AbstractString, dtype::Datatype, dspace::Dataspace, offset::Integer=0) +function create_external_dataset( + parent::Union{File,Group}, + name::AbstractString, + filepath::AbstractString, + dtype::Datatype, + dspace::Dataspace, + offset::Integer=0 +) checkvalid(parent) - create_dataset(parent, name, dtype, dspace; external=(filepath, offset, length(dspace)*sizeof(dtype))) + create_dataset( + parent, + name, + dtype, + dspace; + external=(filepath, offset, length(dspace) * sizeof(dtype)) + ) end ### HDF5 utilities ### - # default behavior -read_dataset(dset::Dataset, memtype::Datatype, buf, xfer::DatasetTransferProperties=dset.xfer) = - API.h5d_read(dset, memtype, API.H5S_ALL, API.H5S_ALL, xfer, buf) -write_dataset(dset::Dataset, memtype::Datatype, x, xfer::DatasetTransferProperties=dset.xfer) = - API.h5d_write(dset, memtype, API.H5S_ALL, API.H5S_ALL, xfer, x) +read_dataset( + dset::Dataset, memtype::Datatype, buf, xfer::DatasetTransferProperties=dset.xfer +) = API.h5d_read(dset, memtype, API.H5S_ALL, API.H5S_ALL, xfer, buf) +write_dataset( + dset::Dataset, memtype::Datatype, x, xfer::DatasetTransferProperties=dset.xfer +) = API.h5d_write(dset, memtype, API.H5S_ALL, API.H5S_ALL, xfer, x) # type-specific behaviors function _check_invalid(dataset::Dataset, buf::AbstractArray) num_bytes_dset = Base.checked_mul(sizeof(datatype(dataset)), length(dataset)) num_bytes_buf = Base.checked_mul(sizeof(eltype(buf)), length(buf)) - num_bytes_buf == num_bytes_dset || throw(ArgumentError( - "Invalid number of bytes: $num_bytes_buf != $num_bytes_dset, for dataset \"$(name(dataset))\"" - )) - stride(buf, 1) == 1 || throw(ArgumentError("Cannot read/write arrays with a different stride than `Array`")) -end -function read_dataset(dataset::Dataset, memtype::Datatype, buf::AbstractArray, xfer::DatasetTransferProperties=dataset.xfer) + num_bytes_buf == num_bytes_dset || throw( + ArgumentError( + "Invalid number of bytes: $num_bytes_buf != $num_bytes_dset, for dataset \"$(name(dataset))\"" + ) + ) + stride(buf, 1) == 1 || throw( + ArgumentError("Cannot read/write arrays with a different stride than `Array`") + ) +end +function read_dataset( + dataset::Dataset, + memtype::Datatype, + buf::AbstractArray, + xfer::DatasetTransferProperties=dataset.xfer +) _check_invalid(dataset, buf) API.h5d_read(dataset, memtype, API.H5S_ALL, API.H5S_ALL, xfer, buf) end -function write_dataset(dataset::Dataset, memtype::Datatype, buf::AbstractArray, xfer::DatasetTransferProperties=dataset.xfer) +function write_dataset( + dataset::Dataset, + memtype::Datatype, + buf::AbstractArray, + xfer::DatasetTransferProperties=dataset.xfer +) _check_invalid(dataset, buf) API.h5d_write(dataset, memtype, API.H5S_ALL, API.H5S_ALL, xfer, buf) end -function write_dataset(dataset::Dataset, memtype::Datatype, str::Union{AbstractString,Nothing}, 
xfer::DatasetTransferProperties=dataset.xfer) +function write_dataset( + dataset::Dataset, + memtype::Datatype, + str::Union{AbstractString,Nothing}, + xfer::DatasetTransferProperties=dataset.xfer +) strbuf = Base.cconvert(Cstring, str) GC.@preserve strbuf begin # unsafe_convert(Cstring, strbuf) is responsible for enforcing the no-'\0' policy, @@ -362,15 +460,27 @@ function write_dataset(dataset::Dataset, memtype::Datatype, str::Union{AbstractS API.h5d_write(dataset, memtype, API.H5S_ALL, API.H5S_ALL, xfer, buf) end end -function write_dataset(dataset::Dataset, memtype::Datatype, x::T, xfer::DatasetTransferProperties=dataset.xfer) where {T<:Union{ScalarType, Complex{<:ScalarType}}} +function write_dataset( + dataset::Dataset, memtype::Datatype, x::T, xfer::DatasetTransferProperties=dataset.xfer +) where {T<:Union{ScalarType,Complex{<:ScalarType}}} tmp = Ref{T}(x) API.h5d_write(dataset, memtype, API.H5S_ALL, API.H5S_ALL, xfer, tmp) end -function write_dataset(dataset::Dataset, memtype::Datatype, strs::Array{<:AbstractString}, xfer::DatasetTransferProperties=dataset.xfer) +function write_dataset( + dataset::Dataset, + memtype::Datatype, + strs::Array{<:AbstractString}, + xfer::DatasetTransferProperties=dataset.xfer +) p = Ref{Cstring}(strs) API.h5d_write(dataset, memtype, API.H5S_ALL, API.H5S_ALL, xfer, p) end -write_dataset(dataset::Dataset, memtype::Datatype, ::EmptyArray, xfer::DatasetTransferProperties=dataset.xfer) = nothing +write_dataset( + dataset::Dataset, + memtype::Datatype, + ::EmptyArray, + xfer::DatasetTransferProperties=dataset.xfer +) = nothing """ get_datasets(file::HDF5.File) -> datasets::Vector{HDF5.Dataset} @@ -382,7 +492,7 @@ function get_datasets(file::File) get_datasets!(list, file) list end - function get_datasets!(list::Vector{Dataset}, node::Union{File,Group,Dataset}) +function get_datasets!(list::Vector{Dataset}, node::Union{File,Group,Dataset}) if isa(node, Dataset) push!(list, node) else @@ -402,11 +512,11 @@ function heuristic_chunk(T, shape) chunk = [shape...] nd = length(chunk) # simplification of ugly heuristic target chunk size from PyTables/h5py: - target = min(1500000, max(12000, floor(Int, 300*cbrt(Ts*sz)))) + target = min(1500000, max(12000, floor(Int, 300 * cbrt(Ts * sz)))) Ts > target && return ones(chunk) # divide last non-unit dimension by 2 until we get <= target # (since Julia default to column-major, favor contiguous first dimension) - while Ts*prod(chunk) > target + while Ts * prod(chunk) > target i = nd while chunk[i] == 1 i -= 1 @@ -440,7 +550,9 @@ Write a raw chunk at a given linear index. `chunk_bytes` is an AbstractArray that can be converted to a pointer, Ptr{Cvoid}. `index` is 1-based and consecutive up to the number of chunks. 
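
For example, `do_write_chunk(dset, 1, chunk_bytes)` writes `chunk_bytes` as the first
chunk of `dset`, with `filter_mask` left at its default of 0 (all filters applied).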
""" -function do_write_chunk(dataset::Dataset, index::Integer, chunk_bytes::AbstractArray, filter_mask=0) +function do_write_chunk( + dataset::Dataset, index::Integer, chunk_bytes::AbstractArray, filter_mask=0 +) checkvalid(dataset) index -= 1 write_chunk(dataset, index, chunk_bytes; filter_mask=UInt32(filter_mask)) @@ -456,7 +568,7 @@ function do_read_chunk(dataset::Dataset, offset) checkvalid(dataset) offs = collect(API.hsize_t, reverse(offset)) .- 1 filters = Ref{UInt32}() - buf = read_chunk(dataset, offs; filters = filters) + buf = read_chunk(dataset, offs; filters=filters) return (filters[], buf) end @@ -470,7 +582,7 @@ function do_read_chunk(dataset::Dataset, index::Integer) checkvalid(dataset) index -= 1 filters = Ref{UInt32}() - buf = read_chunk(dataset, index; filters = filters) + buf = read_chunk(dataset, index; filters=filters) return (filters[], buf) end @@ -483,12 +595,11 @@ Base.IndexStyle(::ChunkStorage{I}) where {I<:IndexStyle} = I() # ChunkStorage{IndexCartesian,N} (default) function ChunkStorage(dataset) - ChunkStorage{IndexCartesian, ndims(dataset)}(dataset) + ChunkStorage{IndexCartesian,ndims(dataset)}(dataset) end Base.size(cs::ChunkStorage{IndexCartesian}) = get_num_chunks_per_dim(cs.dataset) - function Base.axes(cs::ChunkStorage{IndexCartesian}) chunk = get_chunk(cs.dataset) extent = size(cs.dataset) @@ -496,12 +607,18 @@ function Base.axes(cs::ChunkStorage{IndexCartesian}) end # Filter flags provided -function Base.setindex!(chunk_storage::ChunkStorage{IndexCartesian}, v::Tuple{<:Integer,AbstractArray}, index::Integer...) +function Base.setindex!( + chunk_storage::ChunkStorage{IndexCartesian}, + v::Tuple{<:Integer,AbstractArray}, + index::Integer... +) do_write_chunk(chunk_storage.dataset, index, v[2], v[1]) end # Filter flags will default to 0 -function Base.setindex!(chunk_storage::ChunkStorage{IndexCartesian}, v::AbstractArray, index::Integer...) +function Base.setindex!( + chunk_storage::ChunkStorage{IndexCartesian}, v::AbstractArray, index::Integer... 
+) do_write_chunk(chunk_storage.dataset, index, v) end @@ -511,16 +628,22 @@ end # ChunkStorage{IndexLinear,1} -ChunkStorage{IndexLinear}(dataset) = ChunkStorage{IndexLinear,1}(dataset) +ChunkStorage{IndexLinear}(dataset) = ChunkStorage{IndexLinear,1}(dataset) Base.size(cs::ChunkStorage{IndexLinear}) = (get_num_chunks(cs.dataset),) -Base.length(cs::ChunkStorage{IndexLinear}) = get_num_chunks(cs.dataset) +Base.length(cs::ChunkStorage{IndexLinear}) = get_num_chunks(cs.dataset) -function Base.setindex!(chunk_storage::ChunkStorage{IndexLinear}, v::Tuple{<:Integer,AbstractArray}, index::Integer) +function Base.setindex!( + chunk_storage::ChunkStorage{IndexLinear}, + v::Tuple{<:Integer,AbstractArray}, + index::Integer +) do_write_chunk(chunk_storage.dataset, index, v[2], v[1]) end # Filter flags will default to 0 -function Base.setindex!(chunk_storage::ChunkStorage{IndexLinear}, v::AbstractArray, index::Integer) +function Base.setindex!( + chunk_storage::ChunkStorage{IndexLinear}, v::AbstractArray, index::Integer +) do_write_chunk(chunk_storage.dataset, index, v) end @@ -532,13 +655,15 @@ end # ChunkStorage axes may be StepRanges, but this is not available until v"1.6.0" # no method matching CartesianIndices(::Tuple{StepRange{Int64,Int64},UnitRange{Int64}}) until v"1.6.0" -function Base.show(io::IO, cs::ChunkStorage{IndexCartesian,N}) where N +function Base.show(io::IO, cs::ChunkStorage{IndexCartesian,N}) where {N} println(io, "HDF5.ChunkStorage{IndexCartesian,$N}") print(io, "Axes: ") println(io, axes(cs)) print(io, cs.dataset) end -Base.show(io::IO, ::MIME{Symbol("text/plain")}, cs::ChunkStorage{IndexCartesian,N}) where {N} = show(io, cs) +Base.show( + io::IO, ::MIME{Symbol("text/plain")}, cs::ChunkStorage{IndexCartesian,N} +) where {N} = show(io, cs) function get_chunk(dset::Dataset) p = get_create_properties(dset) diff --git a/src/dataspaces.jl b/src/dataspaces.jl index 21267b465..61717510e 100644 --- a/src/dataspaces.jl +++ b/src/dataspaces.jl @@ -37,7 +37,6 @@ The [`Dataspace`](@ref) of `obj`. """ dataspace(ds::Dataspace) = ds - # Create a dataspace from in-memory types """ dataspace(data) @@ -47,10 +46,11 @@ The default `Dataspace` used for representing a Julia object `data`: - arrays: a simple `Dataspace` - `nothing` or an `EmptyArray`: a null dataspace """ -dataspace(x::Union{T, Complex{T}}) where {T<:ScalarType} = Dataspace(API.h5s_create(API.H5S_SCALAR)) +dataspace(x::Union{T,Complex{T}}) where {T<:ScalarType} = + Dataspace(API.h5s_create(API.H5S_SCALAR)) dataspace(::AbstractString) = Dataspace(API.h5s_create(API.H5S_SCALAR)) -function _dataspace(sz::Dims{N}, max_dims::Union{Dims{N}, Tuple{}}=()) where N +function _dataspace(sz::Dims{N}, max_dims::Union{Dims{N},Tuple{}}=()) where {N} dims = API.hsize_t[sz[i] for i in N:-1:1] if isempty(max_dims) maxd = dims @@ -61,7 +61,8 @@ function _dataspace(sz::Dims{N}, max_dims::Union{Dims{N}, Tuple{}}=()) where N end return Dataspace(API.h5s_create_simple(length(dims), dims, maxd)) end -dataspace(A::AbstractArray{T,N}; max_dims::Union{Dims{N},Tuple{}} = ()) where {T,N} = _dataspace(size(A), max_dims) +dataspace(A::AbstractArray{T,N}; max_dims::Union{Dims{N},Tuple{}}=()) where {T,N} = + _dataspace(size(A), max_dims) # special array types dataspace(v::VLen; max_dims::Union{Dims,Tuple{}}=()) = _dataspace(size(v.data), max_dims) dataspace(A::EmptyArray) = Dataspace(API.h5s_create(API.H5S_NULL)) @@ -75,9 +76,10 @@ Construct a simple `Dataspace` for the given dimensions `dims`. 
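
For example, `dataspace((100,); max_dims=(-1,))` describes a one-dimensional extent
of 100 elements that may later be enlarged without bound.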
The maximum dimensions `maxdims` specifies the maximum possible size: `-1` can be used to indicate unlimited dimensions. """ -dataspace(sz::Dims{N}; max_dims::Union{Dims{N},Tuple{}}=()) where {N} = _dataspace(sz, max_dims) -dataspace(sz1::Int, sz2::Int, sz3::Int...; max_dims::Union{Dims,Tuple{}}=()) = _dataspace(tuple(sz1, sz2, sz3...), max_dims) - +dataspace(sz::Dims{N}; max_dims::Union{Dims{N},Tuple{}}=()) where {N} = + _dataspace(sz, max_dims) +dataspace(sz1::Int, sz2::Int, sz3::Int...; max_dims::Union{Dims,Tuple{}}=()) = + _dataspace(tuple(sz1, sz2, sz3...), max_dims) function Base.ndims(dspace::Dataspace) API.h5s_get_simple_extent_ndims(checkvalid(dspace)) @@ -85,7 +87,7 @@ end function Base.size(dspace::Dataspace) h5_dims = API.h5s_get_simple_extent_dims(checkvalid(dspace), nothing) N = length(h5_dims) - return ntuple(i -> @inbounds(Int(h5_dims[N-i+1])), N) + return ntuple(i -> @inbounds(Int(h5_dims[N - i + 1])), N) end function Base.size(dspace::Dataspace, d::Integer) d > 0 || throw(ArgumentError("invalid dimension d; must be positive integer")) @@ -101,7 +103,6 @@ function Base.length(dspace::Dataspace) end Base.isempty(dspace::Dataspace) = length(dspace) == 0 - """ isnull(dspace::Union{HDF5.Dataspace, HDF5.Dataset, HDF5.Attribute}) @@ -120,11 +121,10 @@ function isnull(dspace::Dataspace) return API.h5s_get_simple_extent_type(checkvalid(dspace)) == API.H5S_NULL end - function get_regular_hyperslab(dspace::Dataspace) start, stride, count, block = API.h5s_get_regular_hyperslab(dspace) N = length(start) - @inline rev(v) = ntuple(i -> @inbounds(Int(v[N-i+1])), N) + @inline rev(v) = ntuple(i -> @inbounds(Int(v[N - i + 1])), N) return rev(start), rev(stride), rev(count), rev(block) end @@ -132,26 +132,29 @@ function hyperslab(dspace::Dataspace, I::Union{AbstractRange{Int},Int}...) dims = size(dspace) n_dims = length(dims) if length(I) != n_dims - error("Wrong number of indices supplied, supplied length $(length(I)) but expected $(n_dims).") + error( + "Wrong number of indices supplied, supplied length $(length(I)) but expected $(n_dims)." + ) end - dsel_id = API.h5s_copy(dspace) - dsel_start = Vector{API.hsize_t}(undef,n_dims) - dsel_stride = Vector{API.hsize_t}(undef,n_dims) - dsel_count = Vector{API.hsize_t}(undef,n_dims) - for k = 1:n_dims - index = I[n_dims-k+1] + dsel_id = API.h5s_copy(dspace) + dsel_start = Vector{API.hsize_t}(undef, n_dims) + dsel_stride = Vector{API.hsize_t}(undef, n_dims) + dsel_count = Vector{API.hsize_t}(undef, n_dims) + for k in 1:n_dims + index = I[n_dims - k + 1] if isa(index, Integer) - dsel_start[k] = index-1 + dsel_start[k] = index - 1 dsel_stride[k] = 1 dsel_count[k] = 1 elseif isa(index, AbstractRange) - dsel_start[k] = first(index)-1 + dsel_start[k] = first(index) - 1 dsel_stride[k] = step(index) dsel_count[k] = length(index) else error("index must be range or integer") end - if dsel_start[k] < 0 || dsel_start[k]+(dsel_count[k]-1)*dsel_stride[k] >= dims[n_dims-k+1] + if dsel_start[k] < 0 || + dsel_start[k] + (dsel_count[k] - 1) * dsel_stride[k] >= dims[n_dims - k + 1] println(dsel_start) println(dsel_stride) println(dsel_count) @@ -159,7 +162,9 @@ function hyperslab(dspace::Dataspace, I::Union{AbstractRange{Int},Int}...) 
error("index out of range") end end - API.h5s_select_hyperslab(dsel_id, API.H5S_SELECT_SET, dsel_start, dsel_stride, dsel_count, C_NULL) + API.h5s_select_hyperslab( + dsel_id, API.H5S_SELECT_SET, dsel_start, dsel_stride, dsel_count, C_NULL + ) return Dataspace(dsel_id) end diff --git a/src/datatypes.jl b/src/datatypes.jl index d7cb85bf7..e027256bc 100644 --- a/src/datatypes.jl +++ b/src/datatypes.jl @@ -11,22 +11,27 @@ end # The datatype of a Datatype is the Datatype datatype(dt::Datatype) = dt - -open_datatype(parent::Union{File,Group}, name::AbstractString, tapl::DatatypeAccessProperties=DatatypeAccessProperties()) = - Datatype(API.h5t_open(checkvalid(parent), name, tapl), file(parent)) +open_datatype( + parent::Union{File,Group}, + name::AbstractString, + tapl::DatatypeAccessProperties=DatatypeAccessProperties() +) = Datatype(API.h5t_open(checkvalid(parent), name, tapl), file(parent)) # Note that H5Tcreate is very different; H5Tcommit is the analog of these others create_datatype(class_id, sz) = Datatype(API.h5t_create(class_id, sz)) -function commit_datatype(parent::Union{File,Group}, path::AbstractString, dtype::Datatype, - lcpl::LinkCreateProperties=LinkCreateProperties(), - tcpl::DatatypeCreateProperties=DatatypeCreateProperties(), - tapl::DatatypeAccessProperties=DatatypeAccessProperties()) +function commit_datatype( + parent::Union{File,Group}, + path::AbstractString, + dtype::Datatype, + lcpl::LinkCreateProperties=LinkCreateProperties(), + tcpl::DatatypeCreateProperties=DatatypeCreateProperties(), + tapl::DatatypeAccessProperties=DatatypeAccessProperties() +) lcpl.char_encoding = cset(typeof(path)) API.h5t_commit(checkvalid(parent), path, dtype, lcpl, tcpl, tapl) dtype.file = file(parent) return dtype end - -Base.sizeof(dtype::Datatype) = Int(API.h5t_get_size(dtype)) \ No newline at end of file +Base.sizeof(dtype::Datatype) = Int(API.h5t_get_size(dtype)) diff --git a/src/deprecated.jl b/src/deprecated.jl index 8cf19f5a2..59879582e 100644 --- a/src/deprecated.jl +++ b/src/deprecated.jl @@ -41,8 +41,11 @@ function create_property(class; kwargs...) class == HDF5.API.H5P_LINK_CREATE ? (:H5P_LINK_CREATE, LinkCreateProperties) : class == HDF5.API.H5P_LINK_ACCESS ? (:H5P_LINK_ACCESS, LinkAccessProperties) : error("invalid class") - Base.depwarn("`create_property(HDF5.$oldname; kwargs...)` has been deprecated, use `$newtype(;kwargs...)` instead.", :create_property) - init!(newtype(;kwargs...)) + Base.depwarn( + "`create_property(HDF5.$oldname; kwargs...)` has been deprecated, use `$newtype(;kwargs...)` instead.", + :create_property + ) + init!(newtype(; kwargs...)) end @deprecate set_chunk(p::Properties, dims...) set_chunk!(p, dims) false @@ -65,8 +68,12 @@ end ### Changed in PR #902 import Base: append!, push! import .Filters: ExternalFilter -@deprecate append!(filters::Filters.FilterPipeline, extra::NTuple{N, Integer}) where N append!(filters, [ExternalFilter(extra...)]) -@deprecate push!(p::Filters.FilterPipeline, f::NTuple{N, Integer}) where N push!(p, ExternalFilter(f...)) +@deprecate append!(filters::Filters.FilterPipeline, extra::NTuple{N,Integer}) where {N} append!( + filters, [ExternalFilter(extra...)] +) +@deprecate push!(p::Filters.FilterPipeline, f::NTuple{N,Integer}) where {N} push!( + p, ExternalFilter(f...) +) @deprecate ExternalFilter(t::Tuple) ExternalFilter(t...) 
false ### Changed in PR #979 @@ -74,4 +81,6 @@ import .Filters: ExternalFilter @deprecate object_info(obj::Union{File,Object}) API.h5o_get_info1(checkvalid(obj)) ### Changed in PR #994 -@deprecate set_track_order(p::Properties, val::Bool) set_track_order!(p::Properties, val::Bool) false +@deprecate set_track_order(p::Properties, val::Bool) set_track_order!( + p::Properties, val::Bool +) false diff --git a/src/drivers/drivers.jl b/src/drivers/drivers.jl index 927983a2d..43bb11483 100644 --- a/src/drivers/drivers.jl +++ b/src/drivers/drivers.jl @@ -18,7 +18,7 @@ end abstract type Driver end -const DRIVERS = Dict{API.hid_t, Type{<: Driver}}() +const DRIVERS = Dict{API.hid_t,Type{<:Driver}}() """ Core([increment::Csize_t, backing_store::Cuint, [write_tracking::Cuint, page_size::Csize_t]]) @@ -38,12 +38,8 @@ struct Core <: Driver page_size::Csize_t end Core(increment, backing_store) = Core(increment, backing_store, false, 524288) -Core(; - increment = 8192, - backing_store = true, - write_tracking = false, - page_size = 524288 -) = Core(increment, backing_store, write_tracking, page_size) +Core(; increment=8192, backing_store=true, write_tracking=false, page_size=524288) = + Core(increment, backing_store, write_tracking, page_size) function get_driver(p::Properties, ::Type{Core}) r_increment = Ref{Csize_t}(0) @@ -52,12 +48,7 @@ function get_driver(p::Properties, ::Type{Core}) r_page_size = Ref{Csize_t}(0) API.h5p_get_fapl_core(p, r_increment, r_backing_store) API.h5p_get_core_write_tracking(p, r_write_tracking, r_page_size) - return Core( - r_increment[], - r_backing_store[], - r_write_tracking[], - r_page_size[] - ) + return Core(r_increment[], r_backing_store[], r_write_tracking[], r_page_size[]) end function set_driver!(fapl::Properties, driver::Core) @@ -75,8 +66,7 @@ Also referred to as SEC2, this driver uses POSIX file-system functions like read write to perform I/O to a single, permanent file on local disk with no system buffering. This driver is POSIX-compliant and is the default file driver for all systems. """ -struct POSIX <: Driver -end +struct POSIX <: Driver end function get_driver(fapl::Properties, ::Type{POSIX}) POSIX() @@ -98,7 +88,9 @@ function __init__() # Check whether the libhdf5 was compiled with parallel support. HDF5.HAS_PARALLEL[] = API._has_symbol(:H5Pset_fapl_mpio) - @require MPI="da04e1cc-30fd-572f-bb4f-1f8673147195" (HDF5.has_parallel() && include("mpio.jl")) + @require MPI = "da04e1cc-30fd-572f-bb4f-1f8673147195" ( + HDF5.has_parallel() && include("mpio.jl") + ) end end # module diff --git a/src/drivers/mpio.jl b/src/drivers/mpio.jl index 6231c1b53..28bdfdc62 100644 --- a/src/drivers/mpio.jl +++ b/src/drivers/mpio.jl @@ -2,10 +2,10 @@ using .MPI import Libdl # Low-level MPI handles. -const MPIHandle = Union{MPI.MPI_Comm, MPI.MPI_Info} +const MPIHandle = Union{MPI.MPI_Comm,MPI.MPI_Info} # MPI.jl wrapper types. -const MPIHandleWrapper = Union{MPI.Comm, MPI.Info} +const MPIHandleWrapper = Union{MPI.Comm,MPI.Info} const H5MPIHandle = let csize = sizeof(MPI.MPI_Comm) @assert csize in (4, 8) @@ -18,7 +18,6 @@ h5_to_mpi_info(handle::H5MPIHandle) = reinterpret(MPI.MPI_Info, handle) mpi_to_h5(handle::MPIHandle) = reinterpret(H5MPIHandle, handle) mpi_to_h5(mpiobj::MPIHandleWrapper) = mpi_to_h5(mpiobj.val) - """ MPIO(comm::MPI.Comm, info::MPI.Info) MPIO(comm::MPI.Comm; kwargs....) @@ -39,8 +38,7 @@ struct MPIO <: Driver comm::MPI.Comm info::MPI.Info end - MPIO(comm::MPI.Comm; kwargs...) = - MPIO(comm, MPI.Info(;kwargs...)) +MPIO(comm::MPI.Comm; kwargs...) 
= MPIO(comm, MPI.Info(; kwargs...)) function set_driver!(fapl::Properties, mpio::MPIO) HDF5.has_parallel() || error( @@ -78,9 +76,12 @@ See the [HDF5 docs](https://portal.hdfgroup.org/display/HDF5/H5P_SET_FAPL_MPIO) for details on the `comm` and `info` arguments. """ function HDF5.h5open( - filename::AbstractString, mode::AbstractString, - comm::MPI.Comm, info::MPI.Info = MPI.Info(); pv... - ) + filename::AbstractString, + mode::AbstractString, + comm::MPI.Comm, + info::MPI.Info=MPI.Info(); + pv... +) HDF5.h5open(filename, mode; driver=MPIO(comm, info), pv...) end diff --git a/src/file.jl b/src/file.jl index 2002b02ba..a2d071175 100644 --- a/src/file.jl +++ b/src/file.jl @@ -17,12 +17,18 @@ lists passed in via keyword will be closed. This is useful to set properties not Note that `h5open` uses `fclose_degree = :strong` by default, but this can be overriden by the `fapl` keyword. """ -function h5open(filename::AbstractString, mode::AbstractString, fapl::FileAccessProperties, fcpl::FileCreateProperties=FileCreateProperties(); swmr::Bool = false) +function h5open( + filename::AbstractString, + mode::AbstractString, + fapl::FileAccessProperties, + fcpl::FileCreateProperties=FileCreateProperties(); + swmr::Bool=false +) rd, wr, cr, tr, ff = - mode == "r" ? (true, false, false, false, false) : - mode == "r+" ? (true, true, false, false, true ) : - mode == "cw" ? (false, true, true, false, true ) : - mode == "w" ? (false, true, true, true, false) : + mode == "r" ? (true, false, false, false, false) : + mode == "r+" ? (true, true, false, false, true) : + mode == "cw" ? (false, true, true, false, true) : + mode == "w" ? (false, true, true, true, false) : # mode == "w+" ? (true, true, true, true, false) : # mode == "a" ? (true, true, true, true, true ) : error("invalid open mode: ", mode) @@ -31,25 +37,28 @@ function h5open(filename::AbstractString, mode::AbstractString, fapl::FileAccess end if cr && (tr || !isfile(filename)) - flag = swmr ? API.H5F_ACC_TRUNC|API.H5F_ACC_SWMR_WRITE : API.H5F_ACC_TRUNC + flag = swmr ? API.H5F_ACC_TRUNC | API.H5F_ACC_SWMR_WRITE : API.H5F_ACC_TRUNC fid = API.h5f_create(filename, flag, fcpl, fapl) else - ishdf5(filename) || error("unable to determine if $filename is accessible in the HDF5 format (file may not exist)") + ishdf5(filename) || error( + "unable to determine if $filename is accessible in the HDF5 format (file may not exist)" + ) if wr - flag = swmr ? API.H5F_ACC_RDWR|API.H5F_ACC_SWMR_WRITE : API.H5F_ACC_RDWR + flag = swmr ? API.H5F_ACC_RDWR | API.H5F_ACC_SWMR_WRITE : API.H5F_ACC_RDWR else - flag = swmr ? API.H5F_ACC_RDONLY|API.H5F_ACC_SWMR_READ : API.H5F_ACC_RDONLY + flag = swmr ? API.H5F_ACC_RDONLY | API.H5F_ACC_SWMR_READ : API.H5F_ACC_RDONLY end fid = API.h5f_open(filename, flag, fapl) end return File(fid, filename) end - -function h5open(filename::AbstractString, mode::AbstractString = "r"; - swmr::Bool = false, +function h5open( + filename::AbstractString, + mode::AbstractString="r"; + swmr::Bool=false, # With garbage collection, the other modes don't make sense - fapl = FileAccessProperties(; fclose_degree = :strong), + fapl = FileAccessProperties(; fclose_degree=:strong), fcpl = FileCreateProperties(), pv... ) @@ -75,7 +84,7 @@ For example with a `do` block: end """ -function h5open(f::Function, args...; context = copy(CONTEXT), pv...) +function h5open(f::Function, args...; context=copy(CONTEXT), pv...) file = h5open(args...; pv...) 
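+    # run the block below with `context` installed as the task-local
+    # `:hdf5_context`, so it stays active while the supplied function executes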
task_local_storage(:hdf5_context, context) do if (track_order = get(pv, :track_order, nothing)) !== nothing @@ -91,26 +100,25 @@ function h5open(f::Function, args...; context = copy(CONTEXT), pv...) end function h5rewrite(f::Function, filename::AbstractString, args...) - tmppath,tmpio = mktemp(dirname(filename)) - close(tmpio) - - try - val = h5open(f, tmppath, "w", args...) - Base.Filesystem.rename(tmppath, filename) - return val - catch - Base.Filesystem.unlink(tmppath) - rethrow() - end -end + tmppath, tmpio = mktemp(dirname(filename)) + close(tmpio) + try + val = h5open(f, tmppath, "w", args...) + Base.Filesystem.rename(tmppath, filename) + return val + catch + Base.Filesystem.unlink(tmppath) + rethrow() + end +end function Base.close(obj::File) - if obj.id != -1 - API.h5f_close(obj) - obj.id = -1 - end - nothing + if obj.id != -1 + API.h5f_close(obj) + obj.id = -1 + end + nothing end """ @@ -120,7 +128,6 @@ Returns `true` if `obj` has not been closed, `false` if it has been closed. """ Base.isopen(obj::File) = obj.id != -1 - """ ishdf5(name::AbstractString) @@ -142,7 +149,8 @@ file(f::File) = f file(o::Union{Object,Attribute}) = o.file fd(obj::Object) = API.h5i_get_file_id(checkvalid(obj)) -filename(obj::Union{File,Group,Dataset,Attribute,Datatype}) = API.h5f_get_name(checkvalid(obj)) +filename(obj::Union{File,Group,Dataset,Attribute,Datatype}) = + API.h5f_get_name(checkvalid(obj)) """ start_swmr_write(h5::HDF5.File) @@ -153,5 +161,5 @@ See [SWMR documentation](https://portal.hdfgroup.org/display/HDF5/Single+Writer+ start_swmr_write(h5::File) = API.h5f_start_swmr_write(h5) # Flush buffers -Base.flush(f::Union{Object,Attribute,Datatype,File}, scope = API.H5F_SCOPE_GLOBAL) = API.h5f_flush(checkvalid(f), scope) - +Base.flush(f::Union{Object,Attribute,Datatype,File}, scope=API.H5F_SCOPE_GLOBAL) = + API.h5f_flush(checkvalid(f), scope) diff --git a/src/fileio.jl b/src/fileio.jl index c5069c1c8..554226212 100644 --- a/src/fileio.jl +++ b/src/fileio.jl @@ -18,21 +18,32 @@ _infer_track_order(track_order::Union{Nothing,Bool}, dict::AbstractDict) = # load with just a filename returns a flat dictionary containing all the variables function fileio_load( f::FileIO.File{FileIO.format"HDF5"}; - dict=Dict{String,Any}(), track_order::Union{Nothing,Bool}=nothing, kwargs... + dict=Dict{String,Any}(), + track_order::Union{Nothing,Bool}=nothing, + kwargs... ) - h5open(FileIO.filename(f), "r"; track_order=_infer_track_order(track_order, dict), kwargs...) do file + h5open( + FileIO.filename(f), + "r"; + track_order=_infer_track_order(track_order, dict), + kwargs... + ) do file loadtodict!(dict, file) end end # when called with explicitly requested variable names, return each one -function fileio_load(f::FileIO.File{FileIO.format"HDF5"}, varname::AbstractString; kwargs...) +function fileio_load( + f::FileIO.File{FileIO.format"HDF5"}, varname::AbstractString; kwargs... +) h5open(FileIO.filename(f), "r"; kwargs...) do file read(file, varname) end end -function fileio_load(f::FileIO.File{FileIO.format"HDF5"}, varnames::AbstractString...; kwargs...) +function fileio_load( + f::FileIO.File{FileIO.format"HDF5"}, varnames::AbstractString...; kwargs... +) h5open(FileIO.filename(f), "r"; kwargs...) do file map(var -> read(file, var), varnames) end @@ -40,12 +51,21 @@ end # save all the key-value pairs in the dict as top-level variables function fileio_save( - f::FileIO.File{FileIO.format"HDF5"}, dict::AbstractDict; - track_order::Union{Nothing,Bool}=nothing, kwargs... 
+ f::FileIO.File{FileIO.format"HDF5"}, + dict::AbstractDict; + track_order::Union{Nothing,Bool}=nothing, + kwargs... ) - h5open(FileIO.filename(f), "w"; track_order=_infer_track_order(track_order, dict), kwargs...) do file + h5open( + FileIO.filename(f), + "w"; + track_order=_infer_track_order(track_order, dict), + kwargs... + ) do file for (k, v) in dict - isa(k, AbstractString) || throw(ArgumentError("keys must be strings (the names of variables), got $k")) + isa(k, AbstractString) || throw( + ArgumentError("keys must be strings (the names of variables), got $k") + ) write(file, String(k), v) end end diff --git a/src/filters/builtin.jl b/src/filters/builtin.jl index 2c68f0897..49eec076f 100644 --- a/src/filters/builtin.jl +++ b/src/filters/builtin.jl @@ -12,7 +12,7 @@ highest compression (but slowest speed). struct Deflate <: Filter level::Cuint end -Deflate(;level=5) = Deflate(level) +Deflate(; level=5) = Deflate(level) Base.show(io::IO, deflate::Deflate) = print(io, Deflate, "(level=", Int(deflate.level), ")") filterid(::Type{Deflate}) = API.H5Z_FILTER_DEFLATE @@ -44,8 +44,7 @@ compression filter without the shuffle filter. # External links - $(h5doc("H5P_SET_SHUFFLE")) """ -struct Shuffle <: Filter -end +struct Shuffle <: Filter end filterid(::Type{Shuffle}) = API.H5Z_FILTER_SHUFFLE FILTERS[API.H5Z_FILTER_SHUFFLE] = Shuffle @@ -65,8 +64,7 @@ This should be applied _after_ any lossy filters have been applied. - $(h5doc("H5P_SET_FLETCHER32")) - [_Fletcher's checksum_ on Wikipedia](https://en.wikipedia.org/wiki/Fletcher's_checksum) """ -struct Fletcher32 <: Filter -end +struct Fletcher32 <: Filter end filterid(::Type{Fletcher32}) = API.H5Z_FILTER_FLETCHER32 FILTERS[API.H5Z_FILTER_FLETCHER32] = Fletcher32 function Base.push!(f::FilterPipeline, ::Fletcher32) @@ -92,7 +90,7 @@ struct Szip <: Filter options_mask::Cuint pixels_per_block::Cuint end -function Szip(;coding=:nn, pixels_per_block=8) +function Szip(; coding=:nn, pixels_per_block=8) options_mask = Cuint(0) if coding == :ec options_mask |= API.H5_SZIP_EC_OPTION_MASK @@ -128,8 +126,7 @@ The N-Bit filter. # External links - $(h5doc("H5P_SET_NBIT")) """ -struct NBit <: Filter -end +struct NBit <: Filter end filterid(::Type{NBit}) = API.H5Z_FILTER_NBIT FILTERS[API.H5Z_FILTER_NBIT] = NBit function Base.push!(f::FilterPipeline, ::NBit) @@ -156,4 +153,3 @@ function Base.push!(f::FilterPipeline, scaleoffset::ScaleOffset) API.h5p_set_scaleoffset(f.plist, scaleoffset.scale_type, scaleoffset.scale_factor) return f end - diff --git a/src/filters/filters.jl b/src/filters/filters.jl index c8af19d97..7f9d7b951 100644 --- a/src/filters/filters.jl +++ b/src/filters/filters.jl @@ -50,7 +50,6 @@ export Deflate, Shuffle, Fletcher32, Szip, NBit, ScaleOffset, ExternalFilter import ..HDF5: Properties, h5doc, API - """ Filter @@ -91,7 +90,7 @@ abstract type Filter end Maps filter id to filter type. 
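+
+For example, `FILTERS[API.H5Z_FILTER_DEFLATE]` maps to the built-in `Deflate`
+filter type.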
""" -const FILTERS = Dict{API.H5Z_filter_t, Type{<: Filter}}() +const FILTERS = Dict{API.H5Z_filter_t,Type{<:Filter}}() """ filterid(F) where {F <: Filter} @@ -153,7 +152,7 @@ function can_apply_cfunc(::Type{F}) where {F<:Filter} if func === nothing return C_NULL else - return @cfunction($func, API.herr_t, (API.hid_t,API.hid_t,API.hid_t)) + return @cfunction($func, API.herr_t, (API.hid_t, API.hid_t, API.hid_t)) end end @@ -181,11 +180,10 @@ function set_local_cfunc(::Type{F}) where {F<:Filter} if func === nothing return C_NULL else - return @cfunction($func, API.herr_t, (API.hid_t,API.hid_t,API.hid_t)) + return @cfunction($func, API.herr_t, (API.hid_t, API.hid_t, API.hid_t)) end end - """ filter_func(::Type{F}) where {F<:Filter} @@ -210,9 +208,9 @@ function filter_cfunc(::Type{F}) where {F<:Filter} if func === nothing error("Filter function for $F must be defined via `filter_func`.") end - c_filter_func = @cfunction($func, Csize_t, - (Cuint, Csize_t, Ptr{Cuint}, Csize_t, - Ptr{Csize_t}, Ptr{Ptr{Cvoid}})) + c_filter_func = @cfunction( + $func, Csize_t, (Cuint, Csize_t, Ptr{Cuint}, Csize_t, Ptr{Csize_t}, Ptr{Ptr{Cvoid}}) + ) return c_filter_func end @@ -223,7 +221,7 @@ end Register the filter with the HDF5 library via [`API.h5z_register`](@ref). Also add F to the FILTERS dictionary. """ -function register_filter(::Type{F}) where F <: Filter +function register_filter(::Type{F}) where {F<:Filter} id = filterid(F) encoder = encoder_present(F) decoder = decoder_present(F) @@ -232,16 +230,18 @@ function register_filter(::Type{F}) where F <: Filter set_local = set_local_cfunc(F) func = filter_cfunc(F) GC.@preserve name begin - API.h5z_register(API.H5Z_class_t( - API.H5Z_CLASS_T_VERS, - id, - encoder, - decoder, - pointer(name), - can_apply, - set_local, - func - )) + API.h5z_register( + API.H5Z_class_t( + API.H5Z_CLASS_T_VERS, + id, + encoder, + decoder, + pointer(name), + can_apply, + set_local, + func + ) + ) end FILTERS[id] = F return nothing @@ -288,7 +288,7 @@ end function ExternalFilter(filter_id, flags, data::Integer...) 
ExternalFilter(filter_id, flags, Cuint[data...]) end -function ExternalFilter(filter_id, data::AbstractVector{<:Integer} = Cuint[]) +function ExternalFilter(filter_id, data::AbstractVector{<:Integer}=Cuint[]) ExternalFilter(filter_id, API.H5Z_FLAG_MANDATORY, data) end filterid(filter::ExternalFilter) = filter.filter_id @@ -328,22 +328,26 @@ end Base.size(f::FilterPipeline) = (length(f),) function Base.getindex(f::FilterPipeline, i::Integer) - id = API.h5p_get_filter(f.plist, i-1, C_NULL, C_NULL, C_NULL, 0, C_NULL, C_NULL) + id = API.h5p_get_filter(f.plist, i - 1, C_NULL, C_NULL, C_NULL, 0, C_NULL, C_NULL) F = get(FILTERS, id, ExternalFilter) return getindex(f, F, i) end -function Base.getindex(f::FilterPipeline, ::Type{ExternalFilter}, i::Integer, cd_values::Vector{Cuint} = Cuint[]) +function Base.getindex( + f::FilterPipeline, ::Type{ExternalFilter}, i::Integer, cd_values::Vector{Cuint}=Cuint[] +) flags = Ref{Cuint}() cd_nelmts = Ref{Csize_t}(length(cd_values)) namebuf = Array{UInt8}(undef, 256) config = Ref{Cuint}() - id = API.h5p_get_filter(f.plist, i-1, flags, cd_nelmts, cd_values, length(namebuf), namebuf, config) + id = API.h5p_get_filter( + f.plist, i - 1, flags, cd_nelmts, cd_values, length(namebuf), namebuf, config + ) if cd_nelmts[] > length(cd_values) resize!(cd_values, cd_nelmts[]) return getindex(f, ExternalFilter, i, cd_values) end - resize!(namebuf, findfirst(isequal(0), namebuf)-1) + resize!(namebuf, findfirst(isequal(0), namebuf) - 1) resize!(cd_values, cd_nelmts[]) return ExternalFilter(id, flags[], cd_values, String(namebuf), config[]) end @@ -352,7 +356,16 @@ function Base.getindex(f::FilterPipeline, ::Type{F}, i::Integer) where {F<:Filte @assert isbitstype(F) ref = Ref{F}() GC.@preserve ref begin - id = API.h5p_get_filter(f.plist, i-1, C_NULL, div(sizeof(F), sizeof(Cuint)), pointer_from_objref(ref), 0, C_NULL, C_NULL) + id = API.h5p_get_filter( + f.plist, + i - 1, + C_NULL, + div(sizeof(F), sizeof(Cuint)), + pointer_from_objref(ref), + 0, + C_NULL, + C_NULL + ) end @assert id == filterid(F) return ref[] @@ -361,12 +374,20 @@ function Base.getindex(f::FilterPipeline, ::Type{F}) where {F<:Filter} @assert isbitstype(F) ref = Ref{F}() GC.@preserve ref begin - API.h5p_get_filter_by_id(f.plist, filterid(F), C_NULL, div(sizeof(F), sizeof(Cuint)), pointer_from_objref(ref), 0, C_NULL, C_NULL) + API.h5p_get_filter_by_id( + f.plist, + filterid(F), + C_NULL, + div(sizeof(F), sizeof(Cuint)), + pointer_from_objref(ref), + 0, + C_NULL, + C_NULL + ) end return ref[] end - function Base.empty!(filters::FilterPipeline) API.h5p_remove_filter(filters.plist, API.H5Z_FILTER_ALL) return filters @@ -375,16 +396,24 @@ function Base.delete!(filters::FilterPipeline, ::Type{F}) where {F<:Filter} API.h5p_remove_filter(filters.plist, filterid(F)) return filters end -function Base.append!(filters::FilterPipeline, extra::Union{AbstractVector{<:Filter}, NTuple{N, Filter} where N}) +function Base.append!( + filters::FilterPipeline, extra::Union{AbstractVector{<:Filter},NTuple{N,Filter} where N} +) for filter in extra push!(filters, filter) end return filters end -function Base.push!(p::FilterPipeline, f::F) where F <: Filter +function Base.push!(p::FilterPipeline, f::F) where {F<:Filter} ref = Ref(f) GC.@preserve ref begin - API.h5p_set_filter(p.plist, filterid(F), API.H5Z_FLAG_OPTIONAL, div(sizeof(F), sizeof(Cuint)), pointer_from_objref(ref)) + API.h5p_set_filter( + p.plist, + filterid(F), + API.H5Z_FLAG_OPTIONAL, + div(sizeof(F), sizeof(Cuint)), + pointer_from_objref(ref) + ) end return p end @@ 
-396,7 +425,7 @@ function Base.push!(p::FilterPipeline, f::ExternalFilter) end # Convert a Filter to an Integer subtype using filterid -function Base.convert(::Type{I}, ::Type{F}) where {I <: Integer, F <: Filter} +function Base.convert(::Type{I}, ::Type{F}) where {I<:Integer,F<:Filter} Base.convert(I, filterid(F)) end diff --git a/src/groups.jl b/src/groups.jl index b2f96b9b7..db6e9d32c 100644 --- a/src/groups.jl +++ b/src/groups.jl @@ -19,18 +19,25 @@ There are many keyword properties that can be set. Below are a few select keywor See also * [`H5P`](@ref H5P) """ -function create_group(parent::Union{File,Group}, path::AbstractString, - lcpl::LinkCreateProperties=_link_properties(path), - gcpl::GroupCreateProperties=GroupCreateProperties(); - pv...) - haskey(parent, path) && error("cannot create group: object \"", path, "\" already exists at ", name(parent)) +function create_group( + parent::Union{File,Group}, + path::AbstractString, + lcpl::LinkCreateProperties=_link_properties(path), + gcpl::GroupCreateProperties=GroupCreateProperties(); + pv... +) + haskey(parent, path) && + error("cannot create group: object \"", path, "\" already exists at ", name(parent)) pv = setproperties!(gcpl; pv...) isempty(pv) || error("invalid keyword options $pv") Group(API.h5g_create(parent, path, lcpl, gcpl, API.H5P_DEFAULT), file(parent)) end -open_group(parent::Union{File,Group}, name::AbstractString, gapl::GroupAccessProperties=GroupAccessProperties()) = - Group(API.h5g_open(checkvalid(parent), name, gapl), file(parent)) +open_group( + parent::Union{File,Group}, + name::AbstractString, + gapl::GroupAccessProperties=GroupAccessProperties() +) = Group(API.h5g_open(checkvalid(parent), name, gapl), file(parent)) # Get the root group root(h5file::File) = open_group(h5file, "/") @@ -44,12 +51,17 @@ Base.isempty(x::Union{Group,File}) = length(x) == 0 name(obj::Union{File,Group,Dataset,Datatype}) = API.h5i_get_name(checkvalid(obj)) # iteration by objects -function Base.iterate(parent::Union{File,Group}, iter = (1,nothing)) +function Base.iterate(parent::Union{File,Group}, iter=(1, nothing)) n, prev_obj = iter prev_obj ≢ nothing && close(prev_obj) n > length(parent) && return nothing - obj = h5object(API.h5o_open_by_idx(checkvalid(parent), ".", idx_type(parent), order(parent), n-1, API.H5P_DEFAULT), parent) - return (obj, (n+1,obj)) + obj = h5object( + API.h5o_open_by_idx( + checkvalid(parent), ".", idx_type(parent), order(parent), n - 1, API.H5P_DEFAULT + ), + parent + ) + return (obj, (n + 1, obj)) end function Base.parent(obj::Union{File,Group,Dataset}) @@ -78,7 +90,11 @@ function split1(path::AbstractString) end end -function Base.haskey(parent::Union{File,Group}, path::AbstractString, lapl::LinkAccessProperties = LinkAccessProperties()) +function Base.haskey( + parent::Union{File,Group}, + path::AbstractString, + lapl::LinkAccessProperties=LinkAccessProperties() +) # recursively check each step of the path exists # see https://portal.hdfgroup.org/display/HDF5/H5L_EXISTS checkvalid(parent) @@ -106,16 +122,29 @@ function Base.keys(x::Union{Group,File}) return children end - -delete_object(parent::Union{File,Group}, path::AbstractString, lapl::LinkAccessProperties=LinkAccessProperties()) = - API.h5l_delete(checkvalid(parent), path, lapl) -delete_object(obj::Object) = delete_object(parent(obj), ascii(split(name(obj),"/")[end])) # FIXME: remove ascii? 
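+# For example (illustrative only): `delete_object(f, "mygroup/mydata")` removes
+# the link at that path from the file or group handle `f`.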
+delete_object( + parent::Union{File,Group}, + path::AbstractString, + lapl::LinkAccessProperties=LinkAccessProperties() +) = API.h5l_delete(checkvalid(parent), path, lapl) +delete_object(obj::Object) = delete_object(parent(obj), ascii(split(name(obj), "/")[end])) # FIXME: remove ascii? # Move links -move_link(src::Union{File,Group}, src_name::AbstractString, dest::Union{File,Group}, dest_name::AbstractString=src_name, lapl::LinkAccessProperties = LinkAccessProperties(), lcpl::LinkCreateProperties = LinkCreateProperties()) = - API.h5l_move(checkvalid(src), src_name, checkvalid(dest), dest_name, lcpl, lapl) -move_link(parent::Union{File,Group}, src_name::AbstractString, dest_name::AbstractString, lapl::LinkAccessProperties = LinkAccessProperties(), lcpl::LinkCreateProperties = LinkCreateProperties()) = - API.h5l_move(checkvalid(parent), src_name, parent, dest_name, lcpl, lapl) +move_link( + src::Union{File,Group}, + src_name::AbstractString, + dest::Union{File,Group}, + dest_name::AbstractString=src_name, + lapl::LinkAccessProperties=LinkAccessProperties(), + lcpl::LinkCreateProperties=LinkCreateProperties() +) = API.h5l_move(checkvalid(src), src_name, checkvalid(dest), dest_name, lcpl, lapl) +move_link( + parent::Union{File,Group}, + src_name::AbstractString, + dest_name::AbstractString, + lapl::LinkAccessProperties=LinkAccessProperties(), + lcpl::LinkCreateProperties=LinkCreateProperties() +) = API.h5l_move(checkvalid(parent), src_name, parent, dest_name, lcpl, lapl) """ create_external(source::Union{HDF5.File, HDF5.Group}, source_relpath, target_filename, target_path; @@ -124,7 +153,16 @@ move_link(parent::Union{File,Group}, src_name::AbstractString, dest_name::Abstra Create an external link such that `source[source_relpath]` points to `target_path` within the file with path `target_filename`; Calls `[H5Lcreate_external](https://www.hdfgroup.org/HDF5/doc/RM/RM_H5L.html#Link-CreateExternal)`. """ -function create_external(source::Union{File,Group}, source_relpath, target_filename, target_path; lcpl_id=API.H5P_DEFAULT, lapl_id=API.H5P_DEFAULT) - API.h5l_create_external(target_filename, target_path, source, source_relpath, lcpl_id, lapl_id) +function create_external( + source::Union{File,Group}, + source_relpath, + target_filename, + target_path; + lcpl_id=API.H5P_DEFAULT, + lapl_id=API.H5P_DEFAULT +) + API.h5l_create_external( + target_filename, target_path, source, source_relpath, lcpl_id, lapl_id + ) nothing end diff --git a/src/objects.jl b/src/objects.jl index 5ed9855e2..396685830 100644 --- a/src/objects.jl +++ b/src/objects.jl @@ -1,6 +1,7 @@ # Ensure that objects haven't been closed Base.isvalid(obj::Union{File,Datatype,Dataspace}) = obj.id != -1 && API.h5i_is_valid(obj) -Base.isvalid(obj::Union{Group,Dataset,Attribute}) = obj.id != -1 && obj.file.id != -1 && API.h5i_is_valid(obj) +Base.isvalid(obj::Union{Group,Dataset,Attribute}) = + obj.id != -1 && obj.file.id != -1 && API.h5i_is_valid(obj) checkvalid(obj) = isvalid(obj) ? obj : error("File or object has been closed") # Close functions @@ -22,13 +23,19 @@ end # Object (group, named datatype, or dataset) open function h5object(obj_id::API.hid_t, parent) obj_type = API.h5i_get_type(obj_id) - obj_type == API.H5I_GROUP ? Group(obj_id, file(parent)) : - obj_type == API.H5I_DATATYPE ? Datatype(obj_id, file(parent)) : - obj_type == API.H5I_DATASET ? 
Dataset(obj_id, file(parent)) : - error("Invalid object type for path ", path) + if obj_type == API.H5I_GROUP + Group(obj_id, file(parent)) + elseif obj_type == API.H5I_DATATYPE + Datatype(obj_id, file(parent)) + elseif obj_type == API.H5I_DATASET + Dataset(obj_id, file(parent)) + else + error("Invalid object type for path ", path) + end end -open_object(parent, path::AbstractString) = h5object(API.h5o_open(checkvalid(parent), path, API.H5P_DEFAULT), parent) +open_object(parent, path::AbstractString) = + h5object(API.h5o_open(checkvalid(parent), path, API.H5P_DEFAULT), parent) function gettype(parent, path::AbstractString) obj_id = API.h5o_open(checkvalid(parent), path, API.H5P_DEFAULT) @@ -38,5 +45,25 @@ function gettype(parent, path::AbstractString) end # Copy objects -copy_object(src_parent::Union{File,Group}, src_path::AbstractString, dst_parent::Union{File,Group}, dst_path::AbstractString) = API.h5o_copy(checkvalid(src_parent), src_path, checkvalid(dst_parent), dst_path, API.H5P_DEFAULT, _link_properties(dst_path)) -copy_object(src_obj::Object, dst_parent::Union{File,Group}, dst_path::AbstractString) = API.h5o_copy(checkvalid(src_obj), ".", checkvalid(dst_parent), dst_path, API.H5P_DEFAULT, _link_properties(dst_path)) +copy_object( + src_parent::Union{File,Group}, + src_path::AbstractString, + dst_parent::Union{File,Group}, + dst_path::AbstractString +) = API.h5o_copy( + checkvalid(src_parent), + src_path, + checkvalid(dst_parent), + dst_path, + API.H5P_DEFAULT, + _link_properties(dst_path) +) +copy_object(src_obj::Object, dst_parent::Union{File,Group}, dst_path::AbstractString) = + API.h5o_copy( + checkvalid(src_obj), + ".", + checkvalid(dst_parent), + dst_path, + API.H5P_DEFAULT, + _link_properties(dst_path) + ) diff --git a/src/properties.jl b/src/properties.jl index 2cbc3a425..3cc05ace2 100644 --- a/src/properties.jl +++ b/src/properties.jl @@ -17,7 +17,7 @@ function Base.close(obj::Properties) end Base.isvalid(obj::Properties) = obj.id != -1 && API.h5i_is_valid(obj) -Base.copy(obj::P) where {P <: Properties} = P(HDF5.API.h5p_copy(obj.id)) +Base.copy(obj::P) where {P<:Properties} = P(HDF5.API.h5p_copy(obj.id)) # By default, properties objects are only initialized lazily function init!(prop::P) where {P<:Properties} @@ -27,7 +27,7 @@ function init!(prop::P) where {P<:Properties} return prop end -function (::Type{P})(;kwargs...) where {P <: Properties} +function (::Type{P})(; kwargs...) where {P<:Properties} obj = P(API.H5P_DEFAULT) for (k, v) in kwargs setproperty!(obj, k, v) @@ -35,7 +35,7 @@ function (::Type{P})(;kwargs...) where {P <: Properties} return obj end # Properties() do syntax -function (::Type{P})(func::Function; kwargs...) where {P <: Properties} +function (::Type{P})(func::Function; kwargs...) where {P<:Properties} p = P(; kwargs...) # Eagerly initialize when using do syntax # This allows for use low-level API calls @@ -47,9 +47,8 @@ function (::Type{P})(func::Function; kwargs...) where {P <: Properties} end end -function Base.getproperty(p::P, name::Symbol) where {P <: Properties} - name === :id ? getfield(p, :id) : - class_getproperty(P, init!(p), name) +function Base.getproperty(p::P, name::Symbol) where {P<:Properties} + name === :id ? 
getfield(p, :id) : class_getproperty(P, init!(p), name) end function Base.setproperty!(p::P, name::Symbol, val) where {P<:Properties} @@ -61,7 +60,8 @@ function Base.setproperty!(p::P, name::Symbol, val) where {P<:Properties} end Base.propertynames(p::P) where {P<:Properties} = (all_propertynames(P)..., :id) -all_propertynames(::Type{P}) where {P<:Properties} = (class_propertynames(P)..., all_propertynames(superclass(P))...,) +all_propertynames(::Type{P}) where {P<:Properties} = + (class_propertynames(P)..., all_propertynames(superclass(P))...,) # defaults: refer to super class class_getproperty(::Type{P}, props, name) where {P<:Properties} = @@ -70,8 +70,6 @@ class_setproperty!(::Type{P}, p, name, val) where {P<:Properties} = class_setproperty!(superclass(P), p, name, val) class_propertynames(::Type{P}) where {P<:Properties} = () - - """ @propertyclass P classid @@ -115,7 +113,6 @@ macro propertyclass(name, classid) return esc(expr) end - @propertyclass GenericProperties API.H5P_DEFAULT superclass(::Type{P}) where {P<:Properties} = GenericProperties @@ -126,7 +123,6 @@ class_setproperty!(::Type{GenericProperties}, props, name, val) = error("$(typeof(props)) has no property $name") all_propertynames(::Type{GenericProperties}) = () - # for initializing multiple Properties from a set of keyword arguments """ setproperties!(props::Properties...; kwargs...) @@ -136,11 +132,11 @@ each `Properties` object in `props`. Returns a `Dict` of any pairs which didn't match properties in `props`. """ function setproperties!(props::Properties...; kwargs...) - filter(kwargs) do (k,v) + filter(kwargs) do (k, v) found = false for prop in props if k in all_propertynames(typeof(prop)) - setproperty!(prop,k,v) + setproperty!(prop, k, v) found = true end end @@ -148,8 +144,6 @@ function setproperties!(props::Properties...; kwargs...) end end - - ### ### Convenience macros for defining getter/setter functions ### @@ -158,10 +152,10 @@ end @tuple_property(name) """ macro tuple_property(property) - get_property = Symbol(:get_,property) - set_property! = Symbol(:set_,property,:!) - api_get_property = :(API.$(Symbol(:h5p_get_,property))) - api_set_property = :(API.$(Symbol(:h5p_set_,property))) + get_property = Symbol(:get_, property) + set_property! = Symbol(:set_, property, :!) + api_get_property = :(API.$(Symbol(:h5p_get_, property))) + api_set_property = :(API.$(Symbol(:h5p_set_, property))) quote function $(esc(get_property))(p::Properties) return $api_get_property(p) @@ -178,10 +172,10 @@ end Wrap property getter/setter API functions that use enum values to use symbol instead. """ macro enum_property(property, pairs...) - get_property = Symbol(:get_,property) - set_property! = Symbol(:set_,property,:!) - api_get_property = :(API.$(Symbol(:h5p_get_,property))) - api_set_property = :(API.$(Symbol(:h5p_set_,property))) + get_property = Symbol(:get_, property) + set_property! = Symbol(:set_, property, :!) + api_get_property = :(API.$(Symbol(:h5p_get_, property))) + api_set_property = :(API.$(Symbol(:h5p_set_, property))) get_expr = :(error("Unknown $property value $enum")) set_expr = :(throw(ArgumentError("Invalid $property $val"))) @@ -216,10 +210,10 @@ end Wrap property getter/setter API functions that use `0`/`1` to use `Bool` values """ macro bool_property(property) - get_property = Symbol(:get_,property) - set_property! = Symbol(:set_,property,:!) 
- api_get_property = :(API.$(Symbol(:h5p_get_,property))) - api_set_property = :(API.$(Symbol(:h5p_set_,property))) + get_property = Symbol(:get_, property) + set_property! = Symbol(:set_, property, :!) + api_get_property = :(API.$(Symbol(:h5p_get_, property))) + api_set_property = :(API.$(Symbol(:h5p_set_, property))) quote function $(esc(get_property))(p::Properties) return $api_get_property(p) != 0 @@ -230,7 +224,6 @@ macro bool_property(property) end end - ### ### Define Properties types ### @@ -252,24 +245,44 @@ that will be closed. @bool_property(obj_track_times) -class_propertynames(::Type{ObjectCreateProperties}) = ( - :obj_track_times, - :track_times, - ) +class_propertynames(::Type{ObjectCreateProperties}) = (:obj_track_times, :track_times,) function class_getproperty(::Type{ObjectCreateProperties}, p::Properties, name::Symbol) - name === :obj_track_times ? get_obj_track_times(p) : - # deprecated - name === :track_times ? (depwarn("`track_times` property is deprecated, use `obj_track_times` instead",:track_times); get_obj_track_times(p)) : - class_getproperty(superclass(ObjectCreateProperties), p, name) + if name === :obj_track_times + get_obj_track_times(p) + # deprecated + elseif name === :track_times + ( + depwarn( + "`track_times` property is deprecated, use `obj_track_times` instead", + :track_times + ); + get_obj_track_times(p) + ) + else + class_getproperty(superclass(ObjectCreateProperties), p, name) + end end -function class_setproperty!(::Type{ObjectCreateProperties}, p::Properties, name::Symbol, val) - name === :obj_track_times ? set_obj_track_times!(p, val) : - # deprecated - name === :track_times ? (depwarn("`track_times=$val` keyword option is deprecated, use `obj_track_times=$val` instead",:track_times); set_obj_track_times!(p, val)) : - class_setproperty!(superclass(ObjectCreateProperties), p, name, val) +function class_setproperty!( + ::Type{ObjectCreateProperties}, p::Properties, name::Symbol, val +) + if name === :obj_track_times + set_obj_track_times!(p, val) + # deprecated + elseif name === :track_times + ( + depwarn( + "`track_times=$val` keyword option is deprecated, use `obj_track_times=$val` instead", + :track_times + ); + set_obj_track_times!(p, val) + ) + else + class_setproperty!(superclass(ObjectCreateProperties), p, name, val) + end end -get_track_order(p::Properties) = API.h5p_get_link_creation_order(p) != 0 && API.h5p_get_attr_creation_order(p) != 0 +get_track_order(p::Properties) = + API.h5p_get_link_creation_order(p) != 0 && API.h5p_get_attr_creation_order(p) != 0 function set_track_order!(p::Properties, val::Bool) crt_order_flags = val ? (API.H5P_CRT_ORDER_TRACKED | API.H5P_CRT_ORDER_INDEXED) : 0 @@ -295,19 +308,24 @@ that will be closed. @propertyclass GroupCreateProperties API.H5P_GROUP_CREATE superclass(::Type{GroupCreateProperties}) = ObjectCreateProperties -class_propertynames(::Type{GroupCreateProperties}) = ( - :local_heap_size_hint, - :track_order, - ) +class_propertynames(::Type{GroupCreateProperties}) = (:local_heap_size_hint, :track_order,) function class_getproperty(::Type{GroupCreateProperties}, p::Properties, name::Symbol) - name === :local_heap_size_hint ? API.h5p_get_local_heap_size_hint(p) : - name === :track_order ? 
get_track_order(p) : - class_getproperty(superclass(GroupCreateProperties), p, name) + if name === :local_heap_size_hint + API.h5p_get_local_heap_size_hint(p) + elseif name === :track_order + get_track_order(p) + else + class_getproperty(superclass(GroupCreateProperties), p, name) + end end function class_setproperty!(::Type{GroupCreateProperties}, p::Properties, name::Symbol, val) - name === :local_heap_size_hint ? API.h5p_set_local_heap_size_hint(p, val) : - name === :track_order ? set_track_order!(p, val) : - class_setproperty!(superclass(GroupCreateProperties), p, name, val) + if name === :local_heap_size_hint + API.h5p_set_local_heap_size_hint(p, val) + elseif name === :track_order + set_track_order!(p, val) + else + class_setproperty!(superclass(GroupCreateProperties), p, name, val) + end end """ @@ -328,15 +346,8 @@ that will be closed. @propertyclass FileCreateProperties API.H5P_FILE_CREATE superclass(::Type{FileCreateProperties}) = ObjectCreateProperties - -class_propertynames(::Type{FileCreateProperties}) = ( - :userblock, - :track_order, - :strategy, - :persist, - :threshold, - :file_space_page_size - ) +class_propertynames(::Type{FileCreateProperties}) = + (:userblock, :track_order, :strategy, :persist, :threshold, :file_space_page_size) const FSPACE_STRATEGY_SYMBOLS = Dict( :fsm_aggr => API.H5F_FSPACE_STRATEGY_FSM_AGGR, @@ -346,8 +357,10 @@ const FSPACE_STRATEGY_SYMBOLS = Dict( :ntypes => API.H5F_FSPACE_STRATEGY_NTYPES ) -set_strategy!(p::FileCreateProperties, val) = API.h5p_set_file_space_strategy(p, strategy = val) -set_strategy!(p::FileCreateProperties, val::Symbol) = API.h5p_set_file_space_strategy(p, strategy = FSPACE_STRATEGY_SYMBOLS[val]) +set_strategy!(p::FileCreateProperties, val) = + API.h5p_set_file_space_strategy(p; strategy=val) +set_strategy!(p::FileCreateProperties, val::Symbol) = + API.h5p_set_file_space_strategy(p; strategy=FSPACE_STRATEGY_SYMBOLS[val]) function get_strategy(p::FileCreateProperties) strategy = API.h5p_get_file_space_strategy(p)[:strategy] for (k, v) in FSPACE_STRATEGY_SYMBOLS @@ -359,25 +372,32 @@ function get_strategy(p::FileCreateProperties) end function class_getproperty(::Type{FileCreateProperties}, p::Properties, name::Symbol) - name === :userblock ? API.h5p_get_userblock(p) : - name === :track_order ? get_track_order(p) : - name === :strategy ? get_strategy(p) : - name === :persist ? API.h5p_get_file_space_strategy(p)[:persist] : - name === :threshold ? API.h5p_get_file_space_strategy(p)[:threshold] : - name === :file_space_page_size ? API.h5p_get_file_space_page_size(p) : - class_getproperty(superclass(FileCreateProperties), p, name) + if name === :userblock + API.h5p_get_userblock(p) + elseif name === :track_order + get_track_order(p) + elseif name === :strategy + get_strategy(p) + elseif name === :persist + API.h5p_get_file_space_strategy(p)[:persist] + elseif name === :threshold + API.h5p_get_file_space_strategy(p)[:threshold] + elseif name === :file_space_page_size + API.h5p_get_file_space_page_size(p) + else + class_getproperty(superclass(FileCreateProperties), p, name) + end end function class_setproperty!(::Type{FileCreateProperties}, p::Properties, name::Symbol, val) - name === :userblock ? API.h5p_set_userblock(p, val) : - name === :track_order ? set_track_order!(p, val) : - name === :strategy ? set_strategy!(p, val) : - name === :persist ? API.h5p_set_file_space_strategy(p, persist = val) : - name === :threshold ? API.h5p_set_file_space_strategy(p, threshold = val) : - name === :file_space_page_size ? 
API.h5p_set_file_space_page_size(p, val) : + name === :userblock ? API.h5p_set_userblock(p, val) : + name === :track_order ? set_track_order!(p, val) : + name === :strategy ? set_strategy!(p, val) : + name === :persist ? API.h5p_set_file_space_strategy(p; persist=val) : + name === :threshold ? API.h5p_set_file_space_strategy(p; threshold=val) : + name === :file_space_page_size ? API.h5p_set_file_space_page_size(p, val) : class_setproperty!(superclass(FileCreateProperties), p, name, val) end - """ DatatypeCreateProperties(;kws...) DatatypeCreateProperties(f::Function; kws...) @@ -465,27 +485,33 @@ that will be closed. @propertyclass DatasetCreateProperties API.H5P_DATASET_CREATE superclass(::Type{DatasetCreateProperties}) = ObjectCreateProperties -@enum_property(alloc_time, +@enum_property( + alloc_time, :default => API.H5D_ALLOC_TIME_DEFAULT, :early => API.H5D_ALLOC_TIME_EARLY, :incremental => API.H5D_ALLOC_TIME_INCR, - :late => API.H5D_ALLOC_TIME_LATE) + :late => API.H5D_ALLOC_TIME_LATE +) # reverse indices function get_chunk(p::Properties) dims, N = API.h5p_get_chunk(p) - ntuple(i -> Int(dims[N-i+1]), N) + ntuple(i -> Int(dims[N - i + 1]), N) end -set_chunk!(p::Properties, dims) = API.h5p_set_chunk(p, length(dims), API.hsize_t[reverse(dims)...]) +set_chunk!(p::Properties, dims) = + API.h5p_set_chunk(p, length(dims), API.hsize_t[reverse(dims)...]) -@enum_property(layout, +@enum_property( + layout, :compact => API.H5D_COMPACT, :contiguous => API.H5D_CONTIGUOUS, :chunked => API.H5D_CHUNKED, - :virtual => API.H5D_VIRTUAL) + :virtual => API.H5D_VIRTUAL +) # See https://portal.hdfgroup.org/display/HDF5/H5P_SET_FILL_TIME -@enum_property(fill_time, +@enum_property( + fill_time, :alloc => API.H5D_FILL_TIME_ALLOC, :never => API.H5D_FILL_TIME_NEVER, :ifset => API.H5D_FILL_TIME_IFSET @@ -493,16 +519,22 @@ set_chunk!(p::Properties, dims) = API.h5p_set_chunk(p, length(dims), API.hsize_t # filters getters/setters get_filters(p::Properties) = Filters.FilterPipeline(p) -set_filters!(p::Properties, val::Filters.Filter) = push!(empty!(Filters.FilterPipeline(p)), val) -set_filters!(p::Properties, vals::Union{Tuple, AbstractVector}) = append!(empty!(Filters.FilterPipeline(p)), vals) +set_filters!(p::Properties, val::Filters.Filter) = + push!(empty!(Filters.FilterPipeline(p)), val) +set_filters!(p::Properties, vals::Union{Tuple,AbstractVector}) = + append!(empty!(Filters.FilterPipeline(p)), vals) # convenience -set_deflate!(p::Properties, val::Bool) = val && push!(Filters.FilterPipeline(p), Filters.Deflate()) -set_deflate!(p::Properties, level::Integer) = push!(Filters.FilterPipeline(p), Filters.Deflate(level=level)) -set_shuffle!(p::Properties, val::Bool) = val && push!(Filters.FilterPipeline(p), Filters.Shuffle()) -set_fletcher32!(p::Properties, val::Bool) = val && push!(Filters.FilterPipeline(p), Filters.Fletcher32()) -set_blosc!(p::Properties, val) = error("The Blosc filter now requires the H5Zblosc package be loaded") - +set_deflate!(p::Properties, val::Bool) = + val && push!(Filters.FilterPipeline(p), Filters.Deflate()) +set_deflate!(p::Properties, level::Integer) = + push!(Filters.FilterPipeline(p), Filters.Deflate(; level=level)) +set_shuffle!(p::Properties, val::Bool) = + val && push!(Filters.FilterPipeline(p), Filters.Shuffle()) +set_fletcher32!(p::Properties, val::Bool) = + val && push!(Filters.FilterPipeline(p), Filters.Fletcher32()) +set_blosc!(p::Properties, val) = + error("The Blosc filter now requires the H5Zblosc package be loaded") class_propertynames(::Type{DatasetCreateProperties}) = ( 
:alloc_time, @@ -521,48 +553,96 @@ class_propertynames(::Type{DatasetCreateProperties}) = ( # deprecated :compress, :filter - ) - +) function class_getproperty(::Type{DatasetCreateProperties}, p::Properties, name::Symbol) - name === :alloc_time ? get_alloc_time(p) : - name === :fill_time ? get_fill_time(p) : - name === :fill_value ? get_fill_value(p) : - name === :chunk ? get_chunk(p) : - name === :external ? API.h5p_get_external(p) : - name === :filters ? get_filters(p) : - name === :layout ? get_layout(p) : - name === :no_attrs_hint ? - @static(API.h5_get_libversion() < v"1.10.5" ? - false : - API.h5p_get_dset_no_attrs_hint(p) - ) : - # deprecated - name === :filter ? (depwarn("`filter` property name is deprecated, use `filters` instead",:class_getproperty); get_filters(p)) : - class_getproperty(superclass(DatasetCreateProperties), p, name) -end -function class_setproperty!(::Type{DatasetCreateProperties}, p::Properties, name::Symbol, val) - name === :alloc_time ? set_alloc_time!(p, val) : - name === :fill_time ? set_fill_time!(p, val) : - name === :fill_value ? set_fill_value!(p, val) : - name === :chunk ? set_chunk!(p, val) : - name === :external ? API.h5p_set_external(p, val...) : - name === :filters ? set_filters!(p, val) : - name === :layout ? set_layout!(p, val) : - name === :no_attrs_hint ? - @static(API.h5_get_libversion() < v"1.10.5" ? - error("no_attrs_hint is only valid for HDF5 library versions 1.10.5 or greater") : - API.h5p_set_dset_no_attrs_hint(p, val) - ) : - # set-only for convenience - name === :blosc ? set_blosc!(p, val) : - name === :deflate ? set_deflate!(p, val) : - name === :fletcher32 ? set_fletcher32!(p, val) : - name === :shuffle ? set_shuffle!(p, val) : - # deprecated - name === :filter ? (depwarn("`filter=$val` keyword option is deprecated, use `filters=$val` instead",:class_setproperty!); set_filters!(p, val)) : - name === :compress ? (depwarn("`compress=$val` keyword option is deprecated, use `deflate=$val` instead",:class_setproperty!); set_deflate!(p, val)) : - class_setproperty!(superclass(DatasetCreateProperties), p, name, val) + if name === :alloc_time + get_alloc_time(p) + elseif name === :fill_time + get_fill_time(p) + elseif name === :fill_value + get_fill_value(p) + elseif name === :chunk + get_chunk(p) + elseif name === :external + API.h5p_get_external(p) + elseif name === :filters + get_filters(p) + elseif name === :layout + get_layout(p) + elseif name === :no_attrs_hint + @static( + API.h5_get_libversion() < v"1.10.5" ? false : API.h5p_get_dset_no_attrs_hint(p) + ) + # deprecated + elseif name === :filter + ( + depwarn( + "`filter` property name is deprecated, use `filters` instead", + :class_getproperty + ); + get_filters(p) + ) + else + class_getproperty(superclass(DatasetCreateProperties), p, name) + end +end +function class_setproperty!( + ::Type{DatasetCreateProperties}, p::Properties, name::Symbol, val +) + if name === :alloc_time + set_alloc_time!(p, val) + elseif name === :fill_time + set_fill_time!(p, val) + elseif name === :fill_value + set_fill_value!(p, val) + elseif name === :chunk + set_chunk!(p, val) + elseif name === :external + API.h5p_set_external(p, val...) 
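+        # `val` is splatted into `API.h5p_set_external(p, name, offset, size)`,
+        # registering a segment of an external storage file for the dataset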
+ elseif name === :filters + set_filters!(p, val) + elseif name === :layout + set_layout!(p, val) + elseif name === :no_attrs_hint + @static( + if API.h5_get_libversion() < v"1.10.5" + error( + "no_attrs_hint is only valid for HDF5 library versions 1.10.5 or greater" + ) + else + API.h5p_set_dset_no_attrs_hint(p, val) + end + ) + # set-only for convenience + elseif name === :blosc + set_blosc!(p, val) + elseif name === :deflate + set_deflate!(p, val) + elseif name === :fletcher32 + set_fletcher32!(p, val) + elseif name === :shuffle + set_shuffle!(p, val) + # deprecated + elseif name === :filter + ( + depwarn( + "`filter=$val` keyword option is deprecated, use `filters=$val` instead", + :class_setproperty! + ); + set_filters!(p, val) + ) + elseif name === :compress + ( + depwarn( + "`compress=$val` keyword option is deprecated, use `deflate=$val` instead", + :class_setproperty! + ); + set_deflate!(p, val) + ) + else + class_setproperty!(superclass(DatasetCreateProperties), p, name, val) + end end """ @@ -574,21 +654,24 @@ that will be closed. """ @propertyclass StringCreateProperties API.H5P_STRING_CREATE -@enum_property(char_encoding, - :ascii => API.H5T_CSET_ASCII, - :utf8 => API.H5T_CSET_UTF8) +@enum_property(char_encoding, :ascii => API.H5T_CSET_ASCII, :utf8 => API.H5T_CSET_UTF8) - -class_propertynames(::Type{StringCreateProperties}) = ( - :char_encoding, - ) +class_propertynames(::Type{StringCreateProperties}) = (:char_encoding,) function class_getproperty(::Type{StringCreateProperties}, p::Properties, name::Symbol) - name === :char_encoding ? get_char_encoding(p) : - class_getproperty(superclass(StringCreateProperties), p, name) + if name === :char_encoding + get_char_encoding(p) + else + class_getproperty(superclass(StringCreateProperties), p, name) + end end -function class_setproperty!(::Type{StringCreateProperties}, p::Properties, name::Symbol, val) - name === :char_encoding ? set_char_encoding!(p, val) : - class_setproperty!(superclass(StringCreateProperties), p, name, val) +function class_setproperty!( + ::Type{StringCreateProperties}, p::Properties, name::Symbol, val +) + if name === :char_encoding + set_char_encoding!(p, val) + else + class_setproperty!(superclass(StringCreateProperties), p, name, val) + end end """ @@ -610,16 +693,20 @@ superclass(::Type{LinkCreateProperties}) = StringCreateProperties @bool_property(create_intermediate_group) -class_propertynames(::Type{LinkCreateProperties}) = ( - :create_intermediate_group, - ) +class_propertynames(::Type{LinkCreateProperties}) = (:create_intermediate_group,) function class_getproperty(::Type{LinkCreateProperties}, p::Properties, name::Symbol) - name === :create_intermediate_group ? get_create_intermediate_group(p) : - class_getproperty(superclass(LinkCreateProperties), p, name) + if name === :create_intermediate_group + get_create_intermediate_group(p) + else + class_getproperty(superclass(LinkCreateProperties), p, name) + end end function class_setproperty!(::Type{LinkCreateProperties}, p::Properties, name::Symbol, val) - name === :create_intermediate_group ? set_create_intermediate_group!(p, val) : - class_setproperty!(superclass(LinkCreateProperties), p, name, val) + if name === :create_intermediate_group + set_create_intermediate_group!(p, val) + else + class_setproperty!(superclass(LinkCreateProperties), p, name, val) + end end """ @@ -636,7 +723,6 @@ that will be closed. 
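+
+For example (an illustrative sketch): `AttributeCreateProperties(; char_encoding=:utf8)`
+requests UTF-8 encoding for the attribute name.
 """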
@propertyclass AttributeCreateProperties API.H5P_ATTRIBUTE_CREATE superclass(::Type{AttributeCreateProperties}) = StringCreateProperties - """ FileAccessProperties(;kws...) FileAccessProperties(f::Function; kws...) @@ -684,19 +770,21 @@ end @tuple_property(alignment) -@enum_property(fclose_degree, - :weak => API.H5F_CLOSE_WEAK, - :semi => API.H5F_CLOSE_SEMI, - :strong => API.H5F_CLOSE_STRONG, - :default => API.H5F_CLOSE_DEFAULT) +@enum_property( + fclose_degree, + :weak => API.H5F_CLOSE_WEAK, + :semi => API.H5F_CLOSE_SEMI, + :strong => API.H5F_CLOSE_STRONG, + :default => API.H5F_CLOSE_DEFAULT +) # getter/setter for libver_bounds libver_bound_to_enum(val::Integer) = val libver_bound_to_enum(val::API.H5F_libver_t) = val function libver_bound_to_enum(val::VersionNumber) - val >= v"1.12" ? API.H5F_LIBVER_V112 : - val >= v"1.10" ? API.H5F_LIBVER_V110 : - val >= v"1.8" ? API.H5F_LIBVER_V18 : + val >= v"1.12" ? API.H5F_LIBVER_V112 : + val >= v"1.10" ? API.H5F_LIBVER_V110 : + val >= v"1.8" ? API.H5F_LIBVER_V18 : throw(ArgumentError("libver_bound must be >= v\"1.8\".")) end function libver_bound_to_enum(val::Symbol) @@ -723,7 +811,6 @@ function set_libver_bounds!(p::Properties, val) API.h5p_set_libver_bounds(p, libver_bound_to_enum(val), libver_bound_to_enum(val)) end - class_propertynames(::Type{FileAccessProperties}) = ( :alignment, :driver, @@ -733,33 +820,64 @@ class_propertynames(::Type{FileAccessProperties}) = ( :file_locking, :libver_bounds, :meta_block_size, - ) +) function class_getproperty(::Type{FileAccessProperties}, p::Properties, name::Symbol) - name === :alignment ? get_alignment(p) : - name === :driver ? Drivers.get_driver(p) : - name === :driver_info ? API.h5p_get_driver_info(p) : # get only - name === :fclose_degree ? get_fclose_degree(p) : - name === :file_locking ? API.h5p_get_file_locking(p) : - name === :libver_bounds ? get_libver_bounds(p) : - name === :meta_block_size ? API.h5p_get_meta_block_size(p) : - # deprecated - name === :fapl_mpio ? (depwarn("The `fapl_mpio` property is deprecated, use `driver=HDF5.Drivers.MPIO(...)` instead.", :fapl_mpio); drv = get_driver(p, MPIO); (drv.comm, drv.info)) : - class_getproperty(superclass(FileAccessProperties), p, name) + if name === :alignment + get_alignment(p) + elseif name === :driver + Drivers.get_driver(p) + elseif name === :driver_info + API.h5p_get_driver_info(p) # get only + elseif name === :fclose_degree + get_fclose_degree(p) + elseif name === :file_locking + API.h5p_get_file_locking(p) + elseif name === :libver_bounds + get_libver_bounds(p) + elseif name === :meta_block_size + API.h5p_get_meta_block_size(p) + # deprecated + elseif name === :fapl_mpio + ( + depwarn( + "The `fapl_mpio` property is deprecated, use `driver=HDF5.Drivers.MPIO(...)` instead.", + :fapl_mpio + ); + drv = get_driver(p, MPIO); + (drv.comm, drv.info) + ) + else + class_getproperty(superclass(FileAccessProperties), p, name) + end end function class_setproperty!(::Type{FileAccessProperties}, p::Properties, name::Symbol, val) - name === :alignment ? set_alignment!(p, val) : - name === :driver ? Drivers.set_driver!(p, val) : - name === :fclose_degree ? set_fclose_degree!(p, val) : - name === :file_locking ? API.h5p_set_file_locking(p, val...) : - name === :libver_bounds ? set_libver_bounds!(p, val) : - name === :meta_block_size ? API.h5p_set_meta_block_size(p, val) : - # deprecated - name === :fapl_mpio ? 
(depwarn("The `fapl_mpio` property is deprecated, use `driver=HDF5.Drivers.MPIO(...)` instead.", :fapl_mpio); p.driver = Drivers.MPIO(val...)) : - class_setproperty!(superclass(FileAccessProperties), p, name, val) + if name === :alignment + set_alignment!(p, val) + elseif name === :driver + Drivers.set_driver!(p, val) + elseif name === :fclose_degree + set_fclose_degree!(p, val) + elseif name === :file_locking + API.h5p_set_file_locking(p, val...) + elseif name === :libver_bounds + set_libver_bounds!(p, val) + elseif name === :meta_block_size + API.h5p_set_meta_block_size(p, val) + # deprecated + elseif name === :fapl_mpio + ( + depwarn( + "The `fapl_mpio` property is deprecated, use `driver=HDF5.Drivers.MPIO(...)` instead.", + :fapl_mpio + ); + p.driver = Drivers.MPIO(val...) + ) + else + class_setproperty!(superclass(FileAccessProperties), p, name, val) + end end - @propertyclass LinkAccessProperties API.H5P_LINK_ACCESS @propertyclass GroupAccessProperties API.H5P_GROUP_ACCESS @@ -798,34 +916,46 @@ See [Dataset Access Properties](https://portal.hdfgroup.org/display/HDF5/Dataset @propertyclass DatasetAccessProperties API.H5P_DATASET_ACCESS superclass(::Type{DatasetAccessProperties}) = LinkAccessProperties -class_propertynames(::Type{DatasetAccessProperties}) = ( - :chunk_cache, - :efile_prefix, - :virtual_prefix, - :virtual_printf_gap, - :virtual_view -) +class_propertynames(::Type{DatasetAccessProperties}) = + (:chunk_cache, :efile_prefix, :virtual_prefix, :virtual_printf_gap, :virtual_view) -@enum_property(virtual_view, +@enum_property( + virtual_view, :first_missing => API.H5D_VDS_FIRST_MISSING, :last_available => API.H5D_VDS_LAST_AVAILABLE ) function class_getproperty(::Type{DatasetAccessProperties}, p::Properties, name::Symbol) - name === :chunk_cache ? API.h5p_get_chunk_cache(p) : - name === :efile_prefix ? API.h5p_get_efile_prefix(p) : - name === :virtual_prefix ? API.h5p_get_virtual_prefix(p) : - name === :virtual_printf_gap ? API.h5p_get_virtual_printf_gap(p) : - name === :virtual_view ? get_virtual_view(p) : - class_getproperty(superclass(DatasetAccessProperties), p, name) -end -function class_setproperty!(::Type{DatasetAccessProperties}, p::Properties, name::Symbol, val) - name === :chunk_cache ? API.h5p_set_chunk_cache(p, val...) : - name === :efile_prefix ? API.h5p_set_efile_prefix(p, val) : - name === :virtual_prefix ? API.h5p_set_virtual_prefix(p, val) : - name === :virtual_printf_gap ? API.h5p_set_virtual_printf_gap(p, val) : - name === :virtual_view ? set_virtual_view!(p, val) : - class_setproperty!(superclass(DatasetAccessProperties), p, name, val) + if name === :chunk_cache + API.h5p_get_chunk_cache(p) + elseif name === :efile_prefix + API.h5p_get_efile_prefix(p) + elseif name === :virtual_prefix + API.h5p_get_virtual_prefix(p) + elseif name === :virtual_printf_gap + API.h5p_get_virtual_printf_gap(p) + elseif name === :virtual_view + get_virtual_view(p) + else + class_getproperty(superclass(DatasetAccessProperties), p, name) + end +end +function class_setproperty!( + ::Type{DatasetAccessProperties}, p::Properties, name::Symbol, val +) + if name === :chunk_cache + API.h5p_set_chunk_cache(p, val...) 
+ elseif name === :efile_prefix + API.h5p_set_efile_prefix(p, val) + elseif name === :virtual_prefix + API.h5p_set_virtual_prefix(p, val) + elseif name === :virtual_printf_gap + API.h5p_set_virtual_printf_gap(p, val) + elseif name === :virtual_view + set_virtual_view!(p, val) + else + class_setproperty!(superclass(DatasetAccessProperties), p, name, val) + end end @propertyclass AttributeAccessProperties API.H5P_ATTRIBUTE_ACCESS @@ -846,26 +976,33 @@ that will be closed. """ @propertyclass DatasetTransferProperties API.H5P_DATASET_XFER -@enum_property(dxpl_mpio, - :independent => API.H5FD_MPIO_INDEPENDENT, - :collective => API.H5FD_MPIO_COLLECTIVE) +@enum_property( + dxpl_mpio, + :independent => API.H5FD_MPIO_INDEPENDENT, + :collective => API.H5FD_MPIO_COLLECTIVE +) -class_propertynames(::Type{DatasetTransferProperties}) = ( - :dxpl_mpio, - ) +class_propertynames(::Type{DatasetTransferProperties}) = (:dxpl_mpio,) function class_getproperty(::Type{DatasetTransferProperties}, p::Properties, name::Symbol) - name === :dxpl_mpio ? get_dxpl_mpio(p) : - class_getproperty(superclass(DatasetTransferProperties), p, name) + if name === :dxpl_mpio + get_dxpl_mpio(p) + else + class_getproperty(superclass(DatasetTransferProperties), p, name) + end end -function class_setproperty!(::Type{DatasetTransferProperties}, p::Properties, name::Symbol, val) - name === :dxpl_mpio ? set_dxpl_mpio!(p, val) : - class_setproperty!(superclass(DatasetTransferProperties), p, name, val) +function class_setproperty!( + ::Type{DatasetTransferProperties}, p::Properties, name::Symbol, val +) + if name === :dxpl_mpio + set_dxpl_mpio!(p, val) + else + class_setproperty!(superclass(DatasetTransferProperties), p, name, val) + end end @propertyclass FileMountProperties API.H5P_FILE_MOUNT @propertyclass ObjectCopyProperties API.H5P_OBJECT_COPY - const DEFAULT_PROPERTIES = GenericProperties() # These properties are initialized in __init__() const ASCII_LINK_PROPERTIES = LinkCreateProperties() diff --git a/src/readwrite.jl b/src/readwrite.jl index 4de283742..61d7ee0d3 100644 --- a/src/readwrite.jl +++ b/src/readwrite.jl @@ -19,7 +19,9 @@ function Base.read(parent::Union{File,Group}, name::AbstractString; pv...) val end -function Base.read(parent::Union{File,Group}, name_type_pair::Pair{<:AbstractString,DataType}; pv...) +function Base.read( + parent::Union{File,Group}, name_type_pair::Pair{<:AbstractString,DataType}; pv... +) obj = getindex(parent, name_type_pair[1]; pv...) val = read(obj, name_type_pair[2]) close(obj) @@ -47,7 +49,7 @@ function Base.getindex(obj::DatasetOrAttribute, I...) return val end -function Base.read(obj::DatasetOrAttribute, ::Type{T}, I...) where T +function Base.read(obj::DatasetOrAttribute, ::Type{T}, I...) where {T} dtype = datatype(obj) val = generic_read(obj, dtype, T, I...) close(dtype) @@ -59,7 +61,7 @@ end function Base.read(obj::DatasetOrAttribute, ::Type{String}, I...) dtype = datatype(obj) T = get_jl_type(dtype) - T <: Union{Cstring, FixedString} || error(name(obj), " cannot be read as type `String`") + T <: Union{Cstring,FixedString} || error(name(obj), " cannot be read as type `String`") val = generic_read(obj, dtype, T, I...) close(dtype) return val @@ -71,7 +73,9 @@ end Copy [part of] a HDF5 dataset or attribute to a preallocated output buffer. The output buffer must be convertible to a pointer and have a contiguous layout. """ -function Base.copyto!(output_buffer::AbstractArray{T}, obj::DatasetOrAttribute, I...) 
where T +function Base.copyto!( + output_buffer::AbstractArray{T}, obj::DatasetOrAttribute, I... +) where {T} dtype = datatype(obj) val = nothing try @@ -83,11 +87,18 @@ function Base.copyto!(output_buffer::AbstractArray{T}, obj::DatasetOrAttribute, end # Special handling for reading OPAQUE datasets and attributes -function generic_read!(buf::Matrix{UInt8}, obj::DatasetOrAttribute, filetype::Datatype, ::Type{Opaque}) +function generic_read!( + buf::Matrix{UInt8}, obj::DatasetOrAttribute, filetype::Datatype, ::Type{Opaque} +) generic_read(obj, filetype, Opaque, buf) end -function generic_read(obj::DatasetOrAttribute, filetype::Datatype, ::Type{Opaque}, buf::Union{Matrix{UInt8}, Nothing} = nothing) - sz = size(obj) +function generic_read( + obj::DatasetOrAttribute, + filetype::Datatype, + ::Type{Opaque}, + buf::Union{Matrix{UInt8},Nothing}=nothing +) + sz = size(obj) if isnothing(buf) buf = Matrix{UInt8}(undef, sizeof(filetype), prod(sz)) end @@ -102,21 +113,33 @@ function generic_read(obj::DatasetOrAttribute, filetype::Datatype, ::Type{Opaque data = vec(buf) else # array of opaque objects - data = reshape([buf[:,i] for i in 1:prod(sz)], sz...) + data = reshape([buf[:, i] for i in 1:prod(sz)], sz...) end return Opaque(data, tag) end # generic read function -function generic_read!(buf::Union{AbstractMatrix{UInt8}, AbstractArray{T}}, obj::DatasetOrAttribute, filetype::Datatype, ::Type{T}, I...) where T +function generic_read!( + buf::Union{AbstractMatrix{UInt8},AbstractArray{T}}, + obj::DatasetOrAttribute, + filetype::Datatype, + ::Type{T}, + I... +) where {T} return _generic_read(obj, filetype, T, buf, I...) end -function generic_read(obj::DatasetOrAttribute, filetype::Datatype, ::Type{T}, I...) where T +function generic_read( + obj::DatasetOrAttribute, filetype::Datatype, ::Type{T}, I... +) where {T} return _generic_read(obj, filetype, T, nothing, I...) end -function _generic_read(obj::DatasetOrAttribute, filetype::Datatype, ::Type{T}, - buf::Union{AbstractMatrix{UInt8}, AbstractArray{T}, Nothing}, I...) where T - +function _generic_read( + obj::DatasetOrAttribute, + filetype::Datatype, + ::Type{T}, + buf::Union{AbstractMatrix{UInt8},AbstractArray{T},Nothing}, + I... +) where {T} sz, scalar, dspace = _size_of_buffer(obj, I) if isempty(sz) @@ -128,8 +151,9 @@ function _generic_read(obj::DatasetOrAttribute, filetype::Datatype, ::Type{T}, if isnothing(buf) buf = _normalized_buffer(T, sz) else - sizeof(buf) != prod(sz)*sizeof(T) && - error("Provided array buffer of size, $(size(buf)), and element type, $(eltype(buf)), does not match the dataset of size, $sz, and type, $T") + sizeof(buf) != prod(sz) * sizeof(T) && error( + "Provided array buffer of size, $(size(buf)), and element type, $(eltype(buf)), does not match the dataset of size, $sz, and type, $T" + ) end catch err close(dspace) @@ -168,7 +192,6 @@ function _generic_read(obj::DatasetOrAttribute, filetype::Datatype, ::Type{T}, end end - """ similar(obj::DatasetOrAttribute, [::Type{T}], [dims::Integer...]; normalize = true) @@ -177,11 +200,8 @@ Return a `Array{T}` or `Matrix{UInt8}` to that can contain [part of] the dataset The `normalize` keyword will normalize the buffer for string and array datatypes. 
""" function Base.similar( - obj::DatasetOrAttribute, - ::Type{T}, - dims::Dims; - normalize::Bool = true -) where T + obj::DatasetOrAttribute, ::Type{T}, dims::Dims; normalize::Bool=true +) where {T} filetype = datatype(obj) try return similar(obj, filetype, T, dims; normalize=normalize) @@ -190,14 +210,11 @@ function Base.similar( end end Base.similar( - obj::DatasetOrAttribute, - ::Type{T}, - dims::Integer...; - normalize::Bool = true -) where T = similar(obj, T, Int.(dims); normalize=normalize) + obj::DatasetOrAttribute, ::Type{T}, dims::Integer...; normalize::Bool=true +) where {T} = similar(obj, T, Int.(dims); normalize=normalize) # Base.similar without specifying the Julia type -function Base.similar(obj::DatasetOrAttribute, dims::Dims; normalize::Bool = true) +function Base.similar(obj::DatasetOrAttribute, dims::Dims; normalize::Bool=true) filetype = datatype(obj) try T = get_jl_type(filetype) @@ -206,27 +223,22 @@ function Base.similar(obj::DatasetOrAttribute, dims::Dims; normalize::Bool = tru close(filetype) end end -Base.similar( - obj::DatasetOrAttribute, - dims::Integer...; - normalize::Bool = true -) = similar(obj, Int.(dims); normalize=normalize) +Base.similar(obj::DatasetOrAttribute, dims::Integer...; normalize::Bool=true) = + similar(obj, Int.(dims); normalize=normalize) # Opaque types -function Base.similar(obj::DatasetOrAttribute, filetype::Datatype, ::Type{Opaque}; normalize::Bool = true) +function Base.similar( + obj::DatasetOrAttribute, filetype::Datatype, ::Type{Opaque}; normalize::Bool=true +) # normalize keyword for consistency, but it is ignored for Opaque - sz = size(obj) + sz = size(obj) return Matrix{UInt8}(undef, sizeof(filetype), prod(sz)) end # Undocumented Base.similar signature allowing filetype to be specified function Base.similar( - obj::DatasetOrAttribute, - filetype::Datatype, - ::Type{T}, - dims::Dims; - normalize::Bool = true -) where T + obj::DatasetOrAttribute, filetype::Datatype, ::Type{T}, dims::Dims; normalize::Bool=true +) where {T} # We are reusing code that expect indices I = Base.OneTo.(dims) sz, scalar, dspace = _size_of_buffer(obj, I) @@ -237,7 +249,7 @@ function Base.similar( if normalize && do_normalize(T) buf = reshape(normalize_types(T, buf), sz) end - + return buf finally close(dspace) @@ -249,8 +261,8 @@ Base.similar( filetype::Datatype, ::Type{T}, dims::Integer...; - normalize::Bool = true -) where T = similar(obj, filetype, T, Int.(dims); normalize=normalize) + normalize::Bool=true +) where {T} = similar(obj, filetype, T, Int.(dims); normalize=normalize) # Utilities used in Base.similar implementation @@ -261,7 +273,7 @@ This is a utility function originall from generic_read. It gets the native memory type for the system based on filetype, and checks if the size matches. =# -@inline function _memtype(filetype::Datatype, ::Type{T}) where T +@inline function _memtype(filetype::Datatype, ::Type{T}) where {T} !isconcretetype(T) && error("type $T is not concrete") # padded layout in memory @@ -297,11 +309,11 @@ create in order to hold the contents of a Dataset or Attribute. 
* `dspace`, hyper =# @inline function _size_of_buffer( - obj::DatasetOrAttribute, - I::Tuple = (), - dspace::Dataspace = dataspace(obj) + obj::DatasetOrAttribute, I::Tuple=(), dspace::Dataspace=dataspace(obj) ) - !isempty(I) && obj isa Attribute && error("HDF5 attributes do not support hyperslab selections") + !isempty(I) && + obj isa Attribute && + error("HDF5 attributes do not support hyperslab selections") stype = API.h5s_get_simple_extent_type(dspace) @@ -339,7 +351,7 @@ end Return a Matrix{UInt8} for a normalized type or `Array{T}` for a regular type. See `do_normalize` in typeconversions.jl. =# -@inline function _normalized_buffer(::Type{T}, sz::NTuple{N, Int}) where {T, N} +@inline function _normalized_buffer(::Type{T}, sz::NTuple{N,Int}) where {T,N} if do_normalize(T) # The entire dataset is read into in a buffer matrix where the first dimension at # any stage of normalization is the bytes for a single element of type `T`, and @@ -351,4 +363,3 @@ See `do_normalize` in typeconversions.jl. return buf end - diff --git a/src/show.jl b/src/show.jl index 1f2fa9f06..03f523365 100644 --- a/src/show.jl +++ b/src/show.jl @@ -1,10 +1,10 @@ function Base.show(io::IO, fid::File) if isvalid(fid) - intent = API.h5f_get_intent(fid) + intent = API.h5f_get_intent(fid) RW_MASK = API.H5F_ACC_RDONLY | API.H5F_ACC_RDWR SWMR_MASK = API.H5F_ACC_SWMR_READ | API.H5F_ACC_SWMR_WRITE - rw = (intent & RW_MASK) == API.H5F_ACC_RDONLY ? "(read-only" : "(read-write" - swmr = (intent & SWMR_MASK) != 0 ? ", swmr) " : ") " + rw = (intent & RW_MASK) == API.H5F_ACC_RDONLY ? "(read-only" : "(read-write" + swmr = (intent & SWMR_MASK) != 0 ? ", swmr) " : ") " print(io, "HDF5.File: ", rw, swmr, fid.filename) else print(io, "HDF5.File: (closed) ", fid.filename) @@ -36,7 +36,7 @@ function Base.show(io::IO, prop::Properties) # or always well-defined (e.g. 
chunk if layout != :chunked, dxpl_mpio if no MPI) try val = getproperty(prop, name) - print(io, "\n ", rpad(name, 15), " = ", repr(val),",") + print(io, "\n ", rpad(name, 15), " = ", repr(val), ",") catch e end end @@ -46,7 +46,16 @@ end function Base.show(io::IO, dset::Dataset) if isvalid(dset) - print(io, "HDF5.Dataset: ", name(dset), " (file: ", dset.file.filename, " xfer_mode: ", dset.xfer.id, ")") + print( + io, + "HDF5.Dataset: ", + name(dset), + " (file: ", + dset.file.filename, + " xfer_mode: ", + dset.xfer.id, + ")" + ) else print(io, "HDF5.Dataset: (invalid)") end @@ -72,8 +81,7 @@ function Base.summary(io::IO, attrdict::AttributeDict) end end - -const ENDIAN_DICT = Dict( +const ENDIAN_DICT = Dict( API.H5T_ORDER_LE => "little endian byte order", API.H5T_ORDER_BE => "big endian byte order", API.H5T_ORDER_VAX => "vax mixed endian byte order", @@ -81,12 +89,11 @@ const ENDIAN_DICT = Dict( API.H5T_ORDER_NONE => "no particular byte order", ) - function Base.show(io::IO, dtype::Datatype) print(io, "HDF5.Datatype: ") if isvalid(dtype) API.h5t_committed(dtype) && print(io, name(dtype), " ") - buffer = IOBuffer(; sizehint = 18) + buffer = IOBuffer(; sizehint=18) print(buffer, API.h5lt_dtype_to_text(dtype)) str = String(take!(buffer)) if str == "undefined integer" @@ -94,7 +101,7 @@ function Base.show(io::IO, dtype::Datatype) println(io, " size: ", API.h5t_get_size(dtype), " bytes") println(io, " precision: ", API.h5t_get_precision(dtype), " bits") println(io, " offset: ", API.h5t_get_offset(dtype), " bits") - print(io, " order: ", ENDIAN_DICT[API.h5t_get_order(dtype)]) + print(io, " order: ", ENDIAN_DICT[API.h5t_get_order(dtype)]) else print(io, str) end @@ -116,16 +123,16 @@ end function Base.show(io::IO, dspace::Dataspace) if !isvalid(dspace) print(io, "HDF5.Dataspace: (invalid)") - return + return nothing end print(io, "HDF5.Dataspace: ") type = API.h5s_get_simple_extent_type(dspace) if type == API.H5S_NULL print(io, "H5S_NULL") - return + return nothing elseif type == API.H5S_SCALAR print(io, "H5S_SCALAR") - return + return nothing end # otherwise type == API.H5S_SIMPLE sz, maxsz = get_extent_dims(dspace) @@ -136,7 +143,7 @@ function Base.show(io::IO, dspace::Dataspace) print(io, "(") for ii in 1:ndims s, d, l = start[ii], stride[ii], count[ii] - print(io, range(s + 1, length = l, step = d == 1 ? nothing : d)) + print(io, range(s + 1; length=l, step=d == 1 ? nothing : d)) ii != ndims && print(io, ", ") end print(io, ") / (") @@ -178,7 +185,9 @@ Maximum number of children to show at each node. """ const SHOW_TREE_MAX_CHILDREN = Ref{Int}(50) -function Base.show(io::IO, ::MIME"text/plain", obj::Union{File,Group,Dataset,Attributes,Attribute}) +function Base.show( + io::IO, ::MIME"text/plain", obj::Union{File,Group,Dataset,Attributes,Attribute} +) if get(io, :compact, false)::Bool show(io, obj) else @@ -192,23 +201,28 @@ _tree_icon(::Type{Dataset}) = SHOW_TREE_ICONS[] ? "🔢" : "[D]" _tree_icon(::Type{Datatype}) = SHOW_TREE_ICONS[] ? "📄" : "[T]" _tree_icon(::Type{File}) = SHOW_TREE_ICONS[] ? "🗂️" : "[F]" _tree_icon(::Type) = SHOW_TREE_ICONS[] ? 
"❓" : "[?]" -_tree_icon(obj) = _tree_icon(typeof(obj)) -_tree_icon(obj::Attributes) = _tree_icon(obj.parent) +_tree_icon(obj) = _tree_icon(typeof(obj)) +_tree_icon(obj::Attributes) = _tree_icon(obj.parent) _tree_head(io::IO, obj) = print(io, _tree_icon(obj), " ", obj) -_tree_head(io::IO, obj::Datatype) = print(io, _tree_icon(obj), " HDF5.Datatype: ", name(obj)) +_tree_head(io::IO, obj::Datatype) = + print(io, _tree_icon(obj), " HDF5.Datatype: ", name(obj)) _tree_count(parent::Union{File,Group}, attributes::Bool) = length(parent) + (attributes ? length(HDF5.attrs(parent)) : 0) -_tree_count(parent::Dataset, attributes::Bool) = - attributes ? length(HDF5.attrs(parent)) : 0 +_tree_count(parent::Dataset, attributes::Bool) = attributes ? length(HDF5.attrs(parent)) : 0 _tree_count(parent::Attributes, _::Bool) = length(parent) _tree_count(parent::Union{Attribute,Datatype}, _::Bool) = 0 -function _show_tree(io::IO, obj::Union{File,Group,Dataset,Datatype,Attributes,Attribute}, indent::String=""; - attributes::Bool = true, depth::Int = 1) +function _show_tree( + io::IO, + obj::Union{File,Group,Dataset,Datatype,Attributes,Attribute}, + indent::String=""; + attributes::Bool=true, + depth::Int=1 +) isempty(indent) && _tree_head(io, obj) - isvalid(obj) || return + isvalid(obj) || return nothing INDENT = " " PIPE = "│ " @@ -249,7 +263,7 @@ function _show_tree(io::IO, obj::Union{File,Group,Dataset,Datatype,Attributes,At end end - typeof(obj) <: Union{File, Group} || return nothing + typeof(obj) <: Union{File,Group} || return nothing API.h5l_iterate(obj, idx_type(obj), order(obj)) do loc_id, cname, _ depth_check() && return API.herr_t(1) @@ -261,7 +275,7 @@ function _show_tree(io::IO, obj::Union{File,Group,Dataset,Datatype,Attributes,At islast = counter == nchildren print(io, "\n", indent, islast ? ELBOW : TEE, icon, " ", name) nextindent = indent * (islast ? 
INDENT : PIPE) - _show_tree(io, child, nextindent; attributes = attributes, depth = depth + 1) + _show_tree(io, child, nextindent; attributes=attributes, depth=depth + 1) close(child) return API.herr_t(0) diff --git a/src/typeconversions.jl b/src/typeconversions.jl index 2553f52ff..87b1119a4 100644 --- a/src/typeconversions.jl +++ b/src/typeconversions.jl @@ -3,12 +3,12 @@ abstract type CharType <: AbstractString end struct ASCIIChar <: CharType - c::UInt8 + c::UInt8 end Base.length(c::ASCIIChar) = 1 struct UTF8Char <: CharType - c::UInt8 + c::UInt8 end Base.length(c::UTF8Char) = 1 @@ -20,7 +20,6 @@ cset(::Type{<:AbstractString}) = API.H5T_CSET_UTF8 cset(::Type{UTF8Char}) = API.H5T_CSET_UTF8 cset(::Type{ASCIIChar}) = API.H5T_CSET_ASCII - function unpad(s::String, pad::Integer)::String if pad == API.H5T_STR_NULLTERM # null-terminated ind = findfirst(isequal('\0'), s) @@ -35,23 +34,22 @@ function unpad(s::String, pad::Integer)::String end unpad(s, pad::Integer) = unpad(String(s), pad) - # VLEN objects struct VLen{T} - data::Array + data::Array end VLen(strs::Array{S}) where {S<:String} = VLen{chartype(S)}(strs) VLen(A::Array{Array{T}}) where {T<:ScalarType} = VLen{T}(A) VLen(A::Array{Array{T,N}}) where {T<:ScalarType,N} = VLen{T}(A) function Base.cconvert(::Type{Ptr{Cvoid}}, v::VLen) - len = length(v.data) - h = Vector{API.hvl_t}(undef, len) - for ii in 1:len - d = v.data[ii] - p = unsafe_convert(Ptr{UInt8}, d) - h[ii] = API.hvl_t(length(d), p) - end - return h + len = length(v.data) + h = Vector{API.hvl_t}(undef, len) + for ii in 1:len + d = v.data[ii] + p = unsafe_convert(Ptr{UInt8}, d) + h[ii] = API.hvl_t(length(d), p) + end + return h end datatype(A::VLen{T}) where {T<:ScalarType} = Datatype(API.h5t_vlen_create(hdf5_type_id(T))) @@ -64,8 +62,8 @@ end # Opaque types struct Opaque - data - tag::String + data + tag::String end # An empty array type @@ -110,44 +108,59 @@ struct VariableArray{T} len::Csize_t p::Ptr{Cvoid} end -Base.eltype(::Type{VariableArray{T}}) where T = T +Base.eltype(::Type{VariableArray{T}}) where {T} = T ## Conversion between Julia types and HDF5 atomic types -hdf5_type_id(::Type{Bool}) = API.H5T_NATIVE_B8 -hdf5_type_id(::Type{Int8}) = API.H5T_NATIVE_INT8 -hdf5_type_id(::Type{UInt8}) = API.H5T_NATIVE_UINT8 -hdf5_type_id(::Type{Int16}) = API.H5T_NATIVE_INT16 -hdf5_type_id(::Type{UInt16}) = API.H5T_NATIVE_UINT16 -hdf5_type_id(::Type{Int32}) = API.H5T_NATIVE_INT32 -hdf5_type_id(::Type{UInt32}) = API.H5T_NATIVE_UINT32 -hdf5_type_id(::Type{Int64}) = API.H5T_NATIVE_INT64 -hdf5_type_id(::Type{UInt64}) = API.H5T_NATIVE_UINT64 -hdf5_type_id(::Type{Float32}) = API.H5T_NATIVE_FLOAT -hdf5_type_id(::Type{Float64}) = API.H5T_NATIVE_DOUBLE +hdf5_type_id(::Type{Bool}) = API.H5T_NATIVE_B8 +hdf5_type_id(::Type{Int8}) = API.H5T_NATIVE_INT8 +hdf5_type_id(::Type{UInt8}) = API.H5T_NATIVE_UINT8 +hdf5_type_id(::Type{Int16}) = API.H5T_NATIVE_INT16 +hdf5_type_id(::Type{UInt16}) = API.H5T_NATIVE_UINT16 +hdf5_type_id(::Type{Int32}) = API.H5T_NATIVE_INT32 +hdf5_type_id(::Type{UInt32}) = API.H5T_NATIVE_UINT32 +hdf5_type_id(::Type{Int64}) = API.H5T_NATIVE_INT64 +hdf5_type_id(::Type{UInt64}) = API.H5T_NATIVE_UINT64 +hdf5_type_id(::Type{Float32}) = API.H5T_NATIVE_FLOAT +hdf5_type_id(::Type{Float64}) = API.H5T_NATIVE_DOUBLE hdf5_type_id(::Type{Reference}) = API.H5T_STD_REF_OBJ hdf5_type_id(::Type{<:AbstractString}) = API.H5T_C_S1 - # It's not safe to use particular id codes because these can change, so we use characteristics of the type. 
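 # A minimal sketch of the mapping implemented just below (illustrative only;
 # the H5T_* class and H5T_SGN_* signedness constants come from HDF5.API):
 #
 #   _hdf5_type_map(API.H5T_INTEGER, API.H5T_SGN_2, 4)     # -> Int32
 #   _hdf5_type_map(API.H5T_INTEGER, API.H5T_SGN_NONE, 8)  # -> UInt64
 #   _hdf5_type_map(API.H5T_FLOAT, nothing, 8)             # -> Float64
 #
 # Any other (class, signedness, size) combination throws a KeyError.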
function _hdf5_type_map(class_id, is_signed, native_size) if class_id == API.H5T_INTEGER if is_signed == API.H5T_SGN_2 - return native_size == 1 ? Int8 : - native_size == 2 ? Int16 : - native_size == 4 ? Int32 : - native_size == 8 ? Int64 : - throw(KeyError((class_id, is_signed, native_size))) + return if native_size == 1 + Int8 + elseif native_size == 2 + Int16 + elseif native_size == 4 + Int32 + elseif native_size == 8 + Int64 + else + throw(KeyError((class_id, is_signed, native_size))) + end else - return native_size == 1 ? UInt8 : - native_size == 2 ? UInt16 : - native_size == 4 ? UInt32 : - native_size == 8 ? UInt64 : - throw(KeyError((class_id, is_signed, native_size))) + return if native_size == 1 + UInt8 + elseif native_size == 2 + UInt16 + elseif native_size == 4 + UInt32 + elseif native_size == 8 + UInt64 + else + throw(KeyError((class_id, is_signed, native_size))) + end end else - return native_size == 4 ? Float32 : - native_size == 8 ? Float64 : - throw(KeyError((class_id, is_signed, native_size))) + return if native_size == 4 + Float32 + elseif native_size == 8 + Float64 + else + throw(KeyError((class_id, is_signed, native_size))) + end end end @@ -156,18 +169,20 @@ const COMPLEX_SUPPORT = Ref(true) const COMPLEX_FIELD_NAMES = Ref(("r", "i")) enable_complex_support() = COMPLEX_SUPPORT[] = true disable_complex_support() = COMPLEX_SUPPORT[] = false -set_complex_field_names(real::AbstractString, imag::AbstractString) = COMPLEX_FIELD_NAMES[] = ((real, imag)) +set_complex_field_names(real::AbstractString, imag::AbstractString) = + COMPLEX_FIELD_NAMES[] = ((real, imag)) # Create a datatype from in-memory types datatype(x::ScalarType) = Datatype(hdf5_type_id(typeof(x)), false) datatype(::Type{T}) where {T<:ScalarType} = Datatype(hdf5_type_id(T), false) datatype(A::AbstractArray{T}) where {T<:ScalarType} = Datatype(hdf5_type_id(T), false) function datatype(::Type{Complex{T}}) where {T<:ScalarType} - COMPLEX_SUPPORT[] || error("complex support disabled. call HDF5.enable_complex_support() to enable") - dtype = API.h5t_create(API.H5T_COMPOUND, 2*sizeof(T)) - API.h5t_insert(dtype, COMPLEX_FIELD_NAMES[][1], 0, hdf5_type_id(T)) - API.h5t_insert(dtype, COMPLEX_FIELD_NAMES[][2], sizeof(T), hdf5_type_id(T)) - return Datatype(dtype) + COMPLEX_SUPPORT[] || + error("complex support disabled. 
call HDF5.enable_complex_support() to enable") + dtype = API.h5t_create(API.H5T_COMPOUND, 2 * sizeof(T)) + API.h5t_insert(dtype, COMPLEX_FIELD_NAMES[][1], 0, hdf5_type_id(T)) + API.h5t_insert(dtype, COMPLEX_FIELD_NAMES[][2], sizeof(T), hdf5_type_id(T)) + return Datatype(dtype) end datatype(x::Complex{<:ScalarType}) = datatype(typeof(x)) datatype(A::AbstractArray{Complex{T}}) where {T<:ScalarType} = datatype(eltype(A)) @@ -185,183 +200,194 @@ function datatype(::Array{S}) where {S<:AbstractString} Datatype(type_id) end - # conversions to Julia types function get_jl_type(obj_type::Datatype) - class_id = API.h5t_get_class(obj_type) - if class_id == API.H5T_OPAQUE - return Opaque - else - return get_mem_compatible_jl_type(obj_type) - end + class_id = API.h5t_get_class(obj_type) + if class_id == API.H5T_OPAQUE + return Opaque + else + return get_mem_compatible_jl_type(obj_type) + end end function get_jl_type(obj) - dtype = datatype(obj) - try - return get_jl_type(dtype) - finally - close(dtype) - end + dtype = datatype(obj) + try + return get_jl_type(dtype) + finally + close(dtype) + end end Base.eltype(dset::Union{Dataset,Attribute}) = get_jl_type(dset) function get_mem_compatible_jl_type(obj_type::Datatype) - class_id = API.h5t_get_class(obj_type) - if class_id == API.H5T_STRING - if API.h5t_is_variable_str(obj_type) - return Cstring - else - N = sizeof(obj_type) - PAD = API.h5t_get_strpad(obj_type) - return FixedString{N,PAD} - end - elseif class_id == API.H5T_INTEGER || class_id == API.H5T_FLOAT - native_type = API.h5t_get_native_type(obj_type) - try - native_size = API.h5t_get_size(native_type) - if class_id == API.H5T_INTEGER - is_signed = API.h5t_get_sign(native_type) - else - is_signed = nothing - end - return _hdf5_type_map(class_id, is_signed, native_size) - finally - API.h5t_close(native_type) - end - elseif class_id == API.H5T_BITFIELD - return Bool - elseif class_id == API.H5T_ENUM - super_type = API.h5t_get_super(obj_type) - try - native_type = API.h5t_get_native_type(super_type) - try - native_size = API.h5t_get_size(native_type) - is_signed = API.h5t_get_sign(native_type) - return _hdf5_type_map(API.H5T_INTEGER, is_signed, native_size) - finally - API.h5t_close(native_type) - end - finally - API.h5t_close(super_type) - end - elseif class_id == API.H5T_REFERENCE - # TODO update to use version 1.12 reference functions/types - return Reference - elseif class_id == API.H5T_OPAQUE - # TODO: opaque objects should get their own fixed-size data type; punning like - # this permits recursively reading (i.e. compound data type containing an - # opaque field). Requires figuring out what to do about the tag... 
- len = Int(API.h5t_get_size(obj_type)) - return FixedArray{UInt8, (len,), len} - elseif class_id == API.H5T_VLEN - superid = API.h5t_get_super(obj_type) - return VariableArray{get_mem_compatible_jl_type(Datatype(superid))} - elseif class_id == API.H5T_COMPOUND - N = API.h5t_get_nmembers(obj_type) - - membernames = ntuple(N) do i - API.h5t_get_member_name(obj_type, i-1) - end + class_id = API.h5t_get_class(obj_type) + if class_id == API.H5T_STRING + if API.h5t_is_variable_str(obj_type) + return Cstring + else + N = sizeof(obj_type) + PAD = API.h5t_get_strpad(obj_type) + return FixedString{N,PAD} + end + elseif class_id == API.H5T_INTEGER || class_id == API.H5T_FLOAT + native_type = API.h5t_get_native_type(obj_type) + try + native_size = API.h5t_get_size(native_type) + if class_id == API.H5T_INTEGER + is_signed = API.h5t_get_sign(native_type) + else + is_signed = nothing + end + return _hdf5_type_map(class_id, is_signed, native_size) + finally + API.h5t_close(native_type) + end + elseif class_id == API.H5T_BITFIELD + return Bool + elseif class_id == API.H5T_ENUM + super_type = API.h5t_get_super(obj_type) + try + native_type = API.h5t_get_native_type(super_type) + try + native_size = API.h5t_get_size(native_type) + is_signed = API.h5t_get_sign(native_type) + return _hdf5_type_map(API.H5T_INTEGER, is_signed, native_size) + finally + API.h5t_close(native_type) + end + finally + API.h5t_close(super_type) + end + elseif class_id == API.H5T_REFERENCE + # TODO update to use version 1.12 reference functions/types + return Reference + elseif class_id == API.H5T_OPAQUE + # TODO: opaque objects should get their own fixed-size data type; punning like + # this permits recursively reading (i.e. compound data type containing an + # opaque field). Requires figuring out what to do about the tag... 
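+        # e.g. an opaque datatype of size 16 is presented as
+        # FixedArray{UInt8,(16,),16}, i.e. the raw bytes of each element;
+        # the opaque tag itself is not preserved along this path.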
+ len = Int(API.h5t_get_size(obj_type)) + return FixedArray{UInt8,(len,),len} + elseif class_id == API.H5T_VLEN + superid = API.h5t_get_super(obj_type) + return VariableArray{get_mem_compatible_jl_type(Datatype(superid))} + elseif class_id == API.H5T_COMPOUND + N = API.h5t_get_nmembers(obj_type) + + membernames = ntuple(N) do i + API.h5t_get_member_name(obj_type, i - 1) + end - membertypes = ntuple(N) do i - dtype = Datatype(API.h5t_get_member_type(obj_type, i-1)) - return get_mem_compatible_jl_type(dtype) - end + membertypes = ntuple(N) do i + dtype = Datatype(API.h5t_get_member_type(obj_type, i - 1)) + return get_mem_compatible_jl_type(dtype) + end - # check if should be interpreted as complex - iscomplex = COMPLEX_SUPPORT[] && - N == 2 && - (membernames == COMPLEX_FIELD_NAMES[]) && - (membertypes[1] == membertypes[2]) && - (membertypes[1] <: ScalarType) + # check if should be interpreted as complex + iscomplex = + COMPLEX_SUPPORT[] && + N == 2 && + (membernames == COMPLEX_FIELD_NAMES[]) && + (membertypes[1] == membertypes[2]) && + (membertypes[1] <: ScalarType) - if iscomplex - return Complex{membertypes[1]} - else - return NamedTuple{Symbol.(membernames), Tuple{membertypes...}} - end - elseif class_id == API.H5T_ARRAY - dims = API.h5t_get_array_dims(obj_type) - nd = length(dims) - eltyp = Datatype(API.h5t_get_super(obj_type)) - elT = get_mem_compatible_jl_type(eltyp) - dimsizes = ntuple(i -> Int(dims[nd-i+1]), nd) # reverse order - return FixedArray{elT, dimsizes, prod(dimsizes)} - end - error("Class id ", class_id, " is not yet supported") + if iscomplex + return Complex{membertypes[1]} + else + return NamedTuple{Symbol.(membernames),Tuple{membertypes...}} + end + elseif class_id == API.H5T_ARRAY + dims = API.h5t_get_array_dims(obj_type) + nd = length(dims) + eltyp = Datatype(API.h5t_get_super(obj_type)) + elT = get_mem_compatible_jl_type(eltyp) + dimsizes = ntuple(i -> Int(dims[nd - i + 1]), nd) # reverse order + return FixedArray{elT,dimsizes,prod(dimsizes)} + end + error("Class id ", class_id, " is not yet supported") end # convert special types to native julia types function normalize_types(::Type{T}, buf::AbstractMatrix{UInt8}) where {T} - # First dimension spans bytes of a single element of type T --- (recursively) normalize - # each range of bytes to final type, returning vector of normalized data. - return [_normalize_types(T, view(buf, :, ind)) for ind in axes(buf, 2)] + # First dimension spans bytes of a single element of type T --- (recursively) normalize + # each range of bytes to final type, returning vector of normalized data. + return [_normalize_types(T, view(buf, :, ind)) for ind in axes(buf, 2)] end # high-level description which should always work --- here, the buffer contains the bytes # for exactly 1 element of an object of type T, so reinterpret the `UInt8` vector as a # length-1 array of type `T` and extract the (only) element. 
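 # An illustrative sketch, assuming a little-endian host (the common case):
 #
 #   _typed_load(Int32, UInt8[0x01, 0x00, 0x00, 0x00])  # -> Int32(1)
 #   _typed_load(Float64, reinterpret(UInt8, [1.5]))    # -> 1.5
 #
 # The specialized method below instead memcpy's the bytes into a Ref{T},
 # avoiding the ReinterpretArray wrapper for plain contiguous UInt8 buffers.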
function _typed_load(::Type{T}, buf::AbstractVector{UInt8}) where {T} - return @inbounds reinterpret(T, buf)[1] + return @inbounds reinterpret(T, buf)[1] end # fast-path for common concrete types with simple layout (which should be nearly all cases) -function _typed_load(::Type{T}, buf::V) where {T, V <: Union{Vector{UInt8}, Base.FastContiguousSubArray{UInt8,1}}} - dest = Ref{T}() - GC.@preserve dest buf Base._memcpy!(unsafe_convert(Ptr{Cvoid}, dest), pointer(buf), sizeof(T)) - return dest[] - # TODO: The above can maybe be replaced with - # return GC.@preserve buf unsafe_load(convert(Ptr{t}, pointer(buf))) - # dependent on data elements being properly aligned for all datatypes, on all - # platforms. +function _typed_load( + ::Type{T}, buf::V +) where {T,V<:Union{Vector{UInt8},Base.FastContiguousSubArray{UInt8,1}}} + dest = Ref{T}() + GC.@preserve dest buf Base._memcpy!( + unsafe_convert(Ptr{Cvoid}, dest), pointer(buf), sizeof(T) + ) + return dest[] + # TODO: The above can maybe be replaced with + # return GC.@preserve buf unsafe_load(convert(Ptr{t}, pointer(buf))) + # dependent on data elements being properly aligned for all datatypes, on all + # platforms. end _normalize_types(::Type{T}, buf::AbstractVector{UInt8}) where {T} = _typed_load(T, buf) -function _normalize_types(::Type{T}, buf::AbstractVector{UInt8}) where {K, T <: NamedTuple{K}} - # Compound data types do not necessarily have members of uniform size, so instead of - # dim-1 => bytes of single element and dim-2 => over elements, just loop over exact - # byte ranges within the provided buffer vector. - nv = ntuple(length(K)) do ii - elT = fieldtype(T, ii) - off = fieldoffset(T, ii) % Int - sub = view(buf, off .+ (1:sizeof(elT))) - return _normalize_types(elT, sub) - end - return NamedTuple{K}(nv) +function _normalize_types(::Type{T}, buf::AbstractVector{UInt8}) where {K,T<:NamedTuple{K}} + # Compound data types do not necessarily have members of uniform size, so instead of + # dim-1 => bytes of single element and dim-2 => over elements, just loop over exact + # byte ranges within the provided buffer vector. + nv = ntuple(length(K)) do ii + elT = fieldtype(T, ii) + off = fieldoffset(T, ii) % Int + sub = view(buf, off .+ (1:sizeof(elT))) + return _normalize_types(elT, sub) + end + return NamedTuple{K}(nv) end -function _normalize_types(::Type{V}, buf::AbstractVector{UInt8}) where {T, V <: VariableArray{T}} - va = _typed_load(V, buf) - pbuf = unsafe_wrap(Array, convert(Ptr{UInt8}, va.p), (sizeof(T), Int(va.len))) - if do_normalize(T) - # If `T` a non-trivial type, recursively normalize the vlen buffer. - return normalize_types(T, pbuf) - else - # Otherwise if `T` is simple type, directly reinterpret the vlen buffer. - # (copy since libhdf5 will reclaim `pbuf = va.p` in `API.h5d_vlen_reclaim`) - return copy(vec(reinterpret(T, pbuf))) - end +function _normalize_types( + ::Type{V}, buf::AbstractVector{UInt8} +) where {T,V<:VariableArray{T}} + va = _typed_load(V, buf) + pbuf = unsafe_wrap(Array, convert(Ptr{UInt8}, va.p), (sizeof(T), Int(va.len))) + if do_normalize(T) + # If `T` a non-trivial type, recursively normalize the vlen buffer. + return normalize_types(T, pbuf) + else + # Otherwise if `T` is simple type, directly reinterpret the vlen buffer. 
+ # (copy since libhdf5 will reclaim `pbuf = va.p` in `API.h5d_vlen_reclaim`) + return copy(vec(reinterpret(T, pbuf))) + end end -function _normalize_types(::Type{F}, buf::AbstractVector{UInt8}) where {T, F <: FixedArray{T}} - if do_normalize(T) - # If `T` a non-trivial type, recursively normalize the buffer after reshaping to - # matrix with dim-1 => bytes of single element and dim-2 => over elements. - return reshape(normalize_types(T, reshape(buf, sizeof(T), :)), size(F)...) - else - # Otherwise, if `T` is simple type, directly reinterpret the array and reshape to - # final dimensions. The copy ensures (a) the returned array is independent of - # [potentially much larger] read() buffer, and (b) that the returned data is an - # Array and not ReshapedArray of ReinterpretArray of SubArray of ... - return copy(reshape(reinterpret(T, buf), size(F)...)) - end +function _normalize_types(::Type{F}, buf::AbstractVector{UInt8}) where {T,F<:FixedArray{T}} + if do_normalize(T) + # If `T` a non-trivial type, recursively normalize the buffer after reshaping to + # matrix with dim-1 => bytes of single element and dim-2 => over elements. + return reshape(normalize_types(T, reshape(buf, sizeof(T), :)), size(F)...) + else + # Otherwise, if `T` is simple type, directly reinterpret the array and reshape to + # final dimensions. The copy ensures (a) the returned array is independent of + # [potentially much larger] read() buffer, and (b) that the returned data is an + # Array and not ReshapedArray of ReinterpretArray of SubArray of ... + return copy(reshape(reinterpret(T, buf), size(F)...)) + end end -_normalize_types(::Type{Cstring}, buf::AbstractVector{UInt8}) = unsafe_string(_typed_load(Ptr{UInt8}, buf)) -_normalize_types(::Type{T}, buf::AbstractVector{UInt8}) where {T <: FixedString} = unpad(String(buf), pad(T)) +_normalize_types(::Type{Cstring}, buf::AbstractVector{UInt8}) = + unsafe_string(_typed_load(Ptr{UInt8}, buf)) +_normalize_types(::Type{T}, buf::AbstractVector{UInt8}) where {T<:FixedString} = + unpad(String(buf), pad(T)) do_normalize(::Type{T}) where {T} = false -do_normalize(::Type{NamedTuple{T,U}}) where {U,T} = any(i -> do_normalize(fieldtype(U,i)), 1:fieldcount(U)) -do_normalize(::Type{T}) where {T <: Union{Cstring,FixedString,FixedArray,VariableArray}} = true +do_normalize(::Type{NamedTuple{T,U}}) where {U,T} = + any(i -> do_normalize(fieldtype(U, i)), 1:fieldcount(U)) +do_normalize(::Type{T}) where {T<:Union{Cstring,FixedString,FixedArray,VariableArray}} = + true do_reclaim(::Type{T}) where {T} = false -do_reclaim(::Type{NamedTuple{T,U}}) where {U,T} = any(i -> do_reclaim(fieldtype(U,i)), 1:fieldcount(U)) -do_reclaim(::Type{T}) where T <: Union{Cstring,VariableArray} = true +do_reclaim(::Type{NamedTuple{T,U}}) where {U,T} = + any(i -> do_reclaim(fieldtype(U, i)), 1:fieldcount(U)) +do_reclaim(::Type{T}) where {T<:Union{Cstring,VariableArray}} = true diff --git a/src/types.jl b/src/types.jl index 735d479fb..eea17a9b3 100644 --- a/src/types.jl +++ b/src/types.jl @@ -13,14 +13,13 @@ end # Read every variable in the file function Base.read(f::H5DataStore) vars = keys(f) - vals = Vector{Any}(undef,length(vars)) - for i = 1:length(vars) + vals = Vector{Any}(undef, length(vars)) + for i in 1:length(vars) vals[i] = read(f, vars[i]) end Dict(zip(vars, vals)) end - ### Base HDF5 structs ### ## HDF5 uses a plain integer to refer to each file, group, or @@ -68,7 +67,7 @@ mutable struct Dataset file::File xfer::DatasetTransferProperties - function Dataset(id, file, xfer = DatasetTransferProperties()) + 
function Dataset(id, file, xfer=DatasetTransferProperties()) dset = new(id, file, xfer) finalizer(close, dset) dset @@ -129,24 +128,34 @@ Base.unsafe_convert(::Type{API.hid_t}, attr::Attribute) = attr.id # High-level reference handler struct Reference - r::API.hobj_ref_t + r::API.hobj_ref_t end -Base.cconvert(::Type{Ptr{T}}, ref::Reference) where {T<:Union{Reference,API.hobj_ref_t,Cvoid}} = Ref(ref) +Base.cconvert( + ::Type{Ptr{T}}, ref::Reference +) where {T<:Union{Reference,API.hobj_ref_t,Cvoid}} = Ref(ref) -const BitsType = Union{Bool,Int8,UInt8,Int16,UInt16,Int32,UInt32,Int64,UInt64,Float32,Float64} +const BitsType = Union{ + Bool,Int8,UInt8,Int16,UInt16,Int32,UInt32,Int64,UInt64,Float32,Float64 +} const ScalarType = Union{BitsType,Reference} # Define an H5O Object type const Object = Union{Group,Dataset,Datatype} idx_type(obj::File) = - get_context_property(:file_create).track_order || - get_create_properties(obj).track_order ? - API.H5_INDEX_CRT_ORDER : API.H5_INDEX_NAME + if get_context_property(:file_create).track_order || + get_create_properties(obj).track_order + API.H5_INDEX_CRT_ORDER + else + API.H5_INDEX_NAME + end idx_type(obj::Group) = - get_context_property(:group_create).track_order || - get_create_properties(obj).track_order ? - API.H5_INDEX_CRT_ORDER : API.H5_INDEX_NAME + if get_context_property(:group_create).track_order || + get_create_properties(obj).track_order + API.H5_INDEX_CRT_ORDER + else + API.H5_INDEX_NAME + end idx_type(obj) = API.H5_INDEX_NAME # TODO: implement alternative iteration order ? diff --git a/test/api.jl b/test/api.jl index 81ae20d59..d3045b3ad 100644 --- a/test/api.jl +++ b/test/api.jl @@ -9,7 +9,9 @@ using HDF5, Test # iterate over attributes names = String[] - @test HDF5.API.h5a_iterate(f, HDF5.API.H5_INDEX_NAME, HDF5.API.H5_ITER_INC) do loc, name, info + @test HDF5.API.h5a_iterate( + f, HDF5.API.H5_INDEX_NAME, HDF5.API.H5_ITER_INC + ) do loc, name, info push!(names, unsafe_string(name)) return false end == 2 @@ -17,7 +19,9 @@ using HDF5, Test # iterate over attributes in reverse names = String[] - @test HDF5.API.h5a_iterate(f, HDF5.API.H5_INDEX_NAME, HDF5.API.H5_ITER_DEC) do loc, name, info + @test HDF5.API.h5a_iterate( + f, HDF5.API.H5_INDEX_NAME, HDF5.API.H5_ITER_DEC + ) do loc, name, info push!(names, unsafe_string(name)) return false end == 2 @@ -25,26 +29,30 @@ using HDF5, Test # only iterate once names = String[] - @test HDF5.API.h5a_iterate(f, HDF5.API.H5_INDEX_NAME, HDF5.API.H5_ITER_INC) do loc, name, info + @test HDF5.API.h5a_iterate( + f, HDF5.API.H5_INDEX_NAME, HDF5.API.H5_ITER_INC + ) do loc, name, info push!(names, unsafe_string(name)) return true end == 1 @test names == ["a"] # HDF5 error - @test_throws HDF5.API.H5Error HDF5.API.h5a_iterate(f, HDF5.API.H5_INDEX_NAME, HDF5.API.H5_ITER_INC) do loc, name, info - return -1 + @test_throws HDF5.API.H5Error HDF5.API.h5a_iterate( + f, HDF5.API.H5_INDEX_NAME, HDF5.API.H5_ITER_INC + ) do loc, name, info + return -1 end # Julia error - @test_throws AssertionError HDF5.API.h5a_iterate(f, HDF5.API.H5_INDEX_NAME, HDF5.API.H5_ITER_INC) do loc, name, info - @assert false + @test_throws AssertionError HDF5.API.h5a_iterate( + f, HDF5.API.H5_INDEX_NAME, HDF5.API.H5_ITER_INC + ) do loc, name, info + @assert false end - end @testset "h5l_iterate" begin - filename = tempname() f = h5open(filename, "w") @@ -53,7 +61,9 @@ end # iterate over groups names = String[] - @test HDF5.API.h5l_iterate(f, HDF5.API.H5_INDEX_NAME, HDF5.API.H5_ITER_INC) do loc, name, info + @test HDF5.API.h5l_iterate( + f, 
HDF5.API.H5_INDEX_NAME, HDF5.API.H5_ITER_INC + ) do loc, name, info push!(names, unsafe_string(name)) return false end == 2 @@ -61,7 +71,9 @@ end # iterate over attributes in reverse names = String[] - @test HDF5.API.h5l_iterate(f, HDF5.API.H5_INDEX_NAME, HDF5.API.H5_ITER_DEC) do loc, name, info + @test HDF5.API.h5l_iterate( + f, HDF5.API.H5_INDEX_NAME, HDF5.API.H5_ITER_DEC + ) do loc, name, info push!(names, unsafe_string(name)) return false end == 2 @@ -69,20 +81,25 @@ end # only iterate once names = String[] - @test HDF5.API.h5l_iterate(f, HDF5.API.H5_INDEX_NAME, HDF5.API.H5_ITER_INC) do loc, name, info + @test HDF5.API.h5l_iterate( + f, HDF5.API.H5_INDEX_NAME, HDF5.API.H5_ITER_INC + ) do loc, name, info push!(names, unsafe_string(name)) return true end == 1 @test names == ["a"] # HDF5 error - @test_throws HDF5.API.H5Error HDF5.API.h5l_iterate(f, HDF5.API.H5_INDEX_NAME, HDF5.API.H5_ITER_INC) do loc, name, info + @test_throws HDF5.API.H5Error HDF5.API.h5l_iterate( + f, HDF5.API.H5_INDEX_NAME, HDF5.API.H5_ITER_INC + ) do loc, name, info return -1 end # Julia error - @test_throws AssertionError HDF5.API.h5l_iterate(f, HDF5.API.H5_INDEX_NAME, HDF5.API.H5_ITER_INC) do loc, name, info + @test_throws AssertionError HDF5.API.h5l_iterate( + f, HDF5.API.H5_INDEX_NAME, HDF5.API.H5_ITER_INC + ) do loc, name, info @assert false end - -end \ No newline at end of file +end diff --git a/test/attributes.jl b/test/attributes.jl index 5678c690c..b2a710289 100644 --- a/test/attributes.jl +++ b/test/attributes.jl @@ -1,15 +1,14 @@ using HDF5, Test -function test_attrs(o::Union{HDF5.File, HDF5.Object}) - +function test_attrs(o::Union{HDF5.File,HDF5.Object}) @test attrs(o) isa HDF5.AttributeDict attrs(o)["a"] = 1 @test haskey(attrs(o), "a") @test attrs(o)["a"] == 1 - attrs(o)["b"] = [2,3] - @test attrs(o)["b"] == [2,3] + attrs(o)["b"] = [2, 3] + @test attrs(o)["b"] == [2, 3] @test haskey(attrs(o), "a") @test length(attrs(o)) == 2 @test sort(keys(attrs(o))) == ["a", "b"] @@ -24,8 +23,8 @@ function test_attrs(o::Union{HDF5.File, HDF5.Object}) @test sort(keys(attrs(o))) == ["a", "b"] # overwrite: different size - attrs(o)["b"] = [4,5,6] - @test attrs(o)["b"] == [4,5,6] + attrs(o)["b"] = [4, 5, 6] + @test attrs(o)["b"] == [4, 5, 6] @test length(attrs(o)) == 2 @test sort(keys(attrs(o))) == ["a", "b"] @@ -43,10 +42,8 @@ function test_attrs(o::Union{HDF5.File, HDF5.Object}) @test_throws KeyError attrs(o)["a"] @test isnothing(get(attrs(o), "a", nothing)) - end - @testset "attrs interface" begin filename = tempname() f = h5open(filename, "w") @@ -64,11 +61,11 @@ end test_attrs(d) # Test attrs on a HDF5.Datatype - t = commit_datatype(g, "datatype_int16", - HDF5.Datatype(HDF5.API.h5t_copy(HDF5.API.H5T_NATIVE_INT16)) + t = commit_datatype( + g, "datatype_int16", HDF5.Datatype(HDF5.API.h5t_copy(HDF5.API.H5T_NATIVE_INT16)) ) test_attrs(t) finally close(f) end -end \ No newline at end of file +end diff --git a/test/chunkstorage.jl b/test/chunkstorage.jl index 1a4985a1f..30e361a89 100644 --- a/test/chunkstorage.jl +++ b/test/chunkstorage.jl @@ -2,168 +2,171 @@ using HDF5 using Test @testset "Raw Chunk I/O" begin + fn = tempname() -fn = tempname() - -# Direct chunk write is no longer dependent on HL library -# Test direct chunk writing Cartesian index -h5open(fn, "w") do f - d = create_dataset(f, "dataset", datatype(Int), dataspace(4, 4), chunk=(2, 2)) - HDF5.API.h5d_extend(d, HDF5.API.hsize_t[3,3]) # should do nothing (deprecated call) - HDF5.API.h5d_extend(d, HDF5.API.hsize_t[4,4]) # should do nothing (deprecated call) - 
raw = HDF5.ChunkStorage(d) - raw[1,1] = 0, collect(reinterpret(UInt8, [1,2,5,6])) - raw[3,1] = 0, collect(reinterpret(UInt8, [3,4,7,8])) - raw[1,3] = 0, collect(reinterpret(UInt8, [9,10,13,14])) - raw[3,3] = 0, collect(reinterpret(UInt8, [11,12,15,16])) -end - -# Test read back -@test h5open(fn, "r") do f - vec(f["dataset"][:,:]) -end == collect(1:16) - -# Test reading direct chunks via linear indexing -h5open(fn, "r") do f - d = f["dataset"] - raw = HDF5.ChunkStorage{IndexLinear}(d) - @test size(raw) == (4,) - @test length(raw) == 4 - @test axes(raw) == (Base.OneTo(4),) - @test prod(HDF5.get_num_chunks_per_dim(d)) == HDF5.get_num_chunks(d) - if v"1.10.5" ≤ HDF5.API._libhdf5_build_ver - @test HDF5.get_chunk_length(d) == HDF5.API.h5d_get_chunk_info(d,1)[:size] - end - @test reinterpret(Int, raw[1][2]) == [1,2,5,6] - @test reinterpret(Int, raw[2][2]) == [3,4,7,8] - @test reinterpret(Int, raw[3][2]) == [9,10,13,14] - @test reinterpret(Int, raw[4][2]) == [11,12,15,16] - # Test 0-based indexed API - @test HDF5.get_chunk_offset(d, 0) == (0, 0) - @test HDF5.get_chunk_offset(d, 1) == (2, 0) - @test HDF5.get_chunk_offset(d, 2) == (0, 2) - @test HDF5.get_chunk_offset(d, 3) == (2, 2) - # Test reverse look up of index from coords - @test HDF5.get_chunk_index(d, (0, 0)) == 0 - @test HDF5.get_chunk_index(d, (2, 0)) == 1 - @test HDF5.get_chunk_index(d, (0, 2)) == 2 - @test HDF5.get_chunk_index(d, (2, 2)) == 3 - # Test internal coordinates - @test HDF5.get_chunk_index(d, (1, 1)) == 0 - @test HDF5.get_chunk_index(d, (3, 1)) == 1 - @test HDF5.get_chunk_index(d, (1, 3)) == 2 - @test HDF5.get_chunk_index(d, (3, 3)) == 3 -end - -# Test direct write chunk writing via linear indexing -h5open(fn, "w") do f - d = create_dataset(f, "dataset", datatype(Int64), dataspace(4, 6), chunk=(2, 3)) - raw = HDF5.ChunkStorage{IndexLinear}(d) - raw[1] = 0, collect(reinterpret(UInt8, Int64[1,2,5,6, 9,10])) - raw[2] = 0, collect(reinterpret(UInt8, Int64[3,4,7,8,11,12])) - raw[3] = 0, collect(reinterpret(UInt8, Int64[13,14,17,18,21,22])) - raw[4] = 0, collect(reinterpret(UInt8, Int64[15,16,19,20,23,24])) -end - -@test h5open(fn, "r") do f - f["dataset"][:,:] -end == reshape(1:24, 4, 6) - -h5open(fn, "r") do f - d = f["dataset"] - raw = HDF5.ChunkStorage(d) - chunk = HDF5.get_chunk(d) - extent = HDF5.get_extent_dims(d)[1] - - @test chunk == (2, 3) - @test extent == (4, 6) - @test size(raw) == (2, 2) - @test length(raw) == 4 - @test axes(raw) == (1:2:4, 1:3:6) - @test prod(HDF5.get_num_chunks_per_dim(d)) == HDF5.get_num_chunks(d) - - # Test 0-based indexed API - @test HDF5.get_chunk_offset(d, 0) == (0, 0) - @test HDF5.get_chunk_offset(d, 1) == (2, 0) - @test HDF5.get_chunk_offset(d, 2) == (0, 3) - @test HDF5.get_chunk_offset(d, 3) == (2, 3) - # Test reverse look up of index from coords - @test HDF5.get_chunk_index(d, (0, 0)) == 0 - @test HDF5.get_chunk_index(d, (2, 0)) == 1 - @test HDF5.get_chunk_index(d, (0, 3)) == 2 - @test HDF5.get_chunk_index(d, (2, 3)) == 3 - # Test internal coordinates - @test HDF5.get_chunk_index(d, (1, 1)) == 0 - @test HDF5.get_chunk_index(d, (3, 1)) == 1 - @test HDF5.get_chunk_index(d, (1, 4)) == 2 - @test HDF5.get_chunk_index(d, (3, 4)) == 3 - - if v"1.10.5" ≤ HDF5.API._libhdf5_build_ver - chunk_length = HDF5.get_chunk_length(d) - origin = HDF5.API.h5d_get_chunk_info(d, 0) - @test chunk_length == origin[:size] - chunk_info = HDF5.API.h5d_get_chunk_info_by_coord(d, HDF5.API.hsize_t[0, 1]) - @test chunk_info[:filter_mask] == 0 - @test chunk_info[:size] == chunk_length - - # Test HDF5.get_chunk_offset 
equivalence to h5d_get_chunk_info information - @test all(reverse(HDF5.API.h5d_get_chunk_info(d, 3)[:offset]) .== HDF5.get_chunk_offset(d, 3)) - - # Test HDF5.get_chunk_index equivalence to h5d_get_chunk_info_by_coord information - offset = HDF5.API.hsize_t[2,3] - chunk_info = HDF5.API.h5d_get_chunk_info_by_coord(d, reverse(offset)) - @test HDF5.get_chunk_index(d, offset) == (chunk_info[:addr] - origin[:addr]) ÷ chunk_info[:size] - - @test HDF5.API.h5d_get_chunk_storage_size(d, HDF5.API.hsize_t[0, 1]) == chunk_length - @test HDF5.API.h5d_get_storage_size(d) == sizeof(Int64)*24 - @test HDF5.API.h5d_get_space_status(d) == HDF5.API.H5D_SPACE_STATUS_ALLOCATED + # Direct chunk write is no longer dependent on HL library + # Test direct chunk writing Cartesian index + h5open(fn, "w") do f + d = create_dataset(f, "dataset", datatype(Int), dataspace(4, 4); chunk=(2, 2)) + HDF5.API.h5d_extend(d, HDF5.API.hsize_t[3, 3]) # should do nothing (deprecated call) + HDF5.API.h5d_extend(d, HDF5.API.hsize_t[4, 4]) # should do nothing (deprecated call) + raw = HDF5.ChunkStorage(d) + raw[1, 1] = 0, collect(reinterpret(UInt8, [1, 2, 5, 6])) + raw[3, 1] = 0, collect(reinterpret(UInt8, [3, 4, 7, 8])) + raw[1, 3] = 0, collect(reinterpret(UInt8, [9, 10, 13, 14])) + raw[3, 3] = 0, collect(reinterpret(UInt8, [11, 12, 15, 16])) end - # Manually reconstruct matrix - A = Matrix{Int}(undef, extent) - for (r,c) in Iterators.product(axes(raw)...) - A[r:r+chunk[1]-1, c:c+chunk[2]-1] .= reshape( reinterpret(Int64, raw[r,c][2]), chunk) + # Test read back + @test h5open(fn, "r") do f + vec(f["dataset"][:, :]) + end == collect(1:16) + + # Test reading direct chunks via linear indexing + h5open(fn, "r") do f + d = f["dataset"] + raw = HDF5.ChunkStorage{IndexLinear}(d) + @test size(raw) == (4,) + @test length(raw) == 4 + @test axes(raw) == (Base.OneTo(4),) + @test prod(HDF5.get_num_chunks_per_dim(d)) == HDF5.get_num_chunks(d) + if v"1.10.5" ≤ HDF5.API._libhdf5_build_ver + @test HDF5.get_chunk_length(d) == HDF5.API.h5d_get_chunk_info(d, 1)[:size] + end + @test reinterpret(Int, raw[1][2]) == [1, 2, 5, 6] + @test reinterpret(Int, raw[2][2]) == [3, 4, 7, 8] + @test reinterpret(Int, raw[3][2]) == [9, 10, 13, 14] + @test reinterpret(Int, raw[4][2]) == [11, 12, 15, 16] + # Test 0-based indexed API + @test HDF5.get_chunk_offset(d, 0) == (0, 0) + @test HDF5.get_chunk_offset(d, 1) == (2, 0) + @test HDF5.get_chunk_offset(d, 2) == (0, 2) + @test HDF5.get_chunk_offset(d, 3) == (2, 2) + # Test reverse look up of index from coords + @test HDF5.get_chunk_index(d, (0, 0)) == 0 + @test HDF5.get_chunk_index(d, (2, 0)) == 1 + @test HDF5.get_chunk_index(d, (0, 2)) == 2 + @test HDF5.get_chunk_index(d, (2, 2)) == 3 + # Test internal coordinates + @test HDF5.get_chunk_index(d, (1, 1)) == 0 + @test HDF5.get_chunk_index(d, (3, 1)) == 1 + @test HDF5.get_chunk_index(d, (1, 3)) == 2 + @test HDF5.get_chunk_index(d, (3, 3)) == 3 end - @test A == reshape(1:24, extent) -end + # Test direct write chunk writing via linear indexing + h5open(fn, "w") do f + d = create_dataset(f, "dataset", datatype(Int64), dataspace(4, 6); chunk=(2, 3)) + raw = HDF5.ChunkStorage{IndexLinear}(d) + raw[1] = 0, collect(reinterpret(UInt8, Int64[1, 2, 5, 6, 9, 10])) + raw[2] = 0, collect(reinterpret(UInt8, Int64[3, 4, 7, 8, 11, 12])) + raw[3] = 0, collect(reinterpret(UInt8, Int64[13, 14, 17, 18, 21, 22])) + raw[4] = 0, collect(reinterpret(UInt8, Int64[15, 16, 19, 20, 23, 24])) + end -@static if VERSION >= v"1.6" - # CartesianIndices does not accept StepRange + @test h5open(fn, "r") do f + 
f["dataset"][:, :] + end == reshape(1:24, 4, 6) - h5open(fn, "w") do f - d = create_dataset(f, "dataset", datatype(Int), dataspace(4, 6), chunk=(2, 3)) + h5open(fn, "r") do f + d = f["dataset"] raw = HDF5.ChunkStorage(d) - data = permutedims(reshape(1:24, 2, 2, 3, 2), (1,3,2,4)) - ci = CartesianIndices(raw) - for ind in eachindex(ci) - raw[ci[ind]] = data[:,:,ind] + chunk = HDF5.get_chunk(d) + extent = HDF5.get_extent_dims(d)[1] + + @test chunk == (2, 3) + @test extent == (4, 6) + @test size(raw) == (2, 2) + @test length(raw) == 4 + @test axes(raw) == (1:2:4, 1:3:6) + @test prod(HDF5.get_num_chunks_per_dim(d)) == HDF5.get_num_chunks(d) + + # Test 0-based indexed API + @test HDF5.get_chunk_offset(d, 0) == (0, 0) + @test HDF5.get_chunk_offset(d, 1) == (2, 0) + @test HDF5.get_chunk_offset(d, 2) == (0, 3) + @test HDF5.get_chunk_offset(d, 3) == (2, 3) + # Test reverse look up of index from coords + @test HDF5.get_chunk_index(d, (0, 0)) == 0 + @test HDF5.get_chunk_index(d, (2, 0)) == 1 + @test HDF5.get_chunk_index(d, (0, 3)) == 2 + @test HDF5.get_chunk_index(d, (2, 3)) == 3 + # Test internal coordinates + @test HDF5.get_chunk_index(d, (1, 1)) == 0 + @test HDF5.get_chunk_index(d, (3, 1)) == 1 + @test HDF5.get_chunk_index(d, (1, 4)) == 2 + @test HDF5.get_chunk_index(d, (3, 4)) == 3 + + if v"1.10.5" ≤ HDF5.API._libhdf5_build_ver + chunk_length = HDF5.get_chunk_length(d) + origin = HDF5.API.h5d_get_chunk_info(d, 0) + @test chunk_length == origin[:size] + chunk_info = HDF5.API.h5d_get_chunk_info_by_coord(d, HDF5.API.hsize_t[0, 1]) + @test chunk_info[:filter_mask] == 0 + @test chunk_info[:size] == chunk_length + + # Test HDF5.get_chunk_offset equivalence to h5d_get_chunk_info information + @test all( + reverse(HDF5.API.h5d_get_chunk_info(d, 3)[:offset]) .== + HDF5.get_chunk_offset(d, 3) + ) + + # Test HDF5.get_chunk_index equivalence to h5d_get_chunk_info_by_coord information + offset = HDF5.API.hsize_t[2, 3] + chunk_info = HDF5.API.h5d_get_chunk_info_by_coord(d, reverse(offset)) + @test HDF5.get_chunk_index(d, offset) == + (chunk_info[:addr] - origin[:addr]) ÷ chunk_info[:size] + + @test HDF5.API.h5d_get_chunk_storage_size(d, HDF5.API.hsize_t[0, 1]) == + chunk_length + @test HDF5.API.h5d_get_storage_size(d) == sizeof(Int64) * 24 + @test HDF5.API.h5d_get_space_status(d) == HDF5.API.H5D_SPACE_STATUS_ALLOCATED end + + # Manually reconstruct matrix + A = Matrix{Int}(undef, extent) + for (r, c) in Iterators.product(axes(raw)...) 
+ A[r:(r + chunk[1] - 1), c:(c + chunk[2] - 1)] .= reshape( + reinterpret(Int64, raw[r, c][2]), chunk + ) + end + @test A == reshape(1:24, extent) end - @test h5open(fn, "r") do f - f["dataset"][:,:] - end == reshape(1:24, 4, 6) + @static if VERSION >= v"1.6" + # CartesianIndices does not accept StepRange + + h5open(fn, "w") do f + d = create_dataset(f, "dataset", datatype(Int), dataspace(4, 6); chunk=(2, 3)) + raw = HDF5.ChunkStorage(d) + data = permutedims(reshape(1:24, 2, 2, 3, 2), (1, 3, 2, 4)) + ci = CartesianIndices(raw) + for ind in eachindex(ci) + raw[ci[ind]] = data[:, :, ind] + end + end -end - -# Test direct write chunk writing via linear indexing, using views and without filter flag -h5open(fn, "w") do f - d = create_dataset(f, "dataset", datatype(Int), dataspace(4, 6), chunk=(2, 3)) - raw = HDF5.ChunkStorage{IndexLinear}(d) - data = permutedims(reshape(1:24, 2, 2, 3, 2), (1,3,2,4)) - chunks = Iterators.partition(data, 6) - i = 1 - for c in chunks - raw[i] = c - i += 1 + @test h5open(fn, "r") do f + f["dataset"][:, :] + end == reshape(1:24, 4, 6) end -end -@test h5open(fn, "r") do f - f["dataset"][:,:] -end == reshape(1:24, 4, 6) + # Test direct write chunk writing via linear indexing, using views and without filter flag + h5open(fn, "w") do f + d = create_dataset(f, "dataset", datatype(Int), dataspace(4, 6); chunk=(2, 3)) + raw = HDF5.ChunkStorage{IndexLinear}(d) + data = permutedims(reshape(1:24, 2, 2, 3, 2), (1, 3, 2, 4)) + chunks = Iterators.partition(data, 6) + i = 1 + for c in chunks + raw[i] = c + i += 1 + end + end -rm(fn) + @test h5open(fn, "r") do f + f["dataset"][:, :] + end == reshape(1:24, 4, 6) + rm(fn) end # testset "Raw Chunk I/O" diff --git a/test/compound.jl b/test/compound.jl index c7c10cb99..d0e76be9d 100644 --- a/test/compound.jl +++ b/test/compound.jl @@ -15,18 +15,19 @@ end struct foo_hdf5 a::Float64 b::Cstring - c::NTuple{20, UInt8} - d::NTuple{9, ComplexF64} + c::NTuple{20,UInt8} + d::NTuple{9,ComplexF64} e::HDF5.API.hvl_t end function unsafe_convert(::Type{foo_hdf5}, x::foo) - foo_hdf5(x.a, - Base.unsafe_convert(Cstring, x.b), - ntuple(i -> i <= ncodeunits(x.c) ? codeunit(x.c, i) : '\0', 20), - ntuple(i -> x.d[i], length(x.d)), - HDF5.API.hvl_t(length(x.e), pointer(x.e)) - ) + foo_hdf5( + x.a, + Base.unsafe_convert(Cstring, x.b), + ntuple(i -> i <= ncodeunits(x.c) ? codeunit(x.c, i) : '\0', 20), + ntuple(i -> x.d[i], length(x.d)), + HDF5.API.hvl_t(length(x.e), pointer(x.e)) + ) end function datatype(::Type{foo_hdf5}) @@ -44,7 +45,7 @@ function datatype(::Type{foo_hdf5}) HDF5.API.h5t_set_strpad(fixedstr_dtype, HDF5.API.H5T_STR_NULLPAD) HDF5.API.h5t_insert(dtype, "c", fieldoffset(foo_hdf5, 3), fixedstr_dtype) - hsz = HDF5.API.hsize_t[3,3] + hsz = HDF5.API.hsize_t[3, 3] array_dtype = HDF5.API.h5t_array_create(datatype(ComplexF64), 2, hsz) HDF5.API.h5t_insert(dtype, "d", fieldoffset(foo_hdf5, 4), array_dtype) @@ -59,7 +60,7 @@ struct bar end struct bar_hdf5 - a::NTuple{2, NTuple{20, UInt8}} + a::NTuple{2,NTuple{20,UInt8}} end function datatype(::Type{bar_hdf5}) @@ -78,25 +79,26 @@ function datatype(::Type{bar_hdf5}) end function convert(::Type{bar_hdf5}, x::bar) - bar_hdf5(ntuple(i -> ntuple(j -> j <= ncodeunits(x.a[i]) ? codeunit(x.a[i], j) : '\0', 20), 2)) + bar_hdf5( + ntuple( + i -> ntuple(j -> j <= ncodeunits(x.a[i]) ? 
codeunit(x.a[i], j) : '\0', 20), 2 + ) + ) end - @testset "compound" begin N = 10 - v = [foo(rand(), + v = [ + foo( + rand(), randstring(rand(10:100)), randstring(10), - rand(ComplexF64, 3,3), + rand(ComplexF64, 3, 3), rand(1:10, rand(10:100)) - ) - for _ in 1:N] + ) for _ in 1:N + ] - v[1] = foo(1.0, - "uniçº∂e", - "uniçº∂e", - rand(ComplexF64, 3,3), - rand(1:10, rand(10:100))) + v[1] = foo(1.0, "uniçº∂e", "uniçº∂e", rand(ComplexF64, 3, 3), rand(1:10, rand(10:100))) v_write = unsafe_convert.(foo_hdf5, v) @@ -128,18 +130,18 @@ end @test f.(w) == f.(w_read) end - T = NamedTuple{(:a, :b, :c, :d, :e, :f), Tuple{Int, Int, Int, Int, Int, Cstring}} - TT = NamedTuple{(:a, :b, :c, :d, :e, :f), Tuple{Int, Int, Int, Int, Int, T}} - TTT = NamedTuple{(:a, :b, :c, :d, :e, :f), Tuple{Int, Int, Int, Int, Int, TT}} - TTTT = NamedTuple{(:a, :b, :c, :d, :e, :f), Tuple{Int, Int, Int, Int, Int, TTT}} + T = NamedTuple{(:a, :b, :c, :d, :e, :f),Tuple{Int,Int,Int,Int,Int,Cstring}} + TT = NamedTuple{(:a, :b, :c, :d, :e, :f),Tuple{Int,Int,Int,Int,Int,T}} + TTT = NamedTuple{(:a, :b, :c, :d, :e, :f),Tuple{Int,Int,Int,Int,Int,TT}} + TTTT = NamedTuple{(:a, :b, :c, :d, :e, :f),Tuple{Int,Int,Int,Int,Int,TTT}} @test HDF5.do_reclaim(TTTT) == true @test HDF5.do_normalize(TTTT) == true - T = NamedTuple{(:a, :b, :c, :d, :e, :f), Tuple{Int, Int, Int, Int, Int, HDF5.FixedArray}} - TT = NamedTuple{(:a, :b, :c, :d, :e, :f), Tuple{Int, Int, Int, Int, Int, T}} - TTT = NamedTuple{(:a, :b, :c, :d, :e, :f), Tuple{Int, Int, Int, Int, Int, TT}} - TTTT = NamedTuple{(:a, :b, :c, :d, :e, :f), Tuple{Int, Int, Int, Int, Int, TTT}} + T = NamedTuple{(:a, :b, :c, :d, :e, :f),Tuple{Int,Int,Int,Int,Int,HDF5.FixedArray}} + TT = NamedTuple{(:a, :b, :c, :d, :e, :f),Tuple{Int,Int,Int,Int,Int,T}} + TTT = NamedTuple{(:a, :b, :c, :d, :e, :f),Tuple{Int,Int,Int,Int,Int,TT}} + TTTT = NamedTuple{(:a, :b, :c, :d, :e, :f),Tuple{Int,Int,Int,Int,Int,TTT}} @test HDF5.do_reclaim(TTTT) == false @test HDF5.do_normalize(TTTT) == true diff --git a/test/custom.jl b/test/custom.jl index 3db33aa11..b891fa23a 100644 --- a/test/custom.jl +++ b/test/custom.jl @@ -38,10 +38,10 @@ end v_read = read(dset, Simple, indices...) @test v_read == v[indices...] - v_read = read(h5f, "data"=>Simple) + v_read = read(h5f, "data" => Simple) @test v_read == v end - v_read = h5read(fn, "data"=>Simple) + v_read = h5read(fn, "data" => Simple) @test v_read == v end diff --git a/test/dataspace.jl b/test/dataspace.jl index 1cfb5a767..75c31d16e 100644 --- a/test/dataspace.jl +++ b/test/dataspace.jl @@ -16,7 +16,7 @@ using Test @test isvalid(ds_scalar) - @test ndims(ds_null) === 0 + @test ndims(ds_null) === 0 @test ndims(ds_scalar) === 0 @test ndims(ds_zerosz) === 1 @test ndims(ds_vector) === 1 @@ -25,23 +25,27 @@ using Test # Test that properties of existing datasets can be extracted. # Note: Julia reverses the order of dimensions when using the high-level API versus # the dimensions used above to create the reference objects. 
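     # For instance (illustrative sketch, not exercised by this test): a Julia
     # array of size (5, 7) gets a file-order dataspace of (7, 5), so a low-level
     # call such as
     #   HDF5.API.h5s_get_simple_extent_dims(dataspace((5, 7)))
     # reports the reversed dims while the high-level size() returns (5, 7).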
- @test size(ds_null) === () + @test size(ds_null) === () @test size(ds_scalar) === () @test size(ds_zerosz) === (0,) @test size(ds_vector) === (5,) @test size(ds_matrix) === (5, 7) @test size(ds_maxdim) === (5, 7) - @test size(ds_null, 5) === 1 + @test size(ds_null, 5) === 1 @test size(ds_scalar, 5) === 1 @test size(ds_zerosz, 5) === 1 @test size(ds_vector, 5) === 1 @test size(ds_matrix, 5) === 1 @test size(ds_maxdim, 5) === 1 - @test_throws ArgumentError("invalid dimension d; must be positive integer") size(ds_null, 0) - @test_throws ArgumentError("invalid dimension d; must be positive integer") size(ds_scalar, -1) - - @test length(ds_null) === 0 + @test_throws ArgumentError("invalid dimension d; must be positive integer") size( + ds_null, 0 + ) + @test_throws ArgumentError("invalid dimension d; must be positive integer") size( + ds_scalar, -1 + ) + + @test length(ds_null) === 0 @test length(ds_scalar) === 1 @test length(ds_zerosz) === 0 @test length(ds_vector) === 5 @@ -58,23 +62,23 @@ using Test @test !HDF5.isnull(ds_zerosz) @test !HDF5.isnull(ds_vector) - @test HDF5.get_extent_dims(ds_null) === ((), ()) + @test HDF5.get_extent_dims(ds_null) === ((), ()) @test HDF5.get_extent_dims(ds_scalar) === ((), ()) @test HDF5.get_extent_dims(ds_zerosz) === ((0,), (0,)) @test HDF5.get_extent_dims(ds_vector) === ((5,), (5,)) @test HDF5.get_extent_dims(ds_matrix) === ((5, 7), (5, 7)) @test HDF5.get_extent_dims(ds_maxdim) === ((5, 7), (20, 20)) - @test HDF5.get_extent_dims(ds_unlim) === ((1,), (-1,)) + @test HDF5.get_extent_dims(ds_unlim) === ((1,), (-1,)) # Can create new copies ds_tmp = copy(ds_maxdim) ds_tmp2 = HDF5.Dataspace(ds_tmp.id) # copy of ID, but new Julia object @test ds_tmp.id == ds_tmp2.id != ds_maxdim.id # Equality and hashing - @test ds_tmp == ds_maxdim + @test ds_tmp == ds_maxdim @test ds_tmp !== ds_maxdim @test hash(ds_tmp) != hash(ds_maxdim) - @test ds_tmp == ds_tmp2 + @test ds_tmp == ds_tmp2 @test ds_tmp !== ds_tmp2 @test hash(ds_tmp) == hash(ds_tmp2) @@ -98,8 +102,8 @@ using Test @test dataspace(()) == ds_scalar @test dataspace((5,)) == ds_vector @test dataspace((5, 7)) == ds_matrix != ds_maxdim - @test dataspace((5, 7), max_dims = (20, 20)) == ds_maxdim != ds_matrix - @test dataspace((1,), max_dims = (-1,)) == ds_unlim + @test dataspace((5, 7); max_dims=(20, 20)) == ds_maxdim != ds_matrix + @test dataspace((1,); max_dims=(-1,)) == ds_unlim # for ≥ 2 numbers, same as single tuple argument @test dataspace(5, 7) == ds_matrix @test dataspace(5, 7, 1) == dataspace((5, 7, 1)) @@ -127,9 +131,9 @@ using Test h5open(path, "w") do hid dset = create_dataset(hid, "dset", datatype(Int), ds_matrix) attr = create_attribute(dset, "attr", datatype(Bool), ds_vector) - @test dataspace(dset) == ds_matrix + @test dataspace(dset) == ds_matrix @test dataspace(dset) !== ds_matrix - @test dataspace(attr) == ds_vector + @test dataspace(attr) == ds_vector @test dataspace(attr) !== ds_vector close(dset) close(attr) @@ -138,20 +142,23 @@ using Test end end - # Test mid-level routines: set/get_extent_dims dspace_norm = dataspace((100, 4)) - @test HDF5.get_extent_dims(dspace_norm)[1] == HDF5.get_extent_dims(dspace_norm)[2] == (100, 4) + @test HDF5.get_extent_dims(dspace_norm)[1] == + HDF5.get_extent_dims(dspace_norm)[2] == + (100, 4) HDF5.set_extent_dims(dspace_norm, (8, 2)) - @test HDF5.get_extent_dims(dspace_norm)[1] == HDF5.get_extent_dims(dspace_norm)[2] == (8, 2) + @test HDF5.get_extent_dims(dspace_norm)[1] == + HDF5.get_extent_dims(dspace_norm)[2] == + (8, 2) - dspace_maxd = dataspace((100, 4), max_dims 
= (256, 5)) + dspace_maxd = dataspace((100, 4); max_dims=(256, 5)) @test HDF5.get_extent_dims(dspace_maxd)[1] == (100, 4) @test HDF5.get_extent_dims(dspace_maxd)[2] == (256, 5) HDF5.set_extent_dims(dspace_maxd, (8, 2)) @test HDF5.get_extent_dims(dspace_maxd)[1] == (8, 2) - HDF5.set_extent_dims(dspace_maxd, (3, 1), (4,2)) + HDF5.set_extent_dims(dspace_maxd, (3, 1), (4, 2)) @test HDF5.get_extent_dims(dspace_maxd)[1] == (3, 1) @test HDF5.get_extent_dims(dspace_maxd)[2] == (4, 2) HDF5.set_extent_dims(dspace_maxd, (3, 1), (-1, -1)) # unlimited max size diff --git a/test/drivers.jl b/test/drivers.jl index dcfd5b32c..709871b60 100644 --- a/test/drivers.jl +++ b/test/drivers.jl @@ -3,22 +3,19 @@ import HDF5.Drivers using Test @testset "Drivers" begin + fn = tempname() + A = rand(UInt8, 256, 128) + h5open(fn, "w"; driver=Drivers.Core()) do f + ds = write_dataset(f, "core_dataset", A) + end + @test isfile(fn) + h5open(fn, "r") do f + @test f["core_dataset"][] == A + end -fn = tempname() -A = rand(UInt8, 256, 128) -h5open(fn, "w"; driver = Drivers.Core()) do f - ds = write_dataset(f, "core_dataset", A) -end -@test isfile(fn) -h5open(fn, "r") do f - @test f["core_dataset"][] == A -end - -fn = tempname() -h5open(fn, "w"; driver = Drivers.Core(backing_store = false)) do f - ds = write_dataset(f, "core_dataset", A) -end -@test !isfile(fn) - - + fn = tempname() + h5open(fn, "w"; driver=Drivers.Core(; backing_store=false)) do f + ds = write_dataset(f, "core_dataset", A) + end + @test !isfile(fn) end diff --git a/test/extend_test.jl b/test/extend_test.jl index 928c7c1aa..9a9eb89f5 100644 --- a/test/extend_test.jl +++ b/test/extend_test.jl @@ -2,79 +2,77 @@ using HDF5 using Test @testset "extend" begin + fn = tempname() -fn = tempname() + fid = h5open(fn, "w") + g = create_group(fid, "shoe") + d = create_dataset(g, "foo", datatype(Float64), ((10, 20), (100, 200)); chunk=(1, 1)) + #println("d is size current $(map(int,HDF5.get_extent_dims(d)[1])) max $(map(int,HDF5.get_extent_dims(d)[2]))") + dims, max_dims = HDF5.get_extent_dims(d) + @test dims == (UInt64(10), UInt64(20)) + @test max_dims == (UInt64(100), UInt64(200)) + HDF5.set_extent_dims(d, (100, 150)) + dims, max_dims = HDF5.get_extent_dims(d) + @test dims == (UInt64(100), UInt64(150)) + @test max_dims == (UInt64(100), UInt64(200)) + d[1, 1:5] = [1.1231, 1.313, 5.123, 2.231, 4.1231] + HDF5.set_extent_dims(d, (1, 5)) + @test size(d) == (1, 5) -fid = h5open(fn, "w") -g = create_group(fid, "shoe") -d = create_dataset(g, "foo", datatype(Float64), ((10, 20), (100, 200)), chunk=(1, 1)) -#println("d is size current $(map(int,HDF5.get_extent_dims(d)[1])) max $(map(int,HDF5.get_extent_dims(d)[2]))") -dims, max_dims = HDF5.get_extent_dims(d) -@test dims == (UInt64(10), UInt64(20)) -@test max_dims == (UInt64(100), UInt64(200)) -HDF5.set_extent_dims(d, (100, 150)) -dims, max_dims = HDF5.get_extent_dims(d) -@test dims == (UInt64(100), UInt64(150)) -@test max_dims == (UInt64(100), UInt64(200)) -d[1, 1:5] = [1.1231, 1.313, 5.123, 2.231, 4.1231] -HDF5.set_extent_dims(d, (1, 5)) -@test size(d) == (1, 5) + # Indexing returns correct array dimensions + @test d[1, end] ≈ 4.1231 + @test d[:, end] ≈ [4.1231] + @test d[end, :] == [1.1231, 1.313, 5.123, 2.231, 4.1231] + @test d[:, :] == [1.1231 1.313 5.123 2.231 4.1231] -# Indexing returns correct array dimensions -@test d[1, end] ≈ 4.1231 -@test d[:, end] ≈ [4.1231] -@test d[end, :] == [1.1231, 1.313, 5.123, 2.231, 4.1231] -@test d[:, :] == [1.1231 1.313 5.123 2.231 4.1231] + # Test all integer types work + @test d[UInt8(1), 
UInt16(1)] == 1.1231 + @test d[UInt32(1), UInt128(1)] == 1.1231 + @test d[Int8(1), Int16(1)] == 1.1231 + @test d[Int32(1), Int128(1)] == 1.1231 -# Test all integer types work -@test d[UInt8(1), UInt16(1)] == 1.1231 -@test d[UInt32(1), UInt128(1)] == 1.1231 -@test d[Int8(1), Int16(1)] == 1.1231 -@test d[Int32(1), Int128(1)] == 1.1231 + # Test ranges work with steps + @test d[1, 1:2:5] == [1.1231, 5.123, 4.1231] + @test d[1:1, 2:2:4] == [1.313 2.231] -# Test ranges work with steps -@test d[1, 1:2:5] == [1.1231, 5.123, 4.1231] -@test d[1:1, 2:2:4] == [1.313 2.231] + # Test Array constructor + Array(d) == [1.1231 1.313 5.123 2.231 4.1231] -# Test Array constructor -Array(d) == [1.1231 1.313 5.123 2.231 4.1231] + #println("d is size current $(map(int,HDF5.get_extent_dims(d)[1])) max $(map(int,HDF5.get_extent_dims(d)[2]))") + b = create_dataset(fid, "b", Int, ((1000,), (-1,)); chunk=(100,)) #-1 is equivalent to typemax(hsize_t) as far as I can tell + #println("b is size current $(map(int,HDF5.get_extent_dims(b)[1])) max $(map(int,HDF5.get_extent_dims(b)[2]))") + b[1:200] = ones(200) + dims, max_dims = HDF5.get_extent_dims(b) + @test dims == (UInt64(1000),) + @test max_dims == (HDF5.API.H5S_UNLIMITED % Int,) + HDF5.set_extent_dims(b, (10000,)) + dims, max_dims = HDF5.get_extent_dims(b) + @test dims == (UInt64(10000),) + @test max_dims == (HDF5.API.H5S_UNLIMITED % Int,) + #println("b is size current $(map(int,HDF5.get_extent_dims(b)[1])) max $(map(int,HDF5.get_extent_dims(b)[2]))") + # b[:] = [1:10000] # gave error no method lastindex(HDF5.Dataset{PlainHDF5File},), + # so I defined lastindex(dset::HDF5.Dataset) = length(dset), and exported lastindex + # but that didn't fix the error, despite the lastindex function working + # d[1] produces error ERROR: Wrong number of indices supplied, should datasets support linear indexing? + b[1:10000] = [1:10000;] + #println(b[1:100]) -#println("d is size current $(map(int,HDF5.get_extent_dims(d)[1])) max $(map(int,HDF5.get_extent_dims(d)[2]))") -b = create_dataset(fid, "b", Int, ((1000,), (-1,)), chunk=(100,)) #-1 is equivalent to typemax(hsize_t) as far as I can tell -#println("b is size current $(map(int,HDF5.get_extent_dims(b)[1])) max $(map(int,HDF5.get_extent_dims(b)[2]))") -b[1:200] = ones(200) -dims, max_dims = HDF5.get_extent_dims(b) -@test dims == (UInt64(1000),) -@test max_dims == (HDF5.API.H5S_UNLIMITED % Int,) -HDF5.set_extent_dims(b, (10000,)) -dims, max_dims = HDF5.get_extent_dims(b) -@test dims == (UInt64(10000),) -@test max_dims == (HDF5.API.H5S_UNLIMITED % Int,) -#println("b is size current $(map(int,HDF5.get_extent_dims(b)[1])) max $(map(int,HDF5.get_extent_dims(b)[2]))") -# b[:] = [1:10000] # gave error no method lastindex(HDF5.Dataset{PlainHDF5File},), -# so I defined lastindex(dset::HDF5.Dataset) = length(dset), and exported lastindex -# but that didn't fix the error, despite the lastindex function working -# d[1] produces error ERROR: Wrong number of indices supplied, should datasets support linear indexing? 
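+    # In short, the pattern under test (illustrative sketch only): an extendible
+    # dataset is declared with a max size, where -1 means H5S_UNLIMITED, plus a
+    # chunk layout, and is then grown in place before writing past the old bounds:
+    #   d = create_dataset(f, "x", Int, ((10,), (-1,)); chunk=(5,))
+    #   HDF5.set_extent_dims(d, (100,))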
-b[1:10000] = [1:10000;] -#println(b[1:100]) + close(fid) -close(fid) - -fid = h5open(fn, "r") -d_again = fid["shoe/foo"] -dims, max_dims = HDF5.get_extent_dims(d_again) -@test dims == (UInt64(1), UInt64(5)) -@test max_dims == (UInt64(100), UInt64(200)) -@test (sum(d_again[1, 1:5]) - sum([1.1231, 1.313, 5.123, 2.231, 4.1231])) == 0 -#println("d is size current $(map(int,HDF5.get_extent_dims(re_d)[1])) max $(map(int,HDF5.get_extent_dims(re_d)[2]))") -@test fid["b"][1:10000] == [1:10000;] -b_again = fid["b"] -dims, max_dims = HDF5.get_extent_dims(b_again) -@test dims == (UInt64(10000),) -@test max_dims == (HDF5.API.H5S_UNLIMITED % Int,) -#println("b is size current $(map(int,HDF5.get_extent_dims(b)[1])) max $(map(int,HDF5.get_extent_dims(b)[2]))") - -close(fid) -rm(fn) + fid = h5open(fn, "r") + d_again = fid["shoe/foo"] + dims, max_dims = HDF5.get_extent_dims(d_again) + @test dims == (UInt64(1), UInt64(5)) + @test max_dims == (UInt64(100), UInt64(200)) + @test (sum(d_again[1, 1:5]) - sum([1.1231, 1.313, 5.123, 2.231, 4.1231])) == 0 + #println("d is size current $(map(int,HDF5.get_extent_dims(re_d)[1])) max $(map(int,HDF5.get_extent_dims(re_d)[2]))") + @test fid["b"][1:10000] == [1:10000;] + b_again = fid["b"] + dims, max_dims = HDF5.get_extent_dims(b_again) + @test dims == (UInt64(10000),) + @test max_dims == (HDF5.API.H5S_UNLIMITED % Int,) + #println("b is size current $(map(int,HDF5.get_extent_dims(b)[1])) max $(map(int,HDF5.get_extent_dims(b)[2]))") + close(fid) + rm(fn) end # testset extend_test diff --git a/test/external.jl b/test/external.jl index 72bfe0183..d5327036f 100644 --- a/test/external.jl +++ b/test/external.jl @@ -3,56 +3,56 @@ using Test @testset "external" begin -# roughly following https://www.hdfgroup.org/ftp/HDF5/current/src/unpacked/examples/h5_extlink.c -fn1 = tempname() -fn2 = tempname() - -source_file = h5open(fn1, "w") -agroup = create_group(source_file, "agroup") -target_file = h5open(fn2, "w") -target_group = create_group(target_file, "target_group") -target_group["abc"] = "abc" -target_group["1"] = 1 -target_group["1.1"] = 1.1 -close(target_file) - -# create external link such that source_file["ext_link"] points to target_file["target_group"] -# test both an HDF5.File and an HDF5.Group for first argument -HDF5.create_external(source_file, "ext_link", target_file.filename, "target_group") -HDF5.create_external(agroup, "ext_link", target_file.filename, "target_group") -close(agroup) -# write some things via the external link -new_group = create_group(source_file["ext_link"], "new_group") -new_group["abc"] = "abc" -new_group["1"] = 1 -new_group["1.1"] = 1.1 -close(new_group) - -# read things from target_group via exernal link created with HDF5File argument -group = source_file["ext_link"] -@test read(group["abc"]) == "abc" -@test read(group["1"]) == 1 -@test read(group["1.1"]) == 1.1 -close(group) -# read things from target_group via the external link created with HDF5.Group argument -groupalt = source_file["agroup/ext_link"] -@test read(groupalt["abc"]) == "abc" -@test read(groupalt["1"]) == 1 -@test read(groupalt["1.1"]) == 1.1 -close(groupalt) -close(source_file) - -##### tests that should be included but don't work -# when ggggggggg restarts julia and keeps track of target_file.filename, -# these tests succeed -# reopening the target_file crashes due to "file close degree doesn't match" -# target_file = h5open(target_file.filename, "r") -# group2 = target_file["target_group"]["new_group"] -# @test read(group2["abc"])=="abc" -# @test read(group2["1"])==1 -# @test 
read(group2["1.1"])==1.1 - -rm(fn1) -# rm(fn2) + # roughly following https://www.hdfgroup.org/ftp/HDF5/current/src/unpacked/examples/h5_extlink.c + fn1 = tempname() + fn2 = tempname() + + source_file = h5open(fn1, "w") + agroup = create_group(source_file, "agroup") + target_file = h5open(fn2, "w") + target_group = create_group(target_file, "target_group") + target_group["abc"] = "abc" + target_group["1"] = 1 + target_group["1.1"] = 1.1 + close(target_file) + + # create external link such that source_file["ext_link"] points to target_file["target_group"] + # test both an HDF5.File and an HDF5.Group for first argument + HDF5.create_external(source_file, "ext_link", target_file.filename, "target_group") + HDF5.create_external(agroup, "ext_link", target_file.filename, "target_group") + close(agroup) + # write some things via the external link + new_group = create_group(source_file["ext_link"], "new_group") + new_group["abc"] = "abc" + new_group["1"] = 1 + new_group["1.1"] = 1.1 + close(new_group) + + # read things from target_group via exernal link created with HDF5File argument + group = source_file["ext_link"] + @test read(group["abc"]) == "abc" + @test read(group["1"]) == 1 + @test read(group["1.1"]) == 1.1 + close(group) + # read things from target_group via the external link created with HDF5.Group argument + groupalt = source_file["agroup/ext_link"] + @test read(groupalt["abc"]) == "abc" + @test read(groupalt["1"]) == 1 + @test read(groupalt["1.1"]) == 1.1 + close(groupalt) + close(source_file) + + ##### tests that should be included but don't work + # when ggggggggg restarts julia and keeps track of target_file.filename, + # these tests succeed + # reopening the target_file crashes due to "file close degree doesn't match" + # target_file = h5open(target_file.filename, "r") + # group2 = target_file["target_group"]["new_group"] + # @test read(group2["abc"])=="abc" + # @test read(group2["1"])==1 + # @test read(group2["1.1"])==1.1 + + rm(fn1) + # rm(fn2) end # testset external diff --git a/test/fileio.jl b/test/fileio.jl index cc81c8a88..7331eb311 100644 --- a/test/fileio.jl +++ b/test/fileio.jl @@ -1,120 +1,108 @@ using HDF5, OrderedCollections, FileIO, Test @testset "fileio" begin -fn = tempname() * ".h5" - -hfile = h5open(fn, "w") -hfile["A"] = 1.0 -hfile["B"] = [1,2,3] -create_group(hfile, "G") -hfile["G/A"] = collect(-3:4) -create_group(hfile, "G1/G2") -hfile["G1/G2/A"] = "hello" -close(hfile); - -# test loader -data = Dict("A" => 1.0, "B"=> [1,2,3], "G/A"=>collect(-3:4), "G1/G2/A"=>"hello") -@test load(fn) == data -@test load(fn, "A") == 1.0 -@test load(fn, "A", "B") == (1.0, [1,2,3]) -@test load(fn, "G/A") == collect(-3:4) - -rm(fn) - -# test saver -save(fn, data) -@test load(fn) == data -@test load(fn, "A") == 1.0 -fr = h5open(fn, "r") -read(fr, "A") == 1.0 -close(fr) - -rm(fn) + fn = tempname() * ".h5" + + hfile = h5open(fn, "w") + hfile["A"] = 1.0 + hfile["B"] = [1, 2, 3] + create_group(hfile, "G") + hfile["G/A"] = collect(-3:4) + create_group(hfile, "G1/G2") + hfile["G1/G2/A"] = "hello" + close(hfile) + + # test loader + data = Dict("A" => 1.0, "B" => [1, 2, 3], "G/A" => collect(-3:4), "G1/G2/A" => "hello") + @test load(fn) == data + @test load(fn, "A") == 1.0 + @test load(fn, "A", "B") == (1.0, [1, 2, 3]) + @test load(fn, "G/A") == collect(-3:4) + + rm(fn) + + # test saver + save(fn, data) + @test load(fn) == data + @test load(fn, "A") == 1.0 + fr = h5open(fn, "r") + read(fr, "A") == 1.0 + close(fr) + + rm(fn) end @testset "track order" begin - -let fn = tempname() * ".h5" - 
h5open(fn, "w"; track_order=true) do io - fcpl = HDF5.get_create_properties(io) - @test fcpl.track_order - io["b"] = 1 - io["a"] = 2 - g = create_group(io, "G"; track_order=true) - gcpl = HDF5.get_create_properties(io["G"]) - @test gcpl.track_order - write(g, "z", 3) - write(g, "f", 4) - end - - dat = load(fn; dict=OrderedDict()) # `track_order` is inferred from `OrderedDict` - - @test all(keys(dat) .== ["b", "a", "G/z", "G/f"]) - - # issue #939 - h5open(fn, "r"; track_order=true) do io - @test HDF5.get_context_property(:file_create).track_order - @test all(keys(io) .== ["b", "a", "G"]) - @test HDF5.get_context_property(:group_create).track_order - @test HDF5.get_create_properties(io["G"]).track_order # inferred from file, created with `track_order=true` - @test all(keys(io["G"]) .== ["z", "f"]) - end - - h5open(fn, "r"; track_order=false) do io - @test !HDF5.get_context_property(:file_create).track_order - @test all(keys(io) .== ["G", "a", "b"]) - @test !HDF5.get_context_property(:group_create).track_order - @test HDF5.get_create_properties(io["G"]).track_order # inferred from file - @test all(keys(io["G"]) .== ["z", "f"]) - end - - h5open(fn, "r") do io - @test !HDF5.get_create_properties(io).track_order - @test all(keys(io) .== ["G", "a", "b"]) - @test HDF5.get_create_properties(io["G"]).track_order # inferred from file - @test all(keys(io["G"]) .== ["z", "f"]) - end -end - -let fn = tempname() * ".h5" - save(fn, OrderedDict("b"=>1, "a"=>2, "G/z"=>3, "G/f"=>4)) - - dat = load(fn; dict=OrderedDict()) - - @test all(keys(dat) .== ["b", "a", "G/z", "G/f"]) -end - + let fn = tempname() * ".h5" + h5open(fn, "w"; track_order=true) do io + fcpl = HDF5.get_create_properties(io) + @test fcpl.track_order + io["b"] = 1 + io["a"] = 2 + g = create_group(io, "G"; track_order=true) + gcpl = HDF5.get_create_properties(io["G"]) + @test gcpl.track_order + write(g, "z", 3) + write(g, "f", 4) + end + + dat = load(fn; dict=OrderedDict()) # `track_order` is inferred from `OrderedDict` + + @test all(keys(dat) .== ["b", "a", "G/z", "G/f"]) + + # issue #939 + h5open(fn, "r"; track_order=true) do io + @test HDF5.get_context_property(:file_create).track_order + @test all(keys(io) .== ["b", "a", "G"]) + @test HDF5.get_context_property(:group_create).track_order + @test HDF5.get_create_properties(io["G"]).track_order # inferred from file, created with `track_order=true` + @test all(keys(io["G"]) .== ["z", "f"]) + end + + h5open(fn, "r"; track_order=false) do io + @test !HDF5.get_context_property(:file_create).track_order + @test all(keys(io) .== ["G", "a", "b"]) + @test !HDF5.get_context_property(:group_create).track_order + @test HDF5.get_create_properties(io["G"]).track_order # inferred from file + @test all(keys(io["G"]) .== ["z", "f"]) + end + + h5open(fn, "r") do io + @test !HDF5.get_create_properties(io).track_order + @test all(keys(io) .== ["G", "a", "b"]) + @test HDF5.get_create_properties(io["G"]).track_order # inferred from file + @test all(keys(io["G"]) .== ["z", "f"]) + end + end + + let fn = tempname() * ".h5" + save(fn, OrderedDict("b" => 1, "a" => 2, "G/z" => 3, "G/f" => 4)) + + dat = load(fn; dict=OrderedDict()) + + @test all(keys(dat) .== ["b", "a", "G/z", "G/f"]) + end end # @testset track_order @static if HDF5.API.h5_get_libversion() >= v"1.10.5" - -@testset "h5f_get_dset_no_attrs_hint" begin - fn = tempname() - threshold = 300 - h5open( - fn, "w"; - libver_bounds = :latest, - meta_block_size = threshold - ) do f - HDF5.API.h5f_set_dset_no_attrs_hint(f, true) - @test 
HDF5.API.h5f_get_dset_no_attrs_hint(f) - f["test"] = 0x1 - # We expect that with the hint, the offset will actually be 300 - @test HDF5.API.h5d_get_offset(f["test"]) == threshold - end - @test filesize(fn) == threshold + 1 - h5open( - fn, "w"; - libver_bounds = :latest, - meta_block_size = threshold - ) do f - HDF5.API.h5f_set_dset_no_attrs_hint(f, false) - @test !HDF5.API.h5f_get_dset_no_attrs_hint(f) - f["test"] = 0x1 - # We expect that with the hint, the offset will be greater than 300 - @test HDF5.API.h5d_get_offset(f["test"]) > threshold - end - @test filesize(fn) > threshold + 1 -end - + @testset "h5f_get_dset_no_attrs_hint" begin + fn = tempname() + threshold = 300 + h5open(fn, "w"; libver_bounds=:latest, meta_block_size=threshold) do f + HDF5.API.h5f_set_dset_no_attrs_hint(f, true) + @test HDF5.API.h5f_get_dset_no_attrs_hint(f) + f["test"] = 0x1 + # We expect that with the hint, the offset will actually be 300 + @test HDF5.API.h5d_get_offset(f["test"]) == threshold + end + @test filesize(fn) == threshold + 1 + h5open(fn, "w"; libver_bounds=:latest, meta_block_size=threshold) do f + HDF5.API.h5f_set_dset_no_attrs_hint(f, false) + @test !HDF5.API.h5f_get_dset_no_attrs_hint(f) + f["test"] = 0x1 + # We expect that with the hint, the offset will be greater than 300 + @test HDF5.API.h5d_get_offset(f["test"]) > threshold + end + @test filesize(fn) > threshold + 1 + end end # @static if HDF5.API.h5_get_libversion() >= v"1.10.5" diff --git a/test/filter.jl b/test/filter.jl index 51f537c55..f7f90a732 100644 --- a/test/filter.jl +++ b/test/filter.jl @@ -3,177 +3,223 @@ using HDF5.Filters using Test using H5Zblosc, H5Zlz4, H5Zbzip2, H5Zzstd -@static if VERSION >= v"1.6" using H5Zbitshuffle end +@static if VERSION >= v"1.6" + using H5Zbitshuffle +end using HDF5.Filters: ExternalFilter, isavailable, isencoderenabled, isdecoderenabled @testset "filter" begin -# Create a new file -fn = tempname() - -# Create test data -data = rand(1000, 1000) - -# Open temp file for writing -f = h5open(fn, "w") - -# Create datasets -dsdeflate = create_dataset(f, "deflate", datatype(data), dataspace(data), - chunk=(100, 100), deflate=3) - -dsshufdef = create_dataset(f, "shufdef", datatype(data), dataspace(data), - chunk=(100, 100), shuffle=true, deflate=3) + # Create a new file + fn = tempname() -dsfiltdef = create_dataset(f, "filtdef", datatype(data), dataspace(data), - chunk=(100, 100), filters=Filters.Deflate(3)) + # Create test data + data = rand(1000, 1000) -dsfiltshufdef = create_dataset(f, "filtshufdef", datatype(data), dataspace(data), - chunk=(100, 100), filters=[Filters.Shuffle(), Filters.Deflate(3)]) + # Open temp file for writing + f = h5open(fn, "w") + # Create datasets + dsdeflate = create_dataset( + f, "deflate", datatype(data), dataspace(data); chunk=(100, 100), deflate=3 + ) -# Write data -write(dsdeflate, data) -write(dsshufdef, data) -write(dsfiltdef, data) -write(dsfiltshufdef, data) - -# Test compression filters - -compressionFilters = Dict( - "blosc" => BloscFilter, - "bzip2" => Bzip2Filter, - "lz4" => Lz4Filter, - "zstd" => ZstdFilter, -) - -for (name, filter) in compressionFilters - - ds = create_dataset( - f, name, datatype(data), dataspace(data), - chunk=(100,100), filters=filter() + dsshufdef = create_dataset( + f, + "shufdef", + datatype(data), + dataspace(data); + chunk=(100, 100), + shuffle=true, + deflate=3 ) - write(ds, data) - ds = create_dataset( - f, "shuffle+"*name, datatype(data), dataspace(data), - chunk=(100,100), filters=[Filters.Shuffle(), filter()] + dsfiltdef = 
create_dataset( + f, + "filtdef", + datatype(data), + dataspace(data); + chunk=(100, 100), + filters=Filters.Deflate(3) ) - write(ds, data) -end + dsfiltshufdef = create_dataset( + f, + "filtshufdef", + datatype(data), + dataspace(data); + chunk=(100, 100), + filters=[Filters.Shuffle(), Filters.Deflate(3)] + ) -ds = create_dataset( - f, "blosc_bitshuffle", datatype(data), dataspace(data), - chunk=(100,100), filters=BloscFilter(shuffle=H5Zblosc.BITSHUFFLE) -) - -write(ds, data) + # Write data + write(dsdeflate, data) + write(dsshufdef, data) + write(dsfiltdef, data) + write(dsfiltshufdef, data) -function extra_bitshuffle() + # Test compression filters - ds = create_dataset( - f, "bitshuffle_lz4", datatype(data), dataspace(data), - chunk=(100,100), filters=BitshuffleFilter(compressor=:lz4) + compressionFilters = Dict( + "blosc" => BloscFilter, + "bzip2" => Bzip2Filter, + "lz4" => Lz4Filter, + "zstd" => ZstdFilter, ) - write(ds, data) + for (name, filter) in compressionFilters + ds = create_dataset( + f, name, datatype(data), dataspace(data); chunk=(100, 100), filters=filter() + ) + write(ds, data) + + ds = create_dataset( + f, + "shuffle+" * name, + datatype(data), + dataspace(data); + chunk=(100, 100), + filters=[Filters.Shuffle(), filter()] + ) + write(ds, data) + end ds = create_dataset( - f, "bitshuffle_zstd", datatype(data), dataspace(data), - chunk=(100,100), filters=BitshuffleFilter(compressor=:zstd,comp_level=5) + f, + "blosc_bitshuffle", + datatype(data), + dataspace(data); + chunk=(100, 100), + filters=BloscFilter(; shuffle=H5Zblosc.BITSHUFFLE) ) - - write(ds, data) - - ds = create_dataset( - f, "bitshuffle_plain", datatype(data), dataspace(data), - chunk=(100,100), filters=BitshuffleFilter() - ) - + write(ds, data) -end -@static VERSION >= v"1.6" ? 
extra_bitshuffle() : nothing - -# Close and re-open file for reading -close(f) -f = h5open(fn) - -# Read datasets and test for equality -for name in keys(f) - ds = f[name] - @testset "$name" begin - @debug "Filter Dataset" HDF5.name(ds) - @test ds[] == data - filters = HDF5.get_create_properties(ds).filters - if startswith(name, "shuffle+") - @test filters[1] isa Shuffle - @test filters[2] isa compressionFilters[name[9:end]] - elseif haskey(compressionFilters, name) || name == "blosc_bitshuffle" - name = replace(name, r"_.*"=>"") - @test filters[1] isa compressionFilters[name] - end + function extra_bitshuffle() + ds = create_dataset( + f, + "bitshuffle_lz4", + datatype(data), + dataspace(data); + chunk=(100, 100), + filters=BitshuffleFilter(; compressor=:lz4) + ) + + write(ds, data) + + ds = create_dataset( + f, + "bitshuffle_zstd", + datatype(data), + dataspace(data); + chunk=(100, 100), + filters=BitshuffleFilter(; compressor=:zstd, comp_level=5) + ) + + write(ds, data) + + ds = create_dataset( + f, + "bitshuffle_plain", + datatype(data), + dataspace(data); + chunk=(100, 100), + filters=BitshuffleFilter() + ) + + write(ds, data) end -end -close(f) - -# Issue #896 and https://github.com/JuliaIO/HDF5.jl/issues/285#issuecomment-1002243321 -# Create an ExternalFilter from a Tuple -h5open(fn, "w") do f - data = rand(UInt8, 512, 16, 512) - # Tuple of integers should become an Unknown Filter - ds, dt = create_dataset(f, "data", data, chunk=(256,1,256), filter=(H5Z_FILTER_BZIP2, 0)) - # Tuple of Filters should get pushed into the pipeline one by one - dsfiltshufdef = create_dataset(f, "filtshufdef", datatype(data), dataspace(data), - chunk=(128, 4, 128), filters=(Filters.Shuffle(), Filters.Deflate(3))) - write(ds, data) - close(ds) - write(dsfiltshufdef, data) - close(dsfiltshufdef) -end + @static VERSION >= v"1.6" ? 
extra_bitshuffle() : nothing + + # Close and re-open file for reading + close(f) + f = h5open(fn) + + # Read datasets and test for equality + for name in keys(f) + ds = f[name] + @testset "$name" begin + @debug "Filter Dataset" HDF5.name(ds) + @test ds[] == data + filters = HDF5.get_create_properties(ds).filters + if startswith(name, "shuffle+") + @test filters[1] isa Shuffle + @test filters[2] isa compressionFilters[name[9:end]] + elseif haskey(compressionFilters, name) || name == "blosc_bitshuffle" + name = replace(name, r"_.*" => "") + @test filters[1] isa compressionFilters[name] + end + end + end -h5open(fn, "r") do f - @test f["data"][] == data - @test f["filtshufdef"][] == data -end + close(f) + + # Issue #896 and https://github.com/JuliaIO/HDF5.jl/issues/285#issuecomment-1002243321 + # Create an ExternalFilter from a Tuple + h5open(fn, "w") do f + data = rand(UInt8, 512, 16, 512) + # Tuple of integers should become an Unknown Filter + ds, dt = create_dataset( + f, "data", data; chunk=(256, 1, 256), filter=(H5Z_FILTER_BZIP2, 0) + ) + # Tuple of Filters should get pushed into the pipeline one by one + dsfiltshufdef = create_dataset( + f, + "filtshufdef", + datatype(data), + dataspace(data); + chunk=(128, 4, 128), + filters=(Filters.Shuffle(), Filters.Deflate(3)) + ) + write(ds, data) + close(ds) + write(dsfiltshufdef, data) + close(dsfiltshufdef) + end -# Filter Pipeline test for ExternalFilter -FILTERS_backup = copy(HDF5.Filters.FILTERS) -empty!(HDF5.Filters.FILTERS) -h5open(fn, "w") do f - data = collect(1:128) - filter = ExternalFilter(H5Z_FILTER_LZ4, 0, Cuint[0, 2, 4, 6, 8, 10], "Unknown LZ4", 0) - ds, dt = create_dataset(f, "data", data, chunk=(32,), filters=filter) - dcpl = HDF5.get_create_properties(ds) - pipeline = HDF5.Filters.FilterPipeline(dcpl) - @test pipeline[1].data == filter.data -end -merge!(HDF5.Filters.FILTERS, FILTERS_backup) - -@test HDF5.API.h5z_filter_avail(HDF5.API.H5Z_FILTER_DEFLATE) -@test HDF5.API.h5z_filter_avail(HDF5.API.H5Z_FILTER_FLETCHER32) -@test HDF5.API.h5z_filter_avail(HDF5.API.H5Z_FILTER_NBIT) -@test HDF5.API.h5z_filter_avail(HDF5.API.H5Z_FILTER_SCALEOFFSET) -@test HDF5.API.h5z_filter_avail(HDF5.API.H5Z_FILTER_SHUFFLE) -@static if Sys.iswindows() || VERSION ≤ v"1.6" - @test HDF5.API.h5z_filter_avail(HDF5.API.H5Z_FILTER_SZIP) -elseif HDF5.API.h5_get_libversion() >= v"1.12.0" - # These are missing in the macOS and Linux JLLs for h5 version 1.12+ - @test_broken HDF5.API.h5z_filter_avail(HDF5.API.H5Z_FILTER_SZIP) -end -@test HDF5.API.h5z_filter_avail(H5Z_FILTER_BZIP2) -@test HDF5.API.h5z_filter_avail(H5Z_FILTER_LZ4) -@test HDF5.API.h5z_filter_avail(H5Z_FILTER_ZSTD) -@test HDF5.API.h5z_filter_avail(H5Z_FILTER_BLOSC) -HDF5.API.h5z_unregister(H5Z_FILTER_LZ4) -HDF5.Filters.register_filter(H5Zlz4.Lz4Filter) -@test isavailable(H5Z_FILTER_LZ4) -@test isavailable(Lz4Filter) -@test isencoderenabled(H5Z_FILTER_LZ4) -@test isdecoderenabled(H5Z_FILTER_LZ4) -@test isencoderenabled(Lz4Filter) -@test isdecoderenabled(Lz4Filter) + h5open(fn, "r") do f + @test f["data"][] == data + @test f["filtshufdef"][] == data + end + # Filter Pipeline test for ExternalFilter + FILTERS_backup = copy(HDF5.Filters.FILTERS) + empty!(HDF5.Filters.FILTERS) + h5open(fn, "w") do f + data = collect(1:128) + filter = ExternalFilter( + H5Z_FILTER_LZ4, 0, Cuint[0, 2, 4, 6, 8, 10], "Unknown LZ4", 0 + ) + ds, dt = create_dataset(f, "data", data; chunk=(32,), filters=filter) + dcpl = HDF5.get_create_properties(ds) + pipeline = HDF5.Filters.FilterPipeline(dcpl) + @test pipeline[1].data == 
filter.data + end + merge!(HDF5.Filters.FILTERS, FILTERS_backup) + + @test HDF5.API.h5z_filter_avail(HDF5.API.H5Z_FILTER_DEFLATE) + @test HDF5.API.h5z_filter_avail(HDF5.API.H5Z_FILTER_FLETCHER32) + @test HDF5.API.h5z_filter_avail(HDF5.API.H5Z_FILTER_NBIT) + @test HDF5.API.h5z_filter_avail(HDF5.API.H5Z_FILTER_SCALEOFFSET) + @test HDF5.API.h5z_filter_avail(HDF5.API.H5Z_FILTER_SHUFFLE) + @static if Sys.iswindows() || VERSION ≤ v"1.6" + @test HDF5.API.h5z_filter_avail(HDF5.API.H5Z_FILTER_SZIP) + elseif HDF5.API.h5_get_libversion() >= v"1.12.0" + # These are missing in the macOS and Linux JLLs for h5 version 1.12+ + @test_broken HDF5.API.h5z_filter_avail(HDF5.API.H5Z_FILTER_SZIP) + end + @test HDF5.API.h5z_filter_avail(H5Z_FILTER_BZIP2) + @test HDF5.API.h5z_filter_avail(H5Z_FILTER_LZ4) + @test HDF5.API.h5z_filter_avail(H5Z_FILTER_ZSTD) + @test HDF5.API.h5z_filter_avail(H5Z_FILTER_BLOSC) + HDF5.API.h5z_unregister(H5Z_FILTER_LZ4) + HDF5.Filters.register_filter(H5Zlz4.Lz4Filter) + @test isavailable(H5Z_FILTER_LZ4) + @test isavailable(Lz4Filter) + @test isencoderenabled(H5Z_FILTER_LZ4) + @test isdecoderenabled(H5Z_FILTER_LZ4) + @test isencoderenabled(Lz4Filter) + @test isdecoderenabled(Lz4Filter) end # @testset "filter" diff --git a/test/filters/FilterTestUtils.jl b/test/filters/FilterTestUtils.jl index 86c58cf97..4b9ee23c5 100644 --- a/test/filters/FilterTestUtils.jl +++ b/test/filters/FilterTestUtils.jl @@ -13,7 +13,7 @@ using Test export test_filter -function test_filter_init(; cd_values = Cuint[], data = ones(UInt8, 1024)) +function test_filter_init(; cd_values=Cuint[], data=ones(UInt8, 1024)) flags = Cuint(0) nbytes = sizeof(data) buf_size = Ref(Csize_t(sizeof(data))) @@ -24,7 +24,14 @@ function test_filter_init(; cd_values = Cuint[], data = ones(UInt8, 1024)) return flags, cd_values, nbytes, buf_size, buf end -function test_filter_compress!(filter_func, flags::Cuint, cd_values::Vector{Cuint}, nbytes::Integer, buf_size::Ref{Csize_t}, buf::Ref{Ptr{Cvoid}}) +function test_filter_compress!( + filter_func, + flags::Cuint, + cd_values::Vector{Cuint}, + nbytes::Integer, + buf_size::Ref{Csize_t}, + buf::Ref{Ptr{Cvoid}} +) nbytes = Csize_t(nbytes) cd_nelmts = Csize_t(length(cd_values)) GC.@preserve flags cd_nelmts cd_values nbytes buf_size buf begin @@ -44,7 +51,14 @@ function test_filter_compress!(filter_func, flags::Cuint, cd_values::Vector{Cuin return ret_code end -function test_filter_decompress!(filter_func, flags::Cuint, cd_values::Vector{Cuint}, nbytes::Integer, buf_size::Ref{Csize_t}, buf::Ref{Ptr{Cvoid}}) +function test_filter_decompress!( + filter_func, + flags::Cuint, + cd_values::Vector{Cuint}, + nbytes::Integer, + buf_size::Ref{Csize_t}, + buf::Ref{Ptr{Cvoid}} +) nbytes = Csize_t(nbytes) cd_nelmts = Csize_t(length(cd_values)) flags |= UInt32(API.H5Z_FLAG_REVERSE) @@ -54,7 +68,7 @@ function test_filter_decompress!(filter_func, flags::Cuint, cd_values::Vector{Cu cd_nelmts, pointer(cd_values), Csize_t(nbytes), - Base.unsafe_convert(Ptr{Csize_t},buf_size), + Base.unsafe_convert(Ptr{Csize_t}, buf_size), Base.unsafe_convert(Ptr{Ptr{Cvoid}}, buf) ) @debug "Decompression:" ret_code buf_size[] @@ -66,15 +80,21 @@ function test_filter_cleanup!(buf::Ref{Ptr{Cvoid}}) Libc.free(buf[]) end -function test_filter(filter_func; cd_values::Vector{Cuint} = Cuint[], data = ones(UInt8, 1024)) - flags, cd_values, nbytes, buf_size, buf = test_filter_init(; cd_values = cd_values, data = data) +function test_filter(filter_func; cd_values::Vector{Cuint}=Cuint[], data=ones(UInt8, 1024)) + flags, cd_values, nbytes, 
buf_size, buf = test_filter_init(; + cd_values=cd_values, data=data + ) nbytes_compressed, nbytes_decompressed = 0, 0 try - nbytes_compressed = test_filter_compress!(filter_func, flags, cd_values, nbytes, buf_size, buf) - nbytes_decompressed = test_filter_decompress!(filter_func, flags, cd_values, nbytes_compressed, buf_size, buf) + nbytes_compressed = test_filter_compress!( + filter_func, flags, cd_values, nbytes, buf_size, buf + ) + nbytes_decompressed = test_filter_decompress!( + filter_func, flags, cd_values, nbytes_compressed, buf_size, buf + ) if nbytes_decompressed > 0 # ret_code is the number of bytes out - round_trip_data = unsafe_wrap(Array,Ptr{UInt8}(buf[]), nbytes_decompressed) + round_trip_data = unsafe_wrap(Array, Ptr{UInt8}(buf[]), nbytes_decompressed) @debug "Is the data the same after a roundtrip?" data == round_trip_data end catch err @@ -86,19 +106,19 @@ function test_filter(filter_func; cd_values::Vector{Cuint} = Cuint[], data = one return nbytes_compressed, nbytes_decompressed end -function test_bzip2_filter(data = ones(UInt8, 1024)) +function test_bzip2_filter(data=ones(UInt8, 1024)) cd_values = Cuint[8] - test_filter(H5Z_filter_bzip2; cd_values = cd_values, data = data) + test_filter(H5Z_filter_bzip2; cd_values=cd_values, data=data) end -function test_lz4_filter(data = ones(UInt8, 1024)) +function test_lz4_filter(data=ones(UInt8, 1024)) cd_values = Cuint[1024] - test_filter(H5Z_filter_lz4; cd_values = cd_values, data = data) + test_filter(H5Z_filter_lz4; cd_values=cd_values, data=data) end -function test_zstd_filter(data = ones(UInt8, 1024)) +function test_zstd_filter(data=ones(UInt8, 1024)) cd_values = Cuint[3] # aggression - test_filter(H5Z_filter_zstd; cd_values = cd_values, data = data) + test_filter(H5Z_filter_zstd; cd_values=cd_values, data=data) end function __init__() @@ -113,4 +133,4 @@ function __init__() end end -end \ No newline at end of file +end diff --git a/test/gc.jl b/test/gc.jl index c34eede9f..8ec38d1fc 100644 --- a/test/gc.jl +++ b/test/gc.jl @@ -2,12 +2,15 @@ using HDF5 using Test macro gcvalid(args...) - Expr(:block, quote - GC.enable(true) - GC.gc() - GC.enable(false) - end, - [:(@test HDF5.isvalid($(esc(x)))) for x in args]...) + Expr( + :block, + quote + GC.enable(true) + GC.gc() + GC.enable(false) + end, + [:(@test HDF5.isvalid($(esc(x)))) for x in args]... 
+ ) end macro closederror(x) @@ -16,59 +19,58 @@ macro closederror(x) $(esc(x)) catch e isa(e, ErrorException) || rethrow(e) - e.msg == "File or object has been closed" || error("Attempt to access closed object did not throw") + e.msg == "File or object has been closed" || + error("Attempt to access closed object did not throw") end end end @testset "gc" begin + GC.enable(false) + fn = tempname() + for i in 1:10 + file = h5open(fn, "w") + memtype_id = HDF5.API.h5t_create(HDF5.API.H5T_COMPOUND, 2 * sizeof(Float64)) + HDF5.API.h5t_insert(memtype_id, "real", 0, HDF5.hdf5_type_id(Float64)) + HDF5.API.h5t_insert(memtype_id, "imag", sizeof(Float64), HDF5.hdf5_type_id(Float64)) + dt = HDF5.Datatype(memtype_id) + commit_datatype(file, "dt", dt) + ds = dataspace((2,)) + d = create_dataset(file, "d", dt, ds) + g = create_group(file, "g") + a = create_attribute(file, "a", dt, ds) + @gcvalid dt ds d g a + close(file) -GC.enable(false) -fn = tempname() -for i = 1:10 - file = h5open(fn, "w") - memtype_id = HDF5.API.h5t_create(HDF5.API.H5T_COMPOUND, 2*sizeof(Float64)) - HDF5.API.h5t_insert(memtype_id, "real", 0, HDF5.hdf5_type_id(Float64)) - HDF5.API.h5t_insert(memtype_id, "imag", sizeof(Float64), HDF5.hdf5_type_id(Float64)) - dt = HDF5.Datatype(memtype_id) - commit_datatype(file, "dt", dt) - ds = dataspace((2,)) - d = create_dataset(file, "d", dt, ds) - g = create_group(file, "g") - a = create_attribute(file, "a", dt, ds) - @gcvalid dt ds d g a - close(file) - - @closederror read(d) - for obj in (d, g) - @closederror read_attribute(obj, "a") - @closederror write_attribute(obj, "a", 1) + @closederror read(d) + for obj in (d, g) + @closederror read_attribute(obj, "a") + @closederror write_attribute(obj, "a", 1) + end + for obj in (g, file) + @closederror open_dataset(obj, "d") + @closederror read_dataset(obj, "d") + @closederror write_dataset(obj, "d", 1) + @closederror read(obj, "x") + @closederror write(obj, "x", "y") + end end - for obj in (g, file) - @closederror open_dataset(obj, "d") - @closederror read_dataset(obj, "d") - @closederror write_dataset(obj, "d", 1) - @closederror read(obj, "x") - @closederror write(obj, "x", "y") + for i in 1:10 + file = h5open(fn, "r") + dt = file["dt"] + d = file["d"] + ds = dataspace(d) + g = file["g"] + a = attributes(file)["a"] + @gcvalid dt ds d g a + close(file) end -end -for i = 1:10 - file = h5open(fn, "r") - dt = file["dt"] - d = file["d"] - ds = dataspace(d) - g = file["g"] - a = attributes(file)["a"] - @gcvalid dt ds d g a - close(file) -end -GC.enable(true) - -let plist = HDF5.init!(HDF5.FileAccessProperties()) # related to issue #620 - HDF5.API.h5p_close(plist) - @test_nowarn finalize(plist) -end + GC.enable(true) -rm(fn) + let plist = HDF5.init!(HDF5.FileAccessProperties()) # related to issue #620 + HDF5.API.h5p_close(plist) + @test_nowarn finalize(plist) + end + rm(fn) end # testset gc diff --git a/test/memtest.jl b/test/memtest.jl index 30e8d7141..af1be7150 100644 --- a/test/memtest.jl +++ b/test/memtest.jl @@ -9,17 +9,19 @@ macro memtest(ex) $ex end # HDF5.h5_garbage_collect() - GC.gc(); print(rpad(i, 8)); run(PRINT_MEMORY) + GC.gc() + print(rpad(i, 8)) + run(PRINT_MEMORY) end end end @memtest h5open("/tmp/memtest.h5", "w") do file - dset = create_dataset(file, "A", datatype(DATA), dataspace(DATA), chunk=(100,)) + dset = create_dataset(file, "A", datatype(DATA), dataspace(DATA); chunk=(100,)) dset[:] = DATA[:] end @memtest h5open("/tmp/memtest.h5", "w") do file file["A", chunk=(100,)] = DATA[:] end @memtest h5open("/tmp/memtest.h5", "r") do file - 
file["A","dxpl_mpio", 0] + file["A", "dxpl_mpio", 0] end diff --git a/test/mmap.jl b/test/mmap.jl index 6b4394026..5c2af11b7 100644 --- a/test/mmap.jl +++ b/test/mmap.jl @@ -3,57 +3,59 @@ using Test @testset "mmap" begin -# Create a new file -fn = tempname() -f = h5open(fn, "w") -@test isopen(f) + # Create a new file + fn = tempname() + f = h5open(fn, "w") + @test isopen(f) -# Create two datasets, one with late allocation (the default for contiguous -# datasets) and the other with explicit early allocation. -hdf5_A = create_dataset(f, "A", datatype(Int64), dataspace(3,3)) -hdf5_B = create_dataset(f, "B", datatype(Float64), dataspace(3,3); - alloc_time = HDF5.API.H5D_ALLOC_TIME_EARLY) -# The late case cannot be mapped yet. -@test_throws ErrorException("Error getting offset") HDF5.readmmap(f["A"]) -# Then write and fill dataset A, making it mappable. B was filled with 0.0 at -# creation. -A = rand(Int64,3,3) -hdf5_A[:,:] = A -flush(f) -close(f) -# Read HDF5 file & MMAP -f = h5open(fn,"r") -A_mmaped = HDF5.readmmap(f["A"]) -@test all(A .== A_mmaped) -@test all(iszero, HDF5.readmmap(f["B"])) -# Check that it is read only -@test_throws ReadOnlyMemoryError A_mmaped[1,1] = 33 -close(f) -# Now check if we can write -f = h5open(fn,"r+") -A_mmaped = HDF5.readmmap(f["A"]) -A_mmaped[1,1] = 33 -close(f) - -# issue #863 - fix mmapping complex arrays -fn = tempname() -f = h5open(fn, "w") -A = rand(ComplexF32, 5, 5) -f["A"] = A -close(f) -f = h5open(fn, "r+") -complex_support = HDF5.COMPLEX_SUPPORT[] -# Complex arrays can be mmapped when complex support is enabled -complex_support || HDF5.enable_complex_support() -@test A == read(f["A"]) -@test A == HDF5.readmmap(f["A"]) -# But mmapping should throw an error when support is disabled -HDF5.disable_complex_support() -At = [(r = real(c), i = imag(c)) for c in A] -@test read(f["A"]) == At # readable as array of NamedTuples -@test_throws ErrorException("Cannot mmap datasets of type $(eltype(At))") HDF5.readmmap(f["A"]) -close(f) -# Restore complex support state -complex_support && HDF5.enable_complex_support() + # Create two datasets, one with late allocation (the default for contiguous + # datasets) and the other with explicit early allocation. + hdf5_A = create_dataset(f, "A", datatype(Int64), dataspace(3, 3)) + hdf5_B = create_dataset( + f, "B", datatype(Float64), dataspace(3, 3); alloc_time=HDF5.API.H5D_ALLOC_TIME_EARLY + ) + # The late case cannot be mapped yet. + @test_throws ErrorException("Error getting offset") HDF5.readmmap(f["A"]) + # Then write and fill dataset A, making it mappable. B was filled with 0.0 at + # creation. 
+ A = rand(Int64, 3, 3) + hdf5_A[:, :] = A + flush(f) + close(f) + # Read HDF5 file & MMAP + f = h5open(fn, "r") + A_mmaped = HDF5.readmmap(f["A"]) + @test all(A .== A_mmaped) + @test all(iszero, HDF5.readmmap(f["B"])) + # Check that it is read only + @test_throws ReadOnlyMemoryError A_mmaped[1, 1] = 33 + close(f) + # Now check if we can write + f = h5open(fn, "r+") + A_mmaped = HDF5.readmmap(f["A"]) + A_mmaped[1, 1] = 33 + close(f) + # issue #863 - fix mmapping complex arrays + fn = tempname() + f = h5open(fn, "w") + A = rand(ComplexF32, 5, 5) + f["A"] = A + close(f) + f = h5open(fn, "r+") + complex_support = HDF5.COMPLEX_SUPPORT[] + # Complex arrays can be mmapped when complex support is enabled + complex_support || HDF5.enable_complex_support() + @test A == read(f["A"]) + @test A == HDF5.readmmap(f["A"]) + # But mmapping should throw an error when support is disabled + HDF5.disable_complex_support() + At = [(r=real(c), i=imag(c)) for c in A] + @test read(f["A"]) == At # readable as array of NamedTuples + @test_throws ErrorException("Cannot mmap datasets of type $(eltype(At))") HDF5.readmmap( + f["A"] + ) + close(f) + # Restore complex support state + complex_support && HDF5.enable_complex_support() end # testset diff --git a/test/mpio.jl b/test/mpio.jl index 7167a6dfe..801f50853 100644 --- a/test/mpio.jl +++ b/test/mpio.jl @@ -3,77 +3,88 @@ using HDF5 using Test @testset "mpio" begin + HDF5.FileAccessProperties() do fapl + Drivers.set_driver!(fapl, Drivers.Core()) + end -HDF5.FileAccessProperties() do fapl - Drivers.set_driver!(fapl, Drivers.Core()) -end + MPI.Init() -MPI.Init() + info = MPI.Info() + comm = MPI.COMM_WORLD -info = MPI.Info() -comm = MPI.COMM_WORLD + nprocs = MPI.Comm_size(comm) + myrank = MPI.Comm_rank(comm) -nprocs = MPI.Comm_size(comm) -myrank = MPI.Comm_rank(comm) + @test HDF5.has_parallel() -@test HDF5.has_parallel() + # Check that serial drivers are still there after loading MPI (#928) + @test Drivers.Core ∈ values(Drivers.DRIVERS) + @test Drivers.POSIX ∈ values(Drivers.DRIVERS) -# Check that serial drivers are still there after loading MPI (#928) -@test Drivers.Core ∈ values(Drivers.DRIVERS) -@test Drivers.POSIX ∈ values(Drivers.DRIVERS) + let fileprop = HDF5.FileAccessProperties() + fileprop.driver = HDF5.Drivers.MPIO(comm, info) + driver = fileprop.driver + h5comm = driver.comm + h5info = driver.info -let fileprop = HDF5.FileAccessProperties() - fileprop.driver = HDF5.Drivers.MPIO(comm, info) - driver = fileprop.driver - h5comm = driver.comm - h5info = driver.info + # check that the two communicators point to the same group + if isdefined(MPI, :Comm_compare) # requires recent MPI.jl version + @test MPI.Comm_compare(comm, h5comm) === MPI.CONGRUENT + end + end - # check that the two communicators point to the same group - if isdefined(MPI, :Comm_compare) # requires recent MPI.jl version - @test MPI.Comm_compare(comm, h5comm) === MPI.CONGRUENT + # open file in parallel and write dataset + fn = MPI.bcast(tempname(), 0, comm) + A = [myrank + i for i in 1:10] + h5open(fn, "w", comm, info) do f + @test isopen(f) + g = create_group(f, "mygroup") + dset = create_dataset( + g, + "B", + datatype(Int64), + dataspace(10, nprocs); + chunk=(10, 1), + dxpl_mpio=:collective + ) + dset[:, myrank + 1] = A end -end - -# open file in parallel and write dataset -fn = MPI.bcast(tempname(), 0, comm) -A = [myrank + i for i = 1:10] -h5open(fn, "w", comm, info) do f - @test isopen(f) - g = create_group(f, "mygroup") - dset = create_dataset(g, "B", datatype(Int64), dataspace(10, nprocs), 
chunk=(10, 1), dxpl_mpio=:collective) - dset[:, myrank + 1] = A -end - -MPI.Barrier(comm) -h5open(fn, comm) do f # default: opened in read mode, with default MPI.Info() - @test isopen(f) - @test keys(f) == ["mygroup"] - - B = read(f, "mygroup/B", dxpl_mpio=:collective) - @test !isempty(B) - @test A == vec(B[:, myrank + 1]) - B = f["mygroup/B", dxpl_mpio=:collective] - @test !isempty(B) - @test A == vec(B[:, myrank + 1]) -end + MPI.Barrier(comm) + h5open(fn, comm) do f # default: opened in read mode, with default MPI.Info() + @test isopen(f) + @test keys(f) == ["mygroup"] -MPI.Barrier(comm) + B = read(f, "mygroup/B"; dxpl_mpio=:collective) + @test !isempty(B) + @test A == vec(B[:, myrank + 1]) -B = h5read(fn, "mygroup/B", driver = HDF5.Drivers.MPIO(comm, info), dxpl_mpio=:collective) -@test A == vec(B[:, myrank + 1]) + B = f["mygroup/B", dxpl_mpio=:collective] + @test !isempty(B) + @test A == vec(B[:, myrank + 1]) + end + + MPI.Barrier(comm) -MPI.Barrier(comm) + B = h5read(fn, "mygroup/B"; driver=HDF5.Drivers.MPIO(comm, info), dxpl_mpio=:collective) + @test A == vec(B[:, myrank + 1]) -B = h5read(fn, "mygroup/B", (:, myrank + 1), driver=HDF5.Drivers.MPIO(comm, info), dxpl_mpio=:collective) -@test A == vec(B) + MPI.Barrier(comm) -# we need to close HDF5 and finalize the info object before finalizing MPI -finalize(info) -HDF5.API.h5_close() + B = h5read( + fn, + "mygroup/B", + (:, myrank + 1); + driver=HDF5.Drivers.MPIO(comm, info), + dxpl_mpio=:collective + ) + @test A == vec(B) -MPI.Barrier(MPI.COMM_WORLD) + # we need to close HDF5 and finalize the info object before finalizing MPI + finalize(info) + HDF5.API.h5_close() -MPI.Finalize() + MPI.Barrier(MPI.COMM_WORLD) + MPI.Finalize() end # testset mpio diff --git a/test/nonallocating.jl b/test/nonallocating.jl index ff00699f3..c0f896f8d 100644 --- a/test/nonallocating.jl +++ b/test/nonallocating.jl @@ -19,20 +19,20 @@ using Test v = h5f["data"][1:4, 1:4] buffer = similar(v) - @test size(buffer) == (4,4) + @test size(buffer) == (4, 4) copyto!(buffer, v) @test isequal(buffer, @view(data[1:4, 1:4])) @test size(similar(h5f["data"], Int16)) == size(h5f["data"]) - @test size(similar(h5f["data"], 5,6)) == (5, 6) - @test size(similar(h5f["data"], Int16, 8,7)) == (8,7) - @test size(similar(h5f["data"], Int16, 8,7; normalize = false)) == (8,7) - @test_broken size(similar(h5f["data"], Int8, 8,7)) == (8,7) - - @test size(similar(h5f["data"], (5,6))) == (5, 6) - @test size(similar(h5f["data"], Int16, (8,7))) == (8,7) - @test size(similar(h5f["data"], Int16, (8,7); normalize = false)) == (8,7) - @test size(similar(h5f["data"], Int16, 0x8,0x7; normalize = false)) == (8,7) + @test size(similar(h5f["data"], 5, 6)) == (5, 6) + @test size(similar(h5f["data"], Int16, 8, 7)) == (8, 7) + @test size(similar(h5f["data"], Int16, 8, 7; normalize=false)) == (8, 7) + @test_broken size(similar(h5f["data"], Int8, 8, 7)) == (8, 7) + + @test size(similar(h5f["data"], (5, 6))) == (5, 6) + @test size(similar(h5f["data"], Int16, (8, 7))) == (8, 7) + @test size(similar(h5f["data"], Int16, (8, 7); normalize=false)) == (8, 7) + @test size(similar(h5f["data"], Int16, 0x8, 0x7; normalize=false)) == (8, 7) end rm(fn) diff --git a/test/objects.jl b/test/objects.jl index c197f83e3..d68dd749a 100644 --- a/test/objects.jl +++ b/test/objects.jl @@ -64,9 +64,7 @@ using HDF5.API @test oninfo.hdr.mesg.present > 0 end - oinfo = API.h5o_get_info_by_idx( - h5f, ".", API.H5_INDEX_NAME, API.H5_ITER_INC, 0 - ) + oinfo = API.h5o_get_info_by_idx(h5f, ".", API.H5_INDEX_NAME, API.H5_ITER_INC, 0) @test 
oinfo.num_attrs == 0 @static if HDF5.API.h5_get_libversion() >= v"1.12.0" @@ -83,6 +81,5 @@ using HDF5.API @test oninfo.hdr.space.free > 0 @test oninfo.hdr.mesg.present > 0 end - end end diff --git a/test/plain.jl b/test/plain.jl index 313db01c6..aaf0aa91b 100644 --- a/test/plain.jl +++ b/test/plain.jl @@ -5,896 +5,1005 @@ using Test gatherf(dst_buf, dst_buf_bytes_used, op_data) = HDF5.API.herr_t(0) gatherf_bad(dst_buf, dst_buf_bytes_used, op_data) = HDF5.API.herr_t(-1) -gatherf_data(dst_buf, dst_buf_bytes_used, op_data) = HDF5.API.herr_t((op_data == 9)-1) - +gatherf_data(dst_buf, dst_buf_bytes_used, op_data) = HDF5.API.herr_t((op_data == 9) - 1) function scatterf(src_buf, src_buf_bytes_used, op_data) - A = [1,2,3,4] + A = [1, 2, 3, 4] unsafe_store!(src_buf, pointer(A)) unsafe_store!(src_buf_bytes_used, sizeof(A)) return HDF5.API.herr_t(0) end scatterf_bad(src_buf, src_buf_bytes_used, op_data) = HDF5.API.herr_t(-1) function scatterf_data(src_buf, src_buf_bytes_used, op_data) - A = [1,2,3,4] + A = [1, 2, 3, 4] unsafe_store!(src_buf, pointer(A)) unsafe_store!(src_buf_bytes_used, sizeof(A)) - return HDF5.API.herr_t((op_data == 9)-1) + return HDF5.API.herr_t((op_data == 9) - 1) end @testset "plain" begin -# Create a new file -fn = tempname() -f = h5open(fn, "w") -@test isopen(f) -# Write scalars -f["Float64"] = 3.2 -f["Int16"] = Int16(4) -# compression of empty array (issue #246) -f["compressedempty", shuffle=true, deflate=4] = Int64[] -# compression of zero-dimensional array (pull request #445) -f["compressed_zerodim", shuffle=true, deflate=4] = fill(Int32(42), ()) -f["bloscempty", blosc=4] = Int64[] -# test creation of an anonymouse dataset -f[nothing] = 5 -# Create arrays of different types -A = randn(3, 5) -write(f, "Afloat64", convert(Matrix{Float64}, A)) -write(f, "Afloat32", convert(Matrix{Float32}, A)) -Ai = rand(1:20, 2, 4) -write(f, "Aint8", convert(Matrix{Int8}, Ai)) -f["Aint16"] = convert(Matrix{Int16}, Ai) -write(f, "Aint32", convert(Matrix{Int32}, Ai)) -write(f, "Aint64", convert(Matrix{Int64}, Ai)) -write(f, "Auint8", convert(Matrix{UInt8}, Ai)) -write(f, "Auint16", convert(Matrix{UInt16}, Ai)) - -# test writing multiple variable (issue #599) -write(f, "Auint32", convert(Matrix{UInt32}, Ai), "Auint64", convert(Matrix{UInt64}, Ai)) - -# Arrays of bools (pull request #540) -Abool = [false, true, false] -write(f, "Abool", Abool) - -salut = "Hi there" -ucode = "uniçº∂e" -write(f, "salut", salut) -write(f, "ucode", ucode) -# Manually write a variable-length string (issue #187) -let - dtype = HDF5.Datatype(HDF5.API.h5t_copy(HDF5.API.H5T_C_S1)) - HDF5.API.h5t_set_size(dtype, HDF5.API.H5T_VARIABLE) - HDF5.API.h5t_set_cset(dtype, HDF5.cset(typeof(salut))) - dspace = dataspace(salut) - dset = create_dataset(f, "salut-vlen", dtype, dspace) - GC.@preserve salut begin - HDF5.API.h5d_write(dset, dtype, HDF5.API.H5S_ALL, HDF5.API.H5S_ALL, HDF5.API.H5P_DEFAULT, [pointer(salut)]) + # Create a new file + fn = tempname() + f = h5open(fn, "w") + @test isopen(f) + # Write scalars + f["Float64"] = 3.2 + f["Int16"] = Int16(4) + # compression of empty array (issue #246) + f["compressedempty", shuffle=true, deflate=4] = Int64[] + # compression of zero-dimensional array (pull request #445) + f["compressed_zerodim", shuffle=true, deflate=4] = fill(Int32(42), ()) + f["bloscempty", blosc=4] = Int64[] + # test creation of an anonymouse dataset + f[nothing] = 5 + # Create arrays of different types + A = randn(3, 5) + write(f, "Afloat64", convert(Matrix{Float64}, A)) + write(f, "Afloat32", 
convert(Matrix{Float32}, A)) + Ai = rand(1:20, 2, 4) + write(f, "Aint8", convert(Matrix{Int8}, Ai)) + f["Aint16"] = convert(Matrix{Int16}, Ai) + write(f, "Aint32", convert(Matrix{Int32}, Ai)) + write(f, "Aint64", convert(Matrix{Int64}, Ai)) + write(f, "Auint8", convert(Matrix{UInt8}, Ai)) + write(f, "Auint16", convert(Matrix{UInt16}, Ai)) + + # test writing multiple variable (issue #599) + write(f, "Auint32", convert(Matrix{UInt32}, Ai), "Auint64", convert(Matrix{UInt64}, Ai)) + + # Arrays of bools (pull request #540) + Abool = [false, true, false] + write(f, "Abool", Abool) + + salut = "Hi there" + ucode = "uniçº∂e" + write(f, "salut", salut) + write(f, "ucode", ucode) + # Manually write a variable-length string (issue #187) + let + dtype = HDF5.Datatype(HDF5.API.h5t_copy(HDF5.API.H5T_C_S1)) + HDF5.API.h5t_set_size(dtype, HDF5.API.H5T_VARIABLE) + HDF5.API.h5t_set_cset(dtype, HDF5.cset(typeof(salut))) + dspace = dataspace(salut) + dset = create_dataset(f, "salut-vlen", dtype, dspace) + GC.@preserve salut begin + HDF5.API.h5d_write( + dset, + dtype, + HDF5.API.H5S_ALL, + HDF5.API.H5S_ALL, + HDF5.API.H5P_DEFAULT, + [pointer(salut)] + ) + end + end + # Arrays of strings + salut_split = ["Hi", "there"] + write(f, "salut_split", salut_split) + salut_2d = ["Hi" "there"; "Salut" "friend"] + write(f, "salut_2d", salut_2d) + # Arrays of strings as vlen + vlen = HDF5.VLen(salut_split) + write_dataset(f, "salut_vlen", vlen) + # Arrays of scalars as vlen + vlen_int = [[3], [1], [4]] + vleni = HDF5.VLen(vlen_int) + write_dataset(f, "int_vlen", vleni) + write_attribute(f["int_vlen"], "vlen_attr", vleni) + # Empty arrays + empty = UInt32[] + write(f, "empty", empty) + write(f, nothing, empty) + # Empty strings + empty_string = "" + write(f, "empty_string", empty_string) + # Empty array of strings + empty_string_array = String[] + write(f, "empty_string_array", empty_string_array) + # Array of empty string + empty_array_of_strings = [""] + write(f, "empty_array_of_strings", empty_array_of_strings) + # attributes + species = [["N", "C"]; ["A", "B"]] + attributes(f)["species"] = species + @test read(attributes(f)["species"]) == species + @test attributes(f)["species"][] == species + C∞ = 42 + attributes(f)["C∞"] = C∞ + dset = f["salut"] + @test !isempty(dset) + label = "This is a string" + attributes(dset)["typeinfo"] = label + @test read(attributes(dset)["typeinfo"]) == label + @test attributes(dset)["typeinfo"][] == label + @test dset["typeinfo"][] == label + close(dset) + # Scalar reference values in attributes + attributes(f)["ref_test"] = HDF5.Reference(f, "empty_array_of_strings") + @test read(attributes(f)["ref_test"]) === HDF5.Reference(f, "empty_array_of_strings") + # Group + g = create_group(f, "mygroup") + # Test dataset with compression + R = rand(1:20, 20, 40) + g["CompressedA", chunk=(5, 6), shuffle=true, deflate=9] = R + g["BloscA", chunk=(5, 6), shuffle=true, blosc=9] = R + close(g) + # Copy group containing dataset + copy_object(f, "mygroup", f, "mygroup2") + # Copy dataset + g = create_group(f, "mygroup3") + copy_object(f["mygroup/CompressedA"], g, "CompressedA") + copy_object(f["mygroup/BloscA"], g, "BloscA") + close(g) + # Writing hyperslabs + dset = create_dataset( + f, "slab", datatype(Float64), dataspace(20, 20, 5); chunk=(5, 5, 1) + ) + Xslab = randn(20, 20, 5) + for i in 1:5 + dset[:, :, i] = Xslab[:, :, i] + end + dset = create_dataset( + f, nothing, datatype(Float64), dataspace(20, 20, 5); chunk=(5, 5, 1) + ) + dset[:, :, :] = 3.0 + # More complex hyperslab and assignment with 
"incorrect" types (issue #34) + d = create_dataset(f, "slab2", datatype(Float64), ((10, 20), (100, 200)); chunk=(1, 1)) + d[:, :] = 5 + d[1, 1] = 4 + # 1d indexing + d = create_dataset(f, "slab3", datatype(Int), ((10,), (-1,)); chunk=(5,)) + @test d[:] == zeros(Int, 10) + d[3:5] = 3:5 + # Create a dataset designed to be deleted + f["deleteme"] = 17.2 + close(f) + @test !isopen(f) + # Test the h5read/write interface, with attributes + W = copy(reshape(1:120, 15, 8)) + Wa = Dict("a" => 1, "b" => 2) + h5write(fn, "newgroup/W", W) + h5writeattr(fn, "newgroup/W", Wa) + + # Read the file back in + fr = h5open(fn) + x = read(fr, "Float64") + @test x == 3.2 && isa(x, Float64) + y = read(fr, "Int16") + @test y == 4 && isa(y, Int16) + zerodim = read(fr, "compressed_zerodim") + @test zerodim == 42 && isa(zerodim, Int32) + bloscempty = read(fr, "bloscempty") + @test bloscempty == Int64[] && isa(bloscempty, Vector{Int64}) + Af32 = read(fr, "Afloat32") + @test convert(Matrix{Float32}, A) == Af32 + @test eltype(Af32) == Float32 + Af64 = read(fr, "Afloat64") + @test convert(Matrix{Float64}, A) == Af64 + @test eltype(Af64) == Float64 + @test eltype(fr["Afloat64"]) == Float64 # issue 167 + Ai8 = read(fr, "Aint8") + @test Ai == Ai8 + @test eltype(Ai8) == Int8 + Ai16 = read(fr, "Aint16") + @test Ai == Ai16 + @test eltype(Ai16) == Int16 + Ai32 = read(fr, "Aint32") + @test Ai == Ai32 + @test eltype(Ai32) == Int32 + Ai64 = read(fr, "Aint64") + @test Ai == Ai64 + @test eltype(Ai64) == Int64 + Ai8 = read(fr, "Auint8") + @test Ai == Ai8 + @test eltype(Ai8) == UInt8 + Ai16 = read(fr, "Auint16") + @test Ai == Ai16 + @test eltype(Ai16) == UInt16 + Ai32 = read(fr, "Auint32") + @test Ai == Ai32 + @test eltype(Ai32) == UInt32 + Ai64 = read(fr, "Auint64") + @test Ai == Ai64 + @test eltype(Ai64) == UInt64 + + Abool_read = read(fr, "Abool") + @test Abool_read == Abool + @test eltype(Abool_read) == Bool + + salutr = read(fr, "salut") + @test salut == salutr + salutr = read(fr, "salut-vlen") + @test salut == salutr + ucoder = read(fr, "ucode") + @test ucode == ucoder + salut_splitr = read(fr, "salut_split") + @test salut_splitr == salut_split + salut_2dr = read(fr, "salut_2d") + @test salut_2d == salut_2dr + salut_vlenr = read(fr, "salut_vlen") + @test HDF5.vlen_get_buf_size(fr["salut_vlen"]) == 7 + @test HDF5.API.h5d_get_access_plist(fr["salut-vlen"]) != 0 + #@test salut_vlenr == salut_split + vlen_intr = read(fr, "int_vlen") + @test vlen_intr == vlen_int + vlen_attrr = read(fr["int_vlen"]["vlen_attr"]) + @test vlen_attrr == vlen_int + Rr = read(fr, "mygroup/CompressedA") + @test Rr == R + Rr2 = read(fr, "mygroup2/CompressedA") + @test Rr2 == R + Rr3 = read(fr, "mygroup3/CompressedA") + @test Rr3 == R + Rr4 = read(fr, "mygroup/BloscA") + @test Rr4 == R + Rr5 = read(fr, "mygroup2/BloscA") + @test Rr5 == R + Rr6 = read(fr, "mygroup3/BloscA") + @test Rr6 == R + dset = fr["mygroup/CompressedA"] + @test HDF5.get_chunk(dset) == (5, 6) + @test HDF5.name(dset) == "/mygroup/CompressedA" + dset2 = fr["mygroup/BloscA"] + @test HDF5.get_chunk(dset2) == (5, 6) + @test HDF5.name(dset2) == "/mygroup/BloscA" + Xslabr = read(fr, "slab") + @test Xslabr == Xslab + Xslabr = h5read(fn, "slab", (:, :, :)) # issue #87 + @test Xslabr == Xslab + Xslab2r = read(fr, "slab2") + target = fill(5, 10, 20) + target[1] = 4 + @test Xslab2r == target + dset = fr["slab3"] + @test dset[3:5] == [3:5;] + emptyr = read(fr, "empty") + @test isempty(emptyr) + empty_stringr = read(fr, "empty_string") + @test empty_stringr == empty_string + empty_string_arrayr = 
read(fr, "empty_string_array") + @test empty_string_arrayr == empty_string_array + empty_array_of_stringsr = read(fr, "empty_array_of_strings") + @test empty_array_of_stringsr == empty_array_of_strings + @test read_attribute(fr, "species") == species + @test read_attribute(fr, "C∞") == C∞ + dset = fr["salut"] + @test read_attribute(dset, "typeinfo") == label + close(dset) + # Test ref-based reading + Aref = fr["Afloat64"] + sel = (2:3, 1:2:5) + Asub = Aref[sel...] + @test Asub == A[sel...] + close(Aref) + # Test iteration, name, and parent + for obj in fr + @test HDF5.filename(obj) == fn + n = HDF5.name(obj) + p = parent(obj) + end + # Test reading multiple vars at once + z = read(fr, "Float64", "Int16") + @test z == (3.2, 4) + @test typeof(z) == Tuple{Float64,Int16} + # Test reading entire file at once + z = read(fr) + @test z["Float64"] == 3.2 + close(fr) + + # Test object deletion + fr = h5open(fn, "r+") + @test haskey(fr, "deleteme") + delete_object(fr, "deleteme") + @test !haskey(fr, "deleteme") + close(fr) + + # Test object move + h5open(fn, "r+") do io + io["moveme"] = [1, 2, 3] + create_group(io, "moveto") end -end -# Arrays of strings -salut_split = ["Hi", "there"] -write(f, "salut_split", salut_split) -salut_2d = ["Hi" "there"; "Salut" "friend"] -write(f, "salut_2d", salut_2d) -# Arrays of strings as vlen -vlen = HDF5.VLen(salut_split) -write_dataset(f, "salut_vlen", vlen) -# Arrays of scalars as vlen -vlen_int = [[3], [1], [4]] -vleni = HDF5.VLen(vlen_int) -write_dataset(f, "int_vlen", vleni) -write_attribute(f["int_vlen"], "vlen_attr", vleni) -# Empty arrays -empty = UInt32[] -write(f, "empty", empty) -write(f, nothing, empty) -# Empty strings -empty_string = "" -write(f, "empty_string", empty_string) -# Empty array of strings -empty_string_array = String[] -write(f, "empty_string_array", empty_string_array) -# Array of empty string -empty_array_of_strings = [""] -write(f, "empty_array_of_strings", empty_array_of_strings) -# attributes -species = [["N", "C"]; ["A", "B"]] -attributes(f)["species"] = species -@test read(attributes(f)["species"]) == species -@test attributes(f)["species"][] == species -C∞ = 42 -attributes(f)["C∞"] = C∞ -dset = f["salut"] -@test !isempty(dset) -label = "This is a string" -attributes(dset)["typeinfo"] = label -@test read(attributes(dset)["typeinfo"]) == label -@test attributes(dset)["typeinfo"][] == label -@test dset["typeinfo"][] == label -close(dset) -# Scalar reference values in attributes -attributes(f)["ref_test"] = HDF5.Reference(f, "empty_array_of_strings") -@test read(attributes(f)["ref_test"]) === HDF5.Reference(f, "empty_array_of_strings") -# Group -g = create_group(f, "mygroup") -# Test dataset with compression -R = rand(1:20, 20, 40); -g["CompressedA", chunk=(5, 6), shuffle=true, deflate=9] = R -g["BloscA", chunk=(5, 6), shuffle=true, blosc=9] = R -close(g) -# Copy group containing dataset -copy_object(f, "mygroup", f, "mygroup2") -# Copy dataset -g = create_group(f, "mygroup3") -copy_object(f["mygroup/CompressedA"], g, "CompressedA") -copy_object(f["mygroup/BloscA"], g, "BloscA") -close(g) -# Writing hyperslabs -dset = create_dataset(f, "slab", datatype(Float64), dataspace(20, 20, 5), chunk=(5, 5, 1)) -Xslab = randn(20, 20, 5) -for i = 1:5 - dset[:,:,i] = Xslab[:,:,i] -end -dset = create_dataset(f, nothing, datatype(Float64), dataspace(20, 20, 5), chunk=(5, 5, 1)) -dset[:, :, :] = 3.0 -# More complex hyperslab and assignment with "incorrect" types (issue #34) -d = create_dataset(f, "slab2", datatype(Float64), ((10, 20), (100, 200)), 
chunk=(1, 1)) -d[:,:] = 5 -d[1,1] = 4 -# 1d indexing -d = create_dataset(f, "slab3", datatype(Int), ((10,), (-1,)), chunk=(5,)) -@test d[:] == zeros(Int, 10) -d[3:5] = 3:5 -# Create a dataset designed to be deleted -f["deleteme"] = 17.2 -close(f) -@test !isopen(f) -# Test the h5read/write interface, with attributes -W = copy(reshape(1:120, 15, 8)) -Wa = Dict("a" => 1, "b" => 2) -h5write(fn, "newgroup/W", W) -h5writeattr(fn, "newgroup/W", Wa) - -# Read the file back in -fr = h5open(fn) -x = read(fr, "Float64") -@test x == 3.2 && isa(x, Float64) -y = read(fr, "Int16") -@test y == 4 && isa(y, Int16) -zerodim = read(fr, "compressed_zerodim") -@test zerodim == 42 && isa(zerodim, Int32) -bloscempty = read(fr, "bloscempty") -@test bloscempty == Int64[] && isa(bloscempty, Vector{Int64}) -Af32 = read(fr, "Afloat32") -@test convert(Matrix{Float32}, A) == Af32 -@test eltype(Af32) == Float32 -Af64 = read(fr, "Afloat64") -@test convert(Matrix{Float64}, A) == Af64 -@test eltype(Af64) == Float64 -@test eltype(fr["Afloat64"]) == Float64 # issue 167 -Ai8 = read(fr, "Aint8") -@test Ai == Ai8 -@test eltype(Ai8) == Int8 -Ai16 = read(fr, "Aint16") -@test Ai == Ai16 -@test eltype(Ai16) == Int16 -Ai32 = read(fr, "Aint32") -@test Ai == Ai32 -@test eltype(Ai32) == Int32 -Ai64 = read(fr, "Aint64") -@test Ai == Ai64 -@test eltype(Ai64) == Int64 -Ai8 = read(fr, "Auint8") -@test Ai == Ai8 -@test eltype(Ai8) == UInt8 -Ai16 = read(fr, "Auint16") -@test Ai == Ai16 -@test eltype(Ai16) == UInt16 -Ai32 = read(fr, "Auint32") -@test Ai == Ai32 -@test eltype(Ai32) == UInt32 -Ai64 = read(fr, "Auint64") -@test Ai == Ai64 -@test eltype(Ai64) == UInt64 - -Abool_read = read(fr, "Abool") -@test Abool_read == Abool -@test eltype(Abool_read) == Bool - -salutr = read(fr, "salut") -@test salut == salutr -salutr = read(fr, "salut-vlen") -@test salut == salutr -ucoder = read(fr, "ucode") -@test ucode == ucoder -salut_splitr = read(fr, "salut_split") -@test salut_splitr == salut_split -salut_2dr = read(fr, "salut_2d") -@test salut_2d == salut_2dr -salut_vlenr = read(fr, "salut_vlen") -@test HDF5.vlen_get_buf_size(fr["salut_vlen"]) == 7 -@test HDF5.API.h5d_get_access_plist(fr["salut-vlen"]) != 0 -#@test salut_vlenr == salut_split -vlen_intr = read(fr, "int_vlen") -@test vlen_intr == vlen_int -vlen_attrr = read(fr["int_vlen"]["vlen_attr"]) -@test vlen_attrr == vlen_int -Rr = read(fr, "mygroup/CompressedA") -@test Rr == R -Rr2 = read(fr, "mygroup2/CompressedA") -@test Rr2 == R -Rr3 = read(fr, "mygroup3/CompressedA") -@test Rr3 == R -Rr4 = read(fr, "mygroup/BloscA") -@test Rr4 == R -Rr5 = read(fr, "mygroup2/BloscA") -@test Rr5 == R -Rr6 = read(fr, "mygroup3/BloscA") -@test Rr6 == R -dset = fr["mygroup/CompressedA"] -@test HDF5.get_chunk(dset) == (5, 6) -@test HDF5.name(dset) == "/mygroup/CompressedA" -dset2 = fr["mygroup/BloscA"] -@test HDF5.get_chunk(dset2) == (5, 6) -@test HDF5.name(dset2) == "/mygroup/BloscA" -Xslabr = read(fr, "slab") -@test Xslabr == Xslab -Xslabr = h5read(fn, "slab", (:, :, :)) # issue #87 -@test Xslabr == Xslab -Xslab2r = read(fr, "slab2") -target = fill(5, 10, 20) -target[1] = 4 -@test Xslab2r == target -dset = fr["slab3"] -@test dset[3:5] == [3:5;] -emptyr = read(fr, "empty") -@test isempty(emptyr) -empty_stringr = read(fr, "empty_string") -@test empty_stringr == empty_string -empty_string_arrayr = read(fr, "empty_string_array") -@test empty_string_arrayr == empty_string_array -empty_array_of_stringsr = read(fr, "empty_array_of_strings") -@test empty_array_of_stringsr == empty_array_of_strings -@test 
read_attribute(fr, "species") == species -@test read_attribute(fr, "C∞") == C∞ -dset = fr["salut"] -@test read_attribute(dset, "typeinfo") == label -close(dset) -# Test ref-based reading -Aref = fr["Afloat64"] -sel = (2:3, 1:2:5) -Asub = Aref[sel...] -@test Asub == A[sel...] -close(Aref) -# Test iteration, name, and parent -for obj in fr - @test HDF5.filename(obj) == fn - n = HDF5.name(obj) - p = parent(obj) -end -# Test reading multiple vars at once -z = read(fr, "Float64", "Int16") -@test z == (3.2, 4) -@test typeof(z) == Tuple{Float64,Int16} -# Test reading entire file at once -z = read(fr) -@test z["Float64"] == 3.2 -close(fr) - -# Test object deletion -fr = h5open(fn, "r+") -@test haskey(fr, "deleteme") -delete_object(fr, "deleteme") -@test !haskey(fr, "deleteme") -close(fr) - -# Test object move -h5open(fn, "r+") do io - io["moveme"] = [1,2,3] - create_group(io, "moveto") -end -h5open(fn, "r+") do io - @test haskey(io, "moveme") - @test haskey(io, "moveto") && !haskey(io, "moveto/moveme") - move_link(io, "moveme", io["moveto"]) - @test haskey(io, "moveto/moveme") && !haskey(io, "moveme") -end + h5open(fn, "r+") do io + @test haskey(io, "moveme") + @test haskey(io, "moveto") && !haskey(io, "moveto/moveme") + move_link(io, "moveme", io["moveto"]) + @test haskey(io, "moveto/moveme") && !haskey(io, "moveme") + end -# Test the h5read interface -Wr = h5read(fn, "newgroup/W") -@test Wr == W -rng = (2:3:15, 3:5) -Wr = h5read(fn, "newgroup/W", rng) -@test Wr == W[rng...] -War = h5readattr(fn, "newgroup/W") -@test War == Wa - -# issue #618 -# Test that invalid writes treat implicit creation as a transaction, cleaning up the partial -# operation -hid = h5open(fn, "w") -A = rand(3, 3)' -@test !haskey(hid, "A") -@test_throws ArgumentError write(hid, "A", A) -@test !haskey(hid, "A") -dset = create_dataset(hid, "attr", datatype(Int), dataspace(0)) -@test !haskey(attributes(dset), "attr") -# broken test - writing attributes does not check that the stride is correct -@test_skip @test_throws ArgumentError write(dset, "attr", A) -@test !haskey(attributes(dset), "attr") -close(hid) - -# more do syntax -h5open(fn, "w") do fid - g = create_group(fid, "mygroup") - write(g, "x", 3.2) -end + # Test the h5read interface + Wr = h5read(fn, "newgroup/W") + @test Wr == W + rng = (2:3:15, 3:5) + Wr = h5read(fn, "newgroup/W", rng) + @test Wr == W[rng...] 
+ War = h5readattr(fn, "newgroup/W") + @test War == Wa + + # issue #618 + # Test that invalid writes treat implicit creation as a transaction, cleaning up the partial + # operation + hid = h5open(fn, "w") + A = rand(3, 3)' + @test !haskey(hid, "A") + @test_throws ArgumentError write(hid, "A", A) + @test !haskey(hid, "A") + dset = create_dataset(hid, "attr", datatype(Int), dataspace(0)) + @test !haskey(attributes(dset), "attr") + # broken test - writing attributes does not check that the stride is correct + @test_skip @test_throws ArgumentError write(dset, "attr", A) + @test !haskey(attributes(dset), "attr") + close(hid) + + # more do syntax + h5open(fn, "w") do fid + g = create_group(fid, "mygroup") + write(g, "x", 3.2) + end -fid = h5open(fn, "r") -@test keys(fid) == ["mygroup"] -g = fid["mygroup"] -@test keys(g) == ["x"] -close(g) -close(fid) -rm(fn) - -# more do syntax: atomic rename version -tmpdir = mktempdir() -outfile = joinpath(tmpdir, "test.h5") - -# create a new file -h5rewrite(outfile) do fid - g = create_group(fid, "mygroup") - write(g, "x", 3.3) -end -@test length(readdir(tmpdir)) == 1 -h5open(outfile, "r") do fid + fid = h5open(fn, "r") @test keys(fid) == ["mygroup"] - @test keys(fid["mygroup"]) == ["x"] -end + g = fid["mygroup"] + @test keys(g) == ["x"] + close(g) + close(fid) + rm(fn) -# fail to overwrite -@test_throws ErrorException h5rewrite(outfile) do fid - g = create_group(fid, "mygroup") - write(g, "oops", 3.3) - error("failed") -end -@test length(readdir(tmpdir)) == 1 -h5open(outfile, "r") do fid - @test keys(fid) == ["mygroup"] - @test keys(fid["mygroup"]) == ["x"] -end + # more do syntax: atomic rename version + tmpdir = mktempdir() + outfile = joinpath(tmpdir, "test.h5") -# overwrite -h5rewrite(outfile) do fid - g = create_group(fid, "mygroup") - write(g, "y", 3.3) -end -@test length(readdir(tmpdir)) == 1 -h5open(outfile, "r") do fid - @test keys(fid) == ["mygroup"] - @test keys(fid["mygroup"]) == ["y"] -end -rm(tmpdir, recursive=true) - -test_files = joinpath(@__DIR__, "test_files") - -d = h5read(joinpath(test_files, "compound.h5"), "/data") -@test typeof(d[1]) == NamedTuple{(:wgt, :xyz, :uvw, :E), Tuple{Float64, Array{Float64, 1}, Array{Float64, 1}, Float64}} - -# get-datasets -fn = tempname() -fd = h5open(fn, "w") -fd["level_0"] = [1,2,3] -grp = create_group(fd, "mygroup") -fd["mygroup/level_1"] = [4, 5] -grp2 = create_group(grp, "deep_group") -fd["mygroup/deep_group/level_2"] = [6.0, 7.0] -datasets = HDF5.get_datasets(fd) -@test sort(map(HDF5.name, datasets)) == sort(["/level_0", "/mygroup/deep_group/level_2", "/mygroup/level_1"]) -close(fd) -rm(fn) - -# File creation and access property lists -fid = h5open(fn, "w", userblock=1024, libver_bounds=(HDF5.API.H5F_LIBVER_EARLIEST, HDF5.API.H5F_LIBVER_LATEST)) -write(fid, "intarray", [1, 2, 3]) -close(fid) -h5open(fn, "r", libver_bounds=(HDF5.API.H5F_LIBVER_EARLIEST, HDF5.API.H5F_LIBVER_LATEST)) do fid - intarray = read(fid, "intarray") - @test intarray == [1, 2, 3] -end - -# Test null terminated ASCII string (e.g. 
exported by h5py) #332 -h5open(joinpath(test_files, "nullterm_ascii.h5"), "r") do fid - str = read(fid["test"]) - @test str == "Hello World" -end + # create a new file + h5rewrite(outfile) do fid + g = create_group(fid, "mygroup") + write(g, "x", 3.3) + end + @test length(readdir(tmpdir)) == 1 + h5open(outfile, "r") do fid + @test keys(fid) == ["mygroup"] + @test keys(fid["mygroup"]) == ["x"] + end -@test HDF5.unpad(UInt8[0x43, 0x43, 0x41], 1) == "CCA" - -# Test the h5read/write interface with a filename as a first argument, when -# the file does not exist -rm(fn) -h5write(fn, "newgroup/W", W) -Wr = h5read(fn, "newgroup/W") -@test Wr == W -close(f) -rm(fn) - -# Test dataspace convenience versions of create_dataset -try - h5open(fn, "w") do f - create_dataset(f, "test", Int, (128, 32)) - create_dataset(f, "test2", Float64, 128, 64) - @test size(f["test"]) == (128, 32) - @test size(f["test2"]) == (128, 64) + # fail to overwrite + @test_throws ErrorException h5rewrite(outfile) do fid + g = create_group(fid, "mygroup") + write(g, "oops", 3.3) + error("failed") + end + @test length(readdir(tmpdir)) == 1 + h5open(outfile, "r") do fid + @test keys(fid) == ["mygroup"] + @test keys(fid["mygroup"]) == ["x"] end -finally - rm(fn) -end -@testset "h5d_fill" begin - val = 5 - h5open(fn, "w") do f - d = create_dataset(f, "dataset", datatype(Int), dataspace(6, 6), chunk=(2, 3)) - buf = Array{Int,2}(undef,(6,6)) - dtype = datatype(Int) - HDF5.API.h5d_fill(Ref(val), dtype, buf, datatype(Int), dataspace(d)) - @test all(buf .== 5) - HDF5.API.h5d_write(d, dtype, HDF5.API.H5S_ALL, HDF5.API.H5S_ALL, HDF5.API.H5P_DEFAULT, buf) + # overwrite + h5rewrite(outfile) do fid + g = create_group(fid, "mygroup") + write(g, "y", 3.3) end - h5open(fn, "r") do f - @test all( f["dataset"][:,:] .== 5 ) + @test length(readdir(tmpdir)) == 1 + h5open(outfile, "r") do fid + @test keys(fid) == ["mygroup"] + @test keys(fid["mygroup"]) == ["y"] end + rm(tmpdir; recursive=true) + + test_files = joinpath(@__DIR__, "test_files") + + d = h5read(joinpath(test_files, "compound.h5"), "/data") + @test typeof(d[1]) == NamedTuple{ + (:wgt, :xyz, :uvw, :E),Tuple{Float64,Array{Float64,1},Array{Float64,1},Float64} + } + + # get-datasets + fn = tempname() + fd = h5open(fn, "w") + fd["level_0"] = [1, 2, 3] + grp = create_group(fd, "mygroup") + fd["mygroup/level_1"] = [4, 5] + grp2 = create_group(grp, "deep_group") + fd["mygroup/deep_group/level_2"] = [6.0, 7.0] + datasets = HDF5.get_datasets(fd) + @test sort(map(HDF5.name, datasets)) == + sort(["/level_0", "/mygroup/deep_group/level_2", "/mygroup/level_1"]) + close(fd) rm(fn) -end # testset "Test h5d_fill - -@testset "h5d_gather" begin - src_buf = rand(Int, (4,4) ) - dst_buf = Array{Int,2}(undef,(4,4)) - h5open(fn ,"w") do f - d = create_dataset(f, "dataset", datatype(Int), dataspace(4, 4), chunk=(2, 2)) - @test HDF5.API.h5d_gather(dataspace(d), src_buf, datatype(Int), sizeof(dst_buf), dst_buf, C_NULL, C_NULL) |> isnothing - @test src_buf == dst_buf - gatherf_ptr = @cfunction(gatherf, HDF5.API.herr_t, (Ptr{Nothing}, Csize_t, Ptr{Nothing})) - @test HDF5.API.h5d_gather(dataspace(d), src_buf, datatype(Int), sizeof(dst_buf)÷2, dst_buf, gatherf_ptr, C_NULL) |> isnothing - gatherf_bad_ptr = @cfunction(gatherf_bad, HDF5.API.herr_t, (Ptr{Nothing}, Csize_t, Ptr{Nothing})) - @test_throws HDF5.API.H5Error HDF5.API.h5d_gather(dataspace(d), src_buf, datatype(Int), sizeof(dst_buf)÷2, dst_buf, gatherf_bad_ptr, C_NULL) - gatherf_data_ptr = @cfunction(gatherf_data, HDF5.API.herr_t, (Ptr{Nothing}, Csize_t, Ref{Int})) 
- @test HDF5.API.h5d_gather(dataspace(d), src_buf, datatype(Int), sizeof(dst_buf)÷2, dst_buf, gatherf_data_ptr, Ref(9)) |> isnothing - @test_throws HDF5.API.H5Error HDF5.API.h5d_gather(dataspace(d), src_buf, datatype(Int), sizeof(dst_buf)÷2, dst_buf, gatherf_data_ptr, 10) + + # File creation and access property lists + fid = h5open( + fn, + "w"; + userblock=1024, + libver_bounds=(HDF5.API.H5F_LIBVER_EARLIEST, HDF5.API.H5F_LIBVER_LATEST) + ) + write(fid, "intarray", [1, 2, 3]) + close(fid) + h5open( + fn, "r"; libver_bounds=(HDF5.API.H5F_LIBVER_EARLIEST, HDF5.API.H5F_LIBVER_LATEST) + ) do fid + intarray = read(fid, "intarray") + @test intarray == [1, 2, 3] end - rm(fn) -end + # Test null terminated ASCII string (e.g. exported by h5py) #332 + h5open(joinpath(test_files, "nullterm_ascii.h5"), "r") do fid + str = read(fid["test"]) + @test str == "Hello World" + end + @test HDF5.unpad(UInt8[0x43, 0x43, 0x41], 1) == "CCA" + # Test the h5read/write interface with a filename as a first argument, when + # the file does not exist + rm(fn) + h5write(fn, "newgroup/W", W) + Wr = h5read(fn, "newgroup/W") + @test Wr == W + close(f) + rm(fn) -@testset "h5d_scatter" begin - h5open(fn, "w") do f - dst_buf = Array{Int,2}(undef,(4,4)) - d = create_dataset(f, "dataset", datatype(Int), dataspace(4, 4), chunk=(2, 2)) - scatterf_ptr = @cfunction(scatterf, HDF5.API.herr_t, (Ptr{Ptr{Nothing}}, Ptr{Csize_t}, Ptr{Nothing})) - @test HDF5.API.h5d_scatter(scatterf_ptr, C_NULL, datatype(Int), dataspace(d), dst_buf) |> isnothing - scatterf_bad_ptr = @cfunction(scatterf_bad, HDF5.API.herr_t, (Ptr{Ptr{Nothing}}, Ptr{Csize_t}, Ptr{Nothing})) - @test_throws HDF5.API.H5Error HDF5.API.h5d_scatter(scatterf_bad_ptr, C_NULL, datatype(Int), dataspace(d), dst_buf) - scatterf_data_ptr = @cfunction(scatterf_data, HDF5.API.herr_t, (Ptr{Ptr{Int}}, Ptr{Csize_t}, Ref{Int})) - @test HDF5.API.h5d_scatter(scatterf_data_ptr, Ref(9), datatype(Int), dataspace(d), dst_buf) |> isnothing + # Test dataspace convenience versions of create_dataset + try + h5open(fn, "w") do f + create_dataset(f, "test", Int, (128, 32)) + create_dataset(f, "test2", Float64, 128, 64) + @test size(f["test"]) == (128, 32) + @test size(f["test2"]) == (128, 64) + end + finally + rm(fn) end - rm(fn) -end -# Test that switching time tracking off results in identical files -fn1 = tempname(); fn2 = tempname() -h5open(fn1, "w") do f - f["x", obj_track_times=false] = [1, 2, 3] -end -sleep(1) -h5open(fn2, "w") do f - f["x", obj_track_times=false] = [1, 2, 3] -end -@test open(crc32c, fn1) == open(crc32c, fn2) -rm(fn1); rm(fn2) + @testset "h5d_fill" begin + val = 5 + h5open(fn, "w") do f + d = create_dataset(f, "dataset", datatype(Int), dataspace(6, 6); chunk=(2, 3)) + buf = Array{Int,2}(undef, (6, 6)) + dtype = datatype(Int) + HDF5.API.h5d_fill(Ref(val), dtype, buf, datatype(Int), dataspace(d)) + @test all(buf .== 5) + HDF5.API.h5d_write( + d, dtype, HDF5.API.H5S_ALL, HDF5.API.H5S_ALL, HDF5.API.H5P_DEFAULT, buf + ) + end + h5open(fn, "r") do f + @test all(f["dataset"][:, :] .== 5) + end + rm(fn) + end # testset "Test h5d_fill + + @testset "h5d_gather" begin + src_buf = rand(Int, (4, 4)) + dst_buf = Array{Int,2}(undef, (4, 4)) + h5open(fn, "w") do f + d = create_dataset(f, "dataset", datatype(Int), dataspace(4, 4); chunk=(2, 2)) + @test isnothing( + HDF5.API.h5d_gather( + dataspace(d), + src_buf, + datatype(Int), + sizeof(dst_buf), + dst_buf, + C_NULL, + C_NULL + ) + ) + @test src_buf == dst_buf + gatherf_ptr = @cfunction( + gatherf, HDF5.API.herr_t, (Ptr{Nothing}, Csize_t, 
Ptr{Nothing}) + ) + @test isnothing( + HDF5.API.h5d_gather( + dataspace(d), + src_buf, + datatype(Int), + sizeof(dst_buf) ÷ 2, + dst_buf, + gatherf_ptr, + C_NULL + ) + ) + gatherf_bad_ptr = @cfunction( + gatherf_bad, HDF5.API.herr_t, (Ptr{Nothing}, Csize_t, Ptr{Nothing}) + ) + @test_throws HDF5.API.H5Error HDF5.API.h5d_gather( + dataspace(d), + src_buf, + datatype(Int), + sizeof(dst_buf) ÷ 2, + dst_buf, + gatherf_bad_ptr, + C_NULL + ) + gatherf_data_ptr = @cfunction( + gatherf_data, HDF5.API.herr_t, (Ptr{Nothing}, Csize_t, Ref{Int}) + ) + @test isnothing( + HDF5.API.h5d_gather( + dataspace(d), + src_buf, + datatype(Int), + sizeof(dst_buf) ÷ 2, + dst_buf, + gatherf_data_ptr, + Ref(9) + ) + ) + @test_throws HDF5.API.H5Error HDF5.API.h5d_gather( + dataspace(d), + src_buf, + datatype(Int), + sizeof(dst_buf) ÷ 2, + dst_buf, + gatherf_data_ptr, + 10 + ) + end + rm(fn) + end + + @testset "h5d_scatter" begin + h5open(fn, "w") do f + dst_buf = Array{Int,2}(undef, (4, 4)) + d = create_dataset(f, "dataset", datatype(Int), dataspace(4, 4); chunk=(2, 2)) + scatterf_ptr = @cfunction( + scatterf, HDF5.API.herr_t, (Ptr{Ptr{Nothing}}, Ptr{Csize_t}, Ptr{Nothing}) + ) + @test isnothing( + HDF5.API.h5d_scatter( + scatterf_ptr, C_NULL, datatype(Int), dataspace(d), dst_buf + ) + ) + scatterf_bad_ptr = @cfunction( + scatterf_bad, + HDF5.API.herr_t, + (Ptr{Ptr{Nothing}}, Ptr{Csize_t}, Ptr{Nothing}) + ) + @test_throws HDF5.API.H5Error HDF5.API.h5d_scatter( + scatterf_bad_ptr, C_NULL, datatype(Int), dataspace(d), dst_buf + ) + scatterf_data_ptr = @cfunction( + scatterf_data, HDF5.API.herr_t, (Ptr{Ptr{Int}}, Ptr{Csize_t}, Ref{Int}) + ) + @test isnothing( + HDF5.API.h5d_scatter( + scatterf_data_ptr, Ref(9), datatype(Int), dataspace(d), dst_buf + ) + ) + end + rm(fn) + end + # Test that switching time tracking off results in identical files + fn1 = tempname() + fn2 = tempname() + h5open(fn1, "w") do f + f["x", obj_track_times=false] = [1, 2, 3] + end + sleep(1) + h5open(fn2, "w") do f + f["x", obj_track_times=false] = [1, 2, 3] + end + @test open(crc32c, fn1) == open(crc32c, fn2) + rm(fn1) + rm(fn2) end # testset plain @testset "complex" begin - HDF5.enable_complex_support() - - fn = tempname() - f = h5open(fn, "w") - - f["ComplexF64"] = 1.0 + 2.0im - attributes(f["ComplexF64"])["ComplexInt64"] = 1im - - Acmplx = rand(ComplexF64, 3, 5) - write(f, "Acmplx64", convert(Matrix{ComplexF64}, Acmplx)) - write(f, "Acmplx32", convert(Matrix{ComplexF32}, Acmplx)) - - dset = create_dataset(f, "Acmplx64_hyperslab", datatype(Complex{Float64}), dataspace(Acmplx)) - for i in 1:size(Acmplx, 2) - dset[:, i] = Acmplx[:,i] - end - - HDF5.disable_complex_support() - @test_throws ErrorException f["_ComplexF64"] = 1.0 + 2.0im - @test_throws ErrorException write(f, "_Acmplx64", convert(Matrix{ComplexF64}, Acmplx)) - @test_throws ErrorException write(f, "_Acmplx32", convert(Matrix{ComplexF32}, Acmplx)) - HDF5.enable_complex_support() - - close(f) - - fr = h5open(fn) - z = read(fr, "ComplexF64") - @test z == 1.0 + 2.0im && isa(z, ComplexF64) - z_attrs = attributes(fr["ComplexF64"]) - @test read(z_attrs["ComplexInt64"]) == 1im - - Acmplx32 = read(fr, "Acmplx32") - @test convert(Matrix{ComplexF32}, Acmplx) == Acmplx32 - @test eltype(Acmplx32) == ComplexF32 - Acmplx64 = read(fr, "Acmplx64") - @test convert(Matrix{ComplexF64}, Acmplx) == Acmplx64 - @test eltype(Acmplx64) == ComplexF64 - - dset = fr["Acmplx64_hyperslab"] - Acmplx64_hyperslab = zeros(eltype(dset), size(dset)) - for i in 1:size(dset, 2) - Acmplx64_hyperslab[:,i] = dset[:,i] - end - 
@test convert(Matrix{ComplexF64}, Acmplx) == Acmplx64_hyperslab - - HDF5.disable_complex_support() - z = read(fr, "ComplexF64") - @test isa(z, NamedTuple{(:r, :i), Tuple{Float64, Float64}}) - - Acmplx32 = read(fr, "Acmplx32") - @test eltype(Acmplx32) == NamedTuple{(:r, :i), Tuple{Float32, Float32}} - Acmplx64 = read(fr, "Acmplx64") - @test eltype(Acmplx64) == NamedTuple{(:r, :i), Tuple{Float64, Float64}} - - close(fr) - - HDF5.enable_complex_support() + HDF5.enable_complex_support() + + fn = tempname() + f = h5open(fn, "w") + + f["ComplexF64"] = 1.0 + 2.0im + attributes(f["ComplexF64"])["ComplexInt64"] = 1im + + Acmplx = rand(ComplexF64, 3, 5) + write(f, "Acmplx64", convert(Matrix{ComplexF64}, Acmplx)) + write(f, "Acmplx32", convert(Matrix{ComplexF32}, Acmplx)) + + dset = create_dataset( + f, "Acmplx64_hyperslab", datatype(Complex{Float64}), dataspace(Acmplx) + ) + for i in 1:size(Acmplx, 2) + dset[:, i] = Acmplx[:, i] + end + + HDF5.disable_complex_support() + @test_throws ErrorException f["_ComplexF64"] = 1.0 + 2.0im + @test_throws ErrorException write(f, "_Acmplx64", convert(Matrix{ComplexF64}, Acmplx)) + @test_throws ErrorException write(f, "_Acmplx32", convert(Matrix{ComplexF32}, Acmplx)) + HDF5.enable_complex_support() + + close(f) + + fr = h5open(fn) + z = read(fr, "ComplexF64") + @test z == 1.0 + 2.0im && isa(z, ComplexF64) + z_attrs = attributes(fr["ComplexF64"]) + @test read(z_attrs["ComplexInt64"]) == 1im + + Acmplx32 = read(fr, "Acmplx32") + @test convert(Matrix{ComplexF32}, Acmplx) == Acmplx32 + @test eltype(Acmplx32) == ComplexF32 + Acmplx64 = read(fr, "Acmplx64") + @test convert(Matrix{ComplexF64}, Acmplx) == Acmplx64 + @test eltype(Acmplx64) == ComplexF64 + + dset = fr["Acmplx64_hyperslab"] + Acmplx64_hyperslab = zeros(eltype(dset), size(dset)) + for i in 1:size(dset, 2) + Acmplx64_hyperslab[:, i] = dset[:, i] + end + @test convert(Matrix{ComplexF64}, Acmplx) == Acmplx64_hyperslab + + HDF5.disable_complex_support() + z = read(fr, "ComplexF64") + @test isa(z, NamedTuple{(:r, :i),Tuple{Float64,Float64}}) + + Acmplx32 = read(fr, "Acmplx32") + @test eltype(Acmplx32) == NamedTuple{(:r, :i),Tuple{Float32,Float32}} + Acmplx64 = read(fr, "Acmplx64") + @test eltype(Acmplx64) == NamedTuple{(:r, :i),Tuple{Float64,Float64}} + + close(fr) + + HDF5.enable_complex_support() end # test strings with null and undefined references @testset "undefined and null" begin -fn = tempname() -f = h5open(fn, "w") - -# don't silently truncate data -@test_throws ArgumentError write(f, "test", ["hello","there","\0"]) -@test_throws ArgumentError write(f, "trunc1", "\0") -@test_throws ArgumentError write(f, "trunc2", "trunc\0ateme") + fn = tempname() + f = h5open(fn, "w") -# test writing uninitialized string arrays -undefstrarr = similar(Vector(1:3), String) # strs = String[#undef, #undef, #undef] -@test_throws UndefRefError write(f, "undef", undefstrarr) + # don't silently truncate data + @test_throws ArgumentError write(f, "test", ["hello", "there", "\0"]) + @test_throws ArgumentError write(f, "trunc1", "\0") + @test_throws ArgumentError write(f, "trunc2", "trunc\0ateme") -close(f) -rm(fn) + # test writing uninitialized string arrays + undefstrarr = similar(Vector(1:3), String) # strs = String[#undef, #undef, #undef] + @test_throws UndefRefError write(f, "undef", undefstrarr) + close(f) + rm(fn) end # testset null and undefined # test writing abstract arrays @testset "abstract arrays" begin -# test writing reinterpreted data -fn = tempname() -try - h5open(fn, "w") do f - data = reinterpret(UInt8, [true, 
false, false]) - write(f, "reinterpret array", data) + # test writing reinterpreted data + fn = tempname() + try + h5open(fn, "w") do f + data = reinterpret(UInt8, [true, false, false]) + write(f, "reinterpret array", data) + end + + @test h5open(fn, "r") do f + read(f, "reinterpret array") + end == UInt8[0x01, 0x00, 0x00] + finally + rm(fn) end - @test h5open(fn, "r") do f - read(f, "reinterpret array") - end == UInt8[0x01, 0x00, 0x00] -finally - rm(fn) -end + # don't silently fail for arrays with a different stride + fn = tempname() + try + data = rand(UInt16, 2, 3) + pdv_data = PermutedDimsArray(data, (2, 1)) -# don't silently fail for arrays with a different stride -fn = tempname() -try - data = rand(UInt16, 2, 3); - pdv_data = PermutedDimsArray(data, (2, 1)) - - @test_throws ArgumentError h5write(fn, "pdv_data", pdv_data) -finally - rm(fn) -end - -# test alignment -fn = tempname() -h5open(fn, "w", alignment=(0, 8)) do fid - fid["x"] = zeros(10, 10) -end + @test_throws ArgumentError h5write(fn, "pdv_data", pdv_data) + finally + rm(fn) + end + # test alignment + fn = tempname() + h5open(fn, "w"; alignment=(0, 8)) do fid + fid["x"] = zeros(10, 10) + end end # writing abstract arrays # issue #705 @testset "empty and 0-size arrays" begin -fn = tempname() -hfile = h5open(fn, "w") - -# Write datasets with various 0-sizes -write(hfile, "empty", HDF5.EmptyArray{Int64}()) # HDF5 empty -write(hfile, "zerodim", fill(1.0π)) # 0-dimensional -write(hfile, "zerovec", zeros(0)) # 1-dimensional, size 0 -write(hfile, "zeromat", zeros(0, 0)) # 2-dimensional, size 0 -write(hfile, "zeromat2", zeros(0, 1)) # 2-dimensional, size 0 with non-zero axis -dempty = hfile["empty"] -dzerodim = hfile["zerodim"] -dzerovec = hfile["zerovec"] -dzeromat = hfile["zeromat"] -dzeromat2 = hfile["zeromat2"] - -# Test that eltype is preserved (especially for EmptyArray) -@test eltype(dempty) == Int64 -@test eltype(dzerodim) == Float64 -@test eltype(dzerovec) == Float64 -@test eltype(dzeromat) == Float64 -@test eltype(dzeromat2) == Float64 -# Test sizes are as expected -@test size(dempty) == () -@test size(dzerovec) == (0,) -@test size(dzeromat) == (0, 0) -@test size(dzeromat2) == (0, 1) -@test HDF5.isnull(dempty) -@test !HDF5.isnull(dzerovec) -@test !HDF5.isnull(dzeromat) -@test !HDF5.isnull(dzeromat2) -# Reading back must preserve emptiness -@test read(dempty) isa HDF5.EmptyArray -# but 0-dimensional Array{T,0} are stored as HDF5 scalar -@test size(dzerodim) == () -@test !HDF5.isnull(dzerodim) -@test read(dzerodim) == 1.0π - -# Similar tests for writing to attributes -write(dempty, "attr", HDF5.EmptyArray{Float64}()) -write(dzerodim, "attr", fill(1.0ℯ)) -write(dzerovec, "attr", zeros(Int64, 0)) -write(dzeromat, "attr", zeros(Int64, 0, 0)) -write(dzeromat2, "attr", zeros(Int64, 0, 1)) -aempty = dempty["attr"] -azerodim = dzerodim["attr"] -azerovec = dzerovec["attr"] -azeromat = dzeromat["attr"] -azeromat2 = dzeromat2["attr"] -# Test that eltype is preserved (especially for EmptyArray) -@test eltype(aempty) == Float64 -@test eltype(azerodim) == Float64 -@test eltype(azerovec) == Int64 -@test eltype(azeromat) == Int64 -@test eltype(azeromat2) == Int64 -# Test sizes are as expected -@test size(aempty) == () -@test size(azerovec) == (0,) -@test size(azeromat) == (0, 0) -@test size(azeromat2) == (0, 1) -@test HDF5.isnull(aempty) -@test !HDF5.isnull(azerovec) -@test !HDF5.isnull(azeromat) -@test !HDF5.isnull(azeromat2) -# Reading back must preserve emptiness -@test read(aempty) isa HDF5.EmptyArray -# but 0-dimensional Array{T,0} are 
stored as HDF5 scalar -@test size(azerodim) == () -@test !HDF5.isnull(azerodim) -@test read(azerodim) == 1.0ℯ - -# Concatenation of EmptyArrays is not supported -x = HDF5.EmptyArray{Float64}() -@test_throws ErrorException [x x] -@test_throws ErrorException [x; x] -@test_throws ErrorException [x x; x x] - -close(hfile) -rm(fn) - -# check that printing EmptyArray doesn't error -buf = IOBuffer() -show(buf, HDF5.EmptyArray{Int64}()) -@test String(take!(buf)) == "HDF5.EmptyArray{Int64}()" -show(buf, MIME"text/plain"(), HDF5.EmptyArray{Int64}()) -@test String(take!(buf)) == "HDF5.EmptyArray{Int64}()" + fn = tempname() + hfile = h5open(fn, "w") + + # Write datasets with various 0-sizes + write(hfile, "empty", HDF5.EmptyArray{Int64}()) # HDF5 empty + write(hfile, "zerodim", fill(1.0π)) # 0-dimensional + write(hfile, "zerovec", zeros(0)) # 1-dimensional, size 0 + write(hfile, "zeromat", zeros(0, 0)) # 2-dimensional, size 0 + write(hfile, "zeromat2", zeros(0, 1)) # 2-dimensional, size 0 with non-zero axis + dempty = hfile["empty"] + dzerodim = hfile["zerodim"] + dzerovec = hfile["zerovec"] + dzeromat = hfile["zeromat"] + dzeromat2 = hfile["zeromat2"] + + # Test that eltype is preserved (especially for EmptyArray) + @test eltype(dempty) == Int64 + @test eltype(dzerodim) == Float64 + @test eltype(dzerovec) == Float64 + @test eltype(dzeromat) == Float64 + @test eltype(dzeromat2) == Float64 + # Test sizes are as expected + @test size(dempty) == () + @test size(dzerovec) == (0,) + @test size(dzeromat) == (0, 0) + @test size(dzeromat2) == (0, 1) + @test HDF5.isnull(dempty) + @test !HDF5.isnull(dzerovec) + @test !HDF5.isnull(dzeromat) + @test !HDF5.isnull(dzeromat2) + # Reading back must preserve emptiness + @test read(dempty) isa HDF5.EmptyArray + # but 0-dimensional Array{T,0} are stored as HDF5 scalar + @test size(dzerodim) == () + @test !HDF5.isnull(dzerodim) + @test read(dzerodim) == 1.0π + + # Similar tests for writing to attributes + write(dempty, "attr", HDF5.EmptyArray{Float64}()) + write(dzerodim, "attr", fill(1.0ℯ)) + write(dzerovec, "attr", zeros(Int64, 0)) + write(dzeromat, "attr", zeros(Int64, 0, 0)) + write(dzeromat2, "attr", zeros(Int64, 0, 1)) + aempty = dempty["attr"] + azerodim = dzerodim["attr"] + azerovec = dzerovec["attr"] + azeromat = dzeromat["attr"] + azeromat2 = dzeromat2["attr"] + # Test that eltype is preserved (especially for EmptyArray) + @test eltype(aempty) == Float64 + @test eltype(azerodim) == Float64 + @test eltype(azerovec) == Int64 + @test eltype(azeromat) == Int64 + @test eltype(azeromat2) == Int64 + # Test sizes are as expected + @test size(aempty) == () + @test size(azerovec) == (0,) + @test size(azeromat) == (0, 0) + @test size(azeromat2) == (0, 1) + @test HDF5.isnull(aempty) + @test !HDF5.isnull(azerovec) + @test !HDF5.isnull(azeromat) + @test !HDF5.isnull(azeromat2) + # Reading back must preserve emptiness + @test read(aempty) isa HDF5.EmptyArray + # but 0-dimensional Array{T,0} are stored as HDF5 scalar + @test size(azerodim) == () + @test !HDF5.isnull(azerodim) + @test read(azerodim) == 1.0ℯ + + # Concatenation of EmptyArrays is not supported + x = HDF5.EmptyArray{Float64}() + @test_throws ErrorException [x x] + @test_throws ErrorException [x; x] + @test_throws ErrorException [x x; x x] + + close(hfile) + rm(fn) + + # check that printing EmptyArray doesn't error + buf = IOBuffer() + show(buf, HDF5.EmptyArray{Int64}()) + @test String(take!(buf)) == "HDF5.EmptyArray{Int64}()" + show(buf, MIME"text/plain"(), HDF5.EmptyArray{Int64}()) + @test String(take!(buf)) == 
"HDF5.EmptyArray{Int64}()" end # empty and 0-size arrays @testset "generic read of native types" begin -fn = tempname() -hfile = h5open(fn, "w") - -dtype_varstring = HDF5.Datatype(HDF5.API.h5t_copy(HDF5.API.H5T_C_S1)) -HDF5.API.h5t_set_size(dtype_varstring, HDF5.API.H5T_VARIABLE) - -write(hfile, "uint8_array", UInt8[(1:8)...]) -write(hfile, "bool_scalar", true) - -fixstring = "fix" -varstring = "var" -write(hfile, "fixed_string", fixstring) -vardset = create_dataset(hfile, "variable_string", dtype_varstring, dataspace(varstring)) -GC.@preserve varstring begin - HDF5.API.h5d_write(vardset, dtype_varstring, HDF5.API.H5S_ALL, HDF5.API.H5S_ALL, HDF5.API.H5P_DEFAULT, [pointer(varstring)]) -end -flush(hfile) -close(dtype_varstring) - -# generic read() handles concrete types with definite sizes transparently -d = read(hfile["uint8_array"], UInt8) -@test d isa Vector{UInt8} -@test d == 1:8 -d = read(hfile["bool_scalar"], Bool) -@test d isa Bool -@test d == true -d = read(hfile["fixed_string"], HDF5.FixedString{length(fixstring),0}) -@test d isa String -@test d == fixstring -d = read(hfile["variable_string"], Cstring) -@test d isa String -@test d == varstring -# will also accept memory-compatible reinterpretations -d = read(hfile["uint8_array"], Int8) -@test d isa Vector{Int8} -@test d == 1:8 -d = read(hfile["bool_scalar"], UInt8) -@test d isa UInt8 -@test d == 0x1 -# but should throw on non-compatible types -@test_throws ErrorException(""" - Type size mismatch - sizeof(UInt16) = 2 - sizeof($(sprint(show, datatype(UInt8)))) = 1 - """) read(hfile["uint8_array"], UInt16) - -# Strings are not fixed size, but generic read still handles them if given the correct -# underlying FixedString or Cstring type; a method overload makes String work, too. -d = read(hfile["fixed_string"], String) -@test d isa String -@test d == fixstring -d = read(hfile["variable_string"], String) -@test d isa String -@test d == varstring - -close(hfile) -rm(fn) + fn = tempname() + hfile = h5open(fn, "w") + + dtype_varstring = HDF5.Datatype(HDF5.API.h5t_copy(HDF5.API.H5T_C_S1)) + HDF5.API.h5t_set_size(dtype_varstring, HDF5.API.H5T_VARIABLE) + + write(hfile, "uint8_array", UInt8[(1:8)...]) + write(hfile, "bool_scalar", true) + + fixstring = "fix" + varstring = "var" + write(hfile, "fixed_string", fixstring) + vardset = create_dataset( + hfile, "variable_string", dtype_varstring, dataspace(varstring) + ) + GC.@preserve varstring begin + HDF5.API.h5d_write( + vardset, + dtype_varstring, + HDF5.API.H5S_ALL, + HDF5.API.H5S_ALL, + HDF5.API.H5P_DEFAULT, + [pointer(varstring)] + ) + end + flush(hfile) + close(dtype_varstring) + + # generic read() handles concrete types with definite sizes transparently + d = read(hfile["uint8_array"], UInt8) + @test d isa Vector{UInt8} + @test d == 1:8 + d = read(hfile["bool_scalar"], Bool) + @test d isa Bool + @test d == true + d = read(hfile["fixed_string"], HDF5.FixedString{length(fixstring),0}) + @test d isa String + @test d == fixstring + d = read(hfile["variable_string"], Cstring) + @test d isa String + @test d == varstring + # will also accept memory-compatible reinterpretations + d = read(hfile["uint8_array"], Int8) + @test d isa Vector{Int8} + @test d == 1:8 + d = read(hfile["bool_scalar"], UInt8) + @test d isa UInt8 + @test d == 0x1 + # but should throw on non-compatible types + @test_throws ErrorException(""" + Type size mismatch + sizeof(UInt16) = 2 + sizeof($(sprint(show, datatype(UInt8)))) = 1 + """) read(hfile["uint8_array"], UInt16) + + # Strings are not fixed size, but generic read still 
handles them if given the correct + # underlying FixedString or Cstring type; a method overload makes String work, too. + d = read(hfile["fixed_string"], String) + @test d isa String + @test d == fixstring + d = read(hfile["variable_string"], String) + @test d isa String + @test d == varstring + + close(hfile) + rm(fn) end # generic read of native types @testset "show" begin -fn = tempname() + fn = tempname() + + # First create data objects and sure they print useful outputs + + hfile = h5open(fn, "w"; swmr=true) + @test sprint(show, hfile) == "HDF5.File: (read-write, swmr) $fn" + + group = create_group(hfile, "group") + @test sprint(show, group) == "HDF5.Group: /group (file: $fn)" + + dset = create_dataset(group, "dset", datatype(Int), dataspace((1,))) + @test sprint(show, dset) == "HDF5.Dataset: /group/dset (file: $fn xfer_mode: 0)" + + meta = create_attribute(dset, "meta", datatype(Bool), dataspace((1,))) + @test sprint(show, meta) == "HDF5.Attribute: meta" + + dsetattrs = attributes(dset) + @test sprint(show, dsetattrs) == + "Attributes of HDF5.Dataset: /group/dset (file: $fn xfer_mode: 0)" + + prop = HDF5.init!(HDF5.LinkCreateProperties()) + @test sprint(show, prop) == """ + HDF5.LinkCreateProperties( + create_intermediate_group = false, + char_encoding = :ascii, + )""" + + prop = HDF5.DatasetCreateProperties() + @test sprint(show, prop) == "HDF5.DatasetCreateProperties()" + + dtype = HDF5.Datatype(HDF5.API.h5t_copy(HDF5.API.H5T_IEEE_F64LE)) + @test sprint(show, dtype) == "HDF5.Datatype: H5T_IEEE_F64LE" + commit_datatype(hfile, "type", dtype) + @test sprint(show, dtype) == "HDF5.Datatype: /type H5T_IEEE_F64LE" + + dtypemeta = create_attribute(dtype, "dtypemeta", datatype(Bool), dataspace((1,))) + @test sprint(show, dtypemeta) == "HDF5.Attribute: dtypemeta" + + dtypeattrs = attributes(dtype) + @test sprint(show, dtypeattrs) == "Attributes of HDF5.Datatype: /type H5T_IEEE_F64LE" + + dspace_null = HDF5.Dataspace(HDF5.API.h5s_create(HDF5.API.H5S_NULL)) + dspace_scal = HDF5.Dataspace(HDF5.API.h5s_create(HDF5.API.H5S_SCALAR)) + dspace_norm = dataspace((100, 4)) + dspace_maxd = dataspace((100, 4); max_dims=(256, 4)) + dspace_slab = HDF5.hyperslab(dataspace((100, 4)), 1:20:100, 1:4) + if HDF5.libversion ≥ v"1.10.7" + dspace_irrg = HDF5.Dataspace( + HDF5.API.h5s_combine_select( + HDF5.API.h5s_copy(dspace_slab), + HDF5.API.H5S_SELECT_OR, + HDF5.hyperslab(dataspace((100, 4)), 2, 2) + ) + ) + @test sprint(show, dspace_irrg) == "HDF5.Dataspace: (100, 4) [irregular selection]" + end + @test sprint(show, dspace_null) == "HDF5.Dataspace: H5S_NULL" + @test sprint(show, dspace_scal) == "HDF5.Dataspace: H5S_SCALAR" + @test sprint(show, dspace_norm) == "HDF5.Dataspace: (100, 4)" + @test sprint(show, dspace_maxd) == "HDF5.Dataspace: (100, 4) / (256, 4)" + @test sprint(show, dspace_slab) == "HDF5.Dataspace: (1:20:81, 1:4) / (1:100, 1:4)" -# First create data objects and sure they print useful outputs + # Now test printing after closing each object -hfile = h5open(fn, "w", swmr = true) -@test sprint(show, hfile) == "HDF5.File: (read-write, swmr) $fn" + close(dspace_null) + @test sprint(show, dspace_null) == "HDF5.Dataspace: (invalid)" -group = create_group(hfile, "group") -@test sprint(show, group) == "HDF5.Group: /group (file: $fn)" + close(dtype) + @test sprint(show, dtype) == "HDF5.Datatype: (invalid)" -dset = create_dataset(group, "dset", datatype(Int), dataspace((1,))) -@test sprint(show, dset) == "HDF5.Dataset: /group/dset (file: $fn xfer_mode: 0)" + close(prop) + @test sprint(show, prop) == 
"HDF5.DatasetCreateProperties: (invalid)" -meta = create_attribute(dset, "meta", datatype(Bool), dataspace((1,))) -@test sprint(show, meta) == "HDF5.Attribute: meta" + close(meta) + @test sprint(show, meta) == "HDF5.Attribute: (invalid)" -dsetattrs = attributes(dset) -@test sprint(show, dsetattrs) == "Attributes of HDF5.Dataset: /group/dset (file: $fn xfer_mode: 0)" + close(dtypemeta) + @test sprint(show, dtypemeta) == "HDF5.Attribute: (invalid)" -prop = HDF5.init!(HDF5.LinkCreateProperties()) -@test sprint(show, prop) == """ -HDF5.LinkCreateProperties( - create_intermediate_group = false, - char_encoding = :ascii, -)""" + close(dset) + @test sprint(show, dset) == "HDF5.Dataset: (invalid)" + @test sprint(show, dsetattrs) == "Attributes of HDF5.Dataset: (invalid)" -prop = HDF5.DatasetCreateProperties() -@test sprint(show, prop) == "HDF5.DatasetCreateProperties()" + close(group) + @test sprint(show, group) == "HDF5.Group: (invalid)" -dtype = HDF5.Datatype(HDF5.API.h5t_copy(HDF5.API.H5T_IEEE_F64LE)) -@test sprint(show, dtype) == "HDF5.Datatype: H5T_IEEE_F64LE" -commit_datatype(hfile, "type", dtype) -@test sprint(show, dtype) == "HDF5.Datatype: /type H5T_IEEE_F64LE" + close(hfile) + @test sprint(show, hfile) == "HDF5.File: (closed) $fn" -dtypemeta = create_attribute(dtype, "dtypemeta", datatype(Bool), dataspace((1,))) -@test sprint(show, dtypemeta) == "HDF5.Attribute: dtypemeta" + # Go back and check different access modes for file printing + hfile = h5open(fn, "r+"; swmr=true) + @test sprint(show, hfile) == "HDF5.File: (read-write, swmr) $fn" + close(hfile) + hfile = h5open(fn, "r"; swmr=true) + @test sprint(show, hfile) == "HDF5.File: (read-only, swmr) $fn" + close(hfile) + hfile = h5open(fn, "r") + @test sprint(show, hfile) == "HDF5.File: (read-only) $fn" + close(hfile) + hfile = h5open(fn, "cw") + @test sprint(show, hfile) == "HDF5.File: (read-write) $fn" + close(hfile) -dtypeattrs = attributes(dtype) -@test sprint(show, dtypeattrs) == "Attributes of HDF5.Datatype: /type H5T_IEEE_F64LE" + rm(fn) -dspace_null = HDF5.Dataspace(HDF5.API.h5s_create(HDF5.API.H5S_NULL)) -dspace_scal = HDF5.Dataspace(HDF5.API.h5s_create(HDF5.API.H5S_SCALAR)) -dspace_norm = dataspace((100, 4)) -dspace_maxd = dataspace((100, 4), max_dims = (256, 4)) -dspace_slab = HDF5.hyperslab(dataspace((100, 4)), 1:20:100, 1:4) -if HDF5.libversion ≥ v"1.10.7" -dspace_irrg = HDF5.Dataspace(HDF5.API.h5s_combine_select( - HDF5.API.h5s_copy(dspace_slab), HDF5.API.H5S_SELECT_OR, - HDF5.hyperslab(dataspace((100, 4)), 2, 2))) -@test sprint(show, dspace_irrg) == "HDF5.Dataspace: (100, 4) [irregular selection]" -end -@test sprint(show, dspace_null) == "HDF5.Dataspace: H5S_NULL" -@test sprint(show, dspace_scal) == "HDF5.Dataspace: H5S_SCALAR" -@test sprint(show, dspace_norm) == "HDF5.Dataspace: (100, 4)" -@test sprint(show, dspace_maxd) == "HDF5.Dataspace: (100, 4) / (256, 4)" -@test sprint(show, dspace_slab) == "HDF5.Dataspace: (1:20:81, 1:4) / (1:100, 1:4)" - -# Now test printing after closing each object - -close(dspace_null) -@test sprint(show, dspace_null) == "HDF5.Dataspace: (invalid)" - -close(dtype) -@test sprint(show, dtype) == "HDF5.Datatype: (invalid)" - -close(prop) -@test sprint(show, prop) == "HDF5.DatasetCreateProperties: (invalid)" - -close(meta) -@test sprint(show, meta) == "HDF5.Attribute: (invalid)" - -close(dtypemeta) -@test sprint(show, dtypemeta) == "HDF5.Attribute: (invalid)" - -close(dset) -@test sprint(show, dset) == "HDF5.Dataset: (invalid)" -@test sprint(show, dsetattrs) == "Attributes of HDF5.Dataset: 
(invalid)" - -close(group) -@test sprint(show, group) == "HDF5.Group: (invalid)" - -close(hfile) -@test sprint(show, hfile) == "HDF5.File: (closed) $fn" - -# Go back and check different access modes for file printing -hfile = h5open(fn, "r+", swmr = true) -@test sprint(show, hfile) == "HDF5.File: (read-write, swmr) $fn" -close(hfile) -hfile = h5open(fn, "r", swmr = true) -@test sprint(show, hfile) == "HDF5.File: (read-only, swmr) $fn" -close(hfile) -hfile = h5open(fn, "r") -@test sprint(show, hfile) == "HDF5.File: (read-only) $fn" -close(hfile) -hfile = h5open(fn, "cw") -@test sprint(show, hfile) == "HDF5.File: (read-write) $fn" -close(hfile) - -rm(fn) - -# Make an interesting file tree -hfile = h5open(fn, "w") -# file level -hfile["version"] = 1.0 -attributes(hfile)["creator"] = "HDF5.jl" -# group level -create_group(hfile, "inner") -attributes(hfile["inner"])["dirty"] = true -# dataset level -hfile["inner/data"] = collect(-5:5) -attributes(hfile["inner/data"])["mode"] = 1 -# non-trivial committed datatype -# TODO: print more datatype information -tmeta = HDF5.Datatype(HDF5.API.h5t_create(HDF5.API.H5T_COMPOUND, sizeof(Int) + sizeof(Float64))) -HDF5.API.h5t_insert(tmeta, "scale", 0, HDF5.hdf5_type_id(Int)) -HDF5.API.h5t_insert(tmeta, "bias", sizeof(Int), HDF5.hdf5_type_id(Float64)) -tstr = datatype("fixed") -t = HDF5.Datatype(HDF5.API.h5t_create(HDF5.API.H5T_COMPOUND, sizeof(tmeta) + sizeof(tstr))) -HDF5.API.h5t_insert(t, "meta", 0, tmeta) -HDF5.API.h5t_insert(t, "type", sizeof(tmeta), tstr) -commit_datatype(hfile, "dtype", t) - -buf = IOBuffer() -iobuf = IOContext(buf, :limit => true, :module => Main) -show3(io::IO, x) = show(IOContext(io, iobuf), MIME"text/plain"(), x) - -HDF5.show_tree(iobuf, hfile) -msg = String(take!(buf)) -@test occursin(r""" + # Make an interesting file tree + hfile = h5open(fn, "w") + # file level + hfile["version"] = 1.0 + attributes(hfile)["creator"] = "HDF5.jl" + # group level + create_group(hfile, "inner") + attributes(hfile["inner"])["dirty"] = true + # dataset level + hfile["inner/data"] = collect(-5:5) + attributes(hfile["inner/data"])["mode"] = 1 + # non-trivial committed datatype + # TODO: print more datatype information + tmeta = HDF5.Datatype( + HDF5.API.h5t_create(HDF5.API.H5T_COMPOUND, sizeof(Int) + sizeof(Float64)) + ) + HDF5.API.h5t_insert(tmeta, "scale", 0, HDF5.hdf5_type_id(Int)) + HDF5.API.h5t_insert(tmeta, "bias", sizeof(Int), HDF5.hdf5_type_id(Float64)) + tstr = datatype("fixed") + t = HDF5.Datatype( + HDF5.API.h5t_create(HDF5.API.H5T_COMPOUND, sizeof(tmeta) + sizeof(tstr)) + ) + HDF5.API.h5t_insert(t, "meta", 0, tmeta) + HDF5.API.h5t_insert(t, "type", sizeof(tmeta), tstr) + commit_datatype(hfile, "dtype", t) + + buf = IOBuffer() + iobuf = IOContext(buf, :limit => true, :module => Main) + show3(io::IO, x) = show(IOContext(io, iobuf), MIME"text/plain"(), x) + + HDF5.show_tree(iobuf, hfile) + msg = String(take!(buf)) + @test occursin( + r""" 🗂️ HDF5.File: .*$ ├─ 🏷️ creator ├─ 📄 dtype @@ -902,65 +1011,95 @@ msg = String(take!(buf)) │ ├─ 🏷️ dirty │ └─ 🔢 data │ └─ 🏷️ mode -└─ 🔢 version"""m, msg) -@test sprint(show3, hfile) == msg +└─ 🔢 version"""m, + msg + ) + @test sprint(show3, hfile) == msg -HDF5.show_tree(iobuf, hfile, attributes = false) -@test occursin(r""" + HDF5.show_tree(iobuf, hfile; attributes=false) + @test occursin( + r""" 🗂️ HDF5.File: .*$ ├─ 📄 dtype ├─ 📂 inner │ └─ 🔢 data -└─ 🔢 version"""m, String(take!(buf))) +└─ 🔢 version"""m, + String(take!(buf)) + ) -HDF5.show_tree(iobuf, attributes(hfile)) -msg = String(take!(buf)) -@test 
occursin(r""" + HDF5.show_tree(iobuf, attributes(hfile)) + msg = String(take!(buf)) + @test occursin( + r""" 🗂️ Attributes of HDF5.File: .*$ -└─ 🏷️ creator"""m, msg) -@test sprint(show3, attributes(hfile)) == msg +└─ 🏷️ creator"""m, + msg + ) + @test sprint(show3, attributes(hfile)) == msg -HDF5.show_tree(iobuf, hfile["inner"]) -msg = String(take!(buf)) -@test occursin(r""" + HDF5.show_tree(iobuf, hfile["inner"]) + msg = String(take!(buf)) + @test occursin( + r""" 📂 HDF5.Group: /inner .*$ ├─ 🏷️ dirty └─ 🔢 data - └─ 🏷️ mode"""m, msg) -@test sprint(show3, hfile["inner"]) == msg - -HDF5.show_tree(iobuf, hfile["inner"], attributes = false) -@test occursin(r""" + └─ 🏷️ mode"""m, + msg + ) + @test sprint(show3, hfile["inner"]) == msg + + HDF5.show_tree(iobuf, hfile["inner"]; attributes=false) + @test occursin( + r""" 📂 HDF5.Group: /inner .*$ -└─ 🔢 data"""m, String(take!(buf))) +└─ 🔢 data"""m, + String(take!(buf)) + ) -HDF5.show_tree(iobuf, hfile["inner/data"]) -msg = String(take!(buf)) -@test occursin(r""" + HDF5.show_tree(iobuf, hfile["inner/data"]) + msg = String(take!(buf)) + @test occursin( + r""" 🔢 HDF5.Dataset: /inner/data .*$ -└─ 🏷️ mode"""m, msg) -# xfer_mode changes between printings, so need regex again -@test occursin(r""" +└─ 🏷️ mode"""m, + msg + ) + # xfer_mode changes between printings, so need regex again + @test occursin( + r""" 🔢 HDF5.Dataset: /inner/data .*$ -└─ 🏷️ mode"""m, sprint(show3, hfile["inner/data"])) - -HDF5.show_tree(iobuf, hfile["inner/data"], attributes = false) -@test occursin(r""" -🔢 HDF5.Dataset: /inner/data .*$"""m, String(take!(buf))) - -HDF5.show_tree(iobuf, hfile["dtype"]) -@test occursin(r""" -📄 HDF5.Datatype: /dtype""", String(take!(buf))) - -HDF5.show_tree(iobuf, hfile["inner/data"]["mode"], attributes = true) -@test occursin(r""" -🏷️ HDF5.Attribute: mode""", String(take!(buf))) - -# configurable options - -# no emoji icons -HDF5.SHOW_TREE_ICONS[] = false -@test occursin(r""" +└─ 🏷️ mode"""m, + sprint(show3, hfile["inner/data"]) + ) + + HDF5.show_tree(iobuf, hfile["inner/data"]; attributes=false) + @test occursin( + r""" +🔢 HDF5.Dataset: /inner/data .*$"""m, + String(take!(buf)) + ) + + HDF5.show_tree(iobuf, hfile["dtype"]) + @test occursin( + r""" +📄 HDF5.Datatype: /dtype""", + String(take!(buf)) + ) + + HDF5.show_tree(iobuf, hfile["inner/data"]["mode"]; attributes=true) + @test occursin( + r""" +🏷️ HDF5.Attribute: mode""", + String(take!(buf)) + ) + + # configurable options + + # no emoji icons + HDF5.SHOW_TREE_ICONS[] = false + @test occursin( + r""" \[F\] HDF5.File: .*$ ├─ \[A\] creator ├─ \[T\] dtype @@ -968,278 +1107,288 @@ HDF5.SHOW_TREE_ICONS[] = false │ ├─ \[A\] dirty │ └─ \[D\] data │ └─ \[A\] mode -└─ \[D\] version"""m, sprint(show3, hfile)) -HDF5.SHOW_TREE_ICONS[] = true +└─ \[D\] version"""m, + sprint(show3, hfile) + ) + HDF5.SHOW_TREE_ICONS[] = true -# no tree printing -show(IOContext(iobuf, :compact => true), MIME"text/plain"(), hfile) -msg = String(take!(buf)) -@test msg == sprint(show, hfile) + # no tree printing + show(IOContext(iobuf, :compact => true), MIME"text/plain"(), hfile) + msg = String(take!(buf)) + @test msg == sprint(show, hfile) -close(hfile) + close(hfile) -# Now test the print-limiting heuristics for large/complex datasets + # Now test the print-limiting heuristics for large/complex datasets -# group with a large number of children; tests child entry truncation heuristic -h5open(fn, "w") do hfile - dt, ds = datatype(Int), dataspace(()) - opts = Iterators.product('A':'Z', 1:9) - for ii in opts - create_dataset(hfile, 
string(ii...), dt, ds) - end + # group with a large number of children; tests child entry truncation heuristic + h5open(fn, "w") do hfile + dt, ds = datatype(Int), dataspace(()) + opts = Iterators.product('A':'Z', 1:9) + for ii in opts + create_dataset(hfile, string(ii...), dt, ds) + end - def = HDF5.SHOW_TREE_MAX_CHILDREN[] - HDF5.SHOW_TREE_MAX_CHILDREN[] = 5 + def = HDF5.SHOW_TREE_MAX_CHILDREN[] + HDF5.SHOW_TREE_MAX_CHILDREN[] = 5 - HDF5.show_tree(iobuf, hfile) - msg = String(take!(buf)) - @test occursin(r""" + HDF5.show_tree(iobuf, hfile) + msg = String(take!(buf)) + @test occursin( + r""" 🗂️ HDF5.File: .*$ ├─ 🔢 A1 ├─ 🔢 A2 ├─ 🔢 A3 ├─ 🔢 A4 ├─ 🔢 A5 -└─ \(229 more children\)"""m, msg) - @test sprint(show3, hfile) == msg +└─ \(229 more children\)"""m, + msg + ) + @test sprint(show3, hfile) == msg - HDF5.SHOW_TREE_MAX_CHILDREN[] = def + HDF5.SHOW_TREE_MAX_CHILDREN[] = def - # IOContext can halt limiting - HDF5.show_tree(IOContext(iobuf, :limit => false), hfile) - @test countlines(seekstart(buf)) == length(opts) + 1 - truncate(buf, 0) -end - -# deeply nested set of elements; test that the tree is truncated -h5open(fn, "w") do hfile - p = HDF5.root(hfile)::HDF5.Group - opts = 'A':'Z' - for ii in opts - p = create_group(p, string(ii)) + # IOContext can halt limiting + HDF5.show_tree(IOContext(iobuf, :limit => false), hfile) + @test countlines(seekstart(buf)) == length(opts) + 1 + truncate(buf, 0) end - def = HDF5.SHOW_TREE_MAX_DEPTH[] - HDF5.SHOW_TREE_MAX_DEPTH[] = 5 - - HDF5.show_tree(iobuf, hfile) - msg = String(take!(buf)) - @test occursin(r""" + # deeply nested set of elements; test that the tree is truncated + h5open(fn, "w") do hfile + p = HDF5.root(hfile)::HDF5.Group + opts = 'A':'Z' + for ii in opts + p = create_group(p, string(ii)) + end + + def = HDF5.SHOW_TREE_MAX_DEPTH[] + HDF5.SHOW_TREE_MAX_DEPTH[] = 5 + + HDF5.show_tree(iobuf, hfile) + msg = String(take!(buf)) + @test occursin( + r""" 🗂️ HDF5.File: .*$ └─ 📂 A └─ 📂 B └─ 📂 C └─ 📂 D └─ 📂 E - └─ \(1 child\)"""m, msg) - @test sprint(show3, hfile) == msg - - HDF5.SHOW_TREE_MAX_DEPTH[] = def + └─ \(1 child\)"""m, + msg + ) + @test sprint(show3, hfile) == msg - # IOContext can halt limiting - HDF5.show_tree(IOContext(iobuf, :limit => false), hfile) - @test countlines(seekstart(buf)) == length(opts) + 1 - truncate(buf, 0) -end + HDF5.SHOW_TREE_MAX_DEPTH[] = def -rm(fn) + # IOContext can halt limiting + HDF5.show_tree(IOContext(iobuf, :limit => false), hfile) + @test countlines(seekstart(buf)) == length(opts) + 1 + truncate(buf, 0) + end + rm(fn) end # show tests @testset "split1" begin - -@test HDF5.split1("/") == ("/", "") -@test HDF5.split1("a") == ("a", "") -@test HDF5.split1("/a/b/c") == ("/", "a/b/c") -@test HDF5.split1("a/b/c") == ("a", "b/c") -@test HDF5.split1(GenericString("a")) == ("a", "") -@test HDF5.split1(GenericString("/a/b/c")) == ("/", "a/b/c") -@test HDF5.split1(GenericString("a/b/c")) == ("a", "b/c") - -# The following two paths have the same graphemes but different code unit structures: -# the first one is -# "/" -# while the second one is -# "a" "/" -circa = "â" # -acomb = "â" # "a" + -path1 = circa * "/α" -path2 = acomb * "/α" -# Sanity checks that the two strings are different but equivalent under normalization -@test path1 != path2 -@test Base.Unicode.normalize(path1, :NFC) == Base.Unicode.normalize(path2, :NFC) -# Check split1 operates correctly -@test HDF5.split1(path1) == (circa, "α") -@test HDF5.split1(path2) == (acomb, "α") -@test HDF5.split1("/" * path1) == ("/", path1) -@test HDF5.split1("/" * path2) == ("/", 
path2) - + @test HDF5.split1("/") == ("/", "") + @test HDF5.split1("a") == ("a", "") + @test HDF5.split1("/a/b/c") == ("/", "a/b/c") + @test HDF5.split1("a/b/c") == ("a", "b/c") + @test HDF5.split1(GenericString("a")) == ("a", "") + @test HDF5.split1(GenericString("/a/b/c")) == ("/", "a/b/c") + @test HDF5.split1(GenericString("a/b/c")) == ("a", "b/c") + + # The following two paths have the same graphemes but different code unit structures: + # the first one is + # "/" + # while the second one is + # "a" "/" + circa = "â" # + acomb = "â" # "a" + + path1 = circa * "/α" + path2 = acomb * "/α" + # Sanity checks that the two strings are different but equivalent under normalization + @test path1 != path2 + @test Base.Unicode.normalize(path1, :NFC) == Base.Unicode.normalize(path2, :NFC) + # Check split1 operates correctly + @test HDF5.split1(path1) == (circa, "α") + @test HDF5.split1(path2) == (acomb, "α") + @test HDF5.split1("/" * path1) == ("/", path1) + @test HDF5.split1("/" * path2) == ("/", path2) end # split1 tests - # Also tests AbstractString interface @testset "haskey" begin -fn = tempname() -hfile = h5open(fn, "w") - -group1 = create_group(hfile, "group1") -group2 = create_group(group1, "group2") - -@test haskey(hfile, "/") -@test haskey(hfile, GenericString("group1")) -@test !haskey(hfile, GenericString("groupna")) -@test haskey(hfile, "group1/group2") -@test !haskey(hfile, "group1/groupna") -@test_throws KeyError hfile["nothing"] - -dset1 = create_dataset(hfile, "dset1", datatype(Int), dataspace((1,))) -dset2 = create_dataset(group1, "dset2", datatype(Int), dataspace((1,))) - -@test haskey(hfile, "dset1") -@test !haskey(hfile, "dsetna") -@test haskey(hfile, "group1/dset2") -@test !haskey(hfile, "group1/dsetna") - -meta1 = create_attribute(dset1, "meta1", datatype(Bool), dataspace((1,))) -@test haskey(dset1, "meta1") -@test !haskey(dset1, "metana") -@test_throws KeyError dset1["nothing"] - - -attribs = attributes(hfile) -attribs["test1"] = true -attribs["test2"] = "foo" - -@test haskey(attribs, "test1") -@test haskey(attribs, "test2") -@test !haskey(attribs, "testna") -@test_throws KeyError attribs["nothing"] - -attribs = attributes(dset2) -attribs["attr"] = "foo" -@test haskey(attribs, GenericString("attr")) - -close(hfile) -rm(fn) + fn = tempname() + hfile = h5open(fn, "w") + + group1 = create_group(hfile, "group1") + group2 = create_group(group1, "group2") + + @test haskey(hfile, "/") + @test haskey(hfile, GenericString("group1")) + @test !haskey(hfile, GenericString("groupna")) + @test haskey(hfile, "group1/group2") + @test !haskey(hfile, "group1/groupna") + @test_throws KeyError hfile["nothing"] + + dset1 = create_dataset(hfile, "dset1", datatype(Int), dataspace((1,))) + dset2 = create_dataset(group1, "dset2", datatype(Int), dataspace((1,))) + + @test haskey(hfile, "dset1") + @test !haskey(hfile, "dsetna") + @test haskey(hfile, "group1/dset2") + @test !haskey(hfile, "group1/dsetna") + + meta1 = create_attribute(dset1, "meta1", datatype(Bool), dataspace((1,))) + @test haskey(dset1, "meta1") + @test !haskey(dset1, "metana") + @test_throws KeyError dset1["nothing"] + + attribs = attributes(hfile) + attribs["test1"] = true + attribs["test2"] = "foo" + + @test haskey(attribs, "test1") + @test haskey(attribs, "test2") + @test !haskey(attribs, "testna") + @test_throws KeyError attribs["nothing"] + + attribs = attributes(dset2) + attribs["attr"] = "foo" + @test haskey(attribs, GenericString("attr")) + + close(hfile) + rm(fn) end # haskey tests - @testset "AbstractString" begin + fn = 
GenericString(tempname()) + hfile = h5open(fn, "w") + close(hfile) + hfile = h5open(fn) + close(hfile) + hfile = h5open(fn, "w") + + @test_nowarn create_group(hfile, GenericString("group1")) + @test_nowarn create_dataset( + hfile, GenericString("dset1"), datatype(Int), dataspace((1,)) + ) + @test_nowarn create_dataset(hfile, GenericString("dset2"), 1) + + @test_nowarn hfile[GenericString("group1")] + @test_nowarn hfile[GenericString("dset1")] + + dset1 = hfile["dset1"] + @test_nowarn create_attribute( + dset1, GenericString("meta1"), datatype(Bool), dataspace((1,)) + ) + @test_nowarn create_attribute(dset1, GenericString("meta2"), 1) + @test_nowarn dset1[GenericString("meta1")] + @test_nowarn dset1[GenericString("x")] = 2 + + array_of_strings = ["test",] + write(hfile, "array_of_strings", array_of_strings) + @test_nowarn attributes(hfile)[GenericString("ref_test")] = HDF5.Reference( + hfile, GenericString("array_of_strings") + ) + @test read(attributes(hfile)[GenericString("ref_test")]) === + HDF5.Reference(hfile, "array_of_strings") + + hfile[GenericString("test")] = 17.2 + @test_nowarn delete_object(hfile, GenericString("test")) + @test_nowarn delete_attribute(dset1, GenericString("meta1")) + + # transient types + memtype_id = HDF5.API.h5t_copy(HDF5.API.H5T_NATIVE_DOUBLE) + dt = HDF5.Datatype(memtype_id) + @test !HDF5.API.h5t_committed(dt) + commit_datatype(hfile, GenericString("dt"), dt) + @test HDF5.API.h5t_committed(dt) + + dt = datatype(Int) + ds = dataspace(0) + d = create_dataset(hfile, GenericString("d"), dt, ds) + g = create_group(hfile, GenericString("g")) + a = create_attribute(hfile, GenericString("a"), dt, ds) + + for obj in (d, g) + @test_nowarn write_attribute(obj, GenericString("a"), 1) + @test_nowarn read_attribute(obj, GenericString("a")) + @test_nowarn write(obj, GenericString("aa"), 1) + @test_nowarn attributes(obj)["attr1"] = GenericString("b") + end + @test_nowarn write(d, "attr2", GenericString("c")) + @test_nowarn write_dataset(g, GenericString("ag"), GenericString("gg")) + @test_nowarn write_dataset( + g, GenericString("ag_array"), [GenericString("a1"), GenericString("a2")] + ) + + genstrs = GenericString["fee", "fi", "foo"] + @test_nowarn write_attribute(d, GenericString("myattr"), genstrs) + @test genstrs == read(d["myattr"]) + + for obj in (hfile,) + @test_nowarn open_dataset(obj, GenericString("d")) + @test_nowarn write_dataset(obj, GenericString("dd"), 1) + @test_nowarn read_dataset(obj, GenericString("dd")) + @test_nowarn read(obj, GenericString("dd")) + @test_nowarn read(obj, GenericString("dd") => Int) + end + read(attributes(hfile), GenericString("a")) + + write(hfile, GenericString("ASD"), GenericString("Aa")) + write(g, GenericString("ASD"), GenericString("Aa")) + write(g, GenericString("ASD1"), [GenericString("Aa")]) + + # test writing multiple variable + @test_nowarn write( + hfile, GenericString("a1"), rand(2, 2), GenericString("a2"), rand(2, 2) + ) + + # copy methods + d1 = create_dataset(hfile, GenericString("d1"), dt, ds) + d1["x"] = 32 + @test_nowarn copy_object(hfile, GenericString("d1"), hfile, GenericString("d1copy1")) + @test_nowarn copy_object(d1, hfile, GenericString("d1copy2")) + + fn = GenericString(tempname()) + A = Matrix(reshape(1:120, 15, 8)) + @test_nowarn h5write(fn, GenericString("A"), A) + @test_nowarn h5read(fn, GenericString("A")) + @test_nowarn h5read(fn, GenericString("A"), (2:3:15, 3:5)) + + @test_nowarn h5write(fn, GenericString("x"), 1) + @test_nowarn h5read(fn, GenericString("x") => Int) + + @test_nowarn h5rewrite(fn) do 
fid + g = create_group(fid, "mygroup") + write(g, "x", 3.3) + end + @test_nowarn h5rewrite(fn) do fid + g = create_group(fid, "mygroup") + write(g, "y", 3.3) + end -fn = GenericString(tempname()) -hfile = h5open(fn, "w") -close(hfile) -hfile = h5open(fn); close(hfile) -hfile = h5open(fn, "w") - -@test_nowarn create_group(hfile, GenericString("group1")) -@test_nowarn create_dataset(hfile, GenericString("dset1"), datatype(Int), dataspace((1,))) -@test_nowarn create_dataset(hfile, GenericString("dset2"), 1) - -@test_nowarn hfile[GenericString("group1")] -@test_nowarn hfile[GenericString("dset1")] - - -dset1 = hfile["dset1"] -@test_nowarn create_attribute(dset1, GenericString("meta1"), datatype(Bool), dataspace((1,))) -@test_nowarn create_attribute(dset1, GenericString("meta2"), 1) -@test_nowarn dset1[GenericString("meta1")] -@test_nowarn dset1[GenericString("x")] = 2 - -array_of_strings = ["test",] -write(hfile, "array_of_strings", array_of_strings) -@test_nowarn attributes(hfile)[GenericString("ref_test")] = HDF5.Reference(hfile, GenericString("array_of_strings")) -@test read(attributes(hfile)[GenericString("ref_test")]) === HDF5.Reference(hfile, "array_of_strings") - -hfile[GenericString("test")] = 17.2 -@test_nowarn delete_object(hfile, GenericString("test")) -@test_nowarn delete_attribute(dset1, GenericString("meta1")) - -# transient types -memtype_id = HDF5.API.h5t_copy(HDF5.API.H5T_NATIVE_DOUBLE) -dt = HDF5.Datatype(memtype_id) -@test !HDF5.API.h5t_committed(dt) -commit_datatype(hfile, GenericString("dt"), dt) -@test HDF5.API.h5t_committed(dt) - -dt = datatype(Int) -ds = dataspace(0) -d = create_dataset(hfile, GenericString("d"), dt, ds) -g = create_group(hfile, GenericString("g")) -a = create_attribute(hfile, GenericString("a"), dt, ds) - -for obj in (d, g) - @test_nowarn write_attribute(obj, GenericString("a"), 1) - @test_nowarn read_attribute(obj, GenericString("a")) - @test_nowarn write(obj, GenericString("aa"), 1) - @test_nowarn attributes(obj)["attr1"] = GenericString("b") -end -@test_nowarn write(d, "attr2", GenericString("c")) -@test_nowarn write_dataset(g, GenericString("ag"), GenericString("gg")) -@test_nowarn write_dataset(g, GenericString("ag_array"), [GenericString("a1"), GenericString("a2")]) - -genstrs = GenericString["fee", "fi", "foo"] -@test_nowarn write_attribute(d, GenericString("myattr"), genstrs) -@test genstrs == read(d["myattr"]) - -for obj in (hfile,) - @test_nowarn open_dataset(obj, GenericString("d")) - @test_nowarn write_dataset(obj, GenericString("dd"), 1) - @test_nowarn read_dataset(obj, GenericString("dd")) - @test_nowarn read(obj, GenericString("dd")) - @test_nowarn read(obj, GenericString("dd")=>Int) -end -read(attributes(hfile), GenericString("a")) - -write(hfile, GenericString("ASD"), GenericString("Aa")) -write(g, GenericString("ASD"), GenericString("Aa")) -write(g, GenericString("ASD1"), [GenericString("Aa")]) - -# test writing multiple variable -@test_nowarn write(hfile, GenericString("a1"), rand(2,2), GenericString("a2"), rand(2,2)) - -# copy methods -d1 = create_dataset(hfile, GenericString("d1"), dt, ds) -d1["x"] = 32 -@test_nowarn copy_object(hfile, GenericString("d1"), hfile, GenericString("d1copy1")) -@test_nowarn copy_object(d1, hfile, GenericString("d1copy2")) - -fn = GenericString(tempname()) -A = Matrix(reshape(1:120, 15, 8)) -@test_nowarn h5write(fn, GenericString("A"), A) -@test_nowarn h5read(fn, GenericString("A")) -@test_nowarn h5read(fn, GenericString("A"), (2:3:15, 3:5)) - -@test_nowarn h5write(fn, GenericString("x"), 1) 
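# The two convenience calls around this point do a full file round trip in one
# line each: `h5write` opens (or creates) the file, writes a single dataset,
# and closes it again, while `h5read(fn, name => T)` also asserts the Julia
# type of the value it reads back. A minimal sketch with a throwaway file
# (`fn_rt` is a hypothetical scratch name, not part of this testset):
fn_rt = tempname()
h5write(fn_rt, "x", 1)
@test h5read(fn_rt, "x" => Int) == 1  # read back with a type assertion
rm(fn_rt)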
-@test_nowarn h5read(fn, GenericString("x") => Int) - - -@test_nowarn h5rewrite(fn) do fid - g = create_group(fid, "mygroup") - write(g, "x", 3.3) -end -@test_nowarn h5rewrite(fn) do fid - g = create_group(fid, "mygroup") - write(g, "y", 3.3) -end - -@test_nowarn h5write(fn, "W", [1 2; 3 4]) -@test_nowarn h5writeattr(fn, GenericString("W"), Dict("a" => 1, "b" => 2)) -@test_nowarn h5readattr(fn, GenericString("W")) - -fn_external = GenericString(tempname()) -dset = HDF5.create_external_dataset(hfile, "ext", fn_external, Int, (10,20)) -dcpl = HDF5.get_create_properties(dset) -@test HDF5.API.h5p_get_external_count(dcpl) == 1 -ext_prop = HDF5.API.h5p_get_external(dcpl) -@test ext_prop.name == fn_external -@test ext_prop.offset == 0 -@test ext_prop.size == 10*20*sizeof(Int) -dapl = HDF5.get_access_properties(dset) -dapl.efile_prefix = "efile_test" -@test HDF5.API.h5p_get_efile_prefix(dapl) == "efile_test" -close(hfile) - + @test_nowarn h5write(fn, "W", [1 2; 3 4]) + @test_nowarn h5writeattr(fn, GenericString("W"), Dict("a" => 1, "b" => 2)) + @test_nowarn h5readattr(fn, GenericString("W")) + + fn_external = GenericString(tempname()) + dset = HDF5.create_external_dataset(hfile, "ext", fn_external, Int, (10, 20)) + dcpl = HDF5.get_create_properties(dset) + @test HDF5.API.h5p_get_external_count(dcpl) == 1 + ext_prop = HDF5.API.h5p_get_external(dcpl) + @test ext_prop.name == fn_external + @test ext_prop.offset == 0 + @test ext_prop.size == 10 * 20 * sizeof(Int) + dapl = HDF5.get_access_properties(dset) + dapl.efile_prefix = "efile_test" + @test HDF5.API.h5p_get_efile_prefix(dapl) == "efile_test" + close(hfile) end @testset "opaque data" begin @@ -1268,7 +1417,9 @@ end write_dataset(fid["matrix"], otype, buf2) # opaque data within a compound data type - ctype = HDF5.Datatype(HDF5.API.h5t_create(HDF5.API.H5T_COMPOUND, sizeof(num) + sizeof(otype))) + ctype = HDF5.Datatype( + HDF5.API.h5t_create(HDF5.API.H5T_COMPOUND, sizeof(num) + sizeof(otype)) + ) HDF5.API.h5t_insert(ctype, "v", 0, datatype(num)) HDF5.API.h5t_insert(ctype, "d", sizeof(num), otype) cdat = vcat(reinterpret(UInt8, [num]), dat0) @@ -1287,7 +1438,7 @@ end # Note: opaque tag is lost compound = read(fid["compound"]) - @test compound == (v = num, d = dat0) + @test compound == (v=num, d=dat0) close(fid) end @@ -1304,13 +1455,15 @@ end mktemp() do path, io close(io) num = Int64(9) - ref = join('a':'z') ^ 1000 + ref = join('a':'z')^1000 fid = h5open(path, "w") # long string serialized as FixedString fid["longstring"] = ref # compound datatype containing a FixedString - compound_dtype = HDF5.Datatype(HDF5.API.h5t_create(HDF5.API.H5T_COMPOUND, sizeof(num) + sizeof(ref))) + compound_dtype = HDF5.Datatype( + HDF5.API.h5t_create(HDF5.API.H5T_COMPOUND, sizeof(num) + sizeof(ref)) + ) HDF5.API.h5t_insert(compound_dtype, "n", 0, datatype(num)) HDF5.API.h5t_insert(compound_dtype, "a", sizeof(num), datatype(ref)) c = create_dataset(fid, "compoundlongstring", compound_dtype, dataspace(())) @@ -1321,7 +1474,6 @@ end @assert position(buf) == sizeof(compound_dtype) write_dataset(c, compound_dtype, take!(buf)) - # Test reading without stalling d = fid["longstring"] T = HDF5.get_jl_type(d) @@ -1332,12 +1484,12 @@ end T = HDF5.get_jl_type(c) @test T <: NamedTuple @test fieldnames(T) == (:n, :a) - @test read(c) == (n = num, a = ref) + @test read(c) == (n=num, a=ref) close(fid) end - fix = HDF5.FixedArray{Float64,(2,2),4}((1, 2, 3, 4)) + fix = HDF5.FixedArray{Float64,(2, 2),4}((1, 2, 3, 4)) @test size(typeof(fix)) == (2, 2) @test size(fix) == (2, 2) @test 
eltype(typeof(fix)) == Float64 @@ -1346,7 +1498,9 @@ end mktemp() do path, io close(io) ref = rand(Float64, 3000) - t = HDF5.Datatype(HDF5.API.h5t_array_create(datatype(Float64), ndims(ref), collect(size(ref)))) + t = HDF5.Datatype( + HDF5.API.h5t_array_create(datatype(Float64), ndims(ref), collect(size(ref))) + ) scalarspace = dataspace(()) fid = h5open(path, "w") @@ -1364,55 +1518,50 @@ end end @testset "Object Exists" begin + hfile = h5open(tempname(), "w") + g1 = create_group(hfile, "group1") + @test_throws ErrorException create_group(hfile, "group1") + create_group(g1, "group1a") + @test_throws ErrorException create_group(hfile, "/group1/group1a") + @test_throws ErrorException create_group(g1, "group1a") -hfile = h5open(tempname(), "w") -g1 = create_group(hfile, "group1") -@test_throws ErrorException create_group(hfile, "group1") -create_group(g1, "group1a") -@test_throws ErrorException create_group(hfile, "/group1/group1a") -@test_throws ErrorException create_group(g1, "group1a") + create_dataset(hfile, "dset1", 1) + create_dataset(hfile, "/group1/dset1", 1) -create_dataset(hfile, "dset1", 1) -create_dataset(hfile, "/group1/dset1", 1) - -@test_throws ErrorException create_dataset(hfile, "dset1", 1) -@test_throws ErrorException create_dataset(hfile, "group1", 1) -@test_throws ErrorException create_dataset(g1, "dset1", 1) - -close(hfile) + @test_throws ErrorException create_dataset(hfile, "dset1", 1) + @test_throws ErrorException create_dataset(hfile, "group1", 1) + @test_throws ErrorException create_dataset(g1, "dset1", 1) + close(hfile) end @testset "HDF5 existance" begin + fn1 = tempname() + fn2 = tempname() -fn1 = tempname() -fn2 = tempname() - -open(fn1, "w") do f - write(f, "Hello text file") -end - -@test !HDF5.ishdf5(fn1) # check that a non-hdf5 file retuns false -@test !HDF5.ishdf5(fn2) # checks that a file that does not exist returns false + open(fn1, "w") do f + write(f, "Hello text file") + end -@test_throws ErrorException h5write(fn1, "x", 1) # non hdf5 file throws -h5write(fn2, "x", 1) + @test !HDF5.ishdf5(fn1) # check that a non-hdf5 file retuns false + @test !HDF5.ishdf5(fn2) # checks that a file that does not exist returns false -@test HDF5.ishdf5(fn2) + @test_throws ErrorException h5write(fn1, "x", 1) # non hdf5 file throws + h5write(fn2, "x", 1) -rm(fn1) -rm(fn2) + @test HDF5.ishdf5(fn2) + rm(fn1) + rm(fn2) end @testset "bounds" begin -# issue #954 -h5open(tempname(), "w") do f - a, _ = create_attribute(f, "a", zeros(4)) - @test_throws ArgumentError write(a, ones(2)) - d, _ = create_dataset(f, "dd", zeros(4)) - @test_throws ArgumentError write(d, ones(2)) -end - + # issue #954 + h5open(tempname(), "w") do f + a, _ = create_attribute(f, "a", zeros(4)) + @test_throws ArgumentError write(a, ones(2)) + d, _ = create_dataset(f, "dd", zeros(4)) + @test_throws ArgumentError write(d, ones(2)) + end end diff --git a/test/properties.jl b/test/properties.jl index 9030ffa03..d388c1337 100644 --- a/test/properties.jl +++ b/test/properties.jl @@ -2,115 +2,120 @@ using HDF5 using Test @testset "properties" begin - -fn = tempname() -h5open(fn, "w"; - userblock = 1024, - alignment = (0, sizeof(Int)), - libver_bounds = (:earliest, :latest), - meta_block_size = 1024, - strategy = :fsm_aggr, - persist = 1, - threshold = 2, - file_space_page_size = 0x800 - ) do hfile - # generic - g = create_group(hfile, "group") - if HDF5.API.h5_get_libversion() >= v"1.10.5" - kwargs = (:no_attrs_hint => true,) - else - kwargs = () + fn = tempname() + h5open( + fn, + "w"; + userblock=1024, + 
alignment=(0, sizeof(Int)), + libver_bounds=(:earliest, :latest), + meta_block_size=1024, + strategy=:fsm_aggr, + persist=1, + threshold=2, + file_space_page_size=0x800 + ) do hfile + # generic + g = create_group(hfile, "group") + if HDF5.API.h5_get_libversion() >= v"1.10.5" + kwargs = (:no_attrs_hint => true,) + else + kwargs = () + end + d = create_dataset( + g, + "dataset", + datatype(Int), + dataspace((500, 50)); + alloc_time=HDF5.API.H5D_ALLOC_TIME_EARLY, + chunk=(5, 10), + fill_value=1, + fill_time=:never, + obj_track_times=false, + chunk_cache=(522, 0x200000, 0.80), + efile_prefix=:origin, + virtual_prefix="virtual", + virtual_printf_gap=2, + virtual_view=:last_available, + kwargs... + ) + attributes(d)["metadata"] = "test" + + flush(hfile) + + fcpl = HDF5.get_create_properties(hfile) + fapl = HDF5.get_access_properties(hfile) + gcpl = HDF5.get_create_properties(hfile["group"]) + dcpl = HDF5.get_create_properties(d) + dapl = HDF5.get_access_properties(d) + acpl = HDF5.get_create_properties(attributes(d)["metadata"]) + + # Retrievability of properties + @test isvalid(fcpl) + @test isvalid(fapl) + @test isvalid(gcpl) + @test isvalid(dcpl) + @test isvalid(dapl) + @test isvalid(acpl) + + # Retrieving property values: + @test fcpl.userblock == 1024 + @test fcpl.obj_track_times + @test fcpl.file_space_page_size == 0x800 + @test fcpl.strategy == :fsm_aggr + @test fcpl.persist == 1 + @test fcpl.threshold == 2 + + @test fapl.alignment == (0, sizeof(Int)) + @test fapl.driver == Drivers.POSIX() + @test_throws HDF5.API.H5Error fapl.driver_info + @test fapl.fclose_degree == :strong + @test fapl.libver_bounds == (:earliest, Base.thisminor(HDF5.libversion)) + @test fapl.meta_block_size == 1024 + + @test gcpl.local_heap_size_hint == 0 + @test gcpl.obj_track_times + + @test HDF5.UTF8_LINK_PROPERTIES.char_encoding == :utf8 + @test HDF5.UTF8_LINK_PROPERTIES.create_intermediate_group + + @test dcpl.alloc_time == :early + @test dcpl.chunk == (5, 10) + @test dcpl.layout == :chunked + @test !dcpl.obj_track_times + @test dcpl.fill_time == :never + @test dcpl.fill_value == 1.0 + if HDF5.API.h5_get_libversion() >= v"1.10.5" + @test dcpl.no_attrs_hint == true + end + + @test dapl.chunk_cache.nslots == 522 + @test dapl.chunk_cache.nbytes == 0x200000 + @test dapl.chunk_cache.w0 == 0.8 + @test dapl.efile_prefix == raw"$ORIGIN" + @test dapl.virtual_prefix == "virtual" + # We probably need to actually use a virtual dataset + @test_broken dapl.virtual_printf_gap == 2 + @test_broken dapl.virtual_view == :last_available + + @test acpl.char_encoding == :utf8 + + # Test auto-initialization of property lists on get + dcpl2 = HDF5.DatasetCreateProperties() # uninitialized + @test dcpl2.id < 1 # 0 or -1 + @test !isvalid(dcpl2) + @test dcpl2.alloc_time == :late + @test isvalid(dcpl2) + + # Test H5Pcopy + dapl2 = copy(dapl) + @test dapl2.id != dapl.id + @test dapl2.virtual_prefix == dapl.virtual_prefix + dapl2.virtual_prefix = "somewhere_else" + @test dapl2.virtual_prefix != dapl.virtual_prefix + + nothing end - d = create_dataset(g, "dataset", datatype(Int), dataspace((500,50)); - alloc_time = HDF5.API.H5D_ALLOC_TIME_EARLY, - chunk = (5, 10), - fill_value = 1, - fill_time = :never, - obj_track_times = false, - chunk_cache = (522, 0x200000, 0.80), - efile_prefix = :origin, - virtual_prefix = "virtual", - virtual_printf_gap = 2, - virtual_view = :last_available, - kwargs...) 
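# Each keyword in the `create_dataset` call above is routed into the dataset's
# creation or access property list, and the assertions later in this testset
# read the very same values back out of those lists. A minimal sketch of that
# round trip (the dataset name "sketch" is hypothetical; `hfile` is the file
# handle this testset opens):
d_sk = create_dataset(hfile, "sketch", datatype(Int), dataspace((500, 50));
                      chunk=(5, 10), fill_value=1)
dcpl_sk = HDF5.get_create_properties(d_sk)
@test dcpl_sk.chunk == (5, 10)    # stored via the dataset-creation list
@test dcpl_sk.layout == :chunked  # asking for a chunk implies chunked layout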
- attributes(d)["metadata"] = "test" - - flush(hfile) - - fcpl = HDF5.get_create_properties(hfile) - fapl = HDF5.get_access_properties(hfile) - gcpl = HDF5.get_create_properties(hfile["group"]) - dcpl = HDF5.get_create_properties(d) - dapl = HDF5.get_access_properties(d) - acpl = HDF5.get_create_properties(attributes(d)["metadata"]) - - # Retrievability of properties - @test isvalid(fcpl) - @test isvalid(fapl) - @test isvalid(gcpl) - @test isvalid(dcpl) - @test isvalid(dapl) - @test isvalid(acpl) - - # Retrieving property values: - @test fcpl.userblock == 1024 - @test fcpl.obj_track_times - @test fcpl.file_space_page_size == 0x800 - @test fcpl.strategy == :fsm_aggr - @test fcpl.persist == 1 - @test fcpl.threshold == 2 - - @test fapl.alignment == (0, sizeof(Int)) - @test fapl.driver == Drivers.POSIX() - @test_throws HDF5.API.H5Error fapl.driver_info - @test fapl.fclose_degree == :strong - @test fapl.libver_bounds == (:earliest, Base.thisminor(HDF5.libversion)) - @test fapl.meta_block_size == 1024 - - @test gcpl.local_heap_size_hint == 0 - @test gcpl.obj_track_times - - @test HDF5.UTF8_LINK_PROPERTIES.char_encoding == :utf8 - @test HDF5.UTF8_LINK_PROPERTIES.create_intermediate_group - - @test dcpl.alloc_time == :early - @test dcpl.chunk == (5, 10) - @test dcpl.layout == :chunked - @test !dcpl.obj_track_times - @test dcpl.fill_time == :never - @test dcpl.fill_value == 1.0 - if HDF5.API.h5_get_libversion() >= v"1.10.5" - @test dcpl.no_attrs_hint == true - end - - @test dapl.chunk_cache.nslots == 522 - @test dapl.chunk_cache.nbytes == 0x200000 - @test dapl.chunk_cache.w0 == 0.8 - @test dapl.efile_prefix == raw"$ORIGIN" - @test dapl.virtual_prefix == "virtual" - # We probably need to actually use a virtual dataset - @test_broken dapl.virtual_printf_gap == 2 - @test_broken dapl.virtual_view == :last_available - - @test acpl.char_encoding == :utf8 - - # Test auto-initialization of property lists on get - dcpl2 = HDF5.DatasetCreateProperties() # uninitialized - @test dcpl2.id < 1 # 0 or -1 - @test !isvalid(dcpl2) - @test dcpl2.alloc_time == :late - @test isvalid(dcpl2) - - # Test H5Pcopy - dapl2 = copy(dapl) - @test dapl2.id != dapl.id - @test dapl2.virtual_prefix == dapl.virtual_prefix - dapl2.virtual_prefix = "somewhere_else" - @test dapl2.virtual_prefix != dapl.virtual_prefix - - nothing -end - -rm(fn, force=true) + rm(fn; force=true) end diff --git a/test/readremote.jl b/test/readremote.jl index 3baa46426..a8ea72300 100644 --- a/test/readremote.jl +++ b/test/readremote.jl @@ -4,187 +4,204 @@ using LinearAlgebra: norm @testset "readremote" begin -# check that we can read the official HDF5 example files - -# download and save test file via: -# urlbase = "https://support.hdfgroup.org/ftp/HDF5/examples/files/exbyapi/" -test_files = joinpath(@__DIR__, "test_files") -# if !isdir(test_files) -# mkdir(test_files) -# end -# function joinpath(test_files, name) -# file = joinpath(test_files, name) -# if !isfile(file) -# file = download(urlbase*name, file) -# end -# file -# end - -fcmp = [0 1 2 3 4 5 6; - 2 1.66667 2.4 3.28571 4.22222 5.18182 6.15385; - 4 2.33333 2.8 3.57143 4.44444 5.36364 6.30769; - 6 3 3.2 3.85714 4.66667 5.54545 6.46154]' -icmp = [0 -1 -2 -3 -4 -5 -6; - 0 0 0 0 0 0 0; - 0 1 2 3 4 5 6; - 0 2 4 6 8 10 12]' -SOLID, LIQUID, GAS, PLASMA = 0, 1, 2, 3 -ecmp = [SOLID SOLID SOLID SOLID SOLID SOLID SOLID; - SOLID LIQUID GAS PLASMA SOLID LIQUID GAS; - SOLID GAS SOLID GAS SOLID GAS SOLID; - SOLID PLASMA GAS LIQUID SOLID PLASMA GAS]' -scmp = ["Parting", "is such", "sweet", "sorrow."] -vicmp = 
Array{Int32}[[3, 2, 1],[1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144]] -opq = Array{UInt8}[[0x4f, 0x50, 0x41, 0x51, 0x55, 0x45, 0x30], - [0x4f, 0x50, 0x41, 0x51, 0x55, 0x45, 0x31], - [0x4f, 0x50, 0x41, 0x51, 0x55, 0x45, 0x32], - [0x4f, 0x50, 0x41, 0x51, 0x55, 0x45, 0x33]] -# For H5T_ARRAY -AA = Array{Int,2}[ - [0 0 0; - 0 -1 -2; - 0 -2 -4; - 0 -3 -6; - 0 -4 -8], - [0 1 2; - 1 1 1; - 2 1 0; - 3 1 -1; - 4 1 -2], - [0 2 4; - 2 3 4; - 4 4 4; - 6 5 4; - 8 6 4], - [0 3 6; - 3 5 7; - 6 7 8; - 9 9 9; - 12 11 10]] - - -file = joinpath(test_files, "h5ex_t_floatatt.h5") -fid = h5open(file, "r") -dset = fid["DS1"] -a = read_attribute(dset, "A1") -@test norm(a - fcmp) < 1.5e-5 -close(fid) - -file = joinpath(test_files, "h5ex_t_float.h5") -fid = h5open(file, "r") -d = read(fid, "DS1") -@test norm(d - fcmp) < 1.5e-5 -close(fid) - -file = joinpath(test_files, "h5ex_t_intatt.h5") -fid = h5open(file, "r") -dset = fid["DS1"] -a = read_attribute(dset, "A1") -@test a == icmp -close(fid) - -file = joinpath(test_files, "h5ex_t_int.h5") -fid = h5open(file, "r") -d = read(fid, "DS1") -@test d == icmp -close(fid) - -if HDF5.API.h5_get_libversion() >= v"1.8.11" - file = joinpath(test_files, "h5ex_t_enumatt.h5") - fid = h5open(file, "r") - dset = fid["DS1"] - a = read_attribute(dset, "A1") - @test a == ecmp - close(fid) - - file = joinpath(test_files, "h5ex_t_enum.h5") - fid = h5open(file, "r") - d = read(fid, "DS1") - @test d == ecmp - close(fid) -end - -file = joinpath(test_files, "h5ex_t_objrefatt.h5") -fid = h5open(file, "r") -dset = fid["DS1"] -a = read_attribute(dset, "A1") -g = fid[a[1]] -@test isa(g, HDF5.Group) -ds2 = fid[a[2]] -ds2v = read(ds2) -@test isa(ds2v, HDF5.EmptyArray{Int32}) -@test isempty(ds2v) -close(fid) - -file = joinpath(test_files, "h5ex_t_objref.h5") -fid = h5open(file, "r") -d = read(fid, "DS1") -g = fid[d[1]] -@test isa(g, HDF5.Group) -ds2 = fid[d[2]] -ds2v = read(ds2) -@test isa(ds2v, HDF5.EmptyArray{Int32}) -@test isempty(ds2v) -close(fid) - -file = joinpath(test_files, "h5ex_t_stringatt.h5") -fid = h5open(file, "r") -dset = fid["DS1"] -a = read_attribute(dset, "A1") -@test a == scmp -close(fid) - -file = joinpath(test_files, "h5ex_t_string.h5") -fid = h5open(file, "r") -d = read(fid, "DS1") -@test d == scmp -close(fid) - -file = joinpath(test_files, "h5ex_t_vlenatt.h5") -fid = h5open(file, "r") -dset = fid["DS1"] -a = read_attribute(dset, "A1") -@test a == vicmp -close(fid) - -file = joinpath(test_files, "h5ex_t_vlen.h5") -fid = h5open(file, "r") -d = read(fid, "DS1") -@test d == vicmp -close(fid) - -file = joinpath(test_files, "h5ex_t_vlstringatt.h5") -fid = h5open(file, "r") -dset = fid["DS1"] -a = read_attribute(dset, "A1") -@test a == scmp -close(fid) - -file = joinpath(test_files, "h5ex_t_vlstring.h5") -fid = h5open(file, "r") -d = read(fid, "DS1") -@test d == scmp -close(fid) - -file = joinpath(test_files, "h5ex_t_opaqueatt.h5") -fid = h5open(file, "r") -dset = fid["DS1"] -a = read_attribute(dset, "A1") -@test a.tag == "Character array" -@test a.data == opq -close(fid) - -file = joinpath(test_files, "h5ex_t_opaque.h5") -fid = h5open(file, "r") -d = read(fid, "DS1") -@test d.tag == "Character array" -@test d.data == opq -close(fid) - -file = joinpath(test_files, "h5ex_t_array.h5") -fid = h5open(file, "r") -A = read(fid, "DS1") -@test A == AA -close(fid) - + # check that we can read the official HDF5 example files + + # download and save test file via: + # urlbase = "https://support.hdfgroup.org/ftp/HDF5/examples/files/exbyapi/" + test_files = joinpath(@__DIR__, "test_files") + # if 
!isdir(test_files) + # mkdir(test_files) + # end + # function joinpath(test_files, name) + # file = joinpath(test_files, name) + # if !isfile(file) + # file = download(urlbase*name, file) + # end + # file + # end + + fcmp = + [ + 0 1 2 3 4 5 6 + 2 1.66667 2.4 3.28571 4.22222 5.18182 6.15385 + 4 2.33333 2.8 3.57143 4.44444 5.36364 6.30769 + 6 3 3.2 3.85714 4.66667 5.54545 6.46154 + ]' + icmp = [ + 0 -1 -2 -3 -4 -5 -6 + 0 0 0 0 0 0 0 + 0 1 2 3 4 5 6 + 0 2 4 6 8 10 12 + ]' + SOLID, LIQUID, GAS, PLASMA = 0, 1, 2, 3 + ecmp = + [ + SOLID SOLID SOLID SOLID SOLID SOLID SOLID + SOLID LIQUID GAS PLASMA SOLID LIQUID GAS + SOLID GAS SOLID GAS SOLID GAS SOLID + SOLID PLASMA GAS LIQUID SOLID PLASMA GAS + ]' + scmp = ["Parting", "is such", "sweet", "sorrow."] + vicmp = Array{Int32}[[3, 2, 1], [1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144]] + opq = Array{UInt8}[ + [0x4f, 0x50, 0x41, 0x51, 0x55, 0x45, 0x30], + [0x4f, 0x50, 0x41, 0x51, 0x55, 0x45, 0x31], + [0x4f, 0x50, 0x41, 0x51, 0x55, 0x45, 0x32], + [0x4f, 0x50, 0x41, 0x51, 0x55, 0x45, 0x33] + ] + # For H5T_ARRAY + AA = Array{Int,2}[ + [ + 0 0 0 + 0 -1 -2 + 0 -2 -4 + 0 -3 -6 + 0 -4 -8 + ], + [ + 0 1 2 + 1 1 1 + 2 1 0 + 3 1 -1 + 4 1 -2 + ], + [ + 0 2 4 + 2 3 4 + 4 4 4 + 6 5 4 + 8 6 4 + ], + [ + 0 3 6 + 3 5 7 + 6 7 8 + 9 9 9 + 12 11 10 + ] + ] + + file = joinpath(test_files, "h5ex_t_floatatt.h5") + fid = h5open(file, "r") + dset = fid["DS1"] + a = read_attribute(dset, "A1") + @test norm(a - fcmp) < 1.5e-5 + close(fid) + + file = joinpath(test_files, "h5ex_t_float.h5") + fid = h5open(file, "r") + d = read(fid, "DS1") + @test norm(d - fcmp) < 1.5e-5 + close(fid) + + file = joinpath(test_files, "h5ex_t_intatt.h5") + fid = h5open(file, "r") + dset = fid["DS1"] + a = read_attribute(dset, "A1") + @test a == icmp + close(fid) + + file = joinpath(test_files, "h5ex_t_int.h5") + fid = h5open(file, "r") + d = read(fid, "DS1") + @test d == icmp + close(fid) + + if HDF5.API.h5_get_libversion() >= v"1.8.11" + file = joinpath(test_files, "h5ex_t_enumatt.h5") + fid = h5open(file, "r") + dset = fid["DS1"] + a = read_attribute(dset, "A1") + @test a == ecmp + close(fid) + + file = joinpath(test_files, "h5ex_t_enum.h5") + fid = h5open(file, "r") + d = read(fid, "DS1") + @test d == ecmp + close(fid) + end + + file = joinpath(test_files, "h5ex_t_objrefatt.h5") + fid = h5open(file, "r") + dset = fid["DS1"] + a = read_attribute(dset, "A1") + g = fid[a[1]] + @test isa(g, HDF5.Group) + ds2 = fid[a[2]] + ds2v = read(ds2) + @test isa(ds2v, HDF5.EmptyArray{Int32}) + @test isempty(ds2v) + close(fid) + + file = joinpath(test_files, "h5ex_t_objref.h5") + fid = h5open(file, "r") + d = read(fid, "DS1") + g = fid[d[1]] + @test isa(g, HDF5.Group) + ds2 = fid[d[2]] + ds2v = read(ds2) + @test isa(ds2v, HDF5.EmptyArray{Int32}) + @test isempty(ds2v) + close(fid) + + file = joinpath(test_files, "h5ex_t_stringatt.h5") + fid = h5open(file, "r") + dset = fid["DS1"] + a = read_attribute(dset, "A1") + @test a == scmp + close(fid) + + file = joinpath(test_files, "h5ex_t_string.h5") + fid = h5open(file, "r") + d = read(fid, "DS1") + @test d == scmp + close(fid) + + file = joinpath(test_files, "h5ex_t_vlenatt.h5") + fid = h5open(file, "r") + dset = fid["DS1"] + a = read_attribute(dset, "A1") + @test a == vicmp + close(fid) + + file = joinpath(test_files, "h5ex_t_vlen.h5") + fid = h5open(file, "r") + d = read(fid, "DS1") + @test d == vicmp + close(fid) + + file = joinpath(test_files, "h5ex_t_vlstringatt.h5") + fid = h5open(file, "r") + dset = fid["DS1"] + a = read_attribute(dset, "A1") + @test a == scmp + 
close(fid) + + file = joinpath(test_files, "h5ex_t_vlstring.h5") + fid = h5open(file, "r") + d = read(fid, "DS1") + @test d == scmp + close(fid) + + file = joinpath(test_files, "h5ex_t_opaqueatt.h5") + fid = h5open(file, "r") + dset = fid["DS1"] + a = read_attribute(dset, "A1") + @test a.tag == "Character array" + @test a.data == opq + close(fid) + + file = joinpath(test_files, "h5ex_t_opaque.h5") + fid = h5open(file, "r") + d = read(fid, "DS1") + @test d.tag == "Character array" + @test d.data == opq + close(fid) + + file = joinpath(test_files, "h5ex_t_array.h5") + fid = h5open(file, "r") + A = read(fid, "DS1") + @test A == AA + close(fid) end # testset readremote diff --git a/test/runtests.jl b/test/runtests.jl index 612ba4402..eb05d9352 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -2,12 +2,12 @@ using HDF5 using Test using Pkg filter_path = joinpath(dirname(pathof(HDF5)), "..", "filters") -Pkg.develop(PackageSpec(path=joinpath(filter_path, "H5Zblosc"))) -Pkg.develop(PackageSpec(path=joinpath(filter_path, "H5Zbzip2"))) -Pkg.develop(PackageSpec(path=joinpath(filter_path, "H5Zlz4"))) -Pkg.develop(PackageSpec(path=joinpath(filter_path, "H5Zzstd"))) +Pkg.develop(PackageSpec(; path=joinpath(filter_path, "H5Zblosc"))) +Pkg.develop(PackageSpec(; path=joinpath(filter_path, "H5Zbzip2"))) +Pkg.develop(PackageSpec(; path=joinpath(filter_path, "H5Zlz4"))) +Pkg.develop(PackageSpec(; path=joinpath(filter_path, "H5Zzstd"))) @static if VERSION >= v"1.6" - Pkg.develop(PackageSpec(path=joinpath(filter_path, "H5Zbitshuffle"))) + Pkg.develop(PackageSpec(; path=joinpath(filter_path, "H5Zbitshuffle"))) end @info "libhdf5 v$(HDF5.API.h5_get_libversion())" @@ -16,61 +16,59 @@ end # ENV["JULIA_DEBUG"] = "Main" @testset "HDF5.jl" begin + @debug "plain" + include("plain.jl") + @debug "api" + include("api.jl") + @debug "compound" + include("compound.jl") + @debug "custom" + include("custom.jl") + @debug "reference" + include("reference.jl") + @debug "dataspace" + include("dataspace.jl") + @debug "datatype" + include("datatype.jl") + @debug "hyperslab" + include("hyperslab.jl") + @debug "attributes" + include("attributes.jl") + @debug "readremote" + include("readremote.jl") + @debug "extend_test" + include("extend_test.jl") + @debug "gc" + include("gc.jl") + @debug "external" + include("external.jl") + @debug "swmr" + include("swmr.jl") + @debug "mmap" + include("mmap.jl") + @debug "properties" + include("properties.jl") + @debug "table" + include("table.jl") + @debug "filter" + include("filter.jl") + @debug "chunkstorage" + include("chunkstorage.jl") + @debug "fileio" + include("fileio.jl") + @debug "nonallocating" + include("nonallocating.jl") + @debug "filter test utils" + include("filters/FilterTestUtils.jl") + @debug "objects" + include("objects.jl") -@debug "plain" -include("plain.jl") -@debug "api" -include("api.jl") -@debug "compound" -include("compound.jl") -@debug "custom" -include("custom.jl") -@debug "reference" -include("reference.jl") -@debug "dataspace" -include("dataspace.jl") -@debug "datatype" -include("datatype.jl") -@debug "hyperslab" -include("hyperslab.jl") -@debug "attributes" -include("attributes.jl") -@debug "readremote" -include("readremote.jl") -@debug "extend_test" -include("extend_test.jl") -@debug "gc" -include("gc.jl") -@debug "external" -include("external.jl") -@debug "swmr" -include("swmr.jl") -@debug "mmap" -include("mmap.jl") -@debug "properties" -include("properties.jl") -@debug "table" -include("table.jl") -@debug "filter" -include("filter.jl") -@debug "chunkstorage" 
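# The `@debug "name"` / `include("name.jl")` pairs only print their banners
# when debug logging is enabled for `Main`, which the commented-out
# `ENV["JULIA_DEBUG"] = "Main"` near the top of this file controls; this is
# standard Julia logging, not an HDF5.jl feature. A one-line sketch:
# ENV["JULIA_DEBUG"] = "Main"  # uncomment before running to see the banners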
-include("chunkstorage.jl") -@debug "fileio" -include("fileio.jl") -@debug "nonallocating" -include("nonallocating.jl") -@debug "filter test utils" -include("filters/FilterTestUtils.jl") -@debug "objects" -include("objects.jl") - -using MPI -if HDF5.has_parallel() - # basic MPI tests, for actual parallel tests we need to run in MPI mode - include("mpio.jl") -end - -# Clean up after all resources -HDF5.API.h5_close() + using MPI + if HDF5.has_parallel() + # basic MPI tests, for actual parallel tests we need to run in MPI mode + include("mpio.jl") + end + # Clean up after all resources + HDF5.API.h5_close() end diff --git a/test/swmr.jl b/test/swmr.jl index 0cc6e7e2a..1b7b7046e 100644 --- a/test/swmr.jl +++ b/test/swmr.jl @@ -12,113 +12,112 @@ end @everywhere using HDF5 @testset "swmr" begin -fname = tempname() + fname = tempname() -@testset "swmr modes" begin - h5open(fname, "w", swmr=true) do h5 - h5["foo"] = collect(1:10) - end - h5open(fname, "r", swmr=true) do h5 - @test read(h5["foo"]) == collect(1:10) - end - h5open(fname, "r+", swmr=true) do h5 - @test read(h5["foo"]) == collect(1:10) + @testset "swmr modes" begin + h5open(fname, "w"; swmr=true) do h5 + h5["foo"] = collect(1:10) + end + h5open(fname, "r"; swmr=true) do h5 + @test read(h5["foo"]) == collect(1:10) + end + h5open(fname, "r+"; swmr=true) do h5 + @test read(h5["foo"]) == collect(1:10) + end end -end -@testset "h5d_oappend" begin - h5open(fname, "w") do h5 - g = create_group(h5, "shoe") - d = create_dataset(g, "bar", datatype(Float64), ((1,), (-1,)), chunk=(100,)) - dxpl_id = HDF5.get_create_properties(d) - v = [1.0, 2.0] - memtype = datatype(Float64) - # @test HDF5.h5d_oappend(d, dxpl_id, 0, length(v), memtype, v) + @testset "h5d_oappend" begin + h5open(fname, "w") do h5 + g = create_group(h5, "shoe") + d = create_dataset(g, "bar", datatype(Float64), ((1,), (-1,)); chunk=(100,)) + dxpl_id = HDF5.get_create_properties(d) + v = [1.0, 2.0] + memtype = datatype(Float64) + # @test HDF5.h5d_oappend(d, dxpl_id, 0, length(v), memtype, v) + end end -end -function dataset_write(d, ch_written, ch_read) - for i = 1:10 - @assert take!(ch_read) == true - HDF5.set_extent_dims(d, (i*10,)) - inds::UnitRange{Int} = (1:10) .+ (i - 1) * 10 - d[inds] = inds - flush(d) # flush the dataset - put!(ch_written,i) + function dataset_write(d, ch_written, ch_read) + for i in 1:10 + @assert take!(ch_read) == true + HDF5.set_extent_dims(d, (i * 10,)) + inds::UnitRange{Int} = (1:10) .+ (i - 1) * 10 + d[inds] = inds + flush(d) # flush the dataset + put!(ch_written, i) + end end -end -@everywhere function dataset_read(d, ch_written, ch_read) - n = nlast = length(d) - nbigger = 0 - i = 0 - put!(ch_read, true) - while n < 100 - i = take!(ch_written) - for j = 1:1000 # wait for new data to be available to avoid CI failures - HDF5.refresh(d) - nlast, n = n, length(d) - n > nlast && break - sleep(0.001) + @everywhere function dataset_read(d, ch_written, ch_read) + n = nlast = length(d) + nbigger = 0 + i = 0 + put!(ch_read, true) + while n < 100 + i = take!(ch_written) + for j in 1:1000 # wait for new data to be available to avoid CI failures + HDF5.refresh(d) + nlast, n = n, length(d) + n > nlast && break + sleep(0.001) + end + vals = read(d) + @assert vals == collect(1:n) + n > nlast && (nbigger += 1) + put!(ch_read, true) end - vals = read(d) - @assert vals == collect(1:n) - n > nlast && (nbigger += 1) - put!(ch_read,true) + return nbigger end - return nbigger -end -@everywhere function swmr_reader(fname, ch_written, ch_read) - h5open(fname, "r", swmr=true) 
do h5 - d = h5["foo"] - dataset_read(d, ch_written, ch_read) + @everywhere function swmr_reader(fname, ch_written, ch_read) + h5open(fname, "r"; swmr=true) do h5 + d = h5["foo"] + dataset_read(d, ch_written, ch_read) + end end -end -# Spawn a reader function in a 2nd process, provide two channels for synchronization. -# Run a writing function in this process. The writing function writes, -# then notifies `ch_read`, then the reading function reads, and notifies `ch_read`. So read -# attempts should always follow writes, though there may be a delay before the data is available -# so there is a step that sleeps until data is available. -function remote_test(h5) - ch_written, ch_read = RemoteChannel(1), RemoteChannel(1) - a = @spawn(swmr_reader(fname, ch_written, ch_read)) - dataset_write(h5["foo"], ch_written, ch_read) - nbigger = fetch(a) - @test nbigger == 10 -end - -# create datasets and attributes before staring swmr writing -function prep_h5_file(h5) - d = create_dataset(h5, "foo", datatype(Int), ((1,), (100,)), chunk=(1,)) - attributes(h5)["bar"] = "bar" - g = create_group(h5, "group") -end - -@testset "create by libver, then start_swmr_write" begin - #test this h5open method with keyword arg - h5open(fname, "w", libver_bounds=(:latest, :latest), swmr=false) do h5 - prep_h5_file(h5) - HDF5.start_swmr_write(h5) # after creating datasets - remote_test(h5) + # Spawn a reader function in a 2nd process, provide two channels for synchronization. + # Run a writing function in this process. The writing function writes, + # then notifies `ch_read`, then the reading function reads, and notifies `ch_read`. So read + # attempts should always follow writes, though there may be a delay before the data is available + # so there is a step that sleeps until data is available. + function remote_test(h5) + ch_written, ch_read = RemoteChannel(1), RemoteChannel(1) + a = @spawn(swmr_reader(fname, ch_written, ch_read)) + dataset_write(h5["foo"], ch_written, ch_read) + nbigger = fetch(a) + @test nbigger == 10 end -end -@testset "create by swmr mode, then close and open again" begin - h5open(fname, "w", swmr=true) do h5 - prep_h5_file(h5) + # create datasets and attributes before staring swmr writing + function prep_h5_file(h5) + d = create_dataset(h5, "foo", datatype(Int), ((1,), (100,)); chunk=(1,)) + attributes(h5)["bar"] = "bar" + g = create_group(h5, "group") end - # close the file after creating datasets, open again with swmr write access but not truncate - h5open(fname, "r+", swmr=true) do h5 - remote_test(h5) + + @testset "create by libver, then start_swmr_write" begin + #test this h5open method with keyword arg + h5open(fname, "w"; libver_bounds=(:latest, :latest), swmr=false) do h5 + prep_h5_file(h5) + HDF5.start_swmr_write(h5) # after creating datasets + remote_test(h5) + end end -end -rm(fname) # cleanup file created by swmr tests + @testset "create by swmr mode, then close and open again" begin + h5open(fname, "w"; swmr=true) do h5 + prep_h5_file(h5) + end + # close the file after creating datasets, open again with swmr write access but not truncate + h5open(fname, "r+"; swmr=true) do h5 + remote_test(h5) + end + end -if nprocs() > 1 - rmprocs(procs) -end + rm(fname) # cleanup file created by swmr tests + if nprocs() > 1 + rmprocs(procs) + end end # testset swmr diff --git a/test/table.jl b/test/table.jl index b41884148..a682b502f 100644 --- a/test/table.jl +++ b/test/table.jl @@ -1,11 +1,10 @@ using HDF5 using Test - hf = h5open(tempname(), "w") fv = 3.14 -data = [1.,2.,3.,4.,5.,6.] 
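# The H5TB calls below lay each record out as `nfield` consecutive Float64
# fields, so one record spans `recsize = nfield * floatsize` bytes and field
# `i` begins at `offset[i]` inside it; `h5tb_read_table` then fills a flat
# Float64 buffer using those same offsets and field sizes. A quick check of
# that bookkeeping for the two-field layout used here:
@assert 2 * sizeof(Float64) == 16  # `recsize` below works out to 16 bytes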
+data = [1.0, 2.0, 3.0, 4.0, 5.0, 6.0] floatsize = sizeof(data[1]) h5t = datatype(data[1]) title = "lal" @@ -14,16 +13,32 @@ nfield = 2 nrec = 3 recsize = nfield * floatsize colname = ["f1_verylongnameforfun", "f2"] -offset = [0,floatsize] +offset = [0, floatsize] tid = [h5t.id, h5t.id] chunk = 7 fillvalue = [3.14, 2.71] compress = 1 -HDF5.API.h5tb_make_table(title, hf, name, nfield, nrec, recsize, colname, offset, tid, chunk, fillvalue, compress, data) +HDF5.API.h5tb_make_table( + title, + hf, + name, + nfield, + nrec, + recsize, + colname, + offset, + tid, + chunk, + fillvalue, + compress, + data +) fieldsize = [floatsize, floatsize] HDF5.API.h5tb_append_records(hf, name, nrec, recsize, offset, fieldsize, data) -HDF5.API.h5tb_write_records(hf, name, 1, 4, recsize, offset, fieldsize, collect(1:8) .+ 20.0) +HDF5.API.h5tb_write_records( + hf, name, 1, 4, recsize, offset, fieldsize, collect(1:8) .+ 20.0 +) buf = fill(0.0, 100) HDF5.API.h5tb_read_table(hf, name, recsize, offset, fieldsize, buf) From 0cdf4a66a2c2991469125c275eea5f6bfaa444e8 Mon Sep 17 00:00:00 2001 From: kittisopikulm Date: Fri, 5 Aug 2022 19:17:26 -0400 Subject: [PATCH 4/6] Leave some sections up to manual formatting for now --- src/file.jl | 10 +- src/properties.jl | 524 +++++++++++++++-------------------------- src/typeconversions.jl | 24 +- test/readremote.jl | 69 +++--- 4 files changed, 242 insertions(+), 385 deletions(-) diff --git a/src/file.jl b/src/file.jl index a2d071175..6674122b7 100644 --- a/src/file.jl +++ b/src/file.jl @@ -24,14 +24,16 @@ function h5open( fcpl::FileCreateProperties=FileCreateProperties(); swmr::Bool=false ) + #! format: off rd, wr, cr, tr, ff = - mode == "r" ? (true, false, false, false, false) : - mode == "r+" ? (true, true, false, false, true) : - mode == "cw" ? (false, true, true, false, true) : - mode == "w" ? (false, true, true, true, false) : + mode == "r" ? (true, false, false, false, false) : + mode == "r+" ? (true, true, false, false, true ) : + mode == "cw" ? (false, true, true, false, true ) : + mode == "w" ? (false, true, true, true, false) : # mode == "w+" ? (true, true, true, true, false) : # mode == "a" ? (true, true, true, true, true ) : error("invalid open mode: ", mode) + #! format: on if ff && !wr error("HDF5 does not support appending without writing") end diff --git a/src/properties.jl b/src/properties.jl index 3cc05ace2..f4b4069fb 100644 --- a/src/properties.jl +++ b/src/properties.jl @@ -228,6 +228,8 @@ end ### Define Properties types ### +#! format: off + """ ObjectCreateProperties(;kws...) ObjectCreateProperties(f::Function; kws...) @@ -245,44 +247,24 @@ that will be closed. @bool_property(obj_track_times) -class_propertynames(::Type{ObjectCreateProperties}) = (:obj_track_times, :track_times,) +class_propertynames(::Type{ObjectCreateProperties}) = ( + :obj_track_times, + :track_times, + ) function class_getproperty(::Type{ObjectCreateProperties}, p::Properties, name::Symbol) - if name === :obj_track_times - get_obj_track_times(p) - # deprecated - elseif name === :track_times - ( - depwarn( - "`track_times` property is deprecated, use `obj_track_times` instead", - :track_times - ); - get_obj_track_times(p) - ) - else - class_getproperty(superclass(ObjectCreateProperties), p, name) - end + name === :obj_track_times ? get_obj_track_times(p) : + # deprecated + name === :track_times ? 
(depwarn("`track_times` property is deprecated, use `obj_track_times` instead",:track_times); get_obj_track_times(p)) : + class_getproperty(superclass(ObjectCreateProperties), p, name) end -function class_setproperty!( - ::Type{ObjectCreateProperties}, p::Properties, name::Symbol, val -) - if name === :obj_track_times - set_obj_track_times!(p, val) - # deprecated - elseif name === :track_times - ( - depwarn( - "`track_times=$val` keyword option is deprecated, use `obj_track_times=$val` instead", - :track_times - ); - set_obj_track_times!(p, val) - ) - else - class_setproperty!(superclass(ObjectCreateProperties), p, name, val) - end +function class_setproperty!(::Type{ObjectCreateProperties}, p::Properties, name::Symbol, val) + name === :obj_track_times ? set_obj_track_times!(p, val) : + # deprecated + name === :track_times ? (depwarn("`track_times=$val` keyword option is deprecated, use `obj_track_times=$val` instead",:track_times); set_obj_track_times!(p, val)) : + class_setproperty!(superclass(ObjectCreateProperties), p, name, val) end -get_track_order(p::Properties) = - API.h5p_get_link_creation_order(p) != 0 && API.h5p_get_attr_creation_order(p) != 0 +get_track_order(p::Properties) = API.h5p_get_link_creation_order(p) != 0 && API.h5p_get_attr_creation_order(p) != 0 function set_track_order!(p::Properties, val::Bool) crt_order_flags = val ? (API.H5P_CRT_ORDER_TRACKED | API.H5P_CRT_ORDER_INDEXED) : 0 @@ -308,24 +290,19 @@ that will be closed. @propertyclass GroupCreateProperties API.H5P_GROUP_CREATE superclass(::Type{GroupCreateProperties}) = ObjectCreateProperties -class_propertynames(::Type{GroupCreateProperties}) = (:local_heap_size_hint, :track_order,) +class_propertynames(::Type{GroupCreateProperties}) = ( + :local_heap_size_hint, + :track_order, + ) function class_getproperty(::Type{GroupCreateProperties}, p::Properties, name::Symbol) - if name === :local_heap_size_hint - API.h5p_get_local_heap_size_hint(p) - elseif name === :track_order - get_track_order(p) - else - class_getproperty(superclass(GroupCreateProperties), p, name) - end + name === :local_heap_size_hint ? API.h5p_get_local_heap_size_hint(p) : + name === :track_order ? get_track_order(p) : + class_getproperty(superclass(GroupCreateProperties), p, name) end function class_setproperty!(::Type{GroupCreateProperties}, p::Properties, name::Symbol, val) - if name === :local_heap_size_hint - API.h5p_set_local_heap_size_hint(p, val) - elseif name === :track_order - set_track_order!(p, val) - else - class_setproperty!(superclass(GroupCreateProperties), p, name, val) - end + name === :local_heap_size_hint ? API.h5p_set_local_heap_size_hint(p, val) : + name === :track_order ? set_track_order!(p, val) : + class_setproperty!(superclass(GroupCreateProperties), p, name, val) end """ @@ -346,8 +323,15 @@ that will be closed. 
@propertyclass FileCreateProperties API.H5P_FILE_CREATE superclass(::Type{FileCreateProperties}) = ObjectCreateProperties -class_propertynames(::Type{FileCreateProperties}) = - (:userblock, :track_order, :strategy, :persist, :threshold, :file_space_page_size) + +class_propertynames(::Type{FileCreateProperties}) = ( + :userblock, + :track_order, + :strategy, + :persist, + :threshold, + :file_space_page_size + ) const FSPACE_STRATEGY_SYMBOLS = Dict( :fsm_aggr => API.H5F_FSPACE_STRATEGY_FSM_AGGR, @@ -357,10 +341,8 @@ const FSPACE_STRATEGY_SYMBOLS = Dict( :ntypes => API.H5F_FSPACE_STRATEGY_NTYPES ) -set_strategy!(p::FileCreateProperties, val) = - API.h5p_set_file_space_strategy(p; strategy=val) -set_strategy!(p::FileCreateProperties, val::Symbol) = - API.h5p_set_file_space_strategy(p; strategy=FSPACE_STRATEGY_SYMBOLS[val]) +set_strategy!(p::FileCreateProperties, val) = API.h5p_set_file_space_strategy(p, strategy = val) +set_strategy!(p::FileCreateProperties, val::Symbol) = API.h5p_set_file_space_strategy(p, strategy = FSPACE_STRATEGY_SYMBOLS[val]) function get_strategy(p::FileCreateProperties) strategy = API.h5p_get_file_space_strategy(p)[:strategy] for (k, v) in FSPACE_STRATEGY_SYMBOLS @@ -372,32 +354,25 @@ function get_strategy(p::FileCreateProperties) end function class_getproperty(::Type{FileCreateProperties}, p::Properties, name::Symbol) - if name === :userblock - API.h5p_get_userblock(p) - elseif name === :track_order - get_track_order(p) - elseif name === :strategy - get_strategy(p) - elseif name === :persist - API.h5p_get_file_space_strategy(p)[:persist] - elseif name === :threshold - API.h5p_get_file_space_strategy(p)[:threshold] - elseif name === :file_space_page_size - API.h5p_get_file_space_page_size(p) - else - class_getproperty(superclass(FileCreateProperties), p, name) - end + name === :userblock ? API.h5p_get_userblock(p) : + name === :track_order ? get_track_order(p) : + name === :strategy ? get_strategy(p) : + name === :persist ? API.h5p_get_file_space_strategy(p)[:persist] : + name === :threshold ? API.h5p_get_file_space_strategy(p)[:threshold] : + name === :file_space_page_size ? API.h5p_get_file_space_page_size(p) : + class_getproperty(superclass(FileCreateProperties), p, name) end function class_setproperty!(::Type{FileCreateProperties}, p::Properties, name::Symbol, val) - name === :userblock ? API.h5p_set_userblock(p, val) : - name === :track_order ? set_track_order!(p, val) : - name === :strategy ? set_strategy!(p, val) : - name === :persist ? API.h5p_set_file_space_strategy(p; persist=val) : - name === :threshold ? API.h5p_set_file_space_strategy(p; threshold=val) : - name === :file_space_page_size ? API.h5p_set_file_space_page_size(p, val) : + name === :userblock ? API.h5p_set_userblock(p, val) : + name === :track_order ? set_track_order!(p, val) : + name === :strategy ? set_strategy!(p, val) : + name === :persist ? API.h5p_set_file_space_strategy(p, persist = val) : + name === :threshold ? API.h5p_set_file_space_strategy(p, threshold = val) : + name === :file_space_page_size ? API.h5p_set_file_space_page_size(p, val) : class_setproperty!(superclass(FileCreateProperties), p, name, val) end + """ DatatypeCreateProperties(;kws...) DatatypeCreateProperties(f::Function; kws...) @@ -485,33 +460,27 @@ that will be closed. 
@propertyclass DatasetCreateProperties API.H5P_DATASET_CREATE superclass(::Type{DatasetCreateProperties}) = ObjectCreateProperties -@enum_property( - alloc_time, +@enum_property(alloc_time, :default => API.H5D_ALLOC_TIME_DEFAULT, :early => API.H5D_ALLOC_TIME_EARLY, :incremental => API.H5D_ALLOC_TIME_INCR, - :late => API.H5D_ALLOC_TIME_LATE -) + :late => API.H5D_ALLOC_TIME_LATE) # reverse indices function get_chunk(p::Properties) dims, N = API.h5p_get_chunk(p) - ntuple(i -> Int(dims[N - i + 1]), N) + ntuple(i -> Int(dims[N-i+1]), N) end -set_chunk!(p::Properties, dims) = - API.h5p_set_chunk(p, length(dims), API.hsize_t[reverse(dims)...]) +set_chunk!(p::Properties, dims) = API.h5p_set_chunk(p, length(dims), API.hsize_t[reverse(dims)...]) -@enum_property( - layout, +@enum_property(layout, :compact => API.H5D_COMPACT, :contiguous => API.H5D_CONTIGUOUS, :chunked => API.H5D_CHUNKED, - :virtual => API.H5D_VIRTUAL -) + :virtual => API.H5D_VIRTUAL) # See https://portal.hdfgroup.org/display/HDF5/H5P_SET_FILL_TIME -@enum_property( - fill_time, +@enum_property(fill_time, :alloc => API.H5D_FILL_TIME_ALLOC, :never => API.H5D_FILL_TIME_NEVER, :ifset => API.H5D_FILL_TIME_IFSET @@ -519,22 +488,16 @@ set_chunk!(p::Properties, dims) = # filters getters/setters get_filters(p::Properties) = Filters.FilterPipeline(p) -set_filters!(p::Properties, val::Filters.Filter) = - push!(empty!(Filters.FilterPipeline(p)), val) -set_filters!(p::Properties, vals::Union{Tuple,AbstractVector}) = - append!(empty!(Filters.FilterPipeline(p)), vals) +set_filters!(p::Properties, val::Filters.Filter) = push!(empty!(Filters.FilterPipeline(p)), val) +set_filters!(p::Properties, vals::Union{Tuple, AbstractVector}) = append!(empty!(Filters.FilterPipeline(p)), vals) # convenience -set_deflate!(p::Properties, val::Bool) = - val && push!(Filters.FilterPipeline(p), Filters.Deflate()) -set_deflate!(p::Properties, level::Integer) = - push!(Filters.FilterPipeline(p), Filters.Deflate(; level=level)) -set_shuffle!(p::Properties, val::Bool) = - val && push!(Filters.FilterPipeline(p), Filters.Shuffle()) -set_fletcher32!(p::Properties, val::Bool) = - val && push!(Filters.FilterPipeline(p), Filters.Fletcher32()) -set_blosc!(p::Properties, val) = - error("The Blosc filter now requires the H5Zblosc package be loaded") +set_deflate!(p::Properties, val::Bool) = val && push!(Filters.FilterPipeline(p), Filters.Deflate()) +set_deflate!(p::Properties, level::Integer) = push!(Filters.FilterPipeline(p), Filters.Deflate(level=level)) +set_shuffle!(p::Properties, val::Bool) = val && push!(Filters.FilterPipeline(p), Filters.Shuffle()) +set_fletcher32!(p::Properties, val::Bool) = val && push!(Filters.FilterPipeline(p), Filters.Fletcher32()) +set_blosc!(p::Properties, val) = error("The Blosc filter now requires the H5Zblosc package be loaded") + class_propertynames(::Type{DatasetCreateProperties}) = ( :alloc_time, @@ -553,96 +516,48 @@ class_propertynames(::Type{DatasetCreateProperties}) = ( # deprecated :compress, :filter -) + ) + function class_getproperty(::Type{DatasetCreateProperties}, p::Properties, name::Symbol) - if name === :alloc_time - get_alloc_time(p) - elseif name === :fill_time - get_fill_time(p) - elseif name === :fill_value - get_fill_value(p) - elseif name === :chunk - get_chunk(p) - elseif name === :external - API.h5p_get_external(p) - elseif name === :filters - get_filters(p) - elseif name === :layout - get_layout(p) - elseif name === :no_attrs_hint - @static( - API.h5_get_libversion() < v"1.10.5" ? 
false : API.h5p_get_dset_no_attrs_hint(p) - ) - # deprecated - elseif name === :filter - ( - depwarn( - "`filter` property name is deprecated, use `filters` instead", - :class_getproperty - ); - get_filters(p) - ) - else - class_getproperty(superclass(DatasetCreateProperties), p, name) - end -end -function class_setproperty!( - ::Type{DatasetCreateProperties}, p::Properties, name::Symbol, val -) - if name === :alloc_time - set_alloc_time!(p, val) - elseif name === :fill_time - set_fill_time!(p, val) - elseif name === :fill_value - set_fill_value!(p, val) - elseif name === :chunk - set_chunk!(p, val) - elseif name === :external - API.h5p_set_external(p, val...) - elseif name === :filters - set_filters!(p, val) - elseif name === :layout - set_layout!(p, val) - elseif name === :no_attrs_hint - @static( - if API.h5_get_libversion() < v"1.10.5" - error( - "no_attrs_hint is only valid for HDF5 library versions 1.10.5 or greater" - ) - else - API.h5p_set_dset_no_attrs_hint(p, val) - end - ) - # set-only for convenience - elseif name === :blosc - set_blosc!(p, val) - elseif name === :deflate - set_deflate!(p, val) - elseif name === :fletcher32 - set_fletcher32!(p, val) - elseif name === :shuffle - set_shuffle!(p, val) - # deprecated - elseif name === :filter - ( - depwarn( - "`filter=$val` keyword option is deprecated, use `filters=$val` instead", - :class_setproperty! - ); - set_filters!(p, val) - ) - elseif name === :compress - ( - depwarn( - "`compress=$val` keyword option is deprecated, use `deflate=$val` instead", - :class_setproperty! - ); - set_deflate!(p, val) - ) - else - class_setproperty!(superclass(DatasetCreateProperties), p, name, val) - end + name === :alloc_time ? get_alloc_time(p) : + name === :fill_time ? get_fill_time(p) : + name === :fill_value ? get_fill_value(p) : + name === :chunk ? get_chunk(p) : + name === :external ? API.h5p_get_external(p) : + name === :filters ? get_filters(p) : + name === :layout ? get_layout(p) : + name === :no_attrs_hint ? + @static(API.h5_get_libversion() < v"1.10.5" ? + false : + API.h5p_get_dset_no_attrs_hint(p) + ) : + # deprecated + name === :filter ? (depwarn("`filter` property name is deprecated, use `filters` instead",:class_getproperty); get_filters(p)) : + class_getproperty(superclass(DatasetCreateProperties), p, name) +end +function class_setproperty!(::Type{DatasetCreateProperties}, p::Properties, name::Symbol, val) + name === :alloc_time ? set_alloc_time!(p, val) : + name === :fill_time ? set_fill_time!(p, val) : + name === :fill_value ? set_fill_value!(p, val) : + name === :chunk ? set_chunk!(p, val) : + name === :external ? API.h5p_set_external(p, val...) : + name === :filters ? set_filters!(p, val) : + name === :layout ? set_layout!(p, val) : + name === :no_attrs_hint ? + @static(API.h5_get_libversion() < v"1.10.5" ? + error("no_attrs_hint is only valid for HDF5 library versions 1.10.5 or greater") : + API.h5p_set_dset_no_attrs_hint(p, val) + ) : + # set-only for convenience + name === :blosc ? set_blosc!(p, val) : + name === :deflate ? set_deflate!(p, val) : + name === :fletcher32 ? set_fletcher32!(p, val) : + name === :shuffle ? set_shuffle!(p, val) : + # deprecated + name === :filter ? (depwarn("`filter=$val` keyword option is deprecated, use `filters=$val` instead",:class_setproperty!); set_filters!(p, val)) : + name === :compress ? 
(depwarn("`compress=$val` keyword option is deprecated, use `deflate=$val` instead",:class_setproperty!); set_deflate!(p, val)) : + class_setproperty!(superclass(DatasetCreateProperties), p, name, val) end """ @@ -654,24 +569,21 @@ that will be closed. """ @propertyclass StringCreateProperties API.H5P_STRING_CREATE -@enum_property(char_encoding, :ascii => API.H5T_CSET_ASCII, :utf8 => API.H5T_CSET_UTF8) +@enum_property(char_encoding, + :ascii => API.H5T_CSET_ASCII, + :utf8 => API.H5T_CSET_UTF8) + -class_propertynames(::Type{StringCreateProperties}) = (:char_encoding,) +class_propertynames(::Type{StringCreateProperties}) = ( + :char_encoding, + ) function class_getproperty(::Type{StringCreateProperties}, p::Properties, name::Symbol) - if name === :char_encoding - get_char_encoding(p) - else - class_getproperty(superclass(StringCreateProperties), p, name) - end + name === :char_encoding ? get_char_encoding(p) : + class_getproperty(superclass(StringCreateProperties), p, name) end -function class_setproperty!( - ::Type{StringCreateProperties}, p::Properties, name::Symbol, val -) - if name === :char_encoding - set_char_encoding!(p, val) - else - class_setproperty!(superclass(StringCreateProperties), p, name, val) - end +function class_setproperty!(::Type{StringCreateProperties}, p::Properties, name::Symbol, val) + name === :char_encoding ? set_char_encoding!(p, val) : + class_setproperty!(superclass(StringCreateProperties), p, name, val) end """ @@ -693,20 +605,16 @@ superclass(::Type{LinkCreateProperties}) = StringCreateProperties @bool_property(create_intermediate_group) -class_propertynames(::Type{LinkCreateProperties}) = (:create_intermediate_group,) +class_propertynames(::Type{LinkCreateProperties}) = ( + :create_intermediate_group, + ) function class_getproperty(::Type{LinkCreateProperties}, p::Properties, name::Symbol) - if name === :create_intermediate_group - get_create_intermediate_group(p) - else - class_getproperty(superclass(LinkCreateProperties), p, name) - end + name === :create_intermediate_group ? get_create_intermediate_group(p) : + class_getproperty(superclass(LinkCreateProperties), p, name) end function class_setproperty!(::Type{LinkCreateProperties}, p::Properties, name::Symbol, val) - if name === :create_intermediate_group - set_create_intermediate_group!(p, val) - else - class_setproperty!(superclass(LinkCreateProperties), p, name, val) - end + name === :create_intermediate_group ? set_create_intermediate_group!(p, val) : + class_setproperty!(superclass(LinkCreateProperties), p, name, val) end """ @@ -723,6 +631,7 @@ that will be closed. @propertyclass AttributeCreateProperties API.H5P_ATTRIBUTE_CREATE superclass(::Type{AttributeCreateProperties}) = StringCreateProperties + """ FileAccessProperties(;kws...) FileAccessProperties(f::Function; kws...) @@ -770,21 +679,19 @@ end @tuple_property(alignment) -@enum_property( - fclose_degree, - :weak => API.H5F_CLOSE_WEAK, - :semi => API.H5F_CLOSE_SEMI, - :strong => API.H5F_CLOSE_STRONG, - :default => API.H5F_CLOSE_DEFAULT -) +@enum_property(fclose_degree, + :weak => API.H5F_CLOSE_WEAK, + :semi => API.H5F_CLOSE_SEMI, + :strong => API.H5F_CLOSE_STRONG, + :default => API.H5F_CLOSE_DEFAULT) # getter/setter for libver_bounds libver_bound_to_enum(val::Integer) = val libver_bound_to_enum(val::API.H5F_libver_t) = val function libver_bound_to_enum(val::VersionNumber) - val >= v"1.12" ? API.H5F_LIBVER_V112 : - val >= v"1.10" ? API.H5F_LIBVER_V110 : - val >= v"1.8" ? API.H5F_LIBVER_V18 : + val >= v"1.12" ? 
API.H5F_LIBVER_V112 : + val >= v"1.10" ? API.H5F_LIBVER_V110 : + val >= v"1.8" ? API.H5F_LIBVER_V18 : throw(ArgumentError("libver_bound must be >= v\"1.8\".")) end function libver_bound_to_enum(val::Symbol) @@ -811,6 +718,7 @@ function set_libver_bounds!(p::Properties, val) API.h5p_set_libver_bounds(p, libver_bound_to_enum(val), libver_bound_to_enum(val)) end + class_propertynames(::Type{FileAccessProperties}) = ( :alignment, :driver, @@ -820,64 +728,33 @@ class_propertynames(::Type{FileAccessProperties}) = ( :file_locking, :libver_bounds, :meta_block_size, -) + ) function class_getproperty(::Type{FileAccessProperties}, p::Properties, name::Symbol) - if name === :alignment - get_alignment(p) - elseif name === :driver - Drivers.get_driver(p) - elseif name === :driver_info - API.h5p_get_driver_info(p) # get only - elseif name === :fclose_degree - get_fclose_degree(p) - elseif name === :file_locking - API.h5p_get_file_locking(p) - elseif name === :libver_bounds - get_libver_bounds(p) - elseif name === :meta_block_size - API.h5p_get_meta_block_size(p) - # deprecated - elseif name === :fapl_mpio - ( - depwarn( - "The `fapl_mpio` property is deprecated, use `driver=HDF5.Drivers.MPIO(...)` instead.", - :fapl_mpio - ); - drv = get_driver(p, MPIO); - (drv.comm, drv.info) - ) - else - class_getproperty(superclass(FileAccessProperties), p, name) - end + name === :alignment ? get_alignment(p) : + name === :driver ? Drivers.get_driver(p) : + name === :driver_info ? API.h5p_get_driver_info(p) : # get only + name === :fclose_degree ? get_fclose_degree(p) : + name === :file_locking ? API.h5p_get_file_locking(p) : + name === :libver_bounds ? get_libver_bounds(p) : + name === :meta_block_size ? API.h5p_get_meta_block_size(p) : + # deprecated + name === :fapl_mpio ? (depwarn("The `fapl_mpio` property is deprecated, use `driver=HDF5.Drivers.MPIO(...)` instead.", :fapl_mpio); drv = get_driver(p, MPIO); (drv.comm, drv.info)) : + class_getproperty(superclass(FileAccessProperties), p, name) end function class_setproperty!(::Type{FileAccessProperties}, p::Properties, name::Symbol, val) - if name === :alignment - set_alignment!(p, val) - elseif name === :driver - Drivers.set_driver!(p, val) - elseif name === :fclose_degree - set_fclose_degree!(p, val) - elseif name === :file_locking - API.h5p_set_file_locking(p, val...) - elseif name === :libver_bounds - set_libver_bounds!(p, val) - elseif name === :meta_block_size - API.h5p_set_meta_block_size(p, val) - # deprecated - elseif name === :fapl_mpio - ( - depwarn( - "The `fapl_mpio` property is deprecated, use `driver=HDF5.Drivers.MPIO(...)` instead.", - :fapl_mpio - ); - p.driver = Drivers.MPIO(val...) - ) - else - class_setproperty!(superclass(FileAccessProperties), p, name, val) - end + name === :alignment ? set_alignment!(p, val) : + name === :driver ? Drivers.set_driver!(p, val) : + name === :fclose_degree ? set_fclose_degree!(p, val) : + name === :file_locking ? API.h5p_set_file_locking(p, val...) : + name === :libver_bounds ? set_libver_bounds!(p, val) : + name === :meta_block_size ? API.h5p_set_meta_block_size(p, val) : + # deprecated + name === :fapl_mpio ? 
(depwarn("The `fapl_mpio` property is deprecated, use `driver=HDF5.Drivers.MPIO(...)` instead.", :fapl_mpio); p.driver = Drivers.MPIO(val...)) : + class_setproperty!(superclass(FileAccessProperties), p, name, val) end + @propertyclass LinkAccessProperties API.H5P_LINK_ACCESS @propertyclass GroupAccessProperties API.H5P_GROUP_ACCESS @@ -916,46 +793,34 @@ See [Dataset Access Properties](https://portal.hdfgroup.org/display/HDF5/Dataset @propertyclass DatasetAccessProperties API.H5P_DATASET_ACCESS superclass(::Type{DatasetAccessProperties}) = LinkAccessProperties -class_propertynames(::Type{DatasetAccessProperties}) = - (:chunk_cache, :efile_prefix, :virtual_prefix, :virtual_printf_gap, :virtual_view) +class_propertynames(::Type{DatasetAccessProperties}) = ( + :chunk_cache, + :efile_prefix, + :virtual_prefix, + :virtual_printf_gap, + :virtual_view +) -@enum_property( - virtual_view, +@enum_property(virtual_view, :first_missing => API.H5D_VDS_FIRST_MISSING, :last_available => API.H5D_VDS_LAST_AVAILABLE ) function class_getproperty(::Type{DatasetAccessProperties}, p::Properties, name::Symbol) - if name === :chunk_cache - API.h5p_get_chunk_cache(p) - elseif name === :efile_prefix - API.h5p_get_efile_prefix(p) - elseif name === :virtual_prefix - API.h5p_get_virtual_prefix(p) - elseif name === :virtual_printf_gap - API.h5p_get_virtual_printf_gap(p) - elseif name === :virtual_view - get_virtual_view(p) - else - class_getproperty(superclass(DatasetAccessProperties), p, name) - end -end -function class_setproperty!( - ::Type{DatasetAccessProperties}, p::Properties, name::Symbol, val -) - if name === :chunk_cache - API.h5p_set_chunk_cache(p, val...) - elseif name === :efile_prefix - API.h5p_set_efile_prefix(p, val) - elseif name === :virtual_prefix - API.h5p_set_virtual_prefix(p, val) - elseif name === :virtual_printf_gap - API.h5p_set_virtual_printf_gap(p, val) - elseif name === :virtual_view - set_virtual_view!(p, val) - else - class_setproperty!(superclass(DatasetAccessProperties), p, name, val) - end + name === :chunk_cache ? API.h5p_get_chunk_cache(p) : + name === :efile_prefix ? API.h5p_get_efile_prefix(p) : + name === :virtual_prefix ? API.h5p_get_virtual_prefix(p) : + name === :virtual_printf_gap ? API.h5p_get_virtual_printf_gap(p) : + name === :virtual_view ? get_virtual_view(p) : + class_getproperty(superclass(DatasetAccessProperties), p, name) +end +function class_setproperty!(::Type{DatasetAccessProperties}, p::Properties, name::Symbol, val) + name === :chunk_cache ? API.h5p_set_chunk_cache(p, val...) : + name === :efile_prefix ? API.h5p_set_efile_prefix(p, val) : + name === :virtual_prefix ? API.h5p_set_virtual_prefix(p, val) : + name === :virtual_printf_gap ? API.h5p_set_virtual_printf_gap(p, val) : + name === :virtual_view ? set_virtual_view!(p, val) : + class_setproperty!(superclass(DatasetAccessProperties), p, name, val) end @propertyclass AttributeAccessProperties API.H5P_ATTRIBUTE_ACCESS @@ -976,33 +841,26 @@ that will be closed. 
""" @propertyclass DatasetTransferProperties API.H5P_DATASET_XFER -@enum_property( - dxpl_mpio, - :independent => API.H5FD_MPIO_INDEPENDENT, - :collective => API.H5FD_MPIO_COLLECTIVE -) +@enum_property(dxpl_mpio, + :independent => API.H5FD_MPIO_INDEPENDENT, + :collective => API.H5FD_MPIO_COLLECTIVE) -class_propertynames(::Type{DatasetTransferProperties}) = (:dxpl_mpio,) +class_propertynames(::Type{DatasetTransferProperties}) = ( + :dxpl_mpio, + ) function class_getproperty(::Type{DatasetTransferProperties}, p::Properties, name::Symbol) - if name === :dxpl_mpio - get_dxpl_mpio(p) - else - class_getproperty(superclass(DatasetTransferProperties), p, name) - end + name === :dxpl_mpio ? get_dxpl_mpio(p) : + class_getproperty(superclass(DatasetTransferProperties), p, name) end -function class_setproperty!( - ::Type{DatasetTransferProperties}, p::Properties, name::Symbol, val -) - if name === :dxpl_mpio - set_dxpl_mpio!(p, val) - else - class_setproperty!(superclass(DatasetTransferProperties), p, name, val) - end +function class_setproperty!(::Type{DatasetTransferProperties}, p::Properties, name::Symbol, val) + name === :dxpl_mpio ? set_dxpl_mpio!(p, val) : + class_setproperty!(superclass(DatasetTransferProperties), p, name, val) end @propertyclass FileMountProperties API.H5P_FILE_MOUNT @propertyclass ObjectCopyProperties API.H5P_OBJECT_COPY + const DEFAULT_PROPERTIES = GenericProperties() # These properties are initialized in __init__() const ASCII_LINK_PROPERTIES = LinkCreateProperties() @@ -1012,3 +870,5 @@ const UTF8_ATTRIBUTE_PROPERTIES = AttributeCreateProperties() _link_properties(::AbstractString) = UTF8_LINK_PROPERTIES _attr_properties(::AbstractString) = UTF8_ATTRIBUTE_PROPERTIES + +#! format: on diff --git a/src/typeconversions.jl b/src/typeconversions.jl index 87b1119a4..9941b0e8f 100644 --- a/src/typeconversions.jl +++ b/src/typeconversions.jl @@ -111,19 +111,21 @@ end Base.eltype(::Type{VariableArray{T}}) where {T} = T ## Conversion between Julia types and HDF5 atomic types -hdf5_type_id(::Type{Bool}) = API.H5T_NATIVE_B8 -hdf5_type_id(::Type{Int8}) = API.H5T_NATIVE_INT8 -hdf5_type_id(::Type{UInt8}) = API.H5T_NATIVE_UINT8 -hdf5_type_id(::Type{Int16}) = API.H5T_NATIVE_INT16 -hdf5_type_id(::Type{UInt16}) = API.H5T_NATIVE_UINT16 -hdf5_type_id(::Type{Int32}) = API.H5T_NATIVE_INT32 -hdf5_type_id(::Type{UInt32}) = API.H5T_NATIVE_UINT32 -hdf5_type_id(::Type{Int64}) = API.H5T_NATIVE_INT64 -hdf5_type_id(::Type{UInt64}) = API.H5T_NATIVE_UINT64 -hdf5_type_id(::Type{Float32}) = API.H5T_NATIVE_FLOAT -hdf5_type_id(::Type{Float64}) = API.H5T_NATIVE_DOUBLE +#! format: off +hdf5_type_id(::Type{Bool}) = API.H5T_NATIVE_B8 +hdf5_type_id(::Type{Int8}) = API.H5T_NATIVE_INT8 +hdf5_type_id(::Type{UInt8}) = API.H5T_NATIVE_UINT8 +hdf5_type_id(::Type{Int16}) = API.H5T_NATIVE_INT16 +hdf5_type_id(::Type{UInt16}) = API.H5T_NATIVE_UINT16 +hdf5_type_id(::Type{Int32}) = API.H5T_NATIVE_INT32 +hdf5_type_id(::Type{UInt32}) = API.H5T_NATIVE_UINT32 +hdf5_type_id(::Type{Int64}) = API.H5T_NATIVE_INT64 +hdf5_type_id(::Type{UInt64}) = API.H5T_NATIVE_UINT64 +hdf5_type_id(::Type{Float32}) = API.H5T_NATIVE_FLOAT +hdf5_type_id(::Type{Float64}) = API.H5T_NATIVE_DOUBLE hdf5_type_id(::Type{Reference}) = API.H5T_STD_REF_OBJ hdf5_type_id(::Type{<:AbstractString}) = API.H5T_C_S1 +#! format: on # It's not safe to use particular id codes because these can change, so we use characteristics of the type. 
function _hdf5_type_map(class_id, is_signed, native_size) diff --git a/test/readremote.jl b/test/readremote.jl index a8ea72300..3c4b0c501 100644 --- a/test/readremote.jl +++ b/test/readremote.jl @@ -20,26 +20,27 @@ using LinearAlgebra: norm # file # end + #! format: off fcmp = [ - 0 1 2 3 4 5 6 + 0 1 2 3 4 5 6 2 1.66667 2.4 3.28571 4.22222 5.18182 6.15385 4 2.33333 2.8 3.57143 4.44444 5.36364 6.30769 - 6 3 3.2 3.85714 4.66667 5.54545 6.46154 + 6 3 3.2 3.85714 4.66667 5.54545 6.46154 ]' icmp = [ 0 -1 -2 -3 -4 -5 -6 - 0 0 0 0 0 0 0 - 0 1 2 3 4 5 6 - 0 2 4 6 8 10 12 + 0 0 0 0 0 0 0 + 0 1 2 3 4 5 6 + 0 2 4 6 8 10 12 ]' SOLID, LIQUID, GAS, PLASMA = 0, 1, 2, 3 ecmp = [ - SOLID SOLID SOLID SOLID SOLID SOLID SOLID - SOLID LIQUID GAS PLASMA SOLID LIQUID GAS - SOLID GAS SOLID GAS SOLID GAS SOLID - SOLID PLASMA GAS LIQUID SOLID PLASMA GAS + SOLID SOLID SOLID SOLID SOLID SOLID SOLID + SOLID LIQUID GAS PLASMA SOLID LIQUID GAS + SOLID GAS SOLID GAS SOLID GAS SOLID + SOLID PLASMA GAS LIQUID SOLID PLASMA GAS ]' scmp = ["Parting", "is such", "sweet", "sorrow."] vicmp = Array{Int32}[[3, 2, 1], [1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144]] @@ -51,35 +52,27 @@ using LinearAlgebra: norm ] # For H5T_ARRAY AA = Array{Int,2}[ - [ - 0 0 0 - 0 -1 -2 - 0 -2 -4 - 0 -3 -6 - 0 -4 -8 - ], - [ - 0 1 2 - 1 1 1 - 2 1 0 - 3 1 -1 - 4 1 -2 - ], - [ - 0 2 4 - 2 3 4 - 4 4 4 - 6 5 4 - 8 6 4 - ], - [ - 0 3 6 - 3 5 7 - 6 7 8 - 9 9 9 - 12 11 10 - ] - ] + [0 0 0; + 0 -1 -2; + 0 -2 -4; + 0 -3 -6; + 0 -4 -8], + [0 1 2; + 1 1 1; + 2 1 0; + 3 1 -1; + 4 1 -2], + [0 2 4; + 2 3 4; + 4 4 4; + 6 5 4; + 8 6 4], + [0 3 6; + 3 5 7; + 6 7 8; + 9 9 9; + 12 11 10]] + #! format: on file = joinpath(test_files, "h5ex_t_floatatt.h5") fid = h5open(file, "r") From 84164f44a64b307f43c410f2e2f074ce64b67f30 Mon Sep 17 00:00:00 2001 From: kittisopikulm Date: Fri, 5 Aug 2022 19:26:02 -0400 Subject: [PATCH 5/6] Fix some dos endline markers --- gen/gen_wrappers.jl | 304 ++++++++++++++--------------- src/api_midlevel.jl | 456 ++++++++++++++++++++++---------------------- 2 files changed, 380 insertions(+), 380 deletions(-) diff --git a/gen/gen_wrappers.jl b/gen/gen_wrappers.jl index 75198c833..42d6d2482 100644 --- a/gen/gen_wrappers.jl +++ b/gen/gen_wrappers.jl @@ -1,152 +1,152 @@ -# Generate ../src/api/functions.jl -# Run `julia --project=.. gen_wrappers.jl`` to execute this script - -include(joinpath(@__DIR__, "bind_generator.jl")) - -# Read in the API definition macros from the definitions file -defs = read(joinpath(@__DIR__, "api_defs.jl"), String) -# Have Julia expand/run the @bind macro to generate expressions for all of the functions -exprs = Base.include_string( - @__MODULE__, "@macroexpand1 begin\n" * defs * "\nend", "api_defs.jl" -) -# Insert the conditional version helper expression -prepend!(exprs.args, _libhdf5_build_ver_expr.args) -Base.remove_linenums!(exprs) - -# Definitions which are not automatically generated, but should still be documented as -# part of the raw low-level API: -append!(bound_api["H5P"], ["h5p_get_class_name"]) # defined in src/api/helpers.jl -append!(bound_api["H5T"], [ - "h5t_get_member_name", # defined in src/api/helpers.jl - "h5t_get_tag" -]) # defined in src/api/helpers.jl - -# Now dump the text representation to disk -open(joinpath(@__DIR__, "..", "src", "api", "functions.jl"), "w") do fid - println( - fid, - """ -#! format: off -# This file is autogenerated by HDF5.jl's `gen/gen_wrappers.jl` and should not be editted. 
-# -# To add new bindings, define the binding in `gen/api_defs.jl`, re-run -# `gen/gen_wrappers.jl`, and commit the updated `src/api/functions.jl`. -""" - ) - function triplequote(s::String, indent="", prefix="") - ret = indent * prefix * "\"\"\"\n" - for l in eachline(IOBuffer(s)) - ret *= isempty(l) ? "\n" : indent * l * "\n" - end - ret *= indent * "\"\"\"\n" - return ret - end - ismacro(ex, sym, n=0) = - isexpr(ex, :macrocall) && length(ex.args) >= n + 2 && ex.args[1] == sym - for funcblock in exprs.args - if ismacro(funcblock, Symbol("@doc"), 2) - # Pretty print the doc macro as just a juxtaposed doc string and function - # definition; the `@doc` construction is necessary in AST form for the docs - # to be included in interactive use of `@bind`, but in source form we can - # rely on Julia's parsing behavior. - print(fid, triplequote(funcblock.args[3]), funcblock.args[4], "\n\n") - elseif ismacro(funcblock, Symbol("@static"), 1) && - isexpr(funcblock.args[3], :if, 2) && - ismacro(funcblock.args[3].args[2], Symbol("@doc"), 2) - # Within a @static block, we have to keep the @doc prefix, but we can still - # switch to triple-quoting and there's special parsing to allow the function - # definition to be on the next line. - # - # Work around the expression printer in this more complex case by printing - # to a buffer and string-replacing a sentinel value - docstr = funcblock.args[3].args[2].args[3] - funcblock.args[3].args[2].args[3] = "SENTINEL_DOC" - buf = sprint(print, funcblock) - # Two-step deindent since `r"^\s{4}(\s{4})?"m => s"\1"` errors: see JuliaLang/julia#31456 - buf = replace(buf, r"^\s{4}"m => s"") # deindent - buf = replace(buf, r"^(\s{4})\s{4}"m => s"\1") # deindent - # Now format the doc string and replace (note need to indent `function`) - buf = replace( - buf, - r"^\s+@doc \"SENTINEL_DOC\" "m => - triplequote(docstr, " "^4, "@doc ") * " "^4 - ) - print(fid, buf, "\n\n") - else - # passthrough - print(fid, funcblock, "\n\n") - end - end -end - -# Also generate auto-docs that simply list all of the bound API functions -apidocs = "" -for (mod, desc, urltail) in ( - ("H5", "General Library Functions", "Library"), - ("H5A", "Attribute Interface", "Attributes"), - ("H5D", "Dataset Interface", "Datasets"), - ("H5E", "Error Interface", "Error+Handling"), - ("H5F", "File Interface", "Files"), - ("H5G", "Group Interface", "Groups"), - ("H5I", "Identifier Interface", "Identifiers"), - ("H5L", "Link Interface", "Links"), - ("H5O", "Object Interface", "Objects"), - ("H5PL", "Plugin Interface", "Plugins"), - ("H5P", "Property Interface", "Property+Lists"), - ("H5R", "Reference Interface", "References"), - ("H5S", "Dataspace Interface", "Dataspaces"), - ("H5T", "Datatype Interface", "Datatypes"), - ("H5Z", "Filter Interface", "Filters"), - ("H5DO", "Optimized Functions Interface", "Optimizations"), - ("H5DS", "Dimension Scale Interface", "Dimension+Scales"), - ("H5LT", "Lite Interface", "Lite"), - ("H5TB", "Table Interface", "Tables"), -) - global apidocs - funclist = sort!(bound_api[mod]) - index = join(["- [`$f`](@ref $f)" for f in funclist], "\n") - funcs = join(funclist, "\n") - apidocs *= """ - --- - - ## [[`$mod`](https://portal.hdfgroup.org/display/HDF5/$urltail) — $desc](@id $mod) - $index - ```@docs - $funcs - ``` - - """ -end - -open(joinpath(@__DIR__, "..", "docs", "src", "api_bindings.md"), "w") do fid - write( - fid, - """ - ```@raw html - - ``` - ```@meta - CurrentModule = HDF5.API - ``` - - # Low-level library bindings - - At the lowest level, `HDF5.jl` operates by 
calling the public API of the HDF5 shared - library through a set of `ccall` wrapper functions. - This page documents the function names and nominal C argument types of the API which - have bindings in this package. - Note that in many cases, high-level data types are valid arguments through automatic - `ccall` conversions. - For instance, `HDF5.Datatype` objects will be automatically converted to their `hid_t` ID - by Julia's `cconvert`+`unsafe_convert` `ccall` rules. - - There are additional helper wrappers (often for out-argument functions) which are not - documented here. - - $apidocs - """ - ) -end - -nothing +# Generate ../src/api/functions.jl +# Run `julia --project=.. gen_wrappers.jl`` to execute this script + +include(joinpath(@__DIR__, "bind_generator.jl")) + +# Read in the API definition macros from the definitions file +defs = read(joinpath(@__DIR__, "api_defs.jl"), String) +# Have Julia expand/run the @bind macro to generate expressions for all of the functions +exprs = Base.include_string( + @__MODULE__, "@macroexpand1 begin\n" * defs * "\nend", "api_defs.jl" +) +# Insert the conditional version helper expression +prepend!(exprs.args, _libhdf5_build_ver_expr.args) +Base.remove_linenums!(exprs) + +# Definitions which are not automatically generated, but should still be documented as +# part of the raw low-level API: +append!(bound_api["H5P"], ["h5p_get_class_name"]) # defined in src/api/helpers.jl +append!(bound_api["H5T"], [ + "h5t_get_member_name", # defined in src/api/helpers.jl + "h5t_get_tag" +]) # defined in src/api/helpers.jl + +# Now dump the text representation to disk +open(joinpath(@__DIR__, "..", "src", "api", "functions.jl"), "w") do fid + println( + fid, + """ +#! format: off +# This file is autogenerated by HDF5.jl's `gen/gen_wrappers.jl` and should not be editted. +# +# To add new bindings, define the binding in `gen/api_defs.jl`, re-run +# `gen/gen_wrappers.jl`, and commit the updated `src/api/functions.jl`. +""" + ) + function triplequote(s::String, indent="", prefix="") + ret = indent * prefix * "\"\"\"\n" + for l in eachline(IOBuffer(s)) + ret *= isempty(l) ? "\n" : indent * l * "\n" + end + ret *= indent * "\"\"\"\n" + return ret + end + ismacro(ex, sym, n=0) = + isexpr(ex, :macrocall) && length(ex.args) >= n + 2 && ex.args[1] == sym + for funcblock in exprs.args + if ismacro(funcblock, Symbol("@doc"), 2) + # Pretty print the doc macro as just a juxtaposed doc string and function + # definition; the `@doc` construction is necessary in AST form for the docs + # to be included in interactive use of `@bind`, but in source form we can + # rely on Julia's parsing behavior. + print(fid, triplequote(funcblock.args[3]), funcblock.args[4], "\n\n") + elseif ismacro(funcblock, Symbol("@static"), 1) && + isexpr(funcblock.args[3], :if, 2) && + ismacro(funcblock.args[3].args[2], Symbol("@doc"), 2) + # Within a @static block, we have to keep the @doc prefix, but we can still + # switch to triple-quoting and there's special parsing to allow the function + # definition to be on the next line. 
+ # + # Work around the expression printer in this more complex case by printing + # to a buffer and string-replacing a sentinel value + docstr = funcblock.args[3].args[2].args[3] + funcblock.args[3].args[2].args[3] = "SENTINEL_DOC" + buf = sprint(print, funcblock) + # Two-step deindent since `r"^\s{4}(\s{4})?"m => s"\1"` errors: see JuliaLang/julia#31456 + buf = replace(buf, r"^\s{4}"m => s"") # deindent + buf = replace(buf, r"^(\s{4})\s{4}"m => s"\1") # deindent + # Now format the doc string and replace (note need to indent `function`) + buf = replace( + buf, + r"^\s+@doc \"SENTINEL_DOC\" "m => + triplequote(docstr, " "^4, "@doc ") * " "^4 + ) + print(fid, buf, "\n\n") + else + # passthrough + print(fid, funcblock, "\n\n") + end + end +end + +# Also generate auto-docs that simply list all of the bound API functions +apidocs = "" +for (mod, desc, urltail) in ( + ("H5", "General Library Functions", "Library"), + ("H5A", "Attribute Interface", "Attributes"), + ("H5D", "Dataset Interface", "Datasets"), + ("H5E", "Error Interface", "Error+Handling"), + ("H5F", "File Interface", "Files"), + ("H5G", "Group Interface", "Groups"), + ("H5I", "Identifier Interface", "Identifiers"), + ("H5L", "Link Interface", "Links"), + ("H5O", "Object Interface", "Objects"), + ("H5PL", "Plugin Interface", "Plugins"), + ("H5P", "Property Interface", "Property+Lists"), + ("H5R", "Reference Interface", "References"), + ("H5S", "Dataspace Interface", "Dataspaces"), + ("H5T", "Datatype Interface", "Datatypes"), + ("H5Z", "Filter Interface", "Filters"), + ("H5DO", "Optimized Functions Interface", "Optimizations"), + ("H5DS", "Dimension Scale Interface", "Dimension+Scales"), + ("H5LT", "Lite Interface", "Lite"), + ("H5TB", "Table Interface", "Tables"), +) + global apidocs + funclist = sort!(bound_api[mod]) + index = join(["- [`$f`](@ref $f)" for f in funclist], "\n") + funcs = join(funclist, "\n") + apidocs *= """ + --- + + ## [[`$mod`](https://portal.hdfgroup.org/display/HDF5/$urltail) — $desc](@id $mod) + $index + ```@docs + $funcs + ``` + + """ +end + +open(joinpath(@__DIR__, "..", "docs", "src", "api_bindings.md"), "w") do fid + write( + fid, + """ + ```@raw html + + ``` + ```@meta + CurrentModule = HDF5.API + ``` + + # Low-level library bindings + + At the lowest level, `HDF5.jl` operates by calling the public API of the HDF5 shared + library through a set of `ccall` wrapper functions. + This page documents the function names and nominal C argument types of the API which + have bindings in this package. + Note that in many cases, high-level data types are valid arguments through automatic + `ccall` conversions. + For instance, `HDF5.Datatype` objects will be automatically converted to their `hid_t` ID + by Julia's `cconvert`+`unsafe_convert` `ccall` rules. + + There are additional helper wrappers (often for out-argument functions) which are not + documented here. + + $apidocs + """ + ) +end + +nothing diff --git a/src/api_midlevel.jl b/src/api_midlevel.jl index 45d1939c8..b6847e4a4 100644 --- a/src/api_midlevel.jl +++ b/src/api_midlevel.jl @@ -1,228 +1,228 @@ -# This file defines midlevel api wrappers. We include name normalization for methods that are -# applicable to different hdf5 api-layers. We still try to adhere close proximity to the underlying -# method name in the hdf5-library. - -""" - HDF5.set_extent_dims(dset::HDF5.Dataset, new_dims::Dims) - -Change the current dimensions of a dataset to `new_dims`, limited by -`max_dims = get_extent_dims(dset)[2]`. 
Reduction is possible and leads to loss of truncated data. -""" -function set_extent_dims(dset::Dataset, size::Dims) - checkvalid(dset) - API.h5d_set_extent(dset, API.hsize_t[reverse(size)...]) -end - -""" - HDF5.set_extent_dims(dspace::HDF5.Dataspace, new_dims::Dims, max_dims::Union{Dims,Nothing} = nothing) - -Change the dimensions of a dataspace `dspace` to `new_dims`, optionally with the maximum possible -dimensions `max_dims` different from the active size `new_dims`. If not given, `max_dims` is set equal -to `new_dims`. -""" -function set_extent_dims( - dspace::Dataspace, size::Dims, max_dims::Union{Dims,Nothing}=nothing -) - checkvalid(dspace) - rank = length(size) - current_size = API.hsize_t[reverse(size)...] - maximum_size = isnothing(max_dims) ? C_NULL : [reverse(max_dims .% API.hsize_t)...] - API.h5s_set_extent_simple(dspace, rank, current_size, maximum_size) - return nothing -end - -""" - HDF5.get_extent_dims(obj::Union{HDF5.Dataspace, HDF5.Dataset, HDF5.Attribute}) -> dims, maxdims - -Get the array dimensions from a dataspace, dataset, or attribute and return a tuple of `dims` and `maxdims`. -""" -function get_extent_dims(obj::Union{Dataspace,Dataset,Attribute}) - dspace = obj isa Dataspace ? checkvalid(obj) : dataspace(obj) - h5_dims, h5_maxdims = API.h5s_get_simple_extent_dims(dspace) - # reverse dimensions since hdf5 uses C-style order - N = length(h5_dims) - dims = ntuple(i -> @inbounds(Int(h5_dims[N - i + 1])), N) - maxdims = ntuple(i -> @inbounds(h5_maxdims[N - i + 1]) % Int, N) # allows max_dims to be specified as -1 without triggering an overflow - obj isa Dataspace || close(dspace) - return dims, maxdims -end - -""" - HDF5.get_chunk_offset(dataset_id, index) - -Get 0-based offset of chunk from 0-based `index`. The offsets are returned in Julia's column-major order rather than hdf5 row-major order. -For a 1-based API, see `HDF5.ChunkStorage`. -""" -function get_chunk_offset(dataset_id, index) - extent = size(dataset_id) - chunk = get_chunk(dataset_id) - chunk_indices = CartesianIndices( - ntuple(i -> 0:(extent[i] ÷ chunk[i] - 1), length(extent)) - ) - offset = API.hsize_t.(chunk_indices[index + 1].I .* chunk) - return offset -end - -""" - HDF5.get_chunk_index(dataset_id, offset) - -Get 0-based index of chunk from 0-based `offset` returned in Julia's column-major order. -For a 1-based API, see `HDF5.ChunkStorage`. -""" -function get_chunk_index(dataset_id, offset) - extent = size(dataset_id) - chunk = get_chunk(dataset_id) - chunk_indices = LinearIndices(ntuple(i -> 0:(extent[i] ÷ chunk[i] - 1), length(extent))) - chunk_indices[(offset .÷ chunk .+ 1)...] - 1 -end - -""" - HDF5.get_num_chunks_per_dim(dataset_id) - -Get the number of chunks in each dimension in Julia's column-major order. -""" -function get_num_chunks_per_dim(dataset_id) - extent = size(dataset_id) - chunk = get_chunk(dataset_id) - return extent .÷ chunk -end - -""" - HDF5.get_num_chunks(dataset_id) - -Returns the number of chunks in a dataset. Equivalent to `API.h5d_get_num_chunks(dataset_id, HDF5.H5S_ALL)`. -""" -function get_num_chunks(dataset_id) - @static if v"1.10.5" ≤ API._libhdf5_build_ver - API.h5d_get_num_chunks(dataset_id) - else - prod(get_num_chunks_per_dim(dataset_id)) - end -end - -""" - HDF5.get_chunk_length(dataset_id) - -Retrieves the chunk size in bytes. Equivalent to `API.h5d_get_chunk_info(dataset_id, index)[:size]`. 
-""" -function get_chunk_length(dataset_id) - type = API.h5d_get_type(dataset_id) - chunk = get_chunk(dataset_id) - return Int(API.h5t_get_size(type) * prod(chunk)) -end - -vlen_get_buf_size(dset::Dataset, dtype::Datatype, dspace::Dataspace) = - API.h5d_vlen_get_buf_size(dset, dtype, dspace) -function vlen_get_buf_size(dataset_id) - type = API.h5d_get_type(dataset_id) - space = API.h5d_get_space(dataset_id) - API.h5d_vlen_get_buf_size(dataset_id, type, space) -end - -""" - HDF5.read_chunk(dataset_id, offset, [buf]; dxpl_id = HDF5.API.H5P_DEFAULT, filters = Ref{UInt32}()) - -Helper method to read chunks via 0-based offsets in a `Tuple`. - -Argument `buf` is optional and defaults to a `Vector{UInt8}` of length determined by `HDF5.get_chunk_length`. -Argument `dxpl_id` can be supplied a keyword and defaults to `HDF5.API.H5P_DEFAULT`. -Argument `filters` can be retrieved by supplying a `Ref{UInt32}` value via a keyword argument. - -This method returns `Vector{UInt8}`. -""" -function read_chunk( - dataset_id, - offset, - buf::Vector{UInt8}=Vector{UInt8}(undef, get_chunk_length(dataset_id)); - dxpl_id=API.H5P_DEFAULT, - filters=Ref{UInt32}() -) - API.h5d_read_chunk(dataset_id, dxpl_id, offset, filters, buf) - return buf -end - -""" - HDF5.read_chunk(dataset_id, index::Integer, [buf]; dxpl_id = HDF5.API.H5P_DEFAULT, filters = Ref{UInt32}()) - -Helper method to read chunks via 0-based integer `index`. - -Argument `buf` is optional and defaults to a `Vector{UInt8}` of length determined by `HDF5.h5d_get_chunk_info`. -Argument `dxpl_id` can be supplied a keyword and defaults to `HDF5.API.H5P_DEFAULT`. -Argument `filters` can be retrieved by supplying a `Ref{UInt32}` value via a keyword argument. - -This method returns `Vector{UInt8}`. -""" -function read_chunk( - dataset_id, - index::Integer, - buf::Vector{UInt8}=Vector{UInt8}(undef, get_chunk_length(dataset_id)); - dxpl_id=API.H5P_DEFAULT, - filters=Ref{UInt32}() -) - offset = [reverse(get_chunk_offset(dataset_id, index))...] - read_chunk(dataset_id, offset, buf; dxpl_id=dxpl_id, filters=filters) -end - -""" - HDF5.write_chunk(dataset_id, offset, buf::AbstractArray; dxpl_id = HDF5.API.H5P_DEFAULT, filter_mask = 0) - -Helper method to write chunks via 0-based offsets `offset` as a `Tuple`. -""" -function write_chunk( - dataset_id, offset, buf::AbstractArray; dxpl_id=API.H5P_DEFAULT, filter_mask=0 -) - # Borrowed from write_dataset stride detection - stride(buf, 1) == 1 || - throw(ArgumentError("Cannot write arrays with a different stride than `Array`")) - API.h5d_write_chunk(dataset_id, dxpl_id, filter_mask, offset, sizeof(buf), buf) -end - -function write_chunk( - dataset_id, - offset, - buf::Union{DenseArray,Base.FastContiguousSubArray}; - dxpl_id=API.H5P_DEFAULT, - filter_mask=0 -) - # We can bypass the need to check stride with Array and FastContiguousSubArray - API.h5d_write_chunk(dataset_id, dxpl_id, filter_mask, offset, sizeof(buf), buf) -end - -""" - HDF5.write_chunk(dataset_id, index::Integer, buf::AbstractArray; dxpl_id = API.H5P_DEFAULT, filter_mask = 0) - -Helper method to write chunks via 0-based integer `index`. -""" -function write_chunk( - dataset_id, index::Integer, buf::AbstractArray; dxpl_id=API.H5P_DEFAULT, filter_mask=0 -) - offset = [reverse(get_chunk_offset(dataset_id, index))...] 
- write_chunk(dataset_id, offset, buf; dxpl_id=dxpl_id, filter_mask=filter_mask) -end - -# Avoid ambiguous method with offset based versions -function write_chunk( - dataset_id, - index::Integer, - buf::Union{DenseArray,Base.FastContiguousSubArray}; - dxpl_id=API.H5P_DEFAULT, - filter_mask=0 -) - # We can bypass the need to check stride with Array and FastContiguousSubArray - offset = [reverse(get_chunk_offset(dataset_id, index))...] - write_chunk(dataset_id, offset, buf; dxpl_id=dxpl_id, filter_mask=filter_mask) -end - -function get_fill_value(plist_id, ::Type{T}) where {T} - value = Ref{T}() - API.h5p_get_fill_value(plist_id, datatype(T), value) - return value[] -end - -get_fill_value(plist_id) = get_fill_value(plist_id, Float64) - -function set_fill_value!(plist_id, value) - ref_value = Ref(value) - API.h5p_set_fill_value(plist_id, datatype(value), ref_value) - return plist_id -end +# This file defines midlevel api wrappers. We include name normalization for methods that are +# applicable to different hdf5 api-layers. We still try to adhere close proximity to the underlying +# method name in the hdf5-library. + +""" + HDF5.set_extent_dims(dset::HDF5.Dataset, new_dims::Dims) + +Change the current dimensions of a dataset to `new_dims`, limited by +`max_dims = get_extent_dims(dset)[2]`. Reduction is possible and leads to loss of truncated data. +""" +function set_extent_dims(dset::Dataset, size::Dims) + checkvalid(dset) + API.h5d_set_extent(dset, API.hsize_t[reverse(size)...]) +end + +""" + HDF5.set_extent_dims(dspace::HDF5.Dataspace, new_dims::Dims, max_dims::Union{Dims,Nothing} = nothing) + +Change the dimensions of a dataspace `dspace` to `new_dims`, optionally with the maximum possible +dimensions `max_dims` different from the active size `new_dims`. If not given, `max_dims` is set equal +to `new_dims`. +""" +function set_extent_dims( + dspace::Dataspace, size::Dims, max_dims::Union{Dims,Nothing}=nothing +) + checkvalid(dspace) + rank = length(size) + current_size = API.hsize_t[reverse(size)...] + maximum_size = isnothing(max_dims) ? C_NULL : [reverse(max_dims .% API.hsize_t)...] + API.h5s_set_extent_simple(dspace, rank, current_size, maximum_size) + return nothing +end + +""" + HDF5.get_extent_dims(obj::Union{HDF5.Dataspace, HDF5.Dataset, HDF5.Attribute}) -> dims, maxdims + +Get the array dimensions from a dataspace, dataset, or attribute and return a tuple of `dims` and `maxdims`. +""" +function get_extent_dims(obj::Union{Dataspace,Dataset,Attribute}) + dspace = obj isa Dataspace ? checkvalid(obj) : dataspace(obj) + h5_dims, h5_maxdims = API.h5s_get_simple_extent_dims(dspace) + # reverse dimensions since hdf5 uses C-style order + N = length(h5_dims) + dims = ntuple(i -> @inbounds(Int(h5_dims[N - i + 1])), N) + maxdims = ntuple(i -> @inbounds(h5_maxdims[N - i + 1]) % Int, N) # allows max_dims to be specified as -1 without triggering an overflow + obj isa Dataspace || close(dspace) + return dims, maxdims +end + +""" + HDF5.get_chunk_offset(dataset_id, index) + +Get 0-based offset of chunk from 0-based `index`. The offsets are returned in Julia's column-major order rather than hdf5 row-major order. +For a 1-based API, see `HDF5.ChunkStorage`. 
+""" +function get_chunk_offset(dataset_id, index) + extent = size(dataset_id) + chunk = get_chunk(dataset_id) + chunk_indices = CartesianIndices( + ntuple(i -> 0:(extent[i] ÷ chunk[i] - 1), length(extent)) + ) + offset = API.hsize_t.(chunk_indices[index + 1].I .* chunk) + return offset +end + +""" + HDF5.get_chunk_index(dataset_id, offset) + +Get 0-based index of chunk from 0-based `offset` returned in Julia's column-major order. +For a 1-based API, see `HDF5.ChunkStorage`. +""" +function get_chunk_index(dataset_id, offset) + extent = size(dataset_id) + chunk = get_chunk(dataset_id) + chunk_indices = LinearIndices(ntuple(i -> 0:(extent[i] ÷ chunk[i] - 1), length(extent))) + chunk_indices[(offset .÷ chunk .+ 1)...] - 1 +end + +""" + HDF5.get_num_chunks_per_dim(dataset_id) + +Get the number of chunks in each dimension in Julia's column-major order. +""" +function get_num_chunks_per_dim(dataset_id) + extent = size(dataset_id) + chunk = get_chunk(dataset_id) + return extent .÷ chunk +end + +""" + HDF5.get_num_chunks(dataset_id) + +Returns the number of chunks in a dataset. Equivalent to `API.h5d_get_num_chunks(dataset_id, HDF5.H5S_ALL)`. +""" +function get_num_chunks(dataset_id) + @static if v"1.10.5" ≤ API._libhdf5_build_ver + API.h5d_get_num_chunks(dataset_id) + else + prod(get_num_chunks_per_dim(dataset_id)) + end +end + +""" + HDF5.get_chunk_length(dataset_id) + +Retrieves the chunk size in bytes. Equivalent to `API.h5d_get_chunk_info(dataset_id, index)[:size]`. +""" +function get_chunk_length(dataset_id) + type = API.h5d_get_type(dataset_id) + chunk = get_chunk(dataset_id) + return Int(API.h5t_get_size(type) * prod(chunk)) +end + +vlen_get_buf_size(dset::Dataset, dtype::Datatype, dspace::Dataspace) = + API.h5d_vlen_get_buf_size(dset, dtype, dspace) +function vlen_get_buf_size(dataset_id) + type = API.h5d_get_type(dataset_id) + space = API.h5d_get_space(dataset_id) + API.h5d_vlen_get_buf_size(dataset_id, type, space) +end + +""" + HDF5.read_chunk(dataset_id, offset, [buf]; dxpl_id = HDF5.API.H5P_DEFAULT, filters = Ref{UInt32}()) + +Helper method to read chunks via 0-based offsets in a `Tuple`. + +Argument `buf` is optional and defaults to a `Vector{UInt8}` of length determined by `HDF5.get_chunk_length`. +Argument `dxpl_id` can be supplied a keyword and defaults to `HDF5.API.H5P_DEFAULT`. +Argument `filters` can be retrieved by supplying a `Ref{UInt32}` value via a keyword argument. + +This method returns `Vector{UInt8}`. +""" +function read_chunk( + dataset_id, + offset, + buf::Vector{UInt8}=Vector{UInt8}(undef, get_chunk_length(dataset_id)); + dxpl_id=API.H5P_DEFAULT, + filters=Ref{UInt32}() +) + API.h5d_read_chunk(dataset_id, dxpl_id, offset, filters, buf) + return buf +end + +""" + HDF5.read_chunk(dataset_id, index::Integer, [buf]; dxpl_id = HDF5.API.H5P_DEFAULT, filters = Ref{UInt32}()) + +Helper method to read chunks via 0-based integer `index`. + +Argument `buf` is optional and defaults to a `Vector{UInt8}` of length determined by `HDF5.h5d_get_chunk_info`. +Argument `dxpl_id` can be supplied a keyword and defaults to `HDF5.API.H5P_DEFAULT`. +Argument `filters` can be retrieved by supplying a `Ref{UInt32}` value via a keyword argument. + +This method returns `Vector{UInt8}`. +""" +function read_chunk( + dataset_id, + index::Integer, + buf::Vector{UInt8}=Vector{UInt8}(undef, get_chunk_length(dataset_id)); + dxpl_id=API.H5P_DEFAULT, + filters=Ref{UInt32}() +) + offset = [reverse(get_chunk_offset(dataset_id, index))...] 
+ read_chunk(dataset_id, offset, buf; dxpl_id=dxpl_id, filters=filters) +end + +""" + HDF5.write_chunk(dataset_id, offset, buf::AbstractArray; dxpl_id = HDF5.API.H5P_DEFAULT, filter_mask = 0) + +Helper method to write chunks via 0-based offsets `offset` as a `Tuple`. +""" +function write_chunk( + dataset_id, offset, buf::AbstractArray; dxpl_id=API.H5P_DEFAULT, filter_mask=0 +) + # Borrowed from write_dataset stride detection + stride(buf, 1) == 1 || + throw(ArgumentError("Cannot write arrays with a different stride than `Array`")) + API.h5d_write_chunk(dataset_id, dxpl_id, filter_mask, offset, sizeof(buf), buf) +end + +function write_chunk( + dataset_id, + offset, + buf::Union{DenseArray,Base.FastContiguousSubArray}; + dxpl_id=API.H5P_DEFAULT, + filter_mask=0 +) + # We can bypass the need to check stride with Array and FastContiguousSubArray + API.h5d_write_chunk(dataset_id, dxpl_id, filter_mask, offset, sizeof(buf), buf) +end + +""" + HDF5.write_chunk(dataset_id, index::Integer, buf::AbstractArray; dxpl_id = API.H5P_DEFAULT, filter_mask = 0) + +Helper method to write chunks via 0-based integer `index`. +""" +function write_chunk( + dataset_id, index::Integer, buf::AbstractArray; dxpl_id=API.H5P_DEFAULT, filter_mask=0 +) + offset = [reverse(get_chunk_offset(dataset_id, index))...] + write_chunk(dataset_id, offset, buf; dxpl_id=dxpl_id, filter_mask=filter_mask) +end + +# Avoid ambiguous method with offset based versions +function write_chunk( + dataset_id, + index::Integer, + buf::Union{DenseArray,Base.FastContiguousSubArray}; + dxpl_id=API.H5P_DEFAULT, + filter_mask=0 +) + # We can bypass the need to check stride with Array and FastContiguousSubArray + offset = [reverse(get_chunk_offset(dataset_id, index))...] + write_chunk(dataset_id, offset, buf; dxpl_id=dxpl_id, filter_mask=filter_mask) +end + +function get_fill_value(plist_id, ::Type{T}) where {T} + value = Ref{T}() + API.h5p_get_fill_value(plist_id, datatype(T), value) + return value[] +end + +get_fill_value(plist_id) = get_fill_value(plist_id, Float64) + +function set_fill_value!(plist_id, value) + ref_value = Ref(value) + API.h5p_set_fill_value(plist_id, datatype(value), ref_value) + return plist_id +end From b8e6f58d2f81f6ef48f40d98ef38a87ff3c5b1b4 Mon Sep 17 00:00:00 2001 From: kittisopikulm Date: Fri, 5 Aug 2022 19:34:34 -0400 Subject: [PATCH 6/6] Remove excess endline from src/api/functions.jl --- gen/gen_wrappers.jl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/gen/gen_wrappers.jl b/gen/gen_wrappers.jl index 42d6d2482..f9f22840c 100644 --- a/gen/gen_wrappers.jl +++ b/gen/gen_wrappers.jl @@ -77,6 +77,8 @@ open(joinpath(@__DIR__, "..", "src", "api", "functions.jl"), "w") do fid print(fid, funcblock, "\n\n") end end + # Remove last endline + truncate(fid, position(fid) - 1) end # Also generate auto-docs that simply list all of the bound API functions
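For reference, a minimal end-to-end sketch of the chunk helpers reformatted in
src/api_midlevel.jl above. This is not part of the patch series: the file name
and sample data are hypothetical, and direct chunk I/O assumes a libhdf5 new
enough to provide h5d_read_chunk/h5d_write_chunk.

using HDF5

h5open("chunks_demo.h5", "w") do f
    # a 4x4 Int32 dataset stored as four 2x2 chunks, no filters
    dset = create_dataset(f, "A", Int32, (4, 4); chunk=(2, 2))
    buf = Int32[1 2; 3 4]
    HDF5.write_chunk(dset, 0, buf)          # write a chunk by 0-based index
    @assert HDF5.get_num_chunks_per_dim(dset) == (2, 2)
    @assert HDF5.get_chunk_offset(dset, 0) == (0, 0)
    raw = HDF5.read_chunk(dset, 0)          # raw chunk bytes, Vector{UInt8}
    @assert reinterpret(Int32, raw) == vec(buf)  # no filters, so bytes match
end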