diff --git a/.gitignore b/.gitignore index 42cde8160..b4726d8d1 100644 --- a/.gitignore +++ b/.gitignore @@ -10,3 +10,4 @@ deps/usr deps.jl *.log +Manifest.toml diff --git a/.travis.yml b/.travis.yml index 89e607fe4..a4ffda34d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,28 +4,36 @@ language: julia os: - linux - osx - # - windows - + # - windows + julia: - - 1.0 - 1.3 + - 1 - nightly -matrix: - allow_failures: - - julia: nightly notifications: email: false -git: - depth: 99999999 - env: # Disable test fuzzing for the moment, as we're a little too slow for Travis - - NNLIB_TEST_FUZZING=false + - NNLIB_TEST_FUZZING=false JULIA_NUM_THREADS=0 + - NNLIB_TEST_FUZZING=false JULIA_NUM_THREADS=2 # Submit to Codecov after_success: - julia -e 'using Pkg; Pkg.add("Coverage"); using Coverage; Codecov.submit(process_folder())'; - \ No newline at end of file + + +jobs: + allow_failures: + - julia: nightly + # include: # no documentation yet + # - stage: "Documentation" + # julia: 1.3 + # os: linux + # script: + # - julia --project=docs/ -e 'using Pkg; Pkg.develop(PackageSpec(path=pwd())); + # Pkg.instantiate()' + # - julia --project=docs/ docs/make.jl + # after_success: skip \ No newline at end of file diff --git a/Manifest.toml b/Manifest.toml deleted file mode 100644 index c244e94bc..000000000 --- a/Manifest.toml +++ /dev/null @@ -1,42 +0,0 @@ -# This file is machine-generated - editing it directly is not advised - -[[BinaryProvider]] -deps = ["Libdl", "SHA"] -git-tree-sha1 = "5b08ed6036d9d3f0ee6369410b830f8873d4024c" -uuid = "b99e7846-7c00-51b0-8f62-c81ae34c0232" -version = "0.5.8" - -[[Libdl]] -uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb" - -[[LinearAlgebra]] -deps = ["Libdl"] -uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" - -[[Random]] -deps = ["Serialization"] -uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" - -[[Requires]] -deps = ["UUIDs"] -git-tree-sha1 = "d37400976e98018ee840e0ca4f9d20baa231dc6b" -uuid = "ae029012-a4dd-5104-9daa-d747884805df" -version = "1.0.1" - -[[SHA]] -uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce" - -[[Serialization]] -uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b" - -[[SparseArrays]] -deps = ["LinearAlgebra", "Random"] -uuid = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" - -[[Statistics]] -deps = ["LinearAlgebra", "SparseArrays"] -uuid = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" - -[[UUIDs]] -deps = ["Random", "SHA"] -uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" diff --git a/Project.toml b/Project.toml index 588ed6c88..ec09ec770 100644 --- a/Project.toml +++ b/Project.toml @@ -1,20 +1,20 @@ name = "NNlib" uuid = "872c559c-99b0-510c-b3b7-b6c96a88d5cd" -version = "0.6.6" +version = "0.7.1" [deps] -BinaryProvider = "b99e7846-7c00-51b0-8f62-c81ae34c0232" Compat = "34da2185-b29b-5c13-b0c7-acf172513d20" Libdl = "8f399da3-3557-5675-b5ff-fb832c97cbdb" LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" +NNPACK_jll = "a6bfbf70-4841-5cb9-aa18-3a8ad3c413ee" +Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" Requires = "ae029012-a4dd-5104-9daa-d747884805df" Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" [compat] -BinaryProvider = "0.5" Compat = "3.13" Requires = "0.5, 1.0" -julia = "1" +julia = "1.3" [extras] Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" diff --git a/appveyor.yml b/appveyor.yml index b62994f8c..874afe2ac 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -1,7 +1,8 @@ environment: matrix: - - julia_version: 1 + - julia_version: 1.3 - julia_version: nightly + NNLIB_TEST_FUZZING: "false" platform: - x86 # 32-bit @@ -9,9 +10,9 @@ platform: # # Uncomment the following lines to 
allow failures on nightly julia # # (tests will run but not make your overall status red) -# matrix: -# allow_failures: -# - julia_version: nightly +matrix: + allow_failures: + - julia_version: nightly branches: only: @@ -41,5 +42,6 @@ test_script: # - echo "%JL_CODECOV_SCRIPT%" # - C:\julia\bin\julia -e "%JL_CODECOV_SCRIPT%" + after_test: - C:\julia\bin\julia -e "using Pkg; Pkg.add(\"Coverage\"); using Coverage; Codecov.submit(process_folder())" \ No newline at end of file diff --git a/deps/build.jl b/deps/build.jl deleted file mode 100644 index 3da8d53c2..000000000 --- a/deps/build.jl +++ /dev/null @@ -1,50 +0,0 @@ -using BinaryProvider - -# Parse some basic command-line arguments -const verbose = "--verbose" in ARGS -const prefix = Prefix(get([a for a in ARGS if a != "--verbose"], 1, joinpath(@__DIR__, "usr"))) -products = [ - LibraryProduct(prefix, ["libnnpack"], :libnnpack), -] - -# Download binaries from hosted location -bin_prefix = "https://github.com/JuliaPackaging/Yggdrasil/releases/download/NNPACK-v2018.06.22-0" - -# Listing of files generated by BinaryBuilder: -download_info = Dict( - Linux(:aarch64, libc=:glibc) => ("$bin_prefix/NNPACK.v2018.6.22.aarch64-linux-gnu.tar.gz", "e0c6e21ba4c47acfd5a3d3e3510e8786474080f654338f4583b88860296c1437"), - Linux(:i686, libc=:glibc) => ("$bin_prefix/NNPACK.v2018.6.22.i686-linux-gnu.tar.gz", "e9b6685001bc5a5d17acef15f3f6ffeb7beb6081926300f23ed4a442beac71ca"), - Linux(:i686, libc=:musl) => ("$bin_prefix/NNPACK.v2018.6.22.i686-linux-musl.tar.gz", "36c1d3c30b3bc3e0b34f215945bb46319f88e28f011fc758f21ba888b1fd9e25"), - MacOS(:x86_64) => ("$bin_prefix/NNPACK.v2018.6.22.x86_64-apple-darwin14.tar.gz", "b30046223a11470b15a2ceb0d0df6f7d8a43260fe52f4a2f8ebe5f0b2df822ca"), - Linux(:x86_64, libc=:glibc) => ("$bin_prefix/NNPACK.v2018.6.22.x86_64-linux-gnu.tar.gz", "150d5b6ca81fa72bfdc8bbda2428f0d3483fd11a5813724646c6d6c6a7ef969f"), - Linux(:x86_64, libc=:musl) => ("$bin_prefix/NNPACK.v2018.6.22.x86_64-linux-musl.tar.gz", "d961a104f814ec5b356519a82746a70a1df193ae37fc8130f38ffb61336def16"), -) - -# Install unsatisfied or updated dependencies: -unsatisfied = any(!satisfied(p; verbose=verbose) for p in products) -dl_info = choose_download(download_info, platform_key_abi()) -if dl_info === nothing && unsatisfied - # If we don't have a compatible .tar.gz to download, complain. - # Alternatively, you could attempt to install from a separate provider, - # build from source or something even more ambitious here. - @warn "Your platform (\"$(Sys.MACHINE)\", parsed as \"$(triplet(platform_key_abi()))\") is not supported by NNPACK! - You will only be able to use only the default NNlib backend." -end - -# If we have a download, and we are unsatisfied (or the version we're -# trying to install is not itself installed) then load it up! 
-# Download and install binaries -use_nnpack = get(ENV, "NNLIB_USE_NNPACK", "false") == "true" -os_support = Sys.islinux() || Sys.isapple() -if use_nnpack && os_support - if unsatisfied || !isinstalled(dl_info...; prefix=prefix) - install(dl_info...; prefix=prefix, force=true, verbose=verbose) - end - # Write out a deps.jl file that will contain mappings for our products - write_deps_file(joinpath(@__DIR__, "deps.jl"), products, verbose=verbose) -else - open(joinpath(@__DIR__, "deps.jl"), "w") do io - write(io, "check_deps() = false") - end -end - diff --git a/src/NNlib.jl b/src/NNlib.jl index 597817f66..b1f4503a3 100644 --- a/src/NNlib.jl +++ b/src/NNlib.jl @@ -1,14 +1,19 @@ module NNlib +using Pkg using Requires +using NNPACK_jll # Include APIs include("dim_helpers.jl") -# NNPACK support -include(joinpath(@__DIR__, "..", "deps", "deps.jl")) -if check_deps() == nothing + +if isdefined(NNPACK_jll, :libnnpack) include("nnpack/NNPACK.jl") else + @warn "NNPACK not available for your platform: " * + "$( Pkg.BinaryPlatforms.platform_name(Pkg.BinaryPlatforms.platform_key_abi()))" * + "($( Pkg.BinaryPlatforms.triplet(Pkg.BinaryPlatforms.platform_key_abi()))) + You will be able to use only the default Julia NNlib backend" is_nnpack_available() = false end diff --git a/src/activation.jl b/src/activation.jl index 0f95824c1..947dc496e 100644 --- a/src/activation.jl +++ b/src/activation.jl @@ -2,7 +2,7 @@ export σ, sigmoid, hardσ, hardsigmoid, hardtanh, relu, leakyrelu, relu6, rrelu logsigmoid, logcosh, mish, tanhshrink, softshrink, thresholdrelu, trelu, lisht ## Activation functions -# +# # Some of activation functions have its wrapper function for GPU in CuArrays.jl. # https://github.com/JuliaGPU/CuArrays.jl/issues/614 @@ -12,15 +12,11 @@ export σ, sigmoid, hardσ, hardsigmoid, hardtanh, relu, leakyrelu, relu6, rrelu Classic [sigmoid](https://en.wikipedia.org/wiki/Sigmoid_function) activation function. """ -σ(x::Real) = one(x) / (one(x) + exp(-x)) -const sigmoid = σ - -# ForwardDiff numerical stability hack -σ_stable(x::Real) = ifelse(x < -80, zero(x), one(x) / (one(x) + exp(-x))) -σ(x::Float32) = σ_stable(x) -@init @require ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210" begin - σ(x::ForwardDiff.Dual{T,Float32}) where T = σ_stable(x) +function σ(x::Real) + t = exp(-abs(x)) + ifelse(x ≥ 0, inv(one(t) + t), t / (one(t) + t)) end +const sigmoid = σ """ hardσ(x, a=0.2) = max(0, min(1.0, a * x + 0.5)) @@ -159,17 +155,17 @@ function selu(x::Real) end """ - celu(x, α=1) = + celu(x, α=1) = (x ≥ 0 ? x : α * (exp(x/α) - 1)) Continuously Differentiable Exponential Linear Units See [Continuously Differentiable Exponential Linear Units](https://arxiv.org/pdf/1704.07483.pdf). """ -celu(x::Real, α::Real = one(x)) = ifelse(x ≥ 0, x / one(x), α * (exp(x/α) - one(x))) +celu(x::Real, α::Real = one(x)) = ifelse(x ≥ 0, x / one(x), α * (exp(x/α) - one(x))) """ - trelu(x, theta = 1.0) = x > theta ? x : 0 + trelu(x, theta = 1.0) = x > theta ? x : 0 Threshold Gated Rectified Linear. See [ThresholdRelu](https://arxiv.org/pdf/1402.3337.pdf) @@ -218,7 +214,7 @@ See [Tanhshrink Activation Function](https://www.gabormelli.com/RKB/Tanhshrink_A tanhshrink(x::Real) = x - tanh(x) """ - softshrink(x, λ=0.5) = + softshrink(x, λ=0.5) = (x ≥ λ ? x - λ : (-λ ≥ x ? x + λ : 0)) See [Softshrink Activation Function](https://www.gabormelli.com/RKB/Softshrink_Activation_Function). 
@@ -226,7 +222,7 @@ See [Softshrink Activation Function](https://www.gabormelli.com/RKB/Softshrink_A softshrink(x::Real, λ = oftype(x/1, 0.5)) = min(max(zero(x), x - λ), x + λ) # Provide an informative error message if activation functions are called with an array -for f in (:σ, :σ_stable, :hardσ, :logσ, :hardtanh, :relu, :leakyrelu, :relu6, :rrelu, :elu, :gelu, :swish, :lisht, :selu, :celu, :trelu, :softsign, :softplus, :logcosh, :mish, :tanhshrink, :softshrink) +for f in (:σ, :hardσ, :logσ, :hardtanh, :relu, :leakyrelu, :relu6, :rrelu, :elu, :gelu, :swish, :lisht, :selu, :celu, :trelu, :softsign, :softplus, :logcosh, :mish, :tanhshrink, :softshrink) @eval $(f)(x::AbstractArray, args...) = error("Use broadcasting (`", $(string(f)), ".(x)`) to apply activation functions to arrays.") end diff --git a/src/conv.jl index 3a5d83d56..27d9f6c75 100644 --- a/src/conv.jl +++ b/src/conv.jl @@ -26,85 +26,60 @@ export conv, conv!, ∇conv_data, ∇conv_data!, ∇conv_filter, ∇conv_filter! # cdims = ConvDims(x, w; stride=2, dilation=(3,2)) # dx = ∇conv_data(conv(x, w, cdims), w, cdims) +# The computational flow, starting from the user-facing functions, +# goes through the following steps: +# +# STEP 1: +# use ConvDims objects (only for `conv` and `depthwiseconv`) +# STEP 2: +# define autoallocating version (frontend and implementations) +# STEP 3: +# reshape to 3d convolutions (frontend and implementations) +# STEP 4: +# choose implementation + +# TODO: should we also add +# STEP X: +# use homogeneous datatypes +# to handle heterogeneous inputs now handled by conv_direct? + + +########## STEP 1 ############ +""" + conv(x, w; stride=1, pad=0, dilation=1, flipped=false) - -# First, we will define mappings from the generic API names to our accelerated backend -# implementations. For homogeneous-datatype 1, 2 and 3d convolutions, we default to using -# im2col + GEMM. Do so in a loop, here: -for (front_name, backend) in ( - # This maps from public, front-facing name, to internal backend name - :conv => :im2col, - :∇conv_data => :im2col, - :∇conv_filter => :im2col, - :depthwiseconv => :im2col, - :∇depthwiseconv_data => :im2col, - :∇depthwiseconv_filter => :im2col, - ) - - # These are the GEMM types we will accelerate with `im2col` - G = Union{[x[2] for x in gemm_datatype_mappings]...} - - # We only define 3d conv primitives, we reshape lower down to get 1d and 2d convolution - @eval begin - # im2col-accelerated function forwarding definition - function $(Symbol("$(front_name)!"))( - out::AbstractArray{T,5}, in1::AbstractArray{T,5}, - in2::AbstractArray{T,5}, cdims::ConvDims; kwargs...) where {T <: $G} - $(Symbol("$(front_name)_$(backend)!"))(out, in1, in2, cdims; kwargs...) - end - end +Apply convolution filter `w` to input `x`. `x` and `w` are 3d/4d/5d tensors +in 1d/2d/3d convolutions respectively. +""" +function conv(x, w::AbstractArray{T, N}; stride=1, pad=0, dilation=1, flipped=false) where {T, N} + stride = expand(Val(N-2), stride) + pad = expand(Val(N-2), pad) + dilation = expand(Val(N-2), dilation) + cdims = DenseConvDims(x, w; stride=stride, padding=pad, dilation=dilation, flipkernel=flipped) + return conv(x, w, cdims) end -# Our strategy for 1d and 2d convolution is to reshape to 3d convolutions, which -# makes things MUCH EASIER for us on the backend side, and is in general pretty fast, -# since we can specialize on sizes.
-for front_name in (:conv, :∇conv_data, :∇conv_filter, - :depthwiseconv, :∇depthwiseconv_data, :∇depthwiseconv_filter) - for backend in (Symbol(), :_direct, :_im2col) - for N in (3, 4) - @eval begin - function $(Symbol("$(front_name)$(backend)!"))( - y::AbstractArray{yT,$N}, x::AbstractArray{xT,$N}, - w::AbstractArray{wT,$N}, cdims::ConvDims; - kwargs...) where {yT, xT, wT} - $(Symbol("$(front_name)$(backend)!"))( - insert_singleton_spatial_dimension(y, $(5 - N)), - insert_singleton_spatial_dimension(x, $(5 - N)), - insert_singleton_spatial_dimension(w, $(5 - N)), - insert_singleton_spatial_dimension(cdims, $(5 - N)); - kwargs... - ) +""" + depthwiseconv(x, w; stride=1, pad=0, dilation=1, flipped=false) - # We explicitly return `y` here, because the backend call - # itself may return a reshaped view, which we don't want. - return y - end - end - end - end +Depthwise convolution operation with filter `w` on input `x`. `x` and `w` +are 3d/4d/5d tensors in 1d/2d/3d convolutions respectively. +""" +function depthwiseconv(x, w::AbstractArray{T, N}; stride=1, pad=0, dilation=1, flipped=false) where {T, N} + stride = expand(Val(N-2), stride) + pad = expand(Val(N-2), pad) + dilation = expand(Val(N-2), dilation) + cdims = DepthwiseConvDims(x, w; stride=stride, padding=pad, dilation=dilation, flipkernel=flipped) + return depthwiseconv(x, w, cdims) end +############################## -# We always support a fallback, non-accelerated path, where we use the direct, but -# slow, implementations. These should not typically be used, hence the `@debug`, -# but let's ggo ahead and define them first: -for front_name in (:conv, :∇conv_data, :∇conv_filter, - :depthwiseconv, :∇depthwiseconv_data, :∇depthwiseconv_filter) - @eval begin - function $(Symbol("$(front_name)!"))( - y::AbstractArray{yT,N}, in1::AbstractArray{T1,N}, - in2::AbstractArray{T2,N}, cdims::ConvDims; - kwargs...) where {yT, T1, T2, N} - @debug string("Slow fallback implementation invoked for ", $(string(front_name)), "! ", - "You probably don't want this; check your datatypes.") yT T1 T2 - $(Symbol("$(front_name)_direct!"))(y, in1, in2, cdims; kwargs...) - end - end -end -# Finally, let's generate auto-allocating versions of all our functions, for all backends. +########### STEP 2 ################### +# Let's generate auto-allocating versions of all our functions, for all backends. # We `@timeit` these methods separately, as we want to know how much time is spent in # allocation. :P -for backend in (Symbol(), :_direct, :_im2col) +for backend in (Symbol(), :_direct, :_im2col, :_nnpack) # First make auto-allocating versions of the conv()-like calls: for name in (:conv, :depthwiseconv) @eval begin @@ -153,44 +128,107 @@ for backend in (Symbol(), :_direct, :_im2col) end end end +########################################## -# Use NNPACK if it is available and the operation is supported -if is_nnpack_available() - function conv(x::Array{xT, 4}, w::Array{wT, 4}, - cdims::DenseConvDims{2, K, C_in, C_out, (1, 1), P, (1, 1), F}; - kwargs...) where {xT, wT, K, C_in, C_out, P, F} - return conv_nnpack(x, w, cdims; kwargs...) - end -end +########## STEP 3 ############ -""" - conv(x, w; stride=1, pad=0, dilation=1, flipped=false) +# Our strategy for 1d and 2d convolution is to reshape to 3d convolutions, which +# makes things MUCH EASIER for us on the backend side, and is in general pretty fast, +# since we can specialize on sizes. 
+for front_name in (:conv, :∇conv_data, :∇conv_filter, + :depthwiseconv, :∇depthwiseconv_data, :∇depthwiseconv_filter) + for backend in (Symbol(), :_direct, :_im2col) ## NNPACK is only for 2d conv + for N in (3, 4) + @eval begin + function $(Symbol("$(front_name)$(backend)!"))( + y::AbstractArray{yT,$N}, x::AbstractArray{xT,$N}, + w::AbstractArray{wT,$N}, cdims::ConvDims; + kwargs...) where {yT, xT, wT} + $(Symbol("$(front_name)$(backend)!"))( + insert_singleton_spatial_dimension(y, $(5 - N)), + insert_singleton_spatial_dimension(x, $(5 - N)), + insert_singleton_spatial_dimension(w, $(5 - N)), + insert_singleton_spatial_dimension(cdims, $(5 - N)); + kwargs... + ) -Apply convolution filter `w` to input `x`. `x` and `w` are 3d/4d/5d tensors -in 1d/2d/3d convolutions respectively. -""" -function conv(x, w::AbstractArray{T, N}; stride=1, pad=0, dilation=1, flipped=false) where {T, N} - stride = expand(Val(N-2), stride) - pad = expand(Val(N-2), pad) - dilation = expand(Val(N-2), dilation) - cdims = DenseConvDims(x, w; stride=stride, padding=pad, dilation=dilation, flipkernel=flipped) - return conv(x, w, cdims) + # We explicitly return `y` here, because the backend call + # itself may return a reshaped view, which we don't want. + return y + end + end + end + end end +####################################### + +########### STEP 4 ############ +# First, we will define mappings from the generic API names to our accelerated backend +# implementations. For homogeneous-datatype 1, 2 and 3d convolutions, we default to using +# im2col + GEMM. Do so in a loop, here: +for (front_name, backend) in ( + # This maps from public, front-facing name, to internal backend name + :conv => :im2col, + :∇conv_data => :im2col, + :∇conv_filter => :im2col, + :depthwiseconv => :im2col, + :∇depthwiseconv_data => :im2col, + :∇depthwiseconv_filter => :im2col, + ) + # These are the GEMM types we will accelerate with `im2col` + G = Union{[x[2] for x in gemm_datatype_mappings]...} -""" - depthwiseconv(x, w; stride=1, pad=0, dilation=1, flipped=false) + # We only define 3d conv primitives, we reshape lower down to get 1d and 2d convolution + @eval begin + # im2col-accelerated function forwarding definition + function $(Symbol("$(front_name)!"))( + out::AbstractArray{T,5}, in1::AbstractArray{T,5}, + in2::AbstractArray{T,5}, cdims::ConvDims; kwargs...) where {T <: $G} + $(Symbol("$(front_name)_$(backend)!"))(out, in1, in2, cdims; kwargs...) + end + end +end -Depthwise convolution operation with filter `w` on input `x`. `x` and `w` -are 3d/4d/5d tensors in 1d/2d/3d convolutions respectively. -""" -function depthwiseconv(x, w::AbstractArray{T, N}; stride=1, pad=0, dilation=1, flipped=false) where {T, N} - stride = expand(Val(N-2), stride) - pad = expand(Val(N-2), pad) - dilation = expand(Val(N-2), dilation) - cdims = DepthwiseConvDims(x, w; stride=stride, padding=pad, dilation=dilation, flipkernel=flipped) - return depthwiseconv(x, w, cdims) +# We always support a fallback, non-accelerated path, where we use the direct, but +# slow, implementations. These should not typically be used, hence the `@warn`, +# but let's go ahead and define them first: +for front_name in (:conv, :∇conv_data, :∇conv_filter, + :depthwiseconv, :∇depthwiseconv_data, :∇depthwiseconv_filter) + @eval begin + function $(Symbol("$(front_name)!"))( + y::AbstractArray{yT,N}, in1::AbstractArray{T1,N}, + in2::AbstractArray{T2,N}, cdims::ConvDims; + kwargs...) where {yT, T1, T2, N} + @warn string("Slow fallback implementation invoked for ", $(string(front_name)), "! 
", + "You probably don't want this; check your datatypes.") yT T1 T2 + $(Symbol("$(front_name)_direct!"))(y, in1, in2, cdims; kwargs...) + end + end end + +# Use NNPACK if it is available and the operation is supported +# commented out 'till proper benchmarking and more correctness test are performed +# if is_nnpack_available() +# function conv(x::Array{Float32, 4}, w::Array{Float32, 4}, +# cdims::DenseConvDims{2, K, C_in, C_out, (1, 1), P, (1, 1), F}; +# kwargs...) where {K, C_in, C_out, P, F} +# return conv_nnpack(x, w, cdims; kwargs...) +# end + +# function ∇conv_data(dy::Array{Float32, 4}, w::Array{Float32, 4}, +# cdims::DenseConvDims{2, K, C_in, C_out, (1, 1), P, (1, 1), F}; +# kwargs...) where {K, C_in, C_out, P, F} +# return ∇conv_data_nnpack(dy, w, cdims; kwargs...) +# end + +# function ∇conv_filter(x::Array{Float32, 4}, dy::Array{Float32, 4}, +# cdims::DenseConvDims{2, K, C_in, C_out, (1, 1), P, (1, 1), F}; +# kwargs...) where {K, C_in, C_out, P, F} +# return ∇conv_filter_nnpack(x, dy, cdims; kwargs...) +# end +# end +######################################################## \ No newline at end of file diff --git a/src/dim_helpers/PoolDims.jl b/src/dim_helpers/PoolDims.jl index 97144968e..8c22072f6 100644 --- a/src/dim_helpers/PoolDims.jl +++ b/src/dim_helpers/PoolDims.jl @@ -1,7 +1,8 @@ export PoolDims """ - PoolDims + PoolDims(x_size::NTuple{M}, k::Union{NTuple{L, Int}, Int}; + stride=k, padding=0, dilation=1) where {M, L} Dimensions for a "pooling" operation that can have an arbitrary input size, kernel size, stride, dilation, and channel count. Used to dispatch onto efficient implementations at diff --git a/src/impl/conv_im2col.jl b/src/impl/conv_im2col.jl index e06231325..d571a97c5 100644 --- a/src/impl/conv_im2col.jl +++ b/src/impl/conv_im2col.jl @@ -250,27 +250,12 @@ function im2col!(col::AbstractArray{T,2}, x::AbstractArray{T,4}, kidxs = kernel_index(kw, kh, kd, cdims) - # If this d is off the edge, then deal with the entire plane - # in one fell swoop, like a ravenous flock of crows. CAW CAW. - if input_kd <= 0 || input_kd > depth - for kh in 1:kernel_h, - kw in 1:kernel_w - col_reshaped[w, h, d, kidxs..., c] = T(0) - end - continue - end - - # Same for `h`, but in this case it's only a line, not a plane. - # This results in slightly less caw'ing. - if input_kh <= 0 || input_kh > height - for kw in 1:kernel_w - col_reshaped[w, h, d, kidxs..., c] = T(0) - end - continue - end - - # If this `w` is off the edge it and only it gets cleared out - if input_kw <= 0 || input_kw > width + out_of_bounds = ( + input_kd <= 0 || input_kd > depth || + input_kh <= 0 || input_kh > height || + input_kw <= 0 || input_kw > width + ) + if out_of_bounds col_reshaped[w, h, d, kidxs..., c] = T(0) continue end diff --git a/src/impl/pooling_direct.jl b/src/impl/pooling_direct.jl index f95ab32f5..d74768937 100644 --- a/src/impl/pooling_direct.jl +++ b/src/impl/pooling_direct.jl @@ -6,6 +6,7 @@ for name in (:max, :mean) @eval function $((Symbol("$(name)pool_direct!")))( y::AbstractArray{T,5}, x::AbstractArray{T,5}, pdims::PoolDims; alpha::T = T(1), beta::T = T(0)) where {T} + @assert beta == T(0) "beta not supported yet" check_dims(size(x), size(y), pdims) width, height, depth = input_size(pdims) @@ -175,8 +176,10 @@ for name in (:max, :mean) if $(name == :max) # If it's equal; this is the one we chose. We only choose one per # kernel window, all other elements of dx must be zero. - if y_idx == x[x_idxs...] && !maxpool_already_chose - dx[x_idxs...] = dy_idx*alpha + beta*dx[x_idxs...] 
+ # Uncomment line below if using with non-precise output (e.g. by NNPACK) + # if abs(y_idx - x[x_idxs...]) < 1e-5 && !maxpool_already_chose + if y_idx ≈ x[x_idxs...] && !maxpool_already_chose + dx[x_idxs...] += dy_idx*alpha + beta*dx[x_idxs...] maxpool_already_chose = true # Maxpooling does not support `beta` right now. :( #else @@ -227,8 +230,10 @@ for name in (:max, :mean) # Same as above x_idxs = (input_kw, input_kh, input_kd, c, batch_idx) if $(name == :max) - if y_idx == x[x_idxs...] && !maxpool_already_chose - dx[x_idxs...] = dy_idx*alpha + beta*dx[x_idxs...] + # Uncomment line below if using with non-precise output + # if abs(y_idx - x[x_idxs...]) < 1e-5 && !maxpool_already_chose + if y_idx ≈ x[x_idxs...] && !maxpool_already_chose + dx[x_idxs...] += dy_idx*alpha + beta*dx[x_idxs...] maxpool_already_chose = true #else # dx[x_idxs...] = T(0) + beta*dx[x_idxs...] diff --git a/src/nnpack/NNPACK.jl b/src/nnpack/NNPACK.jl index 1e420a9cc..3527296f1 100644 --- a/src/nnpack/NNPACK.jl +++ b/src/nnpack/NNPACK.jl @@ -4,10 +4,6 @@ include("libnnpack.jl") include("performance.jl") include("interface.jl") -const depsjl_path = joinpath(dirname(@__FILE__), "..", "..", "deps", "deps.jl") -if !isfile(depsjl_path) - error("NNPACK not installed properly, run Pkg.build(\"NNlib\"), restart Julia and try again") -end const shared_threadpool_dict = Dict{UInt64, Base.RefValue}() @@ -17,7 +13,6 @@ const shared_threadpool_dict = Dict{UInt64, Base.RefValue}() Checks if the current hardware is supported by NNPACK. """ function is_nnpack_available() - check_deps() isa Nothing || return false status = nnp_initialize() if status == nnp_status_unsupported_hardware return false @@ -34,15 +29,14 @@ Allows NNPACK to intelligently choose which threadpool to use for getting the be performance. """ function allocate_threadpool() - global NNPACK_CPU_THREADS = NNPACK_CPU_THREADS > 8 ? UInt64(8) : floor(log2(NNPACK_CPU_THREADS)) - for i in 1:Int(NNPACK_CPU_THREADS) + global NNPACK_CPU_THREADS = NNPACK_CPU_THREADS > 8 ? 
UInt64(8) : UInt64(exp2(floor(log2(NNPACK_CPU_THREADS)))) + for i in 0:Int(log2(NNPACK_CPU_THREADS)) threads = UInt64(2^i) push!(shared_threadpool_dict, threads => Ref(pthreadpool_create(threads))) end end @init begin - check_deps() status = nnp_initialize() if status == nnp_status_unsupported_hardware @warn "Hardware is unsupported by NNPACK so falling back to default NNlib" diff --git a/src/nnpack/impl.jl b/src/nnpack/impl.jl index 5d3086583..3309404e1 100644 --- a/src/nnpack/impl.jl +++ b/src/nnpack/impl.jl @@ -23,7 +23,7 @@ end function ∇conv_data_nnpack!(dx::A, dy::A, w::A, cdims::ConvDims; algo = UInt32(0)) where{A<:Array{Float32, 4}} check_dims(size(dx), size(w), size(dy), cdims) - threadpool = select_threadpool(cdims, size(y, 4)) + threadpool = select_threadpool(cdims, size(dy, 4)) if flipkernel(cdims) == 0 w = flipweight(w) @@ -36,7 +36,7 @@ end function ∇conv_filter_nnpack!(dw::A, x::A, dy::A, cdims::ConvDims; algo = UInt32(0)) where{A<:Array{Float32, 4}} check_dims(size(x), size(dw), size(dy), cdims) - threadpool = select_threadpool(cdims, size(y, 4)) + threadpool = select_threadpool(cdims, size(dy, 4)) nnp_convolution_kernel_gradient(dw, x, dy, algo = algo, padding = padding(cdims), stride = stride(cdims), threadpool = threadpool) diff --git a/src/nnpack/interface.jl b/src/nnpack/interface.jl index 25ab93632..5cdaccb4d 100644 --- a/src/nnpack/interface.jl +++ b/src/nnpack/interface.jl @@ -1,6 +1,6 @@ include("impl.jl") - +## NNPACK supports only Float32 for (front_name, backend) in ( :conv => :_nnpack, :∇conv_data => :_nnpack, @@ -18,25 +18,6 @@ for (front_name, backend) in ( end end - -function conv_nnpack(x::Array{T1, 4}, w::Array{T2, 4}, cdims::ConvDims; kwargs...) where {T1, T2} - y = similar(x, output_size(cdims)..., channels_out(cdims), size(x, 4)) - return conv_nnpack!(y, x, w, cdims; kwargs...) -end - - -function ∇conv_data(dy::Array{T1, 4}, w::Array{T2, 4}, cdims::ConvDims; kwargs...) where {T1, T2} - dx = similar(dy, input_size(cdims)..., channels_in(cdims), size(dy, 4)) - return ∇conv_data!(dx, dy, w, cdims; kwargs...) -end - - -function ∇conv_filter(x::Array{T1, 4}, dy::Array{T2, 4}, cdims::ConvDims; kwargs...) where {T1, T2} - dw = similar(x, kernel_size(cdims)..., channels_in(cdims), channels_out(cdims)) - return ∇conv_filter!(dw, x, dy, cdims; kwargs...) -end - - function maxpool_nnpack!(y::Array{T1, 4}, x::Array{T2, 4}, pdims::PoolDims; kwargs...) where {T1, T2} @warn "Automatically converting input tensor to Float32. This will have performance implications" maxlog=1 @@ -44,13 +25,6 @@ function maxpool_nnpack!(y::Array{T1, 4}, x::Array{T2, 4}, pdims::PoolDims; T1.(maxpool_nnpack!(Float32.(y), Float32.(x), pdims; kwargs...)) end - -function maxpool_nnpack(x::Array{T, 4}, pdims::PoolDims; kwargs...) where {T} - y = similar(x, output_size(pdims)..., channels_out(pdims), size(x, 4)) - return maxpool_nnpack!(y, x, pdims; kwargs...) 
-end - - """ nnpack_supported_operation(cdims::ConvDims) nnpack_supported_operation(pdims::PoolDims) diff --git a/src/pooling.jl b/src/pooling.jl index 84dfc7cd0..c97df891d 100644 --- a/src/pooling.jl +++ b/src/pooling.jl @@ -103,7 +103,7 @@ end # Finally, let's generate auto-allocating versions of all our functions, for all backends: -for backend in (Symbol(), :_direct, :_im2col) +for backend in (Symbol(), :_direct, :_nnpack) # First make auto-allocating versions of the basic pooling calls: for name in (:maxpool, :meanpool) @eval begin @@ -128,14 +128,15 @@ for backend in (Symbol(), :_direct, :_im2col) end end - -# Use NNPACK if it is available and operation is supported -if is_nnpack_available() - function maxpool(x::Array{T, 4}, pdims::PoolDims{2, K, S, P, (1, 1)}; kwargs...) where {T, K, S, P} - func = nnpack_supported_operation(pdims) ? maxpool_nnpack : maxpool_direct - return func(x, pdims; kwargs...) - end -end +## Use NNPACK if it is available and operation is supported. +## The corresponding gradient is not available in NNPACK +## Commented out due to #210 +# if is_nnpack_available() +# function maxpool(x::Array{Float32, 4}, pdims::PoolDims{2, K, S, P, (1, 1)}; kwargs...) where {T, K, S, P} +# func = nnpack_supported_operation(pdims) ? maxpool_nnpack : maxpool_direct +# return func(x, pdims; kwargs...) +# end +# end expand(N, i::Tuple) = i expand(N, i::Integer) = ntuple(_ -> i, N) diff --git a/src/softmax.jl b/src/softmax.jl index 9aa12d63e..875fffa5b 100644 --- a/src/softmax.jl +++ b/src/softmax.jl @@ -3,8 +3,8 @@ export softmax, softmax!, ∇softmax, ∇softmax!, """ softmax(x; dims=1) - -[Softmax](https://en.wikipedia.org/wiki/Softmax_function) turns input array `x` + +[Softmax](https://en.wikipedia.org/wiki/Softmax_function) turns input array `x` into probability distributions that sum to 1 along the dimensions specified by `dims`. It is semantically equivalent to the following: @@ -13,7 +13,7 @@ It is semantically equivalent to the following: with additional manipulations enhancing numerical stability. For a matrix input `x` it will by default (`dims=1`) treat it as a batch of vectors, -with each column independent. Keyword `dims=2` will instead treat rows independently, +with each column independent. Keyword `dims=2` will instead treat rows independently, etc... 
```julia-repl julia> softmax([1, 2, 3]) @@ -108,5 +108,9 @@ function logsoftmax!(out::AbstractVecOrMat, xs::AbstractVecOrMat) return out end +function ∇logsoftmax!(out::AbstractVecOrMat, Δ::AbstractVecOrMat, xs::AbstractVecOrMat) + out .= Δ .- sum(Δ, dims=1) .* softmax(xs, dims=1) +end + ∇logsoftmax(Δ, xs; dims=1) = Δ .- sum(Δ, dims=dims) .* softmax(xs, dims=dims) -∇logsoftmax!(Δ, xs) = ∇softmax!(Δ, Δ, xs) +∇logsoftmax!(Δ, xs) = ∇logsoftmax!(Δ, Δ, xs) diff --git a/test/activation.jl b/test/activation.jl index 4e546a58f..70558fc62 100644 --- a/test/activation.jl +++ b/test/activation.jl @@ -68,8 +68,8 @@ end @test rrelu(1.0) == 1.0 @test elu(1.0) == 1.0 @test gelu(1.0) == 0.8411919906082768 - @test swish(1.0) == 1.0 / (1.0 + exp(-1.0)) - @test lisht(1.0) ≈ 1.0 * tanh(1.0) + @test swish(1.0) == σ(1.0) + @test lisht(1.0) ≈ 1.0 * tanh(1.0) @test softplus(1.0) ≈ log(exp(1.0) + 1.0) @test softsign(1.0) == 0.5 @test selu(1.0) == 1.0507009873554804934193349852946 @@ -80,7 +80,7 @@ end @test tanhshrink(1.0) ≈ 0.23840584404423515 @test softshrink(1.0) == 0.5 - @test σ(-1.0) == 1.0 / (1.0 + exp(1.0)) + @test σ(-1.0) == exp(-1.0) / (1.0 + exp(-1.0)) @test hardσ(-1.0) == max(0,min(1,0.2*-1.0 + 0.5)) @test hardtanh(-1.0) == -1.0 @test relu(-1.0) == 0.0 @@ -89,7 +89,7 @@ end @test -1/3.0 <= rrelu(-1.0) <= -1/8.0 @test elu(-1.0) == exp(-1.0) - 1.0 @test gelu(-1.0) == -0.15880800939172324 - @test swish(-1.0) == -1.0 / (1.0 + exp(1.0)) + @test swish(-1.0) == -σ(-1.0) @test lisht(-1.0) ≈ -1.0 * tanh(-1.0) @test softplus(-1.0) ≈ log(exp(-1.0) + 1.0) @test softsign(-1.0) == -0.5 @@ -126,20 +126,20 @@ end @test typeof(relu6(Int64(1))) == Int64 @test typeof(relu6(Int32(1))) == Int32 end - + @testset "hardtanh: " begin # hardtanh doesn't have to force floating point outputs @test typeof(hardtanh(Int64(1))) == Int64 @test typeof(hardtanh(Int32(1))) == Int32 end - + @testset "trelu: " begin # trelu doesn't have to force floating point outputs @test typeof(trelu(Int64(1))) == Int64 @test typeof(trelu(Int32(1))) == Int32 end end - + @testset "Float gradient inference" begin test_gradient_float_precision_preserving.(ACTIVATION_FUNCTIONS) end @@ -201,7 +201,7 @@ end @test leakyrelu(-0.4,0.3) ≈ -0.12 @test relu6(10.0) == 6.0 - @test -0.2 <= rrelu(-0.4,0.25,0.5) <= -0.1 + @test -0.2 <= rrelu(-0.4,0.25,0.5) <= -0.1 @testset "celu" begin @test celu(42) == 42 @@ -225,7 +225,7 @@ end end @test logcosh(1_000.0) + log(2) == 1_000.0 - + @testset "hardsigmoid" begin @test hardsigmoid(0.3) == 0.56 @test hardsigmoid(-0.3) == 0.44 @@ -234,14 +234,77 @@ end @eval @test hardsigmoid.($T[-100_000, 100_000.]) ≈ $T[0., 1.] 
end end - + @test hardtanh(10.0) == 1.0 @test lisht(2.5) == 2.5*tanh(2.5) - + @testset "trelu" begin @test trelu(0.5) == 0.0 @test trelu(1.0) == 0.0 @test trelu(1.1) == 1.1 @test trelu(0.9,0.5) == 0.9 end + + @testset "mutating softmax" begin + xs = Float64[1 2 3; 5 6 7] + + out = zeros(Float64, size(xs)) + NNlib.softmax!(out, xs) + @test isapprox(out, softmax(xs); rtol=1e-6) + NNlib.logsoftmax!(out, xs) + @test isapprox(out, logsoftmax(xs); rtol=1e-6) + + out = ones(Float64, size(xs)) + NNlib.softmax!(out, xs) + @test isapprox(out, softmax(xs); rtol=1e-6) + NNlib.logsoftmax!(out, xs) + @test isapprox(out, logsoftmax(xs); rtol=1e-6) + + out = zeros(Float64, size(xs)) + NNlib.∇softmax!(out, xs) + @test isapprox(out, NNlib.∇softmax(zeros(size(xs)), xs); rtol=1e-6) + out = zeros(Float64, size(xs)) + NNlib.∇logsoftmax!(out, xs) + @test isapprox(out, NNlib.∇logsoftmax(zeros(size(xs)), xs); rtol=1e-6) + + out = ones(Float64, size(xs)) + NNlib.∇softmax!(out, xs) + @test isapprox(out, NNlib.∇softmax(ones(size(xs)), xs); rtol=1e-6) + out = ones(Float64, size(xs)) + NNlib.∇logsoftmax!(out, xs) + @test isapprox(out, NNlib.∇logsoftmax(ones(size(xs)), xs); rtol=1e-6) + + xs = [ + -0.238639 0.748142 -0.283194 -0.525461 -1.5348 -0.797842; + 0.690384 0.211427 0.254794 -0.213572 -0.314174 -0.372663; + -1.146370 -0.577988 0.718952 0.919720 -0.620773 0.929977 + ] + + out = zeros(Float64, size(xs)) + NNlib.softmax!(out, xs) + @test isapprox(out, softmax(xs); rtol=1e-6) + NNlib.logsoftmax!(out, xs) + @test isapprox(out, logsoftmax(xs); rtol=1e-6) + + out = ones(Float64, size(xs)) + NNlib.softmax!(out, xs) + @test isapprox(out, softmax(xs); rtol=1e-6) + NNlib.logsoftmax!(out, xs) + @test isapprox(out, logsoftmax(xs); rtol=1e-6) + + out = zeros(Float64, size(xs)) + NNlib.∇softmax!(out, xs) + @test isapprox(out, NNlib.∇softmax(zeros(size(xs)), xs); rtol=1e-6) + out = zeros(Float64, size(xs)) + NNlib.∇logsoftmax!(out, xs) + @test isapprox(out, NNlib.∇logsoftmax(zeros(size(xs)), xs); rtol=1e-6) + + out = ones(Float64, size(xs)) + NNlib.∇softmax!(out, xs) + @test isapprox(out, NNlib.∇softmax(ones(size(xs)), xs); rtol=1e-6) + out = ones(Float64, size(xs)) + NNlib.∇logsoftmax!(out, xs) + @test isapprox(out, NNlib.∇logsoftmax(ones(size(xs)), xs); rtol=1e-6) + end + end diff --git a/test/batchedmul.jl b/test/batchedmul.jl index 18cf16ae1..e08abb91b 100644 --- a/test/batchedmul.jl +++ b/test/batchedmul.jl @@ -143,3 +143,65 @@ end =# end + +@testset "BatchedAdjOrTrans interface * $TB" for TB in [Float64, Float32] + A = randn(7,5,3) + B = randn(TB, 5,7,3) + C = randn(7,6,3) + + function interface_tests(X, _X) + @test length(_X) == length(X) + @test size(_X) == (size(X, 2), size(X, 1), size(X, 3)) + @test axes(_X) == (axes(X, 2), axes(X, 1), axes(X, 3)) + # + @test getindex(_X, 2, 3, 3) == getindex(X, 3, 2, 3) + @test getindex(_X, 5, 4, 1) == getindex(X, 4, 5, 1) + # + setindex!(_X, 2.0, 2, 4, 1) + @test getindex(_X, 2, 4, 1) == 2.0 + setindex!(_X, 3.0, 1, 2, 2) + @test getindex(_X, 1, 2, 2) == 3.0 + + _sim = similar(_X, TB, (2, 3)) + @test size(_sim) == (2, 3) + @test typeof(_sim) == Array{TB, 2} + + _sim = similar(_X, TB) + @test length(_sim) == length(_X) + @test typeof(_sim) == Array{TB, 3} + + _sim = similar(_X, (2, 3)) + @test size(_sim) == (2, 3) + @test typeof(_sim) == Array{Float64, 2} + + _sim = similar(_X) + @test length(_sim) == length(_X) + @test typeof(_sim) == Array{Float64, 3} + + @test parent(_X) == _X.parent + end + + for (X, _X) in zip([A, B, C], map(batched_adjoint, [A, B, C])) + interface_tests(X, _X) + 
+ @test -_X == NNlib.BatchedAdjoint(-_X.parent) + + _copyX = copy(_X) + @test _X == _copyX + + setindex!(_copyX, 2.0, 1, 2, 1) + @test _X != _copyX + end + + for (X, _X) in zip([A, B, C], map(batched_transpose, [A, B, C])) + interface_tests(X, _X) + + @test -_X == NNlib.BatchedTranspose(-_X.parent) + + _copyX = copy(_X) + @test _X == _copyX + + setindex!(_copyX, 2.0, 1, 2, 1) + @test _X != _copyX + end +end diff --git a/test/conv.jl b/test/conv.jl index 63fc4fc71..cd198db71 100644 --- a/test/conv.jl +++ b/test/conv.jl @@ -358,7 +358,7 @@ conv_answer_dict = Dict( end end -if get(ENV,"NNLIB_TEST_FUZZING","false") == "true" +if get(ENV, "NNLIB_TEST_FUZZING", "true") == "true" # @info("Skipping Convolutional fuzzing tests, set NNLIB_TEST_FUZZING=true to run them") @testset "fuzzing" begin @info("Starting Convolutional fuzzing tests; this can take a few minutes...") @@ -559,7 +559,7 @@ end end -if get(ENV,"NNLIB_TEST_FUZZING","false") == "true" +if get(ENV,"NNLIB_TEST_FUZZING","true") == "true" @testset "fuzzing" begin @info("Starting Depthwise Convolutional fuzzing tests; this can take a few minutes...") # Now that we're fairly certain things are working, let's fuzz things a little bit: diff --git a/test/inference.jl b/test/inference.jl index 39b5108ce..5d695ae24 100644 --- a/test/inference.jl +++ b/test/inference.jl @@ -1,13 +1,18 @@ import NNlib: conv_direct, conv_im2col @testset "Conv Inference" begin - x = rand(10, 10, 3, 2) - w = rand(3, 3, 3, 1) + + for T in (Float32, Float64) + impl = [conv, conv_direct, conv_im2col] + if NNlib.is_nnpack_available() && T == Float32 + push!(impl, NNlib.conv_nnpack) + end - impl = [conv, conv_direct, conv_im2col] - NNlib.is_nnpack_available() && push!(impl, NNlib.conv_nnpack) - - for T in impl - @test T(x, w, DenseConvDims(x, w)) isa AbstractArray{K,4} where K + x = rand(T, 10, 10, 3, 2) + w = rand(T, 3, 3, 3, 1) + + for f in impl + @test @inferred(f(x, w, DenseConvDims(x, w))) isa Array{T,4} + end end end diff --git a/test/perf/Manifest.toml b/test/perf/Manifest.toml index af3330b39..ad38057a1 100644 --- a/test/perf/Manifest.toml +++ b/test/perf/Manifest.toml @@ -1,73 +1,79 @@ # This file is machine-generated - editing it directly is not advised [[AbstractFFTs]] -deps = ["Compat", "LinearAlgebra"] -git-tree-sha1 = "8d59c3b1463b5e0ad05a3698167f85fac90e184d" +deps = ["LinearAlgebra"] +git-tree-sha1 = "051c95d6836228d120f5f4b984dd5aba1624f716" uuid = "621f4979-c628-5d54-868e-fcf4e3e8185c" -version = "0.3.2" +version = "0.5.0" [[Arpack]] -deps = ["BinaryProvider", "Libdl", "LinearAlgebra", "Random", "SparseArrays", "Test"] -git-tree-sha1 = "1ce1ce9984683f0b6a587d5bdbc688ecb480096f" +deps = ["Arpack_jll", "Libdl", "LinearAlgebra"] +git-tree-sha1 = "2ff92b71ba1747c5fdd541f8fc87736d82f40ec9" uuid = "7d9fca2a-8960-54d3-9f78-7d1dccf2cb97" -version = "0.3.0" +version = "0.4.0" + +[[Arpack_jll]] +deps = ["Libdl", "OpenBLAS_jll", "Pkg"] +git-tree-sha1 = "e214a9b9bd1b4e1b4f15b22c0994862b66af7ff7" +uuid = "68821587-b530-5797-8361-c406ea357684" +version = "3.5.0+3" + +[[ArrayInterface]] +deps = ["LinearAlgebra", "Requires", "SparseArrays"] +git-tree-sha1 = "649c08a5a3a513f4662673d3777fe6ccb4df9f5d" +uuid = "4fba245c-0d91-5ea0-9b3e-6abc04ee57a9" +version = "2.8.7" [[AxisAlgorithms]] -deps = ["Compat", "WoodburyMatrices"] -git-tree-sha1 = "99dabbe853e4f641ab21a676131f2cf9fb29937e" +deps = ["LinearAlgebra", "Random", "SparseArrays", "WoodburyMatrices"] +git-tree-sha1 = "a4d07a1c313392a77042855df46c5f534076fab9" uuid = "13072b0f-2c55-5437-9ae7-d433b7a33950" -version = 
"0.3.0" +version = "1.0.0" [[Base64]] uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" [[BenchmarkTools]] -deps = ["JSON", "Printf", "Statistics", "Test"] -git-tree-sha1 = "5d1dd8577643ba9014574cd40d9c028cd5e4b85a" +deps = ["JSON", "Logging", "Printf", "Statistics", "UUIDs"] +git-tree-sha1 = "9e62e66db34540a0c919d72172cc2f642ac71260" uuid = "6e4b80f9-dd63-53aa-95a3-0cdb28fa8baf" -version = "0.4.2" - -[[BinDeps]] -deps = ["Compat", "Libdl", "SHA", "URIParser"] -git-tree-sha1 = "12093ca6cdd0ee547c39b1870e0c9c3f154d9ca9" -uuid = "9e28174c-4ba2-5203-b857-d8d62c4213ee" -version = "0.8.10" - -[[BinaryProvider]] -deps = ["Libdl", "Pkg", "SHA", "Test"] -git-tree-sha1 = "055eb2690182ebc31087859c3dd8598371d3ef9e" -uuid = "b99e7846-7c00-51b0-8f62-c81ae34c0232" -version = "0.5.3" +version = "0.5.0" -[[Calculus]] -deps = ["Compat"] -git-tree-sha1 = "f60954495a7afcee4136f78d1d60350abd37a409" -uuid = "49dc2e85-a5d0-5ad3-a950-438e2897f1b9" -version = "0.4.1" +[[Bzip2_jll]] +deps = ["Libdl", "Pkg"] +git-tree-sha1 = "3663bfffede2ef41358b6fc2e1d8a6d50b3c3904" +uuid = "6e34b625-4abd-537c-b88f-471c36dfa7a0" +version = "1.0.6+2" [[Clustering]] -deps = ["Dates", "Distances", "LinearAlgebra", "NearestNeighbors", "Printf", "Random", "SparseArrays", "Statistics", "StatsBase", "Test"] -git-tree-sha1 = "c39b2cbf3ee27716f725e358bcb6952f3ac177b3" +deps = ["Distances", "LinearAlgebra", "NearestNeighbors", "Printf", "SparseArrays", "Statistics", "StatsBase"] +git-tree-sha1 = "7846d785d9e4bcc904b70689bde8413f85b0ca20" uuid = "aaaa29a8-35af-508c-8bc3-b662a17a0fe5" -version = "0.12.2" +version = "0.14.0" [[CodecZlib]] -deps = ["BinaryProvider", "Libdl", "Test", "TranscodingStreams"] -git-tree-sha1 = "36bbf5374c661054d41410dc53ff752972583b9b" +deps = ["TranscodingStreams", "Zlib_jll"] +git-tree-sha1 = "ded953804d019afa9a3f98981d99b33e3db7b6da" uuid = "944b1d66-785c-5afd-91f1-9de20f533193" -version = "0.5.2" +version = "0.7.0" + +[[ColorSchemes]] +deps = ["ColorTypes", "Colors", "FixedPointNumbers", "Random", "StaticArrays"] +git-tree-sha1 = "7a15e3690529fd1042f0ab954dff7445b1efc8a5" +uuid = "35d6a980-a343-548e-a6ea-1d62b119f2f4" +version = "3.9.0" [[ColorTypes]] -deps = ["FixedPointNumbers", "Random", "Test"] -git-tree-sha1 = "f73b0e10f2a5756de7019818a41654686da06b09" +deps = ["FixedPointNumbers", "Random"] +git-tree-sha1 = "c73d9cfc2a9d8433dc77f5bff4bddf46b1d78c20" uuid = "3da002f7-5984-5a60-b8a6-cbb66c0b333f" -version = "0.7.5" +version = "0.10.3" [[Colors]] -deps = ["ColorTypes", "FixedPointNumbers", "InteractiveUtils", "Printf", "Reexport", "Test"] -git-tree-sha1 = "9f0a0210450acb91c730b730a994f8eef1d3d543" +deps = ["ColorTypes", "FixedPointNumbers", "InteractiveUtils", "Reexport"] +git-tree-sha1 = "1e9bba7984e78aa8cdeea7f9f7cc984ad4e4b1c7" uuid = "5ae59095-9a9b-59fe-a467-6f913c188581" -version = "0.9.5" +version = "0.12.2" [[CommonSubexpressions]] deps = ["Test"] @@ -76,40 +82,44 @@ uuid = "bbf7d656-a473-5ed7-a52c-81e309532950" version = "0.2.0" [[Compat]] -deps = ["Base64", "Dates", "DelimitedFiles", "Distributed", "InteractiveUtils", "LibGit2", "Libdl", "LinearAlgebra", "Markdown", "Mmap", "Pkg", "Printf", "REPL", "Random", "Serialization", "SharedArrays", "Sockets", "SparseArrays", "Statistics", "Test", "UUIDs", "Unicode"] -git-tree-sha1 = "49269e311ffe11ac5b334681d212329002a9832a" +deps = ["Base64", "Dates", "DelimitedFiles", "Distributed", "InteractiveUtils", "LibGit2", "Libdl", "LinearAlgebra", "Markdown", "Mmap", "Pkg", "Printf", "REPL", "Random", "SHA", "Serialization", "SharedArrays", "Sockets", "SparseArrays", 
"Statistics", "Test", "UUIDs", "Unicode"] +git-tree-sha1 = "054993b6611376ddb40203e973e954fd9d1d1902" uuid = "34da2185-b29b-5c13-b0c7-acf172513d20" -version = "1.5.1" +version = "3.12.0" -[[Conda]] -deps = ["Compat", "JSON", "VersionParsing"] -git-tree-sha1 = "b625d802587c2150c279a40a646fba63f9bd8187" -uuid = "8f4d0f93-b110-5947-807f-2305c1781a2d" -version = "1.2.0" +[[CompilerSupportLibraries_jll]] +deps = ["Libdl", "Pkg"] +git-tree-sha1 = "7c4f882c41faa72118841185afc58a2eb00ef612" +uuid = "e66e0078-7015-5450-92f7-15fbd957f2ae" +version = "0.3.3+0" [[Contour]] -deps = ["LinearAlgebra", "StaticArrays", "Test"] -git-tree-sha1 = "b974e164358fea753ef853ce7bad97afec15bb80" +deps = ["StaticArrays"] +git-tree-sha1 = "0b17db36e7e03f8437e0d1f55aea3e4a60c74353" uuid = "d38c429a-6771-53c6-b99e-75d170b6e991" -version = "0.5.1" +version = "0.5.3" -[[Crayons]] -deps = ["Test"] -git-tree-sha1 = "3017c662a988bcb8a3f43306a793617c6524d476" -uuid = "a8cc5b0e-0ffa-5ad4-8c14-923d3ee1735f" -version = "1.0.0" +[[DataAPI]] +git-tree-sha1 = "176e23402d80e7743fc26c19c681bfb11246af32" +uuid = "9a962f9c-6df0-11e9-0e5d-c546b8b5ee8a" +version = "1.3.0" [[DataStructures]] -deps = ["InteractiveUtils", "OrderedCollections", "Random", "Serialization", "Test"] -git-tree-sha1 = "ca971f03e146cf144a9e2f2ce59674f5bf0e8038" +deps = ["InteractiveUtils", "OrderedCollections"] +git-tree-sha1 = "be680f1ad03c0a03796aa3fda5a2180df7f83b46" uuid = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8" -version = "0.15.0" +version = "0.17.18" + +[[DataValueInterfaces]] +git-tree-sha1 = "bfc1187b79289637fa0ef6d4436ebdfe6905cbd6" +uuid = "e2d170a0-9d28-54be-80f0-106bbe20a464" +version = "1.0.0" [[DataValues]] -deps = ["Dates", "InteractiveUtils", "LinearAlgebra", "Random", "Test"] -git-tree-sha1 = "05e4a87fe52a2af1b4a1ffd3ab2fc996c038b192" +deps = ["DataValueInterfaces", "Dates"] +git-tree-sha1 = "d88a19299eba280a6d062e135a43f00323ae70bf" uuid = "e7dc6d0d-1eca-5fa6-8ad6-5aecde8b7ea5" -version = "0.4.7" +version = "0.4.13" [[Dates]] deps = ["Printf"] @@ -119,103 +129,155 @@ uuid = "ade2ca70-3891-5945-98fb-dc099432e06a" deps = ["Mmap"] uuid = "8bb1440f-4735-579b-a4ab-409b98df4dab" -[[DiffEqDiffTools]] -deps = ["LinearAlgebra", "Test"] -git-tree-sha1 = "4b21dd83c341412a0607334ac64bb5593a4bd583" -uuid = "01453d9d-ee7c-5054-8395-0335cb756afa" -version = "0.8.0" - [[DiffResults]] -deps = ["Compat", "StaticArrays"] -git-tree-sha1 = "34a4a1e8be7bc99bc9c611b895b5baf37a80584c" +deps = ["StaticArrays"] +git-tree-sha1 = "da24935df8e0c6cf28de340b958f6aac88eaa0cc" uuid = "163ba53b-c6d8-5494-b064-1a9d43ac40c5" -version = "0.0.4" +version = "1.0.2" [[DiffRules]] -deps = ["Random", "Test"] -git-tree-sha1 = "dc0869fb2f5b23466b32ea799bd82c76480167f7" +deps = ["NaNMath", "Random", "SpecialFunctions"] +git-tree-sha1 = "eb0c34204c8410888844ada5359ac8b96292cfd1" uuid = "b552c78f-8df3-52c6-915a-8e097449b14b" -version = "0.0.10" +version = "1.0.1" [[Distances]] -deps = ["LinearAlgebra", "Printf", "Random", "Statistics", "Test"] -git-tree-sha1 = "a135c7c062023051953141da8437ed74f89d767a" +deps = ["LinearAlgebra", "Statistics"] +git-tree-sha1 = "23717536c81b63e250f682b0e0933769eecd1411" uuid = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7" -version = "0.8.0" +version = "0.8.2" [[Distributed]] deps = ["Random", "Serialization", "Sockets"] uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b" [[Distributions]] -deps = ["Distributed", "LinearAlgebra", "PDMats", "Printf", "QuadGK", "Random", "SpecialFunctions", "Statistics", "StatsBase", "StatsFuns", "Test"] -git-tree-sha1 = 
"c24e9b6500c037673f0241a2783472b8c3d080c7" +deps = ["FillArrays", "LinearAlgebra", "PDMats", "Printf", "QuadGK", "Random", "SpecialFunctions", "Statistics", "StatsBase", "StatsFuns"] +git-tree-sha1 = "78c4c32a2357a00a0a7d614880f02c2c6e1ec73c" uuid = "31c24e10-a181-5473-b8eb-7969acd0382f" -version = "0.16.4" +version = "0.23.4" + +[[FFMPEG]] +deps = ["FFMPEG_jll"] +git-tree-sha1 = "c82bef6fc01e30d500f588cd01d29bdd44f1924e" +uuid = "c87230d0-a227-11e9-1b43-d7ebe4e7570a" +version = "0.3.0" + +[[FFMPEG_jll]] +deps = ["Bzip2_jll", "FreeType2_jll", "FriBidi_jll", "LAME_jll", "LibVPX_jll", "Libdl", "Ogg_jll", "OpenSSL_jll", "Opus_jll", "Pkg", "Zlib_jll", "libass_jll", "libfdk_aac_jll", "libvorbis_jll", "x264_jll", "x265_jll"] +git-tree-sha1 = "0fa07f43e5609ea54848b82b4bb330b250e9645b" +uuid = "b22a6f82-2f65-5046-a5b2-351ab43fb4e5" +version = "4.1.0+3" [[FFTW]] -deps = ["AbstractFFTs", "BinaryProvider", "Compat", "Conda", "Libdl", "LinearAlgebra", "Reexport", "Test"] -git-tree-sha1 = "29cda58afbf62f35b1a094882ad6c745a47b2eaa" +deps = ["AbstractFFTs", "FFTW_jll", "IntelOpenMP_jll", "Libdl", "LinearAlgebra", "MKL_jll", "Reexport"] +git-tree-sha1 = "14536c95939aadcee44014728a459d2fe3ca9acf" uuid = "7a1cc6ca-52ef-59f5-83cd-3a7055c09341" -version = "0.2.4" +version = "1.2.2" + +[[FFTW_jll]] +deps = ["Libdl", "Pkg"] +git-tree-sha1 = "6c975cd606128d45d1df432fb812d6eb10fee00b" +uuid = "f5851436-0d7a-5f13-b9de-f02708fd171a" +version = "3.3.9+5" [[FileIO]] -deps = ["Pkg", "Random", "Test"] -git-tree-sha1 = "c94b0787956629036fb2b20fccde9e52b89d079a" +deps = ["Pkg"] +git-tree-sha1 = "202335fd24c2776493e198d6c66a6d910400a895" uuid = "5789e2e9-d7fb-5bc7-8068-2c6fae9b9549" -version = "1.0.5" +version = "1.3.0" + +[[FillArrays]] +deps = ["LinearAlgebra", "Random", "SparseArrays"] +git-tree-sha1 = "44f561e293987ffc84272cd3d2b14b0b93123d63" +uuid = "1a297f60-69ca-5386-bcde-b61e274b549b" +version = "0.8.10" + +[[FiniteDiff]] +deps = ["ArrayInterface", "LinearAlgebra", "Requires", "SparseArrays", "StaticArrays"] +git-tree-sha1 = "fec7c2cb45c27071ef487fa7cae4fcac7509aa10" +uuid = "6a86dc24-6348-571c-b903-95158fe2bd41" +version = "2.3.2" [[FixedPointNumbers]] -deps = ["Test"] -git-tree-sha1 = "b8045033701c3b10bf2324d7203404be7aef88ba" +git-tree-sha1 = "3ba9ea634d4c8b289d590403b4a06f8e227a6238" uuid = "53c48c17-4a7d-5ca2-90c5-79b7896eea93" -version = "0.5.3" +version = "0.8.0" [[ForwardDiff]] -deps = ["CommonSubexpressions", "DiffResults", "DiffRules", "InteractiveUtils", "LinearAlgebra", "NaNMath", "Random", "SparseArrays", "SpecialFunctions", "StaticArrays", "Test"] -git-tree-sha1 = "4c4d727f1b7e0092134fabfab6396b8945c1ea5b" +deps = ["CommonSubexpressions", "DiffResults", "DiffRules", "NaNMath", "Random", "SpecialFunctions", "StaticArrays"] +git-tree-sha1 = "869540e4367122fbffaace383a5bdc34d6e5e5ac" uuid = "f6369f11-7733-5829-9624-2563aa707210" -version = "0.10.3" +version = "0.10.10" + +[[FreeType2_jll]] +deps = ["Bzip2_jll", "Libdl", "Pkg", "Zlib_jll"] +git-tree-sha1 = "7d900f32a3788d4eacac2bfa3bf5c770179c8afd" +uuid = "d7e528f0-a631-5988-bf34-fe36492bcfd7" +version = "2.10.1+2" + +[[FriBidi_jll]] +deps = ["Libdl", "Pkg"] +git-tree-sha1 = "2f56bee16bd0151de7b6a1eeea2ced190a2ad8d4" +uuid = "559328eb-81f9-559d-9380-de523a88c83c" +version = "1.0.5+3" [[GR]] -deps = ["Base64", "DelimitedFiles", "LinearAlgebra", "Pkg", "Printf", "Random", "Serialization", "Sockets", "Test"] -git-tree-sha1 = "41bd911efffb56957b45366770eaaa443de3f782" +deps = ["Base64", "DelimitedFiles", "HTTP", "JSON", "LinearAlgebra", "Printf", "Random", 
"Serialization", "Sockets", "Test", "UUIDs"] +git-tree-sha1 = "247adbd2b33c0c4b42efa20d1e807acf6312145f" uuid = "28b8d3ca-fb5f-59d9-8090-bfdbd6d07a71" -version = "0.38.1" +version = "0.50.1" + +[[GeometryTypes]] +deps = ["ColorTypes", "FixedPointNumbers", "LinearAlgebra", "StaticArrays"] +git-tree-sha1 = "34bfa994967e893ab2f17b864eec221b3521ba4d" +uuid = "4d00f742-c7ba-57c2-abde-4428a4b178cb" +version = "0.8.3" + +[[HTTP]] +deps = ["Base64", "Dates", "IniFile", "MbedTLS", "Sockets"] +git-tree-sha1 = "ec87d5e2acbe1693789efbbe14f5ea7525758f71" +uuid = "cd3eb016-35fb-5094-929b-558a96fad6f3" +version = "0.8.15" + +[[IniFile]] +deps = ["Test"] +git-tree-sha1 = "098e4d2c533924c921f9f9847274f2ad89e018b8" +uuid = "83e8ac13-25f8-5344-8a64-a9f2b223428f" +version = "0.5.0" + +[[IntelOpenMP_jll]] +deps = ["Libdl", "Pkg"] +git-tree-sha1 = "fb8e1c7a5594ba56f9011310790e03b5384998d6" +uuid = "1d5cc7b8-4909-519e-a0f8-d0f5ad9712d0" +version = "2018.0.3+0" [[InteractiveUtils]] deps = ["Markdown"] uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" [[Interpolations]] -deps = ["AxisAlgorithms", "LinearAlgebra", "OffsetArrays", "Random", "Ratios", "SharedArrays", "SparseArrays", "StaticArrays", "Test", "WoodburyMatrices"] -git-tree-sha1 = "e8d1c381b1dc5343e5b6d37265acbe1de493d512" +deps = ["AxisAlgorithms", "LinearAlgebra", "OffsetArrays", "Random", "Ratios", "SharedArrays", "SparseArrays", "StaticArrays", "WoodburyMatrices"] +git-tree-sha1 = "2b7d4e9be8b74f03115e64cf36ed2f48ae83d946" uuid = "a98d9a8b-a2ab-59e6-89dd-64a1c18fca59" -version = "0.11.2" - -[[IterableTables]] -deps = ["DataValues", "IteratorInterfaceExtensions", "Requires", "TableTraits", "TableTraitsUtils", "Test"] -git-tree-sha1 = "0eec91e8185899f3926f56db515559bfe95b9db7" -uuid = "1c8ee90f-4401-5389-894e-7a04a3dc0f4d" -version = "0.10.0" +version = "0.12.10" [[IteratorInterfaceExtensions]] -deps = ["Test"] -git-tree-sha1 = "5484e5ede2a4137b9643f4d646e8e7b87b794415" +git-tree-sha1 = "a3f24677c21f5bbe9d2a714f95dcd58337fb2856" uuid = "82899510-4779-5014-852e-03e436cf321d" -version = "0.1.1" +version = "1.0.0" [[JLD2]] -deps = ["CodecZlib", "DataStructures", "FileIO", "LinearAlgebra", "Mmap", "Printf", "Random", "Test"] -git-tree-sha1 = "3ba90ff93e1d5b9b2103588051c2d349fae54dac" +deps = ["CodecZlib", "DataStructures", "FileIO", "Mmap", "Pkg", "Printf", "UUIDs"] +git-tree-sha1 = "e03e697cf84c275ece9cbefd1eabaf49bf5e7254" uuid = "033835bb-8acc-5ee8-8aae-3f567f8a3819" -version = "0.1.2" +version = "0.1.13" [[JSON]] -deps = ["Dates", "Distributed", "Mmap", "Sockets", "Test", "Unicode"] -git-tree-sha1 = "1f7a25b53ec67f5e9422f1f551ee216503f4a0fa" +deps = ["Dates", "Mmap", "Parsers", "Unicode"] +git-tree-sha1 = "b34d7cef7b337321e97d22242c3c2b91f476748e" uuid = "682c06a0-de6a-54ab-a142-c8b1cf79cde6" -version = "0.20.0" +version = "0.21.0" [[KernelDensity]] deps = ["Distributions", "FFTW", "Interpolations", "Optim", "StatsBase", "Test"] @@ -223,9 +285,22 @@ git-tree-sha1 = "c1048817fe5711f699abc8fabd47b1ac6ba4db04" uuid = "5ab0869b-81aa-558d-bb23-cbf5423bbe9b" version = "0.5.1" +[[LAME_jll]] +deps = ["Libdl", "Pkg"] +git-tree-sha1 = "221cc8998b9060677448cbb6375f00032554c4fd" +uuid = "c1c5ebd0-6772-5130-a774-d5fcae4a789d" +version = "3.100.0+1" + [[LibGit2]] +deps = ["Printf"] uuid = "76f85450-5226-5b5a-8eaa-529ad045b433" +[[LibVPX_jll]] +deps = ["Libdl", "Pkg"] +git-tree-sha1 = "e3549ca9bf35feb9d9d954f4c6a9032e92f46e7c" +uuid = "dd192d2f-8180-539f-9fb4-cc70b1dcf69a" +version = "1.8.1+1" + [[Libdl]] uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb" @@ -242,122 +317,183 
@@ uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" [[Logging]] uuid = "56ddb016-857b-54e1-b83d-db4d58db5568" +[[MKL_jll]] +deps = ["IntelOpenMP_jll", "Libdl", "Pkg"] +git-tree-sha1 = "0ce9a7fa68c70cf83c49d05d2c04d91b47404b08" +uuid = "856f044c-d86e-5d09-b602-aeab76dc8ba7" +version = "2020.1.216+0" + [[Markdown]] deps = ["Base64"] uuid = "d6f4376e-aef5-505a-96c1-9c027394607a" +[[MbedTLS]] +deps = ["Dates", "MbedTLS_jll", "Random", "Sockets"] +git-tree-sha1 = "426a6978b03a97ceb7ead77775a1da066343ec6e" +uuid = "739be429-bea8-5141-9913-cc70e7f3736d" +version = "1.0.2" + +[[MbedTLS_jll]] +deps = ["Libdl", "Pkg"] +git-tree-sha1 = "c83f5a1d038f034ad0549f9ee4d5fac3fb429e33" +uuid = "c8ffd9c3-330d-5841-b78e-0817d7145fa1" +version = "2.16.0+2" + [[Measures]] -deps = ["Test"] -git-tree-sha1 = "ddfd6d13e330beacdde2c80de27c1c671945e7d9" +git-tree-sha1 = "e498ddeee6f9fdb4551ce855a46f54dbd900245f" uuid = "442fdcdd-2543-5da2-b0f3-8c86c306513e" -version = "0.3.0" +version = "0.3.1" [[Missings]] -deps = ["Dates", "InteractiveUtils", "SparseArrays", "Test"] -git-tree-sha1 = "d1d2585677f2bd93a97cfeb8faa7a0de0f982042" +deps = ["DataAPI"] +git-tree-sha1 = "de0a5ce9e5289f27df672ffabef4d1e5861247d5" uuid = "e1d29d7a-bbdc-5cf2-9ac0-f12de2c33e28" -version = "0.4.0" +version = "0.4.3" [[Mmap]] uuid = "a63ad114-7e13-5084-954f-fe012c677804" +[[MultivariateStats]] +deps = ["Arpack", "LinearAlgebra", "SparseArrays", "Statistics", "StatsBase"] +git-tree-sha1 = "352fae519b447bf52e6de627b89f448bcd469e4e" +uuid = "6f286f6a-111f-5878-ab1e-185364afe411" +version = "0.7.0" + [[NLSolversBase]] -deps = ["Calculus", "DiffEqDiffTools", "DiffResults", "Distributed", "ForwardDiff", "LinearAlgebra", "Random", "SparseArrays", "Test"] -git-tree-sha1 = "0c6f0e7f2178f78239cfb75310359eed10f2cacb" +deps = ["DiffResults", "Distributed", "FiniteDiff", "ForwardDiff"] +git-tree-sha1 = "7c4e66c47848562003250f28b579c584e55becc0" uuid = "d41bc354-129a-5804-8e4c-c37616107c6c" -version = "7.3.1" +version = "7.6.1" + +[[NNPACK_jll]] +deps = ["Libdl", "Pkg"] +git-tree-sha1 = "c3d1a616362645754b18e12dbba96ec311b0867f" +uuid = "a6bfbf70-4841-5cb9-aa18-3a8ad3c413ee" +version = "2018.6.22+0" [[NNlib]] -deps = ["Libdl", "LinearAlgebra", "Requires", "Statistics", "Test", "TimerOutputs"] +deps = ["Libdl", "LinearAlgebra", "NNPACK_jll", "Pkg", "Requires", "Statistics"] path = "../.." 
uuid = "872c559c-99b0-510c-b3b7-b6c96a88d5cd" -version = "0.4.3+" +version = "0.6.7" [[NaNMath]] -deps = ["Compat"] -git-tree-sha1 = "ce3b85e484a5d4c71dd5316215069311135fa9f2" +git-tree-sha1 = "928b8ca9b2791081dc71a51c55347c27c618760f" uuid = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3" -version = "0.3.2" +version = "0.3.3" [[NearestNeighbors]] -deps = ["Distances", "LinearAlgebra", "Mmap", "StaticArrays", "Test"] -git-tree-sha1 = "f47c5d97cf9a8caefa47e9fa9d99d8fda1a65154" +deps = ["Distances", "StaticArrays"] +git-tree-sha1 = "8bc6180f328f3c0ea2663935db880d34c57d6eae" uuid = "b8a86587-4115-5ab1-83bc-aa920d37bbce" -version = "0.4.3" +version = "0.4.4" [[Observables]] -deps = ["Test"] -git-tree-sha1 = "dc02cec22747d1d10d9f70d8a1c03432b5bfbcd0" +git-tree-sha1 = "11832878355305984235a2e90d0e3737383c634c" uuid = "510215fc-4207-5dde-b226-833fc4488ee2" -version = "0.2.3" +version = "0.3.1" [[OffsetArrays]] -deps = ["DelimitedFiles", "Test"] -git-tree-sha1 = "e6893807f09c1d5517861ded8b203cb96cb7d44a" +git-tree-sha1 = "930db8ef90483570107f2396b1ffc6680f08e8b7" uuid = "6fe1bfb0-de20-5000-8ca7-80f57d26f881" -version = "0.10.0" +version = "1.0.4" + +[[Ogg_jll]] +deps = ["Libdl", "Pkg"] +git-tree-sha1 = "59cf7a95bf5ac39feac80b796e0f39f9d69dc887" +uuid = "e7412a2a-1a6e-54c0-be00-318e2571c051" +version = "1.3.4+0" + +[[OpenBLAS_jll]] +deps = ["CompilerSupportLibraries_jll", "Libdl", "Pkg"] +git-tree-sha1 = "1887096f6897306a4662f7c5af936da7d5d1a062" +uuid = "4536629a-c528-5b80-bd46-f80d51c5b363" +version = "0.3.9+4" + +[[OpenSSL_jll]] +deps = ["Libdl", "Pkg"] +git-tree-sha1 = "d2a6f25262d568b5a7e454cf7ff5066a79d16c7d" +uuid = "458c3c95-2e84-50aa-8efc-19380b2a3a95" +version = "1.1.1+2" + +[[OpenSpecFun_jll]] +deps = ["CompilerSupportLibraries_jll", "Libdl", "Pkg"] +git-tree-sha1 = "d51c416559217d974a1113522d5919235ae67a87" +uuid = "efe28fd5-8261-553b-a9e1-b2916fc3738e" +version = "0.5.3+3" [[Optim]] -deps = ["Calculus", "DiffEqDiffTools", "ForwardDiff", "LineSearches", "LinearAlgebra", "NLSolversBase", "NaNMath", "Parameters", "PositiveFactorizations", "Printf", "Random", "SparseArrays", "StatsBase", "Test"] -git-tree-sha1 = "0f2a6c6ff9db396cc7af15bb1cf057a26662ff17" +deps = ["Compat", "FillArrays", "LineSearches", "LinearAlgebra", "NLSolversBase", "NaNMath", "Parameters", "PositiveFactorizations", "Printf", "SparseArrays", "StatsBase"] +git-tree-sha1 = "33af70b64e8ce2f2b857e3d5de7b71f67715c121" uuid = "429524aa-4258-5aef-a3af-852621145aeb" -version = "0.17.2" +version = "0.21.0" + +[[Opus_jll]] +deps = ["Libdl", "Pkg"] +git-tree-sha1 = "002c18f222a542907e16c83c64a1338992da7e2c" +uuid = "91d4177d-7536-5919-b921-800302f37372" +version = "1.3.1+1" [[OrderedCollections]] -deps = ["Random", "Serialization", "Test"] -git-tree-sha1 = "85619a3f3e17bb4761fe1b1fd47f0e979f964d5b" +git-tree-sha1 = "12ce190210d278e12644bcadf5b21cbdcf225cd3" uuid = "bac558e1-5e72-5ebc-8fee-abe8a469f55d" -version = "1.0.2" +version = "1.2.0" [[PDMats]] deps = ["Arpack", "LinearAlgebra", "SparseArrays", "SuiteSparse", "Test"] -git-tree-sha1 = "b6c91fc0ab970c0563cbbe69af18d741a49ce551" +git-tree-sha1 = "2fc6f50ddd959e462f0a2dbc802ddf2a539c6e35" uuid = "90014a1f-27ba-587c-ab20-58faa44d9150" -version = "0.9.6" +version = "0.9.12" [[Parameters]] -deps = ["Markdown", "OrderedCollections", "REPL", "Test"] -git-tree-sha1 = "70bdbfb2bceabb15345c0b54be4544813b3444e4" +deps = ["OrderedCollections", "UnPack"] +git-tree-sha1 = "38b2e970043613c187bd56a995fe2e551821eb4a" uuid = "d96e819e-fc66-5662-9728-84c9c7592b0a" -version = "0.10.3" +version = "0.12.1" 
+ +[[Parsers]] +deps = ["Dates", "Test"] +git-tree-sha1 = "eb3e09940c0d7ae01b01d9291ebad7b081c844d3" +uuid = "69de0a69-1ddd-5017-9359-2bf0b02dc9f0" +version = "1.0.5" [[Pkg]] -deps = ["Dates", "LibGit2", "Markdown", "Printf", "REPL", "Random", "SHA", "UUIDs"] +deps = ["Dates", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "UUIDs"] uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" [[PlotThemes]] -deps = ["PlotUtils", "Requires", "Test"] -git-tree-sha1 = "f3afd2d58e1f6ac9be2cea46e4a9083ccc1d990b" +deps = ["PlotUtils", "Requires", "Statistics"] +git-tree-sha1 = "c6f5ea535551b3b16835134697f0c65d06c94b91" uuid = "ccf2f8ad-2431-5c83-bf29-c5338b663b6a" -version = "0.3.0" +version = "2.0.0" [[PlotUtils]] -deps = ["Colors", "Dates", "Printf", "Random", "Reexport", "Test"] -git-tree-sha1 = "fd28f30a294a38ec847de95d8ac7ac916ccd7c06" +deps = ["ColorSchemes", "Colors", "Dates", "Printf", "Random", "Reexport", "Statistics"] +git-tree-sha1 = "e18e0e51ff07bf92bb7e06dcb9c082a4e125e20c" uuid = "995b91a9-d308-5afd-9ec6-746e21dbc043" -version = "0.5.5" +version = "1.0.5" [[Plots]] -deps = ["Base64", "Contour", "Dates", "FixedPointNumbers", "GR", "JSON", "LinearAlgebra", "Measures", "NaNMath", "Pkg", "PlotThemes", "PlotUtils", "Printf", "REPL", "Random", "RecipesBase", "Reexport", "Requires", "Showoff", "SparseArrays", "StaticArrays", "Statistics", "StatsBase", "Test", "UUIDs"] -git-tree-sha1 = "c68a9ec8a13a5bdcb85c311378a86b7d7b9b0792" +deps = ["Base64", "Contour", "Dates", "FFMPEG", "FixedPointNumbers", "GR", "GeometryTypes", "JSON", "LinearAlgebra", "Measures", "NaNMath", "PlotThemes", "PlotUtils", "Printf", "REPL", "Random", "RecipesBase", "RecipesPipeline", "Reexport", "Requires", "Showoff", "SparseArrays", "Statistics", "StatsBase", "UUIDs"] +git-tree-sha1 = "e61dab1d6e9c9888f39d7e8ad8e84b301d13518d" uuid = "91a5bcdd-55d7-5caf-9e0b-520d859cae80" -version = "0.23.1" +version = "1.4.0" [[PositiveFactorizations]] deps = ["LinearAlgebra", "Test"] -git-tree-sha1 = "86ae7329c4b5c266acf5c7c524a972300d991e1c" +git-tree-sha1 = "127c47b91990c101ee3752291c4f45640eeb03d1" uuid = "85a6dd25-e78a-55b7-8502-1745935b8125" -version = "0.2.1" +version = "0.2.3" [[Printf]] deps = ["Unicode"] uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7" [[QuadGK]] -deps = ["DataStructures", "LinearAlgebra", "Test"] -git-tree-sha1 = "3ce467a8e76c6030d4c3786e7d3a73442017cdc0" +deps = ["DataStructures", "LinearAlgebra"] +git-tree-sha1 = "dc84e810393cfc6294248c9032a9cdacc14a3db4" uuid = "1fd47b50-473d-5c70-9696-f719f8f3bcdc" -version = "2.0.3" +version = "2.3.1" [[REPL]] deps = ["InteractiveUtils", "Markdown", "Sockets"] @@ -368,16 +504,20 @@ deps = ["Serialization"] uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" [[Ratios]] -deps = ["Compat"] -git-tree-sha1 = "fd159bead0a24e6270fd0573a340312bd4645cc2" +git-tree-sha1 = "37d210f612d70f3f7d57d488cb3b6eff56ad4e41" uuid = "c84ed2f1-dad5-54f0-aa8e-dbefe2724439" -version = "0.3.0" +version = "0.4.0" [[RecipesBase]] -deps = ["Random", "Test"] -git-tree-sha1 = "0b3cb370ee4dc00f47f1193101600949f3dcf884" +git-tree-sha1 = "54f8ceb165a0f6d083f0d12cb4996f5367c6edbc" uuid = "3cdcf5f2-1ef4-517c-9805-6587b60abb01" -version = "0.6.0" +version = "1.0.1" + +[[RecipesPipeline]] +deps = ["Dates", "PlotUtils", "RecipesBase"] +git-tree-sha1 = "9477d23b9ded11153622d8619d0c20c4626a4ac8" +uuid = "01d81517-befc-4cb6-b9ec-a95719d0359c" +version = "0.1.10" [[Reexport]] deps = ["Pkg"] @@ -386,16 +526,22 @@ uuid = "189a3867-3050-52da-a836-e630ba90ab69" version = "0.2.0" [[Requires]] -deps = ["Test"] 
-git-tree-sha1 = "f6fbf4ba64d295e146e49e021207993b6b48c7d1" +deps = ["UUIDs"] +git-tree-sha1 = "d37400976e98018ee840e0ca4f9d20baa231dc6b" uuid = "ae029012-a4dd-5104-9daa-d747884805df" -version = "0.5.2" +version = "1.0.1" [[Rmath]] -deps = ["BinaryProvider", "Libdl", "Random", "Statistics", "Test"] -git-tree-sha1 = "9a6c758cdf73036c3239b0afbea790def1dabff9" +deps = ["Random", "Rmath_jll"] +git-tree-sha1 = "86c5647b565873641538d8f812c04e4c9dbeb370" uuid = "79098fc4-a85e-5d69-aa6a-4863f24498fa" -version = "0.5.0" +version = "0.6.1" + +[[Rmath_jll]] +deps = ["Libdl", "Pkg"] +git-tree-sha1 = "d76185aa1f421306dec73c057aa384bad74188f0" +uuid = "f50d1b31-88e8-58de-be2c-1cc44531875f" +version = "0.2.2+1" [[SHA]] uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce" @@ -408,10 +554,10 @@ deps = ["Distributed", "Mmap", "Random", "Serialization"] uuid = "1a1011a3-84de-559e-8e89-a11a2f7dc383" [[Showoff]] -deps = ["Compat"] -git-tree-sha1 = "276b24f3ace98bec911be7ff2928d497dc759085" +deps = ["Dates"] +git-tree-sha1 = "e032c9df551fb23c9f98ae1064de074111b7bc39" uuid = "992d4aef-0814-514b-bc4d-f2e9a6c4116f" -version = "0.2.1" +version = "0.3.1" [[Sockets]] uuid = "6462fe0b-24de-5631-8697-dd941f90decc" @@ -427,98 +573,133 @@ deps = ["LinearAlgebra", "Random"] uuid = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" [[SpecialFunctions]] -deps = ["BinDeps", "BinaryProvider", "Libdl", "Test"] -git-tree-sha1 = "0b45dc2e45ed77f445617b99ff2adf0f5b0f23ea" +deps = ["OpenSpecFun_jll"] +git-tree-sha1 = "d8d8b8a9f4119829410ecd706da4cc8594a1e020" uuid = "276daf66-3868-5448-9aa4-cd146d93841b" -version = "0.7.2" +version = "0.10.3" [[StaticArrays]] -deps = ["InteractiveUtils", "LinearAlgebra", "Random", "Statistics", "Test"] -git-tree-sha1 = "3841b39ed5f047db1162627bf5f80a9cd3e39ae2" +deps = ["LinearAlgebra", "Random", "Statistics"] +git-tree-sha1 = "5c06c0aeb81bef54aed4b3f446847905eb6cbda0" uuid = "90137ffa-7385-5640-81b9-e52037218182" -version = "0.10.3" +version = "0.12.3" [[Statistics]] deps = ["LinearAlgebra", "SparseArrays"] uuid = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" [[StatsBase]] -deps = ["DataStructures", "DelimitedFiles", "LinearAlgebra", "Missings", "Printf", "Random", "SortingAlgorithms", "SparseArrays", "Statistics", "Test"] -git-tree-sha1 = "435707791dc85a67d98d671c1c3fcf1b20b00f94" +deps = ["DataAPI", "DataStructures", "LinearAlgebra", "Missings", "Printf", "Random", "SortingAlgorithms", "SparseArrays", "Statistics"] +git-tree-sha1 = "a6102b1f364befdb05746f386b67c6b7e3262c45" uuid = "2913bbd2-ae8a-5f71-8c99-4fb6c76f3a91" -version = "0.29.0" +version = "0.33.0" [[StatsFuns]] -deps = ["Rmath", "SpecialFunctions", "Test"] -git-tree-sha1 = "b3a4e86aa13c732b8a8c0ba0c3d3264f55e6bb3e" +deps = ["Rmath", "SpecialFunctions"] +git-tree-sha1 = "04a5a8e6ab87966b43f247920eab053fd5fdc925" uuid = "4c63d2b9-4356-54db-8cca-17b64c39e42c" -version = "0.8.0" +version = "0.9.5" [[StatsPlots]] -deps = ["Clustering", "DataStructures", "DataValues", "Distributions", "IterableTables", "KernelDensity", "Observables", "Plots", "RecipesBase", "Reexport", "StatsBase", "TableTraits", "TableTraitsUtils", "Test", "Widgets"] -git-tree-sha1 = "d722a2d4293ded61124654aae6696c68d7946a95" +deps = ["Clustering", "DataStructures", "DataValues", "Distributions", "Interpolations", "KernelDensity", "MultivariateStats", "Observables", "Plots", "RecipesBase", "RecipesPipeline", "Reexport", "StatsBase", "TableOperations", "Tables", "Widgets"] +git-tree-sha1 = "b9b7fff81f573465fcac4685df1497d968537a9e" uuid = "f3b207a7-027a-5e70-b257-86293d7955fd" -version = "0.10.2" 
+version = "0.14.6" [[SuiteSparse]] -deps = ["Libdl", "LinearAlgebra", "SparseArrays"] +deps = ["Libdl", "LinearAlgebra", "Serialization", "SparseArrays"] uuid = "4607b0f0-06f3-5cda-b6b1-a6196a1729e9" +[[TableOperations]] +deps = ["Tables", "Test"] +git-tree-sha1 = "208630a14884abd110a8f8008b0882f0d0f5632c" +uuid = "ab02a1b2-a7df-11e8-156e-fb1833f50b87" +version = "0.2.1" + [[TableTraits]] -deps = ["IteratorInterfaceExtensions", "Test"] -git-tree-sha1 = "eba4b1d0a82bdd773307d652c6e5f8c82104c676" +deps = ["IteratorInterfaceExtensions"] +git-tree-sha1 = "b1ad568ba658d8cbb3b892ed5380a6f3e781a81e" uuid = "3783bdb8-4a98-5b6b-af9a-565f29a5fe9c" -version = "0.4.1" +version = "1.0.0" -[[TableTraitsUtils]] -deps = ["DataValues", "IteratorInterfaceExtensions", "Missings", "TableTraits", "Test"] -git-tree-sha1 = "55133a5476b61ec31060e555ffe12da27ac13682" -uuid = "382cd787-c1b6-5bf2-a167-d5b971a19bda" -version = "0.4.0" +[[Tables]] +deps = ["DataAPI", "DataValueInterfaces", "IteratorInterfaceExtensions", "LinearAlgebra", "TableTraits", "Test"] +git-tree-sha1 = "c45dcc27331febabc20d86cb3974ef095257dcf3" +uuid = "bd369af6-aec1-5ad0-b16a-f7cc5008161c" +version = "1.0.4" [[Test]] deps = ["Distributed", "InteractiveUtils", "Logging", "Random"] uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40" [[TimerOutputs]] -deps = ["Crayons", "Printf", "Test", "Unicode"] -git-tree-sha1 = "b80671c06f8f8bae08c55d67b5ce292c5ae2660c" +deps = ["Printf"] +git-tree-sha1 = "f458ca23ff80e46a630922c555d838303e4b9603" uuid = "a759f4b9-e2f1-59dc-863e-4aeb61b1ea8f" -version = "0.5.0" +version = "0.5.6" [[TranscodingStreams]] -deps = ["Pkg", "Random", "Test"] -git-tree-sha1 = "8a032ceb5cf7a28bf1bdb77746b250b9e9fda565" +deps = ["Random", "Test"] +git-tree-sha1 = "7c53c35547de1c5b9d46a4797cf6d8253807108c" uuid = "3bb67fe8-82b1-5028-8e26-92a6c54297fa" -version = "0.9.0" - -[[URIParser]] -deps = ["Test", "Unicode"] -git-tree-sha1 = "6ddf8244220dfda2f17539fa8c9de20d6c575b69" -uuid = "30578b45-9adc-5946-b283-645ec420af67" -version = "0.4.0" +version = "0.9.5" [[UUIDs]] deps = ["Random", "SHA"] uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" +[[UnPack]] +git-tree-sha1 = "d4bfa022cd30df012700cf380af2141961bb3bfb" +uuid = "3a884ed6-31ef-47d7-9d2a-63182c4928ed" +version = "1.0.1" + [[Unicode]] uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5" -[[VersionParsing]] -deps = ["Compat"] -git-tree-sha1 = "c9d5aa108588b978bd859554660c8a5c4f2f7669" -uuid = "81def892-9a0e-5fdd-b105-ffc91e053289" -version = "1.1.3" - [[Widgets]] -deps = ["Colors", "Dates", "Observables", "OrderedCollections", "Test"] -git-tree-sha1 = "f48ee34d9495924aba50eeb328d83b0034b787f5" +deps = ["Colors", "Dates", "Observables", "OrderedCollections"] +git-tree-sha1 = "fc0feda91b3fef7fe6948ee09bb628f882b49ca4" uuid = "cc8bc4a8-27d6-5769-a93b-9d913e69aa62" -version = "0.5.0" +version = "0.6.2" [[WoodburyMatrices]] -deps = ["LinearAlgebra", "Random", "SparseArrays", "Test"] -git-tree-sha1 = "21772c33b447757ec7d3e61fcdfb9ea5c47eedcf" +deps = ["LinearAlgebra", "SparseArrays"] +git-tree-sha1 = "28ffe06d28b1ba8fdb2f36ec7bb079fac81bac0d" uuid = "efce3f68-66dc-5838-9240-27a6d6f5f9b6" -version = "0.4.1" +version = "0.5.2" + +[[Zlib_jll]] +deps = ["Libdl", "Pkg"] +git-tree-sha1 = "a2e0d558f6031002e380a90613b199e37a8565bf" +uuid = "83775a58-1f1d-513f-b197-d71354ab007a" +version = "1.2.11+10" + +[[libass_jll]] +deps = ["Bzip2_jll", "FreeType2_jll", "FriBidi_jll", "Libdl", "Pkg", "Zlib_jll"] +git-tree-sha1 = "027a304b2a90de84f690949a21f94e5ae0f92c73" +uuid = "0ac62f75-1d6f-5e53-bd7c-93b484bb37c0" +version 
= "0.14.0+2" + +[[libfdk_aac_jll]] +deps = ["Libdl", "Pkg"] +git-tree-sha1 = "480c7ed04f68ea3edd4c757f5db5b6a0a4e0bd99" +uuid = "f638f0a6-7fb0-5443-88ba-1cc74229b280" +version = "0.1.6+2" + +[[libvorbis_jll]] +deps = ["Libdl", "Ogg_jll", "Pkg"] +git-tree-sha1 = "6a66f65b5275dfa799036c8a3a26616a0a271c4a" +uuid = "f27f6e37-5d2b-51aa-960f-b287f2bc3b7a" +version = "1.3.6+4" + +[[x264_jll]] +deps = ["Libdl", "Pkg"] +git-tree-sha1 = "d89346fe63a6465a9f44e958ac0e3d366af90b74" +uuid = "1270edf5-f2f9-52d2-97e9-ab00b5d0237a" +version = "2019.5.25+2" + +[[x265_jll]] +deps = ["Libdl", "Pkg"] +git-tree-sha1 = "61324ad346b00a6e541896b94201c9426591e43a" +uuid = "dfaa095f-4041-5dcd-9319-2fabd8486b76" +version = "3.0.0+1" diff --git a/test/perf/perf_report.jl b/test/perf/perf_report.jl index 943d487f3..b1010f416 100644 --- a/test/perf/perf_report.jl +++ b/test/perf/perf_report.jl @@ -1,5 +1,7 @@ using JLD2, NNlib, BenchmarkTools +# TODO organize and compare benchmarks using BenchmarkGroups + # We need things to go quickly here BenchmarkTools.DEFAULT_PARAMETERS.samples = 20 BenchmarkTools.DEFAULT_PARAMETERS.seconds = 2.5 @@ -104,11 +106,13 @@ for rank in (2,), end if NNlib.is_nnpack_available() - t_fwd = @benchmark NNlib.maxpool_nnpack!($y, $x, $pdims) + if NNlib.nnpack_supported_operation(pdims) + t_fwd = @benchmark NNlib.maxpool_nnpack!($y, $x, $pdims) - add_result(t_fwd, "maxpool2d", "nnpack", pdims) + add_result(t_fwd, "maxpool2d", "nnpack", pdims) - @show(pdims) - @save "results.jld2" results + @show(pdims) + @save "results.jld2" results + end end end diff --git a/test/pooling.jl b/test/pooling.jl index 7135a3770..9b8458c79 100644 --- a/test/pooling.jl +++ b/test/pooling.jl @@ -316,3 +316,511 @@ end dx = ∇maxpool(dy, y, x, pdims) @test dx[:,:,1,1] == [1.0 0.0 1.0; 0.0 0.0 0.0; 1.0 0.0 1.0] end + +# test "true" strided case, see https://github.com/FluxML/NNlib.jl/issues/205 + + +# obtained with +# using FiniteDifferences +maxpool_answer_nature = Dict( + "rank1" => Dict( + "k2s1p0" => ( # kernel size 2, stride 1, pad 0 + size = (2,), + stride = 1, + pad = 0, + + x = reshape([ + 0.0299635, 0.233456, 0.596161, 0.161514, 0.0094027 + ], 5, 1, 1), # width, channel, batch_size + + dx_maxpool = reshape([ + 0.0, 1.0, 2.0, 1.0, 0.0 + ], 5, 1, 1), + + dx_meanpool = reshape([ + 0.5, 1.0, 1.0, 1.0, 0.5 + ], 5, 1, 1), + ), + "k2s1p1" => ( + size = (2,), + stride = 1, + pad = 1, + + x = reshape([ + 0.0299635, 0.233456, 0.596161, 0.161514, 0.0094027 + ], 5, 1, 1), + + dx_maxpool = reshape([ + 1.0, 1.0, 2.0, 1.0, 1.0 + ], 5, 1, 1), + + dx_meanpool = reshape([ + 1.0, 1.0, 1.0, 1.0, 1.0 + ], 5, 1, 1), + + ), + "k3s1p1" => ( + size = (3,), + stride = 1, + pad = 1, + + x = reshape([ + 0.0299635, 0.233456, 0.596161, 0.161514, 0.0094027 + ], 5, 1, 1), + + dx_maxpool = reshape([ + 0.0, 1.0, 3.0, 1.0, 0.0 + ], 5, 1, 1), + + dx_meanpool = reshape([ + 0.6666666666, 1.0, 1.0, 1.0, 0.6666666666 + ], 5, 1, 1), + + ), + "k3s2p1" => ( + size = (3,), + stride = 2, + pad = 1, + + x = reshape([ + 0.0299635, 0.233456, 0.596161, 0.161514, 0.0094027 + ], 5, 1, 1), + + dx_maxpool = reshape([ + 0.0, 1.0, 1.0, 1.0, 0.0 + ], 5, 1, 1), + + dx_meanpool = reshape([ + 0.333333333, + 0.666666666, + 0.333333333, + 0.666666666, + 0.333333333, + ], 5, 1, 1), + ) + ), + "rank2" => Dict( + "k2s1p0" => ( # kernel size 2, stride 1, pad 0 + size = (2,2), + stride = 1, + pad = 0, + + x = reshape([ + 0.0299635 0.233456 0.596161 0.161514 0.0094027 + 0.389984 0.235158 0.579525 0.301893 0.561358 + 0.0830242 0.483759 0.914904 0.253871 0.820061 + 0.425287 0.53451 
0.0405225 0.729861 0.403925 + 0.473724 0.571418 0.558427 0.552183 0.561624 + ], 5, 5, 1, 1), + + dx_maxpool = reshape([ + 0.0 0.0 2.0 0.0 0.0 + 1.0 0.0 0.0 0.0 1.0 + 0.0 1.0 4.0 0.0 2.0 + 0.0 1.0 0.0 2.0 0.0 + 0.0 2.0 0.0 0.0 0.0 + ], 5, 5, 1, 1), + + dx_meanpool = reshape([ + 0.25 0.5 0.5 0.5 0.25 + 0.5 1.0 1.0 1.0 0.5 + 0.5 1.0 1.0 1.0 0.5 + 0.5 1.0 1.0 1.0 0.5 + 0.25 0.5 0.5 0.5 0.25 + ], 5, 5, 1, 1) + ), + "k2s1p1" => ( + size = (2,2), + stride = 1, + pad = 1, + + x = reshape([ + 0.0299635 0.233456 0.596161 0.161514 0.0094027 + 0.389984 0.235158 0.579525 0.301893 0.561358 + 0.0830242 0.483759 0.914904 0.253871 0.820061 + 0.425287 0.53451 0.0405225 0.729861 0.403925 + 0.473724 0.571418 0.558427 0.552183 0.561624 + ], 5, 5, 1, 1), + + dx_maxpool = reshape([ + 1.0 1.0 4.0 1.0 1.0 + 3.0 0.0 0.0 0.0 2.0 + 0.0 1.0 4.0 0.0 4.0 + 1.0 1.0 0.0 2.0 0.0 + 2.0 4.0 1.0 0.0 3.0 + ], 5, 5, 1, 1), + + dx_meanpool = reshape([ + 1.0 1.0 1.0 1.0 1.0 + 1.0 1.0 1.0 1.0 1.0 + 1.0 1.0 1.0 1.0 1.0 + 1.0 1.0 1.0 1.0 1.0 + 1.0 1.0 1.0 1.0 1.0 + ], 5, 5, 1, 1) + ), + "k3s1p1" => ( + size = (3,3), + stride = 1, + pad = 1, + + x = reshape([ + 0.0299635 0.233456 0.596161 0.161514 0.0094027 + 0.389984 0.235158 0.579525 0.301893 0.561358 + 0.0830242 0.483759 0.914904 0.253871 0.820061 + 0.425287 0.53451 0.0405225 0.729861 0.403925 + 0.473724 0.571418 0.558427 0.552183 0.561624 + ], 5, 5, 1, 1), + + dx_maxpool = reshape([ + 0.0 0.0 3.0 0.0 0.0 + 1.0 0.0 0.0 0.0 1.0 + 0.0 1.0 9.0 0.0 3.0 + 0.0 1.0 0.0 3.0 0.0 + 0.0 3.0 0.0 0.0 0.0 + ], 5, 5, 1, 1), + + dx_meanpool = reshape([ + 0.444444 0.666667 0.666667 0.666667 0.444444 + 0.666667 1.0 1.0 1.0 0.666667 + 0.666667 1.0 1.0 1.0 0.666667 + 0.666667 1.0 1.0 1.0 0.666667 + 0.444444 0.666667 0.666667 0.666667 0.444444 + ], 5, 5, 1, 1) + ), + "k3s2p1" => ( + size = (3,3), + stride = 2, + pad = 1, + + x = reshape([ + 0.0299635 0.233456 0.596161 0.161514 0.0094027 + 0.389984 0.235158 0.579525 0.301893 0.561358 + 0.0830242 0.483759 0.914904 0.253871 0.820061 + 0.425287 0.53451 0.0405225 0.729861 0.403925 + 0.473724 0.571418 0.558427 0.552183 0.561624 + ], 5, 5, 1, 1), + + dx_maxpool = reshape([ + 0.0 0.0 1.0 0.0 0.0 + 1.0 0.0 0.0 0.0 1.0 + 0.0 0.0 1.0 0.0 1.0 + 0.0 1.0 0.0 2.0 0.0 + 0.0 1.0 0.0 0.0 0.0 + ], 5, 5, 1, 1), + + dx_meanpool = reshape([ + 0.111111 0.222222 0.111111 0.222222 0.111111 + 0.222222 0.444444 0.222222 0.444444 0.222222 + 0.111111 0.222222 0.111111 0.222222 0.111111 + 0.222222 0.444444 0.222222 0.444444 0.222222 + 0.111111 0.222222 0.111111 0.222222 0.111111 + ], 5, 5, 1, 1) + ) + ), + "rank3" => Dict( + "k2s1p0" => ( # kernel size 2, stride 1, pad 0 + size = (2,2,2), + stride = 1, + pad = 0, + + x = reshape(cat([ + 0.82584 0.416818 0.92668 0.471931 + 0.798798 0.131608 0.344556 0.79681 + 0.716898 0.320672 0.24453 0.288568 + 0.261484 0.258469 0.121916 0.0685961 + ], + [ + 0.73934 0.16631 0.525109 0.0223458 + 0.164918 0.790875 0.444085 0.469671 + 0.116848 0.359845 0.0653075 0.804886 + 0.525431 0.0402844 0.846814 0.84876 + ], + [ + 0.709245 0.325828 0.715952 0.719116 + 0.576722 0.405659 0.770104 0.259131 + 0.640221 0.28811 0.129229 0.97571 + 0.953795 0.1316 0.94538 0.705337 + ],dims=3), 4,4,3,1,1), + + dx_maxpool = reshape(cat([ + 1.0 0.0 2.0 0.0 + 1.0 0.0 0.0 0.0 + 1.0 0.0 0.0 0.0 + 0.0 0.0 0.0 0.0 + ], + [ + 0.0 0.0 0.0 0.0 + 0.0 5.0 0.0 0.0 + 0.0 0.0 0.0 1.0 + 0.0 0.0 1.0 1.0 + ], + [ + 0.0 0.0 0.0 0.0 + 0.0 0.0 1.0 0.0 + 0.0 0.0 0.0 2.0 + 1.0 0.0 1.0 0.0 + ],dims=3), 4,4,3,1,1), + + dx_meanpool = reshape(cat([ + 0.125 0.25 0.25 0.125 + 0.25 0.5 0.5 0.25 + 
0.25 0.5 0.5 0.25 + 0.125 0.25 0.25 0.125 + ], + [ + 0.25 0.5 0.5 0.25 + 0.5 1.0 1.0 0.5 + 0.5 1.0 1.0 0.5 + 0.25 0.5 0.5 0.25 + ], + [ + 0.125 0.25 0.25 0.125 + 0.25 0.5 0.5 0.25 + 0.25 0.5 0.5 0.25 + 0.125 0.25 0.25 0.125 + ],dims=3), 4,4,3,1,1) + ), + "k2s1p1" => ( + size = (2,2,2), + stride = 1, + pad = 1, + + x = reshape(cat([ + 0.82584 0.416818 0.92668 0.471931 + 0.798798 0.131608 0.344556 0.79681 + 0.716898 0.320672 0.24453 0.288568 + 0.261484 0.258469 0.121916 0.0685961 + ], + [ + 0.73934 0.16631 0.525109 0.0223458 + 0.164918 0.790875 0.444085 0.469671 + 0.116848 0.359845 0.0653075 0.804886 + 0.525431 0.0402844 0.846814 0.84876 + ], + [ + 0.709245 0.325828 0.715952 0.719116 + 0.576722 0.405659 0.770104 0.259131 + 0.640221 0.28811 0.129229 0.97571 + 0.953795 0.1316 0.94538 0.705337 + ],dims=3), 4,4,3,1,1), + + dx_maxpool = reshape(cat([ + 8.0 0.0 8.0 2.0 + 4.0 0.0 1.0 4.0 + 4.0 1.0 0.0 2.0 + 2.0 1.0 1.0 1.0 + ], + [ + 3.0 0.0 0.0 0.0 + 0.0 5.0 0.0 0.0 + 0.0 0.0 0.0 2.0 + 2.0 0.0 2.0 5.0 + ], + [ + 4.0 0.0 2.0 6.0 + 0.0 0.0 4.0 0.0 + 3.0 0.0 0.0 8.0 + 8.0 0.0 6.0 1.0 + ],dims=3), 4,4,3,1,1), + + dx_meanpool = reshape(cat([ + 1.0 1.0 1.0 1.0 + 1.0 1.0 1.0 1.0 + 1.0 1.0 1.0 1.0 + 1.0 1.0 1.0 1.0 + ], + [ + 1.0 1.0 1.0 1.0 + 1.0 1.0 1.0 1.0 + 1.0 1.0 1.0 1.0 + 1.0 1.0 1.0 1.0 + ], + [ + 1.0 1.0 1.0 1.0 + 1.0 1.0 1.0 1.0 + 1.0 1.0 1.0 1.0 + 1.0 1.0 1.0 1.0 + ],dims=3), 4,4,3,1,1) + ), + "k3s1p1" => ( + size = (3,3,2), + stride = 1, + pad = 1, + + x = reshape(cat([ + 0.82584 0.416818 0.92668 0.471931 + 0.798798 0.131608 0.344556 0.79681 + 0.716898 0.320672 0.24453 0.288568 + 0.261484 0.258469 0.121916 0.0685961 + ], + [ + 0.73934 0.16631 0.525109 0.0223458 + 0.164918 0.790875 0.444085 0.469671 + 0.116848 0.359845 0.0653075 0.804886 + 0.525431 0.0402844 0.846814 0.84876 + ], + [ + 0.709245 0.325828 0.715952 0.719116 + 0.576722 0.405659 0.770104 0.259131 + 0.640221 0.28811 0.129229 0.97571 + 0.953795 0.1316 0.94538 0.705337 + ],dims=3), 4,4,3,1,1), + + dx_maxpool = reshape(cat([ + 4.0 0.0 12.0 0.0 + 3.0 0.0 0.0 2.0 + 3.0 1.0 0.0 1.0 + 0.0 0.0 0.0 0.0 + ], + [ + 0.0 0.0 0.0 0.0 + 0.0 5.0 0.0 0.0 + 0.0 0.0 0.0 0.0 + 0.0 0.0 2.0 4.0 + ], + [ + 2.0 0.0 0.0 0.0 + 0.0 0.0 5.0 0.0 + 0.0 0.0 0.0 12.0 + 8.0 0.0 0.0 0.0 + ],dims=3), 4,4,3,1,1), + + dx_meanpool = reshape(cat([ + 0.444444 0.666667 0.666667 0.444444 + 0.666667 1.0 1.0 0.666667 + 0.666667 1.0 1.0 0.666667 + 0.444444 0.666667 0.666667 0.444444 + ], + [ + 0.444444 0.666667 0.666667 0.444444 + 0.666667 1.0 1.0 0.666667 + 0.666667 1.0 1.0 0.666667 + 0.444444 0.666667 0.666667 0.444444 + ], + [ + 0.444444 0.666667 0.666667 0.444444 + 0.666667 1.0 1.0 0.666667 + 0.666667 1.0 1.0 0.666667 + 0.444444 0.666667 0.666667 0.444444 + ],dims=3), 4,4,3,1,1) + ), + "k3s2p1" => ( + size = (3,3,2), + stride = 2, + pad = 1, + + x = reshape(cat([ + 0.82584 0.416818 0.92668 0.471931 + 0.798798 0.131608 0.344556 0.79681 + 0.716898 0.320672 0.24453 0.288568 + 0.261484 0.258469 0.121916 0.0685961 + ], + [ + 0.73934 0.16631 0.525109 0.0223458 + 0.164918 0.790875 0.444085 0.469671 + 0.116848 0.359845 0.0653075 0.804886 + 0.525431 0.0402844 0.846814 0.84876 + ], + [ + 0.709245 0.325828 0.715952 0.719116 + 0.576722 0.405659 0.770104 0.259131 + 0.640221 0.28811 0.129229 0.97571 + 0.953795 0.1316 0.94538 0.705337 + ],dims=3), 4,4,3,1,1), + + dx_maxpool = reshape(cat([ + 1.0 0.0 1.0 0.0 + 1.0 0.0 0.0 1.0 + 0.0 0.0 0.0 0.0 + 0.0 0.0 0.0 0.0 + ], + [ + 0.0 0.0 0.0 0.0 + 0.0 2.0 0.0 0.0 + 0.0 0.0 0.0 0.0 + 0.0 0.0 0.0 0.0 + ], + [ + 0.0 0.0 0.0 0.0 + 0.0 0.0 0.0 0.0 + 
0.0   0.0   0.0   1.0
+                1.0   0.0   0.0   0.0
+            ],dims=3), 4,4,3,1,1),
+
+            dx_meanpool = reshape(cat([
+                0.0555556  0.111111  0.0555556  0.0555556
+                0.111111   0.222222  0.111111   0.111111
+                0.0555556  0.111111  0.0555556  0.0555556
+                0.0555556  0.111111  0.0555556  0.0555556
+            ],
+            [
+                0.0555556  0.111111  0.0555556  0.0555556
+                0.111111   0.222222  0.111111   0.111111
+                0.0555556  0.111111  0.0555556  0.0555556
+                0.0555556  0.111111  0.0555556  0.0555556
+            ],
+            [
+                0.0555556  0.111111  0.0555556  0.0555556
+                0.111111   0.222222  0.111111   0.111111
+                0.0555556  0.111111  0.0555556  0.0555556
+                0.0555556  0.111111  0.0555556  0.0555556
+            ],dims=3), 4,4,3,1,1)
+        )
+    )
+)
+
+
+@testset "more maxpool and meanpool tests" begin
+    # issue #205
+    function check(config, T)
+        # CHECK DEFAULT
+        pdims = PoolDims(config.x, config.size; stride=config.stride, padding=config.pad)
+        x = T.(config.x)
+        y_maxpool = NNlib.maxpool(x, pdims)
+        y_meanpool = NNlib.meanpool(x, pdims)
+        dy = ones(T, size(y_maxpool)...) # size(y_maxpool) == size(y_meanpool)
+        @test isapprox(config.dx_maxpool, NNlib.∇maxpool(dy, y_maxpool, x, pdims), rtol=1e-5)
+        @test isapprox(config.dx_meanpool, NNlib.∇meanpool(dy, y_meanpool, x, pdims), rtol=1e-5)
+        # CHECK DIRECT
+        y_maxpool_dir = NNlib.maxpool_direct(x, pdims)
+        y_meanpool_dir = NNlib.meanpool_direct(x, pdims)
+        @test y_maxpool_dir ≈ y_maxpool atol=1e-6
+        @test isapprox(config.dx_maxpool, NNlib.∇maxpool_direct(dy, y_maxpool_dir, x, pdims), rtol=1e-5)
+        @test isapprox(config.dx_meanpool, NNlib.∇meanpool_direct(dy, y_meanpool_dir, x, pdims), rtol=1e-5)
+
+        # CHECK NNPACK
+        if NNlib.is_nnpack_available() && T == Float32
+            if NNlib.nnpack_supported_operation(pdims)
+                y_maxpool_nnp = NNlib.maxpool_nnpack(x, pdims)
+                @test y_maxpool_nnp ≈ y_maxpool atol=1e-6
+                # NNPACK maxpool gradient still missing
+                #@test isapprox(config.dx_maxpool, NNlib.∇maxpool_nnpack(dy, y_maxpool_nnp, config.x, pdims), rtol=1e-5)
+            end
+        end
+    end
+
+    for (rank_name, config_dict) in maxpool_answer_nature
+        for (setting_name, config) in config_dict
+            for T in (Float32, Float64)
+                check(config, T)
+            end
+        end
+    end
+
+    # issue #210 (pooling with asymmetric padding)
+    x, k = rand(Float32,5,2,1,3), (2,1)
+    pdims1 = NNlib.PoolDims(x, k, padding=1, stride=1)
+    pdims2 = NNlib.PoolDims(x, k, padding=(1,0,0,0), stride=1)
+    @test maxpool(x, pdims1) isa Array{Float32, 4}
+    @test maxpool(x, pdims2) isa Array{Float32, 4}
+    # if NNlib.is_nnpack_available()
+    #     if NNlib.nnpack_supported_operation(pdims1)
+    #         @test NNlib.maxpool_nnpack(x, pdims1) isa Array{Float32, 4}
+    #     end
+    #     if NNlib.nnpack_supported_operation(pdims2)
+    #         print("you should not see this")
+    #         @test NNlib.maxpool_nnpack(x, pdims2) isa Array{Float32, 4}
+    #     end
+    # end
+end
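
Note: the dx reference arrays in the new pooling tests are described as having been obtained with FiniteDifferences.jl. A minimal sketch of how one of them could be regenerated, assuming FiniteDifferences' exported grad and central_fdm and using the rank-1, kernel-2, stride-1, pad-0 case from the table above; this is illustrative only and not part of the test suite:

using NNlib, FiniteDifferences

# Same rank-1 input as maxpool_answer_nature["rank1"]["k2s1p0"].x
x = reshape([0.0299635, 0.233456, 0.596161, 0.161514, 0.0094027], 5, 1, 1)
pdims = NNlib.PoolDims(x, (2,); stride=1, padding=0)

# With dy = ones(size(y)), the pooling pullback dx equals the gradient of
# sum(maxpool(x, pdims)), so a numerical gradient of this scalar loss
# reproduces dx_maxpool (away from ties, where max is not differentiable).
loss(x) = sum(NNlib.maxpool(x, pdims))
dx_ref = FiniteDifferences.grad(central_fdm(5, 1), loss, x)[1]
# dx_ref should match maxpool_answer_nature["rank1"]["k2s1p0"].dx_maxpool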
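
Note: the perf_report.jl change and the commented-out checks above both gate the NNPACK code path on NNlib.nnpack_supported_operation. A small sketch of that gating pattern, under the assumptions that the input is Float32 (NNPACK handles only Float32) and uses the same shape as the issue #210 test; the generic maxpool is the fallback:

using NNlib

x = rand(Float32, 5, 2, 1, 3)                      # (width, height, channels, batch)
pdims = NNlib.PoolDims(x, (2, 1); padding=1, stride=1)

# Use the NNPACK kernel only when the backend is loaded and this particular
# pooling configuration is supported; otherwise fall back to the generic path.
y = if NNlib.is_nnpack_available() && NNlib.nnpack_supported_operation(pdims)
    NNlib.maxpool_nnpack(x, pdims)   # NNPACK fast path
else
    NNlib.maxpool(x, pdims)          # portable fallback
end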