Merge branch 'master' into fix2
mcabbott authored Jul 2, 2020
2 parents 732636c + be66282 commit 39bc6bd
Showing 24 changed files with 1,299 additions and 554 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -10,3 +10,4 @@
deps/usr
deps.jl
*.log
Manifest.toml
30 changes: 19 additions & 11 deletions .travis.yml
@@ -4,28 +4,36 @@ language: julia
os:
- linux
- osx
# - windows

julia:
- 1.0
- 1.3
- 1
- nightly

matrix:
allow_failures:
- julia: nightly

notifications:
email: false

git:
depth: 99999999

env:
# Disable test fuzzing for the moment, as we're a little too slow for Travis
- NNLIB_TEST_FUZZING=false
- NNLIB_TEST_FUZZING=false JULIA_NUM_THREADS=0
- NNLIB_TEST_FUZZING=false JULIA_NUM_THREADS=2

# Submit to Codecov
after_success:
- julia -e 'using Pkg; Pkg.add("Coverage"); using Coverage; Codecov.submit(process_folder())';



jobs:
allow_failures:
- julia: nightly
# include: # no documentation yet
# - stage: "Documentation"
# julia: 1.3
# os: linux
# script:
# - julia --project=docs/ -e 'using Pkg; Pkg.develop(PackageSpec(path=pwd()));
# Pkg.instantiate()'
# - julia --project=docs/ docs/make.jl
# after_success: skip
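
As a side note on the NNLIB_TEST_FUZZING / JULIA_NUM_THREADS env matrix above: these variables only have an effect if the test suite reads them. A minimal sketch of how runtests.jl might consume the flags — the fuzz_conv helper is hypothetical, named here only for illustration:

using Test

# NNLIB_TEST_FUZZING is set by the CI config above; default to "false" locally.
fuzzing = get(ENV, "NNLIB_TEST_FUZZING", "false") == "true"

@testset "threading" begin
    # JULIA_NUM_THREADS from the env matrix determines Threads.nthreads() here.
    @test Threads.nthreads() >= 1
end

if fuzzing
    # fuzz_conv()  # hypothetical randomized convolution checks, too slow for Travis
end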
42 changes: 0 additions & 42 deletions Manifest.toml

This file was deleted.

8 changes: 4 additions & 4 deletions Project.toml
@@ -1,20 +1,20 @@
name = "NNlib"
uuid = "872c559c-99b0-510c-b3b7-b6c96a88d5cd"
version = "0.6.6"
version = "0.7.1"

[deps]
BinaryProvider = "b99e7846-7c00-51b0-8f62-c81ae34c0232"
Compat = "34da2185-b29b-5c13-b0c7-acf172513d20"
Libdl = "8f399da3-3557-5675-b5ff-fb832c97cbdb"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
NNPACK_jll = "a6bfbf70-4841-5cb9-aa18-3a8ad3c413ee"
Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
Requires = "ae029012-a4dd-5104-9daa-d747884805df"
Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"

[compat]
BinaryProvider = "0.5"
Compat = "3.13"
Requires = "0.5, 1.0"
julia = "1"
julia = "1.3"

[extras]
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
10 changes: 6 additions & 4 deletions appveyor.yml
@@ -1,17 +1,18 @@
environment:
matrix:
- julia_version: 1
- julia_version: 1.3
- julia_version: nightly
NNLIB_TEST_FUZZING: "false"

platform:
- x86 # 32-bit
- x64 # 64-bit

# # Uncomment the following lines to allow failures on nightly julia
# # (tests will run but not make your overall status red)
# matrix:
# allow_failures:
# - julia_version: nightly
matrix:
allow_failures:
- julia_version: nightly

branches:
only:
@@ -41,5 +42,6 @@ test_script:
# - echo "%JL_CODECOV_SCRIPT%"
# - C:\julia\bin\julia -e "%JL_CODECOV_SCRIPT%"


after_test:
- C:\julia\bin\julia -e "using Pkg; Pkg.add(\"Coverage\"); using Coverage; Codecov.submit(process_folder())"
50 changes: 0 additions & 50 deletions deps/build.jl

This file was deleted.

11 changes: 8 additions & 3 deletions src/NNlib.jl
@@ -1,14 +1,19 @@
module NNlib
using Pkg
using Requires
using NNPACK_jll

# Include APIs
include("dim_helpers.jl")

# NNPACK support
include(joinpath(@__DIR__, "..", "deps", "deps.jl"))
if check_deps() == nothing

if isdefined(NNPACK_jll, :libnnpack)
include("nnpack/NNPACK.jl")
else
@warn "NNPACK not available for your platform: " *
"$( Pkg.BinaryPlatforms.platform_name(Pkg.BinaryPlatforms.platform_key_abi()))" *
"($( Pkg.BinaryPlatforms.triplet(Pkg.BinaryPlatforms.platform_key_abi())))
You will be able to use only the default Julia NNlib backend"
is_nnpack_available() = false
end

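The practical effect of the block above: when the NNPACK_jll artifact ships a library for the current platform, the NNPACK submodule is loaded; otherwise is_nnpack_available() is defined to return false. A minimal usage sketch from the caller's side, assuming only what the code above defines:

using NNlib

if NNlib.is_nnpack_available()
    @info "NNPACK backend loaded via NNPACK_jll"
else
    @info "NNPACK not available on this platform; using the pure-Julia NNlib kernels"
end
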
24 changes: 10 additions & 14 deletions src/activation.jl
@@ -2,7 +2,7 @@ export σ, sigmoid, hardσ, hardsigmoid, hardtanh, relu, leakyrelu, relu6, rrelu
logsigmoid, logcosh, mish, tanhshrink, softshrink, thresholdrelu, trelu, lisht

## Activation functions
#
# Some activation functions have GPU wrapper functions in CuArrays.jl.
# https://github.com/JuliaGPU/CuArrays.jl/issues/614

@@ -12,15 +12,11 @@ export σ, sigmoid, hardσ, hardsigmoid, hardtanh, relu, leakyrelu, relu6, rrelu
Classic [sigmoid](https://en.wikipedia.org/wiki/Sigmoid_function) activation
function.
"""
σ(x::Real) = one(x) / (one(x) + exp(-x))
const sigmoid = σ

# ForwardDiff numerical stability hack
σ_stable(x::Real) = ifelse(x < -80, zero(x), one(x) / (one(x) + exp(-x)))
σ(x::Float32) = σ_stable(x)
@init @require ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210" begin
σ(x::ForwardDiff.Dual{T,Float32}) where T = σ_stable(x)
function σ(x::Real)
t = exp(-abs(x))
ifelse(x ≥ 0, inv(one(t) + t), t / (one(t) + t))
end
const sigmoid = σ
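
The rewritten σ replaces the Float32-specific σ_stable hack: exp is only ever evaluated at -abs(x), so the intermediate stays finite for any input and no per-type special-casing (or ForwardDiff workaround) is needed. A small illustrative comparison, assuming standard Float32 behaviour:

naive_σ(x)  = one(x) / (one(x) + exp(-x))            # exp(-x) overflows Float32 for x ≲ -88.7
stable_σ(x) = (t = exp(-abs(x)); x ≥ 0 ? inv(one(t) + t) : t / (one(t) + t))

stable_σ(-100f0)   # ≈ 3.8f-44, computed without any overflow
stable_σ( 100f0)   # ≈ 1.0f0
# With the naive form, exp(100f0) == Inf32; the value still rounds to 0.0f0,
# but derivatives computed through it (e.g. with ForwardDiff) turn into NaN.
naive_σ(-100f0)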

"""
hardσ(x, a=0.2) = max(0, min(1.0, a * x + 0.5))
@@ -159,17 +155,17 @@ function selu(x::Real)
end

"""
celu(x, α=1) =
(x ≥ 0 ? x : α * (exp(x/α) - 1))
Continuously Differentiable Exponential Linear Units
See [Continuously Differentiable Exponential Linear Units](https://arxiv.org/pdf/1704.07483.pdf).
"""
celu(x::Real, α::Real = one(x)) = ifelse(x ≥ 0, x / one(x), α * (exp(x/α) - one(x)))
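
The x / one(x) in the positive branch appears to be there so both ifelse branches share a floating-point type even for integer input; a few illustrative spot checks of my own, not taken from the diff:

celu(-1.0)        # ≈ -0.632  = exp(-1) - 1, with the default α = 1
celu(-2.0, 0.5)   # ≈ -0.491  = 0.5 * (exp(-4) - 1)
celu(3, 2.0)      # 3.0 — integer x is promoted by x / one(x), matching the other branch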


"""
trelu(x, theta = 1.0) = x > theta ? x : 0
Threshold Gated Rectified Linear.
See [ThresholdRelu](https://arxiv.org/pdf/1402.3337.pdf)
@@ -218,15 +214,15 @@ See [Tanhshrink Activation Function](https://www.gabormelli.com/RKB/Tanhshrink_A
tanhshrink(x::Real) = x - tanh(x)

"""
softshrink(x, λ=0.5) =
(x ≥ λ ? x - λ : (-λ ≥ x ? x + λ : 0))
See [Softshrink Activation Function](https://www.gabormelli.com/RKB/Softshrink_Activation_Function).
"""
softshrink(x::Real, λ = oftype(x/1, 0.5)) = min(max(zero(x), x - λ), x + λ)
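
The min/max composition is a branch-free rewrite of the three-case definition in the docstring; spot-checking it against the piecewise form (illustrative values):

softshrink( 2.0)        # 1.5   since x ≥ λ gives x - λ
softshrink(-2.0)        # -1.5  since -λ ≥ x gives x + λ
softshrink( 0.3)        # 0.0   inside the dead zone |x| < λ
softshrink( 2.0, 1.0)   # 1.0   with a custom λ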

# Provide an informative error message if activation functions are called with an array
for f in (:σ, :σ_stable, :hardσ, :logσ, :hardtanh, :relu, :leakyrelu, :relu6, :rrelu, :elu, :gelu, :swish, :lisht, :selu, :celu, :trelu, :softsign, :softplus, :logcosh, :mish, :tanhshrink, :softshrink)
for f in (:σ, :hardσ, :logσ, :hardtanh, :relu, :leakyrelu, :relu6, :rrelu, :elu, :gelu, :swish, :lisht, :selu, :celu, :trelu, :softsign, :softplus, :logcosh, :mish, :tanhshrink, :softshrink)
@eval $(f)(x::AbstractArray, args...) =
error("Use broadcasting (`", $(string(f)), ".(x)`) to apply activation functions to arrays.")
end
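
With these generated methods, calling an activation on an array fails with an actionable message instead of a bare MethodError; roughly:

x = randn(Float32, 3)
relu.(x)   # fine: broadcast applies relu element-wise
relu(x)    # ERROR: Use broadcasting (`relu.(x)`) to apply activation functions to arrays.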