From 325d68109122e7799ad117de3330ba637af1306b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Johan=20Bl=C3=A5b=C3=A4ck?=
Date: Tue, 6 Sep 2022 15:00:00 +0200
Subject: [PATCH] Switching acosh_abs to acosh_nan as was done for the logs

This also removes the regularizing '1' that was present in the old
definition. Instead the test domain is shifted.
---
 src/Core.jl                    | 2 +-
 src/Operators.jl               | 7 ++++---
 src/Options.jl                 | 4 ++--
 src/SymbolicRegression.jl      | 4 ++--
 test/test_operators.jl         | 4 +++-
 test/test_tree_construction.jl | 8 ++++++--
 6 files changed, 18 insertions(+), 11 deletions(-)

diff --git a/src/Core.jl b/src/Core.jl
index 314dd7252..7dc17425e 100644
--- a/src/Core.jl
+++ b/src/Core.jl
@@ -35,7 +35,7 @@ import .OperatorsModule:
     log10_nan,
     log1p_nan,
     sqrt_abs,
-    acosh_abs,
+    acosh_nan,
     neg,
     greater,
     greater,
diff --git a/src/Operators.jl b/src/Operators.jl
index d5d655e8b..8f7ea89e2 100644
--- a/src/Operators.jl
+++ b/src/Operators.jl
@@ -59,8 +59,9 @@ function log1p_nan(x::T)::T where {T<:Real}
     x <= T(-1) && return T(NaN)
     return log1p(x)
 end
-function acosh_abs(x::T)::T where {T<:Real}
-    return acosh(abs(x) + convert(T, 1))
+function acosh_nan(x::T)::T where {T<:Real}
+    x < T(1) && return T(NaN)
+    return acosh(x)
 end
 
 # Generics:
@@ -75,7 +76,7 @@ log_nan(x) = log(x)
 log2_nan(x) = log2(x)
 log10_nan(x) = log10(x)
 log1p_nan(x) = log1p(x)
-acosh_abs(x) = acosh(abs(x) + 1)
+acosh_nan(x) = acosh(x)
 
 function sqrt_abs(x::T)::T where {T}
     return sqrt(abs(x))
diff --git a/src/Options.jl b/src/Options.jl
index b27fa21a2..30e89ab10 100644
--- a/src/Options.jl
+++ b/src/Options.jl
@@ -19,7 +19,7 @@ import ..OperatorsModule:
     log2_nan,
     log1p_nan,
     sqrt_abs,
-    acosh_abs,
+    acosh_nan,
     atanh_clip
 import ..EquationModule: Node, string_tree
 import ..OptionsStructModule: Options, ComplexityMapping
@@ -112,7 +112,7 @@ function unaopmap(op)
     elseif op == sqrt
         return sqrt_abs
     elseif op == acosh
-        return acosh_abs
+        return acosh_nan
     elseif op == atanh
         return atanh_clip
     end
diff --git a/src/SymbolicRegression.jl b/src/SymbolicRegression.jl
index 519add720..11e8d5839 100644
--- a/src/SymbolicRegression.jl
+++ b/src/SymbolicRegression.jl
@@ -39,7 +39,7 @@ export Population,
     log2_nan,
     log10_nan,
     log1p_nan,
-    acosh_abs,
+    acosh_nan,
     sqrt_abs,
     neg,
     greater,
@@ -130,7 +130,7 @@ import .CoreModule:
     log10_nan,
     log1p_nan,
     sqrt_abs,
-    acosh_abs,
+    acosh_nan,
     neg,
     greater,
     greater,
diff --git a/test/test_operators.jl b/test/test_operators.jl
index b2ba99053..0facd6243 100644
--- a/test/test_operators.jl
+++ b/test/test_operators.jl
@@ -10,7 +10,7 @@ using SymbolicRegression:
     log2_nan,
     log10_nan,
     sqrt_abs,
-    acosh_abs,
+    acosh_nan,
     neg,
     greater,
     greater,
@@ -33,6 +33,8 @@ for T in types_to_test
     @test isnan(log2_nan(-val))
     @test abs(log10_nan(val) - log10(val)) < 1e-6
     @test isnan(log10_nan(-val))
+    @test abs(acosh_nan(val2) - acosh(val2)) < 1e-6
+    @test isnan(acosh_nan(-val2))
     @test neg(-val) == val
     @test sqrt_abs(val) == sqrt(val)
     @test mult(val, val2) == val * val2
diff --git a/test/test_tree_construction.jl b/test/test_tree_construction.jl
index 64b276534..be26cc693 100644
--- a/test/test_tree_construction.jl
+++ b/test/test_tree_construction.jl
@@ -8,7 +8,7 @@ include("test_params.jl")
 x1 = 2.0
 
 # Initialize functions in Base....
-for unaop in [cos, exp, log_nan, log2_nan, log10_nan, relu, gamma, acosh_abs]
+for unaop in [cos, exp, log_nan, log2_nan, log10_nan, relu, gamma, acosh_nan]
     for binop in [sub]
         function make_options(; kw...)
             return Options(;
@@ -56,12 +56,16 @@ for unaop in [cos, exp, log_nan, log2_nan, log10_nan, relu, gamma, acosh_abs]
 
         Random.seed!(0)
        N = 100
-        if unaop in [log_nan, log2_nan, log10_nan]
+        if unaop in [log_nan, log2_nan, log10_nan, acosh_nan]
            X = T.(rand(MersenneTwister(0), 5, N) / 3)
         else
            X = T.(randn(MersenneTwister(0), 5, N) / 3)
         end
         X = X + sign.(X) * T(0.1)
+        if unaop == acosh_nan
+            X = X .+ T(1.0)
+        end
+
         y = T.(f_true.(X[1, :]))
         dataset = Dataset(X, y)
         test_y, complete = eval_tree_array(tree, X, make_options())
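
Editor's note (illustration only, not part of the patch): a minimal sketch of the behavioral change in plain Julia. acosh_nan and acosh_abs_old below are local copies written for comparison, not imports from SymbolicRegression.jl.

    # New behavior: return NaN outside acosh's domain so an invalid input
    # poisons the loss, mirroring log_nan / log2_nan / log10_nan.
    acosh_nan(x::T) where {T<:Real} = x < T(1) ? T(NaN) : acosh(x)

    # Old behavior, kept here for comparison: regularize with abs and '+1'
    # so the call never fails, at the cost of silently remapping the input.
    acosh_abs_old(x::T) where {T<:Real} = acosh(abs(x) + one(T))

    acosh_nan(0.5)      # NaN: invalid input is reported, not remapped
    acosh_nan(1.5)      # ≈ 0.9624, identical to acosh(1.5)
    acosh_abs_old(0.5)  # ≈ 0.9624, because 0.5 was remapped to 1.5

The domain shift in test_tree_construction.jl follows the same idea: the sampled X values land roughly in (0.1, 0.43), so adding 1 keeps every input to acosh_nan at or above 1 and the evaluation test never hits the NaN branch.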