diff --git a/src/Tensors.jl b/src/Tensors.jl
index 40f95c46..287487dc 100644
--- a/src/Tensors.jl
+++ b/src/Tensors.jl
@@ -4,6 +4,8 @@ module Tensors
 
 import Base.@pure
 
+import ForwardDiff: Dual, partials, value, Tag
+
 import Statistics
 using Statistics: mean
 using LinearAlgebra
diff --git a/src/automatic_differentiation.jl b/src/automatic_differentiation.jl
index 6d8aeb64..f34cac56 100644
--- a/src/automatic_differentiation.jl
+++ b/src/automatic_differentiation.jl
@@ -1,5 +1,3 @@
-import ForwardDiff: Dual, partials, value, Tag
-
 @static if isdefined(LinearAlgebra, :gradient)
     import LinearAlgebra.gradient
 end
diff --git a/src/eigen.jl b/src/eigen.jl
index 0f9e98e4..4372f8cd 100644
--- a/src/eigen.jl
+++ b/src/eigen.jl
@@ -1,3 +1,7 @@
+# Support functions
+get_max_iterations(::Type{T}) where {T} = 2 * (1 + precision(T) - exponent(floatmin(T)))
+get_max_iterations(::Type{T}) where {T<:Dual{TT,VT}} where {TT,VT} = get_max_iterations(VT)
+
 # MIT License: Copyright (c) 2016: Andy Ferris.
 # See LICENSE.md for further licensing test
 
@@ -134,7 +138,7 @@ function LinearAlgebra.eigen(R::SymmetricTensor{2,3,T′}) where T′
         b22 = a22
 
         # Givens reflections, B' = G^T * B * G, preserve tridiagonal matrices
-        max_iteration = 2 * (1 + precision(T) - exponent(floatmin(T)))
+        max_iteration = get_max_iterations(T)
         if abs(b12) <= abs(b01)
             saveB00, saveB01, saveB11 = b00, b01, b11