diff --git a/src/CuArrays.jl.save b/src/CuArrays.jl.save
deleted file mode 100644
index 7a5f77ad..00000000
--- a/src/CuArrays.jl.save
+++ /dev/null
@@ -1,91 +0,0 @@
-module CuArrays
-
-using CUDAapi, CUDAdrv, CUDAnative
-
-using GPUArrays
-
-export CuArray, CuVector, CuMatrix, CuVecOrMat, cu
-export CUBLAS, CUSPARSE, CUSOLVER, CUFFT, CURAND, CUDNN, CUTENSOR
-
-import LinearAlgebra
-
-using Adapt
-
-using Libdl
-
-using Requires
-
-
-## source code includes
-
-include("bindeps.jl")
-
-# core array functionality
-include("memory.jl")
-include("array.jl")
-include("gpuarrays.jl")
-include("subarray.jl")
-include("utils.jl")
-
-# integrations and specialized functionality
-include("indexing.jl")
-include("broadcast.jl")
-include("mapreduce.jl")
-include("accumulate.jl")
-include("linalg.jl")
-
-# vendor libraries
-include("blas/CUBLAS.jl")
-include("sparse/CUSPARSE.jl")
-include("solver/CUSOLVER.jl")
-include("fft/CUFFT.jl")
-include("rand/CURAND.jl")
-include("dnn/CUDNN.jl")
-include("tensor/CUTENSOR.jl")
-
-include("deprecated.jl")
-
-include("nnlib.jl")
-
-## initialization
-
-const __initialized__ = Ref(false)
-functional() = __initialized__[]
-
-export has_cudnn, has_cutensor
-has_cudnn() = Libdl.dlopen_e(CUDNN.libcudnn[]) !== C_NULL
-has_cutensor() = Libdl.dlopen_e(CUTENSOR.libcutensor[]) !== C_NULL
-
-function __init__()
-    precompiling = ccall(:jl_generating_output, Cint, ()) != 0
-    silent = parse(Bool, get(ENV, "JULIA_CUDA_SILENT", "false")) || precompiling
-    verbose = parse(Bool, get(ENV, "JULIA_CUDA_VERBOSE", "false"))
-
-    # if any dependent GPU package failed, expect it to have logged an error and bail out
-    if !CUDAdrv.functional() || !CUDAnative.functional()
-        verbose && @warn "CuArrays.jl did not initialize because CUDAdrv.jl or CUDAnative.jl failed to"
-        return
-    end
-
-    try
-        __init_bindeps__(silent=silent, verbose=verbose)
-
-        # package integrations
-        @require ForwardDiff="f6369f11-7733-5829-9624-2563aa707210" include("forwarddiff.jl")
-
-        __init_memory__()
-
-        __initialized__[] = true
-    catch ex
-        # don't actually fail to keep the package loadable
-        if !silent
-            if verbose
-                @error "CuArrays.jl failed to initialize" exception=(ex, catch_backtrace())
-            else
-                @info "CuArrays.jl failed to initialize and will be unavailable (set JULIA_CUDA_SILENT or JULIA_CUDA_VERBOSE to silence or expand this message)"
-            end
-        end
-    end
-end
-
-end # module
diff --git a/src/CuArrays.jl.save.1 b/src/CuArrays.jl.save.1
deleted file mode 100644
index 7a5f77ad..00000000
--- a/src/CuArrays.jl.save.1
+++ /dev/null
@@ -1,91 +0,0 @@
-module CuArrays
-
-using CUDAapi, CUDAdrv, CUDAnative
-
-using GPUArrays
-
-export CuArray, CuVector, CuMatrix, CuVecOrMat, cu
-export CUBLAS, CUSPARSE, CUSOLVER, CUFFT, CURAND, CUDNN, CUTENSOR
-
-import LinearAlgebra
-
-using Adapt
-
-using Libdl
-
-using Requires
-
-
-## source code includes
-
-include("bindeps.jl")
-
-# core array functionality
-include("memory.jl")
-include("array.jl")
-include("gpuarrays.jl")
-include("subarray.jl")
-include("utils.jl")
-
-# integrations and specialized functionality
-include("indexing.jl")
-include("broadcast.jl")
-include("mapreduce.jl")
-include("accumulate.jl")
-include("linalg.jl")
-
-# vendor libraries
-include("blas/CUBLAS.jl")
-include("sparse/CUSPARSE.jl")
-include("solver/CUSOLVER.jl")
-include("fft/CUFFT.jl")
-include("rand/CURAND.jl")
-include("dnn/CUDNN.jl")
-include("tensor/CUTENSOR.jl")
-
-include("deprecated.jl")
-
-include("nnlib.jl")
-
-## initialization
-
-const __initialized__ = Ref(false)
-functional() = __initialized__[]
-
-export has_cudnn, has_cutensor
-has_cudnn() = Libdl.dlopen_e(CUDNN.libcudnn[]) !== C_NULL
-has_cutensor() = Libdl.dlopen_e(CUTENSOR.libcutensor[]) !== C_NULL
-
-function __init__()
-    precompiling = ccall(:jl_generating_output, Cint, ()) != 0
-    silent = parse(Bool, get(ENV, "JULIA_CUDA_SILENT", "false")) || precompiling
-    verbose = parse(Bool, get(ENV, "JULIA_CUDA_VERBOSE", "false"))
-
-    # if any dependent GPU package failed, expect it to have logged an error and bail out
-    if !CUDAdrv.functional() || !CUDAnative.functional()
-        verbose && @warn "CuArrays.jl did not initialize because CUDAdrv.jl or CUDAnative.jl failed to"
-        return
-    end
-
-    try
-        __init_bindeps__(silent=silent, verbose=verbose)
-
-        # package integrations
-        @require ForwardDiff="f6369f11-7733-5829-9624-2563aa707210" include("forwarddiff.jl")
-
-        __init_memory__()
-
-        __initialized__[] = true
-    catch ex
-        # don't actually fail to keep the package loadable
-        if !silent
-            if verbose
-                @error "CuArrays.jl failed to initialize" exception=(ex, catch_backtrace())
-            else
-                @info "CuArrays.jl failed to initialize and will be unavailable (set JULIA_CUDA_SILENT or JULIA_CUDA_VERBOSE to silence or expand this message)"
-            end
-        end
-    end
-end
-
-end # module
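For context, the two deleted `.save` files appear to be stray backup copies of the live `src/CuArrays.jl`, so this diff removes no functionality. Below is a minimal usage sketch of the initialization helpers that module defines (`functional`, `has_cudnn`, `cu`), assuming the live module keeps the same API as the backups shown above; it is not part of the diff, only an illustration of why `__init__` swallows failures instead of erroring at load time.

```julia
# Hypothetical downstream usage; assumes the same exports as src/CuArrays.jl above.
using CuArrays

if CuArrays.functional()
    # Initialization succeeded: `cu` uploads host arrays to the GPU.
    x = cu(rand(Float32, 128, 128))
    y = x * x                       # matrix multiply dispatches to CUBLAS
    has_cudnn() || @warn "libcudnn not found; CUDNN-backed functionality is unavailable"
else
    # CUDA stack missing or broken: fall back to the CPU instead of failing to load.
    x = rand(Float32, 128, 128)
    y = x * x
end
```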