Merge pull request #603 from LuxDL/ap/load_times
remove `NNPACK` and move `ForwardDiff` to an extension
avik-pal authored Aug 29, 2024
2 parents 013aa51 + 393e830 commit f76a38d
Showing 16 changed files with 15 additions and 599 deletions.
Project.toml (7 changes: 3 additions & 4 deletions)
@@ -9,16 +9,15 @@ ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4"
 GPUArraysCore = "46192b85-c4d5-4398-a991-12ede77f4527"
 KernelAbstractions = "63c18a36-062a-441e-b654-da1e3ab1ce7c"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
-Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
 Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
-Requires = "ae029012-a4dd-5104-9daa-d747884805df"
 Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
 
 [weakdeps]
 AMDGPU = "21141c5a-9bdb-4563-92ae-f87d6854732e"
 CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba"
 EnzymeCore = "f151be2c-9106-41f4-ab19-57ee4f262869"
 FFTW = "7a1cc6ca-52ef-59f5-83cd-3a7055c09341"
+ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
 cuDNN = "02a925ec-e4fe-4b08-9a7e-0d78e3d38ccd"
 
 [extensions]
@@ -27,6 +26,7 @@ NNlibCUDACUDNNExt = ["CUDA", "cuDNN"]
 NNlibCUDAExt = "CUDA"
 NNlibEnzymeCoreExt = "EnzymeCore"
 NNlibFFTWExt = "FFTW"
+NNlibForwardDiffExt = "ForwardDiff"
 
 [compat]
 AMDGPU = "0.9.4, 1"
@@ -36,12 +36,11 @@ CUDA = "4, 5"
 ChainRulesCore = "1.13"
 EnzymeCore = "0.5, 0.6, 0.7"
 FFTW = "1.8.0"
+ForwardDiff = "0.10.36"
 GPUArraysCore = "0.1"
 KernelAbstractions = "0.9.2"
 LinearAlgebra = "<0.0.1, 1"
-Pkg = "<0.0.1, 1"
 Random = "<0.0.1, 1"
-Requires = "1.0"
 Statistics = "1"
 cuDNN = "1"
 julia = "1.9"
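
The dependency moves above use Julia's package-extension mechanism (available since Julia 1.9, matching the `julia = "1.9"` compat bound): `ForwardDiff` becomes a weak dependency, so `NNlibForwardDiffExt` is compiled and loaded only when the user's environment loads ForwardDiff alongside NNlib, rather than NNlib paying for it unconditionally. A minimal sketch of the resulting session behavior (illustrative, not part of this diff):

```julia
using NNlib
# No ForwardDiff in the session yet, so the extension is absent:
Base.get_extension(NNlib, :NNlibForwardDiffExt)  # returns `nothing`

using ForwardDiff  # the code loader sees the weak dep and loads the extension
Base.get_extension(NNlib, :NNlibForwardDiffExt)  # returns the extension module
```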
benchmark/perf_report.jl (15 changes: 0 additions & 15 deletions)
@@ -37,10 +37,6 @@ for rank in (2,),
 (NNlib.depthwiseconv_im2col!, NNlib.∇depthwiseconv_data_im2col!, NNlib.∇depthwiseconv_filter_im2col!, DepthwiseConvDims, "im2col"),
 ]
 
-if NNlib.is_nnpack_available()
-    push!(benchmark_items, (NNlib.conv_nnpack!, NNlib.∇conv_data_nnpack!, NNlib.∇conv_filter_nnpack!, DenseConvDims, "nnpack"))
-end
-
 for (conv!, ∇conv_data!, ∇conv_filter!, cT, backend) in benchmark_items
 
 x = zeros(Float32, repeat([N], rank)..., C_in, 1)
@@ -105,15 +101,4 @@ for rank in (2,),
 @show(pdims)
 @save "results.jld2" results
 end
-
-if NNlib.is_nnpack_available()
-    if NNlib.nnpack_supported_operation(pdims)
-        t_fwd = @benchmark NNlib.maxpool_nnpack!($y, $x, $pdims)
-
-        add_result(t_fwd, "maxpool2d", "nnpack", pdims)
-
-        @show(pdims)
-        @save "results.jld2" results
-    end
-end
 end
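
With the NNPACK entries gone, the script still benchmarks the default, `_direct`, and `_im2col` convolution backends. For reference, a single measurement in the style of this harness looks like the sketch below; the shapes are illustrative choices, not the script's actual parameters:

```julia
using BenchmarkTools, NNlib

x = zeros(Float32, 32, 32, 3, 1)    # width x height x channels_in x batch
w = zeros(Float32, 3, 3, 3, 16)     # kernel_w x kernel_h x channels_in x channels_out
cdims = DenseConvDims(x, w)
y = NNlib.conv(x, w, cdims)         # pre-allocate the output for the in-place kernel
t_fwd = @benchmark NNlib.conv_im2col!($y, $x, $w, $cdims)
```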
ext/NNlibForwardDiffExt.jl (9 changes: 9 additions & 0 deletions)
@@ -0,0 +1,9 @@
+module NNlibForwardDiffExt
+
+using ForwardDiff: ForwardDiff
+using NNlib: NNlib
+
+NNlib.within_gradient(x::ForwardDiff.Dual) = true
+NNlib.within_gradient(x::AbstractArray{<:ForwardDiff.Dual}) = true
+
+end
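
These two methods make `NNlib.within_gradient` report `true` whenever it sees ForwardDiff's `Dual` tracing type, so layer code can branch on whether it is currently being differentiated. A small check of the behavior (`f` is an illustrative function, not from the codebase):

```julia
using NNlib, ForwardDiff

f(x) = NNlib.within_gradient(x) ? 2x : 10x

f(0.5)                          # 5.0 (plain numbers take the 10x branch)
ForwardDiff.derivative(f, 0.5)  # 2.0 (inside `derivative`, x is a Dual, so f is 2x)
```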
src/NNlib.jl (20 changes: 0 additions & 20 deletions)
@@ -12,9 +12,7 @@ using KernelAbstractions: @atomic
 using LinearAlgebra
 using LinearAlgebra.BLAS: @blasfunc, BlasInt
 using LinearAlgebra: AdjOrTransAbsMat, Adjoint, BlasFloat, Transpose
-using Pkg
 using Random
-using Requires
 using Statistics
 using Statistics: mean

@@ -24,19 +22,6 @@ const Numeric = Union{AbstractArray{<:T}, T} where {T<:Number}
 include("dim_helpers.jl")
 export ConvDims, DenseConvDims, PoolDims, DepthwiseConvDims
 
-is_nnpack_available() = false
-
-@init @require NNPACK_jll="a6bfbf70-4841-5cb9-aa18-3a8ad3c413ee" begin
-    if isdefined(NNPACK_jll, :libnnpack)
-        include("nnpack/NNPACK.jl")
-    else
-        @warn "NNPACK not available for your platform: " *
-              "$( Pkg.BinaryPlatforms.platform_name(Pkg.BinaryPlatforms.platform_key_abi()))" *
-              "($( Pkg.BinaryPlatforms.triplet(Pkg.BinaryPlatforms.platform_key_abi())))
-              You will be able to use only the default Julia NNlib backend"
-    end
-end
-
 include("activations.jl")
 for f in ACTIVATIONS
     @eval export $(f)
@@ -95,11 +80,6 @@ export upsample_nearest, ∇upsample_nearest,
 include("gather.jl")
 include("scatter.jl")
 include("utils.jl")
-@init @require ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210" begin
-    using .ForwardDiff
-    within_gradient(x::ForwardDiff.Dual) = true
-    within_gradient(x::AbstractArray{<:ForwardDiff.Dual}) = true
-end
 
 include("sampling.jl")
 include("functions.jl")
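
Both deleted blocks were Requires.jl hooks that ran at `__init__` time; together with dropping `Pkg` and `Requires` from the hard dependencies, this is the load-time saving the branch name `ap/load_times` points at. A rough way to compare (numbers are machine-dependent and not reported in this PR):

```julia
# Run in a fresh Julia process on each side of this commit:
@time using NNlib
```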
src/conv.jl (27 changes: 2 additions & 25 deletions)
@@ -76,7 +76,7 @@ end
 # Let's generate auto-allocating versions of all our functions, for all backends.
 # We `@timeit` these methods separately, as we want to know how much time is spent in
 # allocation. :P
-for backend in (Symbol(), :_direct, :_im2col, :_nnpack)
+for backend in (Symbol(), :_direct, :_im2col)
     # First make auto-allocating versions of the conv()-like calls:
     for name in (:conv, :depthwiseconv)
         @eval begin
@@ -134,7 +134,7 @@ end
 # since we can specialize on sizes.
 for front_name in (:conv, :∇conv_data, :∇conv_filter,
                    :depthwiseconv, :∇depthwiseconv_data, :∇depthwiseconv_filter)
-    for backend in (Symbol(), :_direct, :_im2col) ## NNPACK is only for 2d conv
+    for backend in (Symbol(), :_direct, :_im2col)
         for N in (3, 4)
             @eval begin
                 function $(Symbol("$(front_name)$(backend)!"))(
@@ -381,26 +381,3 @@ function rrule(::typeof(∇conv_filter), x, dy, cdims; kw...)
     end
     return ∇conv_filter(x, dy, cdims; kw...), ∇conv_filter_pullback
 end
-
-# Use NNPACK if it is available and the operation is supported
-# commented out 'till proper benchmarking and more correctness test are performed
-# if is_nnpack_available()
-#     function conv(x::Array{Float32, 4}, w::Array{Float32, 4},
-#                   cdims::DenseConvDims{2, K, C_in, C_out, (1, 1), P, (1, 1), F};
-#                   kwargs...) where {K, C_in, C_out, P, F}
-#         return conv_nnpack(x, w, cdims; kwargs...)
-#     end
-
-#     function ∇conv_data(dy::Array{Float32, 4}, w::Array{Float32, 4},
-#                         cdims::DenseConvDims{2, K, C_in, C_out, (1, 1), P, (1, 1), F};
-#                         kwargs...) where {K, C_in, C_out, P, F}
-#         return ∇conv_data_nnpack(dy, w, cdims; kwargs...)
-#     end
-
-#     function ∇conv_filter(x::Array{Float32, 4}, dy::Array{Float32, 4},
-#                           cdims::DenseConvDims{2, K, C_in, C_out, (1, 1), P, (1, 1), F};
-#                           kwargs...) where {K, C_in, C_out, P, F}
-#         return ∇conv_filter_nnpack(x, dy, cdims; kwargs...)
-#     end
-# end
-########################################################
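
For orientation, the loops whose backend tuples shrink above use `@eval` to stamp out one auto-allocating wrapper per (name, backend) pair. The generated method looks roughly like this reconstruction (illustrative, not the verbatim expansion):

```julia
# Sketch of the wrapper generated for name = :conv, backend = :_im2col:
# allocate `y` from the ConvDims, then forward to the in-place kernel.
function conv_im2col(x::AbstractArray{xT,N}, w::AbstractArray{wT,N},
                     cdims::ConvDims; kwargs...) where {xT, wT, N}
    y = similar(x, promote_type(xT, wT), NNlib.output_size(cdims)...,
                NNlib.channels_out(cdims), size(x, N))
    return NNlib.conv_im2col!(y, x, w, cdims; kwargs...)
end
```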
src/nnpack/NNPACK.jl (55 changes: 0 additions & 55 deletions)

This file was deleted.

src/nnpack/error.jl (83 changes: 0 additions & 83 deletions)

This file was deleted.

src/nnpack/impl.jl (50 changes: 0 additions & 50 deletions)

This file was deleted.

src/nnpack/interface.jl (44 changes: 0 additions & 44 deletions)

This file was deleted.
[Diff view truncated: the remaining changed files did not load.]
