Optimizer regression on 1.11 #506
MWE:

Before JuliaLang/julia#51092:

After:

So basically, JuliaLang/julia#48097 got revived by JuliaLang/julia#51092. @aviatesk any thoughts?

Comments

It looks like this doesn't even need overlay methods:

using GPUCompiler

cudacall(f, types::Type, args...; kwargs...) = nothing

function outer(f)
    @inline cudacall(f, Tuple{}; stream=Ref(42), shmem=1)
    return
end

struct TestCompilerParams <: AbstractCompilerParams end

function main()
    source = methodinstance(typeof(outer), Tuple{Nothing})
    target = NativeCompilerTarget()
    params = TestCompilerParams()
    config = CompilerConfig(target, params)
    job = CompilerJob(source, config)
    interp = GPUCompiler.get_interpreter(job)

    println("Native interpreter:")
    display(Base.code_ircode(outer, Tuple{Nothing}))
    println()

    println("GPUCompiler interpreter:")
    display(Base.code_ircode(outer, Tuple{Nothing}; interp))
    return
end

@aviatesk Can you help me debug this? How would you approach this, and/or do you know what could be up here?

MWE without GPUCompiler:

const CC = Core.Compiler
using Core: MethodInstance, CodeInstance, CodeInfo, MethodTable

## code instance cache

struct CodeCache
    dict::IdDict{MethodInstance,Vector{CodeInstance}}

    CodeCache() = new(IdDict{MethodInstance,Vector{CodeInstance}}())
end

function CC.setindex!(cache::CodeCache, ci::CodeInstance, mi::MethodInstance)
    cis = get!(cache.dict, mi, CodeInstance[])
    push!(cis, ci)
end
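# Each MethodInstance maps to a vector of CodeInstances, since separate entries
# (each with its own world range) can coexist for the same method instance.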
## world view of the cache
function CC.haskey(wvc::CC.WorldView{CodeCache}, mi::MethodInstance)
    CC.get(wvc, mi, nothing) !== nothing
end
function CC.get(wvc::CC.WorldView{CodeCache}, mi::MethodInstance, default)
    # check the cache
    for ci in get!(wvc.cache.dict, mi, CodeInstance[])
        if ci.min_world <= wvc.worlds.min_world && wvc.worlds.max_world <= ci.max_world
            # TODO: if (code && (code == jl_nothing || jl_ir_flag_inferred((jl_array_t*)code)))
            src = if ci.inferred isa Vector{UInt8}
                ccall(:jl_uncompress_ir, Any, (Any, Ptr{Cvoid}, Any),
                      mi.def, C_NULL, ci.inferred)
            else
                ci.inferred
            end
            return ci
        end
    end
    return default
end
function CC.getindex(wvc::CC.WorldView{CodeCache}, mi::MethodInstance)
    r = CC.get(wvc, mi, nothing)
    r === nothing && throw(KeyError(mi))
    return r::CodeInstance
end

function CC.setindex!(wvc::CC.WorldView{CodeCache}, ci::CodeInstance, mi::MethodInstance)
    src = if ci.inferred isa Vector{UInt8}
        ccall(:jl_uncompress_ir, Any, (Any, Ptr{Cvoid}, Any),
              mi.def, C_NULL, ci.inferred)
    else
        ci.inferred
    end
    CC.setindex!(wvc.cache, ci, mi)
end
## interpreter
if isdefined(CC, :CachedMethodTable)
    const ExternalMethodTableView = CC.CachedMethodTable{CC.OverlayMethodTable}
    get_method_table_view(world::UInt, mt::MethodTable) =
        CC.CachedMethodTable(CC.OverlayMethodTable(world, mt))
else
    const ExternalMethodTableView = CC.OverlayMethodTable
    get_method_table_view(world::UInt, mt::MethodTable) = CC.OverlayMethodTable(world, mt)
end
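# On Julia versions that provide it, the overlay table is wrapped in a
# CachedMethodTable, which memoizes method lookups during inference; older
# versions use the OverlayMethodTable directly.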
struct ExternalInterpreter <: CC.AbstractInterpreter
    world::UInt
    method_table::ExternalMethodTableView
    code_cache
    inf_cache::Vector{CC.InferenceResult}
end

function ExternalInterpreter(world::UInt=Base.get_world_counter(); method_table, code_cache)
    @assert world <= Base.get_world_counter()
    method_table = get_method_table_view(world, method_table)
    inf_cache = Vector{CC.InferenceResult}()
    return ExternalInterpreter(world, method_table, code_cache, inf_cache)
end
CC.InferenceParams(interp::ExternalInterpreter) = CC.InferenceParams()
CC.OptimizationParams(interp::ExternalInterpreter) = CC.OptimizationParams()
CC.get_world_counter(interp::ExternalInterpreter) = interp.world
CC.get_inference_cache(interp::ExternalInterpreter) = interp.inf_cache
CC.code_cache(interp::ExternalInterpreter) = CC.WorldView(interp.code_cache, interp.world)
# No need to do any locking since we're not putting our results into the runtime cache
CC.lock_mi_inference(interp::ExternalInterpreter, mi::MethodInstance) = nothing
CC.unlock_mi_inference(interp::ExternalInterpreter, mi::MethodInstance) = nothing
function CC.add_remark!(interp::ExternalInterpreter, sv::CC.InferenceState, msg)
    @debug "Inference remark during External compilation of $(sv.linfo): $msg"
end
CC.may_optimize(interp::ExternalInterpreter) = true
CC.may_compress(interp::ExternalInterpreter) = true
CC.may_discard_trees(interp::ExternalInterpreter) = true
CC.verbose_stmt_info(interp::ExternalInterpreter) = false
CC.method_table(interp::ExternalInterpreter) = interp.method_table
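# These overloads plug the custom state into inference: `code_cache` exposes the
# external CodeCache through a WorldView at the interpreter's world, and
# `method_table` makes method lookup consult the overlay table before the global one.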
# main
Base.Experimental.@MethodTable(GLOBAL_METHOD_TABLE)
inner(f, types::Type, args...; kwargs...) = nothing
outer(f) = @inline inner(f, Tuple{}; foo=Ref(42), bar=1)
function main()
    println("Native:")
    display(Base.code_ircode(outer, Tuple{Nothing}))
    println()

    println("External:")
    interp = ExternalInterpreter(; method_table=GLOBAL_METHOD_TABLE, code_cache=CodeCache())
    display(Base.code_ircode(outer, Tuple{Nothing}; interp))
    return
end
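# If the regression reproduces, the two IR listings should differ, with the
# keyword-argument call presumably surviving optimization under the external
# interpreter while the native pipeline folds it away.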
isinteractive() || main()

Closing this in favor of JuliaLang/julia#52938