cmd/compile: mark architecture-specific unsafe points
Introduce a mechanism for marking architecture-specific Ops as
unsafe points, and use it to mark the ARM64 ops that use REGTMP:
async preemption will use REGTMP as a temporary register in the
injected call, so it must not preempt while REGTMP holds a live value.

Change-Id: I8ff22e87d8f9cb10d02a2f0af7c12ad6d7d58f54
Reviewed-on: https://go-review.googlesource.com/c/go/+/203459
Run-TryBot: Cherry Zhang <[email protected]>
Reviewed-by: Austin Clements <[email protected]>
cherrymui committed Nov 5, 2019
1 parent 0f992b9 commit 4a7ed1f
Showing 5 changed files with 32 additions and 8 deletions.
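For orientation before the per-file diffs, here is a small, self-contained Go sketch of the pattern this change wires up (illustrative only; the names and table layout are simplified, not the compiler's actual generated code): each Op carries a static unsafePoint flag in a per-opcode info table, and callers query it through an accessor method.

    package main

    import "fmt"

    // Op is a simplified stand-in for the SSA Op type.
    type Op int32

    // opInfo models the static per-opcode record held in opcodeTable.
    type opInfo struct {
        name        string
        unsafePoint bool // op is not safe for async preemption
    }

    const (
        OpAtomicAdd64 Op = iota // stands in for ARM64 LoweredAtomicAdd64
        OpLoad                  // stands in for an ordinary load
    )

    var opcodeTable = [...]opInfo{
        OpAtomicAdd64: {name: "LoweredAtomicAdd64", unsafePoint: true},
        OpLoad:        {name: "MOVDload"},
    }

    // UnsafePoint mirrors the accessor that ssa/gen/main.go now emits.
    func (o Op) UnsafePoint() bool { return opcodeTable[o].unsafePoint }

    func main() {
        for _, op := range []Op{OpAtomicAdd64, OpLoad} {
            fmt.Printf("%-18s unsafePoint=%v\n", opcodeTable[op].name, op.UnsafePoint())
        }
    }

Liveness can then treat any value whose Op reports UnsafePoint() as a point where async preemption must not be injected, which is what the plive.go change below does.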
9 changes: 9 additions & 0 deletions src/cmd/compile/internal/gc/plive.go
@@ -639,6 +639,15 @@ func (lv *Liveness) markUnsafePoints() {

lv.unsafePoints = bvalloc(int32(lv.f.NumValues()))

// Mark architecture-specific unsafe points.
for _, b := range lv.f.Blocks {
for _, v := range b.Values {
if v.Op.UnsafePoint() {
lv.unsafePoints.Set(int32(v.ID))
}
}
}

// Mark write barrier unsafe points.
for _, wbBlock := range lv.f.WBLoads {
if wbBlock.Kind == ssa.BlockPlain && len(wbBlock.Values) == 0 {
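The unsafePoints set filled in above is a bitvector indexed by SSA value ID. As a rough illustration of the bvalloc/Set/Get pattern used here (a minimal, self-contained stand-in, not the gc package's actual bvec implementation):

    package main

    import "fmt"

    // bvec is a minimal stand-in for the compiler's bitvector type,
    // indexed here by SSA value ID.
    type bvec struct {
        n     int32
        words []uint32
    }

    func bvalloc(n int32) bvec { return bvec{n: n, words: make([]uint32, (n+31)/32)} }

    func (bv bvec) Set(i int32)      { bv.words[i/32] |= 1 << uint(i%32) }
    func (bv bvec) Get(i int32) bool { return bv.words[i/32]&(1<<uint(i%32)) != 0 }

    func main() {
        unsafePoints := bvalloc(64)       // one bit per SSA value
        unsafePoints.Set(17)              // e.g. the ID of a LoweredAtomicAdd64 value
        fmt.Println(unsafePoints.Get(17)) // true
        fmt.Println(unsafePoints.Get(18)) // false
    }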
16 changes: 8 additions & 8 deletions src/cmd/compile/internal/ssa/gen/ARM64Ops.go
@@ -620,17 +620,17 @@ func init() {
// LDAXR (Rarg0), Rout
// STLXR Rarg1, (Rarg0), Rtmp
// CBNZ Rtmp, -2(PC)
{name: "LoweredAtomicExchange64", argLength: 3, reg: gpxchg, resultNotInArgs: true, faultOnNilArg0: true, hasSideEffects: true},
{name: "LoweredAtomicExchange32", argLength: 3, reg: gpxchg, resultNotInArgs: true, faultOnNilArg0: true, hasSideEffects: true},
{name: "LoweredAtomicExchange64", argLength: 3, reg: gpxchg, resultNotInArgs: true, faultOnNilArg0: true, hasSideEffects: true, unsafePoint: true},
{name: "LoweredAtomicExchange32", argLength: 3, reg: gpxchg, resultNotInArgs: true, faultOnNilArg0: true, hasSideEffects: true, unsafePoint: true},

// atomic add.
// *arg0 += arg1. arg2=mem. returns <new content of *arg0, memory>. auxint must be zero.
// LDAXR (Rarg0), Rout
// ADD Rarg1, Rout
// STLXR Rout, (Rarg0), Rtmp
// CBNZ Rtmp, -3(PC)
{name: "LoweredAtomicAdd64", argLength: 3, reg: gpxchg, resultNotInArgs: true, faultOnNilArg0: true, hasSideEffects: true},
{name: "LoweredAtomicAdd32", argLength: 3, reg: gpxchg, resultNotInArgs: true, faultOnNilArg0: true, hasSideEffects: true},
{name: "LoweredAtomicAdd64", argLength: 3, reg: gpxchg, resultNotInArgs: true, faultOnNilArg0: true, hasSideEffects: true, unsafePoint: true},
{name: "LoweredAtomicAdd32", argLength: 3, reg: gpxchg, resultNotInArgs: true, faultOnNilArg0: true, hasSideEffects: true, unsafePoint: true},

// atomic add variant.
// *arg0 += arg1. arg2=mem. returns <new content of *arg0, memory>. auxint must be zero.
@@ -653,17 +653,17 @@ func init() {
// STLXR Rarg2, (Rarg0), Rtmp
// CBNZ Rtmp, -4(PC)
// CSET EQ, Rout
{name: "LoweredAtomicCas64", argLength: 4, reg: gpcas, resultNotInArgs: true, clobberFlags: true, faultOnNilArg0: true, hasSideEffects: true},
{name: "LoweredAtomicCas32", argLength: 4, reg: gpcas, resultNotInArgs: true, clobberFlags: true, faultOnNilArg0: true, hasSideEffects: true},
{name: "LoweredAtomicCas64", argLength: 4, reg: gpcas, resultNotInArgs: true, clobberFlags: true, faultOnNilArg0: true, hasSideEffects: true, unsafePoint: true},
{name: "LoweredAtomicCas32", argLength: 4, reg: gpcas, resultNotInArgs: true, clobberFlags: true, faultOnNilArg0: true, hasSideEffects: true, unsafePoint: true},

// atomic and/or.
// *arg0 &= (|=) arg1. arg2=mem. returns <new content of *arg0, memory>. auxint must be zero.
// LDAXRB (Rarg0), Rout
// AND/OR Rarg1, Rout
// STLXRB Rout, (Rarg0), Rtmp
// CBNZ Rtmp, -3(PC)
{name: "LoweredAtomicAnd8", argLength: 3, reg: gpxchg, resultNotInArgs: true, asm: "AND", typ: "(UInt8,Mem)", faultOnNilArg0: true, hasSideEffects: true},
{name: "LoweredAtomicOr8", argLength: 3, reg: gpxchg, resultNotInArgs: true, asm: "ORR", typ: "(UInt8,Mem)", faultOnNilArg0: true, hasSideEffects: true},
{name: "LoweredAtomicAnd8", argLength: 3, reg: gpxchg, resultNotInArgs: true, asm: "AND", typ: "(UInt8,Mem)", faultOnNilArg0: true, hasSideEffects: true, unsafePoint: true},
{name: "LoweredAtomicOr8", argLength: 3, reg: gpxchg, resultNotInArgs: true, asm: "ORR", typ: "(UInt8,Mem)", faultOnNilArg0: true, hasSideEffects: true, unsafePoint: true},

// LoweredWB invokes runtime.gcWriteBarrier. arg0=destptr, arg1=srcptr, arg2=mem, aux=runtime.gcWriteBarrier
// It saves all GP registers if necessary,
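The common thread in the ops marked above is that their lowered instruction sequences carry live state in REGTMP (Rtmp). Using LoweredAtomicAdd64's own comment as the example (annotations added here; the preemption detail comes from the commit message):

    // LDAXR (Rarg0), Rout
    // ADD   Rarg1, Rout
    // STLXR Rout, (Rarg0), Rtmp   // store-exclusive status is written to REGTMP
    // CBNZ  Rtmp, -3(PC)          // retry decision reads REGTMP
    //
    // The whole sequence is emitted for a single SSA value. If an async
    // preemption request landed between STLXR and CBNZ, the injected call
    // would use REGTMP as a temporary and clobber the status before CBNZ
    // reads it, so the op as a whole is marked unsafePoint.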
5 changes: 5 additions & 0 deletions src/cmd/compile/internal/ssa/gen/main.go
@@ -63,6 +63,7 @@ type opData struct {
usesScratch bool // this op requires scratch memory space
hasSideEffects bool // for "reasons", not to be eliminated. E.g., atomic store, #19182.
zeroWidth bool // op never translates into any machine code. example: copy, which may sometimes translate to machine code, is not zero-width.
unsafePoint bool // this op is an unsafe point, i.e. not safe for async preemption
symEffect string // effect this op has on symbol in aux
scale uint8 // amd64/386 indexed load scale
}
@@ -325,6 +326,9 @@ func genOp() {
if v.zeroWidth {
fmt.Fprintln(w, "zeroWidth: true,")
}
if v.unsafePoint {
fmt.Fprintln(w, "unsafePoint: true,")
}
needEffect := strings.HasPrefix(v.aux, "Sym")
if v.symEffect != "" {
if !needEffect {
@@ -401,6 +405,7 @@ func genOp() {

fmt.Fprintln(w, "func (o Op) SymEffect() SymEffect { return opcodeTable[o].symEffect }")
fmt.Fprintln(w, "func (o Op) IsCall() bool { return opcodeTable[o].call }")
fmt.Fprintln(w, "func (o Op) UnsafePoint() bool { return opcodeTable[o].unsafePoint }")

// generate registers
for _, a := range archs {
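With the generator support above, marking another op is a one-line change in the relevant gen/*Ops.go file followed by regenerating opGen.go. A hypothetical example (the op name below is made up for illustration; gp21 is one of ARM64Ops.go's existing register specs, and the regeneration step is the one described in the ssa/gen directory's README):

    // In src/cmd/compile/internal/ssa/gen/ARM64Ops.go (hypothetical op):
    {name: "LoweredUsesRegTmp", argLength: 2, reg: gp21, unsafePoint: true},

    // Then regenerate the opcode tables:
    //   cd src/cmd/compile/internal/ssa/gen && go run *.go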
1 change: 1 addition & 0 deletions src/cmd/compile/internal/ssa/op.go
@@ -36,6 +36,7 @@ type opInfo struct {
usesScratch bool // this op requires scratch memory space
hasSideEffects bool // for "reasons", not to be eliminated. E.g., atomic store, #19182.
zeroWidth bool // op never translates into any machine code. example: copy, which may sometimes translate to machine code, is not zero-width.
unsafePoint bool // this op is an unsafe point, i.e. not safe for async preemption
symEffect SymEffect // effect this op has on symbol in aux
scale uint8 // amd64/386 indexed load scale
}
9 changes: 9 additions & 0 deletions src/cmd/compile/internal/ssa/opGen.go

Diff not shown: opGen.go is generated from the ssa/gen sources above.
