From b94dc384cabf75e7e8703265cd80f5324f84b642 Mon Sep 17 00:00:00 2001
From: Matthew Dempsky
Date: Tue, 28 Feb 2023 13:27:51 -0800
Subject: [PATCH] cmd/compile/internal/ir: explicit Pos for New{Bool,Int,String}

Stop depending on base.Pos for these.

Change-Id: I58dea44f8141eb37b59a6e9f7db0c6baa516ad93
Reviewed-on: https://go-review.googlesource.com/c/go/+/472296
Reviewed-by: Keith Randall
Run-TryBot: Matthew Dempsky
Auto-Submit: Matthew Dempsky
Reviewed-by: Cuong Manh Le
Reviewed-by: Keith Randall
TryBot-Result: Gopher Robot
---
 src/cmd/compile/internal/compare/compare.go   |  2 +-
 src/cmd/compile/internal/coverage/cover.go    | 14 +++----
 src/cmd/compile/internal/escape/desugar.go    |  2 +-
 src/cmd/compile/internal/ir/const.go          | 13 ++++---
 src/cmd/compile/internal/ir/val.go            |  5 ---
 src/cmd/compile/internal/pkginit/init.go      |  4 +-
 .../internal/pkginit/initAsanGlobals.go       | 18 ++++-----
 src/cmd/compile/internal/reflectdata/alg.go   | 24 ++++++------
 src/cmd/compile/internal/typecheck/func.go    |  4 +-
 .../compile/internal/typecheck/typecheck.go   |  2 +-
 src/cmd/compile/internal/walk/assign.go       |  8 ++--
 src/cmd/compile/internal/walk/builtin.go      | 32 ++++++++--------
 src/cmd/compile/internal/walk/compare.go      | 38 +++++++++----------
 src/cmd/compile/internal/walk/complit.go      | 12 +++---
 src/cmd/compile/internal/walk/convert.go      |  6 +--
 src/cmd/compile/internal/walk/order.go        |  6 +--
 src/cmd/compile/internal/walk/range.go        | 20 +++++-----
 src/cmd/compile/internal/walk/select.go       | 14 +++----
 src/cmd/compile/internal/walk/switch.go       | 10 ++---
 19 files changed, 115 insertions(+), 119 deletions(-)

diff --git a/src/cmd/compile/internal/compare/compare.go b/src/cmd/compile/internal/compare/compare.go
index 512ad25237805a..9758be1fe88da0 100644
--- a/src/cmd/compile/internal/compare/compare.go
+++ b/src/cmd/compile/internal/compare/compare.go
@@ -331,7 +331,7 @@ func eqmem(p ir.Node, q ir.Node, field *types.Sym, size int64) ir.Node {
 	call.Args.Append(nx)
 	call.Args.Append(ny)
 	if needsize {
-		call.Args.Append(ir.NewInt(size))
+		call.Args.Append(ir.NewInt(base.Pos, size))
 	}
 
 	return call
diff --git a/src/cmd/compile/internal/coverage/cover.go b/src/cmd/compile/internal/coverage/cover.go
index 688728d53a23dd..3e0350b51ae66b 100644
--- a/src/cmd/compile/internal/coverage/cover.go
+++ b/src/cmd/compile/internal/coverage/cover.go
@@ -157,7 +157,7 @@ func registerMeta(cnames Names, hashv [16]byte, mdlen int) {
 	pos := cnames.InitFn.Pos()
 	elist := make([]ir.Node, 0, 16)
 	for i := 0; i < 16; i++ {
-		elem := ir.NewInt(int64(hashv[i]))
+		elem := ir.NewInt(base.Pos, int64(hashv[i]))
 		elist = append(elist, elem)
 	}
 	ht := types.NewArray(types.Types[types.TUINT8], 16)
@@ -168,7 +168,7 @@ func registerMeta(cnames Names, hashv [16]byte, mdlen int) {
 	mdauspx := typecheck.ConvNop(mdax, types.Types[types.TUNSAFEPTR])
 
 	// Materialize expression for length.
-	lenx := ir.NewInt(int64(mdlen)) // untyped
+	lenx := ir.NewInt(base.Pos, int64(mdlen)) // untyped
 
 	// Generate a call to runtime.addCovMeta, e.g.
 	//
@@ -176,10 +176,10 @@ func registerMeta(cnames Names, hashv [16]byte, mdlen int) {
 	//
 	fn := typecheck.LookupRuntime("addCovMeta")
 	pkid := coverage.HardCodedPkgID(base.Ctxt.Pkgpath)
-	pkIdNode := ir.NewInt(int64(pkid))
-	cmodeNode := ir.NewInt(int64(cnames.CounterMode))
-	cgranNode := ir.NewInt(int64(cnames.CounterGran))
-	pkPathNode := ir.NewString(base.Ctxt.Pkgpath)
+	pkIdNode := ir.NewInt(base.Pos, int64(pkid))
+	cmodeNode := ir.NewInt(base.Pos, int64(cnames.CounterMode))
+	cgranNode := ir.NewInt(base.Pos, int64(cnames.CounterGran))
+	pkPathNode := ir.NewString(base.Pos, base.Ctxt.Pkgpath)
 	callx := typecheck.Call(pos, fn, []ir.Node{mdauspx, lenx, hashx,
 		pkPathNode, pkIdNode, cmodeNode, cgranNode}, false)
 	assign := callx
@@ -202,7 +202,7 @@ func addInitHookCall(initfn *ir.Func, cmode coverage.CounterMode) {
 	pos := initfn.Pos()
 	istest := cmode == coverage.CtrModeTestMain
 	initf := typecheck.LookupCoverage("initHook")
-	istestNode := ir.NewBool(istest)
+	istestNode := ir.NewBool(base.Pos, istest)
 	args := []ir.Node{istestNode}
 	callx := typecheck.Call(pos, initf, args, false)
 	initfn.Body.Append(callx)
diff --git a/src/cmd/compile/internal/escape/desugar.go b/src/cmd/compile/internal/escape/desugar.go
index 6c21981acad09a..b2c42947dd8dd5 100644
--- a/src/cmd/compile/internal/escape/desugar.go
+++ b/src/cmd/compile/internal/escape/desugar.go
@@ -27,7 +27,7 @@ func fixRecoverCall(call *ir.CallExpr) {
 	// FP is equal to caller's SP plus FixedFrameSize.
 	var fp ir.Node = ir.NewCallExpr(pos, ir.OGETCALLERSP, nil, nil)
 	if off := base.Ctxt.Arch.FixedFrameSize; off != 0 {
-		fp = ir.NewBinaryExpr(fp.Pos(), ir.OADD, fp, ir.NewInt(off))
+		fp = ir.NewBinaryExpr(fp.Pos(), ir.OADD, fp, ir.NewInt(base.Pos, off))
 	}
 	// TODO(mdempsky): Replace *int32 with unsafe.Pointer, without upsetting checkptr.
 	fp = ir.NewConvExpr(pos, ir.OCONVNOP, types.NewPtr(types.Types[types.TINT32]), fp)
diff --git a/src/cmd/compile/internal/ir/const.go b/src/cmd/compile/internal/ir/const.go
index f0b66957f1201b..751620f26a3b01 100644
--- a/src/cmd/compile/internal/ir/const.go
+++ b/src/cmd/compile/internal/ir/const.go
@@ -11,18 +11,19 @@ import (
 
 	"cmd/compile/internal/base"
 	"cmd/compile/internal/types"
+	"cmd/internal/src"
 )
 
-func NewBool(b bool) Node {
-	return NewLiteral(constant.MakeBool(b))
+func NewBool(pos src.XPos, b bool) Node {
+	return NewBasicLit(pos, constant.MakeBool(b))
 }
 
-func NewInt(v int64) Node {
-	return NewLiteral(constant.MakeInt64(v))
+func NewInt(pos src.XPos, v int64) Node {
+	return NewBasicLit(pos, constant.MakeInt64(v))
 }
 
-func NewString(s string) Node {
-	return NewLiteral(constant.MakeString(s))
+func NewString(pos src.XPos, s string) Node {
+	return NewBasicLit(pos, constant.MakeString(s))
 }
 
 const (
diff --git a/src/cmd/compile/internal/ir/val.go b/src/cmd/compile/internal/ir/val.go
index 925222b1137e20..b62174b141d82b 100644
--- a/src/cmd/compile/internal/ir/val.go
+++ b/src/cmd/compile/internal/ir/val.go
@@ -60,11 +60,6 @@ func ValidTypeForConst(t *types.Type, v constant.Value) bool {
 	panic("unreachable")
 }
 
-// NewLiteral returns a new untyped constant with value v.
-func NewLiteral(v constant.Value) Node {
-	return NewBasicLit(base.Pos, v)
-}
-
 func idealType(ct constant.Kind) *types.Type {
 	switch ct {
 	case constant.String:
diff --git a/src/cmd/compile/internal/pkginit/init.go b/src/cmd/compile/internal/pkginit/init.go
index f8d5ee08a589c0..814127c66cab18 100644
--- a/src/cmd/compile/internal/pkginit/init.go
+++ b/src/cmd/compile/internal/pkginit/init.go
@@ -143,8 +143,8 @@ func Task() *ir.Name {
 		}, nil))
 		asancall := ir.NewCallExpr(base.Pos, ir.OCALL, asanf, nil)
 		asancall.Args.Append(typecheck.ConvNop(typecheck.NodAddr(
-			ir.NewIndexExpr(base.Pos, globals, ir.NewInt(0))), types.Types[types.TUNSAFEPTR]))
-		asancall.Args.Append(typecheck.ConvNop(ir.NewInt(int64(ni)), types.Types[types.TUINTPTR]))
+			ir.NewIndexExpr(base.Pos, globals, ir.NewInt(base.Pos, 0))), types.Types[types.TUNSAFEPTR]))
+		asancall.Args.Append(typecheck.ConvNop(ir.NewInt(base.Pos, int64(ni)), types.Types[types.TUINTPTR]))
 
 		fnInit.Body.Append(asancall)
 		typecheck.FinishFuncBody()
diff --git a/src/cmd/compile/internal/pkginit/initAsanGlobals.go b/src/cmd/compile/internal/pkginit/initAsanGlobals.go
index 464787a2d7e672..4164dee8dbdacf 100644
--- a/src/cmd/compile/internal/pkginit/initAsanGlobals.go
+++ b/src/cmd/compile/internal/pkginit/initAsanGlobals.go
@@ -69,7 +69,7 @@ func instrumentGlobals(fn *ir.Func) *ir.Name {
 	for i, n := range InstrumentGlobalsSlice {
 		setField := func(f string, val ir.Node, i int) {
 			r := ir.NewAssignStmt(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT,
-				ir.NewIndexExpr(base.Pos, globals, ir.NewInt(int64(i))), lname(f)), val)
+				ir.NewIndexExpr(base.Pos, globals, ir.NewInt(base.Pos, int64(i))), lname(f)), val)
 			init.Append(typecheck.Stmt(r))
 		}
 		// globals[i].beg = uintptr(unsafe.Pointer(&n))
@@ -79,19 +79,19 @@ func instrumentGlobals(fn *ir.Func) *ir.Name {
 		// Assign globals[i].size.
 		g := n.(*ir.Name)
 		size := g.Type().Size()
-		c = tconv(ir.NewInt(size), types.Types[types.TUINTPTR])
+		c = tconv(ir.NewInt(base.Pos, size), types.Types[types.TUINTPTR])
 		setField("size", c, i)
 		// Assign globals[i].sizeWithRedzone.
 		rzSize := GetRedzoneSizeForGlobal(size)
 		sizeWithRz := rzSize + size
-		c = tconv(ir.NewInt(sizeWithRz), types.Types[types.TUINTPTR])
+		c = tconv(ir.NewInt(base.Pos, sizeWithRz), types.Types[types.TUINTPTR])
 		setField("sizeWithRedzone", c, i)
 		// The C string type is terminated by a null character "\0", Go should use three-digit
 		// octal "\000" or two-digit hexadecimal "\x00" to create null terminated string.
 		// asanName = symbol's linkname + "\000"
 		// globals[i].name = (*defString)(unsafe.Pointer(&asanName)).data
 		name := g.Linksym().Name
-		init.Append(typecheck.Stmt(ir.NewAssignStmt(base.Pos, asanName, ir.NewString(name+"\000"))))
+		init.Append(typecheck.Stmt(ir.NewAssignStmt(base.Pos, asanName, ir.NewString(base.Pos, name+"\000"))))
 		c = tconv(typecheck.NodAddr(asanName), types.Types[types.TUNSAFEPTR])
 		c = tconv(c, types.NewPtr(defStringstruct))
 		c = ir.NewSelectorExpr(base.Pos, ir.ODOT, c, lname("data"))
@@ -99,23 +99,23 @@ func instrumentGlobals(fn *ir.Func) *ir.Name {
 		// Set the name of package being compiled as a unique identifier of a module.
 		// asanModulename = pkgName + "\000"
-		init.Append(typecheck.Stmt(ir.NewAssignStmt(base.Pos, asanModulename, ir.NewString(types.LocalPkg.Name+"\000"))))
+		init.Append(typecheck.Stmt(ir.NewAssignStmt(base.Pos, asanModulename, ir.NewString(base.Pos, types.LocalPkg.Name+"\000"))))
 		c = tconv(typecheck.NodAddr(asanModulename), types.Types[types.TUNSAFEPTR])
 		c = tconv(c, types.NewPtr(defStringstruct))
 		c = ir.NewSelectorExpr(base.Pos, ir.ODOT, c, lname("data"))
 		setField("moduleName", c, i)
 		// Assign asanL[i].filename, asanL[i].line, asanL[i].column
 		// and assign globals[i].location = uintptr(unsafe.Pointer(&asanL[i]))
-		asanLi := ir.NewIndexExpr(base.Pos, asanlocation, ir.NewInt(int64(i)))
-		filename := ir.NewString(base.Ctxt.PosTable.Pos(n.Pos()).Filename() + "\000")
+		asanLi := ir.NewIndexExpr(base.Pos, asanlocation, ir.NewInt(base.Pos, int64(i)))
+		filename := ir.NewString(base.Pos, base.Ctxt.PosTable.Pos(n.Pos()).Filename()+"\000")
 		init.Append(typecheck.Stmt(ir.NewAssignStmt(base.Pos, asanFilename, filename)))
 		c = tconv(typecheck.NodAddr(asanFilename), types.Types[types.TUNSAFEPTR])
 		c = tconv(c, types.NewPtr(defStringstruct))
 		c = ir.NewSelectorExpr(base.Pos, ir.ODOT, c, lname("data"))
 		init.Append(typecheck.Stmt(ir.NewAssignStmt(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, asanLi, lname("filename")), c)))
-		line := ir.NewInt(int64(n.Pos().Line()))
+		line := ir.NewInt(base.Pos, int64(n.Pos().Line()))
 		init.Append(typecheck.Stmt(ir.NewAssignStmt(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, asanLi, lname("line")), line)))
-		col := ir.NewInt(int64(n.Pos().Col()))
+		col := ir.NewInt(base.Pos, int64(n.Pos().Col()))
 		init.Append(typecheck.Stmt(ir.NewAssignStmt(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, asanLi, lname("column")), col)))
 		c = tconv(typecheck.NodAddr(asanLi), types.Types[types.TUNSAFEPTR])
 		c = tconv(c, types.Types[types.TUINTPTR])
diff --git a/src/cmd/compile/internal/reflectdata/alg.go b/src/cmd/compile/internal/reflectdata/alg.go
index e793d4920dca32..a2ba1a2bbec3b3 100644
--- a/src/cmd/compile/internal/reflectdata/alg.go
+++ b/src/cmd/compile/internal/reflectdata/alg.go
@@ -163,9 +163,9 @@ func hashFunc(t *types.Type) *ir.Func {
 
 		// for i := 0; i < nelem; i++
 		ni := typecheck.Temp(types.Types[types.TINT])
-		init := ir.NewAssignStmt(base.Pos, ni, ir.NewInt(0))
-		cond := ir.NewBinaryExpr(base.Pos, ir.OLT, ni, ir.NewInt(t.NumElem()))
-		post := ir.NewAssignStmt(base.Pos, ni, ir.NewBinaryExpr(base.Pos, ir.OADD, ni, ir.NewInt(1)))
+		init := ir.NewAssignStmt(base.Pos, ni, ir.NewInt(base.Pos, 0))
+		cond := ir.NewBinaryExpr(base.Pos, ir.OLT, ni, ir.NewInt(base.Pos, t.NumElem()))
+		post := ir.NewAssignStmt(base.Pos, ni, ir.NewBinaryExpr(base.Pos, ir.OADD, ni, ir.NewInt(base.Pos, 1)))
 		loop := ir.NewForStmt(base.Pos, nil, cond, post, nil)
 		loop.PtrInit().Append(init)
 
@@ -216,7 +216,7 @@ func hashFunc(t *types.Type) *ir.Func {
 			na := typecheck.NodAddr(nx)
 			call.Args.Append(na)
 			call.Args.Append(nh)
-			call.Args.Append(ir.NewInt(size))
+			call.Args.Append(ir.NewInt(base.Pos, size))
 			fn.Body.Append(ir.NewAssignStmt(base.Pos, nh, call))
 
 			i = next
@@ -440,8 +440,8 @@ func eqFunc(t *types.Type) *ir.Func {
 			// Generate an unrolled for loop.
 			// for i := 0; i < nelem/unroll*unroll; i += unroll
 			i := typecheck.Temp(types.Types[types.TINT])
-			init := ir.NewAssignStmt(base.Pos, i, ir.NewInt(0))
-			cond := ir.NewBinaryExpr(base.Pos, ir.OLT, i, ir.NewInt(iterateTo))
+			init := ir.NewAssignStmt(base.Pos, i, ir.NewInt(base.Pos, 0))
+			cond := ir.NewBinaryExpr(base.Pos, ir.OLT, i, ir.NewInt(base.Pos, iterateTo))
 			loop := ir.NewForStmt(base.Pos, nil, cond, nil, nil)
 			loop.PtrInit().Append(init)
 
@@ -454,7 +454,7 @@ func eqFunc(t *types.Type) *ir.Func {
 				nif := ir.NewIfStmt(base.Pos, checkIdx(i), nil, nil)
 				nif.Else.Append(ir.NewBranchStmt(base.Pos, ir.OGOTO, neq))
 				loop.Body.Append(nif)
-				post := ir.NewAssignStmt(base.Pos, i, ir.NewBinaryExpr(base.Pos, ir.OADD, i, ir.NewInt(1)))
+				post := ir.NewAssignStmt(base.Pos, i, ir.NewBinaryExpr(base.Pos, ir.OADD, i, ir.NewInt(base.Pos, 1)))
 				loop.Body.Append(post)
 			}
 
@@ -462,7 +462,7 @@ func eqFunc(t *types.Type) *ir.Func {
 
 			if nelem == iterateTo {
 				if last {
-					fn.Body.Append(ir.NewAssignStmt(base.Pos, nr, ir.NewBool(true)))
+					fn.Body.Append(ir.NewAssignStmt(base.Pos, nr, ir.NewBool(base.Pos, true)))
 				}
 				return
 			}
@@ -479,12 +479,12 @@ func eqFunc(t *types.Type) *ir.Func {
 			// }
 			for j := iterateTo; j < nelem; j++ {
 				// if check {} else { goto neq }
-				nif := ir.NewIfStmt(base.Pos, checkIdx(ir.NewInt(j)), nil, nil)
+				nif := ir.NewIfStmt(base.Pos, checkIdx(ir.NewInt(base.Pos, j)), nil, nil)
 				nif.Else.Append(ir.NewBranchStmt(base.Pos, ir.OGOTO, neq))
 				fn.Body.Append(nif)
 			}
 			if last {
-				fn.Body.Append(ir.NewAssignStmt(base.Pos, nr, checkIdx(ir.NewInt(nelem))))
+				fn.Body.Append(ir.NewAssignStmt(base.Pos, nr, checkIdx(ir.NewInt(base.Pos, nelem))))
 			}
 		}
 
@@ -518,7 +518,7 @@ func eqFunc(t *types.Type) *ir.Func {
 	case types.TSTRUCT:
 		flatConds := compare.EqStruct(t, np, nq)
 		if len(flatConds) == 0 {
-			fn.Body.Append(ir.NewAssignStmt(base.Pos, nr, ir.NewBool(true)))
+			fn.Body.Append(ir.NewAssignStmt(base.Pos, nr, ir.NewBool(base.Pos, true)))
 		} else {
 			for _, c := range flatConds[:len(flatConds)-1] {
 				// if cond {} else { goto neq }
@@ -540,7 +540,7 @@ func eqFunc(t *types.Type) *ir.Func {
 	// r = false
 	// return (or goto ret)
 	fn.Body.Append(ir.NewLabelStmt(base.Pos, neq))
-	fn.Body.Append(ir.NewAssignStmt(base.Pos, nr, ir.NewBool(false)))
+	fn.Body.Append(ir.NewAssignStmt(base.Pos, nr, ir.NewBool(base.Pos, false)))
 
 	if compare.EqCanPanic(t) || anyCall(fn) {
 		// Epilogue is large, so share it with the equal case.
 		fn.Body.Append(ir.NewBranchStmt(base.Pos, ir.OGOTO, ret))
diff --git a/src/cmd/compile/internal/typecheck/func.go b/src/cmd/compile/internal/typecheck/func.go
index f64523c9a0356d..de8b8b325c4f19 100644
--- a/src/cmd/compile/internal/typecheck/func.go
+++ b/src/cmd/compile/internal/typecheck/func.go
@@ -678,7 +678,7 @@ func tcMake(n *ir.CallExpr) ir.Node {
 				return n
 			}
 		} else {
-			l = ir.NewInt(0)
+			l = ir.NewInt(base.Pos, 0)
 		}
 		nn = ir.NewMakeExpr(n.Pos(), ir.OMAKEMAP, l, nil)
 		nn.SetEsc(n.Esc())
@@ -699,7 +699,7 @@ func tcMake(n *ir.CallExpr) ir.Node {
 				return n
 			}
 		} else {
-			l = ir.NewInt(0)
+			l = ir.NewInt(base.Pos, 0)
 		}
 		nn = ir.NewMakeExpr(n.Pos(), ir.OMAKECHAN, l, nil)
 	}
diff --git a/src/cmd/compile/internal/typecheck/typecheck.go b/src/cmd/compile/internal/typecheck/typecheck.go
index b06b9d9753fba7..0c84bfe2421be8 100644
--- a/src/cmd/compile/internal/typecheck/typecheck.go
+++ b/src/cmd/compile/internal/typecheck/typecheck.go
@@ -1605,7 +1605,7 @@ func stringtoruneslit(n *ir.ConvExpr) ir.Node {
 	var l []ir.Node
 	i := 0
 	for _, r := range ir.StringVal(n.X) {
-		l = append(l, ir.NewKeyExpr(base.Pos, ir.NewInt(int64(i)), ir.NewInt(int64(r))))
+		l = append(l, ir.NewKeyExpr(base.Pos, ir.NewInt(base.Pos, int64(i)), ir.NewInt(base.Pos, int64(r))))
 		i++
 	}
 
diff --git a/src/cmd/compile/internal/walk/assign.go b/src/cmd/compile/internal/walk/assign.go
index 1450ec6ba2b994..8f27329da80234 100644
--- a/src/cmd/compile/internal/walk/assign.go
+++ b/src/cmd/compile/internal/walk/assign.go
@@ -559,7 +559,7 @@ func appendSlice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
 
 		fn := typecheck.LookupRuntime("slicecopy")
 		fn = typecheck.SubstArgTypes(fn, ptr1.Type().Elem(), ptr2.Type().Elem())
-		ncopy = mkcall1(fn, types.Types[types.TINT], &nodes, ptr1, len1, ptr2, len2, ir.NewInt(elemtype.Size()))
+		ncopy = mkcall1(fn, types.Types[types.TINT], &nodes, ptr1, len1, ptr2, len2, ir.NewInt(base.Pos, elemtype.Size()))
 	} else {
 		// memmove(&s[idx], &l2[0], len(l2)*sizeof(T))
 		ix := ir.NewIndexExpr(base.Pos, s, idx)
@@ -569,7 +569,7 @@ func appendSlice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
 		sptr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, l2)
 
 		nwid := cheapExpr(typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OLEN, l2), types.Types[types.TUINTPTR]), &nodes)
-		nwid = ir.NewBinaryExpr(base.Pos, ir.OMUL, nwid, ir.NewInt(elemtype.Size()))
+		nwid = ir.NewBinaryExpr(base.Pos, ir.OMUL, nwid, ir.NewInt(base.Pos, elemtype.Size()))
 
 		// instantiate func memmove(to *any, frm *any, length uintptr)
 		fn := typecheck.LookupRuntime("memmove")
@@ -667,7 +667,7 @@ func extendSlice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
 	var nodes []ir.Node
 
 	// if l2 >= 0 (likely happens), do nothing
-	nifneg := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OGE, l2, ir.NewInt(0)), nil, nil)
+	nifneg := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OGE, l2, ir.NewInt(base.Pos, 0)), nil, nil)
 	nifneg.Likely = true
 
 	// else panicmakeslicelen()
@@ -718,7 +718,7 @@ func extendSlice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
 	hp := typecheck.ConvNop(typecheck.NodAddr(ix), types.Types[types.TUNSAFEPTR])
 
 	// hn := l2 * sizeof(elem(s))
-	hn := typecheck.Conv(ir.NewBinaryExpr(base.Pos, ir.OMUL, l2, ir.NewInt(elemtype.Size())), types.Types[types.TUINTPTR])
+	hn := typecheck.Conv(ir.NewBinaryExpr(base.Pos, ir.OMUL, l2, ir.NewInt(base.Pos, elemtype.Size())), types.Types[types.TUINTPTR])
 
 	clrname := "memclrNoHeapPointers"
 	hasPointers := elemtype.HasPointers()
diff --git a/src/cmd/compile/internal/walk/builtin.go b/src/cmd/compile/internal/walk/builtin.go
index 3c85b19a36a0d3..98825bd95bacce 100644
--- a/src/cmd/compile/internal/walk/builtin.go
+++ b/src/cmd/compile/internal/walk/builtin.go
@@ -83,7 +83,7 @@ func walkAppend(n *ir.CallExpr, init *ir.Nodes, dst ir.Node) ir.Node {
 	l = append(l, ir.NewAssignStmt(base.Pos, s, nsrc))
 
 	// num = number of things to append
-	num := ir.NewInt(int64(argc))
+	num := ir.NewInt(base.Pos, int64(argc))
 
 	// newLen := s.len + num
 	newLen := typecheck.Temp(types.Types[types.TINT])
@@ -119,7 +119,7 @@ func walkAppend(n *ir.CallExpr, init *ir.Nodes, dst ir.Node) ir.Node {
 	ls = n.Args[1:]
 	for i, n := range ls {
 		// s[s.len-argc+i] = arg
-		ix := ir.NewIndexExpr(base.Pos, s, ir.NewBinaryExpr(base.Pos, ir.OSUB, newLen, ir.NewInt(int64(argc-i))))
+		ix := ir.NewIndexExpr(base.Pos, s, ir.NewBinaryExpr(base.Pos, ir.OSUB, newLen, ir.NewInt(base.Pos, int64(argc-i))))
 		ix.SetBounded(true)
 		l = append(l, ir.NewAssignStmt(base.Pos, ix, n))
 	}
@@ -184,7 +184,7 @@ func walkCopy(n *ir.BinaryExpr, init *ir.Nodes, runtimecall bool) ir.Node {
 
 		fn := typecheck.LookupRuntime("slicecopy")
 		fn = typecheck.SubstArgTypes(fn, ptrL.Type().Elem(), ptrR.Type().Elem())
-		return mkcall1(fn, n.Type(), init, ptrL, lenL, ptrR, lenR, ir.NewInt(n.X.Type().Elem().Size()))
+		return mkcall1(fn, n.Type(), init, ptrL, lenL, ptrR, lenR, ir.NewInt(base.Pos, n.X.Type().Elem().Size()))
 	}
 
 	n.X = walkExpr(n.X, init)
@@ -220,7 +220,7 @@ func walkCopy(n *ir.BinaryExpr, init *ir.Nodes, runtimecall bool) ir.Node {
 	nwid := ir.Node(typecheck.Temp(types.Types[types.TUINTPTR]))
 	setwid := ir.NewAssignStmt(base.Pos, nwid, typecheck.Conv(nlen, types.Types[types.TUINTPTR]))
 	ne.Body.Append(setwid)
-	nwid = ir.NewBinaryExpr(base.Pos, ir.OMUL, nwid, ir.NewInt(nl.Type().Elem().Size()))
+	nwid = ir.NewBinaryExpr(base.Pos, ir.OMUL, nwid, ir.NewInt(base.Pos, nl.Type().Elem().Size()))
 	call := mkcall1(fn, nil, init, nto, nfrm, nwid)
 	ne.Body.Append(call)
 
@@ -320,7 +320,7 @@ func walkMakeMap(n *ir.MakeExpr, init *ir.Nodes) ir.Node {
 		//     h.buckets = b
 		// }
 
-		nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OLE, hint, ir.NewInt(reflectdata.BUCKETSIZE)), nil, nil)
+		nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OLE, hint, ir.NewInt(base.Pos, reflectdata.BUCKETSIZE)), nil, nil)
 		nif.Likely = true
 
 		// var bv bmap
@@ -416,8 +416,8 @@ func walkMakeSlice(n *ir.MakeExpr, init *ir.Nodes) ir.Node {
 		//     if len < 0 { panicmakeslicelen() }
 		//     panicmakeslicecap()
 		// }
-		nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OGT, typecheck.Conv(l, types.Types[types.TUINT64]), ir.NewInt(i)), nil, nil)
-		niflen := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OLT, l, ir.NewInt(0)), nil, nil)
+		nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OGT, typecheck.Conv(l, types.Types[types.TUINT64]), ir.NewInt(base.Pos, i)), nil, nil)
+		niflen := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OLT, l, ir.NewInt(base.Pos, 0)), nil, nil)
 		niflen.Body = []ir.Node{mkcall("panicmakeslicelen", nil, init)}
 		nif.Body.Append(niflen, mkcall("panicmakeslicecap", nil, init))
 		init.Append(typecheck.Stmt(nif))
@@ -478,11 +478,11 @@ func walkMakeSliceCopy(n *ir.MakeExpr, init *ir.Nodes) ir.Node {
 		// We do not check for overflow of len(to)*elem.Width here
 		// since len(from) is an existing checked slice capacity
 		// with same elem.Width for the from slice.
-		size := ir.NewBinaryExpr(base.Pos, ir.OMUL, typecheck.Conv(length, types.Types[types.TUINTPTR]), typecheck.Conv(ir.NewInt(t.Elem().Size()), types.Types[types.TUINTPTR]))
+		size := ir.NewBinaryExpr(base.Pos, ir.OMUL, typecheck.Conv(length, types.Types[types.TUINTPTR]), typecheck.Conv(ir.NewInt(base.Pos, t.Elem().Size()), types.Types[types.TUINTPTR]))
 
 		// instantiate mallocgc(size uintptr, typ *byte, needszero bool) unsafe.Pointer
 		fn := typecheck.LookupRuntime("mallocgc")
-		ptr := mkcall1(fn, types.Types[types.TUNSAFEPTR], init, size, typecheck.NodNil(), ir.NewBool(false))
+		ptr := mkcall1(fn, types.Types[types.TUNSAFEPTR], init, size, typecheck.NodNil(), ir.NewBool(base.Pos, false))
 		ptr.MarkNonNil()
 
 		sh := ir.NewSliceHeaderExpr(base.Pos, t, ptr, length, length)
@@ -536,11 +536,11 @@ func walkPrint(nn *ir.CallExpr, init *ir.Nodes) ir.Node {
 		t := make([]ir.Node, 0, len(s)*2)
 		for i, n := range s {
 			if i != 0 {
-				t = append(t, ir.NewString(" "))
+				t = append(t, ir.NewString(base.Pos, " "))
 			}
 			t = append(t, n)
 		}
-		t = append(t, ir.NewString("\n"))
+		t = append(t, ir.NewString(base.Pos, "\n"))
 		nn.Args = t
 	}
 
@@ -554,7 +554,7 @@ func walkPrint(nn *ir.CallExpr, init *ir.Nodes) ir.Node {
 			i++
 		}
 		if len(strs) > 0 {
-			t = append(t, ir.NewString(strings.Join(strs, "")))
+			t = append(t, ir.NewString(base.Pos, strings.Join(strs, "")))
 		}
 		if i < len(s) {
 			t = append(t, s[i])
@@ -713,7 +713,7 @@ func walkUnsafeSlice(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
 
 		// if len < 0 { panicunsafeslicelen() }
 		nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
-		nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, typecheck.Conv(len, lenType), ir.NewInt(0))
+		nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, typecheck.Conv(len, lenType), ir.NewInt(base.Pos, 0))
 		nif.Body.Append(mkcall("panicunsafeslicelen", nil, &nif.Body))
 		appendWalkStmt(init, nif)
 
@@ -723,7 +723,7 @@ func walkUnsafeSlice(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
 		// }
 		nifPtr := ir.NewIfStmt(base.Pos, nil, nil, nil)
 		isNil := ir.NewBinaryExpr(base.Pos, ir.OEQ, unsafePtr, typecheck.NodNil())
-		gtZero := ir.NewBinaryExpr(base.Pos, ir.OGT, typecheck.Conv(len, lenType), ir.NewInt(0))
+		gtZero := ir.NewBinaryExpr(base.Pos, ir.OGT, typecheck.Conv(len, lenType), ir.NewInt(base.Pos, 0))
 		nifPtr.Cond = ir.NewLogicalExpr(base.Pos, ir.OANDAND, isNil, gtZero)
 		nifPtr.Body.Append(mkcall("panicunsafeslicenilptr", nil, &nifPtr.Body))
 
@@ -740,7 +740,7 @@ func walkUnsafeSlice(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
 		mem := typecheck.Temp(types.Types[types.TUINTPTR])
 		overflow := typecheck.Temp(types.Types[types.TBOOL])
 		fn := typecheck.LookupRuntime("mulUintptr")
-		call := mkcall1(fn, fn.Type().Results(), init, ir.NewInt(sliceType.Elem().Size()), typecheck.Conv(typecheck.Conv(len, lenType), types.Types[types.TUINTPTR]))
+		call := mkcall1(fn, fn.Type().Results(), init, ir.NewInt(base.Pos, sliceType.Elem().Size()), typecheck.Conv(typecheck.Conv(len, lenType), types.Types[types.TUINTPTR]))
 		appendWalkStmt(init, ir.NewAssignListStmt(base.Pos, ir.OAS2, []ir.Node{mem, overflow}, []ir.Node{call}))
 
 		// if overflow || mem > -uintptr(ptr) {
@@ -799,7 +799,7 @@ func walkUnsafeString(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
 
 		// if len < 0 { panicunsafestringlen() }
 		nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
-		nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, typecheck.Conv(len, lenType), ir.NewInt(0))
+		nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, typecheck.Conv(len, lenType), ir.NewInt(base.Pos, 0))
 		nif.Body.Append(mkcall("panicunsafestringlen", nil, &nif.Body))
 		appendWalkStmt(init, nif)
 
diff --git a/src/cmd/compile/internal/walk/compare.go b/src/cmd/compile/internal/walk/compare.go
index a2fd3b4974f13c..58d6b574969370 100644
--- a/src/cmd/compile/internal/walk/compare.go
+++ b/src/cmd/compile/internal/walk/compare.go
@@ -33,7 +33,7 @@ func fakePC(n ir.Node) ir.Node {
 	// those get the same `src.XPos`
 	io.WriteString(hash, fmt.Sprintf("%v", n))
 
-	return ir.NewInt(int64(hash.Sum32()))
+	return ir.NewInt(base.Pos, int64(hash.Sum32()))
 }
 
 // The result of walkCompare MUST be assigned back to n, e.g.
@@ -195,7 +195,7 @@ func walkCompare(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
 		call.Args.Append(typecheck.NodAddr(cmpl))
 		call.Args.Append(typecheck.NodAddr(cmpr))
 		if needsLength {
-			call.Args.Append(ir.NewInt(t.Size()))
+			call.Args.Append(ir.NewInt(base.Pos, t.Size()))
 		}
 		res := ir.Node(call)
 		if n.Op() != ir.OEQ {
@@ -262,31 +262,31 @@ func walkCompare(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
 		}
 		if step == 1 {
 			comp(
-				ir.NewIndexExpr(base.Pos, cmpl, ir.NewInt(i)),
-				ir.NewIndexExpr(base.Pos, cmpr, ir.NewInt(i)),
+				ir.NewIndexExpr(base.Pos, cmpl, ir.NewInt(base.Pos, i)),
+				ir.NewIndexExpr(base.Pos, cmpr, ir.NewInt(base.Pos, i)),
 			)
 			i++
 			remains -= t.Elem().Size()
 		} else {
 			elemType := t.Elem().ToUnsigned()
-			cmplw := ir.Node(ir.NewIndexExpr(base.Pos, cmpl, ir.NewInt(i)))
+			cmplw := ir.Node(ir.NewIndexExpr(base.Pos, cmpl, ir.NewInt(base.Pos, i)))
 			cmplw = typecheck.Conv(cmplw, elemType) // convert to unsigned
 			cmplw = typecheck.Conv(cmplw, convType) // widen
-			cmprw := ir.Node(ir.NewIndexExpr(base.Pos, cmpr, ir.NewInt(i)))
+			cmprw := ir.Node(ir.NewIndexExpr(base.Pos, cmpr, ir.NewInt(base.Pos, i)))
 			cmprw = typecheck.Conv(cmprw, elemType)
 			cmprw = typecheck.Conv(cmprw, convType)
 			// For code like this:  uint32(s[0]) | uint32(s[1])<<8 | uint32(s[2])<<16 ...
 			// ssa will generate a single large load.
 			for offset := int64(1); offset < step; offset++ {
-				lb := ir.Node(ir.NewIndexExpr(base.Pos, cmpl, ir.NewInt(i+offset)))
+				lb := ir.Node(ir.NewIndexExpr(base.Pos, cmpl, ir.NewInt(base.Pos, i+offset)))
 				lb = typecheck.Conv(lb, elemType)
 				lb = typecheck.Conv(lb, convType)
-				lb = ir.NewBinaryExpr(base.Pos, ir.OLSH, lb, ir.NewInt(8*t.Elem().Size()*offset))
+				lb = ir.NewBinaryExpr(base.Pos, ir.OLSH, lb, ir.NewInt(base.Pos, 8*t.Elem().Size()*offset))
 				cmplw = ir.NewBinaryExpr(base.Pos, ir.OOR, cmplw, lb)
-				rb := ir.Node(ir.NewIndexExpr(base.Pos, cmpr, ir.NewInt(i+offset)))
+				rb := ir.Node(ir.NewIndexExpr(base.Pos, cmpr, ir.NewInt(base.Pos, i+offset)))
 				rb = typecheck.Conv(rb, elemType)
 				rb = typecheck.Conv(rb, convType)
-				rb = ir.NewBinaryExpr(base.Pos, ir.OLSH, rb, ir.NewInt(8*t.Elem().Size()*offset))
+				rb = ir.NewBinaryExpr(base.Pos, ir.OLSH, rb, ir.NewInt(base.Pos, 8*t.Elem().Size()*offset))
 				cmprw = ir.NewBinaryExpr(base.Pos, ir.OOR, cmprw, rb)
 			}
 			comp(cmplw, cmprw)
@@ -296,7 +296,7 @@ func walkCompare(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
 		}
 	}
 	if expr == nil {
-		expr = ir.NewBool(n.Op() == ir.OEQ)
+		expr = ir.NewBool(base.Pos, n.Op() == ir.OEQ)
 		// We still need to use cmpl and cmpr, in case they contain
 		// an expression which might panic. See issue 23837.
 		a1 := typecheck.Stmt(ir.NewAssignStmt(base.Pos, ir.BlankNode, cmpl))
@@ -382,12 +382,12 @@ func walkCompareString(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
 		if len(s) > 0 {
 			ncs = safeExpr(ncs, init)
 		}
-		r := ir.Node(ir.NewBinaryExpr(base.Pos, cmp, ir.NewUnaryExpr(base.Pos, ir.OLEN, ncs), ir.NewInt(int64(len(s)))))
+		r := ir.Node(ir.NewBinaryExpr(base.Pos, cmp, ir.NewUnaryExpr(base.Pos, ir.OLEN, ncs), ir.NewInt(base.Pos, int64(len(s)))))
 		remains := len(s)
 		for i := 0; remains > 0; {
 			if remains == 1 || !canCombineLoads {
-				cb := ir.NewInt(int64(s[i]))
-				ncb := ir.NewIndexExpr(base.Pos, ncs, ir.NewInt(int64(i)))
+				cb := ir.NewInt(base.Pos, int64(s[i]))
+				ncb := ir.NewIndexExpr(base.Pos, ncs, ir.NewInt(base.Pos, int64(i)))
 				r = ir.NewLogicalExpr(base.Pos, and, r, ir.NewBinaryExpr(base.Pos, cmp, ncb, cb))
 				remains--
 				i++
@@ -406,18 +406,18 @@ func walkCompareString(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
 				convType = types.Types[types.TUINT16]
 				step = 2
 			}
-			ncsubstr := typecheck.Conv(ir.NewIndexExpr(base.Pos, ncs, ir.NewInt(int64(i))), convType)
+			ncsubstr := typecheck.Conv(ir.NewIndexExpr(base.Pos, ncs, ir.NewInt(base.Pos, int64(i))), convType)
 			csubstr := int64(s[i])
 			// Calculate large constant from bytes as sequence of shifts and ors.
 			// Like this:  uint32(s[0]) | uint32(s[1])<<8 | uint32(s[2])<<16 ...
 			// ssa will combine this into a single large load.
 			for offset := 1; offset < step; offset++ {
-				b := typecheck.Conv(ir.NewIndexExpr(base.Pos, ncs, ir.NewInt(int64(i+offset))), convType)
-				b = ir.NewBinaryExpr(base.Pos, ir.OLSH, b, ir.NewInt(int64(8*offset)))
+				b := typecheck.Conv(ir.NewIndexExpr(base.Pos, ncs, ir.NewInt(base.Pos, int64(i+offset))), convType)
+				b = ir.NewBinaryExpr(base.Pos, ir.OLSH, b, ir.NewInt(base.Pos, int64(8*offset)))
 				ncsubstr = ir.NewBinaryExpr(base.Pos, ir.OOR, ncsubstr, b)
 				csubstr |= int64(s[i+offset]) << uint8(8*offset)
 			}
-			csubstrPart := ir.NewInt(csubstr)
+			csubstrPart := ir.NewInt(base.Pos, csubstr)
 			// Compare "step" bytes as once
 			r = ir.NewLogicalExpr(base.Pos, and, r, ir.NewBinaryExpr(base.Pos, cmp, csubstrPart, ncsubstr))
 			remains -= step
@@ -446,7 +446,7 @@ func walkCompareString(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
 	} else {
 		// sys_cmpstring(s1, s2) :: 0
 		r = mkcall("cmpstring", types.Types[types.TINT], init, typecheck.Conv(n.X, types.Types[types.TSTRING]), typecheck.Conv(n.Y, types.Types[types.TSTRING]))
-		r = ir.NewBinaryExpr(base.Pos, n.Op(), r, ir.NewInt(0))
+		r = ir.NewBinaryExpr(base.Pos, n.Op(), r, ir.NewInt(base.Pos, 0))
 	}
 
 	return finishCompare(n, r, init)
diff --git a/src/cmd/compile/internal/walk/complit.go b/src/cmd/compile/internal/walk/complit.go
index 187c28b62fd72a..0a8ce65a1690cc 100644
--- a/src/cmd/compile/internal/walk/complit.go
+++ b/src/cmd/compile/internal/walk/complit.go
@@ -204,7 +204,7 @@ func fixedlit(ctxt initContext, kind initKind, n *ir.CompLitExpr, var_ ir.Node,
 			}
 			r = kv.Value
 		}
-		a := ir.NewIndexExpr(base.Pos, var_, ir.NewInt(k))
+		a := ir.NewIndexExpr(base.Pos, var_, ir.NewInt(base.Pos, k))
 		k++
 		if isBlank {
 			return ir.BlankNode, r
@@ -377,7 +377,7 @@ func slicelit(ctxt initContext, n *ir.CompLitExpr, var_ ir.Node, init *ir.Nodes)
 			}
 			value = kv.Value
 		}
-		a := ir.NewIndexExpr(base.Pos, vauto, ir.NewInt(index))
+		a := ir.NewIndexExpr(base.Pos, vauto, ir.NewInt(base.Pos, index))
 		a.SetBounded(true)
 		index++
 
@@ -416,7 +416,7 @@ func slicelit(ctxt initContext, n *ir.CompLitExpr, var_ ir.Node, init *ir.Nodes)
 
 func maplit(n *ir.CompLitExpr, m ir.Node, init *ir.Nodes) {
 	// make the map var
-	args := []ir.Node{ir.TypeNode(n.Type()), ir.NewInt(n.Len + int64(len(n.List)))}
+	args := []ir.Node{ir.TypeNode(n.Type()), ir.NewInt(base.Pos, n.Len+int64(len(n.List)))}
 	a := typecheck.Expr(ir.NewCallExpr(base.Pos, ir.OMAKE, nil, args)).(*ir.MakeExpr)
 	a.RType = n.RType
 	a.SetEsc(n.Esc())
@@ -476,9 +476,9 @@ func maplit(n *ir.CompLitExpr, m ir.Node, init *ir.Nodes) {
 		base.AssertfAt(lhs.Op() == ir.OINDEXMAP, lhs.Pos(), "want OINDEXMAP, have %+v", lhs)
 		lhs.RType = n.RType
 
-		zero := ir.NewAssignStmt(base.Pos, i, ir.NewInt(0))
-		cond := ir.NewBinaryExpr(base.Pos, ir.OLT, i, ir.NewInt(tk.NumElem()))
-		incr := ir.NewAssignStmt(base.Pos, i, ir.NewBinaryExpr(base.Pos, ir.OADD, i, ir.NewInt(1)))
+		zero := ir.NewAssignStmt(base.Pos, i, ir.NewInt(base.Pos, 0))
+		cond := ir.NewBinaryExpr(base.Pos, ir.OLT, i, ir.NewInt(base.Pos, tk.NumElem()))
+		incr := ir.NewAssignStmt(base.Pos, i, ir.NewBinaryExpr(base.Pos, ir.OADD, i, ir.NewInt(base.Pos, 1)))
 
 		var body ir.Node = ir.NewAssignStmt(base.Pos, lhs, rhs)
 		body = typecheck.Stmt(body)
diff --git a/src/cmd/compile/internal/walk/convert.go b/src/cmd/compile/internal/walk/convert.go
index 629dd9af4fec39..07ddd0458ffcf9 100644
--- a/src/cmd/compile/internal/walk/convert.go
+++ b/src/cmd/compile/internal/walk/convert.go
@@ -140,9 +140,9 @@ func dataWord(conv *ir.ConvExpr, init *ir.Nodes) ir.Node {
 		n = cheapExpr(n, init)
 		n = soleComponent(init, n)
 		// byteindex widens n so that the multiplication doesn't overflow.
-		index := ir.NewBinaryExpr(base.Pos, ir.OLSH, byteindex(n), ir.NewInt(3))
+		index := ir.NewBinaryExpr(base.Pos, ir.OLSH, byteindex(n), ir.NewInt(base.Pos, 3))
 		if ssagen.Arch.LinkArch.ByteOrder == binary.BigEndian {
-			index = ir.NewBinaryExpr(base.Pos, ir.OADD, index, ir.NewInt(7))
+			index = ir.NewBinaryExpr(base.Pos, ir.OADD, index, ir.NewInt(base.Pos, 7))
 		}
 		// The actual type is [256]uint64, but we use [256*8]uint8 so we can address
 		// individual bytes.
@@ -418,7 +418,7 @@ func soleComponent(init *ir.Nodes, n ir.Node) ir.Node {
 			}
 			n = typecheck.Expr(ir.NewSelectorExpr(n.Pos(), ir.OXDOT, n, n.Type().Field(0).Sym))
 		case n.Type().IsArray():
-			n = typecheck.Expr(ir.NewIndexExpr(n.Pos(), n, ir.NewInt(0)))
+			n = typecheck.Expr(ir.NewIndexExpr(n.Pos(), n, ir.NewInt(base.Pos, 0)))
 		default:
 			return n
 		}
diff --git a/src/cmd/compile/internal/walk/order.go b/src/cmd/compile/internal/walk/order.go
index b6b277c9a51781..038844af262b4f 100644
--- a/src/cmd/compile/internal/walk/order.go
+++ b/src/cmd/compile/internal/walk/order.go
@@ -433,9 +433,9 @@ func (o *orderState) edge() {
 	// freezes the counter when it reaches the value of 255. However, a range
 	// of experiments showed that that decreases overall performance.
 	o.append(ir.NewIfStmt(base.Pos,
-		ir.NewBinaryExpr(base.Pos, ir.OEQ, counter, ir.NewInt(0xff)),
-		[]ir.Node{ir.NewAssignStmt(base.Pos, counter, ir.NewInt(1))},
-		[]ir.Node{ir.NewAssignOpStmt(base.Pos, ir.OADD, counter, ir.NewInt(1))}))
+		ir.NewBinaryExpr(base.Pos, ir.OEQ, counter, ir.NewInt(base.Pos, 0xff)),
+		[]ir.Node{ir.NewAssignStmt(base.Pos, counter, ir.NewInt(base.Pos, 1))},
+		[]ir.Node{ir.NewAssignOpStmt(base.Pos, ir.OADD, counter, ir.NewInt(base.Pos, 1))}))
 }
 
 // orderBlock orders the block of statements in n into a new slice,
diff --git a/src/cmd/compile/internal/walk/range.go b/src/cmd/compile/internal/walk/range.go
index 67c13a847c8fa0..ae2d9c250bb9f4 100644
--- a/src/cmd/compile/internal/walk/range.go
+++ b/src/cmd/compile/internal/walk/range.go
@@ -102,7 +102,7 @@ func walkRange(nrange *ir.RangeStmt) ir.Node {
 		init = append(init, ir.NewAssignStmt(base.Pos, hn, ir.NewUnaryExpr(base.Pos, ir.OLEN, ha)))
 
 		nfor.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, hv1, hn)
-		nfor.Post = ir.NewAssignStmt(base.Pos, hv1, ir.NewBinaryExpr(base.Pos, ir.OADD, hv1, ir.NewInt(1)))
+		nfor.Post = ir.NewAssignStmt(base.Pos, hv1, ir.NewBinaryExpr(base.Pos, ir.OADD, hv1, ir.NewInt(base.Pos, 1)))
 
 		// for range ha { body }
 		if v1 == nil {
@@ -214,7 +214,7 @@ func walkRange(nrange *ir.RangeStmt) ir.Node {
 		// This reads from hp and writes to hu.
 		huVal = ir.NewConvExpr(base.Pos, ir.OCONVNOP, types.Types[types.TUNSAFEPTR], hp)
 		huVal = ir.NewConvExpr(base.Pos, ir.OCONVNOP, types.Types[types.TUINTPTR], huVal)
-		as := ir.NewAssignStmt(base.Pos, hu, ir.NewBinaryExpr(base.Pos, ir.OADD, huVal, ir.NewInt(elem.Size())))
+		as := ir.NewAssignStmt(base.Pos, hu, ir.NewBinaryExpr(base.Pos, ir.OADD, huVal, ir.NewInt(base.Pos, elem.Size())))
 		nfor.Post = ir.NewBlockStmt(base.Pos, []ir.Node{nfor.Post, as})
 
 	case types.TMAP:
@@ -260,7 +260,7 @@ func walkRange(nrange *ir.RangeStmt) ir.Node {
 		}
 
 		hb := typecheck.Temp(types.Types[types.TBOOL])
-		nfor.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, hb, ir.NewBool(false))
+		nfor.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, hb, ir.NewBool(base.Pos, false))
 		lhs := []ir.Node{hv1, hb}
 		rhs := []ir.Node{ir.NewUnaryExpr(base.Pos, ir.ORECV, ha)}
 		a := ir.NewAssignListStmt(base.Pos, ir.OAS2RECV, lhs, rhs)
@@ -317,10 +317,10 @@ func walkRange(nrange *ir.RangeStmt) ir.Node {
 
 		// if hv2 < utf8.RuneSelf
 		nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
-		nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, hv2, ir.NewInt(utf8.RuneSelf))
+		nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, hv2, ir.NewInt(base.Pos, utf8.RuneSelf))
 
 		// hv1++
-		nif.Body = []ir.Node{ir.NewAssignStmt(base.Pos, hv1, ir.NewBinaryExpr(base.Pos, ir.OADD, hv1, ir.NewInt(1)))}
+		nif.Body = []ir.Node{ir.NewAssignStmt(base.Pos, hv1, ir.NewBinaryExpr(base.Pos, ir.OADD, hv1, ir.NewInt(base.Pos, 1)))}
 
 		// } else {
 		//     hv2, hv1 = decoderune(ha, hv1)
@@ -524,19 +524,19 @@ func arrayClear(wbPos src.XPos, a ir.Node, nrange *ir.RangeStmt) ir.Node {
 	//     i = len(a) - 1
 	// }
 	n := ir.NewIfStmt(base.Pos, nil, nil, nil)
-	n.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, ir.NewUnaryExpr(base.Pos, ir.OLEN, a), ir.NewInt(0))
+	n.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, ir.NewUnaryExpr(base.Pos, ir.OLEN, a), ir.NewInt(base.Pos, 0))
 
 	// hp = &a[0]
 	hp := typecheck.Temp(types.Types[types.TUNSAFEPTR])
-	ix := ir.NewIndexExpr(base.Pos, a, ir.NewInt(0))
+	ix := ir.NewIndexExpr(base.Pos, a, ir.NewInt(base.Pos, 0))
 	ix.SetBounded(true)
 	addr := typecheck.ConvNop(typecheck.NodAddr(ix), types.Types[types.TUNSAFEPTR])
 	n.Body.Append(ir.NewAssignStmt(base.Pos, hp, addr))
 
 	// hn = len(a) * sizeof(elem(a))
 	hn := typecheck.Temp(types.Types[types.TUINTPTR])
-	mul := typecheck.Conv(ir.NewBinaryExpr(base.Pos, ir.OMUL, ir.NewUnaryExpr(base.Pos, ir.OLEN, a), ir.NewInt(elemsize)), types.Types[types.TUINTPTR])
+	mul := typecheck.Conv(ir.NewBinaryExpr(base.Pos, ir.OMUL, ir.NewUnaryExpr(base.Pos, ir.OLEN, a), ir.NewInt(base.Pos, elemsize)), types.Types[types.TUINTPTR])
 	n.Body.Append(ir.NewAssignStmt(base.Pos, hn, mul))
 
 	var fn ir.Node
@@ -553,7 +553,7 @@ func arrayClear(wbPos src.XPos, a ir.Node, nrange *ir.RangeStmt) ir.Node {
 
 	// For array range clear, also set "i = len(a) - 1"
 	if nrange != nil {
-		idx := ir.NewAssignStmt(base.Pos, nrange.Key, ir.NewBinaryExpr(base.Pos, ir.OSUB, ir.NewUnaryExpr(base.Pos, ir.OLEN, a), ir.NewInt(1)))
+		idx := ir.NewAssignStmt(base.Pos, nrange.Key, ir.NewBinaryExpr(base.Pos, ir.OSUB, ir.NewUnaryExpr(base.Pos, ir.OLEN, a), ir.NewInt(base.Pos, 1)))
 		n.Body.Append(idx)
 	}
 
@@ -570,7 +570,7 @@ func addptr(p ir.Node, n int64) ir.Node {
 	p = ir.NewConvExpr(base.Pos, ir.OCONVNOP, nil, p)
 	p.SetType(types.Types[types.TUINTPTR])
 
-	p = ir.NewBinaryExpr(base.Pos, ir.OADD, p, ir.NewInt(n))
+	p = ir.NewBinaryExpr(base.Pos, ir.OADD, p, ir.NewInt(base.Pos, n))
 
 	p = ir.NewConvExpr(base.Pos, ir.OCONVNOP, nil, p)
 	p.SetType(t)
diff --git a/src/cmd/compile/internal/walk/select.go b/src/cmd/compile/internal/walk/select.go
index 13beb70bd92ad3..c676a765bc3a63 100644
--- a/src/cmd/compile/internal/walk/select.go
+++ b/src/cmd/compile/internal/walk/select.go
@@ -157,7 +157,7 @@ func walkSelectCases(cases []*ir.CommClause) []ir.Node {
 	var pc0, pcs ir.Node
 	if base.Flag.Race {
 		pcs = typecheck.Temp(types.NewArray(types.Types[types.TUINTPTR], int64(ncas)))
-		pc0 = typecheck.Expr(typecheck.NodAddr(ir.NewIndexExpr(base.Pos, pcs, ir.NewInt(0))))
+		pc0 = typecheck.Expr(typecheck.NodAddr(ir.NewIndexExpr(base.Pos, pcs, ir.NewInt(base.Pos, 0))))
 	} else {
 		pc0 = typecheck.NodNil()
 	}
@@ -196,7 +196,7 @@ func walkSelectCases(cases []*ir.CommClause) []ir.Node {
 		casorder[i] = cas
 
 		setField := func(f string, val ir.Node) {
-			r := ir.NewAssignStmt(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, ir.NewIndexExpr(base.Pos, selv, ir.NewInt(int64(i))), typecheck.Lookup(f)), val)
+			r := ir.NewAssignStmt(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, ir.NewIndexExpr(base.Pos, selv, ir.NewInt(base.Pos, int64(i))), typecheck.Lookup(f)), val)
 			init = append(init, typecheck.Stmt(r))
 		}
 
@@ -210,7 +210,7 @@ func walkSelectCases(cases []*ir.CommClause) []ir.Node {
 		// TODO(mdempsky): There should be a cleaner way to
 		// handle this.
 		if base.Flag.Race {
-			r := mkcallstmt("selectsetpc", typecheck.NodAddr(ir.NewIndexExpr(base.Pos, pcs, ir.NewInt(int64(i)))))
+			r := mkcallstmt("selectsetpc", typecheck.NodAddr(ir.NewIndexExpr(base.Pos, pcs, ir.NewInt(base.Pos, int64(i)))))
 			init = append(init, r)
 		}
 	}
@@ -226,7 +226,7 @@ func walkSelectCases(cases []*ir.CommClause) []ir.Node {
 	r.Lhs = []ir.Node{chosen, recvOK}
 	fn := typecheck.LookupRuntime("selectgo")
 	var fnInit ir.Nodes
-	r.Rhs = []ir.Node{mkcall1(fn, fn.Type().Results(), &fnInit, bytePtrToIndex(selv, 0), bytePtrToIndex(order, 0), pc0, ir.NewInt(int64(nsends)), ir.NewInt(int64(nrecvs)), ir.NewBool(dflt == nil))}
+	r.Rhs = []ir.Node{mkcall1(fn, fn.Type().Results(), &fnInit, bytePtrToIndex(selv, 0), bytePtrToIndex(order, 0), pc0, ir.NewInt(base.Pos, int64(nsends)), ir.NewInt(base.Pos, int64(nrecvs)), ir.NewBool(base.Pos, dflt == nil))}
 	init = append(init, fnInit...)
 	init = append(init, typecheck.Stmt(r))
 
@@ -261,7 +261,7 @@ func walkSelectCases(cases []*ir.CommClause) []ir.Node {
 
 	if dflt != nil {
 		ir.SetPos(dflt)
-		dispatch(ir.NewBinaryExpr(base.Pos, ir.OLT, chosen, ir.NewInt(0)), dflt)
+		dispatch(ir.NewBinaryExpr(base.Pos, ir.OLT, chosen, ir.NewInt(base.Pos, 0)), dflt)
 	}
 	for i, cas := range casorder {
 		ir.SetPos(cas)
		if i == len(casorder)-1 {
 			dispatch(nil, cas)
 			break
 		}
-		dispatch(ir.NewBinaryExpr(base.Pos, ir.OEQ, chosen, ir.NewInt(int64(i))), cas)
+		dispatch(ir.NewBinaryExpr(base.Pos, ir.OEQ, chosen, ir.NewInt(base.Pos, int64(i))), cas)
 	}
 
 	return init
@@ -277,7 +277,7 @@ func walkSelectCases(cases []*ir.CommClause) []ir.Node {
 
 // bytePtrToIndex returns a Node representing "(*byte)(&n[i])".
 func bytePtrToIndex(n ir.Node, i int64) ir.Node {
-	s := typecheck.NodAddr(ir.NewIndexExpr(base.Pos, n, ir.NewInt(i)))
+	s := typecheck.NodAddr(ir.NewIndexExpr(base.Pos, n, ir.NewInt(base.Pos, i)))
 	t := types.NewPtr(types.Types[types.TUINT8])
 	return typecheck.ConvNop(s, t)
 }
diff --git a/src/cmd/compile/internal/walk/switch.go b/src/cmd/compile/internal/walk/switch.go
index 67af2ae57ebd10..1a167d363ecc8a 100644
--- a/src/cmd/compile/internal/walk/switch.go
+++ b/src/cmd/compile/internal/walk/switch.go
@@ -42,7 +42,7 @@ func walkSwitchExpr(sw *ir.SwitchStmt) {
 
 	// convert switch {...} to switch true {...}
 	if cond == nil {
-		cond = ir.NewBool(true)
+		cond = ir.NewBool(base.Pos, true)
 		cond = typecheck.Expr(cond)
 		cond = typecheck.DefaultLit(cond, nil)
 	}
@@ -620,13 +620,13 @@ func (s *typeSwitch) flush() {
 	// TODO: figure out if we could use a jump table using some low bits of the type hashes.
 	binarySearch(len(cc), &s.done,
 		func(i int) ir.Node {
-			return ir.NewBinaryExpr(base.Pos, ir.OLE, s.hashname, ir.NewInt(int64(cc[i-1].hash)))
+			return ir.NewBinaryExpr(base.Pos, ir.OLE, s.hashname, ir.NewInt(base.Pos, int64(cc[i-1].hash)))
 		},
 		func(i int, nif *ir.IfStmt) {
 			// TODO(mdempsky): Omit hash equality check if
 			// there's only one type.
 			c := cc[i]
-			nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, s.hashname, ir.NewInt(int64(c.hash)))
+			nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, s.hashname, ir.NewInt(base.Pos, int64(c.hash)))
 			nif.Body.Append(c.body.Take()...)
 		},
 	)
@@ -737,9 +737,9 @@ func stringSearch(expr ir.Node, cc []exprClause, out *ir.Nodes) {
 		slice := ir.NewConvExpr(base.Pos, ir.OSTR2BYTESTMP, types.NewSlice(types.Types[types.TINT8]), expr)
 		slice.SetTypecheck(1) // legacy typechecker doesn't handle this op
 		// Load the byte we're splitting on.
-		load := ir.NewIndexExpr(base.Pos, slice, ir.NewInt(int64(bestIdx)))
+		load := ir.NewIndexExpr(base.Pos, slice, ir.NewInt(base.Pos, int64(bestIdx)))
 		// Compare with the value we're splitting on.
-		cmp := ir.Node(ir.NewBinaryExpr(base.Pos, ir.OLE, load, ir.NewInt(int64(bestByte))))
+		cmp := ir.Node(ir.NewBinaryExpr(base.Pos, ir.OLE, load, ir.NewInt(base.Pos, int64(bestByte))))
 		cmp = typecheck.DefaultLit(typecheck.Expr(cmp), nil)
 		nif := ir.NewIfStmt(base.Pos, cmp, nil, nil)
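
The shape of the change at a call site is sketched below. The three constructors gain an explicit src.XPos first argument instead of silently reading the global base.Pos, and every call site updated in this CL simply forwards base.Pos, so the positions attached to generated nodes are unchanged. The variables zero and lit and the node n in the sketch are illustrative only and do not appear in this CL; the identifiers come from the compiler-internal packages cmd/compile/internal/ir, cmd/compile/internal/base, and cmd/internal/src, so the snippet only compiles inside cmd/compile.

	// Old API: the position came implicitly from the global base.Pos.
	//
	//	zero := ir.NewInt(0)
	//
	// New API: the position is explicit. Forwarding base.Pos reproduces
	// the old behavior, which is what this CL does at every call site.
	zero := ir.NewInt(base.Pos, 0)

	// A caller that has a more precise position to hand, for example
	// from an existing node n, can now pass it directly instead of
	// relying on base.Pos being set correctly at that point.
	lit := ir.NewString(n.Pos(), "hello")
	_, _ = zero, lit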