[dev.typeparams] cmd/compile: rewrite method calls during typecheck

CL 330671 made escape analysis rewrite method calls into method
expression calls. This CL moves that rewriting up even further, into
typecheck. That simplifies the code of downstream passes, since they
now only have to deal with OCALLFUNC.
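
To make the rewrite concrete, here is a minimal, Go-level sketch of the
equivalence involved (ordinary source code, not compiler IR): a method
call and the corresponding method expression call, in which the
receiver becomes the first explicit argument.

    package main

    import (
        "fmt"
        "strings"
    )

    func main() {
        r := strings.NewReader("gopher")
        buf := make([]byte, 3)

        // Ordinary method call: the front end initially types this as
        // an OCALLMETH node with an ODOTMETH callee.
        n1, _ := r.Read(buf)

        // Equivalent method expression call: the receiver is passed
        // explicitly as the first argument. After this CL, typecheck
        // rewrites method calls into this OCALLFUNC/OMETHEXPR shape.
        n2, _ := (*strings.Reader).Read(r, buf)

        fmt.Println(n1, n2) // 3 3
    }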

There are two notes:

 - For -G=3, we can't rewrite d.M() in transformCall when d is an
   instantiated receiver; instead, we let irgen.stencil rewrite it.

 - Escape analysis still has to check for and rewrite method calls,
   since the devirtualization pass can still generate OCALLMETH (a
   Go-level sketch of such a call follows these notes).
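
As a rough Go-level illustration of the second note (the greeter/eng
type and method names below are made up for illustration, not taken
from the compiler or this CL): the devirtualization pass can prove the
dynamic type behind an interface call and lower it to a direct method
call, so an OCALLMETH can reappear after typecheck.

    package main

    import "fmt"

    // greeter and eng are hypothetical types, used only to show the
    // shape of a devirtualizable call.
    type greeter interface{ greet() string }

    type eng struct{}

    func (eng) greet() string { return "hello" }

    func main() {
        var g greeter = eng{}

        // The front end sees an interface call (OCALLINTER) here, but
        // the devirtualization pass can prove g's dynamic type is eng
        // and turn it into a direct method call (OCALLMETH), which is
        // why escape analysis still has to cope with that op.
        fmt.Println(g.greet())

        // The devirtualized call behaves like this direct form:
        fmt.Println(eng.greet(eng{}))
    }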

Does not pass toolstash, since the export data now contains method
expression calls instead of method calls.

Change-Id: I77465ef04d50dc4efedddca7eb55b3fc9483db0e
Reviewed-on: https://go-review.googlesource.com/c/go/+/330831
Trust: Cuong Manh Le <cuong.manhle.vn@gmail.com>
Run-TryBot: Cuong Manh Le <cuong.manhle.vn@gmail.com>
TryBot-Result: Go Bot <gobot@golang.org>
Reviewed-by: Matthew Dempsky <mdempsky@google.com>
Cuong Manh Le 2021-06-25 22:24:29 +07:00
parent 180c338c68
commit 0cf71f7f92
10 changed files with 65 additions and 60 deletions

View File

@@ -68,7 +68,7 @@ func (e *escape) callCommon(ks []hole, call ir.Node, init *ir.Nodes, wrapper *ir
fn = ir.MethodExprName(v)
}
case ir.OCALLMETH:
fn = ir.MethodExprName(call.X)
base.FatalfAt(call.Pos(), "OCALLMETH missed by typecheck")
}
fntype := call.X.Type()

View File

@@ -273,6 +273,19 @@ func (v *hairyVisitor) doNode(n ir.Node) bool {
}
}
}
if n.X.Op() == ir.OMETHEXPR {
if meth := ir.MethodExprName(n.X); meth != nil {
fn := meth.Func
if fn != nil && types.IsRuntimePkg(fn.Sym().Pkg) && fn.Sym().Name == "heapBits.nextArena" {
// Special case: explicitly allow
// mid-stack inlining of
// runtime.heapBits.next even though
// it calls slow-path
// runtime.heapBits.nextArena.
break
}
}
}
if ir.IsIntrinsicCall(n) {
// Treat like any other node.
@@ -287,28 +300,8 @@ func (v *hairyVisitor) doNode(n ir.Node) bool {
// Call cost for non-leaf inlining.
v.budget -= v.extraCallCost
// Call is okay if inlinable and we have the budget for the body.
case ir.OCALLMETH:
n := n.(*ir.CallExpr)
t := n.X.Type()
if t == nil {
base.Fatalf("no function type for [%p] %+v\n", n.X, n.X)
}
fn := ir.MethodExprName(n.X).Func
if types.IsRuntimePkg(fn.Sym().Pkg) && fn.Sym().Name == "heapBits.nextArena" {
// Special case: explicitly allow
// mid-stack inlining of
// runtime.heapBits.next even though
// it calls slow-path
// runtime.heapBits.nextArena.
break
}
if fn.Inl != nil {
v.budget -= fn.Inl.Cost
break
}
// Call cost for non-leaf inlining.
v.budget -= v.extraCallCost
base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
// Things that are too hairy, irrespective of the budget
case ir.OCALL, ir.OCALLINTER:
@@ -575,7 +568,9 @@ func inlnode(n ir.Node, maxCost int32, inlMap map[*ir.Func]bool, edit func(ir.No
case ir.ODEFER, ir.OGO:
n := n.(*ir.GoDeferStmt)
switch call := n.Call; call.Op() {
case ir.OCALLFUNC, ir.OCALLMETH:
case ir.OCALLMETH:
base.FatalfAt(call.Pos(), "OCALLMETH missed by typecheck")
case ir.OCALLFUNC:
call := call.(*ir.CallExpr)
call.NoInline = true
}
@@ -585,11 +580,18 @@ func inlnode(n ir.Node, maxCost int32, inlMap map[*ir.Func]bool, edit func(ir.No
case ir.OCLOSURE:
return n
case ir.OCALLMETH:
// Prevent inlining some reflect.Value methods when using checkptr,
// even when package reflect was compiled without it (#35073).
base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
case ir.OCALLFUNC:
n := n.(*ir.CallExpr)
if s := ir.MethodExprName(n.X).Sym(); base.Debug.Checkptr != 0 && types.IsReflectPkg(s.Pkg) && (s.Name == "Value.UnsafeAddr" || s.Name == "Value.Pointer") {
return n
if n.X.Op() == ir.OMETHEXPR {
// Prevent inlining some reflect.Value methods when using checkptr,
// even when package reflect was compiled without it (#35073).
if meth := ir.MethodExprName(n.X); meth != nil {
s := meth.Sym()
if base.Debug.Checkptr != 0 && types.IsReflectPkg(s.Pkg) && (s.Name == "Value.UnsafeAddr" || s.Name == "Value.Pointer") {
return n
}
}
}
}
@@ -611,7 +613,9 @@ func inlnode(n ir.Node, maxCost int32, inlMap map[*ir.Func]bool, edit func(ir.No
// transmogrify this node itself unless inhibited by the
// switch at the top of this function.
switch n.Op() {
case ir.OCALLFUNC, ir.OCALLMETH:
case ir.OCALLMETH:
base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
case ir.OCALLFUNC:
n := n.(*ir.CallExpr)
if n.NoInline {
return n
@@ -631,19 +635,8 @@ func inlnode(n ir.Node, maxCost int32, inlMap map[*ir.Func]bool, edit func(ir.No
if fn := inlCallee(call.X); fn != nil && fn.Inl != nil {
n = mkinlcall(call, fn, maxCost, inlMap, edit)
}
case ir.OCALLMETH:
call = n.(*ir.CallExpr)
if base.Flag.LowerM > 3 {
fmt.Printf("%v:call to meth %v\n", ir.Line(n), call.X.(*ir.SelectorExpr).Sel)
}
// typecheck should have resolved ODOTMETH->type, whose nname points to the actual function.
if call.X.Type() == nil {
base.Fatalf("no function type for [%p] %+v\n", call.X, call.X)
}
n = mkinlcall(call, ir.MethodExprName(call.X).Func, maxCost, inlMap, edit)
base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
}
base.Pos = lno
@@ -723,7 +716,7 @@ var SSADumpInline = func(*ir.Func) {}
// instead.
var NewInline = func(call *ir.CallExpr, fn *ir.Func, inlIndex int) *ir.InlinedCallExpr { return nil }
// If n is a call node (OCALLFUNC or OCALLMETH), and fn is an ONAME node for a
// If n is a OCALLFUNC node, and fn is an ONAME node for a
// function with an inlinable body, return an OINLCALL node that can replace n.
// The returned node's Ninit has the parameter assignments, the Nbody is the
// inlined function body, and (List, Rlist) contain the (input, output)
@@ -906,11 +899,7 @@ func oldInline(call *ir.CallExpr, fn *ir.Func, inlIndex int) *ir.InlinedCallExpr
as := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
as.Def = true
if call.Op() == ir.OCALLMETH {
sel := call.X.(*ir.SelectorExpr)
if sel.X == nil {
base.Fatalf("method call without receiver: %+v", call)
}
as.Rhs.Append(sel.X)
base.FatalfAt(call.Pos(), "OCALLMETH missed by typecheck")
}
as.Rhs.Append(call.Args...)

View File

@@ -1807,8 +1807,7 @@ func InlineCall(call *ir.CallExpr, fn *ir.Func, inlIndex int) *ir.InlinedCallExp
var args ir.Nodes
if call.Op() == ir.OCALLMETH {
assert(call.X.Op() == ir.ODOTMETH)
args.Append(call.X.(*ir.SelectorExpr).X)
base.FatalfAt(call.Pos(), "OCALLMETH missed by typecheck")
}
args.Append(call.Args...)

View File

@@ -149,6 +149,9 @@ func transformCall(n *ir.CallExpr) {
}
typecheckaste(ir.OCALL, n.X, n.IsDDD, t.Params(), n.Args)
if l.Op() == ir.ODOTMETH && len(deref(n.X.Type().Recv().Type).RParams()) == 0 {
typecheck.FixMethodCall(n)
}
if t.NumResults() == 1 {
n.SetType(l.Type().Results().Field(0).Type)

View File

@@ -516,6 +516,7 @@ func tcCall(n *ir.CallExpr, top int) ir.Node {
}
typecheckaste(ir.OCALL, n.X, n.IsDDD, t.Params(), n.Args, func() string { return fmt.Sprintf("argument to %v", n.X) })
FixMethodCall(n)
if t.NumResults() == 0 {
return n
}

View File

@@ -462,7 +462,9 @@ func walkCheckPtrArithmetic(n *ir.ConvExpr, init *ir.Nodes) ir.Node {
// TODO(mdempsky): Make stricter. We only need to exempt
// reflect.Value.Pointer and reflect.Value.UnsafeAddr.
switch n.X.Op() {
case ir.OCALLFUNC, ir.OCALLMETH, ir.OCALLINTER:
case ir.OCALLMETH:
base.FatalfAt(n.X.Pos(), "OCALLMETH missed by typecheck")
case ir.OCALLFUNC, ir.OCALLINTER:
return n
}

View File

@@ -167,7 +167,7 @@ func walkExpr1(n ir.Node, init *ir.Nodes) ir.Node {
case ir.OCFUNC:
return n
case ir.OCALLINTER, ir.OCALLFUNC, ir.OCALLMETH:
case ir.OCALLINTER, ir.OCALLFUNC:
n := n.(*ir.CallExpr)
return walkCall(n, init)
@@ -487,9 +487,12 @@ func walkAddString(n *ir.AddStringExpr, init *ir.Nodes) ir.Node {
return r1
}
// walkCall walks an OCALLFUNC, OCALLINTER, or OCALLMETH node.
// walkCall walks an OCALLFUNC or OCALLINTER node.
func walkCall(n *ir.CallExpr, init *ir.Nodes) ir.Node {
if n.Op() == ir.OCALLINTER || n.Op() == ir.OCALLMETH || n.X.Op() == ir.OMETHEXPR {
if n.Op() == ir.OCALLMETH {
base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
}
if n.Op() == ir.OCALLINTER || n.X.Op() == ir.OMETHEXPR {
// We expect both interface call reflect.Type.Method and concrete
// call reflect.(*rtype).Method.
usemethod(n)
@@ -550,7 +553,7 @@ func walkCall1(n *ir.CallExpr, init *ir.Nodes) {
n.SetWalked(true)
if n.Op() == ir.OCALLMETH {
typecheck.FixMethodCall(n)
base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
}
args := n.Args

View File

@@ -506,15 +506,18 @@ func (o *orderState) init(n ir.Node) {
}
// call orders the call expression n.
// n.Op is OCALLMETH/OCALLFUNC/OCALLINTER or a builtin like OCOPY.
// n.Op is OCALLFUNC/OCALLINTER or a builtin like OCOPY.
func (o *orderState) call(nn ir.Node) {
if len(nn.Init()) > 0 {
// Caller should have already called o.init(nn).
base.Fatalf("%v with unexpected ninit", nn.Op())
}
if nn.Op() == ir.OCALLMETH {
base.FatalfAt(nn.Pos(), "OCALLMETH missed by typecheck")
}
// Builtin functions.
if nn.Op() != ir.OCALLFUNC && nn.Op() != ir.OCALLMETH && nn.Op() != ir.OCALLINTER {
if nn.Op() != ir.OCALLFUNC && nn.Op() != ir.OCALLINTER {
switch n := nn.(type) {
default:
base.Fatalf("unexpected call: %+v", n)
@@ -707,7 +710,7 @@ func (o *orderState) stmt(n ir.Node) {
o.out = append(o.out, n)
// Special: handle call arguments.
case ir.OCALLFUNC, ir.OCALLINTER, ir.OCALLMETH:
case ir.OCALLFUNC, ir.OCALLINTER:
n := n.(*ir.CallExpr)
t := o.markTemp()
o.call(n)
@@ -1147,7 +1150,10 @@ func (o *orderState) expr1(n, lhs ir.Node) ir.Node {
case ir.OCONVNOP:
n := n.(*ir.ConvExpr)
if n.Type().IsKind(types.TUNSAFEPTR) && n.X.Type().IsKind(types.TUINTPTR) && (n.X.Op() == ir.OCALLFUNC || n.X.Op() == ir.OCALLINTER || n.X.Op() == ir.OCALLMETH) {
if n.X.Op() == ir.OCALLMETH {
base.FatalfAt(n.X.Pos(), "OCALLMETH missed by typecheck")
}
if n.Type().IsKind(types.TUNSAFEPTR) && n.X.Type().IsKind(types.TUINTPTR) && (n.X.Op() == ir.OCALLFUNC || n.X.Op() == ir.OCALLINTER) {
call := n.X.(*ir.CallExpr)
// When reordering unsafe.Pointer(f()) into a separate
// statement, the conversion and function call must stay
@@ -1200,9 +1206,12 @@ func (o *orderState) expr1(n, lhs ir.Node) ir.Node {
o.out = append(o.out, nif)
return r
case ir.OCALLMETH:
base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
panic("unreachable")
case ir.OCALLFUNC,
ir.OCALLINTER,
ir.OCALLMETH,
ir.OCAP,
ir.OCOMPLEX,
ir.OCOPY,

View File

@@ -40,7 +40,6 @@ func walkStmt(n ir.Node) ir.Node {
ir.OAS2MAPR,
ir.OCLOSE,
ir.OCOPY,
ir.OCALLMETH,
ir.OCALLINTER,
ir.OCALL,
ir.OCALLFUNC,

View File

@@ -308,7 +308,7 @@ func mayCall(n ir.Node) bool {
default:
base.FatalfAt(n.Pos(), "mayCall %+v", n)
case ir.OCALLFUNC, ir.OCALLMETH, ir.OCALLINTER,
case ir.OCALLFUNC, ir.OCALLINTER,
ir.OUNSAFEADD, ir.OUNSAFESLICE:
return true