// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package ssa

// This file implements the BUILD phase of SSA construction.
//
// SSA construction has two phases, CREATE and BUILD. In the CREATE phase
// (create.go), all packages are constructed and type-checked and
// definitions of all package members are created, method-sets are
// computed, and wrapper methods are synthesized.
// ssa.Packages are created in arbitrary order.
//
// In the BUILD phase (builder.go), the builder traverses the AST of
// each Go source function and generates SSA instructions for the
// function body. Initializer expressions for package-level variables
// are emitted to the package's init() function in the order specified
// by go/types.Info.InitOrder, then code for each function in the
// package is generated in lexical order.
// The BUILD phases for distinct packages are independent and are
// executed in parallel.
//
// TODO(adonovan): indeed, building functions is now embarrassingly parallel.
// Audit for concurrency then benchmark using more goroutines.
//
// The builder's and Program's indices (maps) are populated and
// mutated during the CREATE phase, but during the BUILD phase they
// remain constant. The sole exception is Prog.methodSets and its
// related maps, which are protected by a dedicated mutex.

import (
	"fmt"
	"go/ast"
	"go/token"
	"os"
	"sync"
	"sync/atomic"

	"code.google.com/p/go.tools/go/exact"
	"code.google.com/p/go.tools/go/types"
)

type opaqueType struct {
	types.Type
	name string
}

func (t *opaqueType) String() string { return t.name }

var (
	varOk    = types.NewVar(token.NoPos, nil, "ok", tBool)
	varIndex = types.NewVar(token.NoPos, nil, "index", tInt)

	// Type constants.
	tBool       = types.Typ[types.Bool]
	tByte       = types.Typ[types.Byte]
	tInt        = types.Typ[types.Int]
	tInvalid    = types.Typ[types.Invalid]
	tUntypedNil = types.Typ[types.UntypedNil]
	tRangeIter  = &opaqueType{nil, "iter"} // the type of all "range" iterators
	tEface      = new(types.Interface)

	// SSA Value constants.
	vZero  = intConst(0)
	vOne   = intConst(1)
	vTrue  = NewConst(exact.MakeBool(true), tBool)
	vFalse = NewConst(exact.MakeBool(false), tBool)
)

// builder holds state associated with the package currently being built.
// Its methods contain all the logic for AST-to-SSA conversion.
type builder struct{}

// cond emits to fn code to evaluate boolean condition e and jump
// to t or f depending on its value, performing various simplifications.
//
// Postcondition: fn.currentBlock is nil.
//
func (b *builder) cond(fn *Function, e ast.Expr, t, f *BasicBlock) {
	switch e := e.(type) {
	case *ast.ParenExpr:
		b.cond(fn, e.X, t, f)
		return

	case *ast.BinaryExpr:
		switch e.Op {
		case token.LAND:
			ltrue := fn.newBasicBlock("cond.true")
			b.cond(fn, e.X, ltrue, f)
			fn.currentBlock = ltrue
			b.cond(fn, e.Y, t, f)
			return

		case token.LOR:
			lfalse := fn.newBasicBlock("cond.false")
			b.cond(fn, e.X, t, lfalse)
			fn.currentBlock = lfalse
			b.cond(fn, e.Y, t, f)
			return
		}

	case *ast.UnaryExpr:
		if e.Op == token.NOT {
			b.cond(fn, e.X, f, t)
			return
		}
	}

	switch cond := b.expr(fn, e).(type) {
	case *Const:
		// Dispatch constant conditions statically.
		if exact.BoolVal(cond.Value) {
			emitJump(fn, t)
		} else {
			emitJump(fn, f)
		}
	default:
		emitIf(fn, cond, t, f)
	}
}
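
// Illustrative sketch (not part of the builder's API): for a source
// condition such as
//
//	if x != nil && x.ok { ... }
//
// cond(fn, e, t, f) emits a test of x != nil that branches to f directly
// when false and to a new "cond.true" block otherwise, where x.ok is then
// tested; no boolean value for the whole && expression is materialized.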

// logicalBinop emits code to fn to evaluate e, a &&- or
// ||-expression whose reified boolean value is wanted.
// The value is returned.
//
func (b *builder) logicalBinop(fn *Function, e *ast.BinaryExpr) Value {
	rhs := fn.newBasicBlock("binop.rhs")
	done := fn.newBasicBlock("binop.done")

	// T(e) = T(e.X) = T(e.Y) after untyped constants have been
	// eliminated.
	// TODO(adonovan): not true; MyBool==MyBool yields UntypedBool.
	t := fn.Pkg.typeOf(e)

	var short Value // value of the short-circuit path
	switch e.Op {
	case token.LAND:
		b.cond(fn, e.X, rhs, done)
		short = NewConst(exact.MakeBool(false), t)

	case token.LOR:
		b.cond(fn, e.X, done, rhs)
		short = NewConst(exact.MakeBool(true), t)
	}

	// Is rhs unreachable?
	if rhs.Preds == nil {
		// Simplify false&&y to false, true||y to true.
		fn.currentBlock = done
		return short
	}

	// Is done unreachable?
	if done.Preds == nil {
		// Simplify true&&y (or false||y) to y.
		fn.currentBlock = rhs
		return b.expr(fn, e.Y)
	}

	// All edges from e.X to done carry the short-circuit value.
	var edges []Value
	for _ = range done.Preds {
		edges = append(edges, short)
	}

	// The edge from e.Y to done carries the value of e.Y.
	fn.currentBlock = rhs
	edges = append(edges, b.expr(fn, e.Y))
	emitJump(fn, done)
	fn.currentBlock = done

	phi := &Phi{Edges: edges, Comment: e.Op.String()}
	phi.pos = e.OpPos
	phi.typ = t
	return done.emit(phi)
}
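
// Illustrative sketch: for e = (a || b) used as a value, logicalBinop
// emits roughly
//
//	binop.rhs:            // reached only if a is false
//		t0 = b
//		jump binop.done
//	binop.done:
//		t1 = phi [short-circuit edges: true, rhs edge: t0]
//
// so the phi carries the constant true on every edge that bypassed the
// right-hand side.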

// exprN lowers a multi-result expression e to SSA form, emitting code
// to fn and returning a single Value whose type is a *types.Tuple.
// The caller must access the components via Extract.
//
// Multi-result expressions include CallExprs in a multi-value
// assignment or return statement, and "value,ok" uses of
// TypeAssertExpr, IndexExpr (when X is a map), and UnaryExpr (when Op
// is token.ARROW).
//
func (b *builder) exprN(fn *Function, e ast.Expr) Value {
	var typ types.Type
	var tuple Value
	switch e := e.(type) {
	case *ast.ParenExpr:
		return b.exprN(fn, e.X)

	case *ast.CallExpr:
		// Currently, no built-in function nor type conversion
		// has multiple results, so we can avoid some of the
		// cases for single-valued CallExpr.
		var c Call
		b.setCall(fn, e, &c.Call)
		c.typ = fn.Pkg.typeOf(e)
		return fn.emit(&c)

	case *ast.IndexExpr:
		mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map)
		typ = mapt.Elem()
		lookup := &Lookup{
			X:       b.expr(fn, e.X),
			Index:   emitConv(fn, b.expr(fn, e.Index), mapt.Key()),
			CommaOk: true,
		}
		lookup.setPos(e.Lbrack)
		tuple = fn.emit(lookup)

	case *ast.TypeAssertExpr:
		t := fn.Pkg.typeOf(e).(*types.Tuple).At(0).Type()
		return emitTypeTest(fn, b.expr(fn, e.X), t, e.Lparen)

	case *ast.UnaryExpr: // must be receive <-
		typ = fn.Pkg.typeOf(e.X).Underlying().(*types.Chan).Elem()
		unop := &UnOp{
			Op:      token.ARROW,
			X:       b.expr(fn, e.X),
			CommaOk: true,
		}
		unop.setPos(e.OpPos)
		tuple = fn.emit(unop)

	default:
		panic(fmt.Sprintf("unexpected exprN: %T", e))
	}

	// The typechecker sets the type of the expression to just the
	// asserted type in the "value, ok" form, not to *types.Tuple
	// (though it includes the valueOk operand in its error messages).

	tuple.(interface {
		setType(types.Type)
	}).setType(types.NewTuple(
		types.NewVar(token.NoPos, nil, "value", typ),
		varOk,
	))
	return tuple
}
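
// Illustrative examples of the forms exprN handles (sketch, not exhaustive):
//
//	v, ok := m[k]      // IndexExpr on a map: Lookup with CommaOk
//	v, ok := <-ch      // UnaryExpr with ARROW: UnOp with CommaOk
//	v, ok := x.(T)     // TypeAssertExpr: emitTypeTest
//	a, b := f()        // CallExpr with multiple results
//
// In each case the caller extracts the components with Extract.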

// builtin emits to fn SSA instructions to implement a call to the
// built-in function obj with the specified arguments
// and return type. It returns the value defined by the result.
//
// The result is nil if no special handling was required; in this case
// the caller should treat this like an ordinary library function
// call.
//
func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ types.Type, pos token.Pos) Value {
	switch obj.Name() {
	case "make":
		switch typ.Underlying().(type) {
		case *types.Slice:
			n := b.expr(fn, args[1])
			m := n
			if len(args) == 3 {
				m = b.expr(fn, args[2])
			}
			v := &MakeSlice{
				Len: n,
				Cap: m,
			}
			v.setPos(pos)
			v.setType(typ)
			return fn.emit(v)

		case *types.Map:
			var res Value
			if len(args) == 2 {
				res = b.expr(fn, args[1])
			}
			v := &MakeMap{Reserve: res}
			v.setPos(pos)
			v.setType(typ)
			return fn.emit(v)

		case *types.Chan:
			var sz Value = vZero
			if len(args) == 2 {
				sz = b.expr(fn, args[1])
			}
			v := &MakeChan{Size: sz}
			v.setPos(pos)
			v.setType(typ)
			return fn.emit(v)
		}

	case "new":
		alloc := emitNew(fn, deref(typ), pos)
		alloc.Comment = "new"
		return alloc

	case "len", "cap":
		// Special case: len or cap of an array or *array is
		// based on the type, not the value which may be nil.
		// We must still evaluate the value, though. (If it
		// was side-effect free, the whole call would have
		// been constant-folded.)
		t := deref(fn.Pkg.typeOf(args[0])).Underlying()
		if at, ok := t.(*types.Array); ok {
			b.expr(fn, args[0]) // for effects only
			return intConst(at.Len())
		}
		// Otherwise treat as normal.

	case "panic":
		fn.emit(&Panic{
			X:   emitConv(fn, b.expr(fn, args[0]), tEface),
			pos: pos,
		})
		fn.currentBlock = fn.newBasicBlock("unreachable")
		return vFalse // any non-nil Value will do
	}
	return nil // treat all others as a regular function call
}
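
// Illustrative sketch: a call such as make(map[string]int, n) reaches
// builtin with obj.Name() == "make" and is lowered to a MakeMap whose
// Reserve operand is the value of n; make([]T, n, m) becomes
// MakeSlice{Len: n, Cap: m}. Built-ins not special-cased here (e.g.
// copy, append) return nil and are handled as ordinary calls by the
// caller.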

// addr lowers a single-result addressable expression e to SSA form,
// emitting code to fn and returning the location (an lvalue) defined
// by the expression.
//
// If escaping is true, addr marks the base variable of the
// addressable expression e as being a potentially escaping pointer
// value. For example, in this code:
//
//	a := A{
//		b: [1]B{B{c: 1}}
//	}
//	return &a.b[0].c
//
// the application of & causes a.b[0].c to have its address taken,
// which means that ultimately the local variable a must be
// heap-allocated. This is a simple but very conservative escape
// analysis.
//
// Operations forming potentially escaping pointers include:
// - &x, including when implicit in method call or composite literals.
// - a[:] iff a is an array (not *array)
// - references to variables in lexically enclosing functions.
//
func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue {
	switch e := e.(type) {
	case *ast.Ident:
		if isBlankIdent(e) {
			return blank{}
		}
		obj := fn.Pkg.objectOf(e)
		v := fn.Prog.packageLevelValue(obj) // var (address)
		if v == nil {
			v = fn.lookup(obj, escaping)
		}
		return &address{addr: v, expr: e}

	case *ast.CompositeLit:
		t := deref(fn.Pkg.typeOf(e))
		var v *Alloc
		if escaping {
			v = emitNew(fn, t, e.Lbrace)
		} else {
			v = fn.addLocal(t, e.Lbrace)
		}
		v.Comment = "complit"
		b.compLit(fn, v, e) // initialize in place
		return &address{addr: v, expr: e}

	case *ast.ParenExpr:
		return b.addr(fn, e.X, escaping)

	case *ast.SelectorExpr:
		switch sel := fn.Pkg.info.Selections[e]; sel.Kind() {
		case types.PackageObj:
			obj := sel.Obj()
			if v := fn.Prog.packageLevelValue(obj); v != nil {
				return &address{addr: v, expr: e}
			}
			panic("undefined package-qualified name: " + obj.Name())

		case types.FieldVal:
			wantAddr := true
			v := b.receiver(fn, e.X, wantAddr, escaping, sel)
			last := len(sel.Index()) - 1
			return &address{
				addr: emitFieldSelection(fn, v, sel.Index()[last], true, e.Sel.Pos()),
				expr: e.Sel,
			}
		}

	case *ast.IndexExpr:
		var x Value
		var et types.Type
		switch t := fn.Pkg.typeOf(e.X).Underlying().(type) {
		case *types.Array:
			x = b.addr(fn, e.X, escaping).address(fn)
			et = types.NewPointer(t.Elem())
		case *types.Pointer: // *array
			x = b.expr(fn, e.X)
			et = types.NewPointer(t.Elem().Underlying().(*types.Array).Elem())
		case *types.Slice:
			x = b.expr(fn, e.X)
			et = types.NewPointer(t.Elem())
		case *types.Map:
			return &element{
				m:   b.expr(fn, e.X),
				k:   emitConv(fn, b.expr(fn, e.Index), t.Key()),
				t:   t.Elem(),
				pos: e.Lbrack,
			}
		default:
			panic("unexpected container type in IndexExpr: " + t.String())
		}
		v := &IndexAddr{
			X:     x,
			Index: emitConv(fn, b.expr(fn, e.Index), tInt),
		}
		v.setPos(e.Lbrack)
		v.setType(et)
		return &address{addr: fn.emit(v), expr: e}

	case *ast.StarExpr:
		return &address{addr: b.expr(fn, e.X), starPos: e.Star, expr: e}
	}

	panic(fmt.Sprintf("unexpected address expression: %T", e))
}

// exprInPlace emits to fn code to initialize the lvalue loc with the
// value of expression e.
//
// This is equivalent to loc.store(fn, b.expr(fn, e)) but may
// generate better code in some cases, e.g. for composite literals
// in an addressable location.
//
func (b *builder) exprInPlace(fn *Function, loc lvalue, e ast.Expr) {
	if e, ok := unparen(e).(*ast.CompositeLit); ok {
		// A CompositeLit never evaluates to a pointer,
		// so if the type of the location is a pointer,
		// an &-operation is implied.
		if _, ok := loc.(blank); !ok { // avoid calling blank.typ()
			if isPointer(loc.typ()) {
				ptr := b.addr(fn, e, true).address(fn)
				loc.store(fn, ptr) // copy address
				return
			}
		}

		if _, ok := loc.(*address); ok {
			if _, ok := loc.typ().Underlying().(*types.Interface); ok {
				// e.g. var x interface{} = T{...}
				// Can't in-place initialize an interface value.
				// Fall back to copying.
			} else {
				addr := loc.address(fn)
				b.compLit(fn, addr, e) // in place
				emitDebugRef(fn, e, addr, true)
				return
			}
		}
	}
	loc.store(fn, b.expr(fn, e)) // copy value
}
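
// Illustrative sketch: given
//
//	var a [2]int = [2]int{1, 2} // composite literal, addressable loc
//
// exprInPlace initializes a's cells directly via compLit, avoiding a
// temporary array and copy, whereas for
//
//	var i interface{} = T{...}
//
// it falls back to loc.store of the built value, since an interface
// location cannot be initialized in place.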

// expr lowers a single-result expression e to SSA form, emitting code
// to fn and returning the Value defined by the expression.
//
func (b *builder) expr(fn *Function, e ast.Expr) Value {
	// Is expression a constant?
	if v := fn.Pkg.info.ValueOf(e); v != nil {
		return NewConst(v, fn.Pkg.typeOf(e))
	}
	e = unparen(e)
	v := b.expr0(fn, e)
	if fn.debugInfo() {
		emitDebugRef(fn, e, v, false)
	}
	return v
}

func (b *builder) expr0(fn *Function, e ast.Expr) Value {
	switch e := e.(type) {
	case *ast.BasicLit:
		panic("non-constant BasicLit") // unreachable

	case *ast.FuncLit:
		posn := fn.Prog.Fset.Position(e.Type.Func)
		fn2 := &Function{
			name:      fmt.Sprintf("func@%d.%d", posn.Line, posn.Column),
			Signature: fn.Pkg.typeOf(e.Type).Underlying().(*types.Signature),
			pos:       e.Type.Func,
			Enclosing: fn,
			Pkg:       fn.Pkg,
			Prog:      fn.Prog,
			syntax:    e,
		}
		fn.AnonFuncs = append(fn.AnonFuncs, fn2)
		b.buildFunction(fn2)
		if fn2.FreeVars == nil {
			return fn2
		}
		v := &MakeClosure{Fn: fn2}
		v.setType(fn.Pkg.typeOf(e))
		for _, fv := range fn2.FreeVars {
			v.Bindings = append(v.Bindings, fv.outer)
			fv.outer = nil
		}
		return fn.emit(v)

	case *ast.TypeAssertExpr: // single-result form only
		return emitTypeAssert(fn, b.expr(fn, e.X), fn.Pkg.typeOf(e), e.Lparen)

	case *ast.CallExpr:
		typ := fn.Pkg.typeOf(e)
		if fn.Pkg.info.IsType(e.Fun) {
			// Explicit type conversion, e.g. string(x) or big.Int(x)
			x := b.expr(fn, e.Args[0])
			y := emitConv(fn, x, typ)
			if y != x {
				switch y := y.(type) {
				case *Convert:
					y.pos = e.Lparen
				case *ChangeType:
					y.pos = e.Lparen
				case *MakeInterface:
					y.pos = e.Lparen
				}
			}
			return y
		}
		// Call to "intrinsic" built-ins, e.g. new, make, panic.
		if id, ok := unparen(e.Fun).(*ast.Ident); ok {
			if obj, ok := fn.Pkg.objectOf(id).(*types.Builtin); ok {
				if v := b.builtin(fn, obj, e.Args, typ, e.Lparen); v != nil {
					return v
				}
			}
		}
		// Regular function call.
		var v Call
		b.setCall(fn, e, &v.Call)
		v.setType(typ)
		return fn.emit(&v)

	case *ast.UnaryExpr:
		switch e.Op {
		case token.AND: // &X --- potentially escaping.
			addr := b.addr(fn, e.X, true)
			if _, ok := unparen(e.X).(*ast.StarExpr); ok {
				// &*p must panic if p is nil (http://golang.org/s/go12nil).
				// For simplicity, we'll just (suboptimally) rely
				// on the side effects of a load.
				addr.load(fn)
			}
			return addr.address(fn)
		case token.ADD:
			return b.expr(fn, e.X)
		case token.NOT, token.ARROW, token.SUB, token.XOR: // ! <- - ^
			v := &UnOp{
				Op: e.Op,
				X:  b.expr(fn, e.X),
			}
			v.setPos(e.OpPos)
			v.setType(fn.Pkg.typeOf(e))
			return fn.emit(v)
		default:
			panic(e.Op)
		}

	case *ast.BinaryExpr:
		switch e.Op {
		case token.LAND, token.LOR:
			return b.logicalBinop(fn, e)
		case token.SHL, token.SHR:
			fallthrough
		case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT:
			return emitArith(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), fn.Pkg.typeOf(e), e.OpPos)

		case token.EQL, token.NEQ, token.GTR, token.LSS, token.LEQ, token.GEQ:
			cmp := emitCompare(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), e.OpPos)
			// The type of x==y may be UntypedBool.
			return emitConv(fn, cmp, DefaultType(fn.Pkg.typeOf(e)))
		default:
			panic("illegal op in BinaryExpr: " + e.Op.String())
		}

	case *ast.SliceExpr:
		var low, high Value
		var x Value
		switch fn.Pkg.typeOf(e.X).Underlying().(type) {
		case *types.Array:
			// Potentially escaping.
			x = b.addr(fn, e.X, true).address(fn)
		case *types.Basic, *types.Slice, *types.Pointer: // *array
			x = b.expr(fn, e.X)
		default:
			unreachable()
		}
		if e.High != nil {
			high = b.expr(fn, e.High)
		}
		if e.Low != nil {
			low = b.expr(fn, e.Low)
		}
		v := &Slice{
			X:    x,
			Low:  low,
			High: high,
		}
		v.setPos(e.Lbrack)
		v.setType(fn.Pkg.typeOf(e))
		return fn.emit(v)

	case *ast.Ident:
		obj := fn.Pkg.objectOf(e)
		// Universal built-in or nil?
		switch obj := obj.(type) {
		case *types.Builtin:
			return fn.Prog.builtins[obj]
		case *types.Nil:
			return nilConst(fn.Pkg.typeOf(e))
		}
		// Package-level func or var?
		if v := fn.Prog.packageLevelValue(obj); v != nil {
			if _, ok := obj.(*types.Var); ok {
				return emitLoad(fn, v) // var (address)
			}
			return v // (func)
		}
		// Local var.
		return emitLoad(fn, fn.lookup(obj, false)) // var (address)

	case *ast.SelectorExpr:
		switch sel := fn.Pkg.info.Selections[e]; sel.Kind() {
		case types.PackageObj:
			return b.expr(fn, e.Sel)

		case types.MethodExpr:
			// (*T).f or T.f, the method f from the method-set of type T.
			// For declared methods, a simple conversion will suffice.
			return emitConv(fn, fn.Prog.Method(sel), fn.Pkg.typeOf(e))

		case types.MethodVal:
			// e.f where e is an expression and f is a method.
			// The result is a bound method closure.
			obj := sel.Obj().(*types.Func)
			wantAddr := isPointer(recvType(obj))
			escaping := true
			v := b.receiver(fn, e.X, wantAddr, escaping, sel)
			c := &MakeClosure{
				Fn:       boundMethodWrapper(fn.Prog, obj),
				Bindings: []Value{v},
			}
			c.setPos(e.Sel.Pos())
			c.setType(fn.Pkg.typeOf(e))
			return fn.emit(c)

		case types.FieldVal:
			indices := sel.Index()
			last := len(indices) - 1
			v := b.expr(fn, e.X)
			v = emitImplicitSelections(fn, v, indices[:last])
			v = emitFieldSelection(fn, v, indices[last], false, e.Sel.Pos())
			emitDebugRef(fn, e.Sel, v, false)
			return v
		}

		panic("unexpected expression-relative selector")

	case *ast.IndexExpr:
		switch t := fn.Pkg.typeOf(e.X).Underlying().(type) {
		case *types.Array:
			// Non-addressable array (in a register).
			v := &Index{
				X:     b.expr(fn, e.X),
				Index: emitConv(fn, b.expr(fn, e.Index), tInt),
			}
			v.setPos(e.Lbrack)
			v.setType(t.Elem())
			return fn.emit(v)

		case *types.Map:
			// Maps are not addressable.
			mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map)
			v := &Lookup{
				X:     b.expr(fn, e.X),
				Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key()),
			}
			v.setPos(e.Lbrack)
			v.setType(mapt.Elem())
			return fn.emit(v)

		case *types.Basic: // => string
			// Strings are not addressable.
			v := &Lookup{
				X:     b.expr(fn, e.X),
				Index: b.expr(fn, e.Index),
			}
			v.setPos(e.Lbrack)
			v.setType(tByte)
			return fn.emit(v)

		case *types.Slice, *types.Pointer: // *array
			// Addressable slice/array; use IndexAddr and Load.
			return b.addr(fn, e, false).load(fn)

		default:
			panic("unexpected container type in IndexExpr: " + t.String())
		}

	case *ast.CompositeLit, *ast.StarExpr:
		// Addressable types (lvalues)
		return b.addr(fn, e, false).load(fn)
	}

	panic(fmt.Sprintf("unexpected expr: %T", e))
}

// stmtList emits to fn code for all statements in list.
func (b *builder) stmtList(fn *Function, list []ast.Stmt) {
	for _, s := range list {
		b.stmt(fn, s)
	}
}

// receiver emits to fn code for expression e in the "receiver"
// position of selection e.f (where f may be a field or a method) and
// returns the effective receiver after applying the implicit field
// selections of sel.
//
// wantAddr requests that the result be an address. If
// !sel.Indirect(), this may require that e be built in addr() mode; it
// must thus be addressable.
//
// escaping is defined as per builder.addr().
//
func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, sel *types.Selection) Value {
	var v Value
	if wantAddr && !sel.Indirect() && !isPointer(fn.Pkg.typeOf(e)) {
		v = b.addr(fn, e, escaping).address(fn)
	} else {
		v = b.expr(fn, e)
	}

	last := len(sel.Index()) - 1
	v = emitImplicitSelections(fn, v, sel.Index()[:last])
	if !wantAddr && isPointer(v.Type()) {
		v = emitLoad(fn, v)
	}
	return v
}

// setCallFunc populates the function parts of a CallCommon structure
// (Func, Method, Recv, Args[0]) based on the kind of invocation
// occurring in e.
//
func (b *builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) {
	c.pos = e.Lparen
	c.HasEllipsis = e.Ellipsis != 0

	// Is this a method call?
	if selector, ok := unparen(e.Fun).(*ast.SelectorExpr); ok {
		switch sel := fn.Pkg.info.Selections[selector]; sel.Kind() {
		case types.PackageObj:
			// e.g. fmt.Println

		case types.MethodExpr:
			// T.f() or (*T).f(): a statically dispatched
			// call to the method f in the method-set of T
			// or *T. T may be an interface.

			// e.Fun would evaluate to a concrete method,
			// interface wrapper function, or promotion
			// wrapper.
			//
			// For now, we evaluate it in the usual way.

			// TODO(adonovan): opt: inline expr() here, to
			// make the call static and to avoid
			// generation of wrappers. It's somewhat
			// tricky as it may consume the first actual
			// parameter if the call is "invoke" mode.
			//
			// Examples:
			//	type T struct{}; func (T) f() {} // "call" mode
			//	type T interface { f() }         // "invoke" mode
			//
			//	type S struct{ T }
			//
			//	var s S
			//	S.f(s)
			//	(*S).f(&s)
			//
			// Suggested approach:
			// - consume the first actual parameter expression
			//   and build it with b.expr().
			// - apply implicit field selections.
			// - use MethodVal logic to populate fields of c.

		case types.FieldVal:
			// A field access, not a method call.

		case types.MethodVal:
			obj := sel.Obj().(*types.Func)
			wantAddr := isPointer(recvType(obj))
			escaping := true
			v := b.receiver(fn, selector.X, wantAddr, escaping, sel)
			if _, ok := deref(v.Type()).Underlying().(*types.Interface); ok {
				// Invoke-mode call.
				c.Value = v
				c.Method = obj
			} else {
				// "Call"-mode call.
				// TODO(adonovan): fix: in -build=G
				// mode, declaredFunc panics for
				// cross-package calls.
				c.Value = fn.Prog.declaredFunc(obj)
				c.Args = append(c.Args, v)
			}
			return

		default:
			panic(fmt.Sprintf("illegal (%s).%s() call; X:%T",
				fn.Pkg.typeOf(selector.X), selector.Sel.Name, selector.X))
		}
	}

	// Evaluate the function operand in the usual way.
	c.Value = b.expr(fn, e.Fun)
}
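
// Illustrative sketch: for a method call x.f() where x has a concrete
// type, setCallFunc sets c.Value to the declared function for f and
// prepends the receiver to c.Args ("call" mode); if x is an interface
// value, it instead sets c.Value = x and c.Method = f ("invoke" mode),
// leaving dynamic dispatch to the consumer of the CallCommon.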

// emitCallArgs emits to f code for the actual parameters of call e to
// a (possibly built-in) function of effective type sig.
// The argument values are appended to args, which is then returned.
//
func (b *builder) emitCallArgs(fn *Function, sig *types.Signature, e *ast.CallExpr, args []Value) []Value {
	// f(x, y, z...): pass slice z straight through.
	if e.Ellipsis != 0 {
		for i, arg := range e.Args {
			v := emitConv(fn, b.expr(fn, arg), sig.Params().At(i).Type())
			args = append(args, v)
		}
		return args
	}

	offset := len(args) // 1 if call has receiver, 0 otherwise

	// Evaluate actual parameter expressions.
	//
	// If this is a chained call of the form f(g()) where g has
	// multiple return values (MRV), they are flattened out into
	// args; a suffix of them may end up in a varargs slice.
	for _, arg := range e.Args {
		v := b.expr(fn, arg)
		if ttuple, ok := v.Type().(*types.Tuple); ok { // MRV chain
			for i, n := 0, ttuple.Len(); i < n; i++ {
				args = append(args, emitExtract(fn, v, i, ttuple.At(i).Type()))
			}
		} else {
			args = append(args, v)
		}
	}

	// Actual->formal assignability conversions for normal parameters.
	np := sig.Params().Len() // number of normal parameters
	if sig.IsVariadic() {
		np--
	}
	for i := 0; i < np; i++ {
		args[offset+i] = emitConv(fn, args[offset+i], sig.Params().At(i).Type())
	}

	// Actual->formal assignability conversions for variadic parameter,
	// and construction of slice.
	if sig.IsVariadic() {
		varargs := args[offset+np:]
		st := sig.Params().At(np).Type().(*types.Slice)
		vt := st.Elem()
		if len(varargs) == 0 {
			args = append(args, nilConst(st))
		} else {
			// Replace a suffix of args with a slice containing it.
			at := types.NewArray(vt, int64(len(varargs)))
			// Don't set pos for implicit Allocs.
			a := emitNew(fn, at, token.NoPos)
			a.Comment = "varargs"
			for i, arg := range varargs {
				iaddr := &IndexAddr{
					X:     a,
					Index: intConst(int64(i)),
				}
				iaddr.setType(types.NewPointer(vt))
				fn.emit(iaddr)
				emitStore(fn, iaddr, arg)
			}
			s := &Slice{X: a}
			s.setType(st)
			args[offset+np] = fn.emit(s)
			args = args[:offset+np+1]
		}
	}
	return args
}
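
// Illustrative sketch: for a variadic call f(1, "a", "b") against
// func(int, ...string), emitCallArgs converts 1 to the int parameter,
// allocates a [2]string ("varargs"), stores "a" and "b" into it via
// IndexAddr/Store, and passes a Slice of that array as the final
// argument; f(1, xs...) instead passes the slice xs straight through.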

// setCall emits to fn code to evaluate all the parameters of a function
// call e, and populates *c with those values.
//
func (b *builder) setCall(fn *Function, e *ast.CallExpr, c *CallCommon) {
	// First deal with the f(...) part and optional receiver.
	b.setCallFunc(fn, e, c)

	// Then append the other actual parameters.
	sig, _ := fn.Pkg.typeOf(e.Fun).Underlying().(*types.Signature)
	if sig == nil {
		panic(fmt.Sprintf("no signature for call of %s", e.Fun))
	}
	c.Args = b.emitCallArgs(fn, sig, e, c.Args)
}

// assignOp emits to fn code to perform loc += incr or loc -= incr.
func (b *builder) assignOp(fn *Function, loc lvalue, incr Value, op token.Token) {
	oldv := loc.load(fn)
	loc.store(fn, emitArith(fn, op, oldv, emitConv(fn, incr, oldv.Type()), loc.typ(), token.NoPos))
}
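
// Illustrative sketch: for x += 1, assignOp loads the current value of
// x, emits an arithmetic ADD of that value and 1 (converted to x's
// type), and stores the result back through the same lvalue.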

// localValueSpec emits to fn code to define all of the vars in the
// function-local ValueSpec, spec.
//
func (b *builder) localValueSpec(fn *Function, spec *ast.ValueSpec) {
	switch {
	case len(spec.Values) == len(spec.Names):
		// e.g. var x, y = 0, 1
		// 1:1 assignment
		for i, id := range spec.Names {
			if !isBlankIdent(id) {
fn.addLocalForIdent(id)
|
2013-05-17 14:25:48 -06:00
|
|
|
}
|
go.tools/ssa: add debug information for all ast.Idents.
This CL adds three new functions to determine the SSA Value
for a given syntactic var, func or const object:
Program.{Const,Func,Var}Value.
Since constants and functions are immutable, the first
two only need a types.Object; but each distinct
reference to a var may return a distinct Value, so the third
requires an ast.Ident parameter too.
Debug information for local vars is encoded in the
instruction stream in the form of DebugRef instructions,
which are a no-op but relate their operand to a particular
ident in the AST. The beauty of this approach is that it
naturally stays consistent during optimisation passes
(e.g. lifting) without additional bookkeeping.
DebugRef instructions are only generated if the DebugMode
builder flag is set; I plan to make the policy more fine-
grained (per function).
DebugRef instructions are inserted for:
- expr(Ident) for rvalue idents
- address.store() for idents that update an lvalue
- address.address() for idents that take address of lvalue
(this new method replaces all uses of lval.(address).addr)
- expr() for all constant expressions
- local ValueSpecs with implicit zero initialization (no RHS)
(this case doesn't call store() or address())
To ensure we don't forget to emit debug info for uses of Idents,
we must use the lvalue mechanism consistently. (Previously,
many simple cases had effectively inlined these functions.)
Similarly setCallFunc no longer inlines expr(Ident).
Also:
- Program.Value() has been inlined & specialized.
- Program.Package() has moved nearer the new lookup functions.
- refactoring: funcSyntax has lost paramFields, resultFields;
gained funcType, which provides access to both.
- add package-level constants to Package.values map.
- opt: don't call localValueSpec for constants.
(The resulting code is always optimised away.)
There are a number of comments asking whether Literals
should have positions. Will address in a follow-up.
Added tests of all interesting cases.
R=gri
CC=golang-dev
https://golang.org/cl/11259044
2013-07-15 11:56:46 -06:00
|
|
|
lval := b.addr(fn, id, false) // non-escaping
|
2013-05-17 14:25:48 -06:00
|
|
|
b.exprInPlace(fn, lval, spec.Values[i])
|
|
|
|
}
|
|
|
|
|
|
|
|
case len(spec.Values) == 0:
|
|
|
|
// e.g. var x, y int
|
|
|
|
// Locals are implicitly zero-initialized.
|
|
|
|
for _, id := range spec.Names {
|
|
|
|
if !isBlankIdent(id) {
|
go.tools/ssa: add debug information for all ast.Idents.
This CL adds three new functions to determine the SSA Value
for a given syntactic var, func or const object:
Program.{Const,Func,Var}Value.
Since constants and functions are immutable, the first
two only need a types.Object; but each distinct
reference to a var may return a distinct Value, so the third
requires an ast.Ident parameter too.
Debug information for local vars is encoded in the
instruction stream in the form of DebugRef instructions,
which are a no-op but relate their operand to a particular
ident in the AST. The beauty of this approach is that it
naturally stays consistent during optimisation passes
(e.g. lifting) without additional bookkeeping.
DebugRef instructions are only generated if the DebugMode
builder flag is set; I plan to make the policy more fine-
grained (per function).
DebugRef instructions are inserted for:
- expr(Ident) for rvalue idents
- address.store() for idents that update an lvalue
- address.address() for idents that take address of lvalue
(this new method replaces all uses of lval.(address).addr)
- expr() for all constant expressions
- local ValueSpecs with implicit zero initialization (no RHS)
(this case doesn't call store() or address())
To ensure we don't forget to emit debug info for uses of Idents,
we must use the lvalue mechanism consistently. (Previously,
many simple cases had effectively inlined these functions.)
Similarly setCallFunc no longer inlines expr(Ident).
Also:
- Program.Value() has been inlined & specialized.
- Program.Package() has moved nearer the new lookup functions.
- refactoring: funcSyntax has lost paramFields, resultFields;
gained funcType, which provides access to both.
- add package-level constants to Package.values map.
- opt: don't call localValueSpec for constants.
(The resulting code is always optimised away.)
There are a number of comments asking whether Literals
should have positions. Will address in a follow-up.
Added tests of all interesting cases.
R=gri
CC=golang-dev
https://golang.org/cl/11259044
2013-07-15 11:56:46 -06:00
|
|
|
lhs := fn.addLocalForIdent(id)
|
|
|
|
if fn.debugInfo() {
|
go.tools/ssa: record lvalue/rvalue distinction precisely in DebugRef.
A DebugRef associates a source expression E with an ssa.Value
V, but until now did not record whether V was the value or the
address of E. So, we would guess from the "pointerness" of
the Value, leading to confusion in some cases, e.g.
type N *N
var n N
n = &n // lvalue and rvalue are both pointers
Now we explicitly record 'IsAddress bool' in DebugRef, and
plumb this everywhere: through (*Function).ValueForExpr and
(*Program).VarValue, all the way to forming the pointer
analysis query.
Also:
- VarValue now treats each reference to a global distinctly,
just like it does for other vars. So:
var g int
func f() {
g = 1 // VarValue(g) == Const(1:int), !isAddress
print(g) // VarValue(g) == Global(g), isAddress
}
- DebugRefs are not emitted for references to predeclared
identifiers (nil, built-in).
- DebugRefs no longer prevent lifting of an Alloc var into a
register; now we update or discard the debug info.
- TestValueForExpr: improve coverage of ssa.EnclosingFunction
by putting expectations in methods and init funcs, not just
normal funcs.
- oracle: fix golden file broken by recent
(*types.Var).IsField change.
R=gri
CC=golang-dev
https://golang.org/cl/16610045
2013-10-24 16:31:50 -06:00
|
|
|
emitDebugRef(fn, id, lhs, true)
|
go.tools/ssa: add debug information for all ast.Idents.
This CL adds three new functions to determine the SSA Value
for a given syntactic var, func or const object:
Program.{Const,Func,Var}Value.
Since constants and functions are immutable, the first
two only need a types.Object; but each distinct
reference to a var may return a distinct Value, so the third
requires an ast.Ident parameter too.
Debug information for local vars is encoded in the
instruction stream in the form of DebugRef instructions,
which are a no-op but relate their operand to a particular
ident in the AST. The beauty of this approach is that it
naturally stays consistent during optimisation passes
(e.g. lifting) without additional bookkeeping.
DebugRef instructions are only generated if the DebugMode
builder flag is set; I plan to make the policy more fine-
grained (per function).
DebugRef instructions are inserted for:
- expr(Ident) for rvalue idents
- address.store() for idents that update an lvalue
- address.address() for idents that take address of lvalue
(this new method replaces all uses of lval.(address).addr)
- expr() for all constant expressions
- local ValueSpecs with implicit zero initialization (no RHS)
(this case doesn't call store() or address())
To ensure we don't forget to emit debug info for uses of Idents,
we must use the lvalue mechanism consistently. (Previously,
many simple cases had effectively inlined these functions.)
Similarly setCallFunc no longer inlines expr(Ident).
Also:
- Program.Value() has been inlined & specialized.
- Program.Package() has moved nearer the new lookup functions.
- refactoring: funcSyntax has lost paramFields, resultFields;
gained funcType, which provides access to both.
- add package-level constants to Package.values map.
- opt: don't call localValueSpec for constants.
(The resulting code is always optimised away.)
There are a number of comments asking whether Literals
should have positions. Will address in a follow-up.
Added tests of all interesting cases.
R=gri
CC=golang-dev
https://golang.org/cl/11259044
2013-07-15 11:56:46 -06:00
|
|
|
}
|
2013-05-17 14:25:48 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
default:
|
|
|
|
// e.g. var x, y = pos()
|
|
|
|
tuple := b.exprN(fn, spec.Values[0])
|
2013-05-17 15:02:47 -06:00
|
|
|
result := tuple.Type().(*types.Tuple)
|
2013-05-17 14:25:48 -06:00
|
|
|
for i, id := range spec.Names {
|
|
|
|
if !isBlankIdent(id) {
|
go.tools/ssa: add debug information for all ast.Idents.
This CL adds three new functions to determine the SSA Value
for a given syntactic var, func or const object:
Program.{Const,Func,Var}Value.
Since constants and functions are immutable, the first
two only need a types.Object; but each distinct
reference to a var may return a distinct Value, so the third
requires an ast.Ident parameter too.
Debug information for local vars is encoded in the
instruction stream in the form of DebugRef instructions,
which are a no-op but relate their operand to a particular
ident in the AST. The beauty of this approach is that it
naturally stays consistent during optimisation passes
(e.g. lifting) without additional bookkeeping.
DebugRef instructions are only generated if the DebugMode
builder flag is set; I plan to make the policy more fine-
grained (per function).
DebugRef instructions are inserted for:
- expr(Ident) for rvalue idents
- address.store() for idents that update an lvalue
- address.address() for idents that take address of lvalue
(this new method replaces all uses of lval.(address).addr)
- expr() for all constant expressions
- local ValueSpecs with implicit zero initialization (no RHS)
(this case doesn't call store() or address())
To ensure we don't forget to emit debug info for uses of Idents,
we must use the lvalue mechanism consistently. (Previously,
many simple cases had effectively inlined these functions.)
Similarly setCallFunc no longer inlines expr(Ident).
Also:
- Program.Value() has been inlined & specialized.
- Program.Package() has moved nearer the new lookup functions.
- refactoring: funcSyntax has lost paramFields, resultFields;
gained funcType, which provides access to both.
- add package-level constants to Package.values map.
- opt: don't call localValueSpec for constants.
(The resulting code is always optimised away.)
There are a number of comments asking whether Literals
should have positions. Will address in a follow-up.
Added tests of all interesting cases.
R=gri
CC=golang-dev
https://golang.org/cl/11259044
2013-07-15 11:56:46 -06:00
|
|
|
fn.addLocalForIdent(id)
|
|
|
|
lhs := b.addr(fn, id, false) // non-escaping
|
|
|
|
lhs.store(fn, emitExtract(fn, tuple, i, result.At(i).Type()))
|
2013-05-17 14:25:48 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// assignStmt emits code to fn for a parallel assignment of rhss to lhss.
// isDef is true if this is a short variable declaration (:=).
//
// Note the similarity with localValueSpec.
//
func (b *builder) assignStmt(fn *Function, lhss, rhss []ast.Expr, isDef bool) {
	// Side effects of all LHSs and RHSs must occur in left-to-right order.
	var lvals []lvalue
	for _, lhs := range lhss {
		var lval lvalue = blank{}
		if !isBlankIdent(lhs) {
			if isDef {
				// Local may be "redeclared" in the same
				// scope, so don't blindly create anew.
				obj := fn.Pkg.objectOf(lhs.(*ast.Ident))
				if _, ok := fn.objects[obj]; !ok {
					fn.addNamedLocal(obj)
				}
			}
			lval = b.addr(fn, lhs, false) // non-escaping
		}
		lvals = append(lvals, lval)
	}
	if len(lhss) == len(rhss) {
		// e.g. x, y = f(), g()
		if len(lhss) == 1 {
			// x = type{...}
			// Optimization: in-place construction
			// of composite literals.
			b.exprInPlace(fn, lvals[0], rhss[0])
		} else {
			// Parallel assignment. All reads must occur
			// before all updates, precluding exprInPlace.
			// TODO(adonovan): opt: is it sound to
			// perform exprInPlace if !isDef?
			var rvals []Value
			for _, rval := range rhss {
				rvals = append(rvals, b.expr(fn, rval))
			}
			for i, lval := range lvals {
				lval.store(fn, rvals[i])
			}
		}
	} else {
		// e.g. x, y = pos()
		tuple := b.exprN(fn, rhss[0])
		result := tuple.Type().(*types.Tuple)
		for i, lval := range lvals {
			lval.store(fn, emitExtract(fn, tuple, i, result.At(i).Type()))
		}
	}
}

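// Illustrative sketch (hypothetical example): for the parallel
// assignment
//
//	x, y = y, x
//
// assignStmt evaluates both lvalues and then both right-hand sides
// before performing either store, so the swap observes the original
// values of x and y.
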
// arrayLen returns the length of the array whose composite literal elements are elts.
func (b *builder) arrayLen(fn *Function, elts []ast.Expr) int64 {
	var max int64 = -1
	var i int64 = -1
	for _, e := range elts {
		if kv, ok := e.(*ast.KeyValueExpr); ok {
			i = b.expr(fn, kv.Key).(*Const).Int64()
		} else {
			i++
		}
		if i > max {
			max = i
		}
	}
	return max + 1
}

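// Illustrative sketch (hypothetical example): keyed elements advance
// the implicit index, so for
//
//	[...]int{0: 1, 5: 2, 3}
//
// the elements occupy indices 0, 5 and 6, and arrayLen returns 7
// (the largest index seen, plus one).
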
// compLit emits to fn code to initialize a composite literal e at
// address addr with type typ, typically allocated by Alloc.
// Nested composite literals are recursively initialized in place
// where possible.
//
// A CompositeLit may have pointer type only in the recursive (nested)
// case when the type name is implicit. e.g. in []*T{{}}, the inner
// literal has type *T and behaves like &T{}.
// In that case, addr must hold a T, not a *T.
//
func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit) {
	typ := deref(fn.Pkg.typeOf(e))
	switch t := typ.Underlying().(type) {
	case *types.Struct:
		for i, e := range e.Elts {
			fieldIndex := i
			if kv, ok := e.(*ast.KeyValueExpr); ok {
				fname := kv.Key.(*ast.Ident).Name
				for i, n := 0, t.NumFields(); i < n; i++ {
					sf := t.Field(i)
					if sf.Name() == fname {
						fieldIndex = i
						e = kv.Value
						break
					}
				}
			}
			sf := t.Field(fieldIndex)
			faddr := &FieldAddr{
				X:     addr,
				Field: fieldIndex,
			}
			faddr.setType(types.NewPointer(sf.Type()))
			fn.emit(faddr)
			b.exprInPlace(fn, &address{addr: faddr, expr: e}, e)
		}

	case *types.Array, *types.Slice:
		var at *types.Array
		var array Value
		switch t := t.(type) {
		case *types.Slice:
			at = types.NewArray(t.Elem(), b.arrayLen(fn, e.Elts))
			alloc := emitNew(fn, at, e.Lbrace)
			alloc.Comment = "slicelit"
			array = alloc
		case *types.Array:
			at = t
			array = addr
		}

		var idx *Const
		for _, e := range e.Elts {
			if kv, ok := e.(*ast.KeyValueExpr); ok {
				idx = b.expr(fn, kv.Key).(*Const)
				e = kv.Value
			} else {
				var idxval int64
				if idx != nil {
					idxval = idx.Int64() + 1
				}
				idx = intConst(idxval)
			}
			iaddr := &IndexAddr{
				X:     array,
				Index: idx,
			}
			iaddr.setType(types.NewPointer(at.Elem()))
			fn.emit(iaddr)
			b.exprInPlace(fn, &address{addr: iaddr, expr: e}, e)
		}
		if t != at { // slice
			s := &Slice{X: array}
			s.setPos(e.Lbrace)
			s.setType(typ)
			emitStore(fn, addr, fn.emit(s))
		}

	case *types.Map:
		m := &MakeMap{Reserve: intConst(int64(len(e.Elts)))}
		m.setPos(e.Lbrace)
		m.setType(typ)
		emitStore(fn, addr, fn.emit(m))
		for _, e := range e.Elts {
			e := e.(*ast.KeyValueExpr)
			loc := &element{
				m:   m,
				k:   emitConv(fn, b.expr(fn, e.Key), t.Key()),
				t:   t.Elem(),
				pos: e.Colon,
			}
			b.exprInPlace(fn, loc, e.Value)
		}

	default:
		panic("unexpected CompositeLit type: " + t.String())
	}
}

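// Illustrative sketch (hypothetical types Point and Rect): for
//
//	r = Rect{Min: Point{1, 2}}
//
// compLit emits a FieldAddr for Min relative to the address of r and
// initializes the nested Point literal in place through that address,
// rather than building a temporary Point and copying it.
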
// switchStmt emits to fn code for the switch statement s, optionally
// labelled by label.
//
func (b *builder) switchStmt(fn *Function, s *ast.SwitchStmt, label *lblock) {
	// We treat SwitchStmt like a sequential if-else chain.
	// More efficient strategies (e.g. multiway dispatch)
	// are possible if all cases are free of side effects.
	if s.Init != nil {
		b.stmt(fn, s.Init)
	}
	var tag Value = vTrue
	if s.Tag != nil {
		tag = b.expr(fn, s.Tag)
	}
	done := fn.newBasicBlock("switch.done")
	if label != nil {
		label._break = done
	}
	// We pull the default case (if present) down to the end.
	// But each fallthrough label must point to the next
	// body block in source order, so we preallocate a
	// body block (fallthru) for the next case.
	// Unfortunately this makes for a confusing block order.
	var dfltBody *[]ast.Stmt
	var dfltFallthrough *BasicBlock
	var fallthru, dfltBlock *BasicBlock
	ncases := len(s.Body.List)
	for i, clause := range s.Body.List {
		body := fallthru
		if body == nil {
			body = fn.newBasicBlock("switch.body") // first case only
		}

		// Preallocate body block for the next case.
		fallthru = done
		if i+1 < ncases {
			fallthru = fn.newBasicBlock("switch.body")
		}

		cc := clause.(*ast.CaseClause)
		if cc.List == nil {
			// Default case.
			dfltBody = &cc.Body
			dfltFallthrough = fallthru
			dfltBlock = body
			continue
		}

		var nextCond *BasicBlock
		for _, cond := range cc.List {
			nextCond = fn.newBasicBlock("switch.next")
			// TODO(adonovan): opt: when tag==vTrue, we'd
			// get much better code if we used b.cond(cond)
			// instead of BinOp(EQL, tag, b.expr(cond))
			// followed by If. Don't forget conversions
			// though.
			cond := emitCompare(fn, token.EQL, tag, b.expr(fn, cond), token.NoPos)
			emitIf(fn, cond, body, nextCond)
			fn.currentBlock = nextCond
		}
		fn.currentBlock = body
		fn.targets = &targets{
			tail:         fn.targets,
			_break:       done,
			_fallthrough: fallthru,
		}
		b.stmtList(fn, cc.Body)
		fn.targets = fn.targets.tail
		emitJump(fn, done)
		fn.currentBlock = nextCond
	}
	if dfltBlock != nil {
		emitJump(fn, dfltBlock)
		fn.currentBlock = dfltBlock
		fn.targets = &targets{
			tail:         fn.targets,
			_break:       done,
			_fallthrough: dfltFallthrough,
		}
		b.stmtList(fn, *dfltBody)
		fn.targets = fn.targets.tail
	}
	emitJump(fn, done)
	fn.currentBlock = done
}

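// Illustrative sketch (hypothetical example): a switch such as
//
//	switch x {
//	case 1, 2:
//		a()
//		fallthrough
//	case 3:
//		b()
//	default:
//		c()
//	}
//
// becomes a chain of equality tests against the tag x; each case body
// gets its own block, the preallocated block of the following case is
// the fallthrough target, and the default body is emitted last, after
// all tests have failed.
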
// typeSwitchStmt emits to fn code for the type switch statement s, optionally
// labelled by label.
//
func (b *builder) typeSwitchStmt(fn *Function, s *ast.TypeSwitchStmt, label *lblock) {
	// We treat TypeSwitchStmt like a sequential if-else
	// chain. More efficient strategies (e.g. multiway
	// dispatch) are possible.

	// Typeswitch lowering:
	//
	// var x X
	// switch y := x.(type) {
	// case T1, T2: S1          // >1      (y := x)
	// case nil:    SN          // nil     (y := x)
	// default:     SD          // 0 types (y := x)
	// case T3:     S3          // 1 type  (y := x.(T3))
	// }
	//
	//      ...s.Init...
	//      x := eval x
	// .caseT1:
	//      t1, ok1 := typeswitch,ok x <T1>
	//      if ok1 then goto S1 else goto .caseT2
	// .caseT2:
	//      t2, ok2 := typeswitch,ok x <T2>
	//      if ok2 then goto S1 else goto .caseNil
	// .S1:
	//      y := x
	//      ...S1...
	//      goto done
	// .caseNil:
	//      if x == nil then goto SN else goto .caseT3
	// .SN:
	//      y := x
	//      ...SN...
	//      goto done
	// .caseT3:
	//      t3, ok3 := typeswitch,ok x <T3>
	//      if ok3 then goto S3 else goto default
	// .S3:
	//      y := t3
	//      ...S3...
	//      goto done
	// .default:
	//      y := x
	//      ...SD...
	//      goto done
	// .done:

	if s.Init != nil {
		b.stmt(fn, s.Init)
	}

	var x Value
	switch ass := s.Assign.(type) {
	case *ast.ExprStmt: // x.(type)
		x = b.expr(fn, unparen(ass.X).(*ast.TypeAssertExpr).X)
	case *ast.AssignStmt: // y := x.(type)
		x = b.expr(fn, unparen(ass.Rhs[0]).(*ast.TypeAssertExpr).X)
	}

	done := fn.newBasicBlock("typeswitch.done")
	if label != nil {
		label._break = done
	}
	var default_ *ast.CaseClause
	for _, clause := range s.Body.List {
		cc := clause.(*ast.CaseClause)
		if cc.List == nil {
			default_ = cc
			continue
		}
		body := fn.newBasicBlock("typeswitch.body")
		var next *BasicBlock
		var casetype types.Type
		var ti Value // ti, ok := typeassert,ok x <Ti>
		for _, cond := range cc.List {
			next = fn.newBasicBlock("typeswitch.next")
			casetype = fn.Pkg.typeOf(cond)
			var condv Value
			if casetype == tUntypedNil {
				condv = emitCompare(fn, token.EQL, x, nilConst(x.Type()), token.NoPos)
				ti = x
			} else {
				yok := emitTypeTest(fn, x, casetype, cc.Case)
				ti = emitExtract(fn, yok, 0, casetype)
				condv = emitExtract(fn, yok, 1, tBool)
			}
			emitIf(fn, condv, body, next)
			fn.currentBlock = next
		}
		if len(cc.List) != 1 {
			ti = x
		}
		fn.currentBlock = body
		b.typeCaseBody(fn, cc, ti, done)
		fn.currentBlock = next
	}
	if default_ != nil {
		b.typeCaseBody(fn, default_, x, done)
	} else {
		emitJump(fn, done)
	}
	fn.currentBlock = done
}

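// typeCaseBody emits to fn code for the body of case clause cc of a
// type switch. x is the value bound to the clause's implicitly
// declared variable, if any: the narrowed value in a single-type
// case, otherwise the interface operand itself. Control then jumps
// to done.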
func (b *builder) typeCaseBody(fn *Function, cc *ast.CaseClause, x Value, done *BasicBlock) {
	if obj := fn.Pkg.info.TypeCaseVar(cc); obj != nil {
		// In a switch y := x.(type), each case clause
		// implicitly declares a distinct object y.
		// In a single-type case, y has that type.
		// In multi-type cases, 'case nil' and default,
		// y has the same type as the interface operand.
		emitStore(fn, fn.addNamedLocal(obj), x)
	}
	fn.targets = &targets{
		tail:   fn.targets,
		_break: done,
	}
	b.stmtList(fn, cc.Body)
	fn.targets = fn.targets.tail
	emitJump(fn, done)
}

// selectStmt emits to fn code for the select statement s, optionally
// labelled by label.
//
func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) {
	// A blocking select of a single case degenerates to a
	// simple send or receive.
	// TODO(adonovan): opt: is this optimization worth its weight?
	if len(s.Body.List) == 1 {
		clause := s.Body.List[0].(*ast.CommClause)
		if clause.Comm != nil {
			b.stmt(fn, clause.Comm)
			done := fn.newBasicBlock("select.done")
			if label != nil {
				label._break = done
			}
			fn.targets = &targets{
				tail:   fn.targets,
				_break: done,
			}
			b.stmtList(fn, clause.Body)
			fn.targets = fn.targets.tail
			emitJump(fn, done)
			fn.currentBlock = done
			return
		}
	}

	// First evaluate all channels in all cases, and find
	// the directions of each state.
	var states []*SelectState
	blocking := true
	debugInfo := fn.debugInfo()
	for _, clause := range s.Body.List {
		var st *SelectState
		switch comm := clause.(*ast.CommClause).Comm.(type) {
		case nil: // default case
			blocking = false
			continue

		case *ast.SendStmt: // ch<- i
			ch := b.expr(fn, comm.Chan)
			st = &SelectState{
				Dir:  ast.SEND,
				Chan: ch,
				Send: emitConv(fn, b.expr(fn, comm.Value),
					ch.Type().Underlying().(*types.Chan).Elem()),
				Pos: comm.Arrow,
			}
			if debugInfo {
				st.DebugNode = comm
			}

		case *ast.AssignStmt: // x := <-ch
			recv := unparen(comm.Rhs[0]).(*ast.UnaryExpr)
			st = &SelectState{
				Dir:  ast.RECV,
				Chan: b.expr(fn, recv.X),
				Pos:  recv.OpPos,
			}
			if debugInfo {
				st.DebugNode = recv
			}

		case *ast.ExprStmt: // <-ch
			recv := unparen(comm.X).(*ast.UnaryExpr)
			st = &SelectState{
				Dir:  ast.RECV,
				Chan: b.expr(fn, recv.X),
				Pos:  recv.OpPos,
			}
			if debugInfo {
				st.DebugNode = recv
			}
		}
		states = append(states, st)
	}

	// We dispatch on the (fair) result of Select using a
	// sequential if-else chain, in effect:
	//
	// idx, recvOk, r0...r_n-1 := select(...)
	// if idx == 0 {        // receive on channel 0 (first receive => r0)
	//     x, ok := r0, recvOk
	//     ...state0...
	// } else if idx == 1 { // send on channel 1
	//     ...state1...
	// } else {
	//     ...default...
	// }
	sel := &Select{
		States:   states,
		Blocking: blocking,
	}
	sel.setPos(s.Select)
	var vars []*types.Var
	vars = append(vars, varIndex, varOk)
	for _, st := range states {
		if st.Dir == ast.RECV {
			tElem := st.Chan.Type().Underlying().(*types.Chan).Elem()
			vars = append(vars, types.NewVar(token.NoPos, nil, "", tElem))
		}
	}
	sel.setType(types.NewTuple(vars...))

	fn.emit(sel)
	idx := emitExtract(fn, sel, 0, tInt)

	done := fn.newBasicBlock("select.done")
	if label != nil {
		label._break = done
	}

	var defaultBody *[]ast.Stmt
	state := 0
	r := 2 // index in 'sel' tuple of value; increments if st.Dir==RECV
	for _, cc := range s.Body.List {
		clause := cc.(*ast.CommClause)
		if clause.Comm == nil {
			defaultBody = &clause.Body
			continue
		}
		body := fn.newBasicBlock("select.body")
		next := fn.newBasicBlock("select.next")
		emitIf(fn, emitCompare(fn, token.EQL, idx, intConst(int64(state)), token.NoPos), body, next)
		fn.currentBlock = body
		fn.targets = &targets{
			tail:   fn.targets,
			_break: done,
		}
		switch comm := clause.Comm.(type) {
		case *ast.ExprStmt: // <-ch
			if debugInfo {
				v := emitExtract(fn, sel, r, vars[r].Type())
				emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false)
			}
			r++

		case *ast.AssignStmt: // x := <-states[state].Chan
			if comm.Tok == token.DEFINE {
				fn.addLocalForIdent(comm.Lhs[0].(*ast.Ident))
			}
			x := b.addr(fn, comm.Lhs[0], false) // non-escaping
			v := emitExtract(fn, sel, r, vars[r].Type())
			if debugInfo {
				emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false)
			}
			x.store(fn, v)

			if len(comm.Lhs) == 2 { // x, ok := ...
				if comm.Tok == token.DEFINE {
					fn.addLocalForIdent(comm.Lhs[1].(*ast.Ident))
				}
				ok := b.addr(fn, comm.Lhs[1], false) // non-escaping
				ok.store(fn, emitExtract(fn, sel, 1, deref(ok.typ())))
			}
			r++
		}
		b.stmtList(fn, clause.Body)
		fn.targets = fn.targets.tail
		emitJump(fn, done)
		fn.currentBlock = next
		state++
	}
	if defaultBody != nil {
		fn.targets = &targets{
			tail:   fn.targets,
			_break: done,
		}
		b.stmtList(fn, *defaultBody)
		fn.targets = fn.targets.tail
	}
	emitJump(fn, done)
	fn.currentBlock = done
}

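// Illustrative sketch (hypothetical example): for
//
//	select {
//	case ch <- 1:
//		a()
//	case v := <-ch2:
//		b(v)
//	default:
//		c()
//	}
//
// the Select instruction yields a tuple (index, recvOk, r0), where r0
// carries the value received from ch2; the chain above compares index
// with 0 and 1 in turn, binds v to r0 in the receive case, and falls
// back to the default body when neither test succeeds.
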
// forStmt emits to fn code for the for statement s, optionally
// labelled by label.
//
func (b *builder) forStmt(fn *Function, s *ast.ForStmt, label *lblock) {
	//      ...init...
	//      jump loop
	// loop:
	//      if cond goto body else done
	// body:
	//      ...body...
	//      jump post
	// post:                              (target of continue)
	//      ...post...
	//      jump loop
	// done:                              (target of break)
	if s.Init != nil {
		b.stmt(fn, s.Init)
	}
	body := fn.newBasicBlock("for.body")
	done := fn.newBasicBlock("for.done") // target of 'break'
	loop := body                         // target of back-edge
	if s.Cond != nil {
		loop = fn.newBasicBlock("for.loop")
	}
	cont := loop // target of 'continue'
	if s.Post != nil {
		cont = fn.newBasicBlock("for.post")
	}
	if label != nil {
		label._break = done
		label._continue = cont
	}
	emitJump(fn, loop)
	fn.currentBlock = loop
	if loop != body {
		b.cond(fn, s.Cond, body, done)
		fn.currentBlock = body
	}
	fn.targets = &targets{
		tail:      fn.targets,
		_break:    done,
		_continue: cont,
	}
	b.stmt(fn, s.Body)
	fn.targets = fn.targets.tail
	emitJump(fn, cont)

	if s.Post != nil {
		fn.currentBlock = cont
		b.stmt(fn, s.Post)
		emitJump(fn, loop) // back-edge
	}
	fn.currentBlock = done
}

// rangeIndexed emits to fn the header for an integer indexed loop
// over array, *array or slice value x.
// The v result is defined only if tv is non-nil.
//
func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type) (k, v Value, loop, done *BasicBlock) {
	//
	//      length = len(x)
	//      index = -1
	// loop:                              (target of continue)
	//      index++
	//      if index < length goto body else done
	// body:
	//      k = index
	//      v = x[index]
	//      ...body...
	//      jump loop
	// done:                              (target of break)

	// Determine number of iterations.
	var length Value
	if arr, ok := deref(x.Type()).Underlying().(*types.Array); ok {
		// For array or *array, the number of iterations is
		// known statically thanks to the type. We avoid a
		// data dependence upon x, permitting later dead-code
		// elimination if x is pure, static unrolling, etc.
		// Ranging over a nil *array may have >0 iterations.
		length = intConst(arr.Len())
	} else {
		// length = len(x).
		var c Call
		c.Call.Value = fn.Prog.builtins[types.Universe.Lookup("len").(*types.Builtin)]
		c.Call.Args = []Value{x}
		c.setType(tInt)
		length = fn.emit(&c)
	}

	index := fn.addLocal(tInt, token.NoPos)
	emitStore(fn, index, intConst(-1))

	loop = fn.newBasicBlock("rangeindex.loop")
	emitJump(fn, loop)
	fn.currentBlock = loop

	incr := &BinOp{
		Op: token.ADD,
		X:  emitLoad(fn, index),
		Y:  vOne,
	}
	incr.setType(tInt)
	emitStore(fn, index, fn.emit(incr))

	body := fn.newBasicBlock("rangeindex.body")
	done = fn.newBasicBlock("rangeindex.done")
	emitIf(fn, emitCompare(fn, token.LSS, incr, length, token.NoPos), body, done)
	fn.currentBlock = body

	k = emitLoad(fn, index)
	if tv != nil {
		switch t := x.Type().Underlying().(type) {
		case *types.Array:
			instr := &Index{
				X:     x,
				Index: k,
			}
			instr.setType(t.Elem())
			v = fn.emit(instr)

		case *types.Pointer: // *array
			instr := &IndexAddr{
				X:     x,
				Index: k,
			}
			instr.setType(types.NewPointer(t.Elem().(*types.Array).Elem()))
			v = emitLoad(fn, fn.emit(instr))

		case *types.Slice:
			instr := &IndexAddr{
				X:     x,
				Index: k,
			}
			instr.setType(types.NewPointer(t.Elem()))
			v = emitLoad(fn, fn.emit(instr))

		default:
			panic("rangeIndexed x:" + t.String())
		}
	}
	return
}

// rangeIter emits to fn the header for a loop using
// Range/Next/Extract to iterate over map or string value x.
// tk and tv are the types of the key/value results k and v, or nil
// if the respective component is not wanted.
//
func (b *builder) rangeIter(fn *Function, x Value, tk, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) {
	//
	//      it = range x
	// loop:                              (target of continue)
	//      okv = next it                 (ok, key, value)
	//      ok = extract okv #0
	//      if ok goto body else done
	// body:
	//      k = extract okv #1
	//      v = extract okv #2
	//      ...body...
	//      jump loop
	// done:                              (target of break)
	//

	if tk == nil {
		tk = tInvalid
	}
	if tv == nil {
		tv = tInvalid
	}

	rng := &Range{X: x}
	rng.setPos(pos)
	rng.setType(tRangeIter)
	it := fn.emit(rng)

	loop = fn.newBasicBlock("rangeiter.loop")
	emitJump(fn, loop)
	fn.currentBlock = loop

	_, isString := x.Type().Underlying().(*types.Basic)

	okv := &Next{
		Iter:     it,
		IsString: isString,
	}
	okv.setType(types.NewTuple(
		varOk,
		types.NewVar(token.NoPos, nil, "k", tk),
		types.NewVar(token.NoPos, nil, "v", tv),
	))
	fn.emit(okv)

	body := fn.newBasicBlock("rangeiter.body")
	done = fn.newBasicBlock("rangeiter.done")
	emitIf(fn, emitExtract(fn, okv, 0, tBool), body, done)
	fn.currentBlock = body

	if tk != tInvalid {
		k = emitExtract(fn, okv, 1, tk)
	}
	if tv != tInvalid {
		v = emitExtract(fn, okv, 2, tv)
	}
	return
}

// rangeChan emits to fn the header for a loop that receives from
// channel x until it fails.
// tk is the channel's element type, or nil if the k result is
// not wanted.
// pos is the position of the '=' or ':=' token.
//
func (b *builder) rangeChan(fn *Function, x Value, tk types.Type, pos token.Pos) (k Value, loop, done *BasicBlock) {
	//
	// loop:                              (target of continue)
	//      ko = <-x                      (key, ok)
	//      ok = extract ko #1
	//      if ok goto body else done
	// body:
	//      k = extract ko #0
	//      ...
	//      goto loop
	// done:                              (target of break)

	loop = fn.newBasicBlock("rangechan.loop")
	emitJump(fn, loop)
	fn.currentBlock = loop
	recv := &UnOp{
		Op:      token.ARROW,
		X:       x,
		CommaOk: true,
	}
	recv.setPos(pos)
	recv.setType(types.NewTuple(
		types.NewVar(token.NoPos, nil, "k", x.Type().Underlying().(*types.Chan).Elem()),
		varOk,
	))
	ko := fn.emit(recv)
	body := fn.newBasicBlock("rangechan.body")
	done = fn.newBasicBlock("rangechan.done")
	emitIf(fn, emitExtract(fn, ko, 1, tBool), body, done)
	fn.currentBlock = body
	if tk != nil {
		k = emitExtract(fn, ko, 0, tk)
	}
	return
}

// rangeStmt emits to fn code for the range statement s, optionally
// labelled by label.
//
func (b *builder) rangeStmt(fn *Function, s *ast.RangeStmt, label *lblock) {
	var tk, tv types.Type
	if !isBlankIdent(s.Key) {
		tk = fn.Pkg.typeOf(s.Key)
	}
	if s.Value != nil && !isBlankIdent(s.Value) {
		tv = fn.Pkg.typeOf(s.Value)
	}

	// If iteration variables are defined (:=), this
	// occurs once outside the loop.
	//
	// Unlike a short variable declaration, a RangeStmt
	// using := never redeclares an existing variable; it
	// always creates a new one.
	if s.Tok == token.DEFINE {
		if tk != nil {
			fn.addLocalForIdent(s.Key.(*ast.Ident))
		}
		if tv != nil {
			fn.addLocalForIdent(s.Value.(*ast.Ident))
		}
	}

	x := b.expr(fn, s.X)

	var k, v Value
	var loop, done *BasicBlock
	switch rt := x.Type().Underlying().(type) {
	case *types.Slice, *types.Array, *types.Pointer: // *array
		k, v, loop, done = b.rangeIndexed(fn, x, tv)

	case *types.Chan:
		k, loop, done = b.rangeChan(fn, x, tk, s.TokPos)

	case *types.Map, *types.Basic: // string
		k, v, loop, done = b.rangeIter(fn, x, tk, tv, s.For)

	default:
		panic("Cannot range over: " + rt.String())
	}

	// Evaluate both LHS expressions before we update either.
	var kl, vl lvalue
	if tk != nil {
		kl = b.addr(fn, s.Key, false) // non-escaping
	}
	if tv != nil {
		vl = b.addr(fn, s.Value, false) // non-escaping
	}
	if tk != nil {
		kl.store(fn, k)
	}
	if tv != nil {
		vl.store(fn, v)
	}

	if label != nil {
		label._break = done
		label._continue = loop
	}

	fn.targets = &targets{
		tail:      fn.targets,
		_break:    done,
		_continue: loop,
	}
	b.stmt(fn, s.Body)
	fn.targets = fn.targets.tail
	emitJump(fn, loop) // back-edge
	fn.currentBlock = done
}

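// Illustrative sketch (hypothetical names): the helper chosen above
// depends on the operand's underlying type:
//
//	for i, b := range buf {...} // slice, array, *array: rangeIndexed
//	for k, v := range m {...}   // map or string:        rangeIter
//	for x := range ch {...}     // channel:              rangeChan
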
// stmt lowers statement s to SSA form, emitting code to fn.
|
2013-06-03 14:46:57 -06:00
|
|
|
func (b *builder) stmt(fn *Function, _s ast.Stmt) {
|
2013-05-17 14:25:48 -06:00
|
|
|
// The label of the current statement. If non-nil, its _goto
|
|
|
|
// target is always set; its _break and _continue are set only
|
|
|
|
// within the body of switch/typeswitch/select/for/range.
|
|
|
|
// It is effectively an additional default-nil parameter of stmt().
|
|
|
|
var label *lblock
|
|
|
|
start:
|
|
|
|
switch s := _s.(type) {
|
|
|
|
case *ast.EmptyStmt:
|
|
|
|
// ignore. (Usually removed by gofmt.)
|
|
|
|
|
|
|
|
case *ast.DeclStmt: // Con, Var or Typ
|
|
|
|
d := s.Decl.(*ast.GenDecl)
|
go.tools/ssa: add debug information for all ast.Idents.
This CL adds three new functions to determine the SSA Value
for a given syntactic var, func or const object:
Program.{Const,Func,Var}Value.
Since constants and functions are immutable, the first
two only need a types.Object; but each distinct
reference to a var may return a distinct Value, so the third
requires an ast.Ident parameter too.
Debug information for local vars is encoded in the
instruction stream in the form of DebugRef instructions,
which are a no-op but relate their operand to a particular
ident in the AST. The beauty of this approach is that it
naturally stays consistent during optimisation passes
(e.g. lifting) without additional bookkeeping.
DebugRef instructions are only generated if the DebugMode
builder flag is set; I plan to make the policy more fine-
grained (per function).
DebugRef instructions are inserted for:
- expr(Ident) for rvalue idents
- address.store() for idents that update an lvalue
- address.address() for idents that take address of lvalue
(this new method replaces all uses of lval.(address).addr)
- expr() for all constant expressions
- local ValueSpecs with implicit zero initialization (no RHS)
(this case doesn't call store() or address())
To ensure we don't forget to emit debug info for uses of Idents,
we must use the lvalue mechanism consistently. (Previously,
many simple cases had effectively inlined these functions.)
Similarly setCallFunc no longer inlines expr(Ident).
Also:
- Program.Value() has been inlined & specialized.
- Program.Package() has moved nearer the new lookup functions.
- refactoring: funcSyntax has lost paramFields, resultFields;
gained funcType, which provides access to both.
- add package-level constants to Package.values map.
- opt: don't call localValueSpec for constants.
(The resulting code is always optimised away.)
There are a number of comments asking whether Literals
should have positions. Will address in a follow-up.
Added tests of all interesting cases.
R=gri
CC=golang-dev
https://golang.org/cl/11259044
2013-07-15 11:56:46 -06:00
|
|
|
if d.Tok == token.VAR {
|
|
|
|
for _, spec := range d.Specs {
|
|
|
|
if vs, ok := spec.(*ast.ValueSpec); ok {
|
|
|
|
b.localValueSpec(fn, vs)
|
|
|
|
}
|
2013-05-17 14:25:48 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
case *ast.LabeledStmt:
|
|
|
|
label = fn.labelledBlock(s.Label)
|
|
|
|
emitJump(fn, label._goto)
|
|
|
|
fn.currentBlock = label._goto
|
|
|
|
_s = s.Stmt
|
|
|
|
goto start // effectively: tailcall stmt(fn, s.Stmt, label)
|
|
|
|
|
|
|
|
case *ast.ExprStmt:
|
|
|
|
b.expr(fn, s.X)
|
|
|
|
|
|
|
|
case *ast.SendStmt:
|
|
|
|
fn.emit(&Send{
|
|
|
|
Chan: b.expr(fn, s.Chan),
|
|
|
|
X: emitConv(fn, b.expr(fn, s.Value),
|
2013-05-31 14:14:13 -06:00
|
|
|
fn.Pkg.typeOf(s.Chan).Underlying().(*types.Chan).Elem()),
|
2013-05-17 15:02:47 -06:00
|
|
|
pos: s.Arrow,
|
2013-05-17 14:25:48 -06:00
|
|
|
})
|
|
|
|
|
|
|
|
case *ast.IncDecStmt:
|
|
|
|
op := token.ADD
|
|
|
|
if s.Tok == token.DEC {
|
|
|
|
op = token.SUB
|
|
|
|
}
|
|
|
|
b.assignOp(fn, b.addr(fn, s.X, false), vOne, op)
|
|
|
|
|
|
|
|
case *ast.AssignStmt:
|
|
|
|
switch s.Tok {
|
|
|
|
case token.ASSIGN, token.DEFINE:
|
|
|
|
b.assignStmt(fn, s.Lhs, s.Rhs, s.Tok == token.DEFINE)
|
|
|
|
|
|
|
|
default: // +=, etc.
|
|
|
|
op := s.Tok + token.ADD - token.ADD_ASSIGN
|
|
|
|
b.assignOp(fn, b.addr(fn, s.Lhs[0], false), b.expr(fn, s.Rhs[0]), op)
|
|
|
|
}
|
|
|
|
|
|
|
|
case *ast.GoStmt:
|
|
|
|
// The "intrinsics" new/make/len/cap are forbidden here.
|
|
|
|
// panic is treated like an ordinary function call.
|
2013-07-31 11:30:59 -06:00
|
|
|
v := Go{pos: s.Go}
|
2013-05-17 14:25:48 -06:00
|
|
|
b.setCall(fn, s.Call, &v.Call)
|
|
|
|
fn.emit(&v)
|
|
|
|
|
|
|
|
case *ast.DeferStmt:
|
|
|
|
// The "intrinsics" new/make/len/cap are forbidden here.
|
|
|
|
// panic is treated like an ordinary function call.
|
2013-07-31 11:30:59 -06:00
|
|
|
v := Defer{pos: s.Defer}
|
2013-05-17 14:25:48 -06:00
|
|
|
b.setCall(fn, s.Call, &v.Call)
|
|
|
|
fn.emit(&v)
|
|
|
|
|
go.tools/ssa: implement correct control flow for recovered panic.
A function such as this:
func one() (x int) {
defer func() { recover() }()
x = 1
panic("return")
}
that combines named return parameters (NRPs) with deferred calls
that call recover, may return non-zero values despite the
fact it doesn't even contain a return statement. (!)
This requires a change to the SSA API: all functions'
control-flow graphs now have a second entry point, called
Recover, which is the block at which control flow resumes
after a recovered panic. The Recover block simply loads the
NRPs and returns them.
As an optimization, most functions don't need a Recover block,
so it is omitted. In fact it is only needed for functions that
have NRPs and defer a call to another function that _may_ call
recover.
Dataflow analysis of SSA now requires extra work, since every
may-panic instruction has an implicit control-flow edge to
the Recover block. The only dataflow analysis so far implemented
is SSA renaming, for which we make the following simplifying
assumption: the Recover block only loads the NRPs and returns.
This means we don't really need to analyze it, we can just
skip the "lifting" of such NRPs. We also special-case the Recover
block in the dominance computation.
Rejected alternative approaches:
- Specifying a Recover block for every defer instruction (like a
traditional exception handler).
This seemed like excessive generality, since Go programs
only need the same degenerate form of Recover block.
- Adding an instruction to set the Recover block immediately
after the named return values are set up, so that dominance
can be computed without special-casing.
This didn't seem worth the effort.
Interpreter:
- This CL completely reimplements the panic/recover/
defer logic in the interpreter. It's clearer and simpler
and closer to the model in the spec.
- Some runtime panic messages have been changed to be closer
to gc's, since tests depend on it.
- The interpreter now requires that the runtime.runtimeError
type be part of the SSA program. This requires that clients
import this package prior to invoking the interpreter.
This in turn requires (Importer).ImportPackage(path string),
which this CL adds.
- All $GOROOT/test/recover{,1,2,3}.go tests are now passing.
NB, the bug described in coverage.go (defer/recover in a concatenated
init function) remains. Will be fixed in a follow-up.
Fixes golang/go#6381
R=gri
CC=crawshaw, golang-dev
https://golang.org/cl/13844043
2013-10-14 13:38:56 -06:00
|
|
|
// A deferred call can cause recovery from panic.
|
|
|
|
// If the panicking function has named results,
|
|
|
|
// control resumes at the Recover block to load those
|
|
|
|
// locals (which may be mutated by the deferred call)
|
|
|
|
// and return them.
|
|
|
|
if fn.namedResults != nil {
|
|
|
|
// Optimization: if we can prove the deferred call
|
|
|
|
// won't cause recovery from panic, we can avoid a
|
|
|
|
// Recover block.
|
|
|
|
// We scan the callee for calls to recover() iff:
|
|
|
|
// - it's a static call
|
|
|
|
// - to a function in the same package
|
|
|
|
// (other packages' SSA building happens concurrently)
|
|
|
|
// - whose SSA building has started (Blocks != nil)
|
|
|
|
// - and finished (i.e. not this function)
|
|
|
|
// NB, this is always true for: defer func() { ... } ()
|
|
|
|
//
|
|
|
|
// TODO(adonovan): optimize interpackage cases, e.g.
|
|
|
|
// (sync.Mutex).Unlock(), (io.Closer).Close
|
|
|
|
if callee, ok := v.Call.Value.(*Function); ok && callee.Pkg == fn.Pkg && callee != fn && callee.Blocks != nil && !callsRecover(callee) {
|
|
|
|
// Deferred call cannot cause recovery from panic.
|
|
|
|
} else {
|
|
|
|
createRecoverBlock(fn)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2013-05-17 14:25:48 -06:00
|
|
|
case *ast.ReturnStmt:
|
|
|
|
var results []Value
|
2013-05-17 15:02:47 -06:00
|
|
|
if len(s.Results) == 1 && fn.Signature.Results().Len() > 1 {
|
2013-05-17 14:25:48 -06:00
|
|
|
// Return of one expression in a multi-valued function.
|
|
|
|
tuple := b.exprN(fn, s.Results[0])
|
2013-05-17 15:02:47 -06:00
|
|
|
ttuple := tuple.Type().(*types.Tuple)
|
|
|
|
for i, n := 0, ttuple.Len(); i < n; i++ {
|
2013-05-17 14:25:48 -06:00
|
|
|
results = append(results,
|
2013-05-17 15:02:47 -06:00
|
|
|
emitConv(fn, emitExtract(fn, tuple, i, ttuple.At(i).Type()),
|
|
|
|
fn.Signature.Results().At(i).Type()))
|
2013-05-17 14:25:48 -06:00
|
|
|
}
|
|
|
|
} else {
|
|
|
|
// 1:1 return, or no-arg return in non-void function.
|
|
|
|
for i, r := range s.Results {
|
2013-05-17 15:02:47 -06:00
|
|
|
v := emitConv(fn, b.expr(fn, r), fn.Signature.Results().At(i).Type())
|
2013-05-17 14:25:48 -06:00
|
|
|
results = append(results, v)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if fn.namedResults != nil {
|
|
|
|
// Function has named result parameters (NRPs).
|
|
|
|
// Perform parallel assignment of return operands to NRPs.
|
|
|
|
for i, r := range results {
|
|
|
|
emitStore(fn, fn.namedResults[i], r)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
// Run function calls deferred in this
|
|
|
|
// function when explicitly returning from it.
|
|
|
|
fn.emit(new(RunDefers))
|
|
|
|
if fn.namedResults != nil {
|
|
|
|
// Reload NRPs to form the result tuple.
|
|
|
|
results = results[:0]
|
|
|
|
for _, r := range fn.namedResults {
|
|
|
|
results = append(results, emitLoad(fn, r))
|
|
|
|
}
|
|
|
|
}
|
2013-10-08 10:31:39 -06:00
|
|
|
fn.emit(&Return{Results: results, pos: s.Return})
|
2013-05-17 14:25:48 -06:00
|
|
|
fn.currentBlock = fn.newBasicBlock("unreachable")
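// Putting the pieces above together (informal sketch; adjust and f are
// hypothetical):
//
//    func g() (x, y int) {
//        defer adjust() // deferred call that may mutate x and y
//        return f()     // f returns (int, int)
//    }
//
// The builder extracts and converts each component of f()'s result tuple,
// stores the operands into the named results x and y, emits RunDefers,
// reloads x and y (which adjust may have mutated), and finally emits the
// Return. Building then continues in a fresh "unreachable" block.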
|
|
|
|
|
|
|
|
case *ast.BranchStmt:
|
|
|
|
var block *BasicBlock
|
|
|
|
switch s.Tok {
|
|
|
|
case token.BREAK:
|
|
|
|
if s.Label != nil {
|
|
|
|
block = fn.labelledBlock(s.Label)._break
|
|
|
|
} else {
|
|
|
|
for t := fn.targets; t != nil && block == nil; t = t.tail {
|
|
|
|
block = t._break
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
case token.CONTINUE:
|
|
|
|
if s.Label != nil {
|
|
|
|
block = fn.labelledBlock(s.Label)._continue
|
|
|
|
} else {
|
|
|
|
for t := fn.targets; t != nil && block == nil; t = t.tail {
|
|
|
|
block = t._continue
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
case token.FALLTHROUGH:
|
|
|
|
for t := fn.targets; t != nil && block == nil; t = t.tail {
|
|
|
|
block = t._fallthrough
|
|
|
|
}
|
|
|
|
|
|
|
|
case token.GOTO:
|
|
|
|
block = fn.labelledBlock(s.Label)._goto
|
|
|
|
}
|
2013-07-03 15:54:55 -06:00
|
|
|
emitJump(fn, block)
|
|
|
|
fn.currentBlock = fn.newBasicBlock("unreachable")
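// Resolution examples (informal sketch):
//
//    Loop:
//        for {
//            for {
//                break Loop // labelled: fn.labelledBlock(s.Label)._break
//            }
//        }
//
//    for {
//        break // unlabelled: nearest fn.targets entry with a non-nil _break
//    }
//
// In both cases the jump is emitted and building continues in a fresh
// "unreachable" block.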
|
2013-05-17 14:25:48 -06:00
|
|
|
|
|
|
|
case *ast.BlockStmt:
|
|
|
|
b.stmtList(fn, s.List)
|
|
|
|
|
|
|
|
case *ast.IfStmt:
|
|
|
|
if s.Init != nil {
|
|
|
|
b.stmt(fn, s.Init)
|
|
|
|
}
|
|
|
|
then := fn.newBasicBlock("if.then")
|
|
|
|
done := fn.newBasicBlock("if.done")
|
|
|
|
els := done
|
|
|
|
if s.Else != nil {
|
|
|
|
els = fn.newBasicBlock("if.else")
|
|
|
|
}
|
|
|
|
b.cond(fn, s.Cond, then, els)
|
|
|
|
fn.currentBlock = then
|
|
|
|
b.stmt(fn, s.Body)
|
|
|
|
emitJump(fn, done)
|
|
|
|
|
|
|
|
if s.Else != nil {
|
|
|
|
fn.currentBlock = els
|
|
|
|
b.stmt(fn, s.Else)
|
|
|
|
emitJump(fn, done)
|
|
|
|
}
|
|
|
|
|
|
|
|
fn.currentBlock = done
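// Resulting CFG shape (informal sketch) for "if cond { A } else { B }; C":
//
//    current:  ...cond... branches to if.then / if.else
//    if.then:  A; jump if.done
//    if.else:  B; jump if.done
//    if.done:  C
//
// When there is no else clause, the false edge of the condition goes
// straight to if.done.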
|
|
|
|
|
|
|
|
case *ast.SwitchStmt:
|
|
|
|
b.switchStmt(fn, s, label)
|
|
|
|
|
|
|
|
case *ast.TypeSwitchStmt:
|
|
|
|
b.typeSwitchStmt(fn, s, label)
|
|
|
|
|
|
|
|
case *ast.SelectStmt:
|
|
|
|
b.selectStmt(fn, s, label)
|
|
|
|
|
|
|
|
case *ast.ForStmt:
|
|
|
|
b.forStmt(fn, s, label)
|
|
|
|
|
|
|
|
case *ast.RangeStmt:
|
|
|
|
b.rangeStmt(fn, s, label)
|
|
|
|
|
|
|
|
default:
|
|
|
|
panic(fmt.Sprintf("unexpected statement kind: %T", s))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// buildFunction builds SSA code for the body of function fn. Idempotent.
|
2013-06-03 14:46:57 -06:00
|
|
|
func (b *builder) buildFunction(fn *Function) {
|
2013-05-17 14:25:48 -06:00
|
|
|
if fn.Blocks != nil {
|
|
|
|
return // building already started
|
|
|
|
}
|
2013-10-27 08:55:21 -06:00
|
|
|
|
|
|
|
var recvField *ast.FieldList
|
|
|
|
var body *ast.BlockStmt
|
|
|
|
var functype *ast.FuncType
|
|
|
|
switch n := fn.syntax.(type) {
|
|
|
|
case nil:
|
2013-05-17 14:25:48 -06:00
|
|
|
return // not a Go source function. (Synthetic, or from object file.)
|
2013-10-27 08:55:21 -06:00
|
|
|
case *ast.FuncDecl:
|
|
|
|
functype = n.Type
|
|
|
|
recvField = n.Recv
|
|
|
|
body = n.Body
|
|
|
|
case *ast.FuncLit:
|
|
|
|
functype = n.Type
|
|
|
|
body = n.Body
|
|
|
|
default:
|
|
|
|
panic(n)
|
2013-05-17 14:25:48 -06:00
|
|
|
}
|
2013-10-27 08:55:21 -06:00
|
|
|
|
|
|
|
if body == nil {
|
2013-05-17 14:25:48 -06:00
|
|
|
// External function.
|
|
|
|
if fn.Params == nil {
|
|
|
|
// This condition ensures we add a non-empty
|
|
|
|
// params list once only, but we may attempt
|
|
|
|
// the degenerate empty case repeatedly.
|
|
|
|
// TODO(adonovan): opt: don't do that.
|
|
|
|
|
|
|
|
// We set Function.Params even though there is no body
|
|
|
|
// code to reference them. This simplifies clients.
|
2013-05-17 15:02:47 -06:00
|
|
|
if recv := fn.Signature.Recv(); recv != nil {
|
2013-05-30 07:59:17 -06:00
|
|
|
fn.addParamObj(recv)
|
2013-05-17 14:25:48 -06:00
|
|
|
}
|
2013-05-30 22:58:14 -06:00
|
|
|
params := fn.Signature.Params()
|
|
|
|
for i, n := 0, params.Len(); i < n; i++ {
|
|
|
|
fn.addParamObj(params.At(i))
|
|
|
|
}
|
2013-05-17 14:25:48 -06:00
|
|
|
}
|
|
|
|
return
|
|
|
|
}
|
2013-06-03 14:46:57 -06:00
|
|
|
if fn.Prog.mode&LogSource != 0 {
|
2013-07-01 13:24:50 -06:00
|
|
|
defer logStack("build function %s @ %s", fn, fn.Prog.Fset.Position(fn.pos))()
|
2013-05-17 14:25:48 -06:00
|
|
|
}
|
|
|
|
fn.startBody()
|
2013-10-27 08:55:21 -06:00
|
|
|
fn.createSyntacticParams(recvField, functype)
|
|
|
|
b.stmt(fn, body)
|
go.tools/ssa: implement correct control flow for recovered panic.
A function such as this:
func one() (x int) {
defer func() { recover() }()
x = 1
panic("return")
}
that combines named return parameters (NRPs) with deferred calls
that call recover may return non-zero values despite the
fact that it doesn't even contain a return statement. (!)
This requires a change to the SSA API: all functions'
control-flow graphs now have a second entry point, called
Recover, which is the block at which control flow resumes
after a recovered panic. The Recover block simply loads the
NRPs and returns them.
As an optimization, most functions don't need a Recover block,
so it is omitted. In fact it is only needed for functions that
have NRPs and defer a call to another function that _may_ call
recover.
Dataflow analysis of SSA now requires extra work, since every
may-panic instruction has an implicit control-flow edge to
the Recover block. The only dataflow analysis so far implemented
is SSA renaming, for which we make the following simplifying
assumption: the Recover block only loads the NRPs and returns.
This means we don't really need to analyze it; we can just
skip the "lifting" of such NRPs. We also special-case the Recover
block in the dominance computation.
Rejected alternative approaches:
- Specifying a Recover block for every defer instruction (like a
traditional exception handler).
This seemed like excessive generality, since Go programs
only need the same degenerate form of Recover block.
- Adding an instruction to set the Recover block immediately
after the named return values are set up, so that dominance
can be computed without special-casing.
This didn't seem worth the effort.
Interpreter:
- This CL completely reimplements the panic/recover/
defer logic in the interpreter. It's clearer and simpler
and closer to the model in the spec.
- Some runtime panic messages have been changed to be closer
to gc's, since tests depend on it.
- The interpreter now requires that the runtime.runtimeError
type be part of the SSA program. This requires that clients
import this package prior to invoking the interpreter.
This in turn requires (Importer).ImportPackage(path string),
which this CL adds.
- All $GOROOT/test/recover{,1,2,3}.go tests are now passing.
NB, the bug described in coverage.go (defer/recover in a concatenated
init function) remains. Will be fixed in a follow-up.
Fixes golang/go#6381
R=gri
CC=crawshaw, golang-dev
https://golang.org/cl/13844043
2013-10-14 13:38:56 -06:00
|
|
|
if cb := fn.currentBlock; cb != nil && (cb == fn.Blocks[0] || cb == fn.Recover || cb.Preds != nil) {
|
2013-05-17 14:25:48 -06:00
|
|
|
// Run function calls deferred in this function when
|
|
|
|
// falling off the end of the body block.
|
|
|
|
fn.emit(new(RunDefers))
|
2013-10-08 10:31:39 -06:00
|
|
|
fn.emit(new(Return))
|
2013-05-17 14:25:48 -06:00
|
|
|
}
|
|
|
|
fn.finishBody()
|
|
|
|
}
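// For example (informal): a function whose body simply falls off the end,
// such as
//
//    func hello() { println("hello") }
//
// finishes with a reachable current block, so the builder appends an
// implicit RunDefers followed by an empty Return before finishing the body.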
|
|
|
|
|
2013-07-29 12:24:09 -06:00
|
|
|
// buildFuncDecl builds SSA code for the function or method declared
|
|
|
|
// by decl in package pkg.
|
|
|
|
//
|
|
|
|
func (b *builder) buildFuncDecl(pkg *Package, decl *ast.FuncDecl) {
|
|
|
|
id := decl.Name
|
|
|
|
if isBlankIdent(id) {
|
2013-10-14 12:08:23 -06:00
|
|
|
return // discard
|
2013-07-29 12:24:09 -06:00
|
|
|
}
|
2013-10-14 12:08:23 -06:00
|
|
|
var fn *Function
|
2013-07-29 12:24:09 -06:00
|
|
|
if decl.Recv == nil && id.Name == "init" {
|
2013-10-14 12:08:23 -06:00
|
|
|
pkg.ninit++
|
|
|
|
fn = &Function{
|
|
|
|
name: fmt.Sprintf("init$%d", pkg.ninit),
|
|
|
|
Signature: new(types.Signature),
|
|
|
|
pos: decl.Name.NamePos,
|
|
|
|
Pkg: pkg,
|
|
|
|
Prog: pkg.Prog,
|
2013-10-27 08:55:21 -06:00
|
|
|
syntax: decl,
|
2013-10-14 12:08:23 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
var v Call
|
|
|
|
v.Call.Value = fn
|
|
|
|
v.setType(types.NewTuple())
|
|
|
|
pkg.init.emit(&v)
|
|
|
|
} else {
|
|
|
|
fn = pkg.values[pkg.objectOf(id)].(*Function)
|
2013-07-29 12:24:09 -06:00
|
|
|
}
|
2013-10-14 12:08:23 -06:00
|
|
|
b.buildFunction(fn)
|
2013-05-17 14:25:48 -06:00
|
|
|
}
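// For example (informal; a and b are hypothetical): a package containing
//
//    func init() { a() }
//    func init() { b() }
//
// produces two functions named init$1 and init$2; for each, a Call is
// emitted into the package's synthetic init(), so they run in source order.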
|
|
|
|
|
2013-06-03 14:46:57 -06:00
|
|
|
// BuildAll calls Package.Build() for each package in prog.
|
|
|
|
// Building occurs in parallel unless the BuildSerially mode flag was set.
|
2013-05-17 14:25:48 -06:00
|
|
|
//
|
2013-06-03 14:46:57 -06:00
|
|
|
// BuildAll is idempotent and thread-safe.
|
2013-05-17 14:25:48 -06:00
|
|
|
//
|
2013-06-03 14:46:57 -06:00
|
|
|
func (prog *Program) BuildAll() {
|
2013-05-17 14:25:48 -06:00
|
|
|
var wg sync.WaitGroup
|
2013-09-06 16:13:57 -06:00
|
|
|
for _, p := range prog.packages {
|
2013-06-03 14:46:57 -06:00
|
|
|
if prog.mode&BuildSerially != 0 {
|
|
|
|
p.Build()
|
2013-05-17 14:25:48 -06:00
|
|
|
} else {
|
|
|
|
wg.Add(1)
|
|
|
|
go func(p *Package) {
|
2013-06-03 14:46:57 -06:00
|
|
|
p.Build()
|
2013-05-17 14:25:48 -06:00
|
|
|
wg.Done()
|
|
|
|
}(p)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
wg.Wait()
|
|
|
|
}
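// Typical client usage (informal sketch; Program construction elided):
//
//    prog.BuildAll() // builds every created package, in parallel by default
//
// If the Program was created with the BuildSerially mode flag, each
// Package.Build() runs sequentially on the calling goroutine instead,
// which can make debugging output easier to follow.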
|
|
|
|
|
2013-06-03 14:46:57 -06:00
|
|
|
// Build builds SSA code for all functions and vars in package p.
|
2013-05-17 14:25:48 -06:00
|
|
|
//
|
2013-07-30 12:28:14 -06:00
|
|
|
// Precondition: CreatePackage must have been called for all of p's
|
|
|
|
// direct imports (and hence its direct imports must have been
|
|
|
|
// error-free).
|
|
|
|
//
|
2013-06-03 14:46:57 -06:00
|
|
|
// Build is idempotent and thread-safe.
|
2013-05-17 14:25:48 -06:00
|
|
|
//
|
2013-06-03 14:46:57 -06:00
|
|
|
func (p *Package) Build() {
|
2013-05-17 14:25:48 -06:00
|
|
|
if !atomic.CompareAndSwapInt32(&p.started, 0, 1) {
|
|
|
|
return // already started
|
|
|
|
}
|
2013-10-23 16:07:53 -06:00
|
|
|
if p.info == nil {
|
|
|
|
return // synthetic package, e.g. "testmain"
|
|
|
|
}
|
go.tools/ssa: fix computation of set of types requiring method sets.
Motivation:
Previously, we assumed that the set of types for which a
complete method set (containing all synthesized wrapper
functions) is required at runtime was the set of types
used as operands to some *ssa.MakeInterface instruction.
In fact, this is an underapproximation because types can
be derived from other ones via reflection, and some of
these may need methods. The reflect.Type API allows *T to
be derived from T, and these may have different method
sets. Reflection also allows almost any subcomponent of a
type to be accessed (with one exception: given T, defined
'type T struct{S}', you can reach S but not struct{S}).
As a result, the pointer analysis was unable to generate
all necessary constraints before running the solver,
causing a crash when reflection derives types whose
methods are unavailable. (A similar problem would afflict
an ahead-of-time compiler based on ssa. The ssa/interp
interpreter was immune only because it does not require
all wrapper methods to be created before execution
begins.)
Description:
This change causes the SSA builder to record, for each
package, the set of all types with non-empty method sets that
are referenced within that package. This set is accessed via
Package.TypesWithMethodSets(). Program.TypesWithMethodSets()
returns its union across all packages.
The set of references that matter are:
- types of operands to some MakeInterface instruction (as before)
- types of all exported package members
- all subcomponents of the above, recursively.
This is a conservative approximation to the set of types
whose methods may be called dynamically.
We define the owning package of a type as follows:
- the owner of a named type is the package in which it is defined;
- the owner of a pointer-to-named type is the owner of that named type;
- the owner of all other types is nil.
A package must include the method sets for all types that it
owns, and all subcomponents of that type that are not owned by
another package, recursively. Types with an owner appear in
exactly one package; types with no owner (such as struct{T})
may appear within multiple packages.
(A typical Go compiler would emit multiple copies of these
methods as weak symbols; a typical linker would eliminate
duplicates.)
Also:
- go/types/typemap: implement hash function for *Tuple.
- pointer: generate nodes/constraints for all of
ssa.Program.TypesWithMethodSets().
Add rtti.go regression test.
- Add API test of Package.TypesWithMethodSets().
- Set Function.Pkg to nil (again) for wrapper functions,
since these may be shared by many packages.
- Remove a redundant logging statement.
- Document that ssa CREATE phase is in fact sequential.
Fixes golang/go#6605
R=gri
CC=golang-dev
https://golang.org/cl/14920056
2013-10-23 15:07:52 -06:00
|
|
|
// Ensure we have runtime type info for all exported members.
|
|
|
|
// TODO(adonovan): ideally belongs in memberFromObject, but
|
|
|
|
// that would require package creation in topological order.
|
|
|
|
for obj := range p.values {
|
|
|
|
if obj.IsExported() {
|
|
|
|
p.needMethodsOf(obj.Type())
|
|
|
|
}
|
|
|
|
}
|
2013-06-03 14:46:57 -06:00
|
|
|
if p.Prog.mode&LogSource != 0 {
|
2013-05-31 14:14:13 -06:00
|
|
|
defer logStack("build %s", p)()
|
2013-05-17 14:25:48 -06:00
|
|
|
}
|
2013-07-10 16:37:52 -06:00
|
|
|
init := p.init
|
2013-05-17 14:25:48 -06:00
|
|
|
init.startBody()
|
|
|
|
|
|
|
|
// Make init() skip if package is already initialized.
|
2013-05-22 15:56:18 -06:00
|
|
|
initguard := p.Var("init$guard")
|
2013-05-17 14:25:48 -06:00
|
|
|
doinit := init.newBasicBlock("init.start")
|
|
|
|
done := init.newBasicBlock("init.done")
|
|
|
|
emitIf(init, emitLoad(init, initguard), done, doinit)
|
|
|
|
init.currentBlock = doinit
|
|
|
|
emitStore(init, initguard, vTrue)
|
|
|
|
|
2013-05-17 15:02:47 -06:00
|
|
|
// Call the init() function of each package we import.
|
2013-09-13 10:52:57 -06:00
|
|
|
for _, pkg := range p.info.Pkg.Imports() {
|
|
|
|
prereq := p.Prog.packages[pkg]
|
2013-07-30 12:28:14 -06:00
|
|
|
if prereq == nil {
|
2013-09-13 10:52:57 -06:00
|
|
|
panic(fmt.Sprintf("Package(%q).Build(): unsatisfied import: Program.CreatePackage(%q) was not called", p.Object.Path(), pkg.Path()))
|
2013-07-30 12:28:14 -06:00
|
|
|
}
|
2013-06-03 14:46:57 -06:00
|
|
|
var v Call
|
2013-07-30 12:28:14 -06:00
|
|
|
v.Call.Value = prereq.init
|
2013-06-03 14:46:57 -06:00
|
|
|
v.Call.pos = init.pos
|
|
|
|
v.setType(types.NewTuple())
|
|
|
|
init.emit(&v)
|
|
|
|
}
|
2013-05-17 14:25:48 -06:00
|
|
|
|
2013-11-05 15:32:45 -07:00
|
|
|
var b builder
|
2013-05-17 14:25:48 -06:00
|
|
|
|
2013-11-05 11:02:46 -07:00
|
|
|
// Initialize package-level vars in correct order.
|
|
|
|
for _, varinit := range p.info.InitOrder {
|
|
|
|
if init.Prog.mode&LogSource != 0 {
|
|
|
|
fmt.Fprintf(os.Stderr, "build global initializer %v @ %s\n",
|
|
|
|
varinit.Lhs, p.Prog.Fset.Position(varinit.Rhs.Pos()))
|
|
|
|
}
|
|
|
|
if len(varinit.Lhs) == 1 {
|
|
|
|
// 1:1 initialization: var x, y = a(), b()
|
|
|
|
var lval lvalue
|
|
|
|
if v := varinit.Lhs[0]; v.Name() != "_" {
|
|
|
|
lval = &address{addr: p.values[v].(*Global)}
|
|
|
|
} else {
|
|
|
|
lval = blank{}
|
|
|
|
}
|
|
|
|
b.exprInPlace(init, lval, varinit.Rhs)
|
|
|
|
} else {
|
|
|
|
// n:1 initialization: var x, y = f()
|
|
|
|
tuple := b.exprN(init, varinit.Rhs)
|
|
|
|
result := tuple.Type().(*types.Tuple)
|
|
|
|
for i, v := range varinit.Lhs {
|
|
|
|
if v.Name() == "_" {
|
|
|
|
continue
|
2013-10-14 12:08:23 -06:00
|
|
|
}
|
2013-11-05 11:02:46 -07:00
|
|
|
emitStore(init, p.values[v].(*Global),
|
|
|
|
emitExtract(init, tuple, i, result.At(i).Type()))
|
2013-10-14 12:08:23 -06:00
|
|
|
}
|
2013-05-17 14:25:48 -06:00
|
|
|
}
|
|
|
|
}
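// Examples (informal) of how InitOrder entries are lowered, where a, b,
// f and g are hypothetical package-level functions:
//
//    var x, y = a(), b() // two 1:1 entries: x is assigned a(), then y is assigned b()
//    var p, q = f()      // one n:1 entry: the tuple f() is split with Extract
//    var _, r = g()      // blank components are skipped; only r is stored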
|
|
|
|
|
2013-11-05 11:02:46 -07:00
|
|
|
// Build all package-level functions, init functions
|
|
|
|
// and methods, including unreachable/blank ones.
|
|
|
|
// We build them in source order, but it's not significant.
|
2013-07-29 12:24:09 -06:00
|
|
|
for _, file := range p.info.Files {
|
|
|
|
for _, decl := range file.Decls {
|
|
|
|
if decl, ok := decl.(*ast.FuncDecl); ok {
|
|
|
|
b.buildFuncDecl(p, decl)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2013-10-14 12:08:23 -06:00
|
|
|
// Finish up init().
|
|
|
|
emitJump(init, done)
|
|
|
|
init.currentBlock = done
|
|
|
|
init.emit(new(Return))
|
|
|
|
init.finishBody()
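// The synthesized init() is therefore equivalent to this informal Go
// sketch (initGuard stands for the init$guard global):
//
//    var initGuard bool
//
//    func init() {
//        if initGuard {
//            return
//        }
//        initGuard = true
//        // 1. call each imported package's init()
//        // 2. run package-level variable initializers in InitOrder
//        // 3. call the source-declared init functions (init$1, init$2, ...)
//    }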
|
|
|
|
|
2013-07-29 12:24:09 -06:00
|
|
|
p.info = nil // We no longer need ASTs or go/types deductions.
|
2013-05-17 14:25:48 -06:00
|
|
|
}
|
2013-05-31 14:14:13 -06:00
|
|
|
|
2013-06-03 14:46:57 -06:00
|
|
|
// Only valid during p's create and build phases.
|
2013-05-31 14:14:13 -06:00
|
|
|
func (p *Package) objectOf(id *ast.Ident) types.Object {
|
|
|
|
if o := p.info.ObjectOf(id); o != nil {
|
|
|
|
return o
|
|
|
|
}
|
|
|
|
panic(fmt.Sprintf("no types.Object for ast.Ident %s @ %s",
|
2013-07-01 13:24:50 -06:00
|
|
|
id.Name, p.Prog.Fset.Position(id.Pos())))
|
2013-05-31 14:14:13 -06:00
|
|
|
}
|
|
|
|
|
2013-06-03 14:46:57 -06:00
|
|
|
// Only valid during p's create and build phases.
|
2013-05-31 14:14:13 -06:00
|
|
|
func (p *Package) typeOf(e ast.Expr) types.Type {
|
|
|
|
return p.info.TypeOf(e)
|
|
|
|
}
|
2013-10-23 15:07:52 -06:00
|
|
|
|
|
|
|
// needMethodsOf ensures that runtime type information (including the
|
|
|
|
// complete method set) is available for the specified type T and all
|
|
|
|
// its subcomponents.
|
|
|
|
//
|
|
|
|
// needMethodsOf must be called for at least every type that is an
|
|
|
|
// operand of some MakeInterface instruction, and for the type of
|
|
|
|
// every exported package member.
|
|
|
|
//
|
|
|
|
func (p *Package) needMethodsOf(T types.Type) {
|
|
|
|
p.needMethods(T, false)
|
|
|
|
}
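// Motivating example (informal; T is a hypothetical exported type with a
// method M): even if the program never mentions *T, reflection can derive
// it and invoke its methods, so the *T wrappers must already exist:
//
//    t := reflect.TypeOf(T{})     // T enters the program via MakeInterface
//    pt := reflect.PtrTo(t)       // *T derived purely by reflection
//    m, _ := pt.MethodByName("M")
//    m.Func.Call([]reflect.Value{reflect.New(t)})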
|
|
|
|
|
|
|
|
// Recursive case: skip => don't call makeMethods(T).
|
|
|
|
func (p *Package) needMethods(T types.Type, skip bool) {
|
|
|
|
// Each package maintains its own set of types it has visited.
|
|
|
|
if p.needRTTI.Set(T, true) != nil {
|
|
|
|
return // already seen
|
|
|
|
}
|
|
|
|
|
|
|
|
// Prune the recursion if we find a named or *named type
|
|
|
|
// belonging to another package.
|
|
|
|
var n *types.Named
|
|
|
|
switch T := T.(type) {
|
|
|
|
case *types.Named:
|
|
|
|
n = T
|
|
|
|
case *types.Pointer:
|
|
|
|
n, _ = T.Elem().(*types.Named)
|
|
|
|
}
|
|
|
|
if n != nil {
|
|
|
|
owner := n.Obj().Pkg()
|
|
|
|
if owner == nil {
|
|
|
|
return // built-in error type
|
|
|
|
}
|
|
|
|
if owner != p.Object {
|
|
|
|
return // belongs to another package
|
|
|
|
}
|
|
|
|
}
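// Ownership illustration (informal sketch):
//
//    type T struct{ S } // in package p: T and *T are owned by p
//    var x struct{ T }  // unnamed struct{T} has no owner; it may be
//                       // visited, and its methods emitted, by several packages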
|
|
|
|
|
|
|
|
// All the actual method sets live in the Program so that
|
|
|
|
// multiple packages can share a single copy in memory of the
|
|
|
|
// symbols that would be compiled into multiple packages (as
|
|
|
|
// weak symbols).
|
|
|
|
if !skip && p.Prog.makeMethods(T) {
|
|
|
|
p.methodSets = append(p.methodSets, T)
|
|
|
|
}
|
|
|
|
|
|
|
|
switch t := T.(type) {
|
|
|
|
case *types.Basic:
|
|
|
|
// nop
|
|
|
|
|
|
|
|
case *types.Interface:
|
|
|
|
for i, n := 0, t.NumMethods(); i < n; i++ {
|
|
|
|
p.needMethodsOf(t.Method(i).Type())
|
|
|
|
}
|
|
|
|
|
|
|
|
case *types.Pointer:
|
|
|
|
p.needMethodsOf(t.Elem())
|
|
|
|
|
|
|
|
case *types.Slice:
|
|
|
|
p.needMethodsOf(t.Elem())
|
|
|
|
|
|
|
|
case *types.Chan:
|
|
|
|
p.needMethodsOf(t.Elem())
|
|
|
|
|
|
|
|
case *types.Map:
|
|
|
|
p.needMethodsOf(t.Key())
|
|
|
|
p.needMethodsOf(t.Elem())
|
|
|
|
|
|
|
|
case *types.Signature:
|
|
|
|
if t.Recv() != nil {
|
|
|
|
p.needMethodsOf(t.Recv().Type())
|
|
|
|
}
|
|
|
|
p.needMethodsOf(t.Params())
|
|
|
|
p.needMethodsOf(t.Results())
|
|
|
|
|
|
|
|
case *types.Named:
|
|
|
|
// A pointer-to-named type can be derived from a named
|
|
|
|
// type via reflection. It may have methods too.
|
|
|
|
p.needMethodsOf(types.NewPointer(T))
|
|
|
|
|
|
|
|
// Consider 'type T struct{S}' where S has methods.
|
|
|
|
// Reflection provides no way to get from T to struct{S},
|
|
|
|
// only to S, so the method set of struct{S} is not needed;
|
|
|
|
// hence we set the 'skip' flag during the recursion.
|
|
|
|
p.needMethods(t.Underlying(), true)
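// For example (informal):
//
//    type S struct{}
//    func (S) M() {}
//    type T struct{ S }
//
// Visiting T also visits *T and the fields of struct{S} (so S gets its
// method set), but no method set is ever built for struct{S} itself.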
|
|
|
|
|
|
|
|
case *types.Array:
|
|
|
|
p.needMethodsOf(t.Elem())
|
|
|
|
|
|
|
|
case *types.Struct:
|
|
|
|
// TODO(adonovan): must we recur over the types of promoted methods?
|
|
|
|
for i, n := 0, t.NumFields(); i < n; i++ {
|
|
|
|
p.needMethodsOf(t.Field(i).Type())
|
|
|
|
}
|
|
|
|
|
|
|
|
case *types.Tuple:
|
|
|
|
for i, n := 0, t.Len(); i < n; i++ {
|
|
|
|
p.needMethodsOf(t.At(i).Type())
|
|
|
|
}
|
|
|
|
|
|
|
|
default:
|
|
|
|
panic(T)
|
|
|
|
}
|
|
|
|
}
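// Walk examples (informal sketch of the recursion above, where K, V, T
// and U stand for arbitrary named types):
//
//    needMethodsOf(map[K]*V)  // visits K, *V, and then V
//    needMethodsOf(func(T) U) // visits T and U via the Params/Results tuples
//    needMethodsOf([]chan T)  // visits chan T, then T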
|