3b5de067a1
Core: reflect.TypeOf, reflect.ValueOf, reflect.Zero, reflect.Value.Interface
Maps: (reflect.Value).MapIndex, (reflect.Value).MapKeys, (reflect.Value).SetMapIndex, (*reflect.rtype).Elem, (*reflect.rtype).Key
+ tests: pointer/testdata/mapreflect.go, oracle/testdata/src/main/reflection.go

Interface objects (T, V...) have been renamed "tagged objects".

Abstraction: we model reflect.Value similarly to interface{}: as a pointer that points only to tagged objects. However, a reflect.Value may also point to an "indirect tagged object", one in which the payload V has type *T rather than T. These are required because reflect.Values can hold lvalues, e.g. when derived via Field() or Elem(), though we won't use them until we get to structs and pointers.

Solving: each reflection intrinsic defines a new constraint and resolution rule. Because reflection generalizes across types, the resolution rules dynamically create additional complex constraints during solving, where previously only simple (copy) constraints were created. This requires some solver changes:
- The work done before the main solver loop (attaching new constraints to the graph) is now done before each iteration, in processNewConstraints.
- Its loop over constraints is broken into two passes: the first handles base (addr-of) constraints, the second handles simple and complex constraints.
- constraint.init() has been inlined. The only behaviour that varies across constraints is ptr().
Sadly this will pessimize presolver optimisations, when we get there; such is the price of reflection.

Objects: reflection intrinsics create objects (i.e. cause memory allocations) with no SSA operation, so we represent them by the cgnode of the intrinsic (e.g. reflect.New). To that end, Labels and node.data now represent objects as a product (not a sum) of ssa.Value and cgnode, pulled out into its own type, struct object. This simplifies a number of invariants and saves space. The ntObject flag is now represented by obj != nil; the other flags are moved into object. cgnodes are now always recorded in objects/Labels for which it is appropriate (all but those for globals, constants and the shared contours for functions). A sketch of this representation follows below.

Also:
- Prepopulate the flattenMemo cache to consider reflect.Value a fake pointer, not a struct.
- Improve accessors and documentation on type Label.
- @conctypes assertions renamed @types (since dynamic types needn't be concrete).
- Add an oracle 'describe' test on an interface (missing, an oversight).

R=crawshaw
CC=golang-dev
https://golang.org/cl/13418048
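As a reading aid, here is a rough sketch of the "product, not sum" object representation described above. It is illustrative only: the package name, the cgnode stub, and the field names are assumptions for exposition, not the package's actual declarations.

	// Sketch only; field names are assumed, not copied from the real
	// declaration of struct object in the pointer package.
	package sketch

	import "code.google.com/p/go.tools/ssa"

	// cgnode stands in for the pointer package's call-graph node type.
	type cgnode struct{}

	// object identifies an allocation as the product (not sum) of an
	// ssa.Value and a cgnode.  Reflection intrinsics such as reflect.New
	// allocate with no SSA operation, so val may be nil and the cgnode
	// alone identifies the object; cgn is nil only for globals, constants
	// and the shared contours for functions.
	type object struct {
		flags uint32    // absorbs the old ntObject and related node flags
		val   ssa.Value // allocating SSA value, or nil
		cgn   *cgnode   // creating call-graph node, or nil
	}

With the object made explicit, the old ntObject flag reduces to a simple obj != nil test on the node, as the message above notes.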
350 lines · 8.4 KiB · Go
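Before the listing, a note on the helpers it leans on: nodeid, nodeset (the pts, prevPts and copyTo sets), the per-node complex constraint set, and the work queue are all declared elsewhere in the package. A minimal nodeset consistent with how this file uses it (add and addAll reporting growth, diff returning nil when nothing is new) might look like the sketch below, assuming a map-based set; the real declarations may differ.

	// Sketch only: one plausible nodeset, shaped after its uses in solve.go.
	package sketch

	type nodeid uint32

	// nodeset is a set of node ids.
	type nodeset map[nodeid]struct{}

	// add inserts id and reports whether the set grew.
	func (ns *nodeset) add(id nodeid) bool {
		if _, ok := (*ns)[id]; ok {
			return false
		}
		if *ns == nil {
			*ns = make(nodeset)
		}
		(*ns)[id] = struct{}{}
		return true
	}

	// addAll inserts every element of other and reports whether the set grew.
	func (ns *nodeset) addAll(other nodeset) bool {
		grew := false
		for id := range other {
			if ns.add(id) {
				grew = true
			}
		}
		return grew
	}

	// diff returns the elements of ns not present in other, or nil if none.
	// Returning nil is what lets solve() skip nodes whose points-to set has
	// not changed since the last visit (difference propagation).
	func (ns nodeset) diff(other nodeset) nodeset {
		var d nodeset
		for id := range ns {
			if _, ok := other[id]; !ok {
				d.add(id)
			}
		}
		return d
	}

	// clone returns a copy of ns.
	func (ns nodeset) clone() nodeset {
		c := make(nodeset, len(ns))
		for id := range ns {
			c[id] = struct{}{}
		}
		return c
	}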
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package pointer

// This file defines a naive Andersen-style solver for the inclusion
// constraint system.

import (
	"fmt"

	"code.google.com/p/go.tools/go/types"
)

// solve runs the solver, propagating points-to information until a
// fixed point is reached.
func (a *analysis) solve() {
	a.work.swap()

	// Solver main loop.
	for round := 1; ; round++ {
		// Add new constraints to the graph:
		// static constraints from SSA on round 1,
		// dynamic constraints from reflection thereafter.
		a.processNewConstraints()

		if a.work.swap() {
			if a.log != nil {
				fmt.Fprintf(a.log, "Solving, round %d\n", round)
			}

			// Next iteration.
			if a.work.empty() {
				break // done
			}
		}

		id := a.work.take()
		if a.log != nil {
			fmt.Fprintf(a.log, "\tnode n%d\n", id)
		}

		n := a.nodes[id]

		// Difference propagation.
		delta := n.pts.diff(n.prevPts)
		if delta == nil {
			continue
		}
		n.prevPts = n.pts.clone()

		// Apply all resolution rules attached to n.
		a.solveConstraints(n, delta)

		if a.log != nil {
			fmt.Fprintf(a.log, "\t\tpts(n%d) = %s\n", id, n.pts)
		}
	}

	if a.log != nil {
		fmt.Fprintf(a.log, "Solver done\n")
	}
}

// processNewConstraints takes the new constraints from a.constraints
// and adds them to the graph, ensuring
// that new constraints are applied to pre-existing labels and
// that pre-existing constraints are applied to new labels.
//
func (a *analysis) processNewConstraints() {
	// Take the slice of new constraints.
	// (May grow during call to solveConstraints.)
	constraints := a.constraints
	a.constraints = nil

	// Initialize points-to sets from addr-of (base) constraints.
	for _, c := range constraints {
		if c, ok := c.(*addrConstraint); ok {
			dst := a.nodes[c.dst]
			dst.pts.add(c.src)

			// Populate the worklist with nodes that point to
			// something initially (due to addrConstraints) and
			// have other constraints attached.
			// (A no-op in round 1.)
			if dst.copyTo != nil || dst.complex != nil {
				a.addWork(c.dst)
			}
		}
	}

	// Attach simple (copy) and complex constraints to nodes.
	var stale nodeset
	for _, c := range constraints {
		var id nodeid
		switch c := c.(type) {
		case *addrConstraint:
			// base constraints handled in previous loop
			continue
		case *copyConstraint:
			// simple (copy) constraint
			id = c.src
			a.nodes[id].copyTo.add(c.dst)
		default:
			// complex constraint
			id = c.ptr()
			a.nodes[id].complex.add(c)
		}

		if n := a.nodes[id]; len(n.pts) > 0 {
			if len(n.prevPts) > 0 {
				stale.add(id)
			}
			if a.log != nil {
				fmt.Fprintf(a.log, "Adding to worklist n%d\n", id)
			}
			a.addWork(id)
		}
	}
	// Apply new constraints to pre-existing PTS labels.
	for id := range stale {
		n := a.nodes[id]
		a.solveConstraints(n, n.prevPts)
	}
}

// solveConstraints applies each resolution rule attached to node n to
// the set of labels delta.  It may generate new constraints in
// a.constraints.
//
func (a *analysis) solveConstraints(n *node, delta nodeset) {
	if delta == nil {
		return
	}

	// Process complex constraints dependent on n.
	for c := range n.complex {
		if a.log != nil {
			fmt.Fprintf(a.log, "\t\tconstraint %s\n", c)
		}
		// TODO(adonovan): parameter n is never used.  Remove?
		c.solve(a, n, delta)
	}

	// Process copy constraints.
	var copySeen nodeset
	for mid := range n.copyTo {
		if copySeen.add(mid) {
			if a.nodes[mid].pts.addAll(delta) {
				a.addWork(mid)
			}
		}
	}
}

func (a *analysis) addWork(id nodeid) {
	a.work.add(id)
	if a.log != nil {
		fmt.Fprintf(a.log, "\t\twork: n%d\n", id)
	}
}

func (c *addrConstraint) ptr() nodeid {
	panic("addrConstraint: not a complex constraint")
}
func (c *copyConstraint) ptr() nodeid {
	panic("copyConstraint: not a complex constraint")
}

// Complex constraints attach themselves to the relevant pointer node.

func (c *storeConstraint) ptr() nodeid {
	return c.dst
}
func (c *loadConstraint) ptr() nodeid {
	return c.src
}
func (c *offsetAddrConstraint) ptr() nodeid {
	return c.src
}
func (c *typeAssertConstraint) ptr() nodeid {
	return c.src
}
func (c *invokeConstraint) ptr() nodeid {
	return c.iface
}

// onlineCopy adds a copy edge.  It is called online, i.e. during
// solving, so it adds edges and pts members directly rather than by
// instantiating a 'constraint'.
//
// The size of the copy is implicitly 1.
// It returns true if pts(dst) changed.
//
func (a *analysis) onlineCopy(dst, src nodeid) bool {
	if dst != src {
		if nsrc := a.nodes[src]; nsrc.copyTo.add(dst) {
			if a.log != nil {
				fmt.Fprintf(a.log, "\t\t\tdynamic copy n%d <- n%d\n", dst, src)
			}
			return a.nodes[dst].pts.addAll(nsrc.pts)
		}
	}
	return false
}

// onlineCopyN is the online equivalent of an n-ary copy: it copies the
// sizeof nodes starting at src to those starting at dst, implicitly
// adding changed nodes to the worklist.  It returns sizeof.
func (a *analysis) onlineCopyN(dst, src nodeid, sizeof uint32) uint32 {
	for i := uint32(0); i < sizeof; i++ {
		if a.onlineCopy(dst, src) {
			a.addWork(dst)
		}
		src++
		dst++
	}
	return sizeof
}

func (c *loadConstraint) solve(a *analysis, n *node, delta nodeset) {
	var changed bool
	for k := range delta {
		koff := k + nodeid(c.offset)
		if a.onlineCopy(c.dst, koff) {
			changed = true
		}
	}
	if changed {
		a.addWork(c.dst)
	}
}

func (c *storeConstraint) solve(a *analysis, n *node, delta nodeset) {
	for k := range delta {
		koff := k + nodeid(c.offset)
		if a.onlineCopy(koff, c.src) {
			a.addWork(koff)
		}
	}
}

func (c *offsetAddrConstraint) solve(a *analysis, n *node, delta nodeset) {
	dst := a.nodes[c.dst]
	for k := range delta {
		if dst.pts.add(k + nodeid(c.offset)) {
			a.addWork(c.dst)
		}
	}
}

func (c *typeAssertConstraint) solve(a *analysis, n *node, delta nodeset) {
	tIface, _ := c.typ.Underlying().(*types.Interface)

	for ifaceObj := range delta {
		tDyn, v, indirect := a.taggedValue(ifaceObj)
		if tDyn == nil {
			panic("not a tagged value")
		}
		if indirect {
			// TODO(adonovan): we'll need to implement this
			// when we start creating indirect tagged objects.
			panic("indirect tagged object")
		}

		if tIface != nil {
			if types.IsAssignableTo(tDyn, tIface) {
				if a.nodes[c.dst].pts.add(ifaceObj) {
					a.addWork(c.dst)
				}
			}
		} else {
			if types.IsIdentical(tDyn, c.typ) {
				// Copy entire payload to dst.
				//
				// TODO(adonovan): opt: if tDyn is
				// non-pointerlike we can skip this
				// entire constraint, perhaps.  We
				// only care about pointers among the
				// fields.
				a.onlineCopyN(c.dst, v, a.sizeof(tDyn))
			}
		}
	}
}

func (c *invokeConstraint) solve(a *analysis, n *node, delta nodeset) {
	for ifaceObj := range delta {
		tDyn, v, indirect := a.taggedValue(ifaceObj)
		if tDyn == nil {
			panic("not a tagged value")
		}
		if indirect {
			// TODO(adonovan): we may need to implement this if
			// we ever apply invokeConstraints to reflect.Value PTSs,
			// e.g. for (reflect.Value).Call.
			panic("indirect tagged object")
		}

		// Look up the concrete method.
		meth := tDyn.MethodSet().Lookup(c.method.Pkg(), c.method.Name())
		if meth == nil {
			panic(fmt.Sprintf("n%d: type %s has no method %s (iface=n%d)",
				c.iface, tDyn, c.method, ifaceObj))
		}
		fn := a.prog.Method(meth)
		if fn == nil {
			panic(fmt.Sprintf("n%d: no ssa.Function for %s", c.iface, meth))
		}
		sig := fn.Signature

		fnObj := a.funcObj[fn] // dynamic calls use shared contour
		if fnObj == 0 {
			// a.valueNode(fn) was not called during gen phase.
			panic(fmt.Sprintf("a.funcObj(%s)==nil", fn))
		}

		// Make the callsite's fn variable point to the identity of the
		// concrete method.  (There's no need to add it to the worklist
		// since it never has attached constraints.)
		a.nodes[c.params].pts.add(fnObj)

		// Extract the tagged object's payload and copy it to the
		// method's receiver parameter (arg0).
		arg0 := a.funcParams(fnObj)
		recvSize := a.sizeof(sig.Recv().Type())
		a.onlineCopyN(arg0, v, recvSize)

		src := c.params + 1 // skip past identity
		dst := arg0 + nodeid(recvSize)

		// Copy the caller's argument block to the method's formal parameters.
		paramsSize := a.sizeof(sig.Params())
		a.onlineCopyN(dst, src, paramsSize)
		src += nodeid(paramsSize)
		dst += nodeid(paramsSize)

		// Copy the method's results to the caller's result block.
		resultsSize := a.sizeof(sig.Results())
		a.onlineCopyN(src, dst, resultsSize)
	}
}

func (c *addrConstraint) solve(a *analysis, n *node, delta nodeset) {
	panic("addr is not a complex constraint")
}

func (c *copyConstraint) solve(a *analysis, n *node, delta nodeset) {
	panic("copy is not a complex constraint")
}