2013-08-27 16:49:13 -06:00
|
|
|
// Copyright 2013 The Go Authors. All rights reserved.
|
|
|
|
// Use of this source code is governed by a BSD-style
|
|
|
|
// license that can be found in the LICENSE file.
|
|
|
|
|
2013-08-22 10:27:55 -06:00
|
|
|
package pointer
|
|
|
|
|
|
|
|
// This file defines the constraint generation phase.
|
|
|
|
|
go.tools/pointer: strength reduction during constraint generation.
Motivation: simple constraints---copy and addr---are more
amenable to pre-solver optimizations (forthcoming) than
complex constraints: load, store, and all others.
In code such as the following:
t0 = new struct { x, y int }
t1 = &t0.y
t2 = *t1
there's no need for the full generality of a (complex)
load constraint for t2=*t1 since t1 can only point to t0.y.
All we need is a (simple) copy constraint t2 = (t0.y)
where (t0.y) is the object node label for that field.
For all "addressable" SSA instructions, we tabulate
whether their points-to set is necessarily a singleton. For
some (e.g. Alloc, MakeSlice, etc) this is always true by
design. For others (e.g. FieldAddr) it depends on their
operands.
We exploit this information when generating constraints:
all load-form and store-form constraints are reduced to copy
constraints if the pointer's PTS is a singleton.
Similarly all FieldAddr (y=&x.f) and IndexAddr (y=&x[0])
constraints are reduced to offset addition, for singleton
operands.
Here's the constraint mix when running on the oracle itself.
The total number of constraints is unchanged but the fraction
that are complex has gone down to 21% from 53%.
before after
--simple--
addr 20682 46949
copy 61454 91211
--complex--
offsetAddr 41621 15325
load 18769 12925
store 30758 6908
invoke 758 760
typeAssert 1688 1689
total 175832 175869
Also:
- Add Pointer.Context() for local variables,
since we now plumb cgnodes throughout. Nice.
- Refactor all load-form (load, receive, lookup) and
store-form (Store, send, MapUpdate) constraints to use
genLoad and genStore.
- Log counts of constraints by type.
- valNodes split into localval and globalval maps;
localval is purged after each function.
- analogous maps localobj[v] and globalobj[v] hold sole label
for pts(v), if singleton.
- fnObj map subsumed by globalobj.
- make{Function/Global/Constant} inlined into objectValue.
Much cleaner.
R=crawshaw
CC=golang-dev
https://golang.org/cl/13979043
2013-09-27 09:33:01 -06:00
|
|
|
// TODO(adonovan): move the constraint definitions and the store() etc
|
|
|
|
// functions which add them (and are also used by the solver) into a
|
|
|
|
// new file, constraints.go.
|
|
|
|
|
2013-08-22 10:27:55 -06:00
|
|
|
import (
|
|
|
|
"fmt"
|
|
|
|
"go/ast"
|
|
|
|
"go/token"
|
|
|
|
|
|
|
|
"code.google.com/p/go.tools/go/types"
|
|
|
|
"code.google.com/p/go.tools/ssa"
|
|
|
|
)
|
|
|
|
|
|
|
|
var (
|
|
|
|
tEface = types.NewInterface(nil)
|
|
|
|
tInvalid = types.Typ[types.Invalid]
|
|
|
|
tUnsafePtr = types.Typ[types.UnsafePointer]
|
|
|
|
)
|
|
|
|
|
|
|
|
// ---------- Node creation ----------
|
|
|
|
|
|
|
|
// nextNode returns the index of the next unused node.
|
|
|
|
func (a *analysis) nextNode() nodeid {
|
|
|
|
return nodeid(len(a.nodes))
|
|
|
|
}
|
|
|
|
|
|
|
|
// addNodes creates nodes for all scalar elements in type typ, and
|
|
|
|
// returns the id of the first one, or zero if the type was
|
|
|
|
// analytically uninteresting.
|
|
|
|
//
|
|
|
|
// comment explains the origin of the nodes, as a debugging aid.
|
|
|
|
//
|
|
|
|
func (a *analysis) addNodes(typ types.Type, comment string) nodeid {
|
|
|
|
id := a.nextNode()
|
|
|
|
for _, fi := range a.flatten(typ) {
|
|
|
|
a.addOneNode(fi.typ, comment, fi)
|
|
|
|
}
|
|
|
|
if id == a.nextNode() {
|
|
|
|
return 0 // type contained no pointers
|
|
|
|
}
|
|
|
|
return id
|
|
|
|
}
|
|
|
|
|
|
|
|
// addOneNode creates a single node with type typ, and returns its id.
|
|
|
|
//
|
go.tools/pointer: reflection, part 1: maps, and some core features.
Core:
reflect.TypeOf
reflect.ValueOf
reflect.Zero
reflect.Value.Interface
Maps:
(reflect.Value).MapIndex
(reflect.Value).MapKeys
(reflect.Value).SetMapIndex
(*reflect.rtype).Elem
(*reflect.rtype).Key
+ tests:
pointer/testdata/mapreflect.go.
oracle/testdata/src/main/reflection.go.
Interface objects (T, V...) have been renamed "tagged objects".
Abstraction: we model reflect.Value similar to
interface{}---as a pointer that points only to tagged
objects---but a reflect.Value may also point to an "indirect
tagged object", one in which the payload V is of type *T not T.
These are required because reflect.Values can hold lvalues,
e.g. when derived via Field() or Elem(), though we won't use
them till we get to structs and pointers.
Solving: each reflection intrinsic defines a new constraint
and resolution rule. Because of the nature of reflection,
generalizing across types, the resolution rules dynamically
create additional complex constraints during solving, where
previously only simple (copy) constraints were created.
This requires some solver changes:
The work done before the main solver loop (to attach new
constraints to the graph) is now done before each iteration,
in processNewConstraints.
Its loop over constraints is broken into two passes:
the first handles base (addr-of) constraints,
the second handles simple and complex constraints.
constraint.init() has been inlined. The only behaviour that
varies across constraints is ptr()
Sadly this will pessimize presolver optimisations, when we get
there; such is the price of reflection.
Objects: reflection intrinsics create objects (i.e. cause
memory allocations) with no SSA operation. We will represent
them as the cgnode of the instrinsic (e.g. reflect.New), so we
extend Labels and node.data to represent objects as a product
(not sum) of ssa.Value and cgnode and pull this out into its
own type, struct object. This simplifies a number of
invariants and saves space. The ntObject flag is now
represented by obj!=nil; the other flags are moved into
object.
cgnodes are now always recorded in objects/Labels for which it
is appropriate (all but those for globals, constants and the
shared contours for functions).
Also:
- Prepopulate the flattenMemo cache to consider reflect.Value
a fake pointer, not a struct.
- Improve accessors and documentation on type Label.
- @conctypes assertions renamed @types (since dyn. types needn't be concrete).
- add oracle 'describe' test on an interface (missing, an oversight).
R=crawshaw
CC=golang-dev
https://golang.org/cl/13418048
2013-09-16 07:49:10 -06:00
|
|
|
// typ should generally be scalar (except for tagged.T nodes
|
2013-08-22 10:27:55 -06:00
|
|
|
// and struct/array identity nodes). Use addNodes for non-scalar types.
|
|
|
|
//
|
|
|
|
// comment explains the origin of the nodes, as a debugging aid.
|
|
|
|
// subelement indicates the subelement, e.g. ".a.b[*].c".
|
|
|
|
//
|
|
|
|
func (a *analysis) addOneNode(typ types.Type, comment string, subelement *fieldInfo) nodeid {
|
|
|
|
id := a.nextNode()
|
|
|
|
a.nodes = append(a.nodes, &node{typ: typ, subelement: subelement})
|
|
|
|
if a.log != nil {
|
|
|
|
fmt.Fprintf(a.log, "\tcreate n%d %s for %s%s\n",
|
|
|
|
id, typ, comment, subelement.path())
|
|
|
|
}
|
|
|
|
return id
|
|
|
|
}
|
|
|
|
|
|
|
|
// setValueNode associates node id with the value v.
|
go.tools/pointer: strength reduction during constraint generation.
Motivation: simple constraints---copy and addr---are more
amenable to pre-solver optimizations (forthcoming) than
complex constraints: load, store, and all others.
In code such as the following:
t0 = new struct { x, y int }
t1 = &t0.y
t2 = *t1
there's no need for the full generality of a (complex)
load constraint for t2=*t1 since t1 can only point to t0.y.
All we need is a (simple) copy constraint t2 = (t0.y)
where (t0.y) is the object node label for that field.
For all "addressable" SSA instructions, we tabulate
whether their points-to set is necessarily a singleton. For
some (e.g. Alloc, MakeSlice, etc) this is always true by
design. For others (e.g. FieldAddr) it depends on their
operands.
We exploit this information when generating constraints:
all load-form and store-form constraints are reduced to copy
constraints if the pointer's PTS is a singleton.
Similarly all FieldAddr (y=&x.f) and IndexAddr (y=&x[0])
constraints are reduced to offset addition, for singleton
operands.
Here's the constraint mix when running on the oracle itself.
The total number of constraints is unchanged but the fraction
that are complex has gone down to 21% from 53%.
before after
--simple--
addr 20682 46949
copy 61454 91211
--complex--
offsetAddr 41621 15325
load 18769 12925
store 30758 6908
invoke 758 760
typeAssert 1688 1689
total 175832 175869
Also:
- Add Pointer.Context() for local variables,
since we now plumb cgnodes throughout. Nice.
- Refactor all load-form (load, receive, lookup) and
store-form (Store, send, MapUpdate) constraints to use
genLoad and genStore.
- Log counts of constraints by type.
- valNodes split into localval and globalval maps;
localval is purged after each function.
- analogous maps localobj[v] and globalobj[v] hold sole label
for pts(v), if singleton.
- fnObj map subsumed by globalobj.
- make{Function/Global/Constant} inlined into objectValue.
Much cleaner.
R=crawshaw
CC=golang-dev
https://golang.org/cl/13979043
2013-09-27 09:33:01 -06:00
|
|
|
// cgn identifies the context iff v is a local variable.
|
|
|
|
//
|
|
|
|
func (a *analysis) setValueNode(v ssa.Value, id nodeid, cgn *cgnode) {
|
|
|
|
if cgn != nil {
|
|
|
|
a.localval[v] = id
|
|
|
|
} else {
|
|
|
|
a.globalval[v] = id
|
|
|
|
}
|
2013-08-22 10:27:55 -06:00
|
|
|
if a.log != nil {
|
|
|
|
fmt.Fprintf(a.log, "\tval[%s] = n%d (%T)\n", v.Name(), id, v)
|
|
|
|
}
|
|
|
|
|
|
|
|
// Record the (v, id) relation if the client has queried v.
|
2013-09-25 15:17:42 -06:00
|
|
|
if indirect, ok := a.config.Queries[v]; ok {
|
2013-09-09 19:06:25 -06:00
|
|
|
if indirect {
|
|
|
|
tmp := a.addNodes(v.Type(), "query.indirect")
|
go.tools/pointer: strength reduction during constraint generation.
Motivation: simple constraints---copy and addr---are more
amenable to pre-solver optimizations (forthcoming) than
complex constraints: load, store, and all others.
In code such as the following:
t0 = new struct { x, y int }
t1 = &t0.y
t2 = *t1
there's no need for the full generality of a (complex)
load constraint for t2=*t1 since t1 can only point to t0.y.
All we need is a (simple) copy constraint t2 = (t0.y)
where (t0.y) is the object node label for that field.
For all "addressable" SSA instructions, we tabulate
whether their points-to set is necessarily a singleton. For
some (e.g. Alloc, MakeSlice, etc) this is always true by
design. For others (e.g. FieldAddr) it depends on their
operands.
We exploit this information when generating constraints:
all load-form and store-form constraints are reduced to copy
constraints if the pointer's PTS is a singleton.
Similarly all FieldAddr (y=&x.f) and IndexAddr (y=&x[0])
constraints are reduced to offset addition, for singleton
operands.
Here's the constraint mix when running on the oracle itself.
The total number of constraints is unchanged but the fraction
that are complex has gone down to 21% from 53%.
before after
--simple--
addr 20682 46949
copy 61454 91211
--complex--
offsetAddr 41621 15325
load 18769 12925
store 30758 6908
invoke 758 760
typeAssert 1688 1689
total 175832 175869
Also:
- Add Pointer.Context() for local variables,
since we now plumb cgnodes throughout. Nice.
- Refactor all load-form (load, receive, lookup) and
store-form (Store, send, MapUpdate) constraints to use
genLoad and genStore.
- Log counts of constraints by type.
- valNodes split into localval and globalval maps;
localval is purged after each function.
- analogous maps localobj[v] and globalobj[v] hold sole label
for pts(v), if singleton.
- fnObj map subsumed by globalobj.
- make{Function/Global/Constant} inlined into objectValue.
Much cleaner.
R=crawshaw
CC=golang-dev
https://golang.org/cl/13979043
2013-09-27 09:33:01 -06:00
|
|
|
a.genLoad(cgn, tmp, v, 0, a.sizeof(v.Type()))
|
2013-09-09 19:06:25 -06:00
|
|
|
id = tmp
|
|
|
|
}
|
2013-09-30 10:39:54 -06:00
|
|
|
a.result.Queries[v] = append(a.result.Queries[v], ptr{a, cgn, id})
|
2013-08-22 10:27:55 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// endObject marks the end of a sequence of calls to addNodes denoting
|
|
|
|
// a single object allocation.
|
|
|
|
//
|
|
|
|
// obj is the start node of the object, from a prior call to nextNode.
|
2013-10-01 07:46:33 -06:00
|
|
|
// Its size, flags and optional data will be updated.
|
2013-08-22 10:27:55 -06:00
|
|
|
//
|
2013-10-01 07:46:33 -06:00
|
|
|
func (a *analysis) endObject(obj nodeid, cgn *cgnode, data interface{}) *object {
|
2013-08-22 10:27:55 -06:00
|
|
|
// Ensure object is non-empty by padding;
|
|
|
|
// the pad will be the object node.
|
|
|
|
size := uint32(a.nextNode() - obj)
|
|
|
|
if size == 0 {
|
|
|
|
a.addOneNode(tInvalid, "padding", nil)
|
|
|
|
}
|
|
|
|
objNode := a.nodes[obj]
|
go.tools/pointer: reflection, part 1: maps, and some core features.
Core:
reflect.TypeOf
reflect.ValueOf
reflect.Zero
reflect.Value.Interface
Maps:
(reflect.Value).MapIndex
(reflect.Value).MapKeys
(reflect.Value).SetMapIndex
(*reflect.rtype).Elem
(*reflect.rtype).Key
+ tests:
pointer/testdata/mapreflect.go.
oracle/testdata/src/main/reflection.go.
Interface objects (T, V...) have been renamed "tagged objects".
Abstraction: we model reflect.Value similar to
interface{}---as a pointer that points only to tagged
objects---but a reflect.Value may also point to an "indirect
tagged object", one in which the payload V is of type *T not T.
These are required because reflect.Values can hold lvalues,
e.g. when derived via Field() or Elem(), though we won't use
them till we get to structs and pointers.
Solving: each reflection intrinsic defines a new constraint
and resolution rule. Because of the nature of reflection,
generalizing across types, the resolution rules dynamically
create additional complex constraints during solving, where
previously only simple (copy) constraints were created.
This requires some solver changes:
The work done before the main solver loop (to attach new
constraints to the graph) is now done before each iteration,
in processNewConstraints.
Its loop over constraints is broken into two passes:
the first handles base (addr-of) constraints,
the second handles simple and complex constraints.
constraint.init() has been inlined. The only behaviour that
varies across constraints is ptr()
Sadly this will pessimize presolver optimisations, when we get
there; such is the price of reflection.
Objects: reflection intrinsics create objects (i.e. cause
memory allocations) with no SSA operation. We will represent
them as the cgnode of the instrinsic (e.g. reflect.New), so we
extend Labels and node.data to represent objects as a product
(not sum) of ssa.Value and cgnode and pull this out into its
own type, struct object. This simplifies a number of
invariants and saves space. The ntObject flag is now
represented by obj!=nil; the other flags are moved into
object.
cgnodes are now always recorded in objects/Labels for which it
is appropriate (all but those for globals, constants and the
shared contours for functions).
Also:
- Prepopulate the flattenMemo cache to consider reflect.Value
a fake pointer, not a struct.
- Improve accessors and documentation on type Label.
- @conctypes assertions renamed @types (since dyn. types needn't be concrete).
- add oracle 'describe' test on an interface (missing, an oversight).
R=crawshaw
CC=golang-dev
https://golang.org/cl/13418048
2013-09-16 07:49:10 -06:00
|
|
|
o := &object{
|
|
|
|
size: size, // excludes padding
|
|
|
|
cgn: cgn,
|
2013-10-01 07:46:33 -06:00
|
|
|
data: data,
|
2013-08-22 10:27:55 -06:00
|
|
|
}
|
go.tools/pointer: reflection, part 1: maps, and some core features.
Core:
reflect.TypeOf
reflect.ValueOf
reflect.Zero
reflect.Value.Interface
Maps:
(reflect.Value).MapIndex
(reflect.Value).MapKeys
(reflect.Value).SetMapIndex
(*reflect.rtype).Elem
(*reflect.rtype).Key
+ tests:
pointer/testdata/mapreflect.go.
oracle/testdata/src/main/reflection.go.
Interface objects (T, V...) have been renamed "tagged objects".
Abstraction: we model reflect.Value similar to
interface{}---as a pointer that points only to tagged
objects---but a reflect.Value may also point to an "indirect
tagged object", one in which the payload V is of type *T not T.
These are required because reflect.Values can hold lvalues,
e.g. when derived via Field() or Elem(), though we won't use
them till we get to structs and pointers.
Solving: each reflection intrinsic defines a new constraint
and resolution rule. Because of the nature of reflection,
generalizing across types, the resolution rules dynamically
create additional complex constraints during solving, where
previously only simple (copy) constraints were created.
This requires some solver changes:
The work done before the main solver loop (to attach new
constraints to the graph) is now done before each iteration,
in processNewConstraints.
Its loop over constraints is broken into two passes:
the first handles base (addr-of) constraints,
the second handles simple and complex constraints.
constraint.init() has been inlined. The only behaviour that
varies across constraints is ptr()
Sadly this will pessimize presolver optimisations, when we get
there; such is the price of reflection.
Objects: reflection intrinsics create objects (i.e. cause
memory allocations) with no SSA operation. We will represent
them as the cgnode of the instrinsic (e.g. reflect.New), so we
extend Labels and node.data to represent objects as a product
(not sum) of ssa.Value and cgnode and pull this out into its
own type, struct object. This simplifies a number of
invariants and saves space. The ntObject flag is now
represented by obj!=nil; the other flags are moved into
object.
cgnodes are now always recorded in objects/Labels for which it
is appropriate (all but those for globals, constants and the
shared contours for functions).
Also:
- Prepopulate the flattenMemo cache to consider reflect.Value
a fake pointer, not a struct.
- Improve accessors and documentation on type Label.
- @conctypes assertions renamed @types (since dyn. types needn't be concrete).
- add oracle 'describe' test on an interface (missing, an oversight).
R=crawshaw
CC=golang-dev
https://golang.org/cl/13418048
2013-09-16 07:49:10 -06:00
|
|
|
objNode.obj = o
|
|
|
|
|
|
|
|
return o
|
2013-08-22 10:27:55 -06:00
|
|
|
}
|
|
|
|
|
2013-10-09 10:41:55 -06:00
|
|
|
// makeFunctionObject creates and returns a new function object
|
|
|
|
// (contour) for fn, and returns the id of its first node. It also
|
|
|
|
// enqueues fn for subsequent constraint generation.
|
2013-08-22 10:27:55 -06:00
|
|
|
//
|
2013-10-09 10:41:55 -06:00
|
|
|
// For a context-sensitive contour, callersite identifies the sole
|
|
|
|
// callsite; for shared contours, caller is nil.
|
|
|
|
//
|
|
|
|
func (a *analysis) makeFunctionObject(fn *ssa.Function, callersite *callsite) nodeid {
|
2013-08-22 10:27:55 -06:00
|
|
|
if a.log != nil {
|
|
|
|
fmt.Fprintf(a.log, "\t---- makeFunctionObject %s\n", fn)
|
|
|
|
}
|
|
|
|
|
|
|
|
// obj is the function object (identity, params, results).
|
|
|
|
obj := a.nextNode()
|
2013-10-09 10:41:55 -06:00
|
|
|
cgn := a.makeCGNode(fn, obj, callersite)
|
2013-08-22 10:27:55 -06:00
|
|
|
sig := fn.Signature
|
|
|
|
a.addOneNode(sig, "func.cgnode", nil) // (scalar with Signature type)
|
|
|
|
if recv := sig.Recv(); recv != nil {
|
|
|
|
a.addNodes(recv.Type(), "func.recv")
|
|
|
|
}
|
|
|
|
a.addNodes(sig.Params(), "func.params")
|
|
|
|
a.addNodes(sig.Results(), "func.results")
|
go.tools/pointer: reflection, part 1: maps, and some core features.
Core:
reflect.TypeOf
reflect.ValueOf
reflect.Zero
reflect.Value.Interface
Maps:
(reflect.Value).MapIndex
(reflect.Value).MapKeys
(reflect.Value).SetMapIndex
(*reflect.rtype).Elem
(*reflect.rtype).Key
+ tests:
pointer/testdata/mapreflect.go.
oracle/testdata/src/main/reflection.go.
Interface objects (T, V...) have been renamed "tagged objects".
Abstraction: we model reflect.Value similar to
interface{}---as a pointer that points only to tagged
objects---but a reflect.Value may also point to an "indirect
tagged object", one in which the payload V is of type *T not T.
These are required because reflect.Values can hold lvalues,
e.g. when derived via Field() or Elem(), though we won't use
them till we get to structs and pointers.
Solving: each reflection intrinsic defines a new constraint
and resolution rule. Because of the nature of reflection,
generalizing across types, the resolution rules dynamically
create additional complex constraints during solving, where
previously only simple (copy) constraints were created.
This requires some solver changes:
The work done before the main solver loop (to attach new
constraints to the graph) is now done before each iteration,
in processNewConstraints.
Its loop over constraints is broken into two passes:
the first handles base (addr-of) constraints,
the second handles simple and complex constraints.
constraint.init() has been inlined. The only behaviour that
varies across constraints is ptr()
Sadly this will pessimize presolver optimisations, when we get
there; such is the price of reflection.
Objects: reflection intrinsics create objects (i.e. cause
memory allocations) with no SSA operation. We will represent
them as the cgnode of the instrinsic (e.g. reflect.New), so we
extend Labels and node.data to represent objects as a product
(not sum) of ssa.Value and cgnode and pull this out into its
own type, struct object. This simplifies a number of
invariants and saves space. The ntObject flag is now
represented by obj!=nil; the other flags are moved into
object.
cgnodes are now always recorded in objects/Labels for which it
is appropriate (all but those for globals, constants and the
shared contours for functions).
Also:
- Prepopulate the flattenMemo cache to consider reflect.Value
a fake pointer, not a struct.
- Improve accessors and documentation on type Label.
- @conctypes assertions renamed @types (since dyn. types needn't be concrete).
- add oracle 'describe' test on an interface (missing, an oversight).
R=crawshaw
CC=golang-dev
https://golang.org/cl/13418048
2013-09-16 07:49:10 -06:00
|
|
|
a.endObject(obj, cgn, fn).flags |= otFunction
|
2013-08-22 10:27:55 -06:00
|
|
|
|
|
|
|
if a.log != nil {
|
|
|
|
fmt.Fprintf(a.log, "\t----\n")
|
|
|
|
}
|
|
|
|
|
|
|
|
// Queue it up for constraint processing.
|
|
|
|
a.genq = append(a.genq, cgn)
|
|
|
|
|
|
|
|
return obj
|
|
|
|
}
|
|
|
|
|
go.tools/pointer: reflection, part 1: maps, and some core features.
Core:
reflect.TypeOf
reflect.ValueOf
reflect.Zero
reflect.Value.Interface
Maps:
(reflect.Value).MapIndex
(reflect.Value).MapKeys
(reflect.Value).SetMapIndex
(*reflect.rtype).Elem
(*reflect.rtype).Key
+ tests:
pointer/testdata/mapreflect.go.
oracle/testdata/src/main/reflection.go.
Interface objects (T, V...) have been renamed "tagged objects".
Abstraction: we model reflect.Value similar to
interface{}---as a pointer that points only to tagged
objects---but a reflect.Value may also point to an "indirect
tagged object", one in which the payload V is of type *T not T.
These are required because reflect.Values can hold lvalues,
e.g. when derived via Field() or Elem(), though we won't use
them till we get to structs and pointers.
Solving: each reflection intrinsic defines a new constraint
and resolution rule. Because of the nature of reflection,
generalizing across types, the resolution rules dynamically
create additional complex constraints during solving, where
previously only simple (copy) constraints were created.
This requires some solver changes:
The work done before the main solver loop (to attach new
constraints to the graph) is now done before each iteration,
in processNewConstraints.
Its loop over constraints is broken into two passes:
the first handles base (addr-of) constraints,
the second handles simple and complex constraints.
constraint.init() has been inlined. The only behaviour that
varies across constraints is ptr()
Sadly this will pessimize presolver optimisations, when we get
there; such is the price of reflection.
Objects: reflection intrinsics create objects (i.e. cause
memory allocations) with no SSA operation. We will represent
them as the cgnode of the instrinsic (e.g. reflect.New), so we
extend Labels and node.data to represent objects as a product
(not sum) of ssa.Value and cgnode and pull this out into its
own type, struct object. This simplifies a number of
invariants and saves space. The ntObject flag is now
represented by obj!=nil; the other flags are moved into
object.
cgnodes are now always recorded in objects/Labels for which it
is appropriate (all but those for globals, constants and the
shared contours for functions).
Also:
- Prepopulate the flattenMemo cache to consider reflect.Value
a fake pointer, not a struct.
- Improve accessors and documentation on type Label.
- @conctypes assertions renamed @types (since dyn. types needn't be concrete).
- add oracle 'describe' test on an interface (missing, an oversight).
R=crawshaw
CC=golang-dev
https://golang.org/cl/13418048
2013-09-16 07:49:10 -06:00
|
|
|
// makeTagged creates a tagged object of type typ.
|
2013-10-01 07:46:33 -06:00
|
|
|
func (a *analysis) makeTagged(typ types.Type, cgn *cgnode, data interface{}) nodeid {
|
go.tools/pointer: reflection, part 1: maps, and some core features.
Core:
reflect.TypeOf
reflect.ValueOf
reflect.Zero
reflect.Value.Interface
Maps:
(reflect.Value).MapIndex
(reflect.Value).MapKeys
(reflect.Value).SetMapIndex
(*reflect.rtype).Elem
(*reflect.rtype).Key
+ tests:
pointer/testdata/mapreflect.go.
oracle/testdata/src/main/reflection.go.
Interface objects (T, V...) have been renamed "tagged objects".
Abstraction: we model reflect.Value similar to
interface{}---as a pointer that points only to tagged
objects---but a reflect.Value may also point to an "indirect
tagged object", one in which the payload V is of type *T not T.
These are required because reflect.Values can hold lvalues,
e.g. when derived via Field() or Elem(), though we won't use
them till we get to structs and pointers.
Solving: each reflection intrinsic defines a new constraint
and resolution rule. Because of the nature of reflection,
generalizing across types, the resolution rules dynamically
create additional complex constraints during solving, where
previously only simple (copy) constraints were created.
This requires some solver changes:
The work done before the main solver loop (to attach new
constraints to the graph) is now done before each iteration,
in processNewConstraints.
Its loop over constraints is broken into two passes:
the first handles base (addr-of) constraints,
the second handles simple and complex constraints.
constraint.init() has been inlined. The only behaviour that
varies across constraints is ptr()
Sadly this will pessimize presolver optimisations, when we get
there; such is the price of reflection.
Objects: reflection intrinsics create objects (i.e. cause
memory allocations) with no SSA operation. We will represent
them as the cgnode of the instrinsic (e.g. reflect.New), so we
extend Labels and node.data to represent objects as a product
(not sum) of ssa.Value and cgnode and pull this out into its
own type, struct object. This simplifies a number of
invariants and saves space. The ntObject flag is now
represented by obj!=nil; the other flags are moved into
object.
cgnodes are now always recorded in objects/Labels for which it
is appropriate (all but those for globals, constants and the
shared contours for functions).
Also:
- Prepopulate the flattenMemo cache to consider reflect.Value
a fake pointer, not a struct.
- Improve accessors and documentation on type Label.
- @conctypes assertions renamed @types (since dyn. types needn't be concrete).
- add oracle 'describe' test on an interface (missing, an oversight).
R=crawshaw
CC=golang-dev
https://golang.org/cl/13418048
2013-09-16 07:49:10 -06:00
|
|
|
obj := a.addOneNode(typ, "tagged.T", nil) // NB: type may be non-scalar!
|
|
|
|
a.addNodes(typ, "tagged.v")
|
2013-10-01 07:46:33 -06:00
|
|
|
a.endObject(obj, cgn, data).flags |= otTagged
|
go.tools/pointer: reflection, part 1: maps, and some core features.
Core:
reflect.TypeOf
reflect.ValueOf
reflect.Zero
reflect.Value.Interface
Maps:
(reflect.Value).MapIndex
(reflect.Value).MapKeys
(reflect.Value).SetMapIndex
(*reflect.rtype).Elem
(*reflect.rtype).Key
+ tests:
pointer/testdata/mapreflect.go.
oracle/testdata/src/main/reflection.go.
Interface objects (T, V...) have been renamed "tagged objects".
Abstraction: we model reflect.Value similar to
interface{}---as a pointer that points only to tagged
objects---but a reflect.Value may also point to an "indirect
tagged object", one in which the payload V is of type *T not T.
These are required because reflect.Values can hold lvalues,
e.g. when derived via Field() or Elem(), though we won't use
them till we get to structs and pointers.
Solving: each reflection intrinsic defines a new constraint
and resolution rule. Because of the nature of reflection,
generalizing across types, the resolution rules dynamically
create additional complex constraints during solving, where
previously only simple (copy) constraints were created.
This requires some solver changes:
The work done before the main solver loop (to attach new
constraints to the graph) is now done before each iteration,
in processNewConstraints.
Its loop over constraints is broken into two passes:
the first handles base (addr-of) constraints,
the second handles simple and complex constraints.
constraint.init() has been inlined. The only behaviour that
varies across constraints is ptr()
Sadly this will pessimize presolver optimisations, when we get
there; such is the price of reflection.
Objects: reflection intrinsics create objects (i.e. cause
memory allocations) with no SSA operation. We will represent
them as the cgnode of the instrinsic (e.g. reflect.New), so we
extend Labels and node.data to represent objects as a product
(not sum) of ssa.Value and cgnode and pull this out into its
own type, struct object. This simplifies a number of
invariants and saves space. The ntObject flag is now
represented by obj!=nil; the other flags are moved into
object.
cgnodes are now always recorded in objects/Labels for which it
is appropriate (all but those for globals, constants and the
shared contours for functions).
Also:
- Prepopulate the flattenMemo cache to consider reflect.Value
a fake pointer, not a struct.
- Improve accessors and documentation on type Label.
- @conctypes assertions renamed @types (since dyn. types needn't be concrete).
- add oracle 'describe' test on an interface (missing, an oversight).
R=crawshaw
CC=golang-dev
https://golang.org/cl/13418048
2013-09-16 07:49:10 -06:00
|
|
|
return obj
|
|
|
|
}
|
|
|
|
|
|
|
|
// makeRtype returns the canonical tagged object of type *rtype whose
|
|
|
|
// payload points to the sole rtype object for T.
|
|
|
|
func (a *analysis) makeRtype(T types.Type) nodeid {
|
|
|
|
if v := a.rtypes.At(T); v != nil {
|
|
|
|
return v.(nodeid)
|
|
|
|
}
|
|
|
|
|
|
|
|
// Create the object for the reflect.rtype itself, which is
|
|
|
|
// ordinarily a large struct but here a single node will do.
|
|
|
|
obj := a.nextNode()
|
|
|
|
a.addOneNode(T, "reflect.rtype", nil)
|
2013-10-01 07:46:33 -06:00
|
|
|
a.endObject(obj, nil, T)
|
go.tools/pointer: reflection, part 1: maps, and some core features.
Core:
reflect.TypeOf
reflect.ValueOf
reflect.Zero
reflect.Value.Interface
Maps:
(reflect.Value).MapIndex
(reflect.Value).MapKeys
(reflect.Value).SetMapIndex
(*reflect.rtype).Elem
(*reflect.rtype).Key
+ tests:
pointer/testdata/mapreflect.go.
oracle/testdata/src/main/reflection.go.
Interface objects (T, V...) have been renamed "tagged objects".
Abstraction: we model reflect.Value similar to
interface{}---as a pointer that points only to tagged
objects---but a reflect.Value may also point to an "indirect
tagged object", one in which the payload V is of type *T not T.
These are required because reflect.Values can hold lvalues,
e.g. when derived via Field() or Elem(), though we won't use
them till we get to structs and pointers.
Solving: each reflection intrinsic defines a new constraint
and resolution rule. Because of the nature of reflection,
generalizing across types, the resolution rules dynamically
create additional complex constraints during solving, where
previously only simple (copy) constraints were created.
This requires some solver changes:
The work done before the main solver loop (to attach new
constraints to the graph) is now done before each iteration,
in processNewConstraints.
Its loop over constraints is broken into two passes:
the first handles base (addr-of) constraints,
the second handles simple and complex constraints.
constraint.init() has been inlined. The only behaviour that
varies across constraints is ptr()
Sadly this will pessimize presolver optimisations, when we get
there; such is the price of reflection.
Objects: reflection intrinsics create objects (i.e. cause
memory allocations) with no SSA operation. We will represent
them as the cgnode of the instrinsic (e.g. reflect.New), so we
extend Labels and node.data to represent objects as a product
(not sum) of ssa.Value and cgnode and pull this out into its
own type, struct object. This simplifies a number of
invariants and saves space. The ntObject flag is now
represented by obj!=nil; the other flags are moved into
object.
cgnodes are now always recorded in objects/Labels for which it
is appropriate (all but those for globals, constants and the
shared contours for functions).
Also:
- Prepopulate the flattenMemo cache to consider reflect.Value
a fake pointer, not a struct.
- Improve accessors and documentation on type Label.
- @conctypes assertions renamed @types (since dyn. types needn't be concrete).
- add oracle 'describe' test on an interface (missing, an oversight).
R=crawshaw
CC=golang-dev
https://golang.org/cl/13418048
2013-09-16 07:49:10 -06:00
|
|
|
|
2013-10-01 07:46:33 -06:00
|
|
|
id := a.makeTagged(a.reflectRtypePtr, nil, T)
|
go.tools/pointer: reflection, part 1: maps, and some core features.
Core:
reflect.TypeOf
reflect.ValueOf
reflect.Zero
reflect.Value.Interface
Maps:
(reflect.Value).MapIndex
(reflect.Value).MapKeys
(reflect.Value).SetMapIndex
(*reflect.rtype).Elem
(*reflect.rtype).Key
+ tests:
pointer/testdata/mapreflect.go.
oracle/testdata/src/main/reflection.go.
Interface objects (T, V...) have been renamed "tagged objects".
Abstraction: we model reflect.Value similar to
interface{}---as a pointer that points only to tagged
objects---but a reflect.Value may also point to an "indirect
tagged object", one in which the payload V is of type *T not T.
These are required because reflect.Values can hold lvalues,
e.g. when derived via Field() or Elem(), though we won't use
them till we get to structs and pointers.
Solving: each reflection intrinsic defines a new constraint
and resolution rule. Because of the nature of reflection,
generalizing across types, the resolution rules dynamically
create additional complex constraints during solving, where
previously only simple (copy) constraints were created.
This requires some solver changes:
The work done before the main solver loop (to attach new
constraints to the graph) is now done before each iteration,
in processNewConstraints.
Its loop over constraints is broken into two passes:
the first handles base (addr-of) constraints,
the second handles simple and complex constraints.
constraint.init() has been inlined. The only behaviour that
varies across constraints is ptr()
Sadly this will pessimize presolver optimisations, when we get
there; such is the price of reflection.
Objects: reflection intrinsics create objects (i.e. cause
memory allocations) with no SSA operation. We will represent
them as the cgnode of the instrinsic (e.g. reflect.New), so we
extend Labels and node.data to represent objects as a product
(not sum) of ssa.Value and cgnode and pull this out into its
own type, struct object. This simplifies a number of
invariants and saves space. The ntObject flag is now
represented by obj!=nil; the other flags are moved into
object.
cgnodes are now always recorded in objects/Labels for which it
is appropriate (all but those for globals, constants and the
shared contours for functions).
Also:
- Prepopulate the flattenMemo cache to consider reflect.Value
a fake pointer, not a struct.
- Improve accessors and documentation on type Label.
- @conctypes assertions renamed @types (since dyn. types needn't be concrete).
- add oracle 'describe' test on an interface (missing, an oversight).
R=crawshaw
CC=golang-dev
https://golang.org/cl/13418048
2013-09-16 07:49:10 -06:00
|
|
|
a.nodes[id+1].typ = T // trick (each *rtype tagged object is a singleton)
|
|
|
|
a.addressOf(id+1, obj)
|
|
|
|
|
|
|
|
a.rtypes.Set(T, id)
|
|
|
|
return id
|
|
|
|
}
|
|
|
|
|
2013-09-23 14:13:01 -06:00
|
|
|
// rtypeValue returns the type of the *reflect.rtype-tagged object obj.
|
|
|
|
func (a *analysis) rtypeTaggedValue(obj nodeid) types.Type {
|
|
|
|
tDyn, t, _ := a.taggedValue(obj)
|
|
|
|
if tDyn != a.reflectRtypePtr {
|
2013-10-29 19:57:53 -06:00
|
|
|
panic(fmt.Sprintf("not a *reflect.rtype-tagged object: obj=n%d tag=%v payload=n%d", obj, tDyn, t))
|
2013-09-23 14:13:01 -06:00
|
|
|
}
|
|
|
|
return a.nodes[t].typ
|
|
|
|
}
|
|
|
|
|
2013-08-22 10:27:55 -06:00
|
|
|
// valueNode returns the id of the value node for v, creating it (and
|
|
|
|
// the association) as needed. It may return zero for uninteresting
|
|
|
|
// values containing no pointers.
|
|
|
|
//
|
|
|
|
func (a *analysis) valueNode(v ssa.Value) nodeid {
|
go.tools/pointer: strength reduction during constraint generation.
Motivation: simple constraints---copy and addr---are more
amenable to pre-solver optimizations (forthcoming) than
complex constraints: load, store, and all others.
In code such as the following:
t0 = new struct { x, y int }
t1 = &t0.y
t2 = *t1
there's no need for the full generality of a (complex)
load constraint for t2=*t1 since t1 can only point to t0.y.
All we need is a (simple) copy constraint t2 = (t0.y)
where (t0.y) is the object node label for that field.
For all "addressable" SSA instructions, we tabulate
whether their points-to set is necessarily a singleton. For
some (e.g. Alloc, MakeSlice, etc) this is always true by
design. For others (e.g. FieldAddr) it depends on their
operands.
We exploit this information when generating constraints:
all load-form and store-form constraints are reduced to copy
constraints if the pointer's PTS is a singleton.
Similarly all FieldAddr (y=&x.f) and IndexAddr (y=&x[0])
constraints are reduced to offset addition, for singleton
operands.
Here's the constraint mix when running on the oracle itself.
The total number of constraints is unchanged but the fraction
that are complex has gone down to 21% from 53%.
before after
--simple--
addr 20682 46949
copy 61454 91211
--complex--
offsetAddr 41621 15325
load 18769 12925
store 30758 6908
invoke 758 760
typeAssert 1688 1689
total 175832 175869
Also:
- Add Pointer.Context() for local variables,
since we now plumb cgnodes throughout. Nice.
- Refactor all load-form (load, receive, lookup) and
store-form (Store, send, MapUpdate) constraints to use
genLoad and genStore.
- Log counts of constraints by type.
- valNodes split into localval and globalval maps;
localval is purged after each function.
- analogous maps localobj[v] and globalobj[v] hold sole label
for pts(v), if singleton.
- fnObj map subsumed by globalobj.
- make{Function/Global/Constant} inlined into objectValue.
Much cleaner.
R=crawshaw
CC=golang-dev
https://golang.org/cl/13979043
2013-09-27 09:33:01 -06:00
|
|
|
// Value nodes for locals are created en masse by genFunc.
|
|
|
|
if id, ok := a.localval[v]; ok {
|
|
|
|
return id
|
|
|
|
}
|
2013-08-22 10:27:55 -06:00
|
|
|
|
go.tools/pointer: strength reduction during constraint generation.
Motivation: simple constraints---copy and addr---are more
amenable to pre-solver optimizations (forthcoming) than
complex constraints: load, store, and all others.
In code such as the following:
t0 = new struct { x, y int }
t1 = &t0.y
t2 = *t1
there's no need for the full generality of a (complex)
load constraint for t2=*t1 since t1 can only point to t0.y.
All we need is a (simple) copy constraint t2 = (t0.y)
where (t0.y) is the object node label for that field.
For all "addressable" SSA instructions, we tabulate
whether their points-to set is necessarily a singleton. For
some (e.g. Alloc, MakeSlice, etc) this is always true by
design. For others (e.g. FieldAddr) it depends on their
operands.
We exploit this information when generating constraints:
all load-form and store-form constraints are reduced to copy
constraints if the pointer's PTS is a singleton.
Similarly all FieldAddr (y=&x.f) and IndexAddr (y=&x[0])
constraints are reduced to offset addition, for singleton
operands.
Here's the constraint mix when running on the oracle itself.
The total number of constraints is unchanged but the fraction
that are complex has gone down to 21% from 53%.
before after
--simple--
addr 20682 46949
copy 61454 91211
--complex--
offsetAddr 41621 15325
load 18769 12925
store 30758 6908
invoke 758 760
typeAssert 1688 1689
total 175832 175869
Also:
- Add Pointer.Context() for local variables,
since we now plumb cgnodes throughout. Nice.
- Refactor all load-form (load, receive, lookup) and
store-form (Store, send, MapUpdate) constraints to use
genLoad and genStore.
- Log counts of constraints by type.
- valNodes split into localval and globalval maps;
localval is purged after each function.
- analogous maps localobj[v] and globalobj[v] hold sole label
for pts(v), if singleton.
- fnObj map subsumed by globalobj.
- make{Function/Global/Constant} inlined into objectValue.
Much cleaner.
R=crawshaw
CC=golang-dev
https://golang.org/cl/13979043
2013-09-27 09:33:01 -06:00
|
|
|
// Value nodes for globals are created on demand.
|
|
|
|
id, ok := a.globalval[v]
|
|
|
|
if !ok {
|
|
|
|
var comment string
|
|
|
|
if a.log != nil {
|
|
|
|
comment = v.String()
|
|
|
|
}
|
|
|
|
id = a.addOneNode(v.Type(), comment, nil)
|
|
|
|
if obj := a.objectNode(nil, v); obj != 0 {
|
|
|
|
a.addressOf(id, obj)
|
2013-08-22 10:27:55 -06:00
|
|
|
}
|
go.tools/pointer: strength reduction during constraint generation.
Motivation: simple constraints---copy and addr---are more
amenable to pre-solver optimizations (forthcoming) than
complex constraints: load, store, and all others.
In code such as the following:
t0 = new struct { x, y int }
t1 = &t0.y
t2 = *t1
there's no need for the full generality of a (complex)
load constraint for t2=*t1 since t1 can only point to t0.y.
All we need is a (simple) copy constraint t2 = (t0.y)
where (t0.y) is the object node label for that field.
For all "addressable" SSA instructions, we tabulate
whether their points-to set is necessarily a singleton. For
some (e.g. Alloc, MakeSlice, etc) this is always true by
design. For others (e.g. FieldAddr) it depends on their
operands.
We exploit this information when generating constraints:
all load-form and store-form constraints are reduced to copy
constraints if the pointer's PTS is a singleton.
Similarly all FieldAddr (y=&x.f) and IndexAddr (y=&x[0])
constraints are reduced to offset addition, for singleton
operands.
Here's the constraint mix when running on the oracle itself.
The total number of constraints is unchanged but the fraction
that are complex has gone down to 21% from 53%.
before after
--simple--
addr 20682 46949
copy 61454 91211
--complex--
offsetAddr 41621 15325
load 18769 12925
store 30758 6908
invoke 758 760
typeAssert 1688 1689
total 175832 175869
Also:
- Add Pointer.Context() for local variables,
since we now plumb cgnodes throughout. Nice.
- Refactor all load-form (load, receive, lookup) and
store-form (Store, send, MapUpdate) constraints to use
genLoad and genStore.
- Log counts of constraints by type.
- valNodes split into localval and globalval maps;
localval is purged after each function.
- analogous maps localobj[v] and globalobj[v] hold sole label
for pts(v), if singleton.
- fnObj map subsumed by globalobj.
- make{Function/Global/Constant} inlined into objectValue.
Much cleaner.
R=crawshaw
CC=golang-dev
https://golang.org/cl/13979043
2013-09-27 09:33:01 -06:00
|
|
|
a.setValueNode(v, id, nil)
|
2013-08-22 10:27:55 -06:00
|
|
|
}
|
|
|
|
return id
|
|
|
|
}
|
|
|
|
|
|
|
|
// valueOffsetNode ascertains the node for tuple/struct value v,
|
|
|
|
// then returns the node for its subfield #index.
|
|
|
|
//
|
|
|
|
func (a *analysis) valueOffsetNode(v ssa.Value, index int) nodeid {
|
|
|
|
id := a.valueNode(v)
|
|
|
|
if id == 0 {
|
|
|
|
panic(fmt.Sprintf("cannot offset within n0: %s = %s", v.Name(), v))
|
|
|
|
}
|
|
|
|
return id + nodeid(a.offsetOf(v.Type(), index))
|
|
|
|
}
|
|
|
|
|
2013-10-29 19:57:53 -06:00
|
|
|
// isTaggedObject reports whether object obj is a tagged object.
|
|
|
|
func (a *analysis) isTaggedObject(obj nodeid) bool {
|
|
|
|
return a.nodes[obj].obj.flags&otTagged != 0
|
|
|
|
}
|
|
|
|
|
go.tools/pointer: reflection, part 1: maps, and some core features.
Core:
reflect.TypeOf
reflect.ValueOf
reflect.Zero
reflect.Value.Interface
Maps:
(reflect.Value).MapIndex
(reflect.Value).MapKeys
(reflect.Value).SetMapIndex
(*reflect.rtype).Elem
(*reflect.rtype).Key
+ tests:
pointer/testdata/mapreflect.go.
oracle/testdata/src/main/reflection.go.
Interface objects (T, V...) have been renamed "tagged objects".
Abstraction: we model reflect.Value similar to
interface{}---as a pointer that points only to tagged
objects---but a reflect.Value may also point to an "indirect
tagged object", one in which the payload V is of type *T not T.
These are required because reflect.Values can hold lvalues,
e.g. when derived via Field() or Elem(), though we won't use
them till we get to structs and pointers.
Solving: each reflection intrinsic defines a new constraint
and resolution rule. Because of the nature of reflection,
generalizing across types, the resolution rules dynamically
create additional complex constraints during solving, where
previously only simple (copy) constraints were created.
This requires some solver changes:
The work done before the main solver loop (to attach new
constraints to the graph) is now done before each iteration,
in processNewConstraints.
Its loop over constraints is broken into two passes:
the first handles base (addr-of) constraints,
the second handles simple and complex constraints.
constraint.init() has been inlined. The only behaviour that
varies across constraints is ptr()
Sadly this will pessimize presolver optimisations, when we get
there; such is the price of reflection.
Objects: reflection intrinsics create objects (i.e. cause
memory allocations) with no SSA operation. We will represent
them as the cgnode of the instrinsic (e.g. reflect.New), so we
extend Labels and node.data to represent objects as a product
(not sum) of ssa.Value and cgnode and pull this out into its
own type, struct object. This simplifies a number of
invariants and saves space. The ntObject flag is now
represented by obj!=nil; the other flags are moved into
object.
cgnodes are now always recorded in objects/Labels for which it
is appropriate (all but those for globals, constants and the
shared contours for functions).
Also:
- Prepopulate the flattenMemo cache to consider reflect.Value
a fake pointer, not a struct.
- Improve accessors and documentation on type Label.
- @conctypes assertions renamed @types (since dyn. types needn't be concrete).
- add oracle 'describe' test on an interface (missing, an oversight).
R=crawshaw
CC=golang-dev
https://golang.org/cl/13418048
2013-09-16 07:49:10 -06:00
|
|
|
// taggedValue returns the dynamic type tag, the (first node of the)
|
|
|
|
// payload, and the indirect flag of the tagged object starting at id.
|
2013-10-29 19:57:53 -06:00
|
|
|
// Panic ensues if !isTaggedObject(id).
|
2013-08-22 10:27:55 -06:00
|
|
|
//
|
2013-10-29 19:57:53 -06:00
|
|
|
func (a *analysis) taggedValue(obj nodeid) (tDyn types.Type, v nodeid, indirect bool) {
|
|
|
|
n := a.nodes[obj]
|
go.tools/pointer: reflection, part 1: maps, and some core features.
Core:
reflect.TypeOf
reflect.ValueOf
reflect.Zero
reflect.Value.Interface
Maps:
(reflect.Value).MapIndex
(reflect.Value).MapKeys
(reflect.Value).SetMapIndex
(*reflect.rtype).Elem
(*reflect.rtype).Key
+ tests:
pointer/testdata/mapreflect.go.
oracle/testdata/src/main/reflection.go.
Interface objects (T, V...) have been renamed "tagged objects".
Abstraction: we model reflect.Value similar to
interface{}---as a pointer that points only to tagged
objects---but a reflect.Value may also point to an "indirect
tagged object", one in which the payload V is of type *T not T.
These are required because reflect.Values can hold lvalues,
e.g. when derived via Field() or Elem(), though we won't use
them till we get to structs and pointers.
Solving: each reflection intrinsic defines a new constraint
and resolution rule. Because of the nature of reflection,
generalizing across types, the resolution rules dynamically
create additional complex constraints during solving, where
previously only simple (copy) constraints were created.
This requires some solver changes:
The work done before the main solver loop (to attach new
constraints to the graph) is now done before each iteration,
in processNewConstraints.
Its loop over constraints is broken into two passes:
the first handles base (addr-of) constraints,
the second handles simple and complex constraints.
constraint.init() has been inlined. The only behaviour that
varies across constraints is ptr()
Sadly this will pessimize presolver optimisations, when we get
there; such is the price of reflection.
Objects: reflection intrinsics create objects (i.e. cause
memory allocations) with no SSA operation. We will represent
them as the cgnode of the instrinsic (e.g. reflect.New), so we
extend Labels and node.data to represent objects as a product
(not sum) of ssa.Value and cgnode and pull this out into its
own type, struct object. This simplifies a number of
invariants and saves space. The ntObject flag is now
represented by obj!=nil; the other flags are moved into
object.
cgnodes are now always recorded in objects/Labels for which it
is appropriate (all but those for globals, constants and the
shared contours for functions).
Also:
- Prepopulate the flattenMemo cache to consider reflect.Value
a fake pointer, not a struct.
- Improve accessors and documentation on type Label.
- @conctypes assertions renamed @types (since dyn. types needn't be concrete).
- add oracle 'describe' test on an interface (missing, an oversight).
R=crawshaw
CC=golang-dev
https://golang.org/cl/13418048
2013-09-16 07:49:10 -06:00
|
|
|
flags := n.obj.flags
|
2013-10-29 19:57:53 -06:00
|
|
|
if flags&otTagged == 0 {
|
|
|
|
panic(fmt.Sprintf("not a tagged object: n%d", obj))
|
2013-08-22 10:27:55 -06:00
|
|
|
}
|
2013-10-29 19:57:53 -06:00
|
|
|
return n.typ, obj + 1, flags&otIndirect != 0
|
2013-08-22 10:27:55 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
// funcParams returns the first node of the params block of the
|
go.tools/pointer: reflection, part 1: maps, and some core features.
Core:
reflect.TypeOf
reflect.ValueOf
reflect.Zero
reflect.Value.Interface
Maps:
(reflect.Value).MapIndex
(reflect.Value).MapKeys
(reflect.Value).SetMapIndex
(*reflect.rtype).Elem
(*reflect.rtype).Key
+ tests:
pointer/testdata/mapreflect.go.
oracle/testdata/src/main/reflection.go.
Interface objects (T, V...) have been renamed "tagged objects".
Abstraction: we model reflect.Value similar to
interface{}---as a pointer that points only to tagged
objects---but a reflect.Value may also point to an "indirect
tagged object", one in which the payload V is of type *T not T.
These are required because reflect.Values can hold lvalues,
e.g. when derived via Field() or Elem(), though we won't use
them till we get to structs and pointers.
Solving: each reflection intrinsic defines a new constraint
and resolution rule. Because of the nature of reflection,
generalizing across types, the resolution rules dynamically
create additional complex constraints during solving, where
previously only simple (copy) constraints were created.
This requires some solver changes:
The work done before the main solver loop (to attach new
constraints to the graph) is now done before each iteration,
in processNewConstraints.
Its loop over constraints is broken into two passes:
the first handles base (addr-of) constraints,
the second handles simple and complex constraints.
constraint.init() has been inlined. The only behaviour that
varies across constraints is ptr()
Sadly this will pessimize presolver optimisations, when we get
there; such is the price of reflection.
Objects: reflection intrinsics create objects (i.e. cause
memory allocations) with no SSA operation. We will represent
them as the cgnode of the instrinsic (e.g. reflect.New), so we
extend Labels and node.data to represent objects as a product
(not sum) of ssa.Value and cgnode and pull this out into its
own type, struct object. This simplifies a number of
invariants and saves space. The ntObject flag is now
represented by obj!=nil; the other flags are moved into
object.
cgnodes are now always recorded in objects/Labels for which it
is appropriate (all but those for globals, constants and the
shared contours for functions).
Also:
- Prepopulate the flattenMemo cache to consider reflect.Value
a fake pointer, not a struct.
- Improve accessors and documentation on type Label.
- @conctypes assertions renamed @types (since dyn. types needn't be concrete).
- add oracle 'describe' test on an interface (missing, an oversight).
R=crawshaw
CC=golang-dev
https://golang.org/cl/13418048
2013-09-16 07:49:10 -06:00
|
|
|
// function whose object node (obj.flags&otFunction) is id.
|
2013-08-22 10:27:55 -06:00
|
|
|
//
|
|
|
|
func (a *analysis) funcParams(id nodeid) nodeid {
|
go.tools/pointer: reflection, part 1: maps, and some core features.
Core:
reflect.TypeOf
reflect.ValueOf
reflect.Zero
reflect.Value.Interface
Maps:
(reflect.Value).MapIndex
(reflect.Value).MapKeys
(reflect.Value).SetMapIndex
(*reflect.rtype).Elem
(*reflect.rtype).Key
+ tests:
pointer/testdata/mapreflect.go.
oracle/testdata/src/main/reflection.go.
Interface objects (T, V...) have been renamed "tagged objects".
Abstraction: we model reflect.Value similar to
interface{}---as a pointer that points only to tagged
objects---but a reflect.Value may also point to an "indirect
tagged object", one in which the payload V is of type *T not T.
These are required because reflect.Values can hold lvalues,
e.g. when derived via Field() or Elem(), though we won't use
them till we get to structs and pointers.
Solving: each reflection intrinsic defines a new constraint
and resolution rule. Because of the nature of reflection,
generalizing across types, the resolution rules dynamically
create additional complex constraints during solving, where
previously only simple (copy) constraints were created.
This requires some solver changes:
The work done before the main solver loop (to attach new
constraints to the graph) is now done before each iteration,
in processNewConstraints.
Its loop over constraints is broken into two passes:
the first handles base (addr-of) constraints,
the second handles simple and complex constraints.
constraint.init() has been inlined. The only behaviour that
varies across constraints is ptr()
Sadly this will pessimize presolver optimisations, when we get
there; such is the price of reflection.
Objects: reflection intrinsics create objects (i.e. cause
memory allocations) with no SSA operation. We will represent
them as the cgnode of the instrinsic (e.g. reflect.New), so we
extend Labels and node.data to represent objects as a product
(not sum) of ssa.Value and cgnode and pull this out into its
own type, struct object. This simplifies a number of
invariants and saves space. The ntObject flag is now
represented by obj!=nil; the other flags are moved into
object.
cgnodes are now always recorded in objects/Labels for which it
is appropriate (all but those for globals, constants and the
shared contours for functions).
Also:
- Prepopulate the flattenMemo cache to consider reflect.Value
a fake pointer, not a struct.
- Improve accessors and documentation on type Label.
- @conctypes assertions renamed @types (since dyn. types needn't be concrete).
- add oracle 'describe' test on an interface (missing, an oversight).
R=crawshaw
CC=golang-dev
https://golang.org/cl/13418048
2013-09-16 07:49:10 -06:00
|
|
|
n := a.nodes[id]
|
|
|
|
if n.obj == nil || n.obj.flags&otFunction == 0 {
|
2013-08-22 10:27:55 -06:00
|
|
|
panic(fmt.Sprintf("funcParams(n%d): not a function object block", id))
|
|
|
|
}
|
|
|
|
return id + 1
|
|
|
|
}
|
|
|
|
|
|
|
|
// funcResults returns the first node of the results block of the
|
go.tools/pointer: reflection, part 1: maps, and some core features.
Core:
reflect.TypeOf
reflect.ValueOf
reflect.Zero
reflect.Value.Interface
Maps:
(reflect.Value).MapIndex
(reflect.Value).MapKeys
(reflect.Value).SetMapIndex
(*reflect.rtype).Elem
(*reflect.rtype).Key
+ tests:
pointer/testdata/mapreflect.go.
oracle/testdata/src/main/reflection.go.
Interface objects (T, V...) have been renamed "tagged objects".
Abstraction: we model reflect.Value similar to
interface{}---as a pointer that points only to tagged
objects---but a reflect.Value may also point to an "indirect
tagged object", one in which the payload V is of type *T not T.
These are required because reflect.Values can hold lvalues,
e.g. when derived via Field() or Elem(), though we won't use
them till we get to structs and pointers.
Solving: each reflection intrinsic defines a new constraint
and resolution rule. Because of the nature of reflection,
generalizing across types, the resolution rules dynamically
create additional complex constraints during solving, where
previously only simple (copy) constraints were created.
This requires some solver changes:
The work done before the main solver loop (to attach new
constraints to the graph) is now done before each iteration,
in processNewConstraints.
Its loop over constraints is broken into two passes:
the first handles base (addr-of) constraints,
the second handles simple and complex constraints.
constraint.init() has been inlined. The only behaviour that
varies across constraints is ptr()
Sadly this will pessimize presolver optimisations, when we get
there; such is the price of reflection.
Objects: reflection intrinsics create objects (i.e. cause
memory allocations) with no SSA operation. We will represent
them as the cgnode of the instrinsic (e.g. reflect.New), so we
extend Labels and node.data to represent objects as a product
(not sum) of ssa.Value and cgnode and pull this out into its
own type, struct object. This simplifies a number of
invariants and saves space. The ntObject flag is now
represented by obj!=nil; the other flags are moved into
object.
cgnodes are now always recorded in objects/Labels for which it
is appropriate (all but those for globals, constants and the
shared contours for functions).
Also:
- Prepopulate the flattenMemo cache to consider reflect.Value
a fake pointer, not a struct.
- Improve accessors and documentation on type Label.
- @conctypes assertions renamed @types (since dyn. types needn't be concrete).
- add oracle 'describe' test on an interface (missing, an oversight).
R=crawshaw
CC=golang-dev
https://golang.org/cl/13418048
2013-09-16 07:49:10 -06:00
|
|
|
// function whose object node (obj.flags&otFunction) is id.
|
2013-08-22 10:27:55 -06:00
|
|
|
//
|
|
|
|
func (a *analysis) funcResults(id nodeid) nodeid {
|
|
|
|
n := a.nodes[id]
|
go.tools/pointer: reflection, part 1: maps, and some core features.
Core:
reflect.TypeOf
reflect.ValueOf
reflect.Zero
reflect.Value.Interface
Maps:
(reflect.Value).MapIndex
(reflect.Value).MapKeys
(reflect.Value).SetMapIndex
(*reflect.rtype).Elem
(*reflect.rtype).Key
+ tests:
pointer/testdata/mapreflect.go.
oracle/testdata/src/main/reflection.go.
Interface objects (T, V...) have been renamed "tagged objects".
Abstraction: we model reflect.Value similar to
interface{}---as a pointer that points only to tagged
objects---but a reflect.Value may also point to an "indirect
tagged object", one in which the payload V is of type *T not T.
These are required because reflect.Values can hold lvalues,
e.g. when derived via Field() or Elem(), though we won't use
them till we get to structs and pointers.
Solving: each reflection intrinsic defines a new constraint
and resolution rule. Because of the nature of reflection,
generalizing across types, the resolution rules dynamically
create additional complex constraints during solving, where
previously only simple (copy) constraints were created.
This requires some solver changes:
The work done before the main solver loop (to attach new
constraints to the graph) is now done before each iteration,
in processNewConstraints.
Its loop over constraints is broken into two passes:
the first handles base (addr-of) constraints,
the second handles simple and complex constraints.
constraint.init() has been inlined. The only behaviour that
varies across constraints is ptr()
Sadly this will pessimize presolver optimisations, when we get
there; such is the price of reflection.
Objects: reflection intrinsics create objects (i.e. cause
memory allocations) with no SSA operation. We will represent
them as the cgnode of the instrinsic (e.g. reflect.New), so we
extend Labels and node.data to represent objects as a product
(not sum) of ssa.Value and cgnode and pull this out into its
own type, struct object. This simplifies a number of
invariants and saves space. The ntObject flag is now
represented by obj!=nil; the other flags are moved into
object.
cgnodes are now always recorded in objects/Labels for which it
is appropriate (all but those for globals, constants and the
shared contours for functions).
Also:
- Prepopulate the flattenMemo cache to consider reflect.Value
a fake pointer, not a struct.
- Improve accessors and documentation on type Label.
- @conctypes assertions renamed @types (since dyn. types needn't be concrete).
- add oracle 'describe' test on an interface (missing, an oversight).
R=crawshaw
CC=golang-dev
https://golang.org/cl/13418048
2013-09-16 07:49:10 -06:00
|
|
|
if n.obj == nil || n.obj.flags&otFunction == 0 {
|
2013-08-22 10:27:55 -06:00
|
|
|
panic(fmt.Sprintf("funcResults(n%d): not a function object block", id))
|
|
|
|
}
|
|
|
|
sig := n.typ.(*types.Signature)
|
|
|
|
id += 1 + nodeid(a.sizeof(sig.Params()))
|
|
|
|
if sig.Recv() != nil {
|
|
|
|
id += nodeid(a.sizeof(sig.Recv().Type()))
|
|
|
|
}
|
|
|
|
return id
|
|
|
|
}
|
|
|
|
|
|
|
|
// ---------- Constraint creation ----------
|
|
|
|
|
|
|
|
// copy creates a constraint of the form dst = src.
|
|
|
|
// sizeof is the width (in logical fields) of the copied type.
|
|
|
|
//
|
|
|
|
func (a *analysis) copy(dst, src nodeid, sizeof uint32) {
|
|
|
|
if src == dst || sizeof == 0 {
|
|
|
|
return // trivial
|
|
|
|
}
|
|
|
|
if src == 0 || dst == 0 {
|
|
|
|
panic(fmt.Sprintf("ill-typed copy dst=n%d src=n%d", dst, src))
|
|
|
|
}
|
|
|
|
for i := uint32(0); i < sizeof; i++ {
|
|
|
|
a.addConstraint(©Constraint{dst, src})
|
|
|
|
src++
|
|
|
|
dst++
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// addressOf creates a constraint of the form id = &obj.
|
|
|
|
func (a *analysis) addressOf(id, obj nodeid) {
|
|
|
|
if id == 0 {
|
|
|
|
panic("addressOf: zero id")
|
|
|
|
}
|
|
|
|
if obj == 0 {
|
|
|
|
panic("addressOf: zero obj")
|
|
|
|
}
|
|
|
|
a.addConstraint(&addrConstraint{id, obj})
|
|
|
|
}
|
|
|
|
|
go.tools/pointer: strength reduction during constraint generation.
Motivation: simple constraints---copy and addr---are more
amenable to pre-solver optimizations (forthcoming) than
complex constraints: load, store, and all others.
In code such as the following:
t0 = new struct { x, y int }
t1 = &t0.y
t2 = *t1
there's no need for the full generality of a (complex)
load constraint for t2=*t1 since t1 can only point to t0.y.
All we need is a (simple) copy constraint t2 = (t0.y)
where (t0.y) is the object node label for that field.
For all "addressable" SSA instructions, we tabulate
whether their points-to set is necessarily a singleton. For
some (e.g. Alloc, MakeSlice, etc) this is always true by
design. For others (e.g. FieldAddr) it depends on their
operands.
We exploit this information when generating constraints:
all load-form and store-form constraints are reduced to copy
constraints if the pointer's PTS is a singleton.
Similarly all FieldAddr (y=&x.f) and IndexAddr (y=&x[0])
constraints are reduced to offset addition, for singleton
operands.
Here's the constraint mix when running on the oracle itself.
The total number of constraints is unchanged but the fraction
that are complex has gone down to 21% from 53%.
before after
--simple--
addr 20682 46949
copy 61454 91211
--complex--
offsetAddr 41621 15325
load 18769 12925
store 30758 6908
invoke 758 760
typeAssert 1688 1689
total 175832 175869
Also:
- Add Pointer.Context() for local variables,
since we now plumb cgnodes throughout. Nice.
- Refactor all load-form (load, receive, lookup) and
store-form (Store, send, MapUpdate) constraints to use
genLoad and genStore.
- Log counts of constraints by type.
- valNodes split into localval and globalval maps;
localval is purged after each function.
- analogous maps localobj[v] and globalobj[v] hold sole label
for pts(v), if singleton.
- fnObj map subsumed by globalobj.
- make{Function/Global/Constant} inlined into objectValue.
Much cleaner.
R=crawshaw
CC=golang-dev
https://golang.org/cl/13979043
2013-09-27 09:33:01 -06:00
|
|
|
// load creates a load constraint of the form dst = src[offset].
|
2013-08-22 10:27:55 -06:00
|
|
|
// offset is the pointer offset in logical fields.
|
|
|
|
// sizeof is the width (in logical fields) of the loaded type.
|
|
|
|
//
|
go.tools/pointer: strength reduction during constraint generation.
Motivation: simple constraints---copy and addr---are more
amenable to pre-solver optimizations (forthcoming) than
complex constraints: load, store, and all others.
In code such as the following:
t0 = new struct { x, y int }
t1 = &t0.y
t2 = *t1
there's no need for the full generality of a (complex)
load constraint for t2=*t1 since t1 can only point to t0.y.
All we need is a (simple) copy constraint t2 = (t0.y)
where (t0.y) is the object node label for that field.
For all "addressable" SSA instructions, we tabulate
whether their points-to set is necessarily a singleton. For
some (e.g. Alloc, MakeSlice, etc) this is always true by
design. For others (e.g. FieldAddr) it depends on their
operands.
We exploit this information when generating constraints:
all load-form and store-form constraints are reduced to copy
constraints if the pointer's PTS is a singleton.
Similarly all FieldAddr (y=&x.f) and IndexAddr (y=&x[0])
constraints are reduced to offset addition, for singleton
operands.
Here's the constraint mix when running on the oracle itself.
The total number of constraints is unchanged but the fraction
that are complex has gone down to 21% from 53%.
before after
--simple--
addr 20682 46949
copy 61454 91211
--complex--
offsetAddr 41621 15325
load 18769 12925
store 30758 6908
invoke 758 760
typeAssert 1688 1689
total 175832 175869
Also:
- Add Pointer.Context() for local variables,
since we now plumb cgnodes throughout. Nice.
- Refactor all load-form (load, receive, lookup) and
store-form (Store, send, MapUpdate) constraints to use
genLoad and genStore.
- Log counts of constraints by type.
- valNodes split into localval and globalval maps;
localval is purged after each function.
- analogous maps localobj[v] and globalobj[v] hold sole label
for pts(v), if singleton.
- fnObj map subsumed by globalobj.
- make{Function/Global/Constant} inlined into objectValue.
Much cleaner.
R=crawshaw
CC=golang-dev
https://golang.org/cl/13979043
2013-09-27 09:33:01 -06:00
|
|
|
func (a *analysis) load(dst, src nodeid, offset, sizeof uint32) {
|
2013-08-22 10:27:55 -06:00
|
|
|
if dst == 0 {
|
|
|
|
return // load of non-pointerlike value
|
|
|
|
}
|
|
|
|
if src == 0 && dst == 0 {
|
|
|
|
return // non-pointerlike operation
|
|
|
|
}
|
|
|
|
if src == 0 || dst == 0 {
|
|
|
|
panic(fmt.Sprintf("ill-typed load dst=n%d src=n%d", dst, src))
|
|
|
|
}
|
|
|
|
for i := uint32(0); i < sizeof; i++ {
|
|
|
|
a.addConstraint(&loadConstraint{offset, dst, src})
|
|
|
|
offset++
|
|
|
|
dst++
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
go.tools/pointer: strength reduction during constraint generation.
Motivation: simple constraints---copy and addr---are more
amenable to pre-solver optimizations (forthcoming) than
complex constraints: load, store, and all others.
In code such as the following:
t0 = new struct { x, y int }
t1 = &t0.y
t2 = *t1
there's no need for the full generality of a (complex)
load constraint for t2=*t1 since t1 can only point to t0.y.
All we need is a (simple) copy constraint t2 = (t0.y)
where (t0.y) is the object node label for that field.
For all "addressable" SSA instructions, we tabulate
whether their points-to set is necessarily a singleton. For
some (e.g. Alloc, MakeSlice, etc) this is always true by
design. For others (e.g. FieldAddr) it depends on their
operands.
We exploit this information when generating constraints:
all load-form and store-form constraints are reduced to copy
constraints if the pointer's PTS is a singleton.
Similarly all FieldAddr (y=&x.f) and IndexAddr (y=&x[0])
constraints are reduced to offset addition, for singleton
operands.
Here's the constraint mix when running on the oracle itself.
The total number of constraints is unchanged but the fraction
that are complex has gone down to 21% from 53%.
before after
--simple--
addr 20682 46949
copy 61454 91211
--complex--
offsetAddr 41621 15325
load 18769 12925
store 30758 6908
invoke 758 760
typeAssert 1688 1689
total 175832 175869
Also:
- Add Pointer.Context() for local variables,
since we now plumb cgnodes throughout. Nice.
- Refactor all load-form (load, receive, lookup) and
store-form (Store, send, MapUpdate) constraints to use
genLoad and genStore.
- Log counts of constraints by type.
- valNodes split into localval and globalval maps;
localval is purged after each function.
- analogous maps localobj[v] and globalobj[v] hold sole label
for pts(v), if singleton.
- fnObj map subsumed by globalobj.
- make{Function/Global/Constant} inlined into objectValue.
Much cleaner.
R=crawshaw
CC=golang-dev
https://golang.org/cl/13979043
2013-09-27 09:33:01 -06:00
|
|
|
// store creates a store constraint of the form dst[offset] = src.
|
2013-08-22 10:27:55 -06:00
|
|
|
// offset is the pointer offset in logical fields.
|
|
|
|
// sizeof is the width (in logical fields) of the stored type.
|
|
|
|
//
|
go.tools/pointer: strength reduction during constraint generation.
Motivation: simple constraints---copy and addr---are more
amenable to pre-solver optimizations (forthcoming) than
complex constraints: load, store, and all others.
In code such as the following:
t0 = new struct { x, y int }
t1 = &t0.y
t2 = *t1
there's no need for the full generality of a (complex)
load constraint for t2=*t1 since t1 can only point to t0.y.
All we need is a (simple) copy constraint t2 = (t0.y)
where (t0.y) is the object node label for that field.
For all "addressable" SSA instructions, we tabulate
whether their points-to set is necessarily a singleton. For
some (e.g. Alloc, MakeSlice, etc) this is always true by
design. For others (e.g. FieldAddr) it depends on their
operands.
We exploit this information when generating constraints:
all load-form and store-form constraints are reduced to copy
constraints if the pointer's PTS is a singleton.
Similarly all FieldAddr (y=&x.f) and IndexAddr (y=&x[0])
constraints are reduced to offset addition, for singleton
operands.
Here's the constraint mix when running on the oracle itself.
The total number of constraints is unchanged but the fraction
that are complex has gone down to 21% from 53%.
before after
--simple--
addr 20682 46949
copy 61454 91211
--complex--
offsetAddr 41621 15325
load 18769 12925
store 30758 6908
invoke 758 760
typeAssert 1688 1689
total 175832 175869
Also:
- Add Pointer.Context() for local variables,
since we now plumb cgnodes throughout. Nice.
- Refactor all load-form (load, receive, lookup) and
store-form (Store, send, MapUpdate) constraints to use
genLoad and genStore.
- Log counts of constraints by type.
- valNodes split into localval and globalval maps;
localval is purged after each function.
- analogous maps localobj[v] and globalobj[v] hold sole label
for pts(v), if singleton.
- fnObj map subsumed by globalobj.
- make{Function/Global/Constant} inlined into objectValue.
Much cleaner.
R=crawshaw
CC=golang-dev
https://golang.org/cl/13979043
2013-09-27 09:33:01 -06:00
|
|
|
func (a *analysis) store(dst, src nodeid, offset uint32, sizeof uint32) {
|
2013-08-22 10:27:55 -06:00
|
|
|
if src == 0 {
|
|
|
|
return // store of non-pointerlike value
|
|
|
|
}
|
|
|
|
if src == 0 && dst == 0 {
|
|
|
|
return // non-pointerlike operation
|
|
|
|
}
|
|
|
|
if src == 0 || dst == 0 {
|
|
|
|
panic(fmt.Sprintf("ill-typed store dst=n%d src=n%d", dst, src))
|
|
|
|
}
|
|
|
|
for i := uint32(0); i < sizeof; i++ {
|
|
|
|
a.addConstraint(&storeConstraint{offset, dst, src})
|
|
|
|
offset++
|
|
|
|
src++
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// offsetAddr creates an offsetAddr constraint of the form dst = &src.#offset.
|
|
|
|
// offset is the field offset in logical fields.
|
|
|
|
//
|
|
|
|
func (a *analysis) offsetAddr(dst, src nodeid, offset uint32) {
|
|
|
|
if offset == 0 {
|
|
|
|
// Simplify dst = &src->f0
|
|
|
|
// to dst = src
|
|
|
|
// (NB: this optimisation is defeated by the identity
|
|
|
|
// field prepended to struct and array objects.)
|
|
|
|
a.copy(dst, src, 1)
|
|
|
|
} else {
|
|
|
|
a.addConstraint(&offsetAddrConstraint{offset, dst, src})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2013-10-29 19:57:53 -06:00
|
|
|
// typeAssert creates a typeFilter or untag constraint of the form dst = src.(T):
|
|
|
|
// typeFilter for an interface, untag for a concrete type.
|
|
|
|
// The exact flag is specified as for untagConstraint.
|
|
|
|
//
|
|
|
|
func (a *analysis) typeAssert(T types.Type, dst, src nodeid, exact bool) {
|
|
|
|
if isInterface(T) {
|
|
|
|
a.addConstraint(&typeFilterConstraint{T, dst, src})
|
|
|
|
} else {
|
|
|
|
a.addConstraint(&untagConstraint{T, dst, src, exact})
|
|
|
|
}
|
2013-09-23 14:13:01 -06:00
|
|
|
}
|
|
|
|
|
2013-08-22 10:27:55 -06:00
|
|
|
// addConstraint adds c to the constraint set.
|
|
|
|
func (a *analysis) addConstraint(c constraint) {
|
|
|
|
a.constraints = append(a.constraints, c)
|
|
|
|
if a.log != nil {
|
|
|
|
fmt.Fprintf(a.log, "\t%s\n", c)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// copyElems generates load/store constraints for *dst = *src,
|
|
|
|
// where src and dst are slices or *arrays.
|
|
|
|
//
|
go.tools/pointer: strength reduction during constraint generation.
Motivation: simple constraints---copy and addr---are more
amenable to pre-solver optimizations (forthcoming) than
complex constraints: load, store, and all others.
In code such as the following:
t0 = new struct { x, y int }
t1 = &t0.y
t2 = *t1
there's no need for the full generality of a (complex)
load constraint for t2=*t1 since t1 can only point to t0.y.
All we need is a (simple) copy constraint t2 = (t0.y)
where (t0.y) is the object node label for that field.
For all "addressable" SSA instructions, we tabulate
whether their points-to set is necessarily a singleton. For
some (e.g. Alloc, MakeSlice, etc) this is always true by
design. For others (e.g. FieldAddr) it depends on their
operands.
We exploit this information when generating constraints:
all load-form and store-form constraints are reduced to copy
constraints if the pointer's PTS is a singleton.
Similarly all FieldAddr (y=&x.f) and IndexAddr (y=&x[0])
constraints are reduced to offset addition, for singleton
operands.
Here's the constraint mix when running on the oracle itself.
The total number of constraints is unchanged but the fraction
that are complex has gone down to 21% from 53%.
before after
--simple--
addr 20682 46949
copy 61454 91211
--complex--
offsetAddr 41621 15325
load 18769 12925
store 30758 6908
invoke 758 760
typeAssert 1688 1689
total 175832 175869
Also:
- Add Pointer.Context() for local variables,
since we now plumb cgnodes throughout. Nice.
- Refactor all load-form (load, receive, lookup) and
store-form (Store, send, MapUpdate) constraints to use
genLoad and genStore.
- Log counts of constraints by type.
- valNodes split into localval and globalval maps;
localval is purged after each function.
- analogous maps localobj[v] and globalobj[v] hold sole label
for pts(v), if singleton.
- fnObj map subsumed by globalobj.
- make{Function/Global/Constant} inlined into objectValue.
Much cleaner.
R=crawshaw
CC=golang-dev
https://golang.org/cl/13979043
2013-09-27 09:33:01 -06:00
|
|
|
func (a *analysis) copyElems(cgn *cgnode, typ types.Type, dst, src ssa.Value) {
|
2013-08-22 10:27:55 -06:00
|
|
|
tmp := a.addNodes(typ, "copy")
|
|
|
|
sz := a.sizeof(typ)
|
go.tools/pointer: strength reduction during constraint generation.
Motivation: simple constraints---copy and addr---are more
amenable to pre-solver optimizations (forthcoming) than
complex constraints: load, store, and all others.
In code such as the following:
t0 = new struct { x, y int }
t1 = &t0.y
t2 = *t1
there's no need for the full generality of a (complex)
load constraint for t2=*t1 since t1 can only point to t0.y.
All we need is a (simple) copy constraint t2 = (t0.y)
where (t0.y) is the object node label for that field.
For all "addressable" SSA instructions, we tabulate
whether their points-to set is necessarily a singleton. For
some (e.g. Alloc, MakeSlice, etc) this is always true by
design. For others (e.g. FieldAddr) it depends on their
operands.
We exploit this information when generating constraints:
all load-form and store-form constraints are reduced to copy
constraints if the pointer's PTS is a singleton.
Similarly all FieldAddr (y=&x.f) and IndexAddr (y=&x[0])
constraints are reduced to offset addition, for singleton
operands.
Here's the constraint mix when running on the oracle itself.
The total number of constraints is unchanged but the fraction
that are complex has gone down to 21% from 53%.
before after
--simple--
addr 20682 46949
copy 61454 91211
--complex--
offsetAddr 41621 15325
load 18769 12925
store 30758 6908
invoke 758 760
typeAssert 1688 1689
total 175832 175869
Also:
- Add Pointer.Context() for local variables,
since we now plumb cgnodes throughout. Nice.
- Refactor all load-form (load, receive, lookup) and
store-form (Store, send, MapUpdate) constraints to use
genLoad and genStore.
- Log counts of constraints by type.
- valNodes split into localval and globalval maps;
localval is purged after each function.
- analogous maps localobj[v] and globalobj[v] hold sole label
for pts(v), if singleton.
- fnObj map subsumed by globalobj.
- make{Function/Global/Constant} inlined into objectValue.
Much cleaner.
R=crawshaw
CC=golang-dev
https://golang.org/cl/13979043
2013-09-27 09:33:01 -06:00
|
|
|
a.genLoad(cgn, tmp, src, 1, sz)
|
|
|
|
a.genStore(cgn, dst, tmp, 1, sz)
|
2013-08-22 10:27:55 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
// ---------- Constraint generation ----------
|
|
|
|
|
|
|
|
// genConv generates constraints for the conversion operation conv.
//
// Only pointerlike conversions generate constraints; most basic-type
// conversions are ignored, and several unsafe.Pointer cases are
// (unsoundly) modelled as fresh allocations.  Any conversion form not
// handled below is a bug and causes a panic.
func (a *analysis) genConv(conv *ssa.Convert, cgn *cgnode) {
	res := a.valueNode(conv)
	if res == 0 {
		return // result is non-pointerlike
	}

	tSrc := conv.X.Type()
	tDst := conv.Type()

	switch utSrc := tSrc.Underlying().(type) {
	case *types.Slice:
		// []byte/[]rune -> string?
		// No constraint needed: the result string does not alias
		// the operand's elements.
		return

	case *types.Pointer:
		// *T -> unsafe.Pointer?
		if tDst == tUnsafePtr {
			// ignore for now
			// a.copy(res, a.valueNode(conv.X), 1)
			return
		}

	case *types.Basic:
		switch utDst := tDst.Underlying().(type) {
		case *types.Pointer:
			// unsafe.Pointer -> *T? (currently unsound)
			if utSrc == tUnsafePtr {
				// For now, suppress unsafe.Pointer conversion
				// warnings on "syscall" package.
				// TODO(adonovan): audit for soundness.
				if conv.Parent().Pkg.Object.Path() != "syscall" {
					a.warnf(conv.Pos(),
						"unsound: %s contains an unsafe.Pointer conversion (to %s)",
						conv.Parent(), tDst)
				}

				// For now, we treat unsafe.Pointer->*T
				// conversion like new(T) and create an
				// unaliased object. In future we may handle
				// unsafe conversions soundly; see TODO file.
				obj := a.addNodes(mustDeref(tDst), "unsafe.Pointer conversion")
				a.endObject(obj, cgn, conv)
				a.addressOf(res, obj)
				return
			}

		case *types.Slice:
			// string -> []byte/[]rune (or named aliases)?
			// The conversion allocates a fresh backing array.
			if utSrc.Info()&types.IsString != 0 {
				obj := a.addNodes(sliceToArray(tDst), "convert")
				a.endObject(obj, cgn, conv)
				a.addressOf(res, obj)
				return
			}

		case *types.Basic:
			// TODO(adonovan):
			// unsafe.Pointer -> uintptr?
			// uintptr -> unsafe.Pointer
			//
			// The language doesn't adequately specify the
			// behaviour of these operations, but almost
			// all uses of these conversions (even in the
			// spec) seem to imply a non-moving garbage
			// collection strategy, or implicit "pinning"
			// semantics for unsafe.Pointer conversions.

			// TODO(adonovan): we need more work before we can handle
			// cryptopointers well.
			if utSrc == tUnsafePtr || utDst == tUnsafePtr {
				// Ignore for now. See TODO file for ideas.
				return
			}

			return // ignore all other basic type conversions
		}
	}

	panic(fmt.Sprintf("illegal *ssa.Convert %s -> %s: %s", tSrc, tDst, conv.Parent()))
}
|
|
|
|
|
|
|
|
// genAppend generates constraints for a call to append.
|
go.tools/pointer: reflection, part 1: maps, and some core features.
Core:
reflect.TypeOf
reflect.ValueOf
reflect.Zero
reflect.Value.Interface
Maps:
(reflect.Value).MapIndex
(reflect.Value).MapKeys
(reflect.Value).SetMapIndex
(*reflect.rtype).Elem
(*reflect.rtype).Key
+ tests:
pointer/testdata/mapreflect.go.
oracle/testdata/src/main/reflection.go.
Interface objects (T, V...) have been renamed "tagged objects".
Abstraction: we model reflect.Value similar to
interface{}---as a pointer that points only to tagged
objects---but a reflect.Value may also point to an "indirect
tagged object", one in which the payload V is of type *T not T.
These are required because reflect.Values can hold lvalues,
e.g. when derived via Field() or Elem(), though we won't use
them till we get to structs and pointers.
Solving: each reflection intrinsic defines a new constraint
and resolution rule. Because of the nature of reflection,
generalizing across types, the resolution rules dynamically
create additional complex constraints during solving, where
previously only simple (copy) constraints were created.
This requires some solver changes:
The work done before the main solver loop (to attach new
constraints to the graph) is now done before each iteration,
in processNewConstraints.
Its loop over constraints is broken into two passes:
the first handles base (addr-of) constraints,
the second handles simple and complex constraints.
constraint.init() has been inlined. The only behaviour that
varies across constraints is ptr()
Sadly this will pessimize presolver optimisations, when we get
there; such is the price of reflection.
Objects: reflection intrinsics create objects (i.e. cause
memory allocations) with no SSA operation. We will represent
them as the cgnode of the instrinsic (e.g. reflect.New), so we
extend Labels and node.data to represent objects as a product
(not sum) of ssa.Value and cgnode and pull this out into its
own type, struct object. This simplifies a number of
invariants and saves space. The ntObject flag is now
represented by obj!=nil; the other flags are moved into
object.
cgnodes are now always recorded in objects/Labels for which it
is appropriate (all but those for globals, constants and the
shared contours for functions).
Also:
- Prepopulate the flattenMemo cache to consider reflect.Value
a fake pointer, not a struct.
- Improve accessors and documentation on type Label.
- @conctypes assertions renamed @types (since dyn. types needn't be concrete).
- add oracle 'describe' test on an interface (missing, an oversight).
R=crawshaw
CC=golang-dev
https://golang.org/cl/13418048
2013-09-16 07:49:10 -06:00
|
|
|
func (a *analysis) genAppend(instr *ssa.Call, cgn *cgnode) {
|
2013-08-22 10:27:55 -06:00
|
|
|
// Consider z = append(x, y). y is optional.
|
|
|
|
// This may allocate a new [1]T array; call its object w.
|
|
|
|
// We get the following constraints:
|
|
|
|
// z = x
|
|
|
|
// z = &w
|
|
|
|
// *z = *y
|
|
|
|
|
go.tools/pointer: strength reduction during constraint generation.
Motivation: simple constraints---copy and addr---are more
amenable to pre-solver optimizations (forthcoming) than
complex constraints: load, store, and all others.
In code such as the following:
t0 = new struct { x, y int }
t1 = &t0.y
t2 = *t1
there's no need for the full generality of a (complex)
load constraint for t2=*t1 since t1 can only point to t0.y.
All we need is a (simple) copy constraint t2 = (t0.y)
where (t0.y) is the object node label for that field.
For all "addressable" SSA instructions, we tabulate
whether their points-to set is necessarily a singleton. For
some (e.g. Alloc, MakeSlice, etc) this is always true by
design. For others (e.g. FieldAddr) it depends on their
operands.
We exploit this information when generating constraints:
all load-form and store-form constraints are reduced to copy
constraints if the pointer's PTS is a singleton.
Similarly all FieldAddr (y=&x.f) and IndexAddr (y=&x[0])
constraints are reduced to offset addition, for singleton
operands.
Here's the constraint mix when running on the oracle itself.
The total number of constraints is unchanged but the fraction
that are complex has gone down to 21% from 53%.
before after
--simple--
addr 20682 46949
copy 61454 91211
--complex--
offsetAddr 41621 15325
load 18769 12925
store 30758 6908
invoke 758 760
typeAssert 1688 1689
total 175832 175869
Also:
- Add Pointer.Context() for local variables,
since we now plumb cgnodes throughout. Nice.
- Refactor all load-form (load, receive, lookup) and
store-form (Store, send, MapUpdate) constraints to use
genLoad and genStore.
- Log counts of constraints by type.
- valNodes split into localval and globalval maps;
localval is purged after each function.
- analogous maps localobj[v] and globalobj[v] hold sole label
for pts(v), if singleton.
- fnObj map subsumed by globalobj.
- make{Function/Global/Constant} inlined into objectValue.
Much cleaner.
R=crawshaw
CC=golang-dev
https://golang.org/cl/13979043
2013-09-27 09:33:01 -06:00
|
|
|
x := instr.Call.Args[0]
|
2013-08-22 10:27:55 -06:00
|
|
|
|
go.tools/pointer: strength reduction during constraint generation.
Motivation: simple constraints---copy and addr---are more
amenable to pre-solver optimizations (forthcoming) than
complex constraints: load, store, and all others.
In code such as the following:
t0 = new struct { x, y int }
t1 = &t0.y
t2 = *t1
there's no need for the full generality of a (complex)
load constraint for t2=*t1 since t1 can only point to t0.y.
All we need is a (simple) copy constraint t2 = (t0.y)
where (t0.y) is the object node label for that field.
For all "addressable" SSA instructions, we tabulate
whether their points-to set is necessarily a singleton. For
some (e.g. Alloc, MakeSlice, etc) this is always true by
design. For others (e.g. FieldAddr) it depends on their
operands.
We exploit this information when generating constraints:
all load-form and store-form constraints are reduced to copy
constraints if the pointer's PTS is a singleton.
Similarly all FieldAddr (y=&x.f) and IndexAddr (y=&x[0])
constraints are reduced to offset addition, for singleton
operands.
Here's the constraint mix when running on the oracle itself.
The total number of constraints is unchanged but the fraction
that are complex has gone down to 21% from 53%.
before after
--simple--
addr 20682 46949
copy 61454 91211
--complex--
offsetAddr 41621 15325
load 18769 12925
store 30758 6908
invoke 758 760
typeAssert 1688 1689
total 175832 175869
Also:
- Add Pointer.Context() for local variables,
since we now plumb cgnodes throughout. Nice.
- Refactor all load-form (load, receive, lookup) and
store-form (Store, send, MapUpdate) constraints to use
genLoad and genStore.
- Log counts of constraints by type.
- valNodes split into localval and globalval maps;
localval is purged after each function.
- analogous maps localobj[v] and globalobj[v] hold sole label
for pts(v), if singleton.
- fnObj map subsumed by globalobj.
- make{Function/Global/Constant} inlined into objectValue.
Much cleaner.
R=crawshaw
CC=golang-dev
https://golang.org/cl/13979043
2013-09-27 09:33:01 -06:00
|
|
|
z := instr
|
|
|
|
a.copy(a.valueNode(z), a.valueNode(x), 1) // z = x
|
2013-08-22 10:27:55 -06:00
|
|
|
|
|
|
|
if len(instr.Call.Args) == 1 {
|
|
|
|
return // no allocation for z = append(x) or _ = append(x).
|
|
|
|
}
|
|
|
|
|
|
|
|
// TODO(adonovan): test append([]byte, ...string) []byte.
|
|
|
|
|
go.tools/pointer: strength reduction during constraint generation.
Motivation: simple constraints---copy and addr---are more
amenable to pre-solver optimizations (forthcoming) than
complex constraints: load, store, and all others.
In code such as the following:
t0 = new struct { x, y int }
t1 = &t0.y
t2 = *t1
there's no need for the full generality of a (complex)
load constraint for t2=*t1 since t1 can only point to t0.y.
All we need is a (simple) copy constraint t2 = (t0.y)
where (t0.y) is the object node label for that field.
For all "addressable" SSA instructions, we tabulate
whether their points-to set is necessarily a singleton. For
some (e.g. Alloc, MakeSlice, etc) this is always true by
design. For others (e.g. FieldAddr) it depends on their
operands.
We exploit this information when generating constraints:
all load-form and store-form constraints are reduced to copy
constraints if the pointer's PTS is a singleton.
Similarly all FieldAddr (y=&x.f) and IndexAddr (y=&x[0])
constraints are reduced to offset addition, for singleton
operands.
Here's the constraint mix when running on the oracle itself.
The total number of constraints is unchanged but the fraction
that are complex has gone down to 21% from 53%.
before after
--simple--
addr 20682 46949
copy 61454 91211
--complex--
offsetAddr 41621 15325
load 18769 12925
store 30758 6908
invoke 758 760
typeAssert 1688 1689
total 175832 175869
Also:
- Add Pointer.Context() for local variables,
since we now plumb cgnodes throughout. Nice.
- Refactor all load-form (load, receive, lookup) and
store-form (Store, send, MapUpdate) constraints to use
genLoad and genStore.
- Log counts of constraints by type.
- valNodes split into localval and globalval maps;
localval is purged after each function.
- analogous maps localobj[v] and globalobj[v] hold sole label
for pts(v), if singleton.
- fnObj map subsumed by globalobj.
- make{Function/Global/Constant} inlined into objectValue.
Much cleaner.
R=crawshaw
CC=golang-dev
https://golang.org/cl/13979043
2013-09-27 09:33:01 -06:00
|
|
|
y := instr.Call.Args[1]
|
2013-08-22 10:27:55 -06:00
|
|
|
tArray := sliceToArray(instr.Call.Args[0].Type())
|
|
|
|
|
|
|
|
var w nodeid
|
|
|
|
w = a.nextNode()
|
|
|
|
a.addNodes(tArray, "append")
|
go.tools/pointer: reflection, part 1: maps, and some core features.
Core:
reflect.TypeOf
reflect.ValueOf
reflect.Zero
reflect.Value.Interface
Maps:
(reflect.Value).MapIndex
(reflect.Value).MapKeys
(reflect.Value).SetMapIndex
(*reflect.rtype).Elem
(*reflect.rtype).Key
+ tests:
pointer/testdata/mapreflect.go.
oracle/testdata/src/main/reflection.go.
Interface objects (T, V...) have been renamed "tagged objects".
Abstraction: we model reflect.Value similar to
interface{}---as a pointer that points only to tagged
objects---but a reflect.Value may also point to an "indirect
tagged object", one in which the payload V is of type *T not T.
These are required because reflect.Values can hold lvalues,
e.g. when derived via Field() or Elem(), though we won't use
them till we get to structs and pointers.
Solving: each reflection intrinsic defines a new constraint
and resolution rule. Because of the nature of reflection,
generalizing across types, the resolution rules dynamically
create additional complex constraints during solving, where
previously only simple (copy) constraints were created.
This requires some solver changes:
The work done before the main solver loop (to attach new
constraints to the graph) is now done before each iteration,
in processNewConstraints.
Its loop over constraints is broken into two passes:
the first handles base (addr-of) constraints,
the second handles simple and complex constraints.
constraint.init() has been inlined. The only behaviour that
varies across constraints is ptr()
Sadly this will pessimize presolver optimisations, when we get
there; such is the price of reflection.
Objects: reflection intrinsics create objects (i.e. cause
memory allocations) with no SSA operation. We will represent
them as the cgnode of the instrinsic (e.g. reflect.New), so we
extend Labels and node.data to represent objects as a product
(not sum) of ssa.Value and cgnode and pull this out into its
own type, struct object. This simplifies a number of
invariants and saves space. The ntObject flag is now
represented by obj!=nil; the other flags are moved into
object.
cgnodes are now always recorded in objects/Labels for which it
is appropriate (all but those for globals, constants and the
shared contours for functions).
Also:
- Prepopulate the flattenMemo cache to consider reflect.Value
a fake pointer, not a struct.
- Improve accessors and documentation on type Label.
- @conctypes assertions renamed @types (since dyn. types needn't be concrete).
- add oracle 'describe' test on an interface (missing, an oversight).
R=crawshaw
CC=golang-dev
https://golang.org/cl/13418048
2013-09-16 07:49:10 -06:00
|
|
|
a.endObject(w, cgn, instr)
|
2013-08-22 10:27:55 -06:00
|
|
|
|
go.tools/pointer: strength reduction during constraint generation.
Motivation: simple constraints---copy and addr---are more
amenable to pre-solver optimizations (forthcoming) than
complex constraints: load, store, and all others.
In code such as the following:
t0 = new struct { x, y int }
t1 = &t0.y
t2 = *t1
there's no need for the full generality of a (complex)
load constraint for t2=*t1 since t1 can only point to t0.y.
All we need is a (simple) copy constraint t2 = (t0.y)
where (t0.y) is the object node label for that field.
For all "addressable" SSA instructions, we tabulate
whether their points-to set is necessarily a singleton. For
some (e.g. Alloc, MakeSlice, etc) this is always true by
design. For others (e.g. FieldAddr) it depends on their
operands.
We exploit this information when generating constraints:
all load-form and store-form constraints are reduced to copy
constraints if the pointer's PTS is a singleton.
Similarly all FieldAddr (y=&x.f) and IndexAddr (y=&x[0])
constraints are reduced to offset addition, for singleton
operands.
Here's the constraint mix when running on the oracle itself.
The total number of constraints is unchanged but the fraction
that are complex has gone down to 21% from 53%.
before after
--simple--
addr 20682 46949
copy 61454 91211
--complex--
offsetAddr 41621 15325
load 18769 12925
store 30758 6908
invoke 758 760
typeAssert 1688 1689
total 175832 175869
Also:
- Add Pointer.Context() for local variables,
since we now plumb cgnodes throughout. Nice.
- Refactor all load-form (load, receive, lookup) and
store-form (Store, send, MapUpdate) constraints to use
genLoad and genStore.
- Log counts of constraints by type.
- valNodes split into localval and globalval maps;
localval is purged after each function.
- analogous maps localobj[v] and globalobj[v] hold sole label
for pts(v), if singleton.
- fnObj map subsumed by globalobj.
- make{Function/Global/Constant} inlined into objectValue.
Much cleaner.
R=crawshaw
CC=golang-dev
https://golang.org/cl/13979043
2013-09-27 09:33:01 -06:00
|
|
|
a.copyElems(cgn, tArray.Elem(), z, y) // *z = *y
|
|
|
|
a.addressOf(a.valueNode(z), w) // z = &w
|
2013-08-22 10:27:55 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
// genBuiltinCall generates contraints for a call to a built-in.
|
go.tools/pointer: reflection, part 1: maps, and some core features.
Core:
reflect.TypeOf
reflect.ValueOf
reflect.Zero
reflect.Value.Interface
Maps:
(reflect.Value).MapIndex
(reflect.Value).MapKeys
(reflect.Value).SetMapIndex
(*reflect.rtype).Elem
(*reflect.rtype).Key
+ tests:
pointer/testdata/mapreflect.go.
oracle/testdata/src/main/reflection.go.
Interface objects (T, V...) have been renamed "tagged objects".
Abstraction: we model reflect.Value similar to
interface{}---as a pointer that points only to tagged
objects---but a reflect.Value may also point to an "indirect
tagged object", one in which the payload V is of type *T not T.
These are required because reflect.Values can hold lvalues,
e.g. when derived via Field() or Elem(), though we won't use
them till we get to structs and pointers.
Solving: each reflection intrinsic defines a new constraint
and resolution rule. Because of the nature of reflection,
generalizing across types, the resolution rules dynamically
create additional complex constraints during solving, where
previously only simple (copy) constraints were created.
This requires some solver changes:
The work done before the main solver loop (to attach new
constraints to the graph) is now done before each iteration,
in processNewConstraints.
Its loop over constraints is broken into two passes:
the first handles base (addr-of) constraints,
the second handles simple and complex constraints.
constraint.init() has been inlined. The only behaviour that
varies across constraints is ptr()
Sadly this will pessimize presolver optimisations, when we get
there; such is the price of reflection.
Objects: reflection intrinsics create objects (i.e. cause
memory allocations) with no SSA operation. We will represent
them as the cgnode of the instrinsic (e.g. reflect.New), so we
extend Labels and node.data to represent objects as a product
(not sum) of ssa.Value and cgnode and pull this out into its
own type, struct object. This simplifies a number of
invariants and saves space. The ntObject flag is now
represented by obj!=nil; the other flags are moved into
object.
cgnodes are now always recorded in objects/Labels for which it
is appropriate (all but those for globals, constants and the
shared contours for functions).
Also:
- Prepopulate the flattenMemo cache to consider reflect.Value
a fake pointer, not a struct.
- Improve accessors and documentation on type Label.
- @conctypes assertions renamed @types (since dyn. types needn't be concrete).
- add oracle 'describe' test on an interface (missing, an oversight).
R=crawshaw
CC=golang-dev
https://golang.org/cl/13418048
2013-09-16 07:49:10 -06:00
|
|
|
func (a *analysis) genBuiltinCall(instr ssa.CallInstruction, cgn *cgnode) {
|
2013-08-22 10:27:55 -06:00
|
|
|
call := instr.Common()
|
|
|
|
switch call.Value.(*ssa.Builtin).Object().Name() {
|
|
|
|
case "append":
|
|
|
|
// Safe cast: append cannot appear in a go or defer statement.
|
go.tools/pointer: reflection, part 1: maps, and some core features.
Core:
reflect.TypeOf
reflect.ValueOf
reflect.Zero
reflect.Value.Interface
Maps:
(reflect.Value).MapIndex
(reflect.Value).MapKeys
(reflect.Value).SetMapIndex
(*reflect.rtype).Elem
(*reflect.rtype).Key
+ tests:
pointer/testdata/mapreflect.go.
oracle/testdata/src/main/reflection.go.
Interface objects (T, V...) have been renamed "tagged objects".
Abstraction: we model reflect.Value similar to
interface{}---as a pointer that points only to tagged
objects---but a reflect.Value may also point to an "indirect
tagged object", one in which the payload V is of type *T not T.
These are required because reflect.Values can hold lvalues,
e.g. when derived via Field() or Elem(), though we won't use
them till we get to structs and pointers.
Solving: each reflection intrinsic defines a new constraint
and resolution rule. Because of the nature of reflection,
generalizing across types, the resolution rules dynamically
create additional complex constraints during solving, where
previously only simple (copy) constraints were created.
This requires some solver changes:
The work done before the main solver loop (to attach new
constraints to the graph) is now done before each iteration,
in processNewConstraints.
Its loop over constraints is broken into two passes:
the first handles base (addr-of) constraints,
the second handles simple and complex constraints.
constraint.init() has been inlined. The only behaviour that
varies across constraints is ptr()
Sadly this will pessimize presolver optimisations, when we get
there; such is the price of reflection.
Objects: reflection intrinsics create objects (i.e. cause
memory allocations) with no SSA operation. We will represent
them as the cgnode of the instrinsic (e.g. reflect.New), so we
extend Labels and node.data to represent objects as a product
(not sum) of ssa.Value and cgnode and pull this out into its
own type, struct object. This simplifies a number of
invariants and saves space. The ntObject flag is now
represented by obj!=nil; the other flags are moved into
object.
cgnodes are now always recorded in objects/Labels for which it
is appropriate (all but those for globals, constants and the
shared contours for functions).
Also:
- Prepopulate the flattenMemo cache to consider reflect.Value
a fake pointer, not a struct.
- Improve accessors and documentation on type Label.
- @conctypes assertions renamed @types (since dyn. types needn't be concrete).
- add oracle 'describe' test on an interface (missing, an oversight).
R=crawshaw
CC=golang-dev
https://golang.org/cl/13418048
2013-09-16 07:49:10 -06:00
|
|
|
a.genAppend(instr.(*ssa.Call), cgn)
|
2013-08-22 10:27:55 -06:00
|
|
|
|
|
|
|
case "copy":
|
|
|
|
tElem := call.Args[0].Type().Underlying().(*types.Slice).Elem()
|
go.tools/pointer: strength reduction during constraint generation.
Motivation: simple constraints---copy and addr---are more
amenable to pre-solver optimizations (forthcoming) than
complex constraints: load, store, and all others.
In code such as the following:
t0 = new struct { x, y int }
t1 = &t0.y
t2 = *t1
there's no need for the full generality of a (complex)
load constraint for t2=*t1 since t1 can only point to t0.y.
All we need is a (simple) copy constraint t2 = (t0.y)
where (t0.y) is the object node label for that field.
For all "addressable" SSA instructions, we tabulate
whether their points-to set is necessarily a singleton. For
some (e.g. Alloc, MakeSlice, etc) this is always true by
design. For others (e.g. FieldAddr) it depends on their
operands.
We exploit this information when generating constraints:
all load-form and store-form constraints are reduced to copy
constraints if the pointer's PTS is a singleton.
Similarly all FieldAddr (y=&x.f) and IndexAddr (y=&x[0])
constraints are reduced to offset addition, for singleton
operands.
Here's the constraint mix when running on the oracle itself.
The total number of constraints is unchanged but the fraction
that are complex has gone down to 21% from 53%.
before after
--simple--
addr 20682 46949
copy 61454 91211
--complex--
offsetAddr 41621 15325
load 18769 12925
store 30758 6908
invoke 758 760
typeAssert 1688 1689
total 175832 175869
Also:
- Add Pointer.Context() for local variables,
since we now plumb cgnodes throughout. Nice.
- Refactor all load-form (load, receive, lookup) and
store-form (Store, send, MapUpdate) constraints to use
genLoad and genStore.
- Log counts of constraints by type.
- valNodes split into localval and globalval maps;
localval is purged after each function.
- analogous maps localobj[v] and globalobj[v] hold sole label
for pts(v), if singleton.
- fnObj map subsumed by globalobj.
- make{Function/Global/Constant} inlined into objectValue.
Much cleaner.
R=crawshaw
CC=golang-dev
https://golang.org/cl/13979043
2013-09-27 09:33:01 -06:00
|
|
|
a.copyElems(cgn, tElem, call.Args[0], call.Args[1])
|
2013-08-22 10:27:55 -06:00
|
|
|
|
|
|
|
case "panic":
|
|
|
|
a.copy(a.panicNode, a.valueNode(call.Args[0]), 1)
|
|
|
|
|
|
|
|
case "recover":
|
|
|
|
if v := instr.Value(); v != nil {
|
|
|
|
a.copy(a.valueNode(v), a.panicNode, 1)
|
|
|
|
}
|
|
|
|
|
|
|
|
case "print":
|
|
|
|
// Analytically print is a no-op, but it's a convenient hook
|
|
|
|
// for testing the pts of an expression, so we notify the client.
|
|
|
|
// Existing uses in Go core libraries are few and harmless.
|
|
|
|
if Print := a.config.Print; Print != nil {
|
|
|
|
// Due to context-sensitivity, we may encounter
|
|
|
|
// the same print() call in many contexts, so
|
|
|
|
// we merge them to a canonical node.
|
|
|
|
probe := a.probes[call]
|
|
|
|
t := call.Args[0].Type()
|
|
|
|
|
|
|
|
// First time? Create the canonical probe node.
|
|
|
|
if probe == 0 {
|
|
|
|
probe = a.addNodes(t, "print")
|
|
|
|
a.probes[call] = probe
|
go.tools/pointer: strength reduction during constraint generation.
Motivation: simple constraints---copy and addr---are more
amenable to pre-solver optimizations (forthcoming) than
complex constraints: load, store, and all others.
In code such as the following:
t0 = new struct { x, y int }
t1 = &t0.y
t2 = *t1
there's no need for the full generality of a (complex)
load constraint for t2=*t1 since t1 can only point to t0.y.
All we need is a (simple) copy constraint t2 = (t0.y)
where (t0.y) is the object node label for that field.
For all "addressable" SSA instructions, we tabulate
whether their points-to set is necessarily a singleton. For
some (e.g. Alloc, MakeSlice, etc) this is always true by
design. For others (e.g. FieldAddr) it depends on their
operands.
We exploit this information when generating constraints:
all load-form and store-form constraints are reduced to copy
constraints if the pointer's PTS is a singleton.
Similarly all FieldAddr (y=&x.f) and IndexAddr (y=&x[0])
constraints are reduced to offset addition, for singleton
operands.
Here's the constraint mix when running on the oracle itself.
The total number of constraints is unchanged but the fraction
that are complex has gone down to 21% from 53%.
before after
--simple--
addr 20682 46949
copy 61454 91211
--complex--
offsetAddr 41621 15325
load 18769 12925
store 30758 6908
invoke 758 760
typeAssert 1688 1689
total 175832 175869
Also:
- Add Pointer.Context() for local variables,
since we now plumb cgnodes throughout. Nice.
- Refactor all load-form (load, receive, lookup) and
store-form (Store, send, MapUpdate) constraints to use
genLoad and genStore.
- Log counts of constraints by type.
- valNodes split into localval and globalval maps;
localval is purged after each function.
- analogous maps localobj[v] and globalobj[v] hold sole label
for pts(v), if singleton.
- fnObj map subsumed by globalobj.
- make{Function/Global/Constant} inlined into objectValue.
Much cleaner.
R=crawshaw
CC=golang-dev
https://golang.org/cl/13979043
2013-09-27 09:33:01 -06:00
|
|
|
Print(call, ptr{a, nil, probe}) // notify client
|
2013-08-22 10:27:55 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
a.copy(probe, a.valueNode(call.Args[0]), a.sizeof(t))
|
|
|
|
}
|
|
|
|
|
|
|
|
default:
|
|
|
|
// No-ops: close len cap real imag complex println delete.
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// shouldUseContext defines the context-sensitivity policy. It
|
|
|
|
// returns true if we should analyse all static calls to fn anew.
|
|
|
|
//
|
|
|
|
// Obviously this interface rather limits how much freedom we have to
|
|
|
|
// choose a policy. The current policy, rather arbitrarily, is true
|
|
|
|
// for intrinsics and accessor methods (actually: short, single-block,
|
|
|
|
// call-free functions). This is just a starting point.
|
|
|
|
//
|
|
|
|
func (a *analysis) shouldUseContext(fn *ssa.Function) bool {
|
|
|
|
if a.findIntrinsic(fn) != nil {
|
|
|
|
return true // treat intrinsics context-sensitively
|
|
|
|
}
|
|
|
|
if len(fn.Blocks) != 1 {
|
|
|
|
return false // too expensive
|
|
|
|
}
|
|
|
|
blk := fn.Blocks[0]
|
|
|
|
if len(blk.Instrs) > 10 {
|
|
|
|
return false // too expensive
|
|
|
|
}
|
|
|
|
if fn.Synthetic != "" && (fn.Pkg == nil || fn != fn.Pkg.Func("init")) {
|
|
|
|
return true // treat synthetic wrappers context-sensitively
|
|
|
|
}
|
|
|
|
for _, instr := range blk.Instrs {
|
|
|
|
switch instr := instr.(type) {
|
|
|
|
case ssa.CallInstruction:
|
|
|
|
// Disallow function calls (except to built-ins)
|
|
|
|
// because of the danger of unbounded recursion.
|
|
|
|
if _, ok := instr.Common().Value.(*ssa.Builtin); !ok {
|
|
|
|
return false
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return true
|
|
|
|
}
|
|
|
|
|
2013-10-09 10:41:55 -06:00
|
|
|
// genStaticCall generates constraints for a statically dispatched function call.
|
|
|
|
func (a *analysis) genStaticCall(caller *cgnode, site *callsite, call *ssa.CallCommon, result nodeid) {
|
2013-08-22 10:27:55 -06:00
|
|
|
fn := call.StaticCallee()
|
2013-10-17 07:26:44 -06:00
|
|
|
|
|
|
|
// Special cases for inlined intrinsics.
|
|
|
|
switch fn {
|
|
|
|
case a.runtimeSetFinalizer:
|
|
|
|
// Inline SetFinalizer so the call appears direct.
|
|
|
|
site.targets = a.addOneNode(tInvalid, "SetFinalizer.targets", nil)
|
|
|
|
a.addConstraint(&runtimeSetFinalizerConstraint{
|
|
|
|
targets: site.targets,
|
|
|
|
x: a.valueNode(call.Args[0]),
|
|
|
|
f: a.valueNode(call.Args[1]),
|
|
|
|
})
|
|
|
|
return
|
2013-10-29 19:57:53 -06:00
|
|
|
|
|
|
|
case a.reflectValueCall:
|
|
|
|
// Inline (reflect.Value).Call so the call appears direct.
|
|
|
|
dotdotdot := false
|
|
|
|
ret := reflectCallImpl(a, caller, site, a.valueNode(call.Args[0]), a.valueNode(call.Args[1]), dotdotdot)
|
|
|
|
if result != 0 {
|
|
|
|
a.addressOf(result, ret)
|
|
|
|
}
|
|
|
|
return
|
2013-10-17 07:26:44 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
// Ascertain the context (contour/cgnode) for a particular call.
|
|
|
|
var obj nodeid
|
2013-08-22 10:27:55 -06:00
|
|
|
if a.shouldUseContext(fn) {
|
2013-10-09 10:41:55 -06:00
|
|
|
obj = a.makeFunctionObject(fn, site) // new contour
|
2013-08-22 10:27:55 -06:00
|
|
|
} else {
|
2013-10-09 10:41:55 -06:00
|
|
|
obj = a.objectNode(nil, fn) // shared contour
|
2013-08-22 10:27:55 -06:00
|
|
|
}
|
2013-10-17 07:26:44 -06:00
|
|
|
a.callEdge(site, obj)
|
2013-08-22 10:27:55 -06:00
|
|
|
|
|
|
|
sig := call.Signature()
|
|
|
|
|
|
|
|
// Copy receiver, if any.
|
|
|
|
params := a.funcParams(obj)
|
|
|
|
args := call.Args
|
|
|
|
if sig.Recv() != nil {
|
|
|
|
sz := a.sizeof(sig.Recv().Type())
|
|
|
|
a.copy(params, a.valueNode(args[0]), sz)
|
|
|
|
params += nodeid(sz)
|
|
|
|
args = args[1:]
|
|
|
|
}
|
|
|
|
|
|
|
|
// Copy actual parameters into formal params block.
|
|
|
|
// Must loop, since the actuals aren't contiguous.
|
|
|
|
for i, arg := range args {
|
|
|
|
sz := a.sizeof(sig.Params().At(i).Type())
|
|
|
|
a.copy(params, a.valueNode(arg), sz)
|
|
|
|
params += nodeid(sz)
|
|
|
|
}
|
|
|
|
|
|
|
|
// Copy formal results block to actual result.
|
|
|
|
if result != 0 {
|
|
|
|
a.copy(result, a.funcResults(obj), a.sizeof(sig.Results()))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// genDynamicCall generates constraints for a dynamic function call.
|
2013-10-09 10:41:55 -06:00
|
|
|
func (a *analysis) genDynamicCall(caller *cgnode, site *callsite, call *ssa.CallCommon, result nodeid) {
|
2013-10-17 07:26:44 -06:00
|
|
|
// pts(targets) will be the set of possible call targets.
|
|
|
|
site.targets = a.valueNode(call.Value)
|
2013-08-22 10:27:55 -06:00
|
|
|
|
|
|
|
// We add dynamic closure rules that store the arguments into,
|
|
|
|
// and load the results from, the P/R block of each function
|
2013-10-17 07:26:44 -06:00
|
|
|
// discovered in pts(targets).
|
2013-08-22 10:27:55 -06:00
|
|
|
|
2013-10-17 07:26:44 -06:00
|
|
|
sig := call.Signature()
|
2013-08-22 10:27:55 -06:00
|
|
|
var offset uint32 = 1 // P/R block starts at offset 1
|
|
|
|
for i, arg := range call.Args {
|
|
|
|
sz := a.sizeof(sig.Params().At(i).Type())
|
go.tools/pointer: strength reduction during constraint generation.
Motivation: simple constraints---copy and addr---are more
amenable to pre-solver optimizations (forthcoming) than
complex constraints: load, store, and all others.
In code such as the following:
t0 = new struct { x, y int }
t1 = &t0.y
t2 = *t1
there's no need for the full generality of a (complex)
load constraint for t2=*t1 since t1 can only point to t0.y.
All we need is a (simple) copy constraint t2 = (t0.y)
where (t0.y) is the object node label for that field.
For all "addressable" SSA instructions, we tabulate
whether their points-to set is necessarily a singleton. For
some (e.g. Alloc, MakeSlice, etc) this is always true by
design. For others (e.g. FieldAddr) it depends on their
operands.
We exploit this information when generating constraints:
all load-form and store-form constraints are reduced to copy
constraints if the pointer's PTS is a singleton.
Similarly all FieldAddr (y=&x.f) and IndexAddr (y=&x[0])
constraints are reduced to offset addition, for singleton
operands.
Here's the constraint mix when running on the oracle itself.
The total number of constraints is unchanged but the fraction
that are complex has gone down to 21% from 53%.
before after
--simple--
addr 20682 46949
copy 61454 91211
--complex--
offsetAddr 41621 15325
load 18769 12925
store 30758 6908
invoke 758 760
typeAssert 1688 1689
total 175832 175869
Also:
- Add Pointer.Context() for local variables,
since we now plumb cgnodes throughout. Nice.
- Refactor all load-form (load, receive, lookup) and
store-form (Store, send, MapUpdate) constraints to use
genLoad and genStore.
- Log counts of constraints by type.
- valNodes split into localval and globalval maps;
localval is purged after each function.
- analogous maps localobj[v] and globalobj[v] hold sole label
for pts(v), if singleton.
- fnObj map subsumed by globalobj.
- make{Function/Global/Constant} inlined into objectValue.
Much cleaner.
R=crawshaw
CC=golang-dev
https://golang.org/cl/13979043
2013-09-27 09:33:01 -06:00
|
|
|
a.genStore(caller, call.Value, a.valueNode(arg), offset, sz)
|
2013-08-22 10:27:55 -06:00
|
|
|
offset += sz
|
|
|
|
}
|
|
|
|
if result != 0 {
|
go.tools/pointer: strength reduction during constraint generation.
Motivation: simple constraints---copy and addr---are more
amenable to pre-solver optimizations (forthcoming) than
complex constraints: load, store, and all others.
In code such as the following:
t0 = new struct { x, y int }
t1 = &t0.y
t2 = *t1
there's no need for the full generality of a (complex)
load constraint for t2=*t1 since t1 can only point to t0.y.
All we need is a (simple) copy constraint t2 = (t0.y)
where (t0.y) is the object node label for that field.
For all "addressable" SSA instructions, we tabulate
whether their points-to set is necessarily a singleton. For
some (e.g. Alloc, MakeSlice, etc) this is always true by
design. For others (e.g. FieldAddr) it depends on their
operands.
We exploit this information when generating constraints:
all load-form and store-form constraints are reduced to copy
constraints if the pointer's PTS is a singleton.
Similarly all FieldAddr (y=&x.f) and IndexAddr (y=&x[0])
constraints are reduced to offset addition, for singleton
operands.
Here's the constraint mix when running on the oracle itself.
The total number of constraints is unchanged but the fraction
that are complex has gone down to 21% from 53%.
before after
--simple--
addr 20682 46949
copy 61454 91211
--complex--
offsetAddr 41621 15325
load 18769 12925
store 30758 6908
invoke 758 760
typeAssert 1688 1689
total 175832 175869
Also:
- Add Pointer.Context() for local variables,
since we now plumb cgnodes throughout. Nice.
- Refactor all load-form (load, receive, lookup) and
store-form (Store, send, MapUpdate) constraints to use
genLoad and genStore.
- Log counts of constraints by type.
- valNodes split into localval and globalval maps;
localval is purged after each function.
- analogous maps localobj[v] and globalobj[v] hold sole label
for pts(v), if singleton.
- fnObj map subsumed by globalobj.
- make{Function/Global/Constant} inlined into objectValue.
Much cleaner.
R=crawshaw
CC=golang-dev
https://golang.org/cl/13979043
2013-09-27 09:33:01 -06:00
|
|
|
a.genLoad(caller, result, call.Value, offset, a.sizeof(sig.Results()))
|
2013-08-22 10:27:55 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// genInvoke generates constraints for a dynamic method invocation.
func (a *analysis) genInvoke(caller *cgnode, site *callsite, call *ssa.CallCommon, result nodeid) {
	// Method calls on reflect.Type are special-cased: there is
	// effectively only one implementation (*reflect.rtype).
	if call.Value.Type() == a.reflectType {
		a.genInvokeReflectType(caller, site, call, result)
		return
	}

	sig := call.Signature()

	// Allocate a contiguous targets/params/results block for this call.
	// The layout (targets, then params, then results) is relied upon by
	// the invokeConstraint solver when it wires up discovered callees.
	block := a.nextNode()
	// pts(targets) will be the set of possible call targets
	site.targets = a.addOneNode(sig, "invoke.targets", nil)
	p := a.addNodes(sig.Params(), "invoke.params")
	r := a.addNodes(sig.Results(), "invoke.results")

	// Copy the actual parameters into the call's params block.
	// p advances by the flattened size of each parameter type.
	for i, n := 0, sig.Params().Len(); i < n; i++ {
		sz := a.sizeof(sig.Params().At(i).Type())
		a.copy(p, a.valueNode(call.Args[i]), sz)
		p += nodeid(sz)
	}
	// Copy the call's results block to the actual results.
	// result==0 means the call's value is unused.
	if result != 0 {
		a.copy(result, r, a.sizeof(sig.Results()))
	}

	// We add a dynamic invoke constraint that will add
	// edges from the caller's P/R block to the callee's
	// P/R block for each discovered call target.
	a.addConstraint(&invokeConstraint{call.Method, a.valueNode(call.Value), block})
}
|
|
|
|
|
2013-09-23 14:13:01 -06:00
|
|
|
// genInvokeReflectType is a specialization of genInvoke where the
// receiver type is a reflect.Type, under the assumption that there
// can be at most one implementation of this interface, *reflect.rtype.
//
// (Though this may appear to be an instance of a pattern---method
// calls on interfaces known to have exactly one implementation---in
// practice it occurs rarely, so we special case for reflect.Type.)
//
// In effect we treat this:
//	var rt reflect.Type = ...
//	rt.F()
// as this:
//	rt.(*reflect.rtype).F()
//
func (a *analysis) genInvokeReflectType(caller *cgnode, site *callsite, call *ssa.CallCommon, result nodeid) {
	// Unpack receiver into rtype: a type-assertion constraint
	// narrows the interface value down to *reflect.rtype.
	rtype := a.addOneNode(a.reflectRtypePtr, "rtype.recv", nil)
	recv := a.valueNode(call.Value)
	a.typeAssert(a.reflectRtypePtr, rtype, recv, true)

	// Look up the concrete method on *reflect.rtype.
	meth := a.reflectRtypePtr.MethodSet().Lookup(call.Method.Pkg(), call.Method.Name())
	fn := a.prog.Method(meth)

	obj := a.makeFunctionObject(fn, site) // new contour for this call
	a.callEdge(site, obj)

	// From now on, it's essentially a static call, but little is
	// gained by factoring together the code for both cases.

	sig := fn.Signature // concrete method
	targets := a.addOneNode(sig, "call.targets", nil)
	a.addressOf(targets, obj) // (a singleton)

	// Copy receiver: the narrowed rtype node becomes the first
	// formal parameter of the concrete method.
	params := a.funcParams(obj)
	a.copy(params, rtype, 1)
	params++

	// Copy actual parameters into formal params block.
	// Must loop, since the actuals aren't contiguous.
	for i, arg := range call.Args {
		sz := a.sizeof(sig.Params().At(i).Type())
		a.copy(params, a.valueNode(arg), sz)
		params += nodeid(sz)
	}

	// Copy formal results block to actual result.
	// result==0 means the call's value is unused.
	if result != 0 {
		a.copy(result, a.funcResults(obj), a.sizeof(sig.Results()))
	}
}
|
|
|
|
|
2013-08-22 10:27:55 -06:00
|
|
|
// genCall generates contraints for call instruction instr.
|
|
|
|
func (a *analysis) genCall(caller *cgnode, instr ssa.CallInstruction) {
|
|
|
|
call := instr.Common()
|
|
|
|
|
|
|
|
// Intrinsic implementations of built-in functions.
|
|
|
|
if _, ok := call.Value.(*ssa.Builtin); ok {
|
go.tools/pointer: reflection, part 1: maps, and some core features.
Core:
reflect.TypeOf
reflect.ValueOf
reflect.Zero
reflect.Value.Interface
Maps:
(reflect.Value).MapIndex
(reflect.Value).MapKeys
(reflect.Value).SetMapIndex
(*reflect.rtype).Elem
(*reflect.rtype).Key
+ tests:
pointer/testdata/mapreflect.go.
oracle/testdata/src/main/reflection.go.
Interface objects (T, V...) have been renamed "tagged objects".
Abstraction: we model reflect.Value similar to
interface{}---as a pointer that points only to tagged
objects---but a reflect.Value may also point to an "indirect
tagged object", one in which the payload V is of type *T not T.
These are required because reflect.Values can hold lvalues,
e.g. when derived via Field() or Elem(), though we won't use
them till we get to structs and pointers.
Solving: each reflection intrinsic defines a new constraint
and resolution rule. Because of the nature of reflection,
generalizing across types, the resolution rules dynamically
create additional complex constraints during solving, where
previously only simple (copy) constraints were created.
This requires some solver changes:
The work done before the main solver loop (to attach new
constraints to the graph) is now done before each iteration,
in processNewConstraints.
Its loop over constraints is broken into two passes:
the first handles base (addr-of) constraints,
the second handles simple and complex constraints.
constraint.init() has been inlined. The only behaviour that
varies across constraints is ptr()
Sadly this will pessimize presolver optimisations, when we get
there; such is the price of reflection.
Objects: reflection intrinsics create objects (i.e. cause
memory allocations) with no SSA operation. We will represent
them as the cgnode of the instrinsic (e.g. reflect.New), so we
extend Labels and node.data to represent objects as a product
(not sum) of ssa.Value and cgnode and pull this out into its
own type, struct object. This simplifies a number of
invariants and saves space. The ntObject flag is now
represented by obj!=nil; the other flags are moved into
object.
cgnodes are now always recorded in objects/Labels for which it
is appropriate (all but those for globals, constants and the
shared contours for functions).
Also:
- Prepopulate the flattenMemo cache to consider reflect.Value
a fake pointer, not a struct.
- Improve accessors and documentation on type Label.
- @conctypes assertions renamed @types (since dyn. types needn't be concrete).
- add oracle 'describe' test on an interface (missing, an oversight).
R=crawshaw
CC=golang-dev
https://golang.org/cl/13418048
2013-09-16 07:49:10 -06:00
|
|
|
a.genBuiltinCall(instr, caller)
|
2013-08-22 10:27:55 -06:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
var result nodeid
|
|
|
|
if v := instr.Value(); v != nil {
|
|
|
|
result = a.valueNode(v)
|
|
|
|
}
|
|
|
|
|
2013-10-09 10:41:55 -06:00
|
|
|
site := &callsite{instr: instr}
|
2013-10-17 07:26:44 -06:00
|
|
|
if call.StaticCallee() != nil {
|
2013-10-09 10:41:55 -06:00
|
|
|
a.genStaticCall(caller, site, call, result)
|
2013-10-17 07:26:44 -06:00
|
|
|
} else if call.IsInvoke() {
|
2013-10-09 10:41:55 -06:00
|
|
|
a.genInvoke(caller, site, call, result)
|
2013-10-17 07:26:44 -06:00
|
|
|
} else {
|
2013-10-09 10:41:55 -06:00
|
|
|
a.genDynamicCall(caller, site, call, result)
|
2013-08-22 10:27:55 -06:00
|
|
|
}
|
|
|
|
|
2013-09-25 15:17:42 -06:00
|
|
|
caller.sites = append(caller.sites, site)
|
2013-10-09 10:41:55 -06:00
|
|
|
|
2013-08-22 10:27:55 -06:00
|
|
|
if a.log != nil {
|
2013-09-25 15:17:42 -06:00
|
|
|
fmt.Fprintf(a.log, "\t%s to targets %s from %s\n", site, site.targets, caller)
|
2013-08-22 10:27:55 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
go.tools/pointer: strength reduction during constraint generation.
Motivation: simple constraints---copy and addr---are more
amenable to pre-solver optimizations (forthcoming) than
complex constraints: load, store, and all others.
In code such as the following:
t0 = new struct { x, y int }
t1 = &t0.y
t2 = *t1
there's no need for the full generality of a (complex)
load constraint for t2=*t1 since t1 can only point to t0.y.
All we need is a (simple) copy constraint t2 = (t0.y)
where (t0.y) is the object node label for that field.
For all "addressable" SSA instructions, we tabulate
whether their points-to set is necessarily a singleton. For
some (e.g. Alloc, MakeSlice, etc) this is always true by
design. For others (e.g. FieldAddr) it depends on their
operands.
We exploit this information when generating constraints:
all load-form and store-form constraints are reduced to copy
constraints if the pointer's PTS is a singleton.
Similarly all FieldAddr (y=&x.f) and IndexAddr (y=&x[0])
constraints are reduced to offset addition, for singleton
operands.
Here's the constraint mix when running on the oracle itself.
The total number of constraints is unchanged but the fraction
that are complex has gone down to 21% from 53%.
before after
--simple--
addr 20682 46949
copy 61454 91211
--complex--
offsetAddr 41621 15325
load 18769 12925
store 30758 6908
invoke 758 760
typeAssert 1688 1689
total 175832 175869
Also:
- Add Pointer.Context() for local variables,
since we now plumb cgnodes throughout. Nice.
- Refactor all load-form (load, receive, lookup) and
store-form (Store, send, MapUpdate) constraints to use
genLoad and genStore.
- Log counts of constraints by type.
- valNodes split into localval and globalval maps;
localval is purged after each function.
- analogous maps localobj[v] and globalobj[v] hold sole label
for pts(v), if singleton.
- fnObj map subsumed by globalobj.
- make{Function/Global/Constant} inlined into objectValue.
Much cleaner.
R=crawshaw
CC=golang-dev
https://golang.org/cl/13979043
2013-09-27 09:33:01 -06:00
|
|
|
// objectNode returns the object to which v points, if known.
// In other words, if the points-to set of v is a singleton, it
// returns the sole label, zero otherwise.
//
// We exploit this information to make the generated constraints less
// dynamic.  For example, a complex load constraint can be replaced by
// a simple copy constraint when the sole destination is known a priori.
//
// Some SSA instructions always have singletons points-to sets:
//	Alloc, Function, Global, MakeChan, MakeClosure, MakeInterface, MakeMap, MakeSlice.
// Others may be singletons depending on their operands:
//	Capture, Const, Convert, FieldAddr, IndexAddr, Slice.
//
// Idempotent.  Objects are created as needed, possibly via recursion
// down the SSA value graph, e.g IndexAddr(FieldAddr(Alloc))).
//
func (a *analysis) objectNode(cgn *cgnode, v ssa.Value) nodeid {
	if cgn == nil {
		// Global object: memoized in a.globalobj, keyed by value.
		obj, ok := a.globalobj[v]
		if !ok {
			switch v := v.(type) {
			case *ssa.Global:
				obj = a.nextNode()
				a.addNodes(mustDeref(v.Type()), "global")
				a.endObject(obj, nil, v)

			case *ssa.Function:
				// nil cgnode: the shared contour for this function.
				obj = a.makeFunctionObject(v, nil)

			case *ssa.Const:
				if t, ok := v.Type().Underlying().(*types.Slice); ok && !v.IsNil() {
					// Non-nil []byte or []rune constant.
					obj = a.nextNode()
					a.addNodes(sliceToArray(t), "array in slice constant")
					a.endObject(obj, nil, v)
				}

			case *ssa.Capture:
				// For now, Captures have the same cardinality as globals.
				// TODO(adonovan): treat captures context-sensitively.
			}
			// Other kinds leave obj==0: no singleton is known.

			if a.log != nil {
				fmt.Fprintf(a.log, "\tglobalobj[%s] = n%d\n", v, obj)
			}
			// Cache the result (even zero) so the work is done once.
			a.globalobj[v] = obj
		}
		return obj
	}

	// Local object: memoized per function in a.localobj
	// (purged after each function is generated).
	obj, ok := a.localobj[v]
	if !ok {
		switch v := v.(type) {
		case *ssa.Alloc:
			obj = a.nextNode()
			a.addNodes(mustDeref(v.Type()), "alloc")
			a.endObject(obj, cgn, v)

		case *ssa.MakeSlice:
			obj = a.nextNode()
			a.addNodes(sliceToArray(v.Type()), "makeslice")
			a.endObject(obj, cgn, v)

		case *ssa.MakeChan:
			obj = a.nextNode()
			a.addNodes(v.Type().Underlying().(*types.Chan).Elem(), "makechan")
			a.endObject(obj, cgn, v)

		case *ssa.MakeMap:
			obj = a.nextNode()
			tmap := v.Type().Underlying().(*types.Map)
			a.addNodes(tmap.Key(), "makemap.key")
			a.addNodes(tmap.Elem(), "makemap.value")
			a.endObject(obj, cgn, v)

		case *ssa.MakeInterface:
			tConc := v.X.Type()
			obj = a.makeTagged(tConc, cgn, v)

			// Copy the value into it, if nontrivial.
			// (obj+1 is the payload of the tagged object.)
			if x := a.valueNode(v.X); x != 0 {
				a.copy(obj+1, x, a.sizeof(tConc))
			}

		case *ssa.FieldAddr:
			// Singleton iff the operand's object is known:
			// add the field's flattened offset.
			if xobj := a.objectNode(cgn, v.X); xobj != 0 {
				obj = xobj + nodeid(a.offsetOf(mustDeref(v.X.Type()), v.Field))
			}

		case *ssa.IndexAddr:
			// All elements of an array object share one node
			// at offset 1 from the object label.
			if xobj := a.objectNode(cgn, v.X); xobj != 0 {
				obj = xobj + 1
			}

		case *ssa.Slice:
			// Slicing does not change the underlying object.
			obj = a.objectNode(cgn, v.X)

		case *ssa.Convert:
			// TODO(adonovan): opt: handle these cases too:
			// - unsafe.Pointer->*T conversion acts like Alloc
			// - string->[]byte/[]rune conversion acts like MakeSlice
		}

		if a.log != nil {
			fmt.Fprintf(a.log, "\tlocalobj[%s] = n%d\n", v.Name(), obj)
		}
		// Cache the result (even zero) so the work is done once.
		a.localobj[v] = obj
	}
	return obj
}
|
|
|
|
|
|
|
|
// genLoad generates constraints for result = *(ptr + val).
|
|
|
|
func (a *analysis) genLoad(cgn *cgnode, result nodeid, ptr ssa.Value, offset, sizeof uint32) {
|
|
|
|
if obj := a.objectNode(cgn, ptr); obj != 0 {
|
|
|
|
// Pre-apply loadConstraint.solve().
|
|
|
|
a.copy(result, obj+nodeid(offset), sizeof)
|
|
|
|
} else {
|
|
|
|
a.load(result, a.valueNode(ptr), offset, sizeof)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// genOffsetAddr generates constraints for a 'v=ptr.field' (FieldAddr)
|
|
|
|
// or 'v=ptr[*]' (IndexAddr) instruction v.
|
|
|
|
func (a *analysis) genOffsetAddr(cgn *cgnode, v ssa.Value, ptr nodeid, offset uint32) {
|
|
|
|
dst := a.valueNode(v)
|
|
|
|
if obj := a.objectNode(cgn, v); obj != 0 {
|
|
|
|
// Pre-apply offsetAddrConstraint.solve().
|
|
|
|
a.addressOf(dst, obj)
|
|
|
|
} else {
|
|
|
|
a.offsetAddr(dst, ptr, offset)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// genStore generates constraints for *(ptr + offset) = val.
|
|
|
|
func (a *analysis) genStore(cgn *cgnode, ptr ssa.Value, val nodeid, offset, sizeof uint32) {
|
|
|
|
if obj := a.objectNode(cgn, ptr); obj != 0 {
|
|
|
|
// Pre-apply storeConstraint.solve().
|
|
|
|
a.copy(obj+nodeid(offset), val, sizeof)
|
|
|
|
} else {
|
|
|
|
a.store(a.valueNode(ptr), val, offset, sizeof)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2013-08-22 10:27:55 -06:00
|
|
|
// genInstr generates contraints for instruction instr in context cgn.
|
|
|
|
func (a *analysis) genInstr(cgn *cgnode, instr ssa.Instruction) {
|
|
|
|
if a.log != nil {
|
|
|
|
var prefix string
|
|
|
|
if val, ok := instr.(ssa.Value); ok {
|
|
|
|
prefix = val.Name() + " = "
|
|
|
|
}
|
|
|
|
fmt.Fprintf(a.log, "; %s%s\n", prefix, instr)
|
|
|
|
}
|
|
|
|
|
|
|
|
switch instr := instr.(type) {
|
|
|
|
case *ssa.DebugRef:
|
|
|
|
// no-op.
|
|
|
|
|
|
|
|
case *ssa.UnOp:
|
|
|
|
switch instr.Op {
|
|
|
|
case token.ARROW: // <-x
|
|
|
|
// We can ignore instr.CommaOk because the node we're
|
go.tools/pointer: strength reduction during constraint generation.
Motivation: simple constraints---copy and addr---are more
amenable to pre-solver optimizations (forthcoming) than
complex constraints: load, store, and all others.
In code such as the following:
t0 = new struct { x, y int }
t1 = &t0.y
t2 = *t1
there's no need for the full generality of a (complex)
load constraint for t2=*t1 since t1 can only point to t0.y.
All we need is a (simple) copy constraint t2 = (t0.y)
where (t0.y) is the object node label for that field.
For all "addressable" SSA instructions, we tabulate
whether their points-to set is necessarily a singleton. For
some (e.g. Alloc, MakeSlice, etc) this is always true by
design. For others (e.g. FieldAddr) it depends on their
operands.
We exploit this information when generating constraints:
all load-form and store-form constraints are reduced to copy
constraints if the pointer's PTS is a singleton.
Similarly all FieldAddr (y=&x.f) and IndexAddr (y=&x[0])
constraints are reduced to offset addition, for singleton
operands.
Here's the constraint mix when running on the oracle itself.
The total number of constraints is unchanged but the fraction
that are complex has gone down to 21% from 53%.
before after
--simple--
addr 20682 46949
copy 61454 91211
--complex--
offsetAddr 41621 15325
load 18769 12925
store 30758 6908
invoke 758 760
typeAssert 1688 1689
total 175832 175869
Also:
- Add Pointer.Context() for local variables,
since we now plumb cgnodes throughout. Nice.
- Refactor all load-form (load, receive, lookup) and
store-form (Store, send, MapUpdate) constraints to use
genLoad and genStore.
- Log counts of constraints by type.
- valNodes split into localval and globalval maps;
localval is purged after each function.
- analogous maps localobj[v] and globalobj[v] hold sole label
for pts(v), if singleton.
- fnObj map subsumed by globalobj.
- make{Function/Global/Constant} inlined into objectValue.
Much cleaner.
R=crawshaw
CC=golang-dev
https://golang.org/cl/13979043
2013-09-27 09:33:01 -06:00
|
|
|
// altering is always at zero offset relative to instr
|
|
|
|
a.genLoad(cgn, a.valueNode(instr), instr.X, 0, a.sizeof(instr.Type()))
|
2013-08-22 10:27:55 -06:00
|
|
|
|
|
|
|
case token.MUL: // *x
|
go.tools/pointer: strength reduction during constraint generation.
Motivation: simple constraints---copy and addr---are more
amenable to pre-solver optimizations (forthcoming) than
complex constraints: load, store, and all others.
In code such as the following:
t0 = new struct { x, y int }
t1 = &t0.y
t2 = *t1
there's no need for the full generality of a (complex)
load constraint for t2=*t1 since t1 can only point to t0.y.
All we need is a (simple) copy constraint t2 = (t0.y)
where (t0.y) is the object node label for that field.
For all "addressable" SSA instructions, we tabulate
whether their points-to set is necessarily a singleton. For
some (e.g. Alloc, MakeSlice, etc) this is always true by
design. For others (e.g. FieldAddr) it depends on their
operands.
We exploit this information when generating constraints:
all load-form and store-form constraints are reduced to copy
constraints if the pointer's PTS is a singleton.
Similarly all FieldAddr (y=&x.f) and IndexAddr (y=&x[0])
constraints are reduced to offset addition, for singleton
operands.
Here's the constraint mix when running on the oracle itself.
The total number of constraints is unchanged but the fraction
that are complex has gone down to 21% from 53%.
before after
--simple--
addr 20682 46949
copy 61454 91211
--complex--
offsetAddr 41621 15325
load 18769 12925
store 30758 6908
invoke 758 760
typeAssert 1688 1689
total 175832 175869
Also:
- Add Pointer.Context() for local variables,
since we now plumb cgnodes throughout. Nice.
- Refactor all load-form (load, receive, lookup) and
store-form (Store, send, MapUpdate) constraints to use
genLoad and genStore.
- Log counts of constraints by type.
- valNodes split into localval and globalval maps;
localval is purged after each function.
- analogous maps localobj[v] and globalobj[v] hold sole label
for pts(v), if singleton.
- fnObj map subsumed by globalobj.
- make{Function/Global/Constant} inlined into objectValue.
Much cleaner.
R=crawshaw
CC=golang-dev
https://golang.org/cl/13979043
2013-09-27 09:33:01 -06:00
|
|
|
a.genLoad(cgn, a.valueNode(instr), instr.X, 0, a.sizeof(instr.Type()))
|
2013-08-22 10:27:55 -06:00
|
|
|
|
|
|
|
default:
|
|
|
|
// NOT, SUB, XOR: no-op.
|
|
|
|
}
|
|
|
|
|
|
|
|
case *ssa.BinOp:
|
|
|
|
// All no-ops.
|
|
|
|
|
|
|
|
case ssa.CallInstruction: // *ssa.Call, *ssa.Go, *ssa.Defer
|
|
|
|
a.genCall(cgn, instr)
|
|
|
|
|
|
|
|
case *ssa.ChangeType:
|
|
|
|
a.copy(a.valueNode(instr), a.valueNode(instr.X), 1)
|
|
|
|
|
|
|
|
case *ssa.Convert:
|
go.tools/pointer: reflection, part 1: maps, and some core features.
Core:
reflect.TypeOf
reflect.ValueOf
reflect.Zero
reflect.Value.Interface
Maps:
(reflect.Value).MapIndex
(reflect.Value).MapKeys
(reflect.Value).SetMapIndex
(*reflect.rtype).Elem
(*reflect.rtype).Key
+ tests:
pointer/testdata/mapreflect.go.
oracle/testdata/src/main/reflection.go.
Interface objects (T, V...) have been renamed "tagged objects".
Abstraction: we model reflect.Value similar to
interface{}---as a pointer that points only to tagged
objects---but a reflect.Value may also point to an "indirect
tagged object", one in which the payload V is of type *T not T.
These are required because reflect.Values can hold lvalues,
e.g. when derived via Field() or Elem(), though we won't use
them till we get to structs and pointers.
Solving: each reflection intrinsic defines a new constraint
and resolution rule. Because of the nature of reflection,
generalizing across types, the resolution rules dynamically
create additional complex constraints during solving, where
previously only simple (copy) constraints were created.
This requires some solver changes:
The work done before the main solver loop (to attach new
constraints to the graph) is now done before each iteration,
in processNewConstraints.
Its loop over constraints is broken into two passes:
the first handles base (addr-of) constraints,
the second handles simple and complex constraints.
constraint.init() has been inlined. The only behaviour that
varies across constraints is ptr()
Sadly this will pessimize presolver optimisations, when we get
there; such is the price of reflection.
Objects: reflection intrinsics create objects (i.e. cause
memory allocations) with no SSA operation. We will represent
them as the cgnode of the intrinsic (e.g. reflect.New), so we
extend Labels and node.data to represent objects as a product
(not sum) of ssa.Value and cgnode and pull this out into its
own type, struct object. This simplifies a number of
invariants and saves space. The ntObject flag is now
represented by obj!=nil; the other flags are moved into
object.
cgnodes are now always recorded in objects/Labels for which it
is appropriate (all but those for globals, constants and the
shared contours for functions).
Also:
- Prepopulate the flattenMemo cache to consider reflect.Value
a fake pointer, not a struct.
- Improve accessors and documentation on type Label.
- @conctypes assertions renamed @types (since dyn. types needn't be concrete).
- add oracle 'describe' test on an interface (missing, an oversight).
R=crawshaw
CC=golang-dev
https://golang.org/cl/13418048
2013-09-16 07:49:10 -06:00
|
|
|
a.genConv(instr, cgn)
|
2013-08-22 10:27:55 -06:00
|
|
|
|
|
|
|
case *ssa.Extract:
|
|
|
|
a.copy(a.valueNode(instr),
|
|
|
|
a.valueOffsetNode(instr.Tuple, instr.Index),
|
|
|
|
a.sizeof(instr.Type()))
|
|
|
|
|
|
|
|
case *ssa.FieldAddr:
|
go.tools/pointer: strength reduction during constraint generation.
Motivation: simple constraints---copy and addr---are more
amenable to pre-solver optimizations (forthcoming) than
complex constraints: load, store, and all others.
In code such as the following:
t0 = new struct { x, y int }
t1 = &t0.y
t2 = *t1
there's no need for the full generality of a (complex)
load constraint for t2=*t1 since t1 can only point to t0.y.
All we need is a (simple) copy constraint t2 = (t0.y)
where (t0.y) is the object node label for that field.
For all "addressable" SSA instructions, we tabulate
whether their points-to set is necessarily a singleton. For
some (e.g. Alloc, MakeSlice, etc) this is always true by
design. For others (e.g. FieldAddr) it depends on their
operands.
We exploit this information when generating constraints:
all load-form and store-form constraints are reduced to copy
constraints if the pointer's PTS is a singleton.
Similarly all FieldAddr (y=&x.f) and IndexAddr (y=&x[0])
constraints are reduced to offset addition, for singleton
operands.
Here's the constraint mix when running on the oracle itself.
The total number of constraints is unchanged but the fraction
that are complex has gone down to 21% from 53%.
before after
--simple--
addr 20682 46949
copy 61454 91211
--complex--
offsetAddr 41621 15325
load 18769 12925
store 30758 6908
invoke 758 760
typeAssert 1688 1689
total 175832 175869
Also:
- Add Pointer.Context() for local variables,
since we now plumb cgnodes throughout. Nice.
- Refactor all load-form (load, receive, lookup) and
store-form (Store, send, MapUpdate) constraints to use
genLoad and genStore.
- Log counts of constraints by type.
- valNodes split into localval and globalval maps;
localval is purged after each function.
- analogous maps localobj[v] and globalobj[v] hold sole label
for pts(v), if singleton.
- fnObj map subsumed by globalobj.
- make{Function/Global/Constant} inlined into objectValue.
Much cleaner.
R=crawshaw
CC=golang-dev
https://golang.org/cl/13979043
2013-09-27 09:33:01 -06:00
|
|
|
a.genOffsetAddr(cgn, instr, a.valueNode(instr.X),
|
2013-08-22 10:27:55 -06:00
|
|
|
a.offsetOf(mustDeref(instr.X.Type()), instr.Field))
|
|
|
|
|
|
|
|
case *ssa.IndexAddr:
|
go.tools/pointer: strength reduction during constraint generation.
Motivation: simple constraints---copy and addr---are more
amenable to pre-solver optimizations (forthcoming) than
complex constraints: load, store, and all others.
In code such as the following:
t0 = new struct { x, y int }
t1 = &t0.y
t2 = *t1
there's no need for the full generality of a (complex)
load constraint for t2=*t1 since t1 can only point to t0.y.
All we need is a (simple) copy constraint t2 = (t0.y)
where (t0.y) is the object node label for that field.
For all "addressable" SSA instructions, we tabulate
whether their points-to set is necessarily a singleton. For
some (e.g. Alloc, MakeSlice, etc) this is always true by
design. For others (e.g. FieldAddr) it depends on their
operands.
We exploit this information when generating constraints:
all load-form and store-form constraints are reduced to copy
constraints if the pointer's PTS is a singleton.
Similarly all FieldAddr (y=&x.f) and IndexAddr (y=&x[0])
constraints are reduced to offset addition, for singleton
operands.
Here's the constraint mix when running on the oracle itself.
The total number of constraints is unchanged but the fraction
that are complex has gone down to 21% from 53%.
before after
--simple--
addr 20682 46949
copy 61454 91211
--complex--
offsetAddr 41621 15325
load 18769 12925
store 30758 6908
invoke 758 760
typeAssert 1688 1689
total 175832 175869
Also:
- Add Pointer.Context() for local variables,
since we now plumb cgnodes throughout. Nice.
- Refactor all load-form (load, receive, lookup) and
store-form (Store, send, MapUpdate) constraints to use
genLoad and genStore.
- Log counts of constraints by type.
- valNodes split into localval and globalval maps;
localval is purged after each function.
- analogous maps localobj[v] and globalobj[v] hold sole label
for pts(v), if singleton.
- fnObj map subsumed by globalobj.
- make{Function/Global/Constant} inlined into objectValue.
Much cleaner.
R=crawshaw
CC=golang-dev
https://golang.org/cl/13979043
2013-09-27 09:33:01 -06:00
|
|
|
a.genOffsetAddr(cgn, instr, a.valueNode(instr.X), 1)
|
2013-08-22 10:27:55 -06:00
|
|
|
|
|
|
|
case *ssa.Field:
|
|
|
|
a.copy(a.valueNode(instr),
|
|
|
|
a.valueOffsetNode(instr.X, instr.Field),
|
|
|
|
a.sizeof(instr.Type()))
|
|
|
|
|
|
|
|
case *ssa.Index:
|
|
|
|
a.copy(a.valueNode(instr), 1+a.valueNode(instr.X), a.sizeof(instr.Type()))
|
|
|
|
|
|
|
|
case *ssa.Select:
|
|
|
|
recv := a.valueOffsetNode(instr, 2) // instr : (index, recvOk, recv0, ... recv_n-1)
|
|
|
|
for _, st := range instr.States {
|
|
|
|
elemSize := a.sizeof(st.Chan.Type().Underlying().(*types.Chan).Elem())
|
|
|
|
switch st.Dir {
|
|
|
|
case ast.RECV:
|
go.tools/pointer: strength reduction during constraint generation.
Motivation: simple constraints---copy and addr---are more
amenable to pre-solver optimizations (forthcoming) than
complex constraints: load, store, and all others.
In code such as the following:
t0 = new struct { x, y int }
t1 = &t0.y
t2 = *t1
there's no need for the full generality of a (complex)
load constraint for t2=*t1 since t1 can only point to t0.y.
All we need is a (simple) copy constraint t2 = (t0.y)
where (t0.y) is the object node label for that field.
For all "addressable" SSA instructions, we tabulate
whether their points-to set is necessarily a singleton. For
some (e.g. Alloc, MakeSlice, etc) this is always true by
design. For others (e.g. FieldAddr) it depends on their
operands.
We exploit this information when generating constraints:
all load-form and store-form constraints are reduced to copy
constraints if the pointer's PTS is a singleton.
Similarly all FieldAddr (y=&x.f) and IndexAddr (y=&x[0])
constraints are reduced to offset addition, for singleton
operands.
Here's the constraint mix when running on the oracle itself.
The total number of constraints is unchanged but the fraction
that are complex has gone down to 21% from 53%.
before after
--simple--
addr 20682 46949
copy 61454 91211
--complex--
offsetAddr 41621 15325
load 18769 12925
store 30758 6908
invoke 758 760
typeAssert 1688 1689
total 175832 175869
Also:
- Add Pointer.Context() for local variables,
since we now plumb cgnodes throughout. Nice.
- Refactor all load-form (load, receive, lookup) and
store-form (Store, send, MapUpdate) constraints to use
genLoad and genStore.
- Log counts of constraints by type.
- valNodes split into localval and globalval maps;
localval is purged after each function.
- analogous maps localobj[v] and globalobj[v] hold sole label
for pts(v), if singleton.
- fnObj map subsumed by globalobj.
- make{Function/Global/Constant} inlined into objectValue.
Much cleaner.
R=crawshaw
CC=golang-dev
https://golang.org/cl/13979043
2013-09-27 09:33:01 -06:00
|
|
|
a.genLoad(cgn, recv, st.Chan, 0, elemSize)
|
2013-08-22 10:27:55 -06:00
|
|
|
recv++
|
|
|
|
|
|
|
|
case ast.SEND:
|
go.tools/pointer: strength reduction during constraint generation.
Motivation: simple constraints---copy and addr---are more
amenable to pre-solver optimizations (forthcoming) than
complex constraints: load, store, and all others.
In code such as the following:
t0 = new struct { x, y int }
t1 = &t0.y
t2 = *t1
there's no need for the full generality of a (complex)
load constraint for t2=*t1 since t1 can only point to t0.y.
All we need is a (simple) copy constraint t2 = (t0.y)
where (t0.y) is the object node label for that field.
For all "addressable" SSA instructions, we tabulate
whether their points-to set is necessarily a singleton. For
some (e.g. Alloc, MakeSlice, etc) this is always true by
design. For others (e.g. FieldAddr) it depends on their
operands.
We exploit this information when generating constraints:
all load-form and store-form constraints are reduced to copy
constraints if the pointer's PTS is a singleton.
Similarly all FieldAddr (y=&x.f) and IndexAddr (y=&x[0])
constraints are reduced to offset addition, for singleton
operands.
Here's the constraint mix when running on the oracle itself.
The total number of constraints is unchanged but the fraction
that are complex has gone down to 21% from 53%.
before after
--simple--
addr 20682 46949
copy 61454 91211
--complex--
offsetAddr 41621 15325
load 18769 12925
store 30758 6908
invoke 758 760
typeAssert 1688 1689
total 175832 175869
Also:
- Add Pointer.Context() for local variables,
since we now plumb cgnodes throughout. Nice.
- Refactor all load-form (load, receive, lookup) and
store-form (Store, send, MapUpdate) constraints to use
genLoad and genStore.
- Log counts of constraints by type.
- valNodes split into localval and globalval maps;
localval is purged after each function.
- analogous maps localobj[v] and globalobj[v] hold sole label
for pts(v), if singleton.
- fnObj map subsumed by globalobj.
- make{Function/Global/Constant} inlined into objectValue.
Much cleaner.
R=crawshaw
CC=golang-dev
https://golang.org/cl/13979043
2013-09-27 09:33:01 -06:00
|
|
|
a.genStore(cgn, st.Chan, a.valueNode(st.Send), 0, elemSize)
|
2013-08-22 10:27:55 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2013-10-08 10:31:39 -06:00
|
|
|
case *ssa.Return:
|
2013-08-22 10:27:55 -06:00
|
|
|
results := a.funcResults(cgn.obj)
|
|
|
|
for _, r := range instr.Results {
|
|
|
|
sz := a.sizeof(r.Type())
|
|
|
|
a.copy(results, a.valueNode(r), sz)
|
|
|
|
results += nodeid(sz)
|
|
|
|
}
|
|
|
|
|
|
|
|
case *ssa.Send:
|
go.tools/pointer: strength reduction during constraint generation.
Motivation: simple constraints---copy and addr---are more
amenable to pre-solver optimizations (forthcoming) than
complex constraints: load, store, and all others.
In code such as the following:
t0 = new struct { x, y int }
t1 = &t0.y
t2 = *t1
there's no need for the full generality of a (complex)
load constraint for t2=*t1 since t1 can only point to t0.y.
All we need is a (simple) copy constraint t2 = (t0.y)
where (t0.y) is the object node label for that field.
For all "addressable" SSA instructions, we tabulate
whether their points-to set is necessarily a singleton. For
some (e.g. Alloc, MakeSlice, etc) this is always true by
design. For others (e.g. FieldAddr) it depends on their
operands.
We exploit this information when generating constraints:
all load-form and store-form constraints are reduced to copy
constraints if the pointer's PTS is a singleton.
Similarly all FieldAddr (y=&x.f) and IndexAddr (y=&x[0])
constraints are reduced to offset addition, for singleton
operands.
Here's the constraint mix when running on the oracle itself.
The total number of constraints is unchanged but the fraction
that are complex has gone down to 21% from 53%.
before after
--simple--
addr 20682 46949
copy 61454 91211
--complex--
offsetAddr 41621 15325
load 18769 12925
store 30758 6908
invoke 758 760
typeAssert 1688 1689
total 175832 175869
Also:
- Add Pointer.Context() for local variables,
since we now plumb cgnodes throughout. Nice.
- Refactor all load-form (load, receive, lookup) and
store-form (Store, send, MapUpdate) constraints to use
genLoad and genStore.
- Log counts of constraints by type.
- valNodes split into localval and globalval maps;
localval is purged after each function.
- analogous maps localobj[v] and globalobj[v] hold sole label
for pts(v), if singleton.
- fnObj map subsumed by globalobj.
- make{Function/Global/Constant} inlined into objectValue.
Much cleaner.
R=crawshaw
CC=golang-dev
https://golang.org/cl/13979043
2013-09-27 09:33:01 -06:00
|
|
|
a.genStore(cgn, instr.Chan, a.valueNode(instr.X), 0, a.sizeof(instr.X.Type()))
|
2013-08-22 10:27:55 -06:00
|
|
|
|
|
|
|
case *ssa.Store:
|
go.tools/pointer: strength reduction during constraint generation.
Motivation: simple constraints---copy and addr---are more
amenable to pre-solver optimizations (forthcoming) than
complex constraints: load, store, and all others.
In code such as the following:
t0 = new struct { x, y int }
t1 = &t0.y
t2 = *t1
there's no need for the full generality of a (complex)
load constraint for t2=*t1 since t1 can only point to t0.y.
All we need is a (simple) copy constraint t2 = (t0.y)
where (t0.y) is the object node label for that field.
For all "addressable" SSA instructions, we tabulate
whether their points-to set is necessarily a singleton. For
some (e.g. Alloc, MakeSlice, etc) this is always true by
design. For others (e.g. FieldAddr) it depends on their
operands.
We exploit this information when generating constraints:
all load-form and store-form constraints are reduced to copy
constraints if the pointer's PTS is a singleton.
Similarly all FieldAddr (y=&x.f) and IndexAddr (y=&x[0])
constraints are reduced to offset addition, for singleton
operands.
Here's the constraint mix when running on the oracle itself.
The total number of constraints is unchanged but the fraction
that are complex has gone down to 21% from 53%.
before after
--simple--
addr 20682 46949
copy 61454 91211
--complex--
offsetAddr 41621 15325
load 18769 12925
store 30758 6908
invoke 758 760
typeAssert 1688 1689
total 175832 175869
Also:
- Add Pointer.Context() for local variables,
since we now plumb cgnodes throughout. Nice.
- Refactor all load-form (load, receive, lookup) and
store-form (Store, send, MapUpdate) constraints to use
genLoad and genStore.
- Log counts of constraints by type.
- valNodes split into localval and globalval maps;
localval is purged after each function.
- analogous maps localobj[v] and globalobj[v] hold sole label
for pts(v), if singleton.
- fnObj map subsumed by globalobj.
- make{Function/Global/Constant} inlined into objectValue.
Much cleaner.
R=crawshaw
CC=golang-dev
https://golang.org/cl/13979043
2013-09-27 09:33:01 -06:00
|
|
|
a.genStore(cgn, instr.Addr, a.valueNode(instr.Val), 0, a.sizeof(instr.Val.Type()))
|
go.tools/pointer: reflection, part 1: maps, and some core features.
Core:
reflect.TypeOf
reflect.ValueOf
reflect.Zero
reflect.Value.Interface
Maps:
(reflect.Value).MapIndex
(reflect.Value).MapKeys
(reflect.Value).SetMapIndex
(*reflect.rtype).Elem
(*reflect.rtype).Key
+ tests:
pointer/testdata/mapreflect.go.
oracle/testdata/src/main/reflection.go.
Interface objects (T, V...) have been renamed "tagged objects".
Abstraction: we model reflect.Value similar to
interface{}---as a pointer that points only to tagged
objects---but a reflect.Value may also point to an "indirect
tagged object", one in which the payload V is of type *T not T.
These are required because reflect.Values can hold lvalues,
e.g. when derived via Field() or Elem(), though we won't use
them till we get to structs and pointers.
Solving: each reflection intrinsic defines a new constraint
and resolution rule. Because of the nature of reflection,
generalizing across types, the resolution rules dynamically
create additional complex constraints during solving, where
previously only simple (copy) constraints were created.
This requires some solver changes:
The work done before the main solver loop (to attach new
constraints to the graph) is now done before each iteration,
in processNewConstraints.
Its loop over constraints is broken into two passes:
the first handles base (addr-of) constraints,
the second handles simple and complex constraints.
constraint.init() has been inlined. The only behaviour that
varies across constraints is ptr()
Sadly this will pessimize presolver optimisations, when we get
there; such is the price of reflection.
Objects: reflection intrinsics create objects (i.e. cause
memory allocations) with no SSA operation. We will represent
them as the cgnode of the intrinsic (e.g. reflect.New), so we
extend Labels and node.data to represent objects as a product
(not sum) of ssa.Value and cgnode and pull this out into its
own type, struct object. This simplifies a number of
invariants and saves space. The ntObject flag is now
represented by obj!=nil; the other flags are moved into
object.
cgnodes are now always recorded in objects/Labels for which it
is appropriate (all but those for globals, constants and the
shared contours for functions).
Also:
- Prepopulate the flattenMemo cache to consider reflect.Value
a fake pointer, not a struct.
- Improve accessors and documentation on type Label.
- @conctypes assertions renamed @types (since dyn. types needn't be concrete).
- add oracle 'describe' test on an interface (missing, an oversight).
R=crawshaw
CC=golang-dev
https://golang.org/cl/13418048
2013-09-16 07:49:10 -06:00
|
|
|
|
go.tools/pointer: strength reduction during constraint generation.
Motivation: simple constraints---copy and addr---are more
amenable to pre-solver optimizations (forthcoming) than
complex constraints: load, store, and all others.
In code such as the following:
t0 = new struct { x, y int }
t1 = &t0.y
t2 = *t1
there's no need for the full generality of a (complex)
load constraint for t2=*t1 since t1 can only point to t0.y.
All we need is a (simple) copy constraint t2 = (t0.y)
where (t0.y) is the object node label for that field.
For all "addressable" SSA instructions, we tabulate
whether their points-to set is necessarily a singleton. For
some (e.g. Alloc, MakeSlice, etc) this is always true by
design. For others (e.g. FieldAddr) it depends on their
operands.
We exploit this information when generating constraints:
all load-form and store-form constraints are reduced to copy
constraints if the pointer's PTS is a singleton.
Similarly all FieldAddr (y=&x.f) and IndexAddr (y=&x[0])
constraints are reduced to offset addition, for singleton
operands.
Here's the constraint mix when running on the oracle itself.
The total number of constraints is unchanged but the fraction
that are complex has gone down to 21% from 53%.
before after
--simple--
addr 20682 46949
copy 61454 91211
--complex--
offsetAddr 41621 15325
load 18769 12925
store 30758 6908
invoke 758 760
typeAssert 1688 1689
total 175832 175869
Also:
- Add Pointer.Context() for local variables,
since we now plumb cgnodes throughout. Nice.
- Refactor all load-form (load, receive, lookup) and
store-form (Store, send, MapUpdate) constraints to use
genLoad and genStore.
- Log counts of constraints by type.
- valNodes split into localval and globalval maps;
localval is purged after each function.
- analogous maps localobj[v] and globalobj[v] hold sole label
for pts(v), if singleton.
- fnObj map subsumed by globalobj.
- make{Function/Global/Constant} inlined into objectValue.
Much cleaner.
R=crawshaw
CC=golang-dev
https://golang.org/cl/13979043
2013-09-27 09:33:01 -06:00
|
|
|
case *ssa.Alloc, *ssa.MakeSlice, *ssa.MakeChan, *ssa.MakeMap, *ssa.MakeInterface:
|
|
|
|
v := instr.(ssa.Value)
|
|
|
|
a.addressOf(a.valueNode(v), a.objectNode(cgn, v))
|
2013-08-22 10:27:55 -06:00
|
|
|
|
|
|
|
case *ssa.ChangeInterface:
|
|
|
|
a.copy(a.valueNode(instr), a.valueNode(instr.X), 1)
|
|
|
|
|
|
|
|
case *ssa.TypeAssert:
|
2013-10-29 19:57:53 -06:00
|
|
|
a.typeAssert(instr.AssertedType, a.valueNode(instr), a.valueNode(instr.X), true)
|
2013-08-22 10:27:55 -06:00
|
|
|
|
|
|
|
case *ssa.Slice:
|
|
|
|
a.copy(a.valueNode(instr), a.valueNode(instr.X), 1)
|
|
|
|
|
|
|
|
case *ssa.If, *ssa.Jump:
|
|
|
|
// no-op.
|
|
|
|
|
|
|
|
case *ssa.Phi:
|
|
|
|
sz := a.sizeof(instr.Type())
|
|
|
|
for _, e := range instr.Edges {
|
|
|
|
a.copy(a.valueNode(instr), a.valueNode(e), sz)
|
|
|
|
}
|
|
|
|
|
|
|
|
case *ssa.MakeClosure:
|
|
|
|
fn := instr.Fn.(*ssa.Function)
|
|
|
|
a.copy(a.valueNode(instr), a.valueNode(fn), 1)
|
|
|
|
// Free variables are treated like global variables.
|
|
|
|
for i, b := range instr.Bindings {
|
|
|
|
a.copy(a.valueNode(fn.FreeVars[i]), a.valueNode(b), a.sizeof(b.Type()))
|
|
|
|
}
|
|
|
|
|
|
|
|
case *ssa.RunDefers:
|
|
|
|
// The analysis is flow insensitive, so we just "call"
|
|
|
|
// defers as we encounter them.
|
|
|
|
|
|
|
|
case *ssa.Range:
|
|
|
|
// Do nothing. Next{Iter: *ssa.Range} handles this case.
|
|
|
|
|
|
|
|
case *ssa.Next:
|
|
|
|
if !instr.IsString { // map
|
|
|
|
// Assumes that Next is always directly applied to a Range result.
|
|
|
|
theMap := instr.Iter.(*ssa.Range).X
|
|
|
|
tMap := theMap.Type().Underlying().(*types.Map)
|
|
|
|
ksize := a.sizeof(tMap.Key())
|
|
|
|
vsize := a.sizeof(tMap.Elem())
|
|
|
|
|
|
|
|
// Load from the map's (k,v) into the tuple's (ok, k, v).
|
go.tools/pointer: strength reduction during constraint generation.
Motivation: simple constraints---copy and addr---are more
amenable to pre-solver optimizations (forthcoming) than
complex constraints: load, store, and all others.
In code such as the following:
t0 = new struct { x, y int }
t1 = &t0.y
t2 = *t1
there's no need for the full generality of a (complex)
load constraint for t2=*t1 since t1 can only point to t0.y.
All we need is a (simple) copy constraint t2 = (t0.y)
where (t0.y) is the object node label for that field.
For all "addressable" SSA instructions, we tabulate
whether their points-to set is necessarily a singleton. For
some (e.g. Alloc, MakeSlice, etc) this is always true by
design. For others (e.g. FieldAddr) it depends on their
operands.
We exploit this information when generating constraints:
all load-form and store-form constraints are reduced to copy
constraints if the pointer's PTS is a singleton.
Similarly all FieldAddr (y=&x.f) and IndexAddr (y=&x[0])
constraints are reduced to offset addition, for singleton
operands.
Here's the constraint mix when running on the oracle itself.
The total number of constraints is unchanged but the fraction
that are complex has gone down to 21% from 53%.
before after
--simple--
addr 20682 46949
copy 61454 91211
--complex--
offsetAddr 41621 15325
load 18769 12925
store 30758 6908
invoke 758 760
typeAssert 1688 1689
total 175832 175869
Also:
- Add Pointer.Context() for local variables,
since we now plumb cgnodes throughout. Nice.
- Refactor all load-form (load, receive, lookup) and
store-form (Store, send, MapUpdate) constraints to use
genLoad and genStore.
- Log counts of constraints by type.
- valNodes split into localval and globalval maps;
localval is purged after each function.
- analogous maps localobj[v] and globalobj[v] hold sole label
for pts(v), if singleton.
- fnObj map subsumed by globalobj.
- make{Function/Global/Constant} inlined into objectValue.
Much cleaner.
R=crawshaw
CC=golang-dev
https://golang.org/cl/13979043
2013-09-27 09:33:01 -06:00
|
|
|
a.genLoad(cgn, a.valueNode(instr)+1, theMap, 0, ksize+vsize)
|
2013-08-22 10:27:55 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
case *ssa.Lookup:
|
|
|
|
if tMap, ok := instr.X.Type().Underlying().(*types.Map); ok {
|
|
|
|
// CommaOk can be ignored: field 0 is a no-op.
|
|
|
|
ksize := a.sizeof(tMap.Key())
|
|
|
|
vsize := a.sizeof(tMap.Elem())
|
go.tools/pointer: strength reduction during constraint generation.
Motivation: simple constraints---copy and addr---are more
amenable to pre-solver optimizations (forthcoming) than
complex constraints: load, store, and all others.
In code such as the following:
t0 = new struct { x, y int }
t1 = &t0.y
t2 = *t1
there's no need for the full generality of a (complex)
load constraint for t2=*t1 since t1 can only point to t0.y.
All we need is a (simple) copy constraint t2 = (t0.y)
where (t0.y) is the object node label for that field.
For all "addressable" SSA instructions, we tabulate
whether their points-to set is necessarily a singleton. For
some (e.g. Alloc, MakeSlice, etc) this is always true by
design. For others (e.g. FieldAddr) it depends on their
operands.
We exploit this information when generating constraints:
all load-form and store-form constraints are reduced to copy
constraints if the pointer's PTS is a singleton.
Similarly all FieldAddr (y=&x.f) and IndexAddr (y=&x[0])
constraints are reduced to offset addition, for singleton
operands.
Here's the constraint mix when running on the oracle itself.
The total number of constraints is unchanged but the fraction
that are complex has gone down to 21% from 53%.
before after
--simple--
addr 20682 46949
copy 61454 91211
--complex--
offsetAddr 41621 15325
load 18769 12925
store 30758 6908
invoke 758 760
typeAssert 1688 1689
total 175832 175869
Also:
- Add Pointer.Context() for local variables,
since we now plumb cgnodes throughout. Nice.
- Refactor all load-form (load, receive, lookup) and
store-form (Store, send, MapUpdate) constraints to use
genLoad and genStore.
- Log counts of constraints by type.
- valNodes split into localval and globalval maps;
localval is purged after each function.
- analogous maps localobj[v] and globalobj[v] hold sole label
for pts(v), if singleton.
- fnObj map subsumed by globalobj.
- make{Function/Global/Constant} inlined into objectValue.
Much cleaner.
R=crawshaw
CC=golang-dev
https://golang.org/cl/13979043
2013-09-27 09:33:01 -06:00
|
|
|
a.genLoad(cgn, a.valueNode(instr), instr.X, ksize, vsize)
|
2013-08-22 10:27:55 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
case *ssa.MapUpdate:
|
|
|
|
tmap := instr.Map.Type().Underlying().(*types.Map)
|
|
|
|
ksize := a.sizeof(tmap.Key())
|
|
|
|
vsize := a.sizeof(tmap.Elem())
|
go.tools/pointer: strength reduction during constraint generation.
Motivation: simple constraints---copy and addr---are more
amenable to pre-solver optimizations (forthcoming) than
complex constraints: load, store, and all others.
In code such as the following:
t0 = new struct { x, y int }
t1 = &t0.y
t2 = *t1
there's no need for the full generality of a (complex)
load constraint for t2=*t1 since t1 can only point to t0.y.
All we need is a (simple) copy constraint t2 = (t0.y)
where (t0.y) is the object node label for that field.
For all "addressable" SSA instructions, we tabulate
whether their points-to set is necessarily a singleton. For
some (e.g. Alloc, MakeSlice, etc) this is always true by
design. For others (e.g. FieldAddr) it depends on their
operands.
We exploit this information when generating constraints:
all load-form and store-form constraints are reduced to copy
constraints if the pointer's PTS is a singleton.
Similarly all FieldAddr (y=&x.f) and IndexAddr (y=&x[0])
constraints are reduced to offset addition, for singleton
operands.
Here's the constraint mix when running on the oracle itself.
The total number of constraints is unchanged but the fraction
that are complex has gone down to 21% from 53%.
before after
--simple--
addr 20682 46949
copy 61454 91211
--complex--
offsetAddr 41621 15325
load 18769 12925
store 30758 6908
invoke 758 760
typeAssert 1688 1689
total 175832 175869
Also:
- Add Pointer.Context() for local variables,
since we now plumb cgnodes throughout. Nice.
- Refactor all load-form (load, receive, lookup) and
store-form (Store, send, MapUpdate) constraints to use
genLoad and genStore.
- Log counts of constraints by type.
- valNodes split into localval and globalval maps;
localval is purged after each function.
- analogous maps localobj[v] and globalobj[v] hold sole label
for pts(v), if singleton.
- fnObj map subsumed by globalobj.
- make{Function/Global/Constant} inlined into objectValue.
Much cleaner.
R=crawshaw
CC=golang-dev
https://golang.org/cl/13979043
2013-09-27 09:33:01 -06:00
|
|
|
a.genStore(cgn, instr.Map, a.valueNode(instr.Key), 0, ksize)
|
|
|
|
a.genStore(cgn, instr.Map, a.valueNode(instr.Value), ksize, vsize)
|
2013-08-22 10:27:55 -06:00
|
|
|
|
|
|
|
case *ssa.Panic:
|
|
|
|
a.copy(a.panicNode, a.valueNode(instr.X), 1)
|
|
|
|
|
|
|
|
default:
|
|
|
|
panic(fmt.Sprintf("unimplemented: %T", instr))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2013-10-09 10:41:55 -06:00
|
|
|
func (a *analysis) makeCGNode(fn *ssa.Function, obj nodeid, callersite *callsite) *cgnode {
|
|
|
|
cgn := &cgnode{fn: fn, obj: obj, callersite: callersite}
|
2013-09-25 15:17:42 -06:00
|
|
|
a.cgnodes = append(a.cgnodes, cgn)
|
|
|
|
return cgn
|
|
|
|
}
|
|
|
|
|
2013-08-22 10:27:55 -06:00
|
|
|
// genRootCalls generates the synthetic root of the callgraph and the
// initial calls from it to the analysis scope, such as main, a test
// or a library.
//
func (a *analysis) genRootCalls() *cgnode {
	// Synthesize a fake function to serve as the callgraph root.
	r := ssa.NewFunction("<root>", new(types.Signature), "root of callgraph")
	r.Prog = a.prog // hack.
	r.Enclosing = r // hack, so Function.String() doesn't crash
	r.String()      // (asserts that it doesn't crash)
	// obj=0: the root has no function object block of its own.
	root := a.makeCGNode(r, 0, nil)

	// For each main package, call main.init(), main.main().
	for _, mainPkg := range a.config.Mains {
		main := mainPkg.Func("main")
		if main == nil {
			panic(fmt.Sprintf("%s has no main function", mainPkg))
		}

		// One synthetic call site per main package; its "targets"
		// node receives both init and main below.
		targets := a.addOneNode(main.Signature, "root.targets", nil)
		site := &callsite{targets: targets}
		root.sites = append(root.sites, site)
		for _, fn := range [2]*ssa.Function{mainPkg.Func("init"), main} {
			if a.log != nil {
				fmt.Fprintf(a.log, "\troot call to %s:\n", fn)
			}
			// Copy fn's value node into the call targets,
			// making fn reachable from the root.
			a.copy(targets, a.valueNode(fn), 1)
		}
	}

	return root
}
|
|
|
|
|
|
|
|
// genFunc generates constraints for function fn.
func (a *analysis) genFunc(cgn *cgnode) {
	fn := cgn.fn

	// An intrinsic, if non-nil, replaces the function's usual
	// constraint generation entirely.
	impl := a.findIntrinsic(fn)

	if a.log != nil {
		fmt.Fprintln(a.log)
		fmt.Fprintln(a.log)

		// Hack: don't display body if intrinsic.
		if impl != nil {
			fn2 := *cgn.fn // copy
			fn2.Locals = nil
			fn2.Blocks = nil
			fn2.DumpTo(a.log)
		} else {
			cgn.fn.DumpTo(a.log)
		}
	}

	if impl != nil {
		impl(a, cgn)
		return
	}

	if fn.Blocks == nil {
		// External function with no intrinsic treatment.
		// We'll warn about calls to such functions at the end.
		return
	}

	if a.log != nil {
		fmt.Fprintln(a.log, "; Creating nodes for local values")
	}

	// Fresh per-function maps; both are purged at the end of this
	// function so locals aren't retained across functions.
	a.localval = make(map[ssa.Value]nodeid)
	a.localobj = make(map[ssa.Value]nodeid)

	// The value nodes for the params are in the func object block.
	params := a.funcParams(cgn.obj)
	for _, p := range fn.Params {
		a.setValueNode(p, params, cgn)
		// Advance past this parameter's (possibly multi-node) block.
		params += nodeid(a.sizeof(p.Type()))
	}

	// Free variables are treated like global variables:
	// the outer function sets them with MakeClosure;
	// the inner function accesses them with Capture.

	// Create value nodes for all value instructions
	// since SSA may contain forward references.
	for _, b := range fn.Blocks {
		for _, instr := range b.Instrs {
			switch instr := instr.(type) {
			case *ssa.Range:
				// do nothing: it has a funky type,
				// and *ssa.Next does all the work.

			case ssa.Value:
				var comment string
				if a.log != nil {
					// Name the node only when logging, to
					// avoid the cost otherwise.
					comment = instr.Name()
				}
				id := a.addNodes(instr.Type(), comment)
				a.setValueNode(instr, id, cgn)
			}
		}
	}

	// Generate constraints for instructions.
	for _, b := range fn.Blocks {
		for _, instr := range b.Instrs {
			a.genInstr(cgn, instr)
		}
	}

	// Release the per-function maps (see above).
	a.localval = nil
	a.localobj = nil
}
|
|
|
|
|
go.tools/ssa: fix computation of set of types requiring method sets.
Motivation:
Previously, we assumed that the set of types for which a
complete method set (containing all synthesized wrapper
functions) is required at runtime was the set of types
used as operands to some *ssa.MakeInterface instruction.
In fact, this is an underapproximation because types can
be derived from other ones via reflection, and some of
these may need methods. The reflect.Type API allows *T to
be derived from T, and these may have different method
sets. Reflection also allows almost any subcomponent of a
type to be accessed (with one exception: given T, defined
'type T struct{S}', you can reach S but not struct{S}).
As a result, the pointer analysis was unable to generate
all necessary constraints before running the solver,
causing a crash when reflection derives types whose
methods are unavailable. (A similar problem would afflict
an ahead-of-time compiler based on ssa. The ssa/interp
interpreter was immune only because it does not require
all wrapper methods to be created before execution
begins.)
Description:
This change causes the SSA builder to record, for each
package, the set of all types with non-empty method sets that
are referenced within that package. This set is accessed via
Packages.TypesWithMethodSets(). Program.TypesWithMethodSets()
returns its union across all packages.
The set of references that matter are:
- types of operands to some MakeInterface instruction (as before)
- types of all exported package members
- all subcomponents of the above, recursively.
This is a conservative approximation to the set of types
whose methods may be called dynamically.
We define the owning package of a type as follows:
- the owner of a named type is the package in which it is defined;
- the owner of a pointer-to-named type is the owner of that named type;
- the owner of all other types is nil.
A package must include the method sets for all types that it
owns, and all subcomponents of that type that are not owned by
another package, recursively. Types with an owner appear in
exactly one package; types with no owner (such as struct{T})
may appear within multiple packages.
(A typical Go compiler would emit multiple copies of these
methods as weak symbols; a typical linker would eliminate
duplicates.)
Also:
- go/types/typemap: implement hash function for *Tuple.
- pointer: generate nodes/constraints for all of
ssa.Program.TypesWithMethodSets().
Add rtti.go regression test.
- Add API test of Package.TypesWithMethodSets().
- Set Function.Pkg to nil (again) for wrapper functions,
since these may be shared by many packages.
- Remove a redundant logging statement.
- Document that ssa CREATE phase is in fact sequential.
Fixes golang/go#6605
R=gri
CC=golang-dev
https://golang.org/cl/14920056
2013-10-23 15:07:52 -06:00
|
|
|
// genMethodsOf generates nodes and constraints for all methods of type T.
|
|
|
|
func (a *analysis) genMethodsOf(T types.Type) {
|
|
|
|
mset := T.MethodSet()
|
|
|
|
for i, n := 0, mset.Len(); i < n; i++ {
|
|
|
|
a.valueNode(a.prog.Method(mset.At(i)))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2013-08-22 10:27:55 -06:00
|
|
|
// generate generates offline constraints for the entire program.
|
|
|
|
// It returns the synthetic root of the callgraph.
|
|
|
|
//
|
|
|
|
func (a *analysis) generate() *cgnode {
|
|
|
|
// Create a dummy node since we use the nodeid 0 for
|
|
|
|
// non-pointerlike variables.
|
|
|
|
a.addNodes(tInvalid, "(zero)")
|
|
|
|
|
|
|
|
// Create the global node for panic values.
|
|
|
|
a.panicNode = a.addNodes(tEface, "panic")
|
|
|
|
|
go.tools/pointer: reflection, part 1: maps, and some core features.
Core:
reflect.TypeOf
reflect.ValueOf
reflect.Zero
reflect.Value.Interface
Maps:
(reflect.Value).MapIndex
(reflect.Value).MapKeys
(reflect.Value).SetMapIndex
(*reflect.rtype).Elem
(*reflect.rtype).Key
+ tests:
pointer/testdata/mapreflect.go.
oracle/testdata/src/main/reflection.go.
Interface objects (T, V...) have been renamed "tagged objects".
Abstraction: we model reflect.Value similar to
interface{}---as a pointer that points only to tagged
objects---but a reflect.Value may also point to an "indirect
tagged object", one in which the payload V is of type *T not T.
These are required because reflect.Values can hold lvalues,
e.g. when derived via Field() or Elem(), though we won't use
them till we get to structs and pointers.
Solving: each reflection intrinsic defines a new constraint
and resolution rule. Because of the nature of reflection,
generalizing across types, the resolution rules dynamically
create additional complex constraints during solving, where
previously only simple (copy) constraints were created.
This requires some solver changes:
The work done before the main solver loop (to attach new
constraints to the graph) is now done before each iteration,
in processNewConstraints.
Its loop over constraints is broken into two passes:
the first handles base (addr-of) constraints,
the second handles simple and complex constraints.
constraint.init() has been inlined. The only behaviour that
varies across constraints is ptr()
Sadly this will pessimize presolver optimisations, when we get
there; such is the price of reflection.
Objects: reflection intrinsics create objects (i.e. cause
memory allocations) with no SSA operation. We will represent
them as the cgnode of the instrinsic (e.g. reflect.New), so we
extend Labels and node.data to represent objects as a product
(not sum) of ssa.Value and cgnode and pull this out into its
own type, struct object. This simplifies a number of
invariants and saves space. The ntObject flag is now
represented by obj!=nil; the other flags are moved into
object.
cgnodes are now always recorded in objects/Labels for which it
is appropriate (all but those for globals, constants and the
shared contours for functions).
Also:
- Prepopulate the flattenMemo cache to consider reflect.Value
a fake pointer, not a struct.
- Improve accessors and documentation on type Label.
- @conctypes assertions renamed @types (since dyn. types needn't be concrete).
- add oracle 'describe' test on an interface (missing, an oversight).
R=crawshaw
CC=golang-dev
https://golang.org/cl/13418048
2013-09-16 07:49:10 -06:00
|
|
|
// Create nodes and constraints for all methods of reflect.rtype.
|
2013-09-23 14:13:01 -06:00
|
|
|
// (Shared contours are used by dynamic calls to reflect.Type
|
|
|
|
// methods---typically just String().)
|
|
|
|
if rtype := a.reflectRtypePtr; rtype != nil {
|
go.tools/ssa: fix computation of set of types requiring method sets.
Motivation:
Previously, we assumed that the set of types for which a
complete method set (containing all synthesized wrapper
functions) is required at runtime was the set of types
used as operands to some *ssa.MakeInterface instruction.
In fact, this is an underapproximation because types can
be derived from other ones via reflection, and some of
these may need methods. The reflect.Type API allows *T to
be derived from T, and these may have different method
sets. Reflection also allows almost any subcomponent of a
type to be accessed (with one exception: given T, defined
'type T struct{S}', you can reach S but not struct{S}).
As a result, the pointer analysis was unable to generate
all necessary constraints before running the solver,
causing a crash when reflection derives types whose
methods are unavailable. (A similar problem would afflict
an ahead-of-time compiler based on ssa. The ssa/interp
interpreter was immune only because it does not require
all wrapper methods to be created before execution
begins.)
Description:
This change causes the SSA builder to record, for each
package, the set of all types with non-empty method sets that
are referenced within that package. This set is accessed via
Package.TypesWithMethodSets(). Program.TypesWithMethodSets()
returns its union across all packages.
The set of references that matter are:
- types of operands to some MakeInterface instruction (as before)
- types of all exported package members
- all subcomponents of the above, recursively.
This is a conservative approximation to the set of types
whose methods may be called dynamically.
We define the owning package of a type as follows:
- the owner of a named type is the package in which it is defined;
- the owner of a pointer-to-named type is the owner of that named type;
- the owner of all other types is nil.
A package must include the method sets for all types that it
owns, and all subcomponents of that type that are not owned by
another package, recursively. Types with an owner appear in
exactly one package; types with no owner (such as struct{T})
may appear within multiple packages.
(A typical Go compiler would emit multiple copies of these
methods as weak symbols; a typical linker would eliminate
duplicates.)
Also:
- go/types/typemap: implement hash function for *Tuple.
- pointer: generate nodes/constraints for all of
ssa.Program.TypesWithMethodSets().
Add rtti.go regression test.
- Add API test of Package.TypesWithMethodSets().
- Set Function.Pkg to nil (again) for wrapper functions,
since these may be shared by many packages.
- Remove a redundant logging statement.
- Document that ssa CREATE phase is in fact sequential.
Fixes golang/go#6605
R=gri
CC=golang-dev
https://golang.org/cl/14920056
2013-10-23 15:07:52 -06:00
|
|
|
a.genMethodsOf(rtype)
|
go.tools/pointer: reflection, part 1: maps, and some core features.
Core:
reflect.TypeOf
reflect.ValueOf
reflect.Zero
reflect.Value.Interface
Maps:
(reflect.Value).MapIndex
(reflect.Value).MapKeys
(reflect.Value).SetMapIndex
(*reflect.rtype).Elem
(*reflect.rtype).Key
+ tests:
pointer/testdata/mapreflect.go.
oracle/testdata/src/main/reflection.go.
Interface objects (T, V...) have been renamed "tagged objects".
Abstraction: we model reflect.Value similar to
interface{}---as a pointer that points only to tagged
objects---but a reflect.Value may also point to an "indirect
tagged object", one in which the payload V is of type *T not T.
These are required because reflect.Values can hold lvalues,
e.g. when derived via Field() or Elem(), though we won't use
them till we get to structs and pointers.
Solving: each reflection intrinsic defines a new constraint
and resolution rule. Because of the nature of reflection,
generalizing across types, the resolution rules dynamically
create additional complex constraints during solving, where
previously only simple (copy) constraints were created.
This requires some solver changes:
The work done before the main solver loop (to attach new
constraints to the graph) is now done before each iteration,
in processNewConstraints.
Its loop over constraints is broken into two passes:
the first handles base (addr-of) constraints,
the second handles simple and complex constraints.
constraint.init() has been inlined. The only behaviour that
varies across constraints is ptr().
Sadly this will pessimize presolver optimisations, when we get
there; such is the price of reflection.
Objects: reflection intrinsics create objects (i.e. cause
memory allocations) with no SSA operation. We will represent
them as the cgnode of the intrinsic (e.g. reflect.New), so we
extend Labels and node.data to represent objects as a product
(not sum) of ssa.Value and cgnode and pull this out into its
own type, struct object. This simplifies a number of
invariants and saves space. The ntObject flag is now
represented by obj!=nil; the other flags are moved into
object.
cgnodes are now always recorded in objects/Labels for which it
is appropriate (all but those for globals, constants and the
shared contours for functions).
Also:
- Prepopulate the flattenMemo cache to consider reflect.Value
a fake pointer, not a struct.
- Improve accessors and documentation on type Label.
- @conctypes assertions renamed @types (since dyn. types needn't be concrete).
- add oracle 'describe' test on an interface (missing, an oversight).
R=crawshaw
CC=golang-dev
https://golang.org/cl/13418048
2013-09-16 07:49:10 -06:00
|
|
|
}
|
|
|
|
|
2013-08-22 10:27:55 -06:00
|
|
|
root := a.genRootCalls()
|
|
|
|
|
go.tools/ssa: fix computation of set of types requiring method sets.
Motivation:
Previously, we assumed that the set of types for which a
complete method set (containing all synthesized wrapper
functions) is required at runtime was the set of types
used as operands to some *ssa.MakeInterface instruction.
In fact, this is an underapproximation because types can
be derived from other ones via reflection, and some of
these may need methods. The reflect.Type API allows *T to
be derived from T, and these may have different method
sets. Reflection also allows almost any subcomponent of a
type to be accessed (with one exception: given T, defined
'type T struct{S}', you can reach S but not struct{S}).
As a result, the pointer analysis was unable to generate
all necessary constraints before running the solver,
causing a crash when reflection derives types whose
methods are unavailable. (A similar problem would afflict
an ahead-of-time compiler based on ssa. The ssa/interp
interpreter was immune only because it does not require
all wrapper methods to be created before execution
begins.)
Description:
This change causes the SSA builder to record, for each
package, the set of all types with non-empty method sets that
are referenced within that package. This set is accessed via
Package.TypesWithMethodSets(). Program.TypesWithMethodSets()
returns its union across all packages.
The set of references that matter are:
- types of operands to some MakeInterface instruction (as before)
- types of all exported package members
- all subcomponents of the above, recursively.
This is a conservative approximation to the set of types
whose methods may be called dynamically.
We define the owning package of a type as follows:
- the owner of a named type is the package in which it is defined;
- the owner of a pointer-to-named type is the owner of that named type;
- the owner of all other types is nil.
A package must include the method sets for all types that it
owns, and all subcomponents of that type that are not owned by
another package, recursively. Types with an owner appear in
exactly one package; types with no owner (such as struct{T})
may appear within multiple packages.
(A typical Go compiler would emit multiple copies of these
methods as weak symbols; a typical linker would eliminate
duplicates.)
Also:
- go/types/typemap: implement hash function for *Tuple.
- pointer: generate nodes/constraints for all of
ssa.Program.TypesWithMethodSets().
Add rtti.go regression test.
- Add API test of Package.TypesWithMethodSets().
- Set Function.Pkg to nil (again) for wrapper functions,
since these may be shared by many packages.
- Remove a redundant logging statement.
- Document that ssa CREATE phase is in fact sequential.
Fixes golang/go#6605
R=gri
CC=golang-dev
https://golang.org/cl/14920056
2013-10-23 15:07:52 -06:00
|
|
|
// Create nodes and constraints for all methods of all types
|
|
|
|
// that are dynamically accessible via reflection or interfaces.
|
|
|
|
for _, T := range a.prog.TypesWithMethodSets() {
|
|
|
|
a.genMethodsOf(T)
|
|
|
|
}
|
|
|
|
|
2013-08-22 10:27:55 -06:00
|
|
|
// Generate constraints for entire program.
|
|
|
|
for len(a.genq) > 0 {
|
|
|
|
cgn := a.genq[0]
|
|
|
|
a.genq = a.genq[1:]
|
|
|
|
a.genFunc(cgn)
|
|
|
|
}
|
|
|
|
|
2013-10-01 07:46:33 -06:00
|
|
|
// The runtime magically allocates os.Args; so should we.
|
|
|
|
if os := a.prog.ImportedPackage("os"); os != nil {
|
|
|
|
// In effect: os.Args = new([1]string)[:]
|
|
|
|
obj := a.addNodes(types.NewArray(types.Typ[types.String], 1), "<command-line args>")
|
|
|
|
a.endObject(obj, nil, "<command-line args>")
|
|
|
|
a.addressOf(a.objectNode(nil, os.Var("Args")), obj)
|
|
|
|
}
|
|
|
|
|
2013-08-22 10:27:55 -06:00
|
|
|
return root
|
|
|
|
}
|