mirror of https://github.com/golang/go
go/ssa/interp: fix bug causing subelements of aggregates to change address.
Since all SSA values are immutable, no value copying is required for any
operations except those that load from or store to a variable; those
operations must do an aggregate copy, i.e., descend into struct and array
elements.  All other calls to copyVal have been removed; they were pieces
of duct tape, as I had long suspected.

The descent must be based on the static type information, not the "shape"
of the dynamic value, since two reflect.Value structs may have different
internal shapes.  We clobber the true definition of reflect.Value's
underlying type, replacing it with struct{interface{}, interface{}}, which
is close enough to make the load/store functions work.

+ Test

Change-Id: I5e239d91ed0cb2a669a9f75766024fe1f9a5c347
Reviewed-on: https://go-review.googlesource.com/7532
Reviewed-by: Robert Griesemer <gri@golang.org>
parent 8cc1c75580
commit 868c429971
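To make the bug concrete before reading the diff: the interpreter represents a struct value as a slice of field values, so "copying" an aggregate on every store allocates a fresh backing slice and silently invalidates any previously computed field address. The standalone Go sketch below is not code from this change; value, structure, and storeInPlace are simplified stand-ins for the interpreter's real value model and new store function. It contrasts replacing the whole aggregate with descending into it in place.

package main

import "fmt"

// Simplified stand-ins for the interpreter's value model; the names mirror
// the real ones, but this file is only an illustration.
type value = interface{}
type structure []value // a struct value is a slice of its field values

// storeInPlace copies rhs into the aggregate held in *addr field by field,
// keeping the existing backing slice (the essence of the new store
// function, minus the type-directed recursion).
func storeInPlace(addr *value, rhs structure) {
	copy((*addr).(structure), rhs)
}

func main() {
	// A variable of type struct{ f int }, as the interpreter models it.
	addr := new(value)
	*addr = structure{0}
	p := &(*addr).(structure)[0] // like FieldAddr: p aliases field f

	// Old behaviour: Store replaced the whole aggregate with a copy, so p
	// now points into a dead slice and the write through it is lost.
	*addr = structure{0}
	*p = 1
	fmt.Println((*addr).(structure)[0]) // prints 0: the store through p was lost

	// New behaviour: store descends into the existing aggregate in place,
	// so the field's address (and hence p) stays valid.
	*addr = structure{0}
	p = &(*addr).(structure)[0]
	storeInPlace(addr, structure{0})
	*p = 1
	fmt.Println((*addr).(structure)[0]) // prints 1: the alias is preserved
}

The regression test added to coverage.go further down checks exactly this property at the level of the interpreted program.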
@@ -239,10 +239,10 @@ func visitInstr(fr *frame, instr ssa.Instruction) continuation {
 		panic(targetPanic{fr.get(instr.X)})
 
 	case *ssa.Send:
-		fr.get(instr.Chan).(chan value) <- copyVal(fr.get(instr.X))
+		fr.get(instr.Chan).(chan value) <- fr.get(instr.X)
 
 	case *ssa.Store:
-		*fr.get(instr.Addr).(*value) = copyVal(fr.get(instr.Val))
+		store(deref(instr.Addr.Type()), fr.get(instr.Addr).(*value), fr.get(instr.Val))
 
 	case *ssa.If:
 		succ := 1
@@ -307,10 +307,11 @@ func visitInstr(fr *frame, instr ssa.Instruction) continuation {
 
 	case *ssa.FieldAddr:
 		x := fr.get(instr.X)
+		// FIXME wrong! &global.f must not change if we do *global = zero!
 		fr.env[instr] = &(*x.(*value)).(structure)[instr.Field]
 
 	case *ssa.Field:
-		fr.env[instr] = copyVal(fr.get(instr.X).(structure)[instr.Field])
+		fr.env[instr] = fr.get(instr.X).(structure)[instr.Field]
 
 	case *ssa.IndexAddr:
 		x := fr.get(instr.X)
@@ -325,7 +326,7 @@ func visitInstr(fr *frame, instr ssa.Instruction) continuation {
 	}
 
 	case *ssa.Index:
-		fr.env[instr] = copyVal(fr.get(instr.X).(array)[asInt(fr.get(instr.Index))])
+		fr.env[instr] = fr.get(instr.X).(array)[asInt(fr.get(instr.Index))]
 
 	case *ssa.Lookup:
 		fr.env[instr] = lookup(instr, fr.get(instr.X), fr.get(instr.Index))
@@ -436,7 +437,7 @@ func prepareCall(fr *frame, call *ssa.CallCommon) (fn value, args []value) {
 		} else {
 			fn = f
 		}
-		args = append(args, copyVal(recv.v))
+		args = append(args, recv.v)
 	}
 	for _, arg := range call.Args {
 		args = append(args, fr.get(arg))
@@ -286,9 +286,7 @@ func lookup(instr *ssa.Lookup, x, idx value) value {
 		v = x.lookup(idx.(hashable))
 		ok = v != nil
 	}
-	if ok {
-		v = copyVal(v)
-	} else {
+	if !ok {
 		v = zero(instr.X.Type().Underlying().(*types.Map).Elem())
 	}
 	if instr.CommaOk {
@@ -844,7 +842,7 @@ func unop(instr *ssa.UnOp, x value) value {
 			return -x
 		}
 	case token.MUL:
-		return copyVal(*x.(*value)) // load
+		return load(deref(instr.X.Type()), x.(*value))
 	case token.NOT:
 		return !x.(bool)
 	case token.XOR:
@@ -891,7 +889,7 @@ func typeAssert(i *interpreter, instr *ssa.TypeAssert, itf iface) value {
 		err = checkInterface(i, idst, itf)
 
 	} else if types.Identical(itf.t, instr.AssertedType) {
-		v = copyVal(itf.v) // extract value
+		v = itf.v // extract value
 
 	} else {
 		err = fmt.Sprintf("interface conversion: interface is %s, not %s", itf.t, instr.AssertedType)
@@ -31,8 +31,7 @@ func (t *opaqueType) String() string { return t.name }
 var reflectTypesPackage = types.NewPackage("reflect", "reflect")
 
 // rtype is the concrete type the interpreter uses to implement the
-// reflect.Type interface. Since its type is opaque to the target
-// language, we use a types.Basic.
+// reflect.Type interface.
 //
 // type rtype <opaque>
 var rtypeType = makeNamedType("rtype", &opaqueType{nil, "rtype"})
@@ -508,6 +507,29 @@ func initReflect(i *interpreter) {
 		Members: make(map[string]ssa.Member),
 	}
 
+	// Clobber the type-checker's notion of reflect.Value's
+	// underlying type so that it more closely matches the fake one
+	// (at least in the number of fields---we lie about the type of
+	// the rtype field).
+	//
+	// We must ensure that calls to (ssa.Value).Type() return the
+	// fake type so that correct "shape" is used when allocating
+	// variables, making zero values, loading, and storing.
+	//
+	// TODO(adonovan): obviously this is a hack. We need a cleaner
+	// way to fake the reflect package (almost---DeepEqual is fine).
+	// One approach would be not to even load its source code, but
+	// provide fake source files. This would guarantee that no bad
+	// information leaks into other packages.
+	if r := i.prog.ImportedPackage("reflect"); r != nil {
+		rV := r.Object.Scope().Lookup("Value").Type().(*types.Named)
+		tEface := types.NewInterface(nil, nil).Complete()
+		rV.SetUnderlying(types.NewStruct([]*types.Var{
+			types.NewField(token.NoPos, r.Object, "t", tEface, false), // a lie
+			types.NewField(token.NoPos, r.Object, "v", tEface, false),
+		}, nil))
+	}
+
 	i.rtypeMethods = methodSet{
 		"Bits":   newMethod(i.reflectPackage, rtypeType, "Bits"),
 		"Elem":   newMethod(i.reflectPackage, rtypeType, "Elem"),
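As an aside, the SetUnderlying clobbering above can be demonstrated in isolation. The following small, hypothetical go/types program (the package path and field names are invented for illustration; it is not part of this change) builds a named type and then swaps in a struct{t, v interface{}} underlying type, which is essentially what initReflect now does to reflect.Value.

package main

import (
	"fmt"
	"go/token"
	"go/types"
)

func main() {
	pkg := types.NewPackage("example.org/fake", "fake")

	// A named type "Value" with a placeholder underlying type.
	obj := types.NewTypeName(token.NoPos, pkg, "Value", nil)
	named := types.NewNamed(obj, types.NewStruct(nil, nil), nil)

	// Replace the underlying type with struct{t, v interface{}},
	// mirroring the clobbering of reflect.Value in the hunk above.
	eface := types.NewInterface(nil, nil).Complete()
	named.SetUnderlying(types.NewStruct([]*types.Var{
		types.NewField(token.NoPos, pkg, "t", eface, false),
		types.NewField(token.NoPos, pkg, "v", eface, false),
	}, nil))

	// Prints something like: struct{t interface{}; v interface{}}
	fmt.Println(named.Underlying())
}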
go/ssa/interp/testdata/coverage.go (vendored, +17)
@@ -515,3 +515,20 @@ func init() {
 	i.f()
 	panic("unreachable")
 }
+
+// Regression test for a subtle bug in which copying values would cause
+// subcomponents of aggregate variables to change address, breaking
+// aliases.
+func init() {
+	type T struct{ f int }
+	var x T
+	p := &x.f
+	x = T{}
+	*p = 1
+	if x.f != 1 {
+		panic("lost store")
+	}
+	if p != &x.f {
+		panic("unstable address")
+	}
+}
@@ -310,43 +310,53 @@ func hash(t types.Type, x value) int {
 	panic(fmt.Sprintf("%T is unhashable", x))
 }
 
-// copyVal returns a copy of value v.
-// TODO(adonovan): add tests of aliasing and mutation.
-func copyVal(v value) value {
-	if v == nil {
-		panic("copyVal(nil)")
-	}
-	switch v := v.(type) {
-	case bool, int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64, uintptr, float32, float64, complex64, complex128, string, unsafe.Pointer:
-		return v
-	case map[value]value:
-		return v
-	case *hashmap:
-		return v
-	case chan value:
-		return v
-	case *value:
-		return v
-	case *ssa.Function, *ssa.Builtin, *closure:
-		return v
-	case iface:
-		return v
-	case []value:
-		return v
-	case structure:
-		a := make(structure, len(v))
-		copy(a, v)
-		return a
-	case array:
-		a := make(array, len(v))
-		copy(a, v)
-		return a
-	case tuple:
-		break
-	case rtype:
-		return v
-	}
-	panic(fmt.Sprintf("cannot copy %T", v))
-}
+// reflect.Value struct values don't have a fixed shape, since the
+// payload can be a scalar or an aggregate depending on the instance.
+// So store (and load) can't simply use recursion over the shape of the
+// rhs value, or the lhs, to copy the value; we need the static type
+// information. (We can't make reflect.Value a new basic data type
+// because its "structness" is exposed to Go programs.)
+
+// load returns the value of type T in *addr.
+func load(T types.Type, addr *value) value {
+	switch T := T.Underlying().(type) {
+	case *types.Struct:
+		v := (*addr).(structure)
+		a := make(structure, len(v))
+		for i := range a {
+			a[i] = load(T.Field(i).Type(), &v[i])
+		}
+		return a
+	case *types.Array:
+		v := (*addr).(array)
+		a := make(array, len(v))
+		for i := range a {
+			a[i] = load(T.Elem(), &v[i])
+		}
+		return a
+	default:
+		return *addr
+	}
+}
+
+// store stores value v of type T into *addr.
+func store(T types.Type, addr *value, v value) {
+	switch T := T.Underlying().(type) {
+	case *types.Struct:
+		lhs := (*addr).(structure)
+		rhs := v.(structure)
+		for i := range lhs {
+			store(T.Field(i).Type(), &lhs[i], rhs[i])
+		}
+	case *types.Array:
+		lhs := (*addr).(array)
+		rhs := v.(array)
+		for i := range lhs {
+			store(T.Elem(), &lhs[i], rhs[i])
+		}
+	default:
+		*addr = v
+	}
+}
 
 // Prints in the style of built-in println.
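The Store and token.MUL cases earlier in the diff pass deref(instr.Addr.Type()) and deref(instr.X.Type()) so that load and store see the static type of the pointee rather than the pointer. The deref helper itself is not shown in this diff; a plausible sketch of it, assuming the conventional definition used in go/ssa-style code, is:

package interp

import "go/types"

// deref returns the element type of a pointer type, or typ itself if it
// is not a pointer. This is a sketch; the real helper is defined
// elsewhere in the go/ssa/interp package.
func deref(typ types.Type) types.Type {
	if p, ok := typ.Underlying().(*types.Pointer); ok {
		return p.Elem()
	}
	return typ
}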