
[dev.ssa] cmd/compile: make failed nil checks panic

Introduce pseudo-ops PanicNilCheck and LoweredPanicNilCheck.
PanicNilCheck could be rewritten directly into MOVL
during lowering, but then we couldn't log nil checks.

With this change, runnable nil check tests pass:

GOSSAPKG=main go run run.go -- nil*.go

Nil check tests that examine compiler output fail:

GOSSAPKG=p go run run.go -- nil*.go

This is due to several factors:

* SSA has improved elimination of unnecessary nil checks.
* SSA is missing elimination of implicit nil checks.
* SSA is missing extra logging about why nil checks were removed.

I'm not sure how best to resolve these failures,
particularly in a world in which the two backends
will live side by side for some time.
For now, punt on the problem.

Change-Id: Ib2ca6824551671f92e0e1800b036f5ca0905e2a3
Reviewed-on: https://go-review.googlesource.com/13474
Reviewed-by: Keith Randall <khr@golang.org>
Author: Josh Bleecher Snyder, 2015-08-11 09:47:45 -07:00
parent 2af0648014
commit 463858e6ff
7 changed files with 73 additions and 7 deletions
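For context on the behavior this change implements, here is a minimal standalone example (written for illustration; it is not part of the commit or of the nil*.go tests): the load below gets an automatically inserted nil check, and with this change a failed check reaches a panic block and raises the usual runtime error instead of branching straight to the function's exit block.

package main

// Illustration only (not from the commit): the dereference in load gets an
// automatically inserted nil check; a failed check now panics with a runtime
// error rather than falling through to the function's exit block.
func load(p *int) int {
	return *p
}

func main() {
	defer func() {
		if r := recover(); r != nil {
			// Prints: runtime error: invalid memory address or nil pointer dereference
			println(r.(error).Error())
		}
	}()
	_ = load(nil)
}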


@@ -1499,20 +1499,27 @@ func canSSA(n *Node) bool {
 }
 // nilCheck generates nil pointer checking code.
-// Starts a new block on return.
+// Starts a new block on return, unless nil checks are disabled.
 // Used only for automatically inserted nil checks,
 // not for user code like 'x != nil'.
 func (s *state) nilCheck(ptr *ssa.Value) {
+	if Disable_checknil != 0 {
+		return
+	}
 	c := s.newValue1(ssa.OpIsNonNil, Types[TBOOL], ptr)
 	b := s.endBlock()
-	b.Kind = ssa.BlockIf
+	b.Kind = ssa.BlockIf // TODO: likeliness hint
 	b.Control = c
 	bNext := s.f.NewBlock(ssa.BlockPlain)
+	bPanic := s.f.NewBlock(ssa.BlockPlain)
 	addEdge(b, bNext)
-	addEdge(b, s.exit)
-	s.startBlock(bNext)
-	// TODO(khr): Don't go directly to exit. Go to a stub that calls panicmem first.
+	addEdge(b, bPanic)
+	addEdge(bPanic, s.exit)
+	s.startBlock(bPanic)
 	// TODO: implicit nil checks somehow?
+	s.vars[&memvar] = s.newValue2(ssa.OpPanicNilCheck, ssa.TypeMem, ptr, s.mem())
+	s.endBlock()
+	s.startBlock(bNext)
 }
 // boundsCheck generates bounds checking code. Checks if 0 <= idx < len, branches to exit if not.
@@ -2145,6 +2152,25 @@ func genValue(v *ssa.Value) {
 	case ssa.OpArg:
 		// memory arg needs no code
 		// TODO: check that only mem arg goes here.
+	case ssa.OpAMD64LoweredPanicNilCheck:
+		if Debug_checknil != 0 && v.Line > 1 { // v.Line==1 in generated wrappers
+			Warnl(int(v.Line), "generated nil check")
+		}
+		// Write to memory address 0. It doesn't matter what we write; use AX.
+		// XORL AX, AX; MOVL AX, (AX) is shorter than MOVL AX, 0.
+		// TODO: If we had the pointer (v.Args[0]) in a register r,
+		// we could use MOVL AX, (r) instead of having to zero AX.
+		// But it isn't worth loading r just to accomplish that.
+		p := Prog(x86.AXORL)
+		p.From.Type = obj.TYPE_REG
+		p.From.Reg = x86.REG_AX
+		p.To.Type = obj.TYPE_REG
+		p.To.Reg = x86.REG_AX
+		q := Prog(x86.AMOVL)
+		q.From.Type = obj.TYPE_REG
+		q.From.Reg = x86.REG_AX
+		q.To.Type = obj.TYPE_MEM
+		q.To.Reg = x86.REG_AX
 	case ssa.OpAMD64CALLstatic:
 		p := Prog(obj.ACALL)
 		p.To.Type = obj.TYPE_MEM
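As a rough sketch of why the two instructions above are enough (an illustration, not part of the commit): any store through address 0 faults, and the Go runtime surfaces the fault as a nil-pointer panic, which is exactly what the lowered op relies on.

package main

import "runtime"

// Sketch only (not from the commit): storing through a nil pointer is the
// Go-level analogue of the MOVL AX, (AX) emitted above; the hardware fault
// on the write to address 0 comes back as a runtime.Error.
func main() {
	defer func() {
		if err, ok := recover().(runtime.Error); ok {
			println(err.Error()) // runtime error: invalid memory address or nil pointer dereference
		}
	}()
	var p *int
	*p = 0 // write to address 0 faults; the runtime converts the fault into a panic
}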


@@ -216,6 +216,8 @@
 (IsNonNil p) -> (SETNE (TESTQ <TypeFlags> p p))
 (IsInBounds idx len) -> (SETB (CMPQ <TypeFlags> idx len))
+(PanicNilCheck ptr mem) -> (LoweredPanicNilCheck ptr mem)
 (Move [size] dst src mem) -> (REPMOVSB dst src (MOVQconst <config.Frontend().TypeUInt64()> [size]) mem)
 (Not x) -> (XORBconst [1] x)


@@ -287,6 +287,9 @@ func init() {
 		// Rewrites will convert this to (SETG (CMPQ b a)).
 		// InvertFlags is a pseudo-op which can't appear in assembly output.
 		{name: "InvertFlags"}, // reverse direction of arg0
+		// LoweredPanicNilCheck is a pseudo-op.
+		{name: "LoweredPanicNilCheck"},
 	}
 	var AMD64blocks = []blockData{


@@ -252,6 +252,8 @@ var genericOps = []opData{
 	{name: "IsNonNil"}, // arg0 != nil
 	{name: "IsInBounds"}, // 0 <= arg0 < arg1
+	{name: "PanicNilCheck"}, // trigger a dereference fault; arg0=nil ptr, arg1=mem
 	// Indexing operations
 	{name: "ArrayIndex"}, // arg0=array, arg1=index. Returns a[i]
 	{name: "PtrIndex"}, // arg0=ptr, arg1=index. Computes ptr+sizeof(*v.type)*index, where index is extended to ptrwidth type


@@ -17,9 +17,14 @@ func checkLower(f *Func) {
 	// rules may leave dead generic ops behind).
 	for _, b := range f.Blocks {
 		for _, v := range b.Values {
-			if opcodeTable[v.Op].generic && v.Op != OpSP && v.Op != OpSB && v.Op != OpArg && v.Op != OpCopy && v.Op != OpPhi {
-				f.Unimplementedf("%s not lowered", v.LongString())
+			if !opcodeTable[v.Op].generic {
+				continue // lowered
 			}
+			switch v.Op {
+			case OpSP, OpSB, OpArg, OpCopy, OpPhi:
+				continue // ok not to lower
+			}
+			f.Unimplementedf("%s not lowered", v.LongString())
 		}
 	}
 }


@@ -194,6 +194,7 @@ const (
 	OpAMD64CALLclosure
 	OpAMD64REPMOVSB
 	OpAMD64InvertFlags
+	OpAMD64LoweredPanicNilCheck
 	OpAdd8
 	OpAdd16
@@ -367,6 +368,7 @@ const (
 	OpTrunc64to32
 	OpIsNonNil
 	OpIsInBounds
+	OpPanicNilCheck
 	OpArrayIndex
 	OpPtrIndex
 	OpOffPtr
@@ -2113,6 +2115,10 @@ var opcodeTable = [...]opInfo{
 		name: "InvertFlags",
 		reg: regInfo{},
 	},
+	{
+		name: "LoweredPanicNilCheck",
+		reg: regInfo{},
+	},
 	{
 		name: "Add8",
@@ -2802,6 +2808,10 @@ var opcodeTable = [...]opInfo{
 		name: "IsInBounds",
 		generic: true,
 	},
+	{
+		name: "PanicNilCheck",
+		generic: true,
+	},
 	{
 		name: "ArrayIndex",
 		generic: true,


@@ -4836,6 +4836,24 @@ func rewriteValueAMD64(v *Value, config *Config) bool {
 		goto end6f8a8c559a167d1f0a5901d09a1fb248
 	end6f8a8c559a167d1f0a5901d09a1fb248:
 		;
+	case OpPanicNilCheck:
+		// match: (PanicNilCheck ptr mem)
+		// cond:
+		// result: (LoweredPanicNilCheck ptr mem)
+		{
+			ptr := v.Args[0]
+			mem := v.Args[1]
+			v.Op = OpAMD64LoweredPanicNilCheck
+			v.AuxInt = 0
+			v.Aux = nil
+			v.resetArgs()
+			v.AddArg(ptr)
+			v.AddArg(mem)
+			return true
+		}
+		goto enda02b1ad5a6f929b782190145f2c8628b
+	enda02b1ad5a6f929b782190145f2c8628b:
+		;
 	case OpRsh16Ux16:
 		// match: (Rsh16Ux16 <t> x y)
 		// cond: