diff --git a/src/cmd/compile/internal/ssa/prove.go b/src/cmd/compile/internal/ssa/prove.go
index dcdb48180c9..c5387802a79 100644
--- a/src/cmd/compile/internal/ssa/prove.go
+++ b/src/cmd/compile/internal/ssa/prove.go
@@ -1296,6 +1296,13 @@ func removeBranch(b *Block, branch branch) {
 
 // isNonNegative reports whether v is known to be greater or equal to zero.
 func isNonNegative(v *Value) bool {
+	if !v.Type.IsInteger() {
+		panic("isNonNegative bad type")
+	}
+	if !v.Type.IsSigned() {
+		return true
+	}
+
 	switch v.Op {
 	case OpConst64:
 		return v.AuxInt >= 0
@@ -1303,16 +1310,37 @@ func isNonNegative(v *Value) bool {
 	case OpConst32:
 		return int32(v.AuxInt) >= 0
 
+	case OpConst16:
+		return int16(v.AuxInt) >= 0
+
+	case OpConst8:
+		return int8(v.AuxInt) >= 0
+
 	case OpStringLen, OpSliceLen, OpSliceCap,
-		OpZeroExt8to64, OpZeroExt16to64, OpZeroExt32to64:
+		OpZeroExt8to64, OpZeroExt16to64, OpZeroExt32to64,
+		OpZeroExt8to32, OpZeroExt16to32, OpZeroExt8to16,
+		OpCtz64, OpCtz32, OpCtz16, OpCtz8:
 		return true
 
-	case OpRsh64Ux64:
+	case OpRsh64Ux64, OpRsh32Ux64:
 		by := v.Args[1]
 		return by.Op == OpConst64 && by.AuxInt > 0
 
-	case OpRsh64x64:
+	case OpRsh64x64, OpRsh32x64, OpRsh8x64, OpRsh16x64, OpRsh32x32, OpRsh64x32,
+		OpSignExt32to64, OpSignExt16to64, OpSignExt8to64, OpSignExt16to32, OpSignExt8to32:
 		return isNonNegative(v.Args[0])
+
+	case OpAnd64, OpAnd32, OpAnd16, OpAnd8:
+		return isNonNegative(v.Args[0]) || isNonNegative(v.Args[1])
+
+	case OpMod64, OpMod32, OpMod16, OpMod8,
+		OpDiv64, OpDiv32, OpDiv16, OpDiv8,
+		OpOr64, OpOr32, OpOr16, OpOr8,
+		OpXor64, OpXor32, OpXor16, OpXor8:
+		return isNonNegative(v.Args[0]) && isNonNegative(v.Args[1])
+
+		// We could handle OpPhi here, but the improvements from doing
+		// so are very minor, and it is neither simple nor cheap.
 	}
 	return false
 }
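
For context, the new cases reduce to a few sign-bit rules. The following is a minimal stand-alone sketch of that reasoning, written against a hypothetical toy expression type; expr, the op constants, nonNegative, and main are illustrative only and are not the compiler's *ssa.Value API.

// nonnegsketch is a hypothetical model of the rules added in the diff above.
package main

import "fmt"

type op int

const (
	opConst     op = iota // integer constant
	opZeroExt             // zero extension to a wider type
	opCtz                 // count trailing zeros
	opRshSigned           // arithmetic (signed) right shift
	opAnd
	opOr
	opXor
	opDiv
	opMod
)

type expr struct {
	op       op
	unsigned bool    // static type is unsigned
	c        int64   // value for opConst
	args     []*expr // operands
}

// nonNegative mirrors the decision rules in the patched isNonNegative.
func nonNegative(e *expr) bool {
	if e.unsigned {
		return true // any value of unsigned type is >= 0 by definition
	}
	switch e.op {
	case opConst:
		return e.c >= 0
	case opZeroExt, opCtz:
		// Zero extension inserts 0 bits at the top; a trailing-zero count
		// lies in [0, width]. Neither can be negative.
		return true
	case opRshSigned:
		// Arithmetic shift preserves the sign bit of the first operand.
		return nonNegative(e.args[0])
	case opAnd:
		// AND only clears bits, so a single non-negative operand already
		// forces the sign bit of the result to 0.
		return nonNegative(e.args[0]) || nonNegative(e.args[1])
	case opMod, opDiv, opOr, opXor:
		// OR/XOR of two clear sign bits stays clear; Go's / and % truncate
		// toward zero, so two non-negative operands give a non-negative result.
		return nonNegative(e.args[0]) && nonNegative(e.args[1])
	}
	return false
}

func main() {
	x := &expr{op: opConst, c: -5} // a negative signed constant
	masked := &expr{op: opAnd, args: []*expr{x, {op: opConst, c: 0xff}}}

	fmt.Println(nonNegative(x))      // false: -5 is negative
	fmt.Println(nonNegative(masked)) // true: x & 0xff has a clear sign bit
}

The sketch only models the sign reasoning; the real function additionally threads the recursion through sign extensions and the shift-by-positive-constant forms, as the diff shows.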