cmd/compile/internal/ssa: inline memmove with known size

Replace calls to memmove with known (constant) size, with OpMove.
Do it only if it is safe from an aliasing point of view.
Helps with code like this:

append(buf,"const str"...)

In strconv this provides nice benefit:
Quote-6                                   731ns ± 2%   647ns ± 3%  -11.41%  (p=0.000 n=10+10)
QuoteRune-6                               117ns ± 5%   111ns ± 1%   -4.54%  (p=0.000 n=10+10)
AppendQuote-6                             475ns ± 0%   396ns ± 0%  -16.59%  (p=0.000 n=9+10)
AppendQuoteRune-6                        32.0ns ± 0%  27.4ns ± 0%  -14.41%  (p=0.000 n=8+9)

Change-Id: I7704f5c51b46aed2d8f033de74c75140fc35036c
Reviewed-on: https://go-review.googlesource.com/54394
Run-TryBot: Ilya Tocar <ilya.tocar@intel.com>
Reviewed-by: Keith Randall <khr@golang.org>
TryBot-Result: Gobot Gobot <gobot@golang.org>
This commit is contained in:
Ilya Tocar 2017-08-09 14:00:38 -05:00
parent ad3742f4ac
commit f3884680fc
6 changed files with 189 additions and 2 deletions

View file

@@ -409,6 +409,8 @@ func rewriteValuegeneric(v *Value) bool {
return rewriteValuegeneric_OpSlicemask_0(v)
case OpSqrt:
return rewriteValuegeneric_OpSqrt_0(v)
case OpStaticCall:
return rewriteValuegeneric_OpStaticCall_0(v)
case OpStore:
return rewriteValuegeneric_OpStore_0(v) || rewriteValuegeneric_OpStore_10(v)
case OpStringLen:
@@ -23732,6 +23734,93 @@ func rewriteValuegeneric_OpSqrt_0(v *Value) bool {
}
return false
}
func rewriteValuegeneric_OpStaticCall_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (StaticCall {sym} s1:(Store _ (Const64 [sz]) s2:(Store _ src s3:(Store {t} _ dst mem))))
	// match: (StaticCall {sym} s1:(Store _ (Const32 [sz]) s2:(Store _ src s3:(Store {t} _ dst mem))))
	// cond: isSameSym(sym,"runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmoveSize(sz,config)
	// result: (Move {t.(*types.Type).Elem()} [sz] dst src mem)
	//
	// The two patterns differ only in the width of the constant that
	// carries the memmove size, so try each constant op in turn.
	for _, constOp := range [...]Op{OpConst64, OpConst32} {
		sym := v.Aux
		// s1 is the store that passes the size argument to the call.
		s1 := v.Args[0]
		if s1.Op != OpStore {
			continue
		}
		_ = s1.Args[2]
		sizeArg := s1.Args[1]
		if sizeArg.Op != constOp {
			continue
		}
		sz := sizeArg.AuxInt
		// s2 is the store that passes the source pointer.
		s2 := s1.Args[2]
		if s2.Op != OpStore {
			continue
		}
		_ = s2.Args[2]
		src := s2.Args[1]
		// s3 is the store that passes the destination pointer; its Aux
		// carries the pointer type, whose element type labels the Move.
		s3 := s2.Args[2]
		if s3.Op != OpStore {
			continue
		}
		t := s3.Aux
		_ = s3.Args[2]
		dst := s3.Args[1]
		mem := s3.Args[2]
		// Only rewrite a memmove whose argument stores have no other
		// uses and whose size is small enough to inline as a Move.
		if !(isSameSym(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmoveSize(sz, config)) {
			continue
		}
		v.reset(OpMove)
		v.AuxInt = sz
		v.Aux = t.(*types.Type).Elem()
		v.AddArg(dst)
		v.AddArg(src)
		v.AddArg(mem)
		return true
	}
	return false
}
func rewriteValuegeneric_OpStore_0(v *Value) bool {
b := v.Block
_ = b