// Excerpt from src/cmd/compile/internal/ssa/rewriteARM64.go, lines 7635-9146.
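// rewriteValueARM64_OpARM64MOVBstore is machine-generated from the MOVBstore
// rules in ARM64.rules. Each rule is tried in order; the function rewrites v
// in place and returns true as soon as one rule matches, and returns false if
// none apply.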
func rewriteValueARM64_OpARM64MOVBstore(v *Value) bool {
v_2 := v.Args[2]
v_1 := v.Args[1]
v_0 := v.Args[0]
b := v.Block
config := b.Func.Config
// match: (MOVBstore [off1] {sym} (ADDconst [off2] ptr) val mem)
// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
// result: (MOVBstore [off1+int32(off2)] {sym} ptr val mem)
for {
off1 := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
if v_0.Op != OpARM64ADDconst {
break
}
off2 := auxIntToInt64(v_0.AuxInt)
ptr := v_0.Args[0]
val := v_1
mem := v_2
if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
break
}
v.reset(OpARM64MOVBstore)
v.AuxInt = int32ToAuxInt(off1 + int32(off2))
v.Aux = symToAux(sym)
v.AddArg3(ptr, val, mem)
return true
}
// match: (MOVBstore [off] {sym} (ADD ptr idx) val mem)
// cond: off == 0 && sym == nil
// result: (MOVBstoreidx ptr idx val mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
if v_0.Op != OpARM64ADD {
break
}
idx := v_0.Args[1]
ptr := v_0.Args[0]
val := v_1
mem := v_2
if !(off == 0 && sym == nil) {
break
}
v.reset(OpARM64MOVBstoreidx)
v.AddArg4(ptr, idx, val, mem)
return true
}
// match: (MOVBstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
// result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
for {
off1 := auxIntToInt32(v.AuxInt)
sym1 := auxToSym(v.Aux)
if v_0.Op != OpARM64MOVDaddr {
break
}
off2 := auxIntToInt32(v_0.AuxInt)
sym2 := auxToSym(v_0.Aux)
ptr := v_0.Args[0]
val := v_1
mem := v_2
if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
break
}
v.reset(OpARM64MOVBstore)
v.AuxInt = int32ToAuxInt(off1 + off2)
v.Aux = symToAux(mergeSym(sym1, sym2))
v.AddArg3(ptr, val, mem)
return true
}
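// The three rules above fold address arithmetic into the store's addressing
// mode: a constant offset from ADDconst is absorbed into AuxInt, a bare
// register+register ADD becomes the indexed form MOVBstoreidx, and a MOVDaddr
// base is merged into the symbol/offset pair. As a hand-written illustration
// (not taken from the rules file), a store such as
//
//	(MOVBstore [4] (ADDconst [8] p) val mem)
//
// is expected to become (MOVBstore [12] p val mem), provided the combined
// offset fits in 32 bits and p is not SB when building in shared mode.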
// match: (MOVBstore [off] {sym} ptr (MOVDconst [0]) mem)
// result: (MOVBstorezero [off] {sym} ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
ptr := v_0
if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
break
}
mem := v_2
v.reset(OpARM64MOVBstorezero)
v.AuxInt = int32ToAuxInt(off)
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
}
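// The rule above lowers a store of the constant 0 to MOVBstorezero, which on
// arm64 writes from the zero register ZR and avoids materializing 0 in a
// general-purpose register.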
// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
// result: (MOVBstore [off] {sym} ptr x mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
ptr := v_0
if v_1.Op != OpARM64MOVBreg {
break
}
x := v_1.Args[0]
mem := v_2
v.reset(OpARM64MOVBstore)
v.AuxInt = int32ToAuxInt(off)
v.Aux = symToAux(sym)
v.AddArg3(ptr, x, mem)
return true
}
// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
// result: (MOVBstore [off] {sym} ptr x mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
ptr := v_0
if v_1.Op != OpARM64MOVBUreg {
break
}
x := v_1.Args[0]
mem := v_2
v.reset(OpARM64MOVBstore)
v.AuxInt = int32ToAuxInt(off)
v.Aux = symToAux(sym)
v.AddArg3(ptr, x, mem)
return true
}
// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
// result: (MOVBstore [off] {sym} ptr x mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
ptr := v_0
if v_1.Op != OpARM64MOVHreg {
break
}
x := v_1.Args[0]
mem := v_2
v.reset(OpARM64MOVBstore)
v.AuxInt = int32ToAuxInt(off)
v.Aux = symToAux(sym)
v.AddArg3(ptr, x, mem)
return true
}
// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
// result: (MOVBstore [off] {sym} ptr x mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
ptr := v_0
if v_1.Op != OpARM64MOVHUreg {
break
}
x := v_1.Args[0]
mem := v_2
v.reset(OpARM64MOVBstore)
v.AuxInt = int32ToAuxInt(off)
v.Aux = symToAux(sym)
v.AddArg3(ptr, x, mem)
return true
}
// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
// result: (MOVBstore [off] {sym} ptr x mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
ptr := v_0
if v_1.Op != OpARM64MOVWreg {
break
}
x := v_1.Args[0]
mem := v_2
v.reset(OpARM64MOVBstore)
v.AuxInt = int32ToAuxInt(off)
v.Aux = symToAux(sym)
v.AddArg3(ptr, x, mem)
return true
}
// match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
// result: (MOVBstore [off] {sym} ptr x mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
ptr := v_0
if v_1.Op != OpARM64MOVWUreg {
break
}
x := v_1.Args[0]
mem := v_2
v.reset(OpARM64MOVBstore)
v.AuxInt = int32ToAuxInt(off)
v.Aux = symToAux(sym)
v.AddArg3(ptr, x, mem)
return true
}
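// The six rules above drop a sign- or zero-extension (MOVBreg, MOVBUreg,
// MOVHreg, MOVHUreg, MOVWreg, MOVWUreg) of the stored value: MOVBstore writes
// only the low 8 bits, so extending the source first cannot change the byte
// that reaches memory. For example, per the last rule,
//
//	(MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
//	  => (MOVBstore [off] {sym} ptr x mem)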
// match: (MOVBstore [i] {s} ptr0 (SRLconst [8] w) x:(MOVBstore [i-1] {s} ptr1 w mem))
// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
// result: (MOVHstore [i-1] {s} ptr0 w mem)
for {
i := auxIntToInt32(v.AuxInt)
s := auxToSym(v.Aux)
ptr0 := v_0
if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != 8 {
break
}
w := v_1.Args[0]
x := v_2
if x.Op != OpARM64MOVBstore || auxIntToInt32(x.AuxInt) != i-1 || auxToSym(x.Aux) != s {
break
}
mem := x.Args[2]
ptr1 := x.Args[0]
if w != x.Args[1] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
break
}
v.reset(OpARM64MOVHstore)
v.AuxInt = int32ToAuxInt(i - 1)
v.Aux = symToAux(s)
v.AddArg3(ptr0, w, mem)
return true
}
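// This rule and the variants that follow combine two adjacent byte stores of
// w and w>>8 into a single halfword store. As an illustrative sketch (not a
// guarantee for every surrounding code shape), little-endian Go source of the
// form below is the pattern that is expected to collapse into one MOVHstore:
//
//	b[0] = byte(w)      // the inner MOVBstore [i-1] of w
//	b[1] = byte(w >> 8) // the outer MOVBstore [i] of (SRLconst [8] w)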
// match: (MOVBstore [1] {s} (ADD ptr0 idx0) (SRLconst [8] w) x:(MOVBstoreidx ptr1 idx1 w mem))
// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
// result: (MOVHstoreidx ptr1 idx1 w mem)
for {
if auxIntToInt32(v.AuxInt) != 1 {
break
}
s := auxToSym(v.Aux)
if v_0.Op != OpARM64ADD {
break
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
v_0_1 := v_0.Args[1]
for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
ptr0 := v_0_0
idx0 := v_0_1
if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != 8 {
continue
}
w := v_1.Args[0]
x := v_2
if x.Op != OpARM64MOVBstoreidx {
continue
}
mem := x.Args[3]
ptr1 := x.Args[0]
idx1 := x.Args[1]
if w != x.Args[2] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
continue
}
v.reset(OpARM64MOVHstoreidx)
v.AddArg4(ptr1, idx1, w, mem)
return true
}
break
}
// match: (MOVBstore [i] {s} ptr0 (UBFX [armBFAuxInt(8, 8)] w) x:(MOVBstore [i-1] {s} ptr1 w mem))
// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
// result: (MOVHstore [i-1] {s} ptr0 w mem)
for {
i := auxIntToInt32(v.AuxInt)
s := auxToSym(v.Aux)
ptr0 := v_0
if v_1.Op != OpARM64UBFX || auxIntToArm64BitField(v_1.AuxInt) != armBFAuxInt(8, 8) {
break
}
w := v_1.Args[0]
x := v_2
if x.Op != OpARM64MOVBstore || auxIntToInt32(x.AuxInt) != i-1 || auxToSym(x.Aux) != s {
break
}
mem := x.Args[2]
ptr1 := x.Args[0]
if w != x.Args[1] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
break
}
v.reset(OpARM64MOVHstore)
v.AuxInt = int32ToAuxInt(i - 1)
v.Aux = symToAux(s)
v.AddArg3(ptr0, w, mem)
return true
}
// match: (MOVBstore [1] {s} (ADD ptr0 idx0) (UBFX [armBFAuxInt(8, 8)] w) x:(MOVBstoreidx ptr1 idx1 w mem))
// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
// result: (MOVHstoreidx ptr1 idx1 w mem)
for {
if auxIntToInt32(v.AuxInt) != 1 {
break
}
s := auxToSym(v.Aux)
if v_0.Op != OpARM64ADD {
break
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
v_0_1 := v_0.Args[1]
for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
ptr0 := v_0_0
idx0 := v_0_1
if v_1.Op != OpARM64UBFX || auxIntToArm64BitField(v_1.AuxInt) != armBFAuxInt(8, 8) {
continue
}
w := v_1.Args[0]
x := v_2
if x.Op != OpARM64MOVBstoreidx {
continue
}
mem := x.Args[3]
ptr1 := x.Args[0]
idx1 := x.Args[1]
if w != x.Args[2] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
continue
}
v.reset(OpARM64MOVHstoreidx)
v.AddArg4(ptr1, idx1, w, mem)
return true
}
break
}
// match: (MOVBstore [i] {s} ptr0 (UBFX [armBFAuxInt(8, 24)] w) x:(MOVBstore [i-1] {s} ptr1 w mem))
// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
// result: (MOVHstore [i-1] {s} ptr0 w mem)
for {
i := auxIntToInt32(v.AuxInt)
s := auxToSym(v.Aux)
ptr0 := v_0
if v_1.Op != OpARM64UBFX || auxIntToArm64BitField(v_1.AuxInt) != armBFAuxInt(8, 24) {
break
}
w := v_1.Args[0]
x := v_2
if x.Op != OpARM64MOVBstore || auxIntToInt32(x.AuxInt) != i-1 || auxToSym(x.Aux) != s {
break
}
mem := x.Args[2]
ptr1 := x.Args[0]
if w != x.Args[1] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
break
}
v.reset(OpARM64MOVHstore)
v.AuxInt = int32ToAuxInt(i - 1)
v.Aux = symToAux(s)
v.AddArg3(ptr0, w, mem)
return true
}
// match: (MOVBstore [1] {s} (ADD ptr0 idx0) (UBFX [armBFAuxInt(8, 24)] w) x:(MOVBstoreidx ptr1 idx1 w mem))
// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
// result: (MOVHstoreidx ptr1 idx1 w mem)
for {
if auxIntToInt32(v.AuxInt) != 1 {
break
}
s := auxToSym(v.Aux)
if v_0.Op != OpARM64ADD {
break
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
v_0_1 := v_0.Args[1]
for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
ptr0 := v_0_0
idx0 := v_0_1
if v_1.Op != OpARM64UBFX || auxIntToArm64BitField(v_1.AuxInt) != armBFAuxInt(8, 24) {
continue
}
w := v_1.Args[0]
x := v_2
if x.Op != OpARM64MOVBstoreidx {
continue
}
mem := x.Args[3]
ptr1 := x.Args[0]
idx1 := x.Args[1]
if w != x.Args[2] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
continue
}
v.reset(OpARM64MOVHstoreidx)
v.AddArg4(ptr1, idx1, w, mem)
return true
}
break
}
// match: (MOVBstore [i] {s} ptr0 (SRLconst [8] (MOVDreg w)) x:(MOVBstore [i-1] {s} ptr1 w mem))
// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
// result: (MOVHstore [i-1] {s} ptr0 w mem)
for {
i := auxIntToInt32(v.AuxInt)
s := auxToSym(v.Aux)
ptr0 := v_0
if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != 8 {
break
}
v_1_0 := v_1.Args[0]
if v_1_0.Op != OpARM64MOVDreg {
break
}
w := v_1_0.Args[0]
x := v_2
if x.Op != OpARM64MOVBstore || auxIntToInt32(x.AuxInt) != i-1 || auxToSym(x.Aux) != s {
break
}
mem := x.Args[2]
ptr1 := x.Args[0]
if w != x.Args[1] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
break
}
v.reset(OpARM64MOVHstore)
v.AuxInt = int32ToAuxInt(i - 1)
v.Aux = symToAux(s)
v.AddArg3(ptr0, w, mem)
return true
}
// match: (MOVBstore [1] {s} (ADD ptr0 idx0) (SRLconst [8] (MOVDreg w)) x:(MOVBstoreidx ptr1 idx1 w mem))
// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
// result: (MOVHstoreidx ptr1 idx1 w mem)
for {
if auxIntToInt32(v.AuxInt) != 1 {
break
}
s := auxToSym(v.Aux)
if v_0.Op != OpARM64ADD {
break
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
v_0_1 := v_0.Args[1]
for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
ptr0 := v_0_0
idx0 := v_0_1
if v_1.Op != OpARM64SRLconst || auxIntToInt64(v_1.AuxInt) != 8 {
continue
}
v_1_0 := v_1.Args[0]
if v_1_0.Op != OpARM64MOVDreg {
continue
}
w := v_1_0.Args[0]
x := v_2
if x.Op != OpARM64MOVBstoreidx {
continue
}
mem := x.Args[3]
ptr1 := x.Args[0]
idx1 := x.Args[1]
if w != x.Args[2] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
continue
}
v.reset(OpARM64MOVHstoreidx)
v.AddArg4(ptr1, idx1, w, mem)
return true
}
break
}
// match: (MOVBstore [i] {s} ptr0 (SRLconst [j] w) x:(MOVBstore [i-1] {s} ptr1 w0:(SRLconst [j-8] w) mem))
// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
// result: (MOVHstore [i-1] {s} ptr0 w0 mem)
for {
i := auxIntToInt32(v.AuxInt)
s := auxToSym(v.Aux)
ptr0 := v_0
if v_1.Op != OpARM64SRLconst {
break
}
j := auxIntToInt64(v_1.AuxInt)
w := v_1.Args[0]
x := v_2
if x.Op != OpARM64MOVBstore || auxIntToInt32(x.AuxInt) != i-1 || auxToSym(x.Aux) != s {
break
}
mem := x.Args[2]
ptr1 := x.Args[0]
w0 := x.Args[1]
if w0.Op != OpARM64SRLconst || auxIntToInt64(w0.AuxInt) != j-8 || w != w0.Args[0] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
break
}
v.reset(OpARM64MOVHstore)
v.AuxInt = int32ToAuxInt(i - 1)
v.Aux = symToAux(s)
v.AddArg3(ptr0, w0, mem)
return true
}
// match: (MOVBstore [1] {s} (ADD ptr0 idx0) (SRLconst [j] w) x:(MOVBstoreidx ptr1 idx1 w0:(SRLconst [j-8] w) mem))
// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
// result: (MOVHstoreidx ptr1 idx1 w0 mem)
for {
if auxIntToInt32(v.AuxInt) != 1 {
break
}
s := auxToSym(v.Aux)
if v_0.Op != OpARM64ADD {
break
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
v_0_1 := v_0.Args[1]
for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
ptr0 := v_0_0
idx0 := v_0_1
if v_1.Op != OpARM64SRLconst {
continue
}
j := auxIntToInt64(v_1.AuxInt)
w := v_1.Args[0]
x := v_2
if x.Op != OpARM64MOVBstoreidx {
continue
}
mem := x.Args[3]
ptr1 := x.Args[0]
idx1 := x.Args[1]
w0 := x.Args[2]
if w0.Op != OpARM64SRLconst || auxIntToInt64(w0.AuxInt) != j-8 || w != w0.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
continue
}
v.reset(OpARM64MOVHstoreidx)
v.AddArg4(ptr1, idx1, w0, mem)
return true
}
break
}
// match: (MOVBstore [i] {s} ptr0 (UBFX [bfc] w) x:(MOVBstore [i-1] {s} ptr1 w0:(UBFX [bfc2] w) mem))
// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && bfc.getARM64BFwidth() == 32 - bfc.getARM64BFlsb() && bfc2.getARM64BFwidth() == 32 - bfc2.getARM64BFlsb() && bfc2.getARM64BFlsb() == bfc.getARM64BFlsb() - 8 && clobber(x)
// result: (MOVHstore [i-1] {s} ptr0 w0 mem)
for {
i := auxIntToInt32(v.AuxInt)
s := auxToSym(v.Aux)
ptr0 := v_0
if v_1.Op != OpARM64UBFX {
break
}
bfc := auxIntToArm64BitField(v_1.AuxInt)
w := v_1.Args[0]
x := v_2
if x.Op != OpARM64MOVBstore || auxIntToInt32(x.AuxInt) != i-1 || auxToSym(x.Aux) != s {
break
}
mem := x.Args[2]
ptr1 := x.Args[0]
w0 := x.Args[1]
if w0.Op != OpARM64UBFX {
break
}
bfc2 := auxIntToArm64BitField(w0.AuxInt)
if w != w0.Args[0] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && bfc.getARM64BFwidth() == 32-bfc.getARM64BFlsb() && bfc2.getARM64BFwidth() == 32-bfc2.getARM64BFlsb() && bfc2.getARM64BFlsb() == bfc.getARM64BFlsb()-8 && clobber(x)) {
break
}
v.reset(OpARM64MOVHstore)
v.AuxInt = int32ToAuxInt(i - 1)
v.Aux = symToAux(s)
v.AddArg3(ptr0, w0, mem)
return true
}
// match: (MOVBstore [1] {s} (ADD ptr0 idx0) (UBFX [bfc] w) x:(MOVBstoreidx ptr1 idx1 w0:(UBFX [bfc2] w) mem))
// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && bfc.getARM64BFwidth() == 32 - bfc.getARM64BFlsb() && bfc2.getARM64BFwidth() == 32 - bfc2.getARM64BFlsb() && bfc2.getARM64BFlsb() == bfc.getARM64BFlsb() - 8 && clobber(x)
// result: (MOVHstoreidx ptr1 idx1 w0 mem)
for {
if auxIntToInt32(v.AuxInt) != 1 {
break
}
s := auxToSym(v.Aux)
if v_0.Op != OpARM64ADD {
break
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
v_0_1 := v_0.Args[1]
for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
ptr0 := v_0_0
idx0 := v_0_1
if v_1.Op != OpARM64UBFX {
continue
}
bfc := auxIntToArm64BitField(v_1.AuxInt)
w := v_1.Args[0]
x := v_2
if x.Op != OpARM64MOVBstoreidx {
continue
}
mem := x.Args[3]
ptr1 := x.Args[0]
idx1 := x.Args[1]
w0 := x.Args[2]
if w0.Op != OpARM64UBFX {
continue
}
bfc2 := auxIntToArm64BitField(w0.AuxInt)
if w != w0.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && bfc.getARM64BFwidth() == 32-bfc.getARM64BFlsb() && bfc2.getARM64BFwidth() == 32-bfc2.getARM64BFlsb() && bfc2.getARM64BFlsb() == bfc.getARM64BFlsb()-8 && clobber(x)) {
continue
}
v.reset(OpARM64MOVHstoreidx)
v.AddArg4(ptr1, idx1, w0, mem)
return true
}
break
}
// match: (MOVBstore [i] {s} ptr0 (SRLconst [j] (MOVDreg w)) x:(MOVBstore [i-1] {s} ptr1 w0:(SRLconst [j-8] (MOVDreg w)) mem))
// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
// result: (MOVHstore [i-1] {s} ptr0 w0 mem)
for {
i := auxIntToInt32(v.AuxInt)
s := auxToSym(v.Aux)
ptr0 := v_0
if v_1.Op != OpARM64SRLconst {
break
}
j := auxIntToInt64(v_1.AuxInt)
v_1_0 := v_1.Args[0]
if v_1_0.Op != OpARM64MOVDreg {
break
}
w := v_1_0.Args[0]
x := v_2
if x.Op != OpARM64MOVBstore || auxIntToInt32(x.AuxInt) != i-1 || auxToSym(x.Aux) != s {
break
}
mem := x.Args[2]
ptr1 := x.Args[0]
w0 := x.Args[1]
if w0.Op != OpARM64SRLconst || auxIntToInt64(w0.AuxInt) != j-8 {
break
}
w0_0 := w0.Args[0]
if w0_0.Op != OpARM64MOVDreg || w != w0_0.Args[0] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
break
}
v.reset(OpARM64MOVHstore)
v.AuxInt = int32ToAuxInt(i - 1)
v.Aux = symToAux(s)
v.AddArg3(ptr0, w0, mem)
return true
}
// match: (MOVBstore [1] {s} (ADD ptr0 idx0) (SRLconst [j] (MOVDreg w)) x:(MOVBstoreidx ptr1 idx1 w0:(SRLconst [j-8] (MOVDreg w)) mem))
// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
// result: (MOVHstoreidx ptr1 idx1 w0 mem)
for {
if auxIntToInt32(v.AuxInt) != 1 {
break
}
s := auxToSym(v.Aux)
if v_0.Op != OpARM64ADD {
break
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
v_0_1 := v_0.Args[1]
for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
ptr0 := v_0_0
idx0 := v_0_1
if v_1.Op != OpARM64SRLconst {
continue
}
j := auxIntToInt64(v_1.AuxInt)
v_1_0 := v_1.Args[0]
if v_1_0.Op != OpARM64MOVDreg {
continue
}
w := v_1_0.Args[0]
x := v_2
if x.Op != OpARM64MOVBstoreidx {
continue
}
mem := x.Args[3]
ptr1 := x.Args[0]
idx1 := x.Args[1]
w0 := x.Args[2]
if w0.Op != OpARM64SRLconst || auxIntToInt64(w0.AuxInt) != j-8 {
continue
}
w0_0 := w0.Args[0]
if w0_0.Op != OpARM64MOVDreg || w != w0_0.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
continue
}
v.reset(OpARM64MOVHstoreidx)
v.AddArg4(ptr1, idx1, w0, mem)
return true
}
break
}
// match: (MOVBstore [i] {s} ptr w x0:(MOVBstore [i-1] {s} ptr (SRLconst [8] w) x1:(MOVBstore [i-2] {s} ptr (SRLconst [16] w) x2:(MOVBstore [i-3] {s} ptr (SRLconst [24] w) x3:(MOVBstore [i-4] {s} ptr (SRLconst [32] w) x4:(MOVBstore [i-5] {s} ptr (SRLconst [40] w) x5:(MOVBstore [i-6] {s} ptr (SRLconst [48] w) x6:(MOVBstore [i-7] {s} ptr (SRLconst [56] w) mem))))))))
// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && clobber(x0, x1, x2, x3, x4, x5, x6)
// result: (MOVDstore [i-7] {s} ptr (REV <w.Type> w) mem)
for {
i := auxIntToInt32(v.AuxInt)
s := auxToSym(v.Aux)
ptr := v_0
w := v_1
x0 := v_2
if x0.Op != OpARM64MOVBstore || auxIntToInt32(x0.AuxInt) != i-1 || auxToSym(x0.Aux) != s {
break
}
_ = x0.Args[2]
if ptr != x0.Args[0] {
break
}
x0_1 := x0.Args[1]
if x0_1.Op != OpARM64SRLconst || auxIntToInt64(x0_1.AuxInt) != 8 || w != x0_1.Args[0] {
break
}
x1 := x0.Args[2]
if x1.Op != OpARM64MOVBstore || auxIntToInt32(x1.AuxInt) != i-2 || auxToSym(x1.Aux) != s {
break
}
_ = x1.Args[2]
if ptr != x1.Args[0] {
break
}
x1_1 := x1.Args[1]
if x1_1.Op != OpARM64SRLconst || auxIntToInt64(x1_1.AuxInt) != 16 || w != x1_1.Args[0] {
break
}
x2 := x1.Args[2]
if x2.Op != OpARM64MOVBstore || auxIntToInt32(x2.AuxInt) != i-3 || auxToSym(x2.Aux) != s {
break
}
_ = x2.Args[2]
if ptr != x2.Args[0] {
break
}
x2_1 := x2.Args[1]
if x2_1.Op != OpARM64SRLconst || auxIntToInt64(x2_1.AuxInt) != 24 || w != x2_1.Args[0] {
break
}
x3 := x2.Args[2]
if x3.Op != OpARM64MOVBstore || auxIntToInt32(x3.AuxInt) != i-4 || auxToSym(x3.Aux) != s {
break
}
_ = x3.Args[2]
if ptr != x3.Args[0] {
break
}
x3_1 := x3.Args[1]
if x3_1.Op != OpARM64SRLconst || auxIntToInt64(x3_1.AuxInt) != 32 || w != x3_1.Args[0] {
break
}
x4 := x3.Args[2]
if x4.Op != OpARM64MOVBstore || auxIntToInt32(x4.AuxInt) != i-5 || auxToSym(x4.Aux) != s {
break
}
_ = x4.Args[2]
if ptr != x4.Args[0] {
break
}
x4_1 := x4.Args[1]
if x4_1.Op != OpARM64SRLconst || auxIntToInt64(x4_1.AuxInt) != 40 || w != x4_1.Args[0] {
break
}
x5 := x4.Args[2]
if x5.Op != OpARM64MOVBstore || auxIntToInt32(x5.AuxInt) != i-6 || auxToSym(x5.Aux) != s {
break
}
_ = x5.Args[2]
if ptr != x5.Args[0] {
break
}
x5_1 := x5.Args[1]
if x5_1.Op != OpARM64SRLconst || auxIntToInt64(x5_1.AuxInt) != 48 || w != x5_1.Args[0] {
break
}
x6 := x5.Args[2]
if x6.Op != OpARM64MOVBstore || auxIntToInt32(x6.AuxInt) != i-7 || auxToSym(x6.Aux) != s {
break
}
mem := x6.Args[2]
if ptr != x6.Args[0] {
break
}
x6_1 := x6.Args[1]
if x6_1.Op != OpARM64SRLconst || auxIntToInt64(x6_1.AuxInt) != 56 || w != x6_1.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && clobber(x0, x1, x2, x3, x4, x5, x6)) {
break
}
v.reset(OpARM64MOVDstore)
v.AuxInt = int32ToAuxInt(i - 7)
v.Aux = symToAux(s)
v0 := b.NewValue0(x6.Pos, OpARM64REV, w.Type)
v0.AddArg(w)
v.AddArg3(ptr, v0, mem)
return true
}
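// The rule above merges eight consecutive byte stores of w, w>>8, ..., w>>56,
// written from the highest offset down to the lowest, into a single MOVDstore
// of (REV <w.Type> w): the bytes land in big-endian order, so the value is
// byte-reversed before the doubleword store. This is the shape produced by
// big-endian encoders in the style of binary.BigEndian.PutUint64, which are
// therefore expected to compile to REV plus one MOVD store on arm64 (an
// illustration of typical codegen, not a statement taken from the rules file).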
// match: (MOVBstore [7] {s} p w x0:(MOVBstore [6] {s} p (SRLconst [8] w) x1:(MOVBstore [5] {s} p (SRLconst [16] w) x2:(MOVBstore [4] {s} p (SRLconst [24] w) x3:(MOVBstore [3] {s} p (SRLconst [32] w) x4:(MOVBstore [2] {s} p (SRLconst [40] w) x5:(MOVBstore [1] {s} p1:(ADD ptr1 idx1) (SRLconst [48] w) x6:(MOVBstoreidx ptr0 idx0 (SRLconst [56] w) mem))))))))
// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2, x3, x4, x5, x6)
// result: (MOVDstoreidx ptr0 idx0 (REV <w.Type> w) mem)
for {
if auxIntToInt32(v.AuxInt) != 7 {
break
}
s := auxToSym(v.Aux)
p := v_0
w := v_1
x0 := v_2
if x0.Op != OpARM64MOVBstore || auxIntToInt32(x0.AuxInt) != 6 || auxToSym(x0.Aux) != s {
break
}
_ = x0.Args[2]
if p != x0.Args[0] {
break
}
x0_1 := x0.Args[1]
if x0_1.Op != OpARM64SRLconst || auxIntToInt64(x0_1.AuxInt) != 8 || w != x0_1.Args[0] {
break
}
x1 := x0.Args[2]
if x1.Op != OpARM64MOVBstore || auxIntToInt32(x1.AuxInt) != 5 || auxToSym(x1.Aux) != s {
break
}
_ = x1.Args[2]
if p != x1.Args[0] {
break
}
x1_1 := x1.Args[1]
if x1_1.Op != OpARM64SRLconst || auxIntToInt64(x1_1.AuxInt) != 16 || w != x1_1.Args[0] {
break
}
x2 := x1.Args[2]
if x2.Op != OpARM64MOVBstore || auxIntToInt32(x2.AuxInt) != 4 || auxToSym(x2.Aux) != s {
break
}
_ = x2.Args[2]
if p != x2.Args[0] {
break
}
x2_1 := x2.Args[1]
if x2_1.Op != OpARM64SRLconst || auxIntToInt64(x2_1.AuxInt) != 24 || w != x2_1.Args[0] {
break
}
x3 := x2.Args[2]
if x3.Op != OpARM64MOVBstore || auxIntToInt32(x3.AuxInt) != 3 || auxToSym(x3.Aux) != s {
break
}
_ = x3.Args[2]
if p != x3.Args[0] {
break
}
x3_1 := x3.Args[1]
if x3_1.Op != OpARM64SRLconst || auxIntToInt64(x3_1.AuxInt) != 32 || w != x3_1.Args[0] {
break
}
x4 := x3.Args[2]
if x4.Op != OpARM64MOVBstore || auxIntToInt32(x4.AuxInt) != 2 || auxToSym(x4.Aux) != s {
break
}
_ = x4.Args[2]
if p != x4.Args[0] {
break
}
x4_1 := x4.Args[1]
if x4_1.Op != OpARM64SRLconst || auxIntToInt64(x4_1.AuxInt) != 40 || w != x4_1.Args[0] {
break
}
x5 := x4.Args[2]
if x5.Op != OpARM64MOVBstore || auxIntToInt32(x5.AuxInt) != 1 || auxToSym(x5.Aux) != s {
break
}
_ = x5.Args[2]
p1 := x5.Args[0]
if p1.Op != OpARM64ADD {
break
}
_ = p1.Args[1]
p1_0 := p1.Args[0]
p1_1 := p1.Args[1]
for _i0 := 0; _i0 <= 1; _i0, p1_0, p1_1 = _i0+1, p1_1, p1_0 {
ptr1 := p1_0
idx1 := p1_1
x5_1 := x5.Args[1]
if x5_1.Op != OpARM64SRLconst || auxIntToInt64(x5_1.AuxInt) != 48 || w != x5_1.Args[0] {
continue
}
x6 := x5.Args[2]
if x6.Op != OpARM64MOVBstoreidx {
continue
}
mem := x6.Args[3]
ptr0 := x6.Args[0]
idx0 := x6.Args[1]
x6_2 := x6.Args[2]
if x6_2.Op != OpARM64SRLconst || auxIntToInt64(x6_2.AuxInt) != 56 || w != x6_2.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2, x3, x4, x5, x6)) {
continue
}
v.reset(OpARM64MOVDstoreidx)
v0 := b.NewValue0(x5.Pos, OpARM64REV, w.Type)
v0.AddArg(w)
v.AddArg4(ptr0, idx0, v0, mem)
return true
}
break
}
// match: (MOVBstore [i] {s} ptr w x0:(MOVBstore [i-1] {s} ptr (UBFX [armBFAuxInt(8, 24)] w) x1:(MOVBstore [i-2] {s} ptr (UBFX [armBFAuxInt(16, 16)] w) x2:(MOVBstore [i-3] {s} ptr (UBFX [armBFAuxInt(24, 8)] w) mem))))
// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0, x1, x2)
// result: (MOVWstore [i-3] {s} ptr (REVW <w.Type> w) mem)
for {
i := auxIntToInt32(v.AuxInt)
s := auxToSym(v.Aux)
ptr := v_0
w := v_1
x0 := v_2
if x0.Op != OpARM64MOVBstore || auxIntToInt32(x0.AuxInt) != i-1 || auxToSym(x0.Aux) != s {
break
}
_ = x0.Args[2]
if ptr != x0.Args[0] {
break
}
x0_1 := x0.Args[1]
if x0_1.Op != OpARM64UBFX || auxIntToArm64BitField(x0_1.AuxInt) != armBFAuxInt(8, 24) || w != x0_1.Args[0] {
break
}
x1 := x0.Args[2]
if x1.Op != OpARM64MOVBstore || auxIntToInt32(x1.AuxInt) != i-2 || auxToSym(x1.Aux) != s {
break
}
_ = x1.Args[2]
if ptr != x1.Args[0] {
break
}
x1_1 := x1.Args[1]
if x1_1.Op != OpARM64UBFX || auxIntToArm64BitField(x1_1.AuxInt) != armBFAuxInt(16, 16) || w != x1_1.Args[0] {
break
}
x2 := x1.Args[2]
if x2.Op != OpARM64MOVBstore || auxIntToInt32(x2.AuxInt) != i-3 || auxToSym(x2.Aux) != s {
break
}
mem := x2.Args[2]
if ptr != x2.Args[0] {
break
}
x2_1 := x2.Args[1]
if x2_1.Op != OpARM64UBFX || auxIntToArm64BitField(x2_1.AuxInt) != armBFAuxInt(24, 8) || w != x2_1.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0, x1, x2)) {
break
}
v.reset(OpARM64MOVWstore)
v.AuxInt = int32ToAuxInt(i - 3)
v.Aux = symToAux(s)
v0 := b.NewValue0(x2.Pos, OpARM64REVW, w.Type)
v0.AddArg(w)
v.AddArg3(ptr, v0, mem)
return true
}
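// Analogous to the 8-byte case: four byte stores of w, w>>8, w>>16, and w>>24
// (the shifted values appearing here as UBFX extractions), laid out from the
// highest offset to the lowest, merge into one MOVWstore of (REVW <w.Type> w).
// As a hand-written illustration, big-endian Go code of the form
//
//	b[0] = byte(w >> 24)
//	b[1] = byte(w >> 16)
//	b[2] = byte(w >> 8)
//	b[3] = byte(w)
//
// is expected to reduce to a REVW plus a single word store.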
// match: (MOVBstore [3] {s} p w x0:(MOVBstore [2] {s} p (UBFX [armBFAuxInt(8, 24)] w) x1:(MOVBstore [1] {s} p1:(ADD ptr1 idx1) (UBFX [armBFAuxInt(16, 16)] w) x2:(MOVBstoreidx ptr0 idx0 (UBFX [armBFAuxInt(24, 8)] w) mem))))
// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2)
// result: (MOVWstoreidx ptr0 idx0 (REVW <w.Type> w) mem)
for {
if auxIntToInt32(v.AuxInt) != 3 {
break
}
s := auxToSym(v.Aux)
p := v_0
w := v_1
x0 := v_2
if x0.Op != OpARM64MOVBstore || auxIntToInt32(x0.AuxInt) != 2 || auxToSym(x0.Aux) != s {
break
}
_ = x0.Args[2]
if p != x0.Args[0] {
break
}
x0_1 := x0.Args[1]
if x0_1.Op != OpARM64UBFX || auxIntToArm64BitField(x0_1.AuxInt) != armBFAuxInt(8, 24) || w != x0_1.Args[0] {
break
}
x1 := x0.Args[2]
if x1.Op != OpARM64MOVBstore || auxIntToInt32(x1.AuxInt) != 1 || auxToSym(x1.Aux) != s {
break
}
_ = x1.Args[2]
p1 := x1.Args[0]
if p1.Op != OpARM64ADD {
break
}
_ = p1.Args[1]
p1_0 := p1.Args[0]
p1_1 := p1.Args[1]
for _i0 := 0; _i0 <= 1; _i0, p1_0, p1_1 = _i0+1, p1_1, p1_0 {
ptr1 := p1_0
idx1 := p1_1
x1_1 := x1.Args[1]
if x1_1.Op != OpARM64UBFX || auxIntToArm64BitField(x1_1.AuxInt) != armBFAuxInt(16, 16) || w != x1_1.Args[0] {
continue
}
x2 := x1.Args[2]
if x2.Op != OpARM64MOVBstoreidx {
continue
}
mem := x2.Args[3]
ptr0 := x2.Args[0]
idx0 := x2.Args[1]
x2_2 := x2.Args[2]
if x2_2.Op != OpARM64UBFX || auxIntToArm64BitField(x2_2.AuxInt) != armBFAuxInt(24, 8) || w != x2_2.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2)) {
continue
}
v.reset(OpARM64MOVWstoreidx)
v0 := b.NewValue0(x1.Pos, OpARM64REVW, w.Type)
v0.AddArg(w)
v.AddArg4(ptr0, idx0, v0, mem)
return true
}
break
}
// match: (MOVBstore [i] {s} ptr w x0:(MOVBstore [i-1] {s} ptr (SRLconst [8] (MOVDreg w)) x1:(MOVBstore [i-2] {s} ptr (SRLconst [16] (MOVDreg w)) x2:(MOVBstore [i-3] {s} ptr (SRLconst [24] (MOVDreg w)) mem))))
// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0, x1, x2)
// result: (MOVWstore [i-3] {s} ptr (REVW <w.Type> w) mem)
for {
i := auxIntToInt32(v.AuxInt)
s := auxToSym(v.Aux)
ptr := v_0
w := v_1
x0 := v_2
if x0.Op != OpARM64MOVBstore || auxIntToInt32(x0.AuxInt) != i-1 || auxToSym(x0.Aux) != s {
break
}
_ = x0.Args[2]
if ptr != x0.Args[0] {
break
}
x0_1 := x0.Args[1]
if x0_1.Op != OpARM64SRLconst || auxIntToInt64(x0_1.AuxInt) != 8 {
break
}
x0_1_0 := x0_1.Args[0]
if x0_1_0.Op != OpARM64MOVDreg || w != x0_1_0.Args[0] {
break
}
x1 := x0.Args[2]
if x1.Op != OpARM64MOVBstore || auxIntToInt32(x1.AuxInt) != i-2 || auxToSym(x1.Aux) != s {
break
}
_ = x1.Args[2]
if ptr != x1.Args[0] {
break
}
x1_1 := x1.Args[1]
if x1_1.Op != OpARM64SRLconst || auxIntToInt64(x1_1.AuxInt) != 16 {
break
}
x1_1_0 := x1_1.Args[0]
if x1_1_0.Op != OpARM64MOVDreg || w != x1_1_0.Args[0] {
break
}
x2 := x1.Args[2]
if x2.Op != OpARM64MOVBstore || auxIntToInt32(x2.AuxInt) != i-3 || auxToSym(x2.Aux) != s {
break
}
mem := x2.Args[2]
if ptr != x2.Args[0] {
break
}
x2_1 := x2.Args[1]
if x2_1.Op != OpARM64SRLconst || auxIntToInt64(x2_1.AuxInt) != 24 {
break
}
x2_1_0 := x2_1.Args[0]
if x2_1_0.Op != OpARM64MOVDreg || w != x2_1_0.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0, x1, x2)) {
break
}
v.reset(OpARM64MOVWstore)
v.AuxInt = int32ToAuxInt(i - 3)
v.Aux = symToAux(s)
v0 := b.NewValue0(x2.Pos, OpARM64REVW, w.Type)
v0.AddArg(w)
v.AddArg3(ptr, v0, mem)
return true
}
// match: (MOVBstore [3] {s} p w x0:(MOVBstore [2] {s} p (SRLconst [8] (MOVDreg w)) x1:(MOVBstore [1] {s} p1:(ADD ptr1 idx1) (SRLconst [16] (MOVDreg w)) x2:(MOVBstoreidx ptr0 idx0 (SRLconst [24] (MOVDreg w)) mem))))
// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2)
// result: (MOVWstoreidx ptr0 idx0 (REVW <w.Type> w) mem)
for {
if auxIntToInt32(v.AuxInt) != 3 {
break
}
s := auxToSym(v.Aux)
p := v_0
w := v_1
x0 := v_2
if x0.Op != OpARM64MOVBstore || auxIntToInt32(x0.AuxInt) != 2 || auxToSym(x0.Aux) != s {
break
}
_ = x0.Args[2]
if p != x0.Args[0] {
break
}
x0_1 := x0.Args[1]
if x0_1.Op != OpARM64SRLconst || auxIntToInt64(x0_1.AuxInt) != 8 {
break
}
x0_1_0 := x0_1.Args[0]
if x0_1_0.Op != OpARM64MOVDreg || w != x0_1_0.Args[0] {
break
}
x1 := x0.Args[2]
if x1.Op != OpARM64MOVBstore || auxIntToInt32(x1.AuxInt) != 1 || auxToSym(x1.Aux) != s {
break
}
_ = x1.Args[2]
p1 := x1.Args[0]
if p1.Op != OpARM64ADD {
break
}
_ = p1.Args[1]
p1_0 := p1.Args[0]
p1_1 := p1.Args[1]
for _i0 := 0; _i0 <= 1; _i0, p1_0, p1_1 = _i0+1, p1_1, p1_0 {
ptr1 := p1_0
idx1 := p1_1
x1_1 := x1.Args[1]
if x1_1.Op != OpARM64SRLconst || auxIntToInt64(x1_1.AuxInt) != 16 {
continue
}
x1_1_0 := x1_1.Args[0]
if x1_1_0.Op != OpARM64MOVDreg || w != x1_1_0.Args[0] {
continue
}
x2 := x1.Args[2]
if x2.Op != OpARM64MOVBstoreidx {
continue
}
mem := x2.Args[3]
ptr0 := x2.Args[0]
idx0 := x2.Args[1]
x2_2 := x2.Args[2]
if x2_2.Op != OpARM64SRLconst || auxIntToInt64(x2_2.AuxInt) != 24 {
continue
}
x2_2_0 := x2_2.Args[0]
if x2_2_0.Op != OpARM64MOVDreg || w != x2_2_0.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2)) {
continue
}
v.reset(OpARM64MOVWstoreidx)
v0 := b.NewValue0(x1.Pos, OpARM64REVW, w.Type)
v0.AddArg(w)
v.AddArg4(ptr0, idx0, v0, mem)
return true
}
break
}
// match: (MOVBstore [i] {s} ptr w x0:(MOVBstore [i-1] {s} ptr (SRLconst [8] w) x1:(MOVBstore [i-2] {s} ptr (SRLconst [16] w) x2:(MOVBstore [i-3] {s} ptr (SRLconst [24] w) mem))))
// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0, x1, x2)
// result: (MOVWstore [i-3] {s} ptr (REVW <w.Type> w) mem)
for {
i := auxIntToInt32(v.AuxInt)
s := auxToSym(v.Aux)
ptr := v_0
w := v_1
x0 := v_2
if x0.Op != OpARM64MOVBstore || auxIntToInt32(x0.AuxInt) != i-1 || auxToSym(x0.Aux) != s {
break
}
_ = x0.Args[2]
if ptr != x0.Args[0] {
break
}
x0_1 := x0.Args[1]
if x0_1.Op != OpARM64SRLconst || auxIntToInt64(x0_1.AuxInt) != 8 || w != x0_1.Args[0] {
break
}
x1 := x0.Args[2]
if x1.Op != OpARM64MOVBstore || auxIntToInt32(x1.AuxInt) != i-2 || auxToSym(x1.Aux) != s {
break
}
_ = x1.Args[2]
if ptr != x1.Args[0] {
break
}
x1_1 := x1.Args[1]
if x1_1.Op != OpARM64SRLconst || auxIntToInt64(x1_1.AuxInt) != 16 || w != x1_1.Args[0] {
break
}
x2 := x1.Args[2]
if x2.Op != OpARM64MOVBstore || auxIntToInt32(x2.AuxInt) != i-3 || auxToSym(x2.Aux) != s {
break
}
mem := x2.Args[2]
if ptr != x2.Args[0] {
break
}
x2_1 := x2.Args[1]
if x2_1.Op != OpARM64SRLconst || auxIntToInt64(x2_1.AuxInt) != 24 || w != x2_1.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0, x1, x2)) {
break
}
v.reset(OpARM64MOVWstore)
v.AuxInt = int32ToAuxInt(i - 3)
v.Aux = symToAux(s)
v0 := b.NewValue0(x2.Pos, OpARM64REVW, w.Type)
v0.AddArg(w)
v.AddArg3(ptr, v0, mem)
return true
}
// match: (MOVBstore [3] {s} p w x0:(MOVBstore [2] {s} p (SRLconst [8] w) x1:(MOVBstore [1] {s} p1:(ADD ptr1 idx1) (SRLconst [16] w) x2:(MOVBstoreidx ptr0 idx0 (SRLconst [24] w) mem))))
// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2)
// result: (MOVWstoreidx ptr0 idx0 (REVW <w.Type> w) mem)
for {
if auxIntToInt32(v.AuxInt) != 3 {
break
}
s := auxToSym(v.Aux)
p := v_0
w := v_1
x0 := v_2
if x0.Op != OpARM64MOVBstore || auxIntToInt32(x0.AuxInt) != 2 || auxToSym(x0.Aux) != s {
break
}
_ = x0.Args[2]
if p != x0.Args[0] {
break
}
x0_1 := x0.Args[1]
if x0_1.Op != OpARM64SRLconst || auxIntToInt64(x0_1.AuxInt) != 8 || w != x0_1.Args[0] {
break
}
x1 := x0.Args[2]
if x1.Op != OpARM64MOVBstore || auxIntToInt32(x1.AuxInt) != 1 || auxToSym(x1.Aux) != s {
break
}
_ = x1.Args[2]
p1 := x1.Args[0]
if p1.Op != OpARM64ADD {
break
}
_ = p1.Args[1]
p1_0 := p1.Args[0]
p1_1 := p1.Args[1]
for _i0 := 0; _i0 <= 1; _i0, p1_0, p1_1 = _i0+1, p1_1, p1_0 {
ptr1 := p1_0
idx1 := p1_1
x1_1 := x1.Args[1]
if x1_1.Op != OpARM64SRLconst || auxIntToInt64(x1_1.AuxInt) != 16 || w != x1_1.Args[0] {
continue
}
x2 := x1.Args[2]
if x2.Op != OpARM64MOVBstoreidx {
continue
}
mem := x2.Args[3]
ptr0 := x2.Args[0]
idx0 := x2.Args[1]
x2_2 := x2.Args[2]
if x2_2.Op != OpARM64SRLconst || auxIntToInt64(x2_2.AuxInt) != 24 || w != x2_2.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0, x1, x2)) {
continue
}
v.reset(OpARM64MOVWstoreidx)
v0 := b.NewValue0(x1.Pos, OpARM64REVW, w.Type)
v0.AddArg(w)
v.AddArg4(ptr0, idx0, v0, mem)
return true
}
break
}
// match: (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (SRLconst [8] w) mem))
// cond: x.Uses == 1 && clobber(x)
// result: (MOVHstore [i-1] {s} ptr (REV16W <w.Type> w) mem)
for {
i := auxIntToInt32(v.AuxInt)
s := auxToSym(v.Aux)
ptr := v_0
w := v_1
x := v_2
if x.Op != OpARM64MOVBstore || auxIntToInt32(x.AuxInt) != i-1 || auxToSym(x.Aux) != s {
break
}
mem := x.Args[2]
if ptr != x.Args[0] {
break
}
x_1 := x.Args[1]
if x_1.Op != OpARM64SRLconst || auxIntToInt64(x_1.AuxInt) != 8 || w != x_1.Args[0] || !(x.Uses == 1 && clobber(x)) {
break
}
v.reset(OpARM64MOVHstore)
v.AuxInt = int32ToAuxInt(i - 1)
v.Aux = symToAux(s)
v0 := b.NewValue0(x.Pos, OpARM64REV16W, w.Type)
v0.AddArg(w)
v.AddArg3(ptr, v0, mem)
return true
}
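// The two-byte analogue: a store of w at [i] chained under a store of w>>8 at
// [i-1] is a big-endian halfword, so it becomes a MOVHstore of
// (REV16W <w.Type> w), which swaps the two low bytes. The little-endian
// ordering handled earlier needs no byte reversal; only this reversed layout
// pays for the extra REV16W.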
// match: (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (SRLconst [8] w) mem))
// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
// result: (MOVHstoreidx ptr0 idx0 (REV16W <w.Type> w) mem)
for {
if auxIntToInt32(v.AuxInt) != 1 {
break
}
s := auxToSym(v.Aux)
if v_0.Op != OpARM64ADD {
break
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
v_0_1 := v_0.Args[1]
for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
ptr1 := v_0_0
idx1 := v_0_1
w := v_1
x := v_2
if x.Op != OpARM64MOVBstoreidx {
continue
}
mem := x.Args[3]
ptr0 := x.Args[0]
idx0 := x.Args[1]
x_2 := x.Args[2]
if x_2.Op != OpARM64SRLconst || auxIntToInt64(x_2.AuxInt) != 8 || w != x_2.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
continue
}
v.reset(OpARM64MOVHstoreidx)
v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type)
v0.AddArg(w)
v.AddArg4(ptr0, idx0, v0, mem)
return true
}
break
}
// match: (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (UBFX [armBFAuxInt(8, 8)] w) mem))
// cond: x.Uses == 1 && clobber(x)
// result: (MOVHstore [i-1] {s} ptr (REV16W <w.Type> w) mem)
for {
i := auxIntToInt32(v.AuxInt)
s := auxToSym(v.Aux)
ptr := v_0
w := v_1
x := v_2
if x.Op != OpARM64MOVBstore || auxIntToInt32(x.AuxInt) != i-1 || auxToSym(x.Aux) != s {
break
}
mem := x.Args[2]
if ptr != x.Args[0] {
break
}
x_1 := x.Args[1]
if x_1.Op != OpARM64UBFX || auxIntToArm64BitField(x_1.AuxInt) != armBFAuxInt(8, 8) || w != x_1.Args[0] || !(x.Uses == 1 && clobber(x)) {
break
}
v.reset(OpARM64MOVHstore)
v.AuxInt = int32ToAuxInt(i - 1)
v.Aux = symToAux(s)
v0 := b.NewValue0(x.Pos, OpARM64REV16W, w.Type)
v0.AddArg(w)
v.AddArg3(ptr, v0, mem)
return true
}
// match: (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (UBFX [armBFAuxInt(8, 8)] w) mem))
// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
// result: (MOVHstoreidx ptr0 idx0 (REV16W <w.Type> w) mem)
for {
if auxIntToInt32(v.AuxInt) != 1 {
break
}
s := auxToSym(v.Aux)
if v_0.Op != OpARM64ADD {
break
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
v_0_1 := v_0.Args[1]
for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
ptr1 := v_0_0
idx1 := v_0_1
w := v_1
x := v_2
if x.Op != OpARM64MOVBstoreidx {
continue
}
mem := x.Args[3]
ptr0 := x.Args[0]
idx0 := x.Args[1]
x_2 := x.Args[2]
if x_2.Op != OpARM64UBFX || auxIntToArm64BitField(x_2.AuxInt) != armBFAuxInt(8, 8) || w != x_2.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
continue
}
v.reset(OpARM64MOVHstoreidx)
v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type)
v0.AddArg(w)
v.AddArg4(ptr0, idx0, v0, mem)
return true
}
break
}
// match: (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (SRLconst [8] (MOVDreg w)) mem))
// cond: x.Uses == 1 && clobber(x)
// result: (MOVHstore [i-1] {s} ptr (REV16W <w.Type> w) mem)
for {
i := auxIntToInt32(v.AuxInt)
s := auxToSym(v.Aux)
ptr := v_0
w := v_1
x := v_2
if x.Op != OpARM64MOVBstore || auxIntToInt32(x.AuxInt) != i-1 || auxToSym(x.Aux) != s {
break
}
mem := x.Args[2]
if ptr != x.Args[0] {
break
}
x_1 := x.Args[1]
if x_1.Op != OpARM64SRLconst || auxIntToInt64(x_1.AuxInt) != 8 {
break
}
x_1_0 := x_1.Args[0]
if x_1_0.Op != OpARM64MOVDreg || w != x_1_0.Args[0] || !(x.Uses == 1 && clobber(x)) {
break
}
v.reset(OpARM64MOVHstore)
v.AuxInt = int32ToAuxInt(i - 1)
v.Aux = symToAux(s)
v0 := b.NewValue0(x.Pos, OpARM64REV16W, w.Type)
v0.AddArg(w)
v.AddArg3(ptr, v0, mem)
return true
}
// match: (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (SRLconst [8] (MOVDreg w)) mem))
// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
// result: (MOVHstoreidx ptr0 idx0 (REV16W <w.Type> w) mem)
for {
if auxIntToInt32(v.AuxInt) != 1 {
break
}
s := auxToSym(v.Aux)
if v_0.Op != OpARM64ADD {
break
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
v_0_1 := v_0.Args[1]
for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
ptr1 := v_0_0
idx1 := v_0_1
w := v_1
x := v_2
if x.Op != OpARM64MOVBstoreidx {
continue
}
mem := x.Args[3]
ptr0 := x.Args[0]
idx0 := x.Args[1]
x_2 := x.Args[2]
if x_2.Op != OpARM64SRLconst || auxIntToInt64(x_2.AuxInt) != 8 {
continue
}
x_2_0 := x_2.Args[0]
if x_2_0.Op != OpARM64MOVDreg || w != x_2_0.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
continue
}
v.reset(OpARM64MOVHstoreidx)
v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type)
v0.AddArg(w)
v.AddArg4(ptr0, idx0, v0, mem)
return true
}
break
}
// match: (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (UBFX [armBFAuxInt(8, 24)] w) mem))
// cond: x.Uses == 1 && clobber(x)
// result: (MOVHstore [i-1] {s} ptr (REV16W <w.Type> w) mem)
for {
i := auxIntToInt32(v.AuxInt)
s := auxToSym(v.Aux)
ptr := v_0
w := v_1
x := v_2
if x.Op != OpARM64MOVBstore || auxIntToInt32(x.AuxInt) != i-1 || auxToSym(x.Aux) != s {
break
}
mem := x.Args[2]
if ptr != x.Args[0] {
break
}
x_1 := x.Args[1]
if x_1.Op != OpARM64UBFX || auxIntToArm64BitField(x_1.AuxInt) != armBFAuxInt(8, 24) || w != x_1.Args[0] || !(x.Uses == 1 && clobber(x)) {
break
}
v.reset(OpARM64MOVHstore)
v.AuxInt = int32ToAuxInt(i - 1)
v.Aux = symToAux(s)
v0 := b.NewValue0(x.Pos, OpARM64REV16W, w.Type)
v0.AddArg(w)
v.AddArg3(ptr, v0, mem)
return true
}
// match: (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (UBFX [armBFAuxInt(8, 24)] w) mem))
// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
// result: (MOVHstoreidx ptr0 idx0 (REV16W <w.Type> w) mem)
for {
if auxIntToInt32(v.AuxInt) != 1 {
break
}
s := auxToSym(v.Aux)
if v_0.Op != OpARM64ADD {
break
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
v_0_1 := v_0.Args[1]
for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
ptr1 := v_0_0
idx1 := v_0_1
w := v_1
x := v_2
if x.Op != OpARM64MOVBstoreidx {
continue
}
mem := x.Args[3]
ptr0 := x.Args[0]
idx0 := x.Args[1]
x_2 := x.Args[2]
if x_2.Op != OpARM64UBFX || auxIntToArm64BitField(x_2.AuxInt) != armBFAuxInt(8, 24) || w != x_2.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
continue
}
v.reset(OpARM64MOVHstoreidx)
v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type)
v0.AddArg(w)
v.AddArg4(ptr0, idx0, v0, mem)
return true
}
break
}
return false
}
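// putUint32BE is an illustrative, hand-written sketch; it is not generated
// from ARM64.rules and the name is hypothetical. It shows the kind of Go
// source whose four MOVBstore ops the rules above are expected to merge into
// a single word store of a byte-reversed value on arm64.
func putUint32BE(b []byte, w uint32) {
	_ = b[3] // one up-front bounds check keeps the four stores in a single contiguous chain
	b[0] = byte(w >> 24)
	b[1] = byte(w >> 16)
	b[2] = byte(w >> 8)
	b[3] = byte(w)
}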