...

Text file src/cmd/compile/internal/ssa/_gen/Wasm.rules

Documentation: cmd/compile/internal/ssa/_gen

     1// Copyright 2018 The Go Authors. All rights reserved.
     2// Use of this source code is governed by a BSD-style
     3// license that can be found in the LICENSE file.
     4
// Last is a pseudo-op whose result is simply its final argument; the earlier
// arguments exist only to carry named intermediate values that the final
// expression refers to (used by the Hmul64 rules below).
     5(Last ___) => v.Args[len(v.Args)-1]
     6
     7// Lowering arithmetic
// All integer sizes map onto 64-bit Wasm ops; narrower values live in the low
// bits of an i64 and the high bits are ignored until an operation needs them.
     8(Add(64|32|16|8|Ptr) ...) => (I64Add ...)
     9(Add(64|32)F ...) => (F(64|32)Add ...)
    10
    11(Sub(64|32|16|8|Ptr) ...) => (I64Sub ...)
    12(Sub(64|32)F ...) => (F(64|32)Sub ...)
    13
    14(Mul(64|32|16|8) ...) => (I64Mul ...)
    15(Mul(64|32)F ...) => (F(64|32)Mul ...)
    16
// Signed div/rem sign-extend operands to 64 bits first. The bool aux on
// Div/Mod appears to flag the divide that needs the minInt/-1 special case;
// only the [false] (no fix-up) forms are lowered here — NOTE(review): confirm
// against genericOps.go that the [true] forms are rewritten earlier.
    17(Div64 [false] x y) => (I64DivS x y)
    18(Div32 [false] x y) => (I64DivS (SignExt32to64 x) (SignExt32to64 y))
    19(Div16 [false] x y) => (I64DivS (SignExt16to64 x) (SignExt16to64 y))
    20(Div8          x y) => (I64DivS (SignExt8to64 x) (SignExt8to64 y))
    21(Div64u ...) => (I64DivU ...)
    22(Div32u x y) => (I64DivU (ZeroExt32to64 x) (ZeroExt32to64 y))
    23(Div16u x y) => (I64DivU (ZeroExt16to64 x) (ZeroExt16to64 y))
    24(Div8u  x y) => (I64DivU (ZeroExt8to64 x) (ZeroExt8to64 y))
    25(Div(64|32)F ...) => (F(64|32)Div ...)
    26
    27(Mod64 [false] x y) => (I64RemS x y)
    28(Mod32 [false] x y) => (I64RemS (SignExt32to64 x) (SignExt32to64 y))
    29(Mod16 [false] x y) => (I64RemS (SignExt16to64 x) (SignExt16to64 y))
    30(Mod8          x y) => (I64RemS (SignExt8to64  x) (SignExt8to64  y))
    31(Mod64u ...) => (I64RemU ...)
    32(Mod32u x y) => (I64RemU (ZeroExt32to64 x) (ZeroExt32to64 y))
    33(Mod16u x y) => (I64RemU (ZeroExt16to64 x) (ZeroExt16to64 y))
    34(Mod8u  x y) => (I64RemU (ZeroExt8to64  x) (ZeroExt8to64  y))
    35
    36(And(64|32|16|8|B) ...) => (I64And ...)
    37
    38(Or(64|32|16|8|B) ...) => (I64Or ...)
    39
    40(Xor(64|32|16|8) ...) => (I64Xor ...)
    41
// Wasm has no integer negate/complement instructions; synthesize them.
    42(Neg(64|32|16|8) x) => (I64Sub (I64Const [0]) x)
    43(Neg(64|32)F ...) => (F(64|32)Neg ...)
    44
    45(Com(64|32|16|8) x) => (I64Xor x (I64Const [-1]))
    46
    47(Not ...) => (I64Eqz ...)
    48
// (x-y)/2 + y computes the unsigned average without overflowing the addition.
// NOTE(review): this identity is only valid when x >= y — presumably
// guaranteed by the generic Avg64u op's contract; confirm in genericOps.go.
    49(Avg64u x y) => (I64Add (I64ShrU (I64Sub x y) (I64Const [1])) y)
    50
    51// High word of multiply without carry bits; see Hacker's Delight, 2nd. ed, Figure 8-2, p. 174.
// Each "name:" line binds an intermediate value for reuse by later lines;
// Last discards all but its final argument, which is the high 64 bits of x*y.
// The 64x64 product is built from four 32x32 partial products.
    52(Hmul64 <t> x y) =>
    53	(Last <t>
    54		x0: (ZeroExt32to64 x)
    55		x1: (I64ShrS x (I64Const [32]))
    56		y0: (ZeroExt32to64 y)
    57		y1: (I64ShrS y (I64Const [32]))
    58		x0y0: (I64Mul x0 y0)
    59		tt: (I64Add (I64Mul x1 y0) (I64ShrU x0y0 (I64Const [32])))
    60		w1: (I64Add (I64Mul x0 y1) (ZeroExt32to64 tt))
    61		w2: (I64ShrS tt (I64Const [32]))
    62		(I64Add (I64Add (I64Mul x1 y1) w2) (I64ShrS w1 (I64Const [32]))))
    63
    64// Same as Hmul64 but signed shifts now unsigned.
    65(Hmul64u <t> x y) =>
    66	(Last <t>
    67		x0: (ZeroExt32to64 x)
    68		x1: (I64ShrU x (I64Const [32]))
    69		y0: (ZeroExt32to64 y)
    70		y1: (I64ShrU y (I64Const [32]))
    71		w0: (I64Mul x0 y0)
    72		tt: (I64Add (I64Mul x1 y0) (I64ShrU w0 (I64Const [32])))
    73		w1: (I64Add (I64Mul x0 y1) (ZeroExt32to64 tt))
    74		w2: (I64ShrU tt (I64Const [32]))
    75		hi: (I64Add (I64Add (I64Mul x1 y1) w2) (I64ShrU w1 (I64Const [32]))))
    76
// Mul64uhilo: Select0 is the high 64 bits (via Hmul64u above); Select1 is the
// low 64 bits, which a plain i64.mul already produces.
    77(Select0 <t> (Mul64uhilo x y)) => (Hmul64u <t> x y)
    78(Select1 <t> (Mul64uhilo x y)) => (I64Mul x y)
    79
    80// Lowering pointer arithmetic
    81(OffPtr ...) => (I64AddConst ...)
    82
    83// Lowering extension
    84// It is unnecessary to extend loads
// (the sized loads below already produce a correctly extended 64-bit value).
    85(SignExt32to64        x:(I64Load32S _ _)) => x
    86(SignExt16to(64|32)   x:(I64Load16S _ _)) => x
    87(SignExt8to(64|32|16) x:(I64Load8S  _ _)) => x
    88(ZeroExt32to64        x:(I64Load32U _ _)) => x
    89(ZeroExt16to(64|32)   x:(I64Load16U _ _)) => x
    90(ZeroExt8to(64|32|16) x:(I64Load8U  _ _)) => x
// Otherwise: sign-extend with the dedicated Wasm sign-extension ops,
// zero-extend by masking off the high bits.
    91(SignExt32to64        x) => (I64Extend32S x)
    92(SignExt8to(64|32|16) x) => (I64Extend8S x)
    93(SignExt16to(64|32)   x) => (I64Extend16S x)
    94(ZeroExt32to64        x) => (I64And x (I64Const [0xffffffff]))
    95(ZeroExt16to(64|32)   x) => (I64And x (I64Const [0xffff]))
    96(ZeroExt8to(64|32|16) x) => (I64And x (I64Const [0xff]))
    97
// Slicemask: 0 when x == 0, all ones when x > 0. (0-x) is negative for any
// positive x, and the arithmetic shift by 63 broadcasts its sign bit.
    98(Slicemask x) => (I64ShrS (I64Sub (I64Const [0]) x) (I64Const [63]))
    99
   100// Lowering truncation
   101// Because we ignore the high parts, truncates are just copies.
   102(Trunc64to(32|16|8) ...) => (Copy ...)
   103(Trunc32to(16|8)    ...) => (Copy ...)
   104(Trunc16to8         ...) => (Copy ...)
   105
   106// Lowering float <=> int
   107(Cvt32to(64|32)F x) => (F(64|32)ConvertI64S (SignExt32to64 x))
   108(Cvt64to(64|32)F ...) => (F(64|32)ConvertI64S ...)
   109(Cvt32Uto(64|32)F x) => (F(64|32)ConvertI64U (ZeroExt32to64 x))
   110(Cvt64Uto(64|32)F ...) => (F(64|32)ConvertI64U ...)
   111
// Float-to-int uses the saturating truncation ops, which clamp out-of-range
// inputs instead of trapping the way the plain i64.trunc_* ops would.
   112(Cvt32Fto32 ...) => (I64TruncSatF32S ...)
   113(Cvt32Fto64 ...) => (I64TruncSatF32S ...)
   114(Cvt64Fto32 ...) => (I64TruncSatF64S ...)
   115(Cvt64Fto64 ...) => (I64TruncSatF64S ...)
   116(Cvt32Fto32U ...) => (I64TruncSatF32U ...)
   117(Cvt32Fto64U ...) => (I64TruncSatF32U ...)
   118(Cvt64Fto32U ...) => (I64TruncSatF64U ...)
   119(Cvt64Fto64U ...) => (I64TruncSatF64U ...)
   120
   121(Cvt32Fto64F ...) => (F64PromoteF32 ...)
   122(Cvt64Fto32F ...) => (F32DemoteF64 ...)
   123
   124(CvtBoolToUint8 ...) => (Copy ...)
   125
   126(Round32F ...) => (Copy ...)
   127(Round64F ...) => (Copy ...)
   128
   129// Lowering shifts
   130// Unsigned shifts need to return 0 if shift amount is >= width of shifted value.
// (Wasm itself masks the shift count to the low 6 bits, so a Select guards the
// general case when the bound isn't provable.)
   131
   132(Lsh64x64 x y) && shiftIsBounded(v) => (I64Shl x y)
   133(Lsh64x64 x (I64Const [c])) && uint64(c) < 64 => (I64Shl x (I64Const [c]))
   134(Lsh64x64 x (I64Const [c])) && uint64(c) >= 64 => (I64Const [0])
   135(Lsh64x64 x y) => (Select (I64Shl x y) (I64Const [0]) (I64LtU y (I64Const [64])))
   136(Lsh64x(32|16|8) [c] x y) => (Lsh64x64 [c] x (ZeroExt(32|16|8)to64 y))
   137
// Narrow left shifts delegate straight to the 64-bit rule: the junk high bits
// only shift further left and are never observed.
   138(Lsh32x64 ...) => (Lsh64x64 ...)
   139(Lsh32x(32|16|8) [c] x y) => (Lsh64x64 [c] x (ZeroExt(32|16|8)to64 y))
   140
   141(Lsh16x64 ...) => (Lsh64x64 ...)
   142(Lsh16x(32|16|8) [c] x y) => (Lsh64x64 [c] x (ZeroExt(32|16|8)to64 y))
   143
   144(Lsh8x64 ...) => (Lsh64x64 ...)
   145(Lsh8x(32|16|8) [c] x y) => (Lsh64x64 [c] x (ZeroExt(32|16|8)to64 y))
   146
   147(Rsh64Ux64 x y) && shiftIsBounded(v) => (I64ShrU x y)
   148(Rsh64Ux64 x (I64Const [c])) && uint64(c) < 64 => (I64ShrU x (I64Const [c]))
   149(Rsh64Ux64 x (I64Const [c])) && uint64(c) >= 64 => (I64Const [0])
   150(Rsh64Ux64 x y) => (Select (I64ShrU x y) (I64Const [0]) (I64LtU y (I64Const [64])))
   151(Rsh64Ux(32|16|8) [c] x y) => (Rsh64Ux64 [c] x (ZeroExt(32|16|8)to64 y))
   152
// Narrow right shifts must extend the shifted value first so the junk high
// bits do not shift down into the result.
   153(Rsh32Ux64 [c] x y) => (Rsh64Ux64 [c] (ZeroExt32to64 x) y)
   154(Rsh32Ux(32|16|8) [c] x y) => (Rsh64Ux64 [c] (ZeroExt32to64 x) (ZeroExt(32|16|8)to64 y))
   155
   156(Rsh16Ux64 [c] x y) => (Rsh64Ux64 [c] (ZeroExt16to64 x) y)
   157(Rsh16Ux(32|16|8) [c] x y) => (Rsh64Ux64 [c] (ZeroExt16to64 x) (ZeroExt(32|16|8)to64 y))
   158
   159(Rsh8Ux64 [c] x y) => (Rsh64Ux64 [c] (ZeroExt8to64 x) y)
   160(Rsh8Ux(32|16|8) [c] x y) => (Rsh64Ux64 [c] (ZeroExt8to64 x) (ZeroExt(32|16|8)to64 y))
   161
   162// Signed right shift needs to return 0/-1 if shift amount is >= width of shifted value.
   163// We implement this by setting the shift value to (width - 1) if the shift value is >= width.
   164
   165(Rsh64x64 x y) && shiftIsBounded(v) => (I64ShrS x y)
   166(Rsh64x64 x (I64Const [c])) && uint64(c) < 64 => (I64ShrS x (I64Const [c]))
   167(Rsh64x64 x (I64Const [c])) && uint64(c) >= 64 => (I64ShrS x (I64Const [63]))
   168(Rsh64x64 x y) => (I64ShrS x (Select <typ.Int64> y (I64Const [63]) (I64LtU y (I64Const [64]))))
   169(Rsh64x(32|16|8) [c] x y) => (Rsh64x64 [c] x (ZeroExt(32|16|8)to64 y))
   170
   171(Rsh32x64 [c] x y) => (Rsh64x64 [c] (SignExt32to64 x) y)
   172(Rsh32x(32|16|8) [c] x y) => (Rsh64x64 [c] (SignExt32to64 x) (ZeroExt(32|16|8)to64 y))
   173
   174(Rsh16x64 [c] x y) => (Rsh64x64 [c] (SignExt16to64 x) y)
   175(Rsh16x(32|16|8) [c] x y) => (Rsh64x64 [c] (SignExt16to64 x) (ZeroExt(32|16|8)to64 y))
   176
   177(Rsh8x64 [c] x y)  => (Rsh64x64 [c] (SignExt8to64 x) y)
   178(Rsh8x(32|16|8) [c] x y)  => (Rsh64x64 [c] (SignExt8to64 x) (ZeroExt(32|16|8)to64 y))
   179
   180// Lowering rotates
// 8/16-bit rotates (constant amount only) are built from a shift pair;
// 32/64-bit rotates map onto native Wasm rotate instructions.
   181(RotateLeft8 <t> x (I64Const [c])) => (Or8 (Lsh8x64 <t> x (I64Const [c&7])) (Rsh8Ux64 <t> x (I64Const [-c&7])))
   182(RotateLeft16 <t> x (I64Const [c])) => (Or16 (Lsh16x64 <t> x (I64Const [c&15])) (Rsh16Ux64 <t> x (I64Const [-c&15])))
   183(RotateLeft32 ...) => (I32Rotl ...)
   184(RotateLeft64 ...) => (I64Rotl ...)
   185
   186// Lowering comparisons
// 64-bit operands compare directly; narrower operands are extended first —
// sign-extension for signed compares, zero-extension for unsigned compares
// and for (in)equality, where either extension works.
   187(Less64  ...) => (I64LtS ...)
   188(Less32  x y) => (I64LtS (SignExt32to64 x) (SignExt32to64 y))
   189(Less16  x y) => (I64LtS (SignExt16to64 x) (SignExt16to64 y))
   190(Less8   x y) => (I64LtS (SignExt8to64  x) (SignExt8to64  y))
   191(Less64U ...) => (I64LtU ...)
   192(Less32U x y) => (I64LtU (ZeroExt32to64 x) (ZeroExt32to64 y))
   193(Less16U x y) => (I64LtU (ZeroExt16to64 x) (ZeroExt16to64 y))
   194(Less8U  x y) => (I64LtU (ZeroExt8to64  x) (ZeroExt8to64  y))
   195(Less(64|32)F ...) => (F(64|32)Lt ...)
   196
   197(Leq64  ...) => (I64LeS ...)
   198(Leq32  x y) => (I64LeS (SignExt32to64 x) (SignExt32to64 y))
   199(Leq16  x y) => (I64LeS (SignExt16to64 x) (SignExt16to64 y))
   200(Leq8   x y) => (I64LeS (SignExt8to64  x) (SignExt8to64  y))
   201(Leq64U ...) => (I64LeU ...)
   202(Leq32U x y) => (I64LeU (ZeroExt32to64 x) (ZeroExt32to64 y))
   203(Leq16U x y) => (I64LeU (ZeroExt16to64 x) (ZeroExt16to64 y))
   204(Leq8U  x y) => (I64LeU (ZeroExt8to64  x) (ZeroExt8to64  y))
   205(Leq(64|32)F ...) => (F(64|32)Le ...)
   206
   207(Eq64  ...) => (I64Eq ...)
   208(Eq32  x y) => (I64Eq (ZeroExt32to64 x) (ZeroExt32to64 y))
   209(Eq16  x y) => (I64Eq (ZeroExt16to64 x) (ZeroExt16to64 y))
   210(Eq8   x y) => (I64Eq (ZeroExt8to64  x) (ZeroExt8to64  y))
   211(EqB   ...) => (I64Eq ...)
   212(EqPtr ...) => (I64Eq ...)
   213(Eq(64|32)F ...) => (F(64|32)Eq ...)
   214
   215(Neq64  ...) => (I64Ne ...)
   216(Neq32  x y) => (I64Ne (ZeroExt32to64 x) (ZeroExt32to64 y))
   217(Neq16  x y) => (I64Ne (ZeroExt16to64 x) (ZeroExt16to64 y))
   218(Neq8   x y) => (I64Ne (ZeroExt8to64  x) (ZeroExt8to64  y))
   219(NeqB   ...) => (I64Ne ...)
   220(NeqPtr ...) => (I64Ne ...)
   221(Neq(64|32)F ...) => (F(64|32)Ne ...)
   222
   223// Lowering loads
// Dispatch on type: floats by width, integers by size and signedness
// (sized integer loads extend into the full i64 register).
   224(Load <t> ptr mem) && is32BitFloat(t) => (F32Load ptr mem)
   225(Load <t> ptr mem) && is64BitFloat(t) => (F64Load ptr mem)
   226(Load <t> ptr mem) && t.Size() == 8 => (I64Load ptr mem)
   227(Load <t> ptr mem) && t.Size() == 4 && !t.IsSigned() => (I64Load32U ptr mem)
   228(Load <t> ptr mem) && t.Size() == 4 &&  t.IsSigned() => (I64Load32S ptr mem)
   229(Load <t> ptr mem) && t.Size() == 2 && !t.IsSigned() => (I64Load16U ptr mem)
   230(Load <t> ptr mem) && t.Size() == 2 &&  t.IsSigned() => (I64Load16S ptr mem)
   231(Load <t> ptr mem) && t.Size() == 1 && !t.IsSigned() => (I64Load8U ptr mem)
   232(Load <t> ptr mem) && t.Size() == 1 &&  t.IsSigned() => (I64Load8S ptr mem)
   233
   234// Lowering stores
// Stores only care about size, not signedness.
   235(Store {t} ptr val mem) && is64BitFloat(t) => (F64Store ptr val mem)
   236(Store {t} ptr val mem) && is32BitFloat(t) => (F32Store ptr val mem)
   237(Store {t} ptr val mem) && t.Size() == 8 => (I64Store ptr val mem)
   238(Store {t} ptr val mem) && t.Size() == 4 => (I64Store32 ptr val mem)
   239(Store {t} ptr val mem) && t.Size() == 2 => (I64Store16 ptr val mem)
   240(Store {t} ptr val mem) && t.Size() == 1 => (I64Store8 ptr val mem)
   241
   242// Lowering moves
// Power-of-two sizes are single load/store pairs; other small sizes combine
// two accesses, overlapping them where no exact split exists.
   243(Move [0] _ _ mem) => mem
   244(Move [1] dst src mem) => (I64Store8 dst (I64Load8U src mem) mem)
   245(Move [2] dst src mem) => (I64Store16 dst (I64Load16U src mem) mem)
   246(Move [4] dst src mem) => (I64Store32 dst (I64Load32U src mem) mem)
   247(Move [8] dst src mem) => (I64Store dst (I64Load src mem) mem)
   248(Move [16] dst src mem) =>
   249	(I64Store [8] dst (I64Load [8] src mem)
   250		(I64Store dst (I64Load src mem) mem))
   251(Move [3] dst src mem) =>
   252	(I64Store8 [2] dst (I64Load8U [2] src mem)
   253		(I64Store16 dst (I64Load16U src mem) mem))
   254(Move [5] dst src mem) =>
   255	(I64Store8 [4] dst (I64Load8U [4] src mem)
   256		(I64Store32 dst (I64Load32U src mem) mem))
   257(Move [6] dst src mem) =>
   258	(I64Store16 [4] dst (I64Load16U [4] src mem)
   259		(I64Store32 dst (I64Load32U src mem) mem))
// 7 bytes: two 4-byte copies, overlapping at byte 3.
   260(Move [7] dst src mem) =>
   261	(I64Store32 [3] dst (I64Load32U [3] src mem)
   262		(I64Store32 dst (I64Load32U src mem) mem))
// 9..15 bytes: two 8-byte copies, the second overlapping the first.
   263(Move [s] dst src mem) && s > 8 && s < 16 =>
   264	(I64Store [s-8] dst (I64Load [s-8] src mem)
   265		(I64Store dst (I64Load src mem) mem))
   266
   267// Large copying uses helper.
   268(Move [s] dst src mem) && logLargeCopy(v, s) =>
   269	(LoweredMove [s] dst src mem)
   270
   271// Lowering Zero instructions
// Small sizes store explicit zero constants, mirroring the Move rules above.
   272(Zero [0] _ mem) => mem
   273(Zero [1] destptr mem) => (I64Store8 destptr (I64Const [0]) mem)
   274(Zero [2] destptr mem) => (I64Store16 destptr (I64Const [0]) mem)
   275(Zero [4] destptr mem) => (I64Store32 destptr (I64Const [0]) mem)
   276(Zero [8] destptr mem) => (I64Store destptr (I64Const [0]) mem)
   277
   278(Zero [3] destptr mem) =>
   279	(I64Store8 [2] destptr (I64Const [0])
   280		(I64Store16 destptr (I64Const [0]) mem))
   281(Zero [5] destptr mem) =>
   282	(I64Store8 [4] destptr (I64Const [0])
   283		(I64Store32 destptr (I64Const [0]) mem))
   284(Zero [6] destptr mem) =>
   285	(I64Store16 [4] destptr (I64Const [0])
   286		(I64Store32 destptr (I64Const [0]) mem))
// 7 bytes: two 4-byte stores, overlapping at byte 3.
   287(Zero [7] destptr mem) =>
   288	(I64Store32 [3] destptr (I64Const [0])
   289		(I64Store32 destptr (I64Const [0]) mem))
   290
   291// Strip off any fractional word zeroing.
// One 8-byte store covers the leading s%8 bytes (overlapping the next word),
// leaving a whole-word Zero for the rules below.
   292(Zero [s] destptr mem) && s%8 != 0 && s > 8 && s < 32 =>
   293	(Zero [s-s%8] (OffPtr <destptr.Type> destptr [s%8])
   294		(I64Store destptr (I64Const [0]) mem))
   295
   296// Zero small numbers of words directly.
   297(Zero [16] destptr mem) =>
   298	(I64Store [8] destptr (I64Const [0])
   299		(I64Store destptr (I64Const [0]) mem))
   300(Zero [24] destptr mem) =>
   301	(I64Store [16] destptr (I64Const [0])
   302		(I64Store [8] destptr (I64Const [0])
   303			(I64Store destptr (I64Const [0]) mem)))
   304(Zero [32] destptr mem) =>
   305	(I64Store [24] destptr (I64Const [0])
   306		(I64Store [16] destptr (I64Const [0])
   307			(I64Store [8] destptr (I64Const [0])
   308				(I64Store destptr (I64Const [0]) mem))))
   309
   310// Large zeroing uses helper.
   311(Zero [s] destptr mem) =>
   312	(LoweredZero [s] destptr mem)
   313
   314// Lowering constants
   315(Const64 ...) => (I64Const ...)
   316(Const(32|16|8) [c]) => (I64Const [int64(c)])
   317(Const(64|32)F ...) => (F(64|32)Const ...)
   318(ConstNil) => (I64Const [0])
   319(ConstBool [c]) => (I64Const [b2i(c)])
   320
   321// Lowering calls
   322(StaticCall ...) => (LoweredStaticCall ...)
   323(ClosureCall ...) => (LoweredClosureCall ...)
   324(InterCall ...) => (LoweredInterCall ...)
   325(TailCall ...) => (LoweredTailCall ...)
   326
   327// Miscellaneous
   328(Convert ...) => (LoweredConvert ...)
// p != nil becomes (p == 0) == 0, normalizing the result to a 0/1 boolean.
   329(IsNonNil p) => (I64Eqz (I64Eqz p))
   330(IsInBounds ...) => (I64LtU ...)
   331(IsSliceInBounds ...) => (I64LeU ...)
   332(NilCheck ...) => (LoweredNilCheck ...)
   333(GetClosurePtr ...) => (LoweredGetClosurePtr ...)
   334(GetCallerPC ...) => (LoweredGetCallerPC ...)
   335(GetCallerSP ...) => (LoweredGetCallerSP ...)
   336(Addr {sym} base) => (LoweredAddr {sym} [0] base)
// If the local contains pointers, anchor the address to the memory state so
// it cannot be reordered across the frame's lifetime.
   337(LocalAddr <t> {sym} base mem) && t.Elem().HasPointers() => (LoweredAddr {sym} (SPanchored base mem))
   338(LocalAddr <t> {sym} base _)  && !t.Elem().HasPointers() => (LoweredAddr {sym} base)
   339
   340// Write barrier.
   341(WB ...) => (LoweredWB ...)
   342
   343// --- Intrinsics ---
   344(Sqrt ...) => (F64Sqrt ...)
   345(Trunc ...) => (F64Trunc ...)
   346(Ceil ...) => (F64Ceil ...)
   347(Floor ...) => (F64Floor ...)
   348(RoundToEven ...) => (F64Nearest ...)
   349(Abs ...) => (F64Abs ...)
   350(Copysign ...) => (F64Copysign ...)
   351
   352(Sqrt32 ...) => (F32Sqrt ...)
   353
// For narrow Ctz, OR in a sentinel bit just above the value's width so a zero
// input counts exactly 32/16/8 trailing zeros instead of 64.
   354(Ctz64 ...) => (I64Ctz ...)
   355(Ctz32 x) => (I64Ctz (I64Or x (I64Const [0x100000000])))
   356(Ctz16 x) => (I64Ctz (I64Or x (I64Const [0x10000])))
   357(Ctz8  x) => (I64Ctz (I64Or x (I64Const [0x100])))
   358
// When the input is known nonzero, no sentinel is needed.
   359(Ctz(64|32|16|8)NonZero ...) => (I64Ctz ...)
   360
// BitLen64(x) = 64 - leading zeros; narrower widths zero-extend and reuse it.
   361(BitLen64 x) => (I64Sub (I64Const [64]) (I64Clz x))
   362(BitLen(32|16|8) x) => (BitLen64 (ZeroExt(32|16|8)to64 x))
   363
// Popcount must zero-extend so junk high bits are not counted.
   364(PopCount64 ...) => (I64Popcnt ...)
   365(PopCount32 x) => (I64Popcnt (ZeroExt32to64 x))
   366(PopCount16 x) => (I64Popcnt (ZeroExt16to64 x))
   367(PopCount8  x) => (I64Popcnt (ZeroExt8to64  x))
   368
   369(CondSelect ...) => (Select ...)
   370
   371// --- Optimizations ---
// Constant folding on the lowered Wasm ops.
   372(I64Add (I64Const [x]) (I64Const [y])) => (I64Const [x + y])
   373(I64Mul (I64Const [x]) (I64Const [y])) => (I64Const [x * y])
   374(I64And (I64Const [x]) (I64Const [y])) => (I64Const [x & y])
   375(I64Or  (I64Const [x]) (I64Const [y])) => (I64Const [x | y])
   376(I64Xor (I64Const [x]) (I64Const [y])) => (I64Const [x ^ y])
   377(F64Add (F64Const [x]) (F64Const [y])) => (F64Const [x + y])
// NaN results are not folded: the aux field cannot faithfully encode them.
   378(F64Mul (F64Const [x]) (F64Const [y])) && !math.IsNaN(x * y) => (F64Const [x * y])
   379(I64Eq  (I64Const [x]) (I64Const [y])) && x == y => (I64Const [1])
   380(I64Eq  (I64Const [x]) (I64Const [y])) && x != y => (I64Const [0])
   381(I64Ne  (I64Const [x]) (I64Const [y])) && x == y => (I64Const [0])
   382(I64Ne  (I64Const [x]) (I64Const [y])) && x != y => (I64Const [1])
   383
   384(I64Shl (I64Const [x]) (I64Const [y])) => (I64Const [x << uint64(y)])
   385(I64ShrU (I64Const [x]) (I64Const [y])) => (I64Const [int64(uint64(x) >> uint64(y))])
   386(I64ShrS (I64Const [x]) (I64Const [y])) => (I64Const [x >> uint64(y)])
   387
   388// TODO: declare these operations as commutative and get rid of these rules?
// Canonicalize constants into the second operand (the guard prevents an
// infinite rewrite loop when both operands are constants).
   389(I64Add (I64Const [x]) y) && y.Op != OpWasmI64Const => (I64Add y (I64Const [x]))
   390(I64Mul (I64Const [x]) y) && y.Op != OpWasmI64Const => (I64Mul y (I64Const [x]))
   391(I64And (I64Const [x]) y) && y.Op != OpWasmI64Const => (I64And y (I64Const [x]))
   392(I64Or  (I64Const [x]) y) && y.Op != OpWasmI64Const => (I64Or  y (I64Const [x]))
   393(I64Xor (I64Const [x]) y) && y.Op != OpWasmI64Const => (I64Xor y (I64Const [x]))
   394(F64Add (F64Const [x]) y) && y.Op != OpWasmF64Const => (F64Add y (F64Const [x]))
   395(F64Mul (F64Const [x]) y) && y.Op != OpWasmF64Const => (F64Mul y (F64Const [x]))
   396(I64Eq  (I64Const [x]) y) && y.Op != OpWasmI64Const => (I64Eq y  (I64Const [x]))
   397(I64Ne  (I64Const [x]) y) && y.Op != OpWasmI64Const => (I64Ne y  (I64Const [x]))
   398
// Comparisons against 0/1 collapse into (possibly doubled) i64.eqz.
   399(I64Eq x (I64Const [0])) => (I64Eqz x)
   400(I64LtU (I64Const [0]) x) => (I64Eqz (I64Eqz x))
   401(I64LeU x (I64Const [0])) => (I64Eqz x)
   402(I64LtU x (I64Const [1])) => (I64Eqz x)
   403(I64LeU (I64Const [1]) x) => (I64Eqz (I64Eqz x))
   404(I64Ne x (I64Const [0])) => (I64Eqz (I64Eqz x))
   405
// NOTE(review): pointer-typed constants are deliberately excluded from the
// I64AddConst form — presumably to keep pointer arithmetic visible to later
// passes; confirm the reasoning against the ops definition.
   406(I64Add x (I64Const <t> [y])) && !t.IsPtr() => (I64AddConst [y] x)
   407(I64AddConst [0] x) => x
// Triple eqz: the outer pair is an idempotent booleanization of a boolean.
   408(I64Eqz (I64Eqz (I64Eqz x))) => (I64Eqz x)
   409
   410// folding offset into load/store
// Wasm load/store offsets are unsigned 32-bit immediates, hence isU32Bit.
   411((I64Load|I64Load32U|I64Load32S|I64Load16U|I64Load16S|I64Load8U|I64Load8S) [off] (I64AddConst [off2] ptr) mem)
   412	&& isU32Bit(off+off2) =>
   413	((I64Load|I64Load32U|I64Load32S|I64Load16U|I64Load16S|I64Load8U|I64Load8S) [off+off2] ptr mem)
   414
   415((I64Store|I64Store32|I64Store16|I64Store8) [off] (I64AddConst [off2] ptr) val mem)
   416	&& isU32Bit(off+off2) =>
   417	((I64Store|I64Store32|I64Store16|I64Store8) [off+off2] ptr val mem)
   418
   419// folding offset into address
   420(I64AddConst [off] (LoweredAddr {sym} [off2] base)) && isU32Bit(off+int64(off2)) =>
   421	(LoweredAddr {sym} [int32(off)+off2] base)
   422(I64AddConst [off] x:(SP)) && isU32Bit(off) => (LoweredAddr [int32(off)] x) // so it is rematerializeable
   423
   424// transforming readonly globals into constants
// Loads from read-only symbols are resolved at compile time via read64/32/16/8;
// signed variants re-extend the value after reading.
   425(I64Load [off] (LoweredAddr {sym} [off2] (SB)) _) && symIsRO(sym) && isU32Bit(off+int64(off2)) => (I64Const [int64(read64(sym, off+int64(off2), config.ctxt.Arch.ByteOrder))])
   426(I64Load32U [off] (LoweredAddr {sym} [off2] (SB)) _) && symIsRO(sym) && isU32Bit(off+int64(off2)) => (I64Const [int64(read32(sym, off+int64(off2), config.ctxt.Arch.ByteOrder))])
   427(I64Load16U [off] (LoweredAddr {sym} [off2] (SB)) _) && symIsRO(sym) && isU32Bit(off+int64(off2)) => (I64Const [int64(read16(sym, off+int64(off2), config.ctxt.Arch.ByteOrder))])
   428(I64Load8U [off] (LoweredAddr {sym} [off2] (SB)) _) && symIsRO(sym) && isU32Bit(off+int64(off2)) => (I64Const [int64(read8(sym, off+int64(off2)))])
   429(I64Load32S [off] (LoweredAddr {sym} [off2] (SB)) _) && symIsRO(sym) && isU32Bit(off+int64(off2)) => (I64Const [int64(int32(read32(sym, off+int64(off2), config.ctxt.Arch.ByteOrder)))])
   430(I64Load16S [off] (LoweredAddr {sym} [off2] (SB)) _) && symIsRO(sym) && isU32Bit(off+int64(off2)) => (I64Const [int64(int16(read16(sym, off+int64(off2), config.ctxt.Arch.ByteOrder)))])
   431(I64Load8S [off] (LoweredAddr {sym} [off2] (SB)) _) && symIsRO(sym) && isU32Bit(off+int64(off2)) => (I64Const [int64(int8(read8(sym, off+int64(off2))))])

View as plain text