Source file src/cmd/compile/internal/ssa/rewritegeneric.go

     1  // Code generated from _gen/generic.rules using 'go generate'; DO NOT EDIT.
     2  
     3  package ssa
     4  
     5  import "math"
     6  import "math/bits"
     7  import "cmd/internal/obj"
     8  import "cmd/compile/internal/types"
     9  import "cmd/compile/internal/ir"
    10  
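// rewriteValuegeneric applies the machine-independent ("generic") rewrite
// rules to v. It dispatches on v.Op to a per-opcode helper and reports
// whether v was rewritten in place.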
    11  func rewriteValuegeneric(v *Value) bool {
    12  	switch v.Op {
    13  	case OpAdd16:
    14  		return rewriteValuegeneric_OpAdd16(v)
    15  	case OpAdd32:
    16  		return rewriteValuegeneric_OpAdd32(v)
    17  	case OpAdd32F:
    18  		return rewriteValuegeneric_OpAdd32F(v)
    19  	case OpAdd64:
    20  		return rewriteValuegeneric_OpAdd64(v)
    21  	case OpAdd64F:
    22  		return rewriteValuegeneric_OpAdd64F(v)
    23  	case OpAdd8:
    24  		return rewriteValuegeneric_OpAdd8(v)
    25  	case OpAddPtr:
    26  		return rewriteValuegeneric_OpAddPtr(v)
    27  	case OpAnd16:
    28  		return rewriteValuegeneric_OpAnd16(v)
    29  	case OpAnd32:
    30  		return rewriteValuegeneric_OpAnd32(v)
    31  	case OpAnd64:
    32  		return rewriteValuegeneric_OpAnd64(v)
    33  	case OpAnd8:
    34  		return rewriteValuegeneric_OpAnd8(v)
    35  	case OpAndB:
    36  		return rewriteValuegeneric_OpAndB(v)
    37  	case OpArraySelect:
    38  		return rewriteValuegeneric_OpArraySelect(v)
    39  	case OpBitLen16:
    40  		return rewriteValuegeneric_OpBitLen16(v)
    41  	case OpBitLen32:
    42  		return rewriteValuegeneric_OpBitLen32(v)
    43  	case OpBitLen64:
    44  		return rewriteValuegeneric_OpBitLen64(v)
    45  	case OpBitLen8:
    46  		return rewriteValuegeneric_OpBitLen8(v)
    47  	case OpCeil:
    48  		return rewriteValuegeneric_OpCeil(v)
    49  	case OpCom16:
    50  		return rewriteValuegeneric_OpCom16(v)
    51  	case OpCom32:
    52  		return rewriteValuegeneric_OpCom32(v)
    53  	case OpCom64:
    54  		return rewriteValuegeneric_OpCom64(v)
    55  	case OpCom8:
    56  		return rewriteValuegeneric_OpCom8(v)
    57  	case OpConstInterface:
    58  		return rewriteValuegeneric_OpConstInterface(v)
    59  	case OpConstSlice:
    60  		return rewriteValuegeneric_OpConstSlice(v)
    61  	case OpConstString:
    62  		return rewriteValuegeneric_OpConstString(v)
    63  	case OpConvert:
    64  		return rewriteValuegeneric_OpConvert(v)
    65  	case OpCtz16:
    66  		return rewriteValuegeneric_OpCtz16(v)
    67  	case OpCtz32:
    68  		return rewriteValuegeneric_OpCtz32(v)
    69  	case OpCtz64:
    70  		return rewriteValuegeneric_OpCtz64(v)
    71  	case OpCtz8:
    72  		return rewriteValuegeneric_OpCtz8(v)
    73  	case OpCvt32Fto32:
    74  		return rewriteValuegeneric_OpCvt32Fto32(v)
    75  	case OpCvt32Fto64:
    76  		return rewriteValuegeneric_OpCvt32Fto64(v)
    77  	case OpCvt32Fto64F:
    78  		return rewriteValuegeneric_OpCvt32Fto64F(v)
    79  	case OpCvt32to32F:
    80  		return rewriteValuegeneric_OpCvt32to32F(v)
    81  	case OpCvt32to64F:
    82  		return rewriteValuegeneric_OpCvt32to64F(v)
    83  	case OpCvt64Fto32:
    84  		return rewriteValuegeneric_OpCvt64Fto32(v)
    85  	case OpCvt64Fto32F:
    86  		return rewriteValuegeneric_OpCvt64Fto32F(v)
    87  	case OpCvt64Fto64:
    88  		return rewriteValuegeneric_OpCvt64Fto64(v)
    89  	case OpCvt64to32F:
    90  		return rewriteValuegeneric_OpCvt64to32F(v)
    91  	case OpCvt64to64F:
    92  		return rewriteValuegeneric_OpCvt64to64F(v)
    93  	case OpCvtBoolToUint8:
    94  		return rewriteValuegeneric_OpCvtBoolToUint8(v)
    95  	case OpDiv16:
    96  		return rewriteValuegeneric_OpDiv16(v)
    97  	case OpDiv16u:
    98  		return rewriteValuegeneric_OpDiv16u(v)
    99  	case OpDiv32:
   100  		return rewriteValuegeneric_OpDiv32(v)
   101  	case OpDiv32F:
   102  		return rewriteValuegeneric_OpDiv32F(v)
   103  	case OpDiv32u:
   104  		return rewriteValuegeneric_OpDiv32u(v)
   105  	case OpDiv64:
   106  		return rewriteValuegeneric_OpDiv64(v)
   107  	case OpDiv64F:
   108  		return rewriteValuegeneric_OpDiv64F(v)
   109  	case OpDiv64u:
   110  		return rewriteValuegeneric_OpDiv64u(v)
   111  	case OpDiv8:
   112  		return rewriteValuegeneric_OpDiv8(v)
   113  	case OpDiv8u:
   114  		return rewriteValuegeneric_OpDiv8u(v)
   115  	case OpEq16:
   116  		return rewriteValuegeneric_OpEq16(v)
   117  	case OpEq32:
   118  		return rewriteValuegeneric_OpEq32(v)
   119  	case OpEq32F:
   120  		return rewriteValuegeneric_OpEq32F(v)
   121  	case OpEq64:
   122  		return rewriteValuegeneric_OpEq64(v)
   123  	case OpEq64F:
   124  		return rewriteValuegeneric_OpEq64F(v)
   125  	case OpEq8:
   126  		return rewriteValuegeneric_OpEq8(v)
   127  	case OpEqB:
   128  		return rewriteValuegeneric_OpEqB(v)
   129  	case OpEqInter:
   130  		return rewriteValuegeneric_OpEqInter(v)
   131  	case OpEqPtr:
   132  		return rewriteValuegeneric_OpEqPtr(v)
   133  	case OpEqSlice:
   134  		return rewriteValuegeneric_OpEqSlice(v)
   135  	case OpFloor:
   136  		return rewriteValuegeneric_OpFloor(v)
   137  	case OpIMake:
   138  		return rewriteValuegeneric_OpIMake(v)
   139  	case OpInterLECall:
   140  		return rewriteValuegeneric_OpInterLECall(v)
   141  	case OpIsInBounds:
   142  		return rewriteValuegeneric_OpIsInBounds(v)
   143  	case OpIsNonNil:
   144  		return rewriteValuegeneric_OpIsNonNil(v)
   145  	case OpIsSliceInBounds:
   146  		return rewriteValuegeneric_OpIsSliceInBounds(v)
   147  	case OpLeq16:
   148  		return rewriteValuegeneric_OpLeq16(v)
   149  	case OpLeq16U:
   150  		return rewriteValuegeneric_OpLeq16U(v)
   151  	case OpLeq32:
   152  		return rewriteValuegeneric_OpLeq32(v)
   153  	case OpLeq32F:
   154  		return rewriteValuegeneric_OpLeq32F(v)
   155  	case OpLeq32U:
   156  		return rewriteValuegeneric_OpLeq32U(v)
   157  	case OpLeq64:
   158  		return rewriteValuegeneric_OpLeq64(v)
   159  	case OpLeq64F:
   160  		return rewriteValuegeneric_OpLeq64F(v)
   161  	case OpLeq64U:
   162  		return rewriteValuegeneric_OpLeq64U(v)
   163  	case OpLeq8:
   164  		return rewriteValuegeneric_OpLeq8(v)
   165  	case OpLeq8U:
   166  		return rewriteValuegeneric_OpLeq8U(v)
   167  	case OpLess16:
   168  		return rewriteValuegeneric_OpLess16(v)
   169  	case OpLess16U:
   170  		return rewriteValuegeneric_OpLess16U(v)
   171  	case OpLess32:
   172  		return rewriteValuegeneric_OpLess32(v)
   173  	case OpLess32F:
   174  		return rewriteValuegeneric_OpLess32F(v)
   175  	case OpLess32U:
   176  		return rewriteValuegeneric_OpLess32U(v)
   177  	case OpLess64:
   178  		return rewriteValuegeneric_OpLess64(v)
   179  	case OpLess64F:
   180  		return rewriteValuegeneric_OpLess64F(v)
   181  	case OpLess64U:
   182  		return rewriteValuegeneric_OpLess64U(v)
   183  	case OpLess8:
   184  		return rewriteValuegeneric_OpLess8(v)
   185  	case OpLess8U:
   186  		return rewriteValuegeneric_OpLess8U(v)
   187  	case OpLoad:
   188  		return rewriteValuegeneric_OpLoad(v)
   189  	case OpLsh16x16:
   190  		return rewriteValuegeneric_OpLsh16x16(v)
   191  	case OpLsh16x32:
   192  		return rewriteValuegeneric_OpLsh16x32(v)
   193  	case OpLsh16x64:
   194  		return rewriteValuegeneric_OpLsh16x64(v)
   195  	case OpLsh16x8:
   196  		return rewriteValuegeneric_OpLsh16x8(v)
   197  	case OpLsh32x16:
   198  		return rewriteValuegeneric_OpLsh32x16(v)
   199  	case OpLsh32x32:
   200  		return rewriteValuegeneric_OpLsh32x32(v)
   201  	case OpLsh32x64:
   202  		return rewriteValuegeneric_OpLsh32x64(v)
   203  	case OpLsh32x8:
   204  		return rewriteValuegeneric_OpLsh32x8(v)
   205  	case OpLsh64x16:
   206  		return rewriteValuegeneric_OpLsh64x16(v)
   207  	case OpLsh64x32:
   208  		return rewriteValuegeneric_OpLsh64x32(v)
   209  	case OpLsh64x64:
   210  		return rewriteValuegeneric_OpLsh64x64(v)
   211  	case OpLsh64x8:
   212  		return rewriteValuegeneric_OpLsh64x8(v)
   213  	case OpLsh8x16:
   214  		return rewriteValuegeneric_OpLsh8x16(v)
   215  	case OpLsh8x32:
   216  		return rewriteValuegeneric_OpLsh8x32(v)
   217  	case OpLsh8x64:
   218  		return rewriteValuegeneric_OpLsh8x64(v)
   219  	case OpLsh8x8:
   220  		return rewriteValuegeneric_OpLsh8x8(v)
   221  	case OpMod16:
   222  		return rewriteValuegeneric_OpMod16(v)
   223  	case OpMod16u:
   224  		return rewriteValuegeneric_OpMod16u(v)
   225  	case OpMod32:
   226  		return rewriteValuegeneric_OpMod32(v)
   227  	case OpMod32u:
   228  		return rewriteValuegeneric_OpMod32u(v)
   229  	case OpMod64:
   230  		return rewriteValuegeneric_OpMod64(v)
   231  	case OpMod64u:
   232  		return rewriteValuegeneric_OpMod64u(v)
   233  	case OpMod8:
   234  		return rewriteValuegeneric_OpMod8(v)
   235  	case OpMod8u:
   236  		return rewriteValuegeneric_OpMod8u(v)
   237  	case OpMove:
   238  		return rewriteValuegeneric_OpMove(v)
   239  	case OpMul16:
   240  		return rewriteValuegeneric_OpMul16(v)
   241  	case OpMul32:
   242  		return rewriteValuegeneric_OpMul32(v)
   243  	case OpMul32F:
   244  		return rewriteValuegeneric_OpMul32F(v)
   245  	case OpMul64:
   246  		return rewriteValuegeneric_OpMul64(v)
   247  	case OpMul64F:
   248  		return rewriteValuegeneric_OpMul64F(v)
   249  	case OpMul8:
   250  		return rewriteValuegeneric_OpMul8(v)
   251  	case OpNeg16:
   252  		return rewriteValuegeneric_OpNeg16(v)
   253  	case OpNeg32:
   254  		return rewriteValuegeneric_OpNeg32(v)
   255  	case OpNeg32F:
   256  		return rewriteValuegeneric_OpNeg32F(v)
   257  	case OpNeg64:
   258  		return rewriteValuegeneric_OpNeg64(v)
   259  	case OpNeg64F:
   260  		return rewriteValuegeneric_OpNeg64F(v)
   261  	case OpNeg8:
   262  		return rewriteValuegeneric_OpNeg8(v)
   263  	case OpNeq16:
   264  		return rewriteValuegeneric_OpNeq16(v)
   265  	case OpNeq32:
   266  		return rewriteValuegeneric_OpNeq32(v)
   267  	case OpNeq32F:
   268  		return rewriteValuegeneric_OpNeq32F(v)
   269  	case OpNeq64:
   270  		return rewriteValuegeneric_OpNeq64(v)
   271  	case OpNeq64F:
   272  		return rewriteValuegeneric_OpNeq64F(v)
   273  	case OpNeq8:
   274  		return rewriteValuegeneric_OpNeq8(v)
   275  	case OpNeqB:
   276  		return rewriteValuegeneric_OpNeqB(v)
   277  	case OpNeqInter:
   278  		return rewriteValuegeneric_OpNeqInter(v)
   279  	case OpNeqPtr:
   280  		return rewriteValuegeneric_OpNeqPtr(v)
   281  	case OpNeqSlice:
   282  		return rewriteValuegeneric_OpNeqSlice(v)
   283  	case OpNilCheck:
   284  		return rewriteValuegeneric_OpNilCheck(v)
   285  	case OpNot:
   286  		return rewriteValuegeneric_OpNot(v)
   287  	case OpOffPtr:
   288  		return rewriteValuegeneric_OpOffPtr(v)
   289  	case OpOr16:
   290  		return rewriteValuegeneric_OpOr16(v)
   291  	case OpOr32:
   292  		return rewriteValuegeneric_OpOr32(v)
   293  	case OpOr64:
   294  		return rewriteValuegeneric_OpOr64(v)
   295  	case OpOr8:
   296  		return rewriteValuegeneric_OpOr8(v)
   297  	case OpOrB:
   298  		return rewriteValuegeneric_OpOrB(v)
   299  	case OpPhi:
   300  		return rewriteValuegeneric_OpPhi(v)
   301  	case OpPtrIndex:
   302  		return rewriteValuegeneric_OpPtrIndex(v)
   303  	case OpRotateLeft16:
   304  		return rewriteValuegeneric_OpRotateLeft16(v)
   305  	case OpRotateLeft32:
   306  		return rewriteValuegeneric_OpRotateLeft32(v)
   307  	case OpRotateLeft64:
   308  		return rewriteValuegeneric_OpRotateLeft64(v)
   309  	case OpRotateLeft8:
   310  		return rewriteValuegeneric_OpRotateLeft8(v)
   311  	case OpRound32F:
   312  		return rewriteValuegeneric_OpRound32F(v)
   313  	case OpRound64F:
   314  		return rewriteValuegeneric_OpRound64F(v)
   315  	case OpRoundToEven:
   316  		return rewriteValuegeneric_OpRoundToEven(v)
   317  	case OpRsh16Ux16:
   318  		return rewriteValuegeneric_OpRsh16Ux16(v)
   319  	case OpRsh16Ux32:
   320  		return rewriteValuegeneric_OpRsh16Ux32(v)
   321  	case OpRsh16Ux64:
   322  		return rewriteValuegeneric_OpRsh16Ux64(v)
   323  	case OpRsh16Ux8:
   324  		return rewriteValuegeneric_OpRsh16Ux8(v)
   325  	case OpRsh16x16:
   326  		return rewriteValuegeneric_OpRsh16x16(v)
   327  	case OpRsh16x32:
   328  		return rewriteValuegeneric_OpRsh16x32(v)
   329  	case OpRsh16x64:
   330  		return rewriteValuegeneric_OpRsh16x64(v)
   331  	case OpRsh16x8:
   332  		return rewriteValuegeneric_OpRsh16x8(v)
   333  	case OpRsh32Ux16:
   334  		return rewriteValuegeneric_OpRsh32Ux16(v)
   335  	case OpRsh32Ux32:
   336  		return rewriteValuegeneric_OpRsh32Ux32(v)
   337  	case OpRsh32Ux64:
   338  		return rewriteValuegeneric_OpRsh32Ux64(v)
   339  	case OpRsh32Ux8:
   340  		return rewriteValuegeneric_OpRsh32Ux8(v)
   341  	case OpRsh32x16:
   342  		return rewriteValuegeneric_OpRsh32x16(v)
   343  	case OpRsh32x32:
   344  		return rewriteValuegeneric_OpRsh32x32(v)
   345  	case OpRsh32x64:
   346  		return rewriteValuegeneric_OpRsh32x64(v)
   347  	case OpRsh32x8:
   348  		return rewriteValuegeneric_OpRsh32x8(v)
   349  	case OpRsh64Ux16:
   350  		return rewriteValuegeneric_OpRsh64Ux16(v)
   351  	case OpRsh64Ux32:
   352  		return rewriteValuegeneric_OpRsh64Ux32(v)
   353  	case OpRsh64Ux64:
   354  		return rewriteValuegeneric_OpRsh64Ux64(v)
   355  	case OpRsh64Ux8:
   356  		return rewriteValuegeneric_OpRsh64Ux8(v)
   357  	case OpRsh64x16:
   358  		return rewriteValuegeneric_OpRsh64x16(v)
   359  	case OpRsh64x32:
   360  		return rewriteValuegeneric_OpRsh64x32(v)
   361  	case OpRsh64x64:
   362  		return rewriteValuegeneric_OpRsh64x64(v)
   363  	case OpRsh64x8:
   364  		return rewriteValuegeneric_OpRsh64x8(v)
   365  	case OpRsh8Ux16:
   366  		return rewriteValuegeneric_OpRsh8Ux16(v)
   367  	case OpRsh8Ux32:
   368  		return rewriteValuegeneric_OpRsh8Ux32(v)
   369  	case OpRsh8Ux64:
   370  		return rewriteValuegeneric_OpRsh8Ux64(v)
   371  	case OpRsh8Ux8:
   372  		return rewriteValuegeneric_OpRsh8Ux8(v)
   373  	case OpRsh8x16:
   374  		return rewriteValuegeneric_OpRsh8x16(v)
   375  	case OpRsh8x32:
   376  		return rewriteValuegeneric_OpRsh8x32(v)
   377  	case OpRsh8x64:
   378  		return rewriteValuegeneric_OpRsh8x64(v)
   379  	case OpRsh8x8:
   380  		return rewriteValuegeneric_OpRsh8x8(v)
   381  	case OpSelect0:
   382  		return rewriteValuegeneric_OpSelect0(v)
   383  	case OpSelect1:
   384  		return rewriteValuegeneric_OpSelect1(v)
   385  	case OpSelectN:
   386  		return rewriteValuegeneric_OpSelectN(v)
   387  	case OpSignExt16to32:
   388  		return rewriteValuegeneric_OpSignExt16to32(v)
   389  	case OpSignExt16to64:
   390  		return rewriteValuegeneric_OpSignExt16to64(v)
   391  	case OpSignExt32to64:
   392  		return rewriteValuegeneric_OpSignExt32to64(v)
   393  	case OpSignExt8to16:
   394  		return rewriteValuegeneric_OpSignExt8to16(v)
   395  	case OpSignExt8to32:
   396  		return rewriteValuegeneric_OpSignExt8to32(v)
   397  	case OpSignExt8to64:
   398  		return rewriteValuegeneric_OpSignExt8to64(v)
   399  	case OpSliceCap:
   400  		return rewriteValuegeneric_OpSliceCap(v)
   401  	case OpSliceLen:
   402  		return rewriteValuegeneric_OpSliceLen(v)
   403  	case OpSlicePtr:
   404  		return rewriteValuegeneric_OpSlicePtr(v)
   405  	case OpSlicemask:
   406  		return rewriteValuegeneric_OpSlicemask(v)
   407  	case OpSqrt:
   408  		return rewriteValuegeneric_OpSqrt(v)
   409  	case OpStaticCall:
   410  		return rewriteValuegeneric_OpStaticCall(v)
   411  	case OpStaticLECall:
   412  		return rewriteValuegeneric_OpStaticLECall(v)
   413  	case OpStore:
   414  		return rewriteValuegeneric_OpStore(v)
   415  	case OpStringLen:
   416  		return rewriteValuegeneric_OpStringLen(v)
   417  	case OpStringPtr:
   418  		return rewriteValuegeneric_OpStringPtr(v)
   419  	case OpStructSelect:
   420  		return rewriteValuegeneric_OpStructSelect(v)
   421  	case OpSub16:
   422  		return rewriteValuegeneric_OpSub16(v)
   423  	case OpSub32:
   424  		return rewriteValuegeneric_OpSub32(v)
   425  	case OpSub32F:
   426  		return rewriteValuegeneric_OpSub32F(v)
   427  	case OpSub64:
   428  		return rewriteValuegeneric_OpSub64(v)
   429  	case OpSub64F:
   430  		return rewriteValuegeneric_OpSub64F(v)
   431  	case OpSub8:
   432  		return rewriteValuegeneric_OpSub8(v)
   433  	case OpTrunc:
   434  		return rewriteValuegeneric_OpTrunc(v)
   435  	case OpTrunc16to8:
   436  		return rewriteValuegeneric_OpTrunc16to8(v)
   437  	case OpTrunc32to16:
   438  		return rewriteValuegeneric_OpTrunc32to16(v)
   439  	case OpTrunc32to8:
   440  		return rewriteValuegeneric_OpTrunc32to8(v)
   441  	case OpTrunc64to16:
   442  		return rewriteValuegeneric_OpTrunc64to16(v)
   443  	case OpTrunc64to32:
   444  		return rewriteValuegeneric_OpTrunc64to32(v)
   445  	case OpTrunc64to8:
   446  		return rewriteValuegeneric_OpTrunc64to8(v)
   447  	case OpXor16:
   448  		return rewriteValuegeneric_OpXor16(v)
   449  	case OpXor32:
   450  		return rewriteValuegeneric_OpXor32(v)
   451  	case OpXor64:
   452  		return rewriteValuegeneric_OpXor64(v)
   453  	case OpXor8:
   454  		return rewriteValuegeneric_OpXor8(v)
   455  	case OpZero:
   456  		return rewriteValuegeneric_OpZero(v)
   457  	case OpZeroExt16to32:
   458  		return rewriteValuegeneric_OpZeroExt16to32(v)
   459  	case OpZeroExt16to64:
   460  		return rewriteValuegeneric_OpZeroExt16to64(v)
   461  	case OpZeroExt32to64:
   462  		return rewriteValuegeneric_OpZeroExt32to64(v)
   463  	case OpZeroExt8to16:
   464  		return rewriteValuegeneric_OpZeroExt8to16(v)
   465  	case OpZeroExt8to32:
   466  		return rewriteValuegeneric_OpZeroExt8to32(v)
   467  	case OpZeroExt8to64:
   468  		return rewriteValuegeneric_OpZeroExt8to64(v)
   469  	}
   470  	return false
   471  }
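// rewriteValuegeneric_OpAdd16 tries each (Add16 ...) rule in the order it
// appears in _gen/generic.rules. Because Add16 is commutative, each rule body
// loops over both argument orders (the _i0/_i1 swap loops) before falling
// through to the next rule.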
   472  func rewriteValuegeneric_OpAdd16(v *Value) bool {
   473  	v_1 := v.Args[1]
   474  	v_0 := v.Args[0]
   475  	b := v.Block
   476  	config := b.Func.Config
   477  	// match: (Add16 (Const16 [c]) (Const16 [d]))
   478  	// result: (Const16 [c+d])
   479  	for {
   480  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   481  			if v_0.Op != OpConst16 {
   482  				continue
   483  			}
   484  			c := auxIntToInt16(v_0.AuxInt)
   485  			if v_1.Op != OpConst16 {
   486  				continue
   487  			}
   488  			d := auxIntToInt16(v_1.AuxInt)
   489  			v.reset(OpConst16)
   490  			v.AuxInt = int16ToAuxInt(c + d)
   491  			return true
   492  		}
   493  		break
   494  	}
   495  	// match: (Add16 <t> (Mul16 x y) (Mul16 x z))
   496  	// result: (Mul16 x (Add16 <t> y z))
   497  	for {
   498  		t := v.Type
   499  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   500  			if v_0.Op != OpMul16 {
   501  				continue
   502  			}
   503  			_ = v_0.Args[1]
   504  			v_0_0 := v_0.Args[0]
   505  			v_0_1 := v_0.Args[1]
   506  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
   507  				x := v_0_0
   508  				y := v_0_1
   509  				if v_1.Op != OpMul16 {
   510  					continue
   511  				}
   512  				_ = v_1.Args[1]
   513  				v_1_0 := v_1.Args[0]
   514  				v_1_1 := v_1.Args[1]
   515  				for _i2 := 0; _i2 <= 1; _i2, v_1_0, v_1_1 = _i2+1, v_1_1, v_1_0 {
   516  					if x != v_1_0 {
   517  						continue
   518  					}
   519  					z := v_1_1
   520  					v.reset(OpMul16)
   521  					v0 := b.NewValue0(v.Pos, OpAdd16, t)
   522  					v0.AddArg2(y, z)
   523  					v.AddArg2(x, v0)
   524  					return true
   525  				}
   526  			}
   527  		}
   528  		break
   529  	}
   530  	// match: (Add16 (Const16 [0]) x)
   531  	// result: x
   532  	for {
   533  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   534  			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
   535  				continue
   536  			}
   537  			x := v_1
   538  			v.copyOf(x)
   539  			return true
   540  		}
   541  		break
   542  	}
   543  	// match: (Add16 x (Neg16 y))
   544  	// result: (Sub16 x y)
   545  	for {
   546  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   547  			x := v_0
   548  			if v_1.Op != OpNeg16 {
   549  				continue
   550  			}
   551  			y := v_1.Args[0]
   552  			v.reset(OpSub16)
   553  			v.AddArg2(x, y)
   554  			return true
   555  		}
   556  		break
   557  	}
   558  	// match: (Add16 (Com16 x) x)
   559  	// result: (Const16 [-1])
   560  	for {
   561  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   562  			if v_0.Op != OpCom16 {
   563  				continue
   564  			}
   565  			x := v_0.Args[0]
   566  			if x != v_1 {
   567  				continue
   568  			}
   569  			v.reset(OpConst16)
   570  			v.AuxInt = int16ToAuxInt(-1)
   571  			return true
   572  		}
   573  		break
   574  	}
   575  	// match: (Add16 (Sub16 x t) (Add16 t y))
   576  	// result: (Add16 x y)
   577  	for {
   578  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   579  			if v_0.Op != OpSub16 {
   580  				continue
   581  			}
   582  			t := v_0.Args[1]
   583  			x := v_0.Args[0]
   584  			if v_1.Op != OpAdd16 {
   585  				continue
   586  			}
   587  			_ = v_1.Args[1]
   588  			v_1_0 := v_1.Args[0]
   589  			v_1_1 := v_1.Args[1]
   590  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
   591  				if t != v_1_0 {
   592  					continue
   593  				}
   594  				y := v_1_1
   595  				v.reset(OpAdd16)
   596  				v.AddArg2(x, y)
   597  				return true
   598  			}
   599  		}
   600  		break
   601  	}
   602  	// match: (Add16 (Const16 [1]) (Com16 x))
   603  	// result: (Neg16 x)
   604  	for {
   605  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   606  			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 1 || v_1.Op != OpCom16 {
   607  				continue
   608  			}
   609  			x := v_1.Args[0]
   610  			v.reset(OpNeg16)
   611  			v.AddArg(x)
   612  			return true
   613  		}
   614  		break
   615  	}
   616  	// match: (Add16 x (Sub16 y x))
   617  	// result: y
   618  	for {
   619  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   620  			x := v_0
   621  			if v_1.Op != OpSub16 {
   622  				continue
   623  			}
   624  			_ = v_1.Args[1]
   625  			y := v_1.Args[0]
   626  			if x != v_1.Args[1] {
   627  				continue
   628  			}
   629  			v.copyOf(y)
   630  			return true
   631  		}
   632  		break
   633  	}
   634  	// match: (Add16 x (Add16 y (Sub16 z x)))
   635  	// result: (Add16 y z)
   636  	for {
   637  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   638  			x := v_0
   639  			if v_1.Op != OpAdd16 {
   640  				continue
   641  			}
   642  			_ = v_1.Args[1]
   643  			v_1_0 := v_1.Args[0]
   644  			v_1_1 := v_1.Args[1]
   645  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
   646  				y := v_1_0
   647  				if v_1_1.Op != OpSub16 {
   648  					continue
   649  				}
   650  				_ = v_1_1.Args[1]
   651  				z := v_1_1.Args[0]
   652  				if x != v_1_1.Args[1] {
   653  					continue
   654  				}
   655  				v.reset(OpAdd16)
   656  				v.AddArg2(y, z)
   657  				return true
   658  			}
   659  		}
   660  		break
   661  	}
   662  	// match: (Add16 (Add16 i:(Const16 <t>) z) x)
   663  	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
   664  	// result: (Add16 i (Add16 <t> z x))
   665  	for {
   666  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   667  			if v_0.Op != OpAdd16 {
   668  				continue
   669  			}
   670  			_ = v_0.Args[1]
   671  			v_0_0 := v_0.Args[0]
   672  			v_0_1 := v_0.Args[1]
   673  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
   674  				i := v_0_0
   675  				if i.Op != OpConst16 {
   676  					continue
   677  				}
   678  				t := i.Type
   679  				z := v_0_1
   680  				x := v_1
   681  				if !(z.Op != OpConst16 && x.Op != OpConst16) {
   682  					continue
   683  				}
   684  				v.reset(OpAdd16)
   685  				v0 := b.NewValue0(v.Pos, OpAdd16, t)
   686  				v0.AddArg2(z, x)
   687  				v.AddArg2(i, v0)
   688  				return true
   689  			}
   690  		}
   691  		break
   692  	}
   693  	// match: (Add16 (Sub16 i:(Const16 <t>) z) x)
   694  	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
   695  	// result: (Add16 i (Sub16 <t> x z))
   696  	for {
   697  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   698  			if v_0.Op != OpSub16 {
   699  				continue
   700  			}
   701  			z := v_0.Args[1]
   702  			i := v_0.Args[0]
   703  			if i.Op != OpConst16 {
   704  				continue
   705  			}
   706  			t := i.Type
   707  			x := v_1
   708  			if !(z.Op != OpConst16 && x.Op != OpConst16) {
   709  				continue
   710  			}
   711  			v.reset(OpAdd16)
   712  			v0 := b.NewValue0(v.Pos, OpSub16, t)
   713  			v0.AddArg2(x, z)
   714  			v.AddArg2(i, v0)
   715  			return true
   716  		}
   717  		break
   718  	}
   719  	// match: (Add16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x))
   720  	// result: (Add16 (Const16 <t> [c+d]) x)
   721  	for {
   722  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   723  			if v_0.Op != OpConst16 {
   724  				continue
   725  			}
   726  			t := v_0.Type
   727  			c := auxIntToInt16(v_0.AuxInt)
   728  			if v_1.Op != OpAdd16 {
   729  				continue
   730  			}
   731  			_ = v_1.Args[1]
   732  			v_1_0 := v_1.Args[0]
   733  			v_1_1 := v_1.Args[1]
   734  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
   735  				if v_1_0.Op != OpConst16 || v_1_0.Type != t {
   736  					continue
   737  				}
   738  				d := auxIntToInt16(v_1_0.AuxInt)
   739  				x := v_1_1
   740  				v.reset(OpAdd16)
   741  				v0 := b.NewValue0(v.Pos, OpConst16, t)
   742  				v0.AuxInt = int16ToAuxInt(c + d)
   743  				v.AddArg2(v0, x)
   744  				return true
   745  			}
   746  		}
   747  		break
   748  	}
   749  	// match: (Add16 (Const16 <t> [c]) (Sub16 (Const16 <t> [d]) x))
   750  	// result: (Sub16 (Const16 <t> [c+d]) x)
   751  	for {
   752  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   753  			if v_0.Op != OpConst16 {
   754  				continue
   755  			}
   756  			t := v_0.Type
   757  			c := auxIntToInt16(v_0.AuxInt)
   758  			if v_1.Op != OpSub16 {
   759  				continue
   760  			}
   761  			x := v_1.Args[1]
   762  			v_1_0 := v_1.Args[0]
   763  			if v_1_0.Op != OpConst16 || v_1_0.Type != t {
   764  				continue
   765  			}
   766  			d := auxIntToInt16(v_1_0.AuxInt)
   767  			v.reset(OpSub16)
   768  			v0 := b.NewValue0(v.Pos, OpConst16, t)
   769  			v0.AuxInt = int16ToAuxInt(c + d)
   770  			v.AddArg2(v0, x)
   771  			return true
   772  		}
   773  		break
   774  	}
   775  	// match: (Add16 (Lsh16x64 x z:(Const64 <t> [c])) (Rsh16Ux64 x (Const64 [d])))
   776  	// cond: c < 16 && d == 16-c && canRotate(config, 16)
   777  	// result: (RotateLeft16 x z)
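	// (x<<c) + (x>>(16-c)) has no overlapping set bits, so the addition acts as
	// an OR and the whole expression is a rotate left by c; canRotate reports
	// whether the target supports rotates at this width.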
   778  	for {
   779  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   780  			if v_0.Op != OpLsh16x64 {
   781  				continue
   782  			}
   783  			_ = v_0.Args[1]
   784  			x := v_0.Args[0]
   785  			z := v_0.Args[1]
   786  			if z.Op != OpConst64 {
   787  				continue
   788  			}
   789  			c := auxIntToInt64(z.AuxInt)
   790  			if v_1.Op != OpRsh16Ux64 {
   791  				continue
   792  			}
   793  			_ = v_1.Args[1]
   794  			if x != v_1.Args[0] {
   795  				continue
   796  			}
   797  			v_1_1 := v_1.Args[1]
   798  			if v_1_1.Op != OpConst64 {
   799  				continue
   800  			}
   801  			d := auxIntToInt64(v_1_1.AuxInt)
   802  			if !(c < 16 && d == 16-c && canRotate(config, 16)) {
   803  				continue
   804  			}
   805  			v.reset(OpRotateLeft16)
   806  			v.AddArg2(x, z)
   807  			return true
   808  		}
   809  		break
   810  	}
   811  	// match: (Add16 left:(Lsh16x64 x y) right:(Rsh16Ux64 x (Sub64 (Const64 [16]) y)))
   812  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
   813  	// result: (RotateLeft16 x y)
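	// shiftIsBounded means the shift amount y is already known to be less than
	// the operand width, so x<<y + x>>(16-y) is exactly a rotate by y and no
	// masking of y is needed.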
   814  	for {
   815  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   816  			left := v_0
   817  			if left.Op != OpLsh16x64 {
   818  				continue
   819  			}
   820  			y := left.Args[1]
   821  			x := left.Args[0]
   822  			right := v_1
   823  			if right.Op != OpRsh16Ux64 {
   824  				continue
   825  			}
   826  			_ = right.Args[1]
   827  			if x != right.Args[0] {
   828  				continue
   829  			}
   830  			right_1 := right.Args[1]
   831  			if right_1.Op != OpSub64 {
   832  				continue
   833  			}
   834  			_ = right_1.Args[1]
   835  			right_1_0 := right_1.Args[0]
   836  			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
   837  				continue
   838  			}
   839  			v.reset(OpRotateLeft16)
   840  			v.AddArg2(x, y)
   841  			return true
   842  		}
   843  		break
   844  	}
   845  	// match: (Add16 left:(Lsh16x32 x y) right:(Rsh16Ux32 x (Sub32 (Const32 [16]) y)))
   846  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
   847  	// result: (RotateLeft16 x y)
   848  	for {
   849  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   850  			left := v_0
   851  			if left.Op != OpLsh16x32 {
   852  				continue
   853  			}
   854  			y := left.Args[1]
   855  			x := left.Args[0]
   856  			right := v_1
   857  			if right.Op != OpRsh16Ux32 {
   858  				continue
   859  			}
   860  			_ = right.Args[1]
   861  			if x != right.Args[0] {
   862  				continue
   863  			}
   864  			right_1 := right.Args[1]
   865  			if right_1.Op != OpSub32 {
   866  				continue
   867  			}
   868  			_ = right_1.Args[1]
   869  			right_1_0 := right_1.Args[0]
   870  			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
   871  				continue
   872  			}
   873  			v.reset(OpRotateLeft16)
   874  			v.AddArg2(x, y)
   875  			return true
   876  		}
   877  		break
   878  	}
   879  	// match: (Add16 left:(Lsh16x16 x y) right:(Rsh16Ux16 x (Sub16 (Const16 [16]) y)))
   880  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
   881  	// result: (RotateLeft16 x y)
   882  	for {
   883  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   884  			left := v_0
   885  			if left.Op != OpLsh16x16 {
   886  				continue
   887  			}
   888  			y := left.Args[1]
   889  			x := left.Args[0]
   890  			right := v_1
   891  			if right.Op != OpRsh16Ux16 {
   892  				continue
   893  			}
   894  			_ = right.Args[1]
   895  			if x != right.Args[0] {
   896  				continue
   897  			}
   898  			right_1 := right.Args[1]
   899  			if right_1.Op != OpSub16 {
   900  				continue
   901  			}
   902  			_ = right_1.Args[1]
   903  			right_1_0 := right_1.Args[0]
   904  			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
   905  				continue
   906  			}
   907  			v.reset(OpRotateLeft16)
   908  			v.AddArg2(x, y)
   909  			return true
   910  		}
   911  		break
   912  	}
   913  	// match: (Add16 left:(Lsh16x8 x y) right:(Rsh16Ux8 x (Sub8 (Const8 [16]) y)))
   914  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
   915  	// result: (RotateLeft16 x y)
   916  	for {
   917  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   918  			left := v_0
   919  			if left.Op != OpLsh16x8 {
   920  				continue
   921  			}
   922  			y := left.Args[1]
   923  			x := left.Args[0]
   924  			right := v_1
   925  			if right.Op != OpRsh16Ux8 {
   926  				continue
   927  			}
   928  			_ = right.Args[1]
   929  			if x != right.Args[0] {
   930  				continue
   931  			}
   932  			right_1 := right.Args[1]
   933  			if right_1.Op != OpSub8 {
   934  				continue
   935  			}
   936  			_ = right_1.Args[1]
   937  			right_1_0 := right_1.Args[0]
   938  			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
   939  				continue
   940  			}
   941  			v.reset(OpRotateLeft16)
   942  			v.AddArg2(x, y)
   943  			return true
   944  		}
   945  		break
   946  	}
   947  	// match: (Add16 right:(Rsh16Ux64 x y) left:(Lsh16x64 x z:(Sub64 (Const64 [16]) y)))
   948  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
   949  	// result: (RotateLeft16 x z)
   950  	for {
   951  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   952  			right := v_0
   953  			if right.Op != OpRsh16Ux64 {
   954  				continue
   955  			}
   956  			y := right.Args[1]
   957  			x := right.Args[0]
   958  			left := v_1
   959  			if left.Op != OpLsh16x64 {
   960  				continue
   961  			}
   962  			_ = left.Args[1]
   963  			if x != left.Args[0] {
   964  				continue
   965  			}
   966  			z := left.Args[1]
   967  			if z.Op != OpSub64 {
   968  				continue
   969  			}
   970  			_ = z.Args[1]
   971  			z_0 := z.Args[0]
   972  			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
   973  				continue
   974  			}
   975  			v.reset(OpRotateLeft16)
   976  			v.AddArg2(x, z)
   977  			return true
   978  		}
   979  		break
   980  	}
   981  	// match: (Add16 right:(Rsh16Ux32 x y) left:(Lsh16x32 x z:(Sub32 (Const32 [16]) y)))
   982  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
   983  	// result: (RotateLeft16 x z)
   984  	for {
   985  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
   986  			right := v_0
   987  			if right.Op != OpRsh16Ux32 {
   988  				continue
   989  			}
   990  			y := right.Args[1]
   991  			x := right.Args[0]
   992  			left := v_1
   993  			if left.Op != OpLsh16x32 {
   994  				continue
   995  			}
   996  			_ = left.Args[1]
   997  			if x != left.Args[0] {
   998  				continue
   999  			}
  1000  			z := left.Args[1]
  1001  			if z.Op != OpSub32 {
  1002  				continue
  1003  			}
  1004  			_ = z.Args[1]
  1005  			z_0 := z.Args[0]
  1006  			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
  1007  				continue
  1008  			}
  1009  			v.reset(OpRotateLeft16)
  1010  			v.AddArg2(x, z)
  1011  			return true
  1012  		}
  1013  		break
  1014  	}
  1015  	// match: (Add16 right:(Rsh16Ux16 x y) left:(Lsh16x16 x z:(Sub16 (Const16 [16]) y)))
  1016  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
  1017  	// result: (RotateLeft16 x z)
  1018  	for {
  1019  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1020  			right := v_0
  1021  			if right.Op != OpRsh16Ux16 {
  1022  				continue
  1023  			}
  1024  			y := right.Args[1]
  1025  			x := right.Args[0]
  1026  			left := v_1
  1027  			if left.Op != OpLsh16x16 {
  1028  				continue
  1029  			}
  1030  			_ = left.Args[1]
  1031  			if x != left.Args[0] {
  1032  				continue
  1033  			}
  1034  			z := left.Args[1]
  1035  			if z.Op != OpSub16 {
  1036  				continue
  1037  			}
  1038  			_ = z.Args[1]
  1039  			z_0 := z.Args[0]
  1040  			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
  1041  				continue
  1042  			}
  1043  			v.reset(OpRotateLeft16)
  1044  			v.AddArg2(x, z)
  1045  			return true
  1046  		}
  1047  		break
  1048  	}
  1049  	// match: (Add16 right:(Rsh16Ux8 x y) left:(Lsh16x8 x z:(Sub8 (Const8 [16]) y)))
  1050  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
  1051  	// result: (RotateLeft16 x z)
  1052  	for {
  1053  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1054  			right := v_0
  1055  			if right.Op != OpRsh16Ux8 {
  1056  				continue
  1057  			}
  1058  			y := right.Args[1]
  1059  			x := right.Args[0]
  1060  			left := v_1
  1061  			if left.Op != OpLsh16x8 {
  1062  				continue
  1063  			}
  1064  			_ = left.Args[1]
  1065  			if x != left.Args[0] {
  1066  				continue
  1067  			}
  1068  			z := left.Args[1]
  1069  			if z.Op != OpSub8 {
  1070  				continue
  1071  			}
  1072  			_ = z.Args[1]
  1073  			z_0 := z.Args[0]
  1074  			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
  1075  				continue
  1076  			}
  1077  			v.reset(OpRotateLeft16)
  1078  			v.AddArg2(x, z)
  1079  			return true
  1080  		}
  1081  		break
  1082  	}
  1083  	return false
  1084  }
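// rewriteValuegeneric_OpAdd32 applies the same rule set as
// rewriteValuegeneric_OpAdd16 above, specialized to 32-bit values.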
  1085  func rewriteValuegeneric_OpAdd32(v *Value) bool {
  1086  	v_1 := v.Args[1]
  1087  	v_0 := v.Args[0]
  1088  	b := v.Block
  1089  	config := b.Func.Config
  1090  	// match: (Add32 (Const32 [c]) (Const32 [d]))
  1091  	// result: (Const32 [c+d])
  1092  	for {
  1093  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1094  			if v_0.Op != OpConst32 {
  1095  				continue
  1096  			}
  1097  			c := auxIntToInt32(v_0.AuxInt)
  1098  			if v_1.Op != OpConst32 {
  1099  				continue
  1100  			}
  1101  			d := auxIntToInt32(v_1.AuxInt)
  1102  			v.reset(OpConst32)
  1103  			v.AuxInt = int32ToAuxInt(c + d)
  1104  			return true
  1105  		}
  1106  		break
  1107  	}
  1108  	// match: (Add32 <t> (Mul32 x y) (Mul32 x z))
  1109  	// result: (Mul32 x (Add32 <t> y z))
  1110  	for {
  1111  		t := v.Type
  1112  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1113  			if v_0.Op != OpMul32 {
  1114  				continue
  1115  			}
  1116  			_ = v_0.Args[1]
  1117  			v_0_0 := v_0.Args[0]
  1118  			v_0_1 := v_0.Args[1]
  1119  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
  1120  				x := v_0_0
  1121  				y := v_0_1
  1122  				if v_1.Op != OpMul32 {
  1123  					continue
  1124  				}
  1125  				_ = v_1.Args[1]
  1126  				v_1_0 := v_1.Args[0]
  1127  				v_1_1 := v_1.Args[1]
  1128  				for _i2 := 0; _i2 <= 1; _i2, v_1_0, v_1_1 = _i2+1, v_1_1, v_1_0 {
  1129  					if x != v_1_0 {
  1130  						continue
  1131  					}
  1132  					z := v_1_1
  1133  					v.reset(OpMul32)
  1134  					v0 := b.NewValue0(v.Pos, OpAdd32, t)
  1135  					v0.AddArg2(y, z)
  1136  					v.AddArg2(x, v0)
  1137  					return true
  1138  				}
  1139  			}
  1140  		}
  1141  		break
  1142  	}
  1143  	// match: (Add32 (Const32 [0]) x)
  1144  	// result: x
  1145  	for {
  1146  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1147  			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
  1148  				continue
  1149  			}
  1150  			x := v_1
  1151  			v.copyOf(x)
  1152  			return true
  1153  		}
  1154  		break
  1155  	}
  1156  	// match: (Add32 x (Neg32 y))
  1157  	// result: (Sub32 x y)
  1158  	for {
  1159  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1160  			x := v_0
  1161  			if v_1.Op != OpNeg32 {
  1162  				continue
  1163  			}
  1164  			y := v_1.Args[0]
  1165  			v.reset(OpSub32)
  1166  			v.AddArg2(x, y)
  1167  			return true
  1168  		}
  1169  		break
  1170  	}
  1171  	// match: (Add32 (Com32 x) x)
  1172  	// result: (Const32 [-1])
  1173  	for {
  1174  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1175  			if v_0.Op != OpCom32 {
  1176  				continue
  1177  			}
  1178  			x := v_0.Args[0]
  1179  			if x != v_1 {
  1180  				continue
  1181  			}
  1182  			v.reset(OpConst32)
  1183  			v.AuxInt = int32ToAuxInt(-1)
  1184  			return true
  1185  		}
  1186  		break
  1187  	}
  1188  	// match: (Add32 (Sub32 x t) (Add32 t y))
  1189  	// result: (Add32 x y)
  1190  	for {
  1191  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1192  			if v_0.Op != OpSub32 {
  1193  				continue
  1194  			}
  1195  			t := v_0.Args[1]
  1196  			x := v_0.Args[0]
  1197  			if v_1.Op != OpAdd32 {
  1198  				continue
  1199  			}
  1200  			_ = v_1.Args[1]
  1201  			v_1_0 := v_1.Args[0]
  1202  			v_1_1 := v_1.Args[1]
  1203  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  1204  				if t != v_1_0 {
  1205  					continue
  1206  				}
  1207  				y := v_1_1
  1208  				v.reset(OpAdd32)
  1209  				v.AddArg2(x, y)
  1210  				return true
  1211  			}
  1212  		}
  1213  		break
  1214  	}
  1215  	// match: (Add32 (Const32 [1]) (Com32 x))
  1216  	// result: (Neg32 x)
  1217  	for {
  1218  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1219  			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 1 || v_1.Op != OpCom32 {
  1220  				continue
  1221  			}
  1222  			x := v_1.Args[0]
  1223  			v.reset(OpNeg32)
  1224  			v.AddArg(x)
  1225  			return true
  1226  		}
  1227  		break
  1228  	}
  1229  	// match: (Add32 x (Sub32 y x))
  1230  	// result: y
  1231  	for {
  1232  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1233  			x := v_0
  1234  			if v_1.Op != OpSub32 {
  1235  				continue
  1236  			}
  1237  			_ = v_1.Args[1]
  1238  			y := v_1.Args[0]
  1239  			if x != v_1.Args[1] {
  1240  				continue
  1241  			}
  1242  			v.copyOf(y)
  1243  			return true
  1244  		}
  1245  		break
  1246  	}
  1247  	// match: (Add32 x (Add32 y (Sub32 z x)))
  1248  	// result: (Add32 y z)
  1249  	for {
  1250  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1251  			x := v_0
  1252  			if v_1.Op != OpAdd32 {
  1253  				continue
  1254  			}
  1255  			_ = v_1.Args[1]
  1256  			v_1_0 := v_1.Args[0]
  1257  			v_1_1 := v_1.Args[1]
  1258  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  1259  				y := v_1_0
  1260  				if v_1_1.Op != OpSub32 {
  1261  					continue
  1262  				}
  1263  				_ = v_1_1.Args[1]
  1264  				z := v_1_1.Args[0]
  1265  				if x != v_1_1.Args[1] {
  1266  					continue
  1267  				}
  1268  				v.reset(OpAdd32)
  1269  				v.AddArg2(y, z)
  1270  				return true
  1271  			}
  1272  		}
  1273  		break
  1274  	}
  1275  	// match: (Add32 (Add32 i:(Const32 <t>) z) x)
  1276  	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
  1277  	// result: (Add32 i (Add32 <t> z x))
  1278  	for {
  1279  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1280  			if v_0.Op != OpAdd32 {
  1281  				continue
  1282  			}
  1283  			_ = v_0.Args[1]
  1284  			v_0_0 := v_0.Args[0]
  1285  			v_0_1 := v_0.Args[1]
  1286  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
  1287  				i := v_0_0
  1288  				if i.Op != OpConst32 {
  1289  					continue
  1290  				}
  1291  				t := i.Type
  1292  				z := v_0_1
  1293  				x := v_1
  1294  				if !(z.Op != OpConst32 && x.Op != OpConst32) {
  1295  					continue
  1296  				}
  1297  				v.reset(OpAdd32)
  1298  				v0 := b.NewValue0(v.Pos, OpAdd32, t)
  1299  				v0.AddArg2(z, x)
  1300  				v.AddArg2(i, v0)
  1301  				return true
  1302  			}
  1303  		}
  1304  		break
  1305  	}
  1306  	// match: (Add32 (Sub32 i:(Const32 <t>) z) x)
  1307  	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
  1308  	// result: (Add32 i (Sub32 <t> x z))
  1309  	for {
  1310  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1311  			if v_0.Op != OpSub32 {
  1312  				continue
  1313  			}
  1314  			z := v_0.Args[1]
  1315  			i := v_0.Args[0]
  1316  			if i.Op != OpConst32 {
  1317  				continue
  1318  			}
  1319  			t := i.Type
  1320  			x := v_1
  1321  			if !(z.Op != OpConst32 && x.Op != OpConst32) {
  1322  				continue
  1323  			}
  1324  			v.reset(OpAdd32)
  1325  			v0 := b.NewValue0(v.Pos, OpSub32, t)
  1326  			v0.AddArg2(x, z)
  1327  			v.AddArg2(i, v0)
  1328  			return true
  1329  		}
  1330  		break
  1331  	}
  1332  	// match: (Add32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
  1333  	// result: (Add32 (Const32 <t> [c+d]) x)
  1334  	for {
  1335  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1336  			if v_0.Op != OpConst32 {
  1337  				continue
  1338  			}
  1339  			t := v_0.Type
  1340  			c := auxIntToInt32(v_0.AuxInt)
  1341  			if v_1.Op != OpAdd32 {
  1342  				continue
  1343  			}
  1344  			_ = v_1.Args[1]
  1345  			v_1_0 := v_1.Args[0]
  1346  			v_1_1 := v_1.Args[1]
  1347  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  1348  				if v_1_0.Op != OpConst32 || v_1_0.Type != t {
  1349  					continue
  1350  				}
  1351  				d := auxIntToInt32(v_1_0.AuxInt)
  1352  				x := v_1_1
  1353  				v.reset(OpAdd32)
  1354  				v0 := b.NewValue0(v.Pos, OpConst32, t)
  1355  				v0.AuxInt = int32ToAuxInt(c + d)
  1356  				v.AddArg2(v0, x)
  1357  				return true
  1358  			}
  1359  		}
  1360  		break
  1361  	}
  1362  	// match: (Add32 (Const32 <t> [c]) (Sub32 (Const32 <t> [d]) x))
  1363  	// result: (Sub32 (Const32 <t> [c+d]) x)
  1364  	for {
  1365  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1366  			if v_0.Op != OpConst32 {
  1367  				continue
  1368  			}
  1369  			t := v_0.Type
  1370  			c := auxIntToInt32(v_0.AuxInt)
  1371  			if v_1.Op != OpSub32 {
  1372  				continue
  1373  			}
  1374  			x := v_1.Args[1]
  1375  			v_1_0 := v_1.Args[0]
  1376  			if v_1_0.Op != OpConst32 || v_1_0.Type != t {
  1377  				continue
  1378  			}
  1379  			d := auxIntToInt32(v_1_0.AuxInt)
  1380  			v.reset(OpSub32)
  1381  			v0 := b.NewValue0(v.Pos, OpConst32, t)
  1382  			v0.AuxInt = int32ToAuxInt(c + d)
  1383  			v.AddArg2(v0, x)
  1384  			return true
  1385  		}
  1386  		break
  1387  	}
  1388  	// match: (Add32 (Lsh32x64 x z:(Const64 <t> [c])) (Rsh32Ux64 x (Const64 [d])))
  1389  	// cond: c < 32 && d == 32-c && canRotate(config, 32)
  1390  	// result: (RotateLeft32 x z)
  1391  	for {
  1392  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1393  			if v_0.Op != OpLsh32x64 {
  1394  				continue
  1395  			}
  1396  			_ = v_0.Args[1]
  1397  			x := v_0.Args[0]
  1398  			z := v_0.Args[1]
  1399  			if z.Op != OpConst64 {
  1400  				continue
  1401  			}
  1402  			c := auxIntToInt64(z.AuxInt)
  1403  			if v_1.Op != OpRsh32Ux64 {
  1404  				continue
  1405  			}
  1406  			_ = v_1.Args[1]
  1407  			if x != v_1.Args[0] {
  1408  				continue
  1409  			}
  1410  			v_1_1 := v_1.Args[1]
  1411  			if v_1_1.Op != OpConst64 {
  1412  				continue
  1413  			}
  1414  			d := auxIntToInt64(v_1_1.AuxInt)
  1415  			if !(c < 32 && d == 32-c && canRotate(config, 32)) {
  1416  				continue
  1417  			}
  1418  			v.reset(OpRotateLeft32)
  1419  			v.AddArg2(x, z)
  1420  			return true
  1421  		}
  1422  		break
  1423  	}
  1424  	// match: (Add32 left:(Lsh32x64 x y) right:(Rsh32Ux64 x (Sub64 (Const64 [32]) y)))
  1425  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
  1426  	// result: (RotateLeft32 x y)
  1427  	for {
  1428  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1429  			left := v_0
  1430  			if left.Op != OpLsh32x64 {
  1431  				continue
  1432  			}
  1433  			y := left.Args[1]
  1434  			x := left.Args[0]
  1435  			right := v_1
  1436  			if right.Op != OpRsh32Ux64 {
  1437  				continue
  1438  			}
  1439  			_ = right.Args[1]
  1440  			if x != right.Args[0] {
  1441  				continue
  1442  			}
  1443  			right_1 := right.Args[1]
  1444  			if right_1.Op != OpSub64 {
  1445  				continue
  1446  			}
  1447  			_ = right_1.Args[1]
  1448  			right_1_0 := right_1.Args[0]
  1449  			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
  1450  				continue
  1451  			}
  1452  			v.reset(OpRotateLeft32)
  1453  			v.AddArg2(x, y)
  1454  			return true
  1455  		}
  1456  		break
  1457  	}
  1458  	// match: (Add32 left:(Lsh32x32 x y) right:(Rsh32Ux32 x (Sub32 (Const32 [32]) y)))
  1459  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
  1460  	// result: (RotateLeft32 x y)
  1461  	for {
  1462  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1463  			left := v_0
  1464  			if left.Op != OpLsh32x32 {
  1465  				continue
  1466  			}
  1467  			y := left.Args[1]
  1468  			x := left.Args[0]
  1469  			right := v_1
  1470  			if right.Op != OpRsh32Ux32 {
  1471  				continue
  1472  			}
  1473  			_ = right.Args[1]
  1474  			if x != right.Args[0] {
  1475  				continue
  1476  			}
  1477  			right_1 := right.Args[1]
  1478  			if right_1.Op != OpSub32 {
  1479  				continue
  1480  			}
  1481  			_ = right_1.Args[1]
  1482  			right_1_0 := right_1.Args[0]
  1483  			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
  1484  				continue
  1485  			}
  1486  			v.reset(OpRotateLeft32)
  1487  			v.AddArg2(x, y)
  1488  			return true
  1489  		}
  1490  		break
  1491  	}
  1492  	// match: (Add32 left:(Lsh32x16 x y) right:(Rsh32Ux16 x (Sub16 (Const16 [32]) y)))
  1493  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
  1494  	// result: (RotateLeft32 x y)
  1495  	for {
  1496  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1497  			left := v_0
  1498  			if left.Op != OpLsh32x16 {
  1499  				continue
  1500  			}
  1501  			y := left.Args[1]
  1502  			x := left.Args[0]
  1503  			right := v_1
  1504  			if right.Op != OpRsh32Ux16 {
  1505  				continue
  1506  			}
  1507  			_ = right.Args[1]
  1508  			if x != right.Args[0] {
  1509  				continue
  1510  			}
  1511  			right_1 := right.Args[1]
  1512  			if right_1.Op != OpSub16 {
  1513  				continue
  1514  			}
  1515  			_ = right_1.Args[1]
  1516  			right_1_0 := right_1.Args[0]
  1517  			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
  1518  				continue
  1519  			}
  1520  			v.reset(OpRotateLeft32)
  1521  			v.AddArg2(x, y)
  1522  			return true
  1523  		}
  1524  		break
  1525  	}
  1526  	// match: (Add32 left:(Lsh32x8 x y) right:(Rsh32Ux8 x (Sub8 (Const8 [32]) y)))
  1527  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
  1528  	// result: (RotateLeft32 x y)
  1529  	for {
  1530  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1531  			left := v_0
  1532  			if left.Op != OpLsh32x8 {
  1533  				continue
  1534  			}
  1535  			y := left.Args[1]
  1536  			x := left.Args[0]
  1537  			right := v_1
  1538  			if right.Op != OpRsh32Ux8 {
  1539  				continue
  1540  			}
  1541  			_ = right.Args[1]
  1542  			if x != right.Args[0] {
  1543  				continue
  1544  			}
  1545  			right_1 := right.Args[1]
  1546  			if right_1.Op != OpSub8 {
  1547  				continue
  1548  			}
  1549  			_ = right_1.Args[1]
  1550  			right_1_0 := right_1.Args[0]
  1551  			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
  1552  				continue
  1553  			}
  1554  			v.reset(OpRotateLeft32)
  1555  			v.AddArg2(x, y)
  1556  			return true
  1557  		}
  1558  		break
  1559  	}
  1560  	// match: (Add32 right:(Rsh32Ux64 x y) left:(Lsh32x64 x z:(Sub64 (Const64 [32]) y)))
  1561  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
  1562  	// result: (RotateLeft32 x z)
  1563  	for {
  1564  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1565  			right := v_0
  1566  			if right.Op != OpRsh32Ux64 {
  1567  				continue
  1568  			}
  1569  			y := right.Args[1]
  1570  			x := right.Args[0]
  1571  			left := v_1
  1572  			if left.Op != OpLsh32x64 {
  1573  				continue
  1574  			}
  1575  			_ = left.Args[1]
  1576  			if x != left.Args[0] {
  1577  				continue
  1578  			}
  1579  			z := left.Args[1]
  1580  			if z.Op != OpSub64 {
  1581  				continue
  1582  			}
  1583  			_ = z.Args[1]
  1584  			z_0 := z.Args[0]
  1585  			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
  1586  				continue
  1587  			}
  1588  			v.reset(OpRotateLeft32)
  1589  			v.AddArg2(x, z)
  1590  			return true
  1591  		}
  1592  		break
  1593  	}
  1594  	// match: (Add32 right:(Rsh32Ux32 x y) left:(Lsh32x32 x z:(Sub32 (Const32 [32]) y)))
  1595  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
  1596  	// result: (RotateLeft32 x z)
  1597  	for {
  1598  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1599  			right := v_0
  1600  			if right.Op != OpRsh32Ux32 {
  1601  				continue
  1602  			}
  1603  			y := right.Args[1]
  1604  			x := right.Args[0]
  1605  			left := v_1
  1606  			if left.Op != OpLsh32x32 {
  1607  				continue
  1608  			}
  1609  			_ = left.Args[1]
  1610  			if x != left.Args[0] {
  1611  				continue
  1612  			}
  1613  			z := left.Args[1]
  1614  			if z.Op != OpSub32 {
  1615  				continue
  1616  			}
  1617  			_ = z.Args[1]
  1618  			z_0 := z.Args[0]
  1619  			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
  1620  				continue
  1621  			}
  1622  			v.reset(OpRotateLeft32)
  1623  			v.AddArg2(x, z)
  1624  			return true
  1625  		}
  1626  		break
  1627  	}
  1628  	// match: (Add32 right:(Rsh32Ux16 x y) left:(Lsh32x16 x z:(Sub16 (Const16 [32]) y)))
  1629  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
  1630  	// result: (RotateLeft32 x z)
  1631  	for {
  1632  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1633  			right := v_0
  1634  			if right.Op != OpRsh32Ux16 {
  1635  				continue
  1636  			}
  1637  			y := right.Args[1]
  1638  			x := right.Args[0]
  1639  			left := v_1
  1640  			if left.Op != OpLsh32x16 {
  1641  				continue
  1642  			}
  1643  			_ = left.Args[1]
  1644  			if x != left.Args[0] {
  1645  				continue
  1646  			}
  1647  			z := left.Args[1]
  1648  			if z.Op != OpSub16 {
  1649  				continue
  1650  			}
  1651  			_ = z.Args[1]
  1652  			z_0 := z.Args[0]
  1653  			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
  1654  				continue
  1655  			}
  1656  			v.reset(OpRotateLeft32)
  1657  			v.AddArg2(x, z)
  1658  			return true
  1659  		}
  1660  		break
  1661  	}
  1662  	// match: (Add32 right:(Rsh32Ux8 x y) left:(Lsh32x8 x z:(Sub8 (Const8 [32]) y)))
  1663  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
  1664  	// result: (RotateLeft32 x z)
  1665  	for {
  1666  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1667  			right := v_0
  1668  			if right.Op != OpRsh32Ux8 {
  1669  				continue
  1670  			}
  1671  			y := right.Args[1]
  1672  			x := right.Args[0]
  1673  			left := v_1
  1674  			if left.Op != OpLsh32x8 {
  1675  				continue
  1676  			}
  1677  			_ = left.Args[1]
  1678  			if x != left.Args[0] {
  1679  				continue
  1680  			}
  1681  			z := left.Args[1]
  1682  			if z.Op != OpSub8 {
  1683  				continue
  1684  			}
  1685  			_ = z.Args[1]
  1686  			z_0 := z.Args[0]
  1687  			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
  1688  				continue
  1689  			}
  1690  			v.reset(OpRotateLeft32)
  1691  			v.AddArg2(x, z)
  1692  			return true
  1693  		}
  1694  		break
  1695  	}
  1696  	return false
  1697  }
  1698  func rewriteValuegeneric_OpAdd32F(v *Value) bool {
  1699  	v_1 := v.Args[1]
  1700  	v_0 := v.Args[0]
  1701  	// match: (Add32F (Const32F [c]) (Const32F [d]))
  1702  	// cond: c+d == c+d
  1703  	// result: (Const32F [c+d])
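	// (A float compares unequal to itself only when it is NaN, so the
	// c+d == c+d check skips folds whose result would be NaN.)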
  1704  	for {
  1705  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1706  			if v_0.Op != OpConst32F {
  1707  				continue
  1708  			}
  1709  			c := auxIntToFloat32(v_0.AuxInt)
  1710  			if v_1.Op != OpConst32F {
  1711  				continue
  1712  			}
  1713  			d := auxIntToFloat32(v_1.AuxInt)
  1714  			if !(c+d == c+d) {
  1715  				continue
  1716  			}
  1717  			v.reset(OpConst32F)
  1718  			v.AuxInt = float32ToAuxInt(c + d)
  1719  			return true
  1720  		}
  1721  		break
  1722  	}
  1723  	return false
  1724  }
  1725  func rewriteValuegeneric_OpAdd64(v *Value) bool {
  1726  	v_1 := v.Args[1]
  1727  	v_0 := v.Args[0]
  1728  	b := v.Block
  1729  	config := b.Func.Config
  1730  	// match: (Add64 (Const64 [c]) (Const64 [d]))
  1731  	// result: (Const64 [c+d])
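	// Note: the constant fold below relies on Go's int64 addition wrapping on
	// overflow, which matches the two's-complement result Add64 produces at
	// run time.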
  1732  	for {
  1733  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1734  			if v_0.Op != OpConst64 {
  1735  				continue
  1736  			}
  1737  			c := auxIntToInt64(v_0.AuxInt)
  1738  			if v_1.Op != OpConst64 {
  1739  				continue
  1740  			}
  1741  			d := auxIntToInt64(v_1.AuxInt)
  1742  			v.reset(OpConst64)
  1743  			v.AuxInt = int64ToAuxInt(c + d)
  1744  			return true
  1745  		}
  1746  		break
  1747  	}
  1748  	// match: (Add64 <t> (Mul64 x y) (Mul64 x z))
  1749  	// result: (Mul64 x (Add64 <t> y z))
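	// Note: this is the distributive law applied in reverse, x*y + x*z => x*(y+z);
	// it is exact for wrapping integer arithmetic and replaces two multiplies
	// with one.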
  1750  	for {
  1751  		t := v.Type
  1752  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1753  			if v_0.Op != OpMul64 {
  1754  				continue
  1755  			}
  1756  			_ = v_0.Args[1]
  1757  			v_0_0 := v_0.Args[0]
  1758  			v_0_1 := v_0.Args[1]
  1759  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
  1760  				x := v_0_0
  1761  				y := v_0_1
  1762  				if v_1.Op != OpMul64 {
  1763  					continue
  1764  				}
  1765  				_ = v_1.Args[1]
  1766  				v_1_0 := v_1.Args[0]
  1767  				v_1_1 := v_1.Args[1]
  1768  				for _i2 := 0; _i2 <= 1; _i2, v_1_0, v_1_1 = _i2+1, v_1_1, v_1_0 {
  1769  					if x != v_1_0 {
  1770  						continue
  1771  					}
  1772  					z := v_1_1
  1773  					v.reset(OpMul64)
  1774  					v0 := b.NewValue0(v.Pos, OpAdd64, t)
  1775  					v0.AddArg2(y, z)
  1776  					v.AddArg2(x, v0)
  1777  					return true
  1778  				}
  1779  			}
  1780  		}
  1781  		break
  1782  	}
  1783  	// match: (Add64 (Const64 [0]) x)
  1784  	// result: x
  1785  	for {
  1786  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1787  			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
  1788  				continue
  1789  			}
  1790  			x := v_1
  1791  			v.copyOf(x)
  1792  			return true
  1793  		}
  1794  		break
  1795  	}
  1796  	// match: (Add64 x (Neg64 y))
  1797  	// result: (Sub64 x y)
  1798  	for {
  1799  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1800  			x := v_0
  1801  			if v_1.Op != OpNeg64 {
  1802  				continue
  1803  			}
  1804  			y := v_1.Args[0]
  1805  			v.reset(OpSub64)
  1806  			v.AddArg2(x, y)
  1807  			return true
  1808  		}
  1809  		break
  1810  	}
  1811  	// match: (Add64 (Com64 x) x)
  1812  	// result: (Const64 [-1])
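	// Note: ^x and x have complementary bits, so their sum sets every bit with
	// no carries, giving all ones, i.e. -1.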
  1813  	for {
  1814  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1815  			if v_0.Op != OpCom64 {
  1816  				continue
  1817  			}
  1818  			x := v_0.Args[0]
  1819  			if x != v_1 {
  1820  				continue
  1821  			}
  1822  			v.reset(OpConst64)
  1823  			v.AuxInt = int64ToAuxInt(-1)
  1824  			return true
  1825  		}
  1826  		break
  1827  	}
  1828  	// match: (Add64 (Sub64 x t) (Add64 t y))
  1829  	// result: (Add64 x y)
  1830  	for {
  1831  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1832  			if v_0.Op != OpSub64 {
  1833  				continue
  1834  			}
  1835  			t := v_0.Args[1]
  1836  			x := v_0.Args[0]
  1837  			if v_1.Op != OpAdd64 {
  1838  				continue
  1839  			}
  1840  			_ = v_1.Args[1]
  1841  			v_1_0 := v_1.Args[0]
  1842  			v_1_1 := v_1.Args[1]
  1843  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  1844  				if t != v_1_0 {
  1845  					continue
  1846  				}
  1847  				y := v_1_1
  1848  				v.reset(OpAdd64)
  1849  				v.AddArg2(x, y)
  1850  				return true
  1851  			}
  1852  		}
  1853  		break
  1854  	}
  1855  	// match: (Add64 (Const64 [1]) (Com64 x))
  1856  	// result: (Neg64 x)
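	// Note: ^x + 1 is the two's-complement negation of x.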
  1857  	for {
  1858  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1859  			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 1 || v_1.Op != OpCom64 {
  1860  				continue
  1861  			}
  1862  			x := v_1.Args[0]
  1863  			v.reset(OpNeg64)
  1864  			v.AddArg(x)
  1865  			return true
  1866  		}
  1867  		break
  1868  	}
  1869  	// match: (Add64 x (Sub64 y x))
  1870  	// result: y
  1871  	for {
  1872  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1873  			x := v_0
  1874  			if v_1.Op != OpSub64 {
  1875  				continue
  1876  			}
  1877  			_ = v_1.Args[1]
  1878  			y := v_1.Args[0]
  1879  			if x != v_1.Args[1] {
  1880  				continue
  1881  			}
  1882  			v.copyOf(y)
  1883  			return true
  1884  		}
  1885  		break
  1886  	}
  1887  	// match: (Add64 x (Add64 y (Sub64 z x)))
  1888  	// result: (Add64 y z)
  1889  	for {
  1890  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1891  			x := v_0
  1892  			if v_1.Op != OpAdd64 {
  1893  				continue
  1894  			}
  1895  			_ = v_1.Args[1]
  1896  			v_1_0 := v_1.Args[0]
  1897  			v_1_1 := v_1.Args[1]
  1898  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  1899  				y := v_1_0
  1900  				if v_1_1.Op != OpSub64 {
  1901  					continue
  1902  				}
  1903  				_ = v_1_1.Args[1]
  1904  				z := v_1_1.Args[0]
  1905  				if x != v_1_1.Args[1] {
  1906  					continue
  1907  				}
  1908  				v.reset(OpAdd64)
  1909  				v.AddArg2(y, z)
  1910  				return true
  1911  			}
  1912  		}
  1913  		break
  1914  	}
  1915  	// match: (Add64 (Add64 i:(Const64 <t>) z) x)
  1916  	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
  1917  	// result: (Add64 i (Add64 <t> z x))
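	// Note: this reassociation floats the constant i toward the root of the
	// addition tree so other rules can merge it with further constants; the
	// condition keeps it from firing when an operand is already a constant,
	// where direct folding applies and repeated reassociation could cycle.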
  1918  	for {
  1919  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1920  			if v_0.Op != OpAdd64 {
  1921  				continue
  1922  			}
  1923  			_ = v_0.Args[1]
  1924  			v_0_0 := v_0.Args[0]
  1925  			v_0_1 := v_0.Args[1]
  1926  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
  1927  				i := v_0_0
  1928  				if i.Op != OpConst64 {
  1929  					continue
  1930  				}
  1931  				t := i.Type
  1932  				z := v_0_1
  1933  				x := v_1
  1934  				if !(z.Op != OpConst64 && x.Op != OpConst64) {
  1935  					continue
  1936  				}
  1937  				v.reset(OpAdd64)
  1938  				v0 := b.NewValue0(v.Pos, OpAdd64, t)
  1939  				v0.AddArg2(z, x)
  1940  				v.AddArg2(i, v0)
  1941  				return true
  1942  			}
  1943  		}
  1944  		break
  1945  	}
  1946  	// match: (Add64 (Sub64 i:(Const64 <t>) z) x)
  1947  	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
  1948  	// result: (Add64 i (Sub64 <t> x z))
  1949  	for {
  1950  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1951  			if v_0.Op != OpSub64 {
  1952  				continue
  1953  			}
  1954  			z := v_0.Args[1]
  1955  			i := v_0.Args[0]
  1956  			if i.Op != OpConst64 {
  1957  				continue
  1958  			}
  1959  			t := i.Type
  1960  			x := v_1
  1961  			if !(z.Op != OpConst64 && x.Op != OpConst64) {
  1962  				continue
  1963  			}
  1964  			v.reset(OpAdd64)
  1965  			v0 := b.NewValue0(v.Pos, OpSub64, t)
  1966  			v0.AddArg2(x, z)
  1967  			v.AddArg2(i, v0)
  1968  			return true
  1969  		}
  1970  		break
  1971  	}
  1972  	// match: (Add64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
  1973  	// result: (Add64 (Const64 <t> [c+d]) x)
  1974  	for {
  1975  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  1976  			if v_0.Op != OpConst64 {
  1977  				continue
  1978  			}
  1979  			t := v_0.Type
  1980  			c := auxIntToInt64(v_0.AuxInt)
  1981  			if v_1.Op != OpAdd64 {
  1982  				continue
  1983  			}
  1984  			_ = v_1.Args[1]
  1985  			v_1_0 := v_1.Args[0]
  1986  			v_1_1 := v_1.Args[1]
  1987  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  1988  				if v_1_0.Op != OpConst64 || v_1_0.Type != t {
  1989  					continue
  1990  				}
  1991  				d := auxIntToInt64(v_1_0.AuxInt)
  1992  				x := v_1_1
  1993  				v.reset(OpAdd64)
  1994  				v0 := b.NewValue0(v.Pos, OpConst64, t)
  1995  				v0.AuxInt = int64ToAuxInt(c + d)
  1996  				v.AddArg2(v0, x)
  1997  				return true
  1998  			}
  1999  		}
  2000  		break
  2001  	}
  2002  	// match: (Add64 (Const64 <t> [c]) (Sub64 (Const64 <t> [d]) x))
  2003  	// result: (Sub64 (Const64 <t> [c+d]) x)
  2004  	for {
  2005  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2006  			if v_0.Op != OpConst64 {
  2007  				continue
  2008  			}
  2009  			t := v_0.Type
  2010  			c := auxIntToInt64(v_0.AuxInt)
  2011  			if v_1.Op != OpSub64 {
  2012  				continue
  2013  			}
  2014  			x := v_1.Args[1]
  2015  			v_1_0 := v_1.Args[0]
  2016  			if v_1_0.Op != OpConst64 || v_1_0.Type != t {
  2017  				continue
  2018  			}
  2019  			d := auxIntToInt64(v_1_0.AuxInt)
  2020  			v.reset(OpSub64)
  2021  			v0 := b.NewValue0(v.Pos, OpConst64, t)
  2022  			v0.AuxInt = int64ToAuxInt(c + d)
  2023  			v.AddArg2(v0, x)
  2024  			return true
  2025  		}
  2026  		break
  2027  	}
  2028  	// match: (Add64 (Lsh64x64 x z:(Const64 <t> [c])) (Rsh64Ux64 x (Const64 [d])))
  2029  	// cond: c < 64 && d == 64-c && canRotate(config, 64)
  2030  	// result: (RotateLeft64 x z)
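	// Note: canRotate(config, 64) gates the rotate rules on the target
	// architecture actually supporting 64-bit rotates; with constant counts the
	// remaining check c < 64 && d == 64-c is enough for x<<c + x>>(64-c) to be
	// a rotate, since the shifted halves cannot overlap.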
  2031  	for {
  2032  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2033  			if v_0.Op != OpLsh64x64 {
  2034  				continue
  2035  			}
  2036  			_ = v_0.Args[1]
  2037  			x := v_0.Args[0]
  2038  			z := v_0.Args[1]
  2039  			if z.Op != OpConst64 {
  2040  				continue
  2041  			}
  2042  			c := auxIntToInt64(z.AuxInt)
  2043  			if v_1.Op != OpRsh64Ux64 {
  2044  				continue
  2045  			}
  2046  			_ = v_1.Args[1]
  2047  			if x != v_1.Args[0] {
  2048  				continue
  2049  			}
  2050  			v_1_1 := v_1.Args[1]
  2051  			if v_1_1.Op != OpConst64 {
  2052  				continue
  2053  			}
  2054  			d := auxIntToInt64(v_1_1.AuxInt)
  2055  			if !(c < 64 && d == 64-c && canRotate(config, 64)) {
  2056  				continue
  2057  			}
  2058  			v.reset(OpRotateLeft64)
  2059  			v.AddArg2(x, z)
  2060  			return true
  2061  		}
  2062  		break
  2063  	}
  2064  	// match: (Add64 left:(Lsh64x64 x y) right:(Rsh64Ux64 x (Sub64 (Const64 [64]) y)))
  2065  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
  2066  	// result: (RotateLeft64 x y)
  2067  	for {
  2068  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2069  			left := v_0
  2070  			if left.Op != OpLsh64x64 {
  2071  				continue
  2072  			}
  2073  			y := left.Args[1]
  2074  			x := left.Args[0]
  2075  			right := v_1
  2076  			if right.Op != OpRsh64Ux64 {
  2077  				continue
  2078  			}
  2079  			_ = right.Args[1]
  2080  			if x != right.Args[0] {
  2081  				continue
  2082  			}
  2083  			right_1 := right.Args[1]
  2084  			if right_1.Op != OpSub64 {
  2085  				continue
  2086  			}
  2087  			_ = right_1.Args[1]
  2088  			right_1_0 := right_1.Args[0]
  2089  			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
  2090  				continue
  2091  			}
  2092  			v.reset(OpRotateLeft64)
  2093  			v.AddArg2(x, y)
  2094  			return true
  2095  		}
  2096  		break
  2097  	}
  2098  	// match: (Add64 left:(Lsh64x32 x y) right:(Rsh64Ux32 x (Sub32 (Const32 [64]) y)))
  2099  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
  2100  	// result: (RotateLeft64 x y)
  2101  	for {
  2102  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2103  			left := v_0
  2104  			if left.Op != OpLsh64x32 {
  2105  				continue
  2106  			}
  2107  			y := left.Args[1]
  2108  			x := left.Args[0]
  2109  			right := v_1
  2110  			if right.Op != OpRsh64Ux32 {
  2111  				continue
  2112  			}
  2113  			_ = right.Args[1]
  2114  			if x != right.Args[0] {
  2115  				continue
  2116  			}
  2117  			right_1 := right.Args[1]
  2118  			if right_1.Op != OpSub32 {
  2119  				continue
  2120  			}
  2121  			_ = right_1.Args[1]
  2122  			right_1_0 := right_1.Args[0]
  2123  			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
  2124  				continue
  2125  			}
  2126  			v.reset(OpRotateLeft64)
  2127  			v.AddArg2(x, y)
  2128  			return true
  2129  		}
  2130  		break
  2131  	}
  2132  	// match: (Add64 left:(Lsh64x16 x y) right:(Rsh64Ux16 x (Sub16 (Const16 [64]) y)))
  2133  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
  2134  	// result: (RotateLeft64 x y)
  2135  	for {
  2136  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2137  			left := v_0
  2138  			if left.Op != OpLsh64x16 {
  2139  				continue
  2140  			}
  2141  			y := left.Args[1]
  2142  			x := left.Args[0]
  2143  			right := v_1
  2144  			if right.Op != OpRsh64Ux16 {
  2145  				continue
  2146  			}
  2147  			_ = right.Args[1]
  2148  			if x != right.Args[0] {
  2149  				continue
  2150  			}
  2151  			right_1 := right.Args[1]
  2152  			if right_1.Op != OpSub16 {
  2153  				continue
  2154  			}
  2155  			_ = right_1.Args[1]
  2156  			right_1_0 := right_1.Args[0]
  2157  			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
  2158  				continue
  2159  			}
  2160  			v.reset(OpRotateLeft64)
  2161  			v.AddArg2(x, y)
  2162  			return true
  2163  		}
  2164  		break
  2165  	}
  2166  	// match: (Add64 left:(Lsh64x8 x y) right:(Rsh64Ux8 x (Sub8 (Const8 [64]) y)))
  2167  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
  2168  	// result: (RotateLeft64 x y)
  2169  	for {
  2170  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2171  			left := v_0
  2172  			if left.Op != OpLsh64x8 {
  2173  				continue
  2174  			}
  2175  			y := left.Args[1]
  2176  			x := left.Args[0]
  2177  			right := v_1
  2178  			if right.Op != OpRsh64Ux8 {
  2179  				continue
  2180  			}
  2181  			_ = right.Args[1]
  2182  			if x != right.Args[0] {
  2183  				continue
  2184  			}
  2185  			right_1 := right.Args[1]
  2186  			if right_1.Op != OpSub8 {
  2187  				continue
  2188  			}
  2189  			_ = right_1.Args[1]
  2190  			right_1_0 := right_1.Args[0]
  2191  			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
  2192  				continue
  2193  			}
  2194  			v.reset(OpRotateLeft64)
  2195  			v.AddArg2(x, y)
  2196  			return true
  2197  		}
  2198  		break
  2199  	}
  2200  	// match: (Add64 right:(Rsh64Ux64 x y) left:(Lsh64x64 x z:(Sub64 (Const64 [64]) y)))
  2201  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
  2202  	// result: (RotateLeft64 x z)
  2203  	for {
  2204  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2205  			right := v_0
  2206  			if right.Op != OpRsh64Ux64 {
  2207  				continue
  2208  			}
  2209  			y := right.Args[1]
  2210  			x := right.Args[0]
  2211  			left := v_1
  2212  			if left.Op != OpLsh64x64 {
  2213  				continue
  2214  			}
  2215  			_ = left.Args[1]
  2216  			if x != left.Args[0] {
  2217  				continue
  2218  			}
  2219  			z := left.Args[1]
  2220  			if z.Op != OpSub64 {
  2221  				continue
  2222  			}
  2223  			_ = z.Args[1]
  2224  			z_0 := z.Args[0]
  2225  			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
  2226  				continue
  2227  			}
  2228  			v.reset(OpRotateLeft64)
  2229  			v.AddArg2(x, z)
  2230  			return true
  2231  		}
  2232  		break
  2233  	}
  2234  	// match: (Add64 right:(Rsh64Ux32 x y) left:(Lsh64x32 x z:(Sub32 (Const32 [64]) y)))
  2235  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
  2236  	// result: (RotateLeft64 x z)
  2237  	for {
  2238  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2239  			right := v_0
  2240  			if right.Op != OpRsh64Ux32 {
  2241  				continue
  2242  			}
  2243  			y := right.Args[1]
  2244  			x := right.Args[0]
  2245  			left := v_1
  2246  			if left.Op != OpLsh64x32 {
  2247  				continue
  2248  			}
  2249  			_ = left.Args[1]
  2250  			if x != left.Args[0] {
  2251  				continue
  2252  			}
  2253  			z := left.Args[1]
  2254  			if z.Op != OpSub32 {
  2255  				continue
  2256  			}
  2257  			_ = z.Args[1]
  2258  			z_0 := z.Args[0]
  2259  			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
  2260  				continue
  2261  			}
  2262  			v.reset(OpRotateLeft64)
  2263  			v.AddArg2(x, z)
  2264  			return true
  2265  		}
  2266  		break
  2267  	}
  2268  	// match: (Add64 right:(Rsh64Ux16 x y) left:(Lsh64x16 x z:(Sub16 (Const16 [64]) y)))
  2269  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
  2270  	// result: (RotateLeft64 x z)
  2271  	for {
  2272  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2273  			right := v_0
  2274  			if right.Op != OpRsh64Ux16 {
  2275  				continue
  2276  			}
  2277  			y := right.Args[1]
  2278  			x := right.Args[0]
  2279  			left := v_1
  2280  			if left.Op != OpLsh64x16 {
  2281  				continue
  2282  			}
  2283  			_ = left.Args[1]
  2284  			if x != left.Args[0] {
  2285  				continue
  2286  			}
  2287  			z := left.Args[1]
  2288  			if z.Op != OpSub16 {
  2289  				continue
  2290  			}
  2291  			_ = z.Args[1]
  2292  			z_0 := z.Args[0]
  2293  			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
  2294  				continue
  2295  			}
  2296  			v.reset(OpRotateLeft64)
  2297  			v.AddArg2(x, z)
  2298  			return true
  2299  		}
  2300  		break
  2301  	}
  2302  	// match: (Add64 right:(Rsh64Ux8 x y) left:(Lsh64x8 x z:(Sub8 (Const8 [64]) y)))
  2303  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
  2304  	// result: (RotateLeft64 x z)
  2305  	for {
  2306  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2307  			right := v_0
  2308  			if right.Op != OpRsh64Ux8 {
  2309  				continue
  2310  			}
  2311  			y := right.Args[1]
  2312  			x := right.Args[0]
  2313  			left := v_1
  2314  			if left.Op != OpLsh64x8 {
  2315  				continue
  2316  			}
  2317  			_ = left.Args[1]
  2318  			if x != left.Args[0] {
  2319  				continue
  2320  			}
  2321  			z := left.Args[1]
  2322  			if z.Op != OpSub8 {
  2323  				continue
  2324  			}
  2325  			_ = z.Args[1]
  2326  			z_0 := z.Args[0]
  2327  			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
  2328  				continue
  2329  			}
  2330  			v.reset(OpRotateLeft64)
  2331  			v.AddArg2(x, z)
  2332  			return true
  2333  		}
  2334  		break
  2335  	}
  2336  	return false
  2337  }
  2338  func rewriteValuegeneric_OpAdd64F(v *Value) bool {
  2339  	v_1 := v.Args[1]
  2340  	v_0 := v.Args[0]
  2341  	// match: (Add64F (Const64F [c]) (Const64F [d]))
  2342  	// cond: c+d == c+d
  2343  	// result: (Const64F [c+d])
  2344  	for {
  2345  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2346  			if v_0.Op != OpConst64F {
  2347  				continue
  2348  			}
  2349  			c := auxIntToFloat64(v_0.AuxInt)
  2350  			if v_1.Op != OpConst64F {
  2351  				continue
  2352  			}
  2353  			d := auxIntToFloat64(v_1.AuxInt)
  2354  			if !(c+d == c+d) {
  2355  				continue
  2356  			}
  2357  			v.reset(OpConst64F)
  2358  			v.AuxInt = float64ToAuxInt(c + d)
  2359  			return true
  2360  		}
  2361  		break
  2362  	}
  2363  	return false
  2364  }
  2365  func rewriteValuegeneric_OpAdd8(v *Value) bool {
  2366  	v_1 := v.Args[1]
  2367  	v_0 := v.Args[0]
  2368  	b := v.Block
  2369  	config := b.Func.Config
  2370  	// match: (Add8 (Const8 [c]) (Const8 [d]))
  2371  	// result: (Const8 [c+d])
  2372  	for {
  2373  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2374  			if v_0.Op != OpConst8 {
  2375  				continue
  2376  			}
  2377  			c := auxIntToInt8(v_0.AuxInt)
  2378  			if v_1.Op != OpConst8 {
  2379  				continue
  2380  			}
  2381  			d := auxIntToInt8(v_1.AuxInt)
  2382  			v.reset(OpConst8)
  2383  			v.AuxInt = int8ToAuxInt(c + d)
  2384  			return true
  2385  		}
  2386  		break
  2387  	}
  2388  	// match: (Add8 <t> (Mul8 x y) (Mul8 x z))
  2389  	// result: (Mul8 x (Add8 <t> y z))
  2390  	for {
  2391  		t := v.Type
  2392  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2393  			if v_0.Op != OpMul8 {
  2394  				continue
  2395  			}
  2396  			_ = v_0.Args[1]
  2397  			v_0_0 := v_0.Args[0]
  2398  			v_0_1 := v_0.Args[1]
  2399  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
  2400  				x := v_0_0
  2401  				y := v_0_1
  2402  				if v_1.Op != OpMul8 {
  2403  					continue
  2404  				}
  2405  				_ = v_1.Args[1]
  2406  				v_1_0 := v_1.Args[0]
  2407  				v_1_1 := v_1.Args[1]
  2408  				for _i2 := 0; _i2 <= 1; _i2, v_1_0, v_1_1 = _i2+1, v_1_1, v_1_0 {
  2409  					if x != v_1_0 {
  2410  						continue
  2411  					}
  2412  					z := v_1_1
  2413  					v.reset(OpMul8)
  2414  					v0 := b.NewValue0(v.Pos, OpAdd8, t)
  2415  					v0.AddArg2(y, z)
  2416  					v.AddArg2(x, v0)
  2417  					return true
  2418  				}
  2419  			}
  2420  		}
  2421  		break
  2422  	}
  2423  	// match: (Add8 (Const8 [0]) x)
  2424  	// result: x
  2425  	for {
  2426  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2427  			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
  2428  				continue
  2429  			}
  2430  			x := v_1
  2431  			v.copyOf(x)
  2432  			return true
  2433  		}
  2434  		break
  2435  	}
  2436  	// match: (Add8 x (Neg8 y))
  2437  	// result: (Sub8 x y)
  2438  	for {
  2439  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2440  			x := v_0
  2441  			if v_1.Op != OpNeg8 {
  2442  				continue
  2443  			}
  2444  			y := v_1.Args[0]
  2445  			v.reset(OpSub8)
  2446  			v.AddArg2(x, y)
  2447  			return true
  2448  		}
  2449  		break
  2450  	}
  2451  	// match: (Add8 (Com8 x) x)
  2452  	// result: (Const8 [-1])
  2453  	for {
  2454  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2455  			if v_0.Op != OpCom8 {
  2456  				continue
  2457  			}
  2458  			x := v_0.Args[0]
  2459  			if x != v_1 {
  2460  				continue
  2461  			}
  2462  			v.reset(OpConst8)
  2463  			v.AuxInt = int8ToAuxInt(-1)
  2464  			return true
  2465  		}
  2466  		break
  2467  	}
  2468  	// match: (Add8 (Sub8 x t) (Add8 t y))
  2469  	// result: (Add8 x y)
  2470  	for {
  2471  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2472  			if v_0.Op != OpSub8 {
  2473  				continue
  2474  			}
  2475  			t := v_0.Args[1]
  2476  			x := v_0.Args[0]
  2477  			if v_1.Op != OpAdd8 {
  2478  				continue
  2479  			}
  2480  			_ = v_1.Args[1]
  2481  			v_1_0 := v_1.Args[0]
  2482  			v_1_1 := v_1.Args[1]
  2483  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  2484  				if t != v_1_0 {
  2485  					continue
  2486  				}
  2487  				y := v_1_1
  2488  				v.reset(OpAdd8)
  2489  				v.AddArg2(x, y)
  2490  				return true
  2491  			}
  2492  		}
  2493  		break
  2494  	}
  2495  	// match: (Add8 (Const8 [1]) (Com8 x))
  2496  	// result: (Neg8 x)
  2497  	for {
  2498  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2499  			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 1 || v_1.Op != OpCom8 {
  2500  				continue
  2501  			}
  2502  			x := v_1.Args[0]
  2503  			v.reset(OpNeg8)
  2504  			v.AddArg(x)
  2505  			return true
  2506  		}
  2507  		break
  2508  	}
  2509  	// match: (Add8 x (Sub8 y x))
  2510  	// result: y
  2511  	for {
  2512  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2513  			x := v_0
  2514  			if v_1.Op != OpSub8 {
  2515  				continue
  2516  			}
  2517  			_ = v_1.Args[1]
  2518  			y := v_1.Args[0]
  2519  			if x != v_1.Args[1] {
  2520  				continue
  2521  			}
  2522  			v.copyOf(y)
  2523  			return true
  2524  		}
  2525  		break
  2526  	}
  2527  	// match: (Add8 x (Add8 y (Sub8 z x)))
  2528  	// result: (Add8 y z)
  2529  	for {
  2530  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2531  			x := v_0
  2532  			if v_1.Op != OpAdd8 {
  2533  				continue
  2534  			}
  2535  			_ = v_1.Args[1]
  2536  			v_1_0 := v_1.Args[0]
  2537  			v_1_1 := v_1.Args[1]
  2538  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  2539  				y := v_1_0
  2540  				if v_1_1.Op != OpSub8 {
  2541  					continue
  2542  				}
  2543  				_ = v_1_1.Args[1]
  2544  				z := v_1_1.Args[0]
  2545  				if x != v_1_1.Args[1] {
  2546  					continue
  2547  				}
  2548  				v.reset(OpAdd8)
  2549  				v.AddArg2(y, z)
  2550  				return true
  2551  			}
  2552  		}
  2553  		break
  2554  	}
  2555  	// match: (Add8 (Add8 i:(Const8 <t>) z) x)
  2556  	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
  2557  	// result: (Add8 i (Add8 <t> z x))
  2558  	for {
  2559  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2560  			if v_0.Op != OpAdd8 {
  2561  				continue
  2562  			}
  2563  			_ = v_0.Args[1]
  2564  			v_0_0 := v_0.Args[0]
  2565  			v_0_1 := v_0.Args[1]
  2566  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
  2567  				i := v_0_0
  2568  				if i.Op != OpConst8 {
  2569  					continue
  2570  				}
  2571  				t := i.Type
  2572  				z := v_0_1
  2573  				x := v_1
  2574  				if !(z.Op != OpConst8 && x.Op != OpConst8) {
  2575  					continue
  2576  				}
  2577  				v.reset(OpAdd8)
  2578  				v0 := b.NewValue0(v.Pos, OpAdd8, t)
  2579  				v0.AddArg2(z, x)
  2580  				v.AddArg2(i, v0)
  2581  				return true
  2582  			}
  2583  		}
  2584  		break
  2585  	}
  2586  	// match: (Add8 (Sub8 i:(Const8 <t>) z) x)
  2587  	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
  2588  	// result: (Add8 i (Sub8 <t> x z))
  2589  	for {
  2590  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2591  			if v_0.Op != OpSub8 {
  2592  				continue
  2593  			}
  2594  			z := v_0.Args[1]
  2595  			i := v_0.Args[0]
  2596  			if i.Op != OpConst8 {
  2597  				continue
  2598  			}
  2599  			t := i.Type
  2600  			x := v_1
  2601  			if !(z.Op != OpConst8 && x.Op != OpConst8) {
  2602  				continue
  2603  			}
  2604  			v.reset(OpAdd8)
  2605  			v0 := b.NewValue0(v.Pos, OpSub8, t)
  2606  			v0.AddArg2(x, z)
  2607  			v.AddArg2(i, v0)
  2608  			return true
  2609  		}
  2610  		break
  2611  	}
  2612  	// match: (Add8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x))
  2613  	// result: (Add8 (Const8 <t> [c+d]) x)
  2614  	for {
  2615  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2616  			if v_0.Op != OpConst8 {
  2617  				continue
  2618  			}
  2619  			t := v_0.Type
  2620  			c := auxIntToInt8(v_0.AuxInt)
  2621  			if v_1.Op != OpAdd8 {
  2622  				continue
  2623  			}
  2624  			_ = v_1.Args[1]
  2625  			v_1_0 := v_1.Args[0]
  2626  			v_1_1 := v_1.Args[1]
  2627  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  2628  				if v_1_0.Op != OpConst8 || v_1_0.Type != t {
  2629  					continue
  2630  				}
  2631  				d := auxIntToInt8(v_1_0.AuxInt)
  2632  				x := v_1_1
  2633  				v.reset(OpAdd8)
  2634  				v0 := b.NewValue0(v.Pos, OpConst8, t)
  2635  				v0.AuxInt = int8ToAuxInt(c + d)
  2636  				v.AddArg2(v0, x)
  2637  				return true
  2638  			}
  2639  		}
  2640  		break
  2641  	}
  2642  	// match: (Add8 (Const8 <t> [c]) (Sub8 (Const8 <t> [d]) x))
  2643  	// result: (Sub8 (Const8 <t> [c+d]) x)
  2644  	for {
  2645  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2646  			if v_0.Op != OpConst8 {
  2647  				continue
  2648  			}
  2649  			t := v_0.Type
  2650  			c := auxIntToInt8(v_0.AuxInt)
  2651  			if v_1.Op != OpSub8 {
  2652  				continue
  2653  			}
  2654  			x := v_1.Args[1]
  2655  			v_1_0 := v_1.Args[0]
  2656  			if v_1_0.Op != OpConst8 || v_1_0.Type != t {
  2657  				continue
  2658  			}
  2659  			d := auxIntToInt8(v_1_0.AuxInt)
  2660  			v.reset(OpSub8)
  2661  			v0 := b.NewValue0(v.Pos, OpConst8, t)
  2662  			v0.AuxInt = int8ToAuxInt(c + d)
  2663  			v.AddArg2(v0, x)
  2664  			return true
  2665  		}
  2666  		break
  2667  	}
  2668  	// match: (Add8 (Lsh8x64 x z:(Const64 <t> [c])) (Rsh8Ux64 x (Const64 [d])))
  2669  	// cond: c < 8 && d == 8-c && canRotate(config, 8)
  2670  	// result: (RotateLeft8 x z)
  2671  	for {
  2672  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2673  			if v_0.Op != OpLsh8x64 {
  2674  				continue
  2675  			}
  2676  			_ = v_0.Args[1]
  2677  			x := v_0.Args[0]
  2678  			z := v_0.Args[1]
  2679  			if z.Op != OpConst64 {
  2680  				continue
  2681  			}
  2682  			c := auxIntToInt64(z.AuxInt)
  2683  			if v_1.Op != OpRsh8Ux64 {
  2684  				continue
  2685  			}
  2686  			_ = v_1.Args[1]
  2687  			if x != v_1.Args[0] {
  2688  				continue
  2689  			}
  2690  			v_1_1 := v_1.Args[1]
  2691  			if v_1_1.Op != OpConst64 {
  2692  				continue
  2693  			}
  2694  			d := auxIntToInt64(v_1_1.AuxInt)
  2695  			if !(c < 8 && d == 8-c && canRotate(config, 8)) {
  2696  				continue
  2697  			}
  2698  			v.reset(OpRotateLeft8)
  2699  			v.AddArg2(x, z)
  2700  			return true
  2701  		}
  2702  		break
  2703  	}
  2704  	// match: (Add8 left:(Lsh8x64 x y) right:(Rsh8Ux64 x (Sub64 (Const64 [8]) y)))
  2705  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
  2706  	// result: (RotateLeft8 x y)
  2707  	for {
  2708  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2709  			left := v_0
  2710  			if left.Op != OpLsh8x64 {
  2711  				continue
  2712  			}
  2713  			y := left.Args[1]
  2714  			x := left.Args[0]
  2715  			right := v_1
  2716  			if right.Op != OpRsh8Ux64 {
  2717  				continue
  2718  			}
  2719  			_ = right.Args[1]
  2720  			if x != right.Args[0] {
  2721  				continue
  2722  			}
  2723  			right_1 := right.Args[1]
  2724  			if right_1.Op != OpSub64 {
  2725  				continue
  2726  			}
  2727  			_ = right_1.Args[1]
  2728  			right_1_0 := right_1.Args[0]
  2729  			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
  2730  				continue
  2731  			}
  2732  			v.reset(OpRotateLeft8)
  2733  			v.AddArg2(x, y)
  2734  			return true
  2735  		}
  2736  		break
  2737  	}
  2738  	// match: (Add8 left:(Lsh8x32 x y) right:(Rsh8Ux32 x (Sub32 (Const32 [8]) y)))
  2739  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
  2740  	// result: (RotateLeft8 x y)
  2741  	for {
  2742  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2743  			left := v_0
  2744  			if left.Op != OpLsh8x32 {
  2745  				continue
  2746  			}
  2747  			y := left.Args[1]
  2748  			x := left.Args[0]
  2749  			right := v_1
  2750  			if right.Op != OpRsh8Ux32 {
  2751  				continue
  2752  			}
  2753  			_ = right.Args[1]
  2754  			if x != right.Args[0] {
  2755  				continue
  2756  			}
  2757  			right_1 := right.Args[1]
  2758  			if right_1.Op != OpSub32 {
  2759  				continue
  2760  			}
  2761  			_ = right_1.Args[1]
  2762  			right_1_0 := right_1.Args[0]
  2763  			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
  2764  				continue
  2765  			}
  2766  			v.reset(OpRotateLeft8)
  2767  			v.AddArg2(x, y)
  2768  			return true
  2769  		}
  2770  		break
  2771  	}
  2772  	// match: (Add8 left:(Lsh8x16 x y) right:(Rsh8Ux16 x (Sub16 (Const16 [8]) y)))
  2773  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
  2774  	// result: (RotateLeft8 x y)
  2775  	for {
  2776  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2777  			left := v_0
  2778  			if left.Op != OpLsh8x16 {
  2779  				continue
  2780  			}
  2781  			y := left.Args[1]
  2782  			x := left.Args[0]
  2783  			right := v_1
  2784  			if right.Op != OpRsh8Ux16 {
  2785  				continue
  2786  			}
  2787  			_ = right.Args[1]
  2788  			if x != right.Args[0] {
  2789  				continue
  2790  			}
  2791  			right_1 := right.Args[1]
  2792  			if right_1.Op != OpSub16 {
  2793  				continue
  2794  			}
  2795  			_ = right_1.Args[1]
  2796  			right_1_0 := right_1.Args[0]
  2797  			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
  2798  				continue
  2799  			}
  2800  			v.reset(OpRotateLeft8)
  2801  			v.AddArg2(x, y)
  2802  			return true
  2803  		}
  2804  		break
  2805  	}
  2806  	// match: (Add8 left:(Lsh8x8 x y) right:(Rsh8Ux8 x (Sub8 (Const8 [8]) y)))
  2807  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
  2808  	// result: (RotateLeft8 x y)
  2809  	for {
  2810  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2811  			left := v_0
  2812  			if left.Op != OpLsh8x8 {
  2813  				continue
  2814  			}
  2815  			y := left.Args[1]
  2816  			x := left.Args[0]
  2817  			right := v_1
  2818  			if right.Op != OpRsh8Ux8 {
  2819  				continue
  2820  			}
  2821  			_ = right.Args[1]
  2822  			if x != right.Args[0] {
  2823  				continue
  2824  			}
  2825  			right_1 := right.Args[1]
  2826  			if right_1.Op != OpSub8 {
  2827  				continue
  2828  			}
  2829  			_ = right_1.Args[1]
  2830  			right_1_0 := right_1.Args[0]
  2831  			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
  2832  				continue
  2833  			}
  2834  			v.reset(OpRotateLeft8)
  2835  			v.AddArg2(x, y)
  2836  			return true
  2837  		}
  2838  		break
  2839  	}
  2840  	// match: (Add8 right:(Rsh8Ux64 x y) left:(Lsh8x64 x z:(Sub64 (Const64 [8]) y)))
  2841  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
  2842  	// result: (RotateLeft8 x z)
  2843  	for {
  2844  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2845  			right := v_0
  2846  			if right.Op != OpRsh8Ux64 {
  2847  				continue
  2848  			}
  2849  			y := right.Args[1]
  2850  			x := right.Args[0]
  2851  			left := v_1
  2852  			if left.Op != OpLsh8x64 {
  2853  				continue
  2854  			}
  2855  			_ = left.Args[1]
  2856  			if x != left.Args[0] {
  2857  				continue
  2858  			}
  2859  			z := left.Args[1]
  2860  			if z.Op != OpSub64 {
  2861  				continue
  2862  			}
  2863  			_ = z.Args[1]
  2864  			z_0 := z.Args[0]
  2865  			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
  2866  				continue
  2867  			}
  2868  			v.reset(OpRotateLeft8)
  2869  			v.AddArg2(x, z)
  2870  			return true
  2871  		}
  2872  		break
  2873  	}
  2874  	// match: (Add8 right:(Rsh8Ux32 x y) left:(Lsh8x32 x z:(Sub32 (Const32 [8]) y)))
  2875  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
  2876  	// result: (RotateLeft8 x z)
  2877  	for {
  2878  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2879  			right := v_0
  2880  			if right.Op != OpRsh8Ux32 {
  2881  				continue
  2882  			}
  2883  			y := right.Args[1]
  2884  			x := right.Args[0]
  2885  			left := v_1
  2886  			if left.Op != OpLsh8x32 {
  2887  				continue
  2888  			}
  2889  			_ = left.Args[1]
  2890  			if x != left.Args[0] {
  2891  				continue
  2892  			}
  2893  			z := left.Args[1]
  2894  			if z.Op != OpSub32 {
  2895  				continue
  2896  			}
  2897  			_ = z.Args[1]
  2898  			z_0 := z.Args[0]
  2899  			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
  2900  				continue
  2901  			}
  2902  			v.reset(OpRotateLeft8)
  2903  			v.AddArg2(x, z)
  2904  			return true
  2905  		}
  2906  		break
  2907  	}
  2908  	// match: (Add8 right:(Rsh8Ux16 x y) left:(Lsh8x16 x z:(Sub16 (Const16 [8]) y)))
  2909  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
  2910  	// result: (RotateLeft8 x z)
  2911  	for {
  2912  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2913  			right := v_0
  2914  			if right.Op != OpRsh8Ux16 {
  2915  				continue
  2916  			}
  2917  			y := right.Args[1]
  2918  			x := right.Args[0]
  2919  			left := v_1
  2920  			if left.Op != OpLsh8x16 {
  2921  				continue
  2922  			}
  2923  			_ = left.Args[1]
  2924  			if x != left.Args[0] {
  2925  				continue
  2926  			}
  2927  			z := left.Args[1]
  2928  			if z.Op != OpSub16 {
  2929  				continue
  2930  			}
  2931  			_ = z.Args[1]
  2932  			z_0 := z.Args[0]
  2933  			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
  2934  				continue
  2935  			}
  2936  			v.reset(OpRotateLeft8)
  2937  			v.AddArg2(x, z)
  2938  			return true
  2939  		}
  2940  		break
  2941  	}
  2942  	// match: (Add8 right:(Rsh8Ux8 x y) left:(Lsh8x8 x z:(Sub8 (Const8 [8]) y)))
  2943  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
  2944  	// result: (RotateLeft8 x z)
  2945  	for {
  2946  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  2947  			right := v_0
  2948  			if right.Op != OpRsh8Ux8 {
  2949  				continue
  2950  			}
  2951  			y := right.Args[1]
  2952  			x := right.Args[0]
  2953  			left := v_1
  2954  			if left.Op != OpLsh8x8 {
  2955  				continue
  2956  			}
  2957  			_ = left.Args[1]
  2958  			if x != left.Args[0] {
  2959  				continue
  2960  			}
  2961  			z := left.Args[1]
  2962  			if z.Op != OpSub8 {
  2963  				continue
  2964  			}
  2965  			_ = z.Args[1]
  2966  			z_0 := z.Args[0]
  2967  			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
  2968  				continue
  2969  			}
  2970  			v.reset(OpRotateLeft8)
  2971  			v.AddArg2(x, z)
  2972  			return true
  2973  		}
  2974  		break
  2975  	}
  2976  	return false
  2977  }
  2978  func rewriteValuegeneric_OpAddPtr(v *Value) bool {
  2979  	v_1 := v.Args[1]
  2980  	v_0 := v.Args[0]
  2981  	// match: (AddPtr <t> x (Const64 [c]))
  2982  	// result: (OffPtr <t> x [c])
  2983  	for {
  2984  		t := v.Type
  2985  		x := v_0
  2986  		if v_1.Op != OpConst64 {
  2987  			break
  2988  		}
  2989  		c := auxIntToInt64(v_1.AuxInt)
  2990  		v.reset(OpOffPtr)
  2991  		v.Type = t
  2992  		v.AuxInt = int64ToAuxInt(c)
  2993  		v.AddArg(x)
  2994  		return true
  2995  	}
  2996  	// match: (AddPtr <t> x (Const32 [c]))
  2997  	// result: (OffPtr <t> x [int64(c)])
  2998  	for {
  2999  		t := v.Type
  3000  		x := v_0
  3001  		if v_1.Op != OpConst32 {
  3002  			break
  3003  		}
  3004  		c := auxIntToInt32(v_1.AuxInt)
  3005  		v.reset(OpOffPtr)
  3006  		v.Type = t
  3007  		v.AuxInt = int64ToAuxInt(int64(c))
  3008  		v.AddArg(x)
  3009  		return true
  3010  	}
  3011  	return false
  3012  }
  3013  func rewriteValuegeneric_OpAnd16(v *Value) bool {
  3014  	v_1 := v.Args[1]
  3015  	v_0 := v.Args[0]
  3016  	b := v.Block
  3017  	// match: (And16 (Const16 [c]) (Const16 [d]))
  3018  	// result: (Const16 [c&d])
  3019  	for {
  3020  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3021  			if v_0.Op != OpConst16 {
  3022  				continue
  3023  			}
  3024  			c := auxIntToInt16(v_0.AuxInt)
  3025  			if v_1.Op != OpConst16 {
  3026  				continue
  3027  			}
  3028  			d := auxIntToInt16(v_1.AuxInt)
  3029  			v.reset(OpConst16)
  3030  			v.AuxInt = int16ToAuxInt(c & d)
  3031  			return true
  3032  		}
  3033  		break
  3034  	}
  3035  	// match: (And16 <t> (Com16 x) (Com16 y))
  3036  	// result: (Com16 (Or16 <t> x y))
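	// Note: De Morgan's law, ^x & ^y == ^(x | y), trading two complements for one.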
  3037  	for {
  3038  		t := v.Type
  3039  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3040  			if v_0.Op != OpCom16 {
  3041  				continue
  3042  			}
  3043  			x := v_0.Args[0]
  3044  			if v_1.Op != OpCom16 {
  3045  				continue
  3046  			}
  3047  			y := v_1.Args[0]
  3048  			v.reset(OpCom16)
  3049  			v0 := b.NewValue0(v.Pos, OpOr16, t)
  3050  			v0.AddArg2(x, y)
  3051  			v.AddArg(v0)
  3052  			return true
  3053  		}
  3054  		break
  3055  	}
  3056  	// match: (And16 (Const16 [m]) (Rsh16Ux64 _ (Const64 [c])))
  3057  	// cond: c >= int64(16-ntz16(m))
  3058  	// result: (Const16 [0])
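	// Note: after an unsigned right shift by c only the low 16-c bits can be
	// set; when c >= 16-ntz16(m) those all lie below the lowest set bit of the
	// mask m, so the result is zero. The next rule is the mirror image for left
	// shifts, using m's leading zero count.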
  3059  	for {
  3060  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3061  			if v_0.Op != OpConst16 {
  3062  				continue
  3063  			}
  3064  			m := auxIntToInt16(v_0.AuxInt)
  3065  			if v_1.Op != OpRsh16Ux64 {
  3066  				continue
  3067  			}
  3068  			_ = v_1.Args[1]
  3069  			v_1_1 := v_1.Args[1]
  3070  			if v_1_1.Op != OpConst64 {
  3071  				continue
  3072  			}
  3073  			c := auxIntToInt64(v_1_1.AuxInt)
  3074  			if !(c >= int64(16-ntz16(m))) {
  3075  				continue
  3076  			}
  3077  			v.reset(OpConst16)
  3078  			v.AuxInt = int16ToAuxInt(0)
  3079  			return true
  3080  		}
  3081  		break
  3082  	}
  3083  	// match: (And16 (Const16 [m]) (Lsh16x64 _ (Const64 [c])))
  3084  	// cond: c >= int64(16-nlz16(m))
  3085  	// result: (Const16 [0])
  3086  	for {
  3087  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3088  			if v_0.Op != OpConst16 {
  3089  				continue
  3090  			}
  3091  			m := auxIntToInt16(v_0.AuxInt)
  3092  			if v_1.Op != OpLsh16x64 {
  3093  				continue
  3094  			}
  3095  			_ = v_1.Args[1]
  3096  			v_1_1 := v_1.Args[1]
  3097  			if v_1_1.Op != OpConst64 {
  3098  				continue
  3099  			}
  3100  			c := auxIntToInt64(v_1_1.AuxInt)
  3101  			if !(c >= int64(16-nlz16(m))) {
  3102  				continue
  3103  			}
  3104  			v.reset(OpConst16)
  3105  			v.AuxInt = int16ToAuxInt(0)
  3106  			return true
  3107  		}
  3108  		break
  3109  	}
  3110  	// match: (And16 x x)
  3111  	// result: x
  3112  	for {
  3113  		x := v_0
  3114  		if x != v_1 {
  3115  			break
  3116  		}
  3117  		v.copyOf(x)
  3118  		return true
  3119  	}
  3120  	// match: (And16 (Const16 [-1]) x)
  3121  	// result: x
  3122  	for {
  3123  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3124  			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != -1 {
  3125  				continue
  3126  			}
  3127  			x := v_1
  3128  			v.copyOf(x)
  3129  			return true
  3130  		}
  3131  		break
  3132  	}
  3133  	// match: (And16 (Const16 [0]) _)
  3134  	// result: (Const16 [0])
  3135  	for {
  3136  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3137  			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
  3138  				continue
  3139  			}
  3140  			v.reset(OpConst16)
  3141  			v.AuxInt = int16ToAuxInt(0)
  3142  			return true
  3143  		}
  3144  		break
  3145  	}
  3146  	// match: (And16 (Com16 x) x)
  3147  	// result: (Const16 [0])
  3148  	for {
  3149  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3150  			if v_0.Op != OpCom16 {
  3151  				continue
  3152  			}
  3153  			x := v_0.Args[0]
  3154  			if x != v_1 {
  3155  				continue
  3156  			}
  3157  			v.reset(OpConst16)
  3158  			v.AuxInt = int16ToAuxInt(0)
  3159  			return true
  3160  		}
  3161  		break
  3162  	}
  3163  	// match: (And16 x (And16 x y))
  3164  	// result: (And16 x y)
  3165  	for {
  3166  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3167  			x := v_0
  3168  			if v_1.Op != OpAnd16 {
  3169  				continue
  3170  			}
  3171  			_ = v_1.Args[1]
  3172  			v_1_0 := v_1.Args[0]
  3173  			v_1_1 := v_1.Args[1]
  3174  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  3175  				if x != v_1_0 {
  3176  					continue
  3177  				}
  3178  				y := v_1_1
  3179  				v.reset(OpAnd16)
  3180  				v.AddArg2(x, y)
  3181  				return true
  3182  			}
  3183  		}
  3184  		break
  3185  	}
  3186  	// match: (And16 (And16 i:(Const16 <t>) z) x)
  3187  	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
  3188  	// result: (And16 i (And16 <t> z x))
  3189  	for {
  3190  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3191  			if v_0.Op != OpAnd16 {
  3192  				continue
  3193  			}
  3194  			_ = v_0.Args[1]
  3195  			v_0_0 := v_0.Args[0]
  3196  			v_0_1 := v_0.Args[1]
  3197  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
  3198  				i := v_0_0
  3199  				if i.Op != OpConst16 {
  3200  					continue
  3201  				}
  3202  				t := i.Type
  3203  				z := v_0_1
  3204  				x := v_1
  3205  				if !(z.Op != OpConst16 && x.Op != OpConst16) {
  3206  					continue
  3207  				}
  3208  				v.reset(OpAnd16)
  3209  				v0 := b.NewValue0(v.Pos, OpAnd16, t)
  3210  				v0.AddArg2(z, x)
  3211  				v.AddArg2(i, v0)
  3212  				return true
  3213  			}
  3214  		}
  3215  		break
  3216  	}
  3217  	// match: (And16 (Const16 <t> [c]) (And16 (Const16 <t> [d]) x))
  3218  	// result: (And16 (Const16 <t> [c&d]) x)
  3219  	for {
  3220  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3221  			if v_0.Op != OpConst16 {
  3222  				continue
  3223  			}
  3224  			t := v_0.Type
  3225  			c := auxIntToInt16(v_0.AuxInt)
  3226  			if v_1.Op != OpAnd16 {
  3227  				continue
  3228  			}
  3229  			_ = v_1.Args[1]
  3230  			v_1_0 := v_1.Args[0]
  3231  			v_1_1 := v_1.Args[1]
  3232  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  3233  				if v_1_0.Op != OpConst16 || v_1_0.Type != t {
  3234  					continue
  3235  				}
  3236  				d := auxIntToInt16(v_1_0.AuxInt)
  3237  				x := v_1_1
  3238  				v.reset(OpAnd16)
  3239  				v0 := b.NewValue0(v.Pos, OpConst16, t)
  3240  				v0.AuxInt = int16ToAuxInt(c & d)
  3241  				v.AddArg2(v0, x)
  3242  				return true
  3243  			}
  3244  		}
  3245  		break
  3246  	}
  3247  	return false
  3248  }
  3249  func rewriteValuegeneric_OpAnd32(v *Value) bool {
  3250  	v_1 := v.Args[1]
  3251  	v_0 := v.Args[0]
  3252  	b := v.Block
  3253  	// match: (And32 (Const32 [c]) (Const32 [d]))
  3254  	// result: (Const32 [c&d])
  3255  	for {
  3256  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3257  			if v_0.Op != OpConst32 {
  3258  				continue
  3259  			}
  3260  			c := auxIntToInt32(v_0.AuxInt)
  3261  			if v_1.Op != OpConst32 {
  3262  				continue
  3263  			}
  3264  			d := auxIntToInt32(v_1.AuxInt)
  3265  			v.reset(OpConst32)
  3266  			v.AuxInt = int32ToAuxInt(c & d)
  3267  			return true
  3268  		}
  3269  		break
  3270  	}
  3271  	// match: (And32 <t> (Com32 x) (Com32 y))
  3272  	// result: (Com32 (Or32 <t> x y))
  3273  	for {
  3274  		t := v.Type
  3275  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3276  			if v_0.Op != OpCom32 {
  3277  				continue
  3278  			}
  3279  			x := v_0.Args[0]
  3280  			if v_1.Op != OpCom32 {
  3281  				continue
  3282  			}
  3283  			y := v_1.Args[0]
  3284  			v.reset(OpCom32)
  3285  			v0 := b.NewValue0(v.Pos, OpOr32, t)
  3286  			v0.AddArg2(x, y)
  3287  			v.AddArg(v0)
  3288  			return true
  3289  		}
  3290  		break
  3291  	}
  3292  	// match: (And32 (Const32 [m]) (Rsh32Ux64 _ (Const64 [c])))
  3293  	// cond: c >= int64(32-ntz32(m))
  3294  	// result: (Const32 [0])
  3295  	for {
  3296  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3297  			if v_0.Op != OpConst32 {
  3298  				continue
  3299  			}
  3300  			m := auxIntToInt32(v_0.AuxInt)
  3301  			if v_1.Op != OpRsh32Ux64 {
  3302  				continue
  3303  			}
  3304  			_ = v_1.Args[1]
  3305  			v_1_1 := v_1.Args[1]
  3306  			if v_1_1.Op != OpConst64 {
  3307  				continue
  3308  			}
  3309  			c := auxIntToInt64(v_1_1.AuxInt)
  3310  			if !(c >= int64(32-ntz32(m))) {
  3311  				continue
  3312  			}
  3313  			v.reset(OpConst32)
  3314  			v.AuxInt = int32ToAuxInt(0)
  3315  			return true
  3316  		}
  3317  		break
  3318  	}
  3319  	// match: (And32 (Const32 [m]) (Lsh32x64 _ (Const64 [c])))
  3320  	// cond: c >= int64(32-nlz32(m))
  3321  	// result: (Const32 [0])
  3322  	for {
  3323  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3324  			if v_0.Op != OpConst32 {
  3325  				continue
  3326  			}
  3327  			m := auxIntToInt32(v_0.AuxInt)
  3328  			if v_1.Op != OpLsh32x64 {
  3329  				continue
  3330  			}
  3331  			_ = v_1.Args[1]
  3332  			v_1_1 := v_1.Args[1]
  3333  			if v_1_1.Op != OpConst64 {
  3334  				continue
  3335  			}
  3336  			c := auxIntToInt64(v_1_1.AuxInt)
  3337  			if !(c >= int64(32-nlz32(m))) {
  3338  				continue
  3339  			}
  3340  			v.reset(OpConst32)
  3341  			v.AuxInt = int32ToAuxInt(0)
  3342  			return true
  3343  		}
  3344  		break
  3345  	}
  3346  	// match: (And32 x x)
  3347  	// result: x
  3348  	for {
  3349  		x := v_0
  3350  		if x != v_1 {
  3351  			break
  3352  		}
  3353  		v.copyOf(x)
  3354  		return true
  3355  	}
  3356  	// match: (And32 (Const32 [-1]) x)
  3357  	// result: x
  3358  	for {
  3359  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3360  			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != -1 {
  3361  				continue
  3362  			}
  3363  			x := v_1
  3364  			v.copyOf(x)
  3365  			return true
  3366  		}
  3367  		break
  3368  	}
  3369  	// match: (And32 (Const32 [0]) _)
  3370  	// result: (Const32 [0])
  3371  	for {
  3372  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3373  			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
  3374  				continue
  3375  			}
  3376  			v.reset(OpConst32)
  3377  			v.AuxInt = int32ToAuxInt(0)
  3378  			return true
  3379  		}
  3380  		break
  3381  	}
  3382  	// match: (And32 (Com32 x) x)
  3383  	// result: (Const32 [0])
  3384  	for {
  3385  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3386  			if v_0.Op != OpCom32 {
  3387  				continue
  3388  			}
  3389  			x := v_0.Args[0]
  3390  			if x != v_1 {
  3391  				continue
  3392  			}
  3393  			v.reset(OpConst32)
  3394  			v.AuxInt = int32ToAuxInt(0)
  3395  			return true
  3396  		}
  3397  		break
  3398  	}
  3399  	// match: (And32 x (And32 x y))
  3400  	// result: (And32 x y)
  3401  	for {
  3402  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3403  			x := v_0
  3404  			if v_1.Op != OpAnd32 {
  3405  				continue
  3406  			}
  3407  			_ = v_1.Args[1]
  3408  			v_1_0 := v_1.Args[0]
  3409  			v_1_1 := v_1.Args[1]
  3410  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  3411  				if x != v_1_0 {
  3412  					continue
  3413  				}
  3414  				y := v_1_1
  3415  				v.reset(OpAnd32)
  3416  				v.AddArg2(x, y)
  3417  				return true
  3418  			}
  3419  		}
  3420  		break
  3421  	}
  3422  	// match: (And32 (And32 i:(Const32 <t>) z) x)
  3423  	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
  3424  	// result: (And32 i (And32 <t> z x))
  3425  	for {
  3426  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3427  			if v_0.Op != OpAnd32 {
  3428  				continue
  3429  			}
  3430  			_ = v_0.Args[1]
  3431  			v_0_0 := v_0.Args[0]
  3432  			v_0_1 := v_0.Args[1]
  3433  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
  3434  				i := v_0_0
  3435  				if i.Op != OpConst32 {
  3436  					continue
  3437  				}
  3438  				t := i.Type
  3439  				z := v_0_1
  3440  				x := v_1
  3441  				if !(z.Op != OpConst32 && x.Op != OpConst32) {
  3442  					continue
  3443  				}
  3444  				v.reset(OpAnd32)
  3445  				v0 := b.NewValue0(v.Pos, OpAnd32, t)
  3446  				v0.AddArg2(z, x)
  3447  				v.AddArg2(i, v0)
  3448  				return true
  3449  			}
  3450  		}
  3451  		break
  3452  	}
  3453  	// match: (And32 (Const32 <t> [c]) (And32 (Const32 <t> [d]) x))
  3454  	// result: (And32 (Const32 <t> [c&d]) x)
  3455  	for {
  3456  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3457  			if v_0.Op != OpConst32 {
  3458  				continue
  3459  			}
  3460  			t := v_0.Type
  3461  			c := auxIntToInt32(v_0.AuxInt)
  3462  			if v_1.Op != OpAnd32 {
  3463  				continue
  3464  			}
  3465  			_ = v_1.Args[1]
  3466  			v_1_0 := v_1.Args[0]
  3467  			v_1_1 := v_1.Args[1]
  3468  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  3469  				if v_1_0.Op != OpConst32 || v_1_0.Type != t {
  3470  					continue
  3471  				}
  3472  				d := auxIntToInt32(v_1_0.AuxInt)
  3473  				x := v_1_1
  3474  				v.reset(OpAnd32)
  3475  				v0 := b.NewValue0(v.Pos, OpConst32, t)
  3476  				v0.AuxInt = int32ToAuxInt(c & d)
  3477  				v.AddArg2(v0, x)
  3478  				return true
  3479  			}
  3480  		}
  3481  		break
  3482  	}
  3483  	return false
  3484  }
  3485  func rewriteValuegeneric_OpAnd64(v *Value) bool {
  3486  	v_1 := v.Args[1]
  3487  	v_0 := v.Args[0]
  3488  	b := v.Block
  3489  	// match: (And64 (Const64 [c]) (Const64 [d]))
  3490  	// result: (Const64 [c&d])
  3491  	for {
  3492  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3493  			if v_0.Op != OpConst64 {
  3494  				continue
  3495  			}
  3496  			c := auxIntToInt64(v_0.AuxInt)
  3497  			if v_1.Op != OpConst64 {
  3498  				continue
  3499  			}
  3500  			d := auxIntToInt64(v_1.AuxInt)
  3501  			v.reset(OpConst64)
  3502  			v.AuxInt = int64ToAuxInt(c & d)
  3503  			return true
  3504  		}
  3505  		break
  3506  	}
  3507  	// match: (And64 <t> (Com64 x) (Com64 y))
  3508  	// result: (Com64 (Or64 <t> x y))
  3509  	for {
  3510  		t := v.Type
  3511  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3512  			if v_0.Op != OpCom64 {
  3513  				continue
  3514  			}
  3515  			x := v_0.Args[0]
  3516  			if v_1.Op != OpCom64 {
  3517  				continue
  3518  			}
  3519  			y := v_1.Args[0]
  3520  			v.reset(OpCom64)
  3521  			v0 := b.NewValue0(v.Pos, OpOr64, t)
  3522  			v0.AddArg2(x, y)
  3523  			v.AddArg(v0)
  3524  			return true
  3525  		}
  3526  		break
  3527  	}
  3528  	// match: (And64 (Const64 [m]) (Rsh64Ux64 _ (Const64 [c])))
  3529  	// cond: c >= int64(64-ntz64(m))
  3530  	// result: (Const64 [0])
  3531  	for {
  3532  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3533  			if v_0.Op != OpConst64 {
  3534  				continue
  3535  			}
  3536  			m := auxIntToInt64(v_0.AuxInt)
  3537  			if v_1.Op != OpRsh64Ux64 {
  3538  				continue
  3539  			}
  3540  			_ = v_1.Args[1]
  3541  			v_1_1 := v_1.Args[1]
  3542  			if v_1_1.Op != OpConst64 {
  3543  				continue
  3544  			}
  3545  			c := auxIntToInt64(v_1_1.AuxInt)
  3546  			if !(c >= int64(64-ntz64(m))) {
  3547  				continue
  3548  			}
  3549  			v.reset(OpConst64)
  3550  			v.AuxInt = int64ToAuxInt(0)
  3551  			return true
  3552  		}
  3553  		break
  3554  	}
  3555  	// match: (And64 (Const64 [m]) (Lsh64x64 _ (Const64 [c])))
  3556  	// cond: c >= int64(64-nlz64(m))
  3557  	// result: (Const64 [0])
  3558  	for {
  3559  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3560  			if v_0.Op != OpConst64 {
  3561  				continue
  3562  			}
  3563  			m := auxIntToInt64(v_0.AuxInt)
  3564  			if v_1.Op != OpLsh64x64 {
  3565  				continue
  3566  			}
  3567  			_ = v_1.Args[1]
  3568  			v_1_1 := v_1.Args[1]
  3569  			if v_1_1.Op != OpConst64 {
  3570  				continue
  3571  			}
  3572  			c := auxIntToInt64(v_1_1.AuxInt)
  3573  			if !(c >= int64(64-nlz64(m))) {
  3574  				continue
  3575  			}
  3576  			v.reset(OpConst64)
  3577  			v.AuxInt = int64ToAuxInt(0)
  3578  			return true
  3579  		}
  3580  		break
  3581  	}
  3582  	// match: (And64 x x)
  3583  	// result: x
  3584  	for {
  3585  		x := v_0
  3586  		if x != v_1 {
  3587  			break
  3588  		}
  3589  		v.copyOf(x)
  3590  		return true
  3591  	}
  3592  	// match: (And64 (Const64 [-1]) x)
  3593  	// result: x
  3594  	for {
  3595  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3596  			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != -1 {
  3597  				continue
  3598  			}
  3599  			x := v_1
  3600  			v.copyOf(x)
  3601  			return true
  3602  		}
  3603  		break
  3604  	}
  3605  	// match: (And64 (Const64 [0]) _)
  3606  	// result: (Const64 [0])
  3607  	for {
  3608  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3609  			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
  3610  				continue
  3611  			}
  3612  			v.reset(OpConst64)
  3613  			v.AuxInt = int64ToAuxInt(0)
  3614  			return true
  3615  		}
  3616  		break
  3617  	}
  3618  	// match: (And64 (Com64 x) x)
  3619  	// result: (Const64 [0])
  3620  	for {
  3621  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3622  			if v_0.Op != OpCom64 {
  3623  				continue
  3624  			}
  3625  			x := v_0.Args[0]
  3626  			if x != v_1 {
  3627  				continue
  3628  			}
  3629  			v.reset(OpConst64)
  3630  			v.AuxInt = int64ToAuxInt(0)
  3631  			return true
  3632  		}
  3633  		break
  3634  	}
  3635  	// match: (And64 x (And64 x y))
  3636  	// result: (And64 x y)
  3637  	for {
  3638  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3639  			x := v_0
  3640  			if v_1.Op != OpAnd64 {
  3641  				continue
  3642  			}
  3643  			_ = v_1.Args[1]
  3644  			v_1_0 := v_1.Args[0]
  3645  			v_1_1 := v_1.Args[1]
  3646  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  3647  				if x != v_1_0 {
  3648  					continue
  3649  				}
  3650  				y := v_1_1
  3651  				v.reset(OpAnd64)
  3652  				v.AddArg2(x, y)
  3653  				return true
  3654  			}
  3655  		}
  3656  		break
  3657  	}
  3658  	// match: (And64 (And64 i:(Const64 <t>) z) x)
  3659  	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
  3660  	// result: (And64 i (And64 <t> z x))
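        	// Reassociate so that the constant i floats to the top of the And64
        	// chain, where the next rule can fold it with another constant.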
  3661  	for {
  3662  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3663  			if v_0.Op != OpAnd64 {
  3664  				continue
  3665  			}
  3666  			_ = v_0.Args[1]
  3667  			v_0_0 := v_0.Args[0]
  3668  			v_0_1 := v_0.Args[1]
  3669  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
  3670  				i := v_0_0
  3671  				if i.Op != OpConst64 {
  3672  					continue
  3673  				}
  3674  				t := i.Type
  3675  				z := v_0_1
  3676  				x := v_1
  3677  				if !(z.Op != OpConst64 && x.Op != OpConst64) {
  3678  					continue
  3679  				}
  3680  				v.reset(OpAnd64)
  3681  				v0 := b.NewValue0(v.Pos, OpAnd64, t)
  3682  				v0.AddArg2(z, x)
  3683  				v.AddArg2(i, v0)
  3684  				return true
  3685  			}
  3686  		}
  3687  		break
  3688  	}
  3689  	// match: (And64 (Const64 <t> [c]) (And64 (Const64 <t> [d]) x))
  3690  	// result: (And64 (Const64 <t> [c&d]) x)
  3691  	for {
  3692  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3693  			if v_0.Op != OpConst64 {
  3694  				continue
  3695  			}
  3696  			t := v_0.Type
  3697  			c := auxIntToInt64(v_0.AuxInt)
  3698  			if v_1.Op != OpAnd64 {
  3699  				continue
  3700  			}
  3701  			_ = v_1.Args[1]
  3702  			v_1_0 := v_1.Args[0]
  3703  			v_1_1 := v_1.Args[1]
  3704  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  3705  				if v_1_0.Op != OpConst64 || v_1_0.Type != t {
  3706  					continue
  3707  				}
  3708  				d := auxIntToInt64(v_1_0.AuxInt)
  3709  				x := v_1_1
  3710  				v.reset(OpAnd64)
  3711  				v0 := b.NewValue0(v.Pos, OpConst64, t)
  3712  				v0.AuxInt = int64ToAuxInt(c & d)
  3713  				v.AddArg2(v0, x)
  3714  				return true
  3715  			}
  3716  		}
  3717  		break
  3718  	}
  3719  	return false
  3720  }
  3721  func rewriteValuegeneric_OpAnd8(v *Value) bool {
  3722  	v_1 := v.Args[1]
  3723  	v_0 := v.Args[0]
  3724  	b := v.Block
  3725  	// match: (And8 (Const8 [c]) (Const8 [d]))
  3726  	// result: (Const8 [c&d])
  3727  	for {
  3728  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3729  			if v_0.Op != OpConst8 {
  3730  				continue
  3731  			}
  3732  			c := auxIntToInt8(v_0.AuxInt)
  3733  			if v_1.Op != OpConst8 {
  3734  				continue
  3735  			}
  3736  			d := auxIntToInt8(v_1.AuxInt)
  3737  			v.reset(OpConst8)
  3738  			v.AuxInt = int8ToAuxInt(c & d)
  3739  			return true
  3740  		}
  3741  		break
  3742  	}
  3743  	// match: (And8 <t> (Com8 x) (Com8 y))
  3744  	// result: (Com8 (Or8 <t> x y))
  3745  	for {
  3746  		t := v.Type
  3747  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3748  			if v_0.Op != OpCom8 {
  3749  				continue
  3750  			}
  3751  			x := v_0.Args[0]
  3752  			if v_1.Op != OpCom8 {
  3753  				continue
  3754  			}
  3755  			y := v_1.Args[0]
  3756  			v.reset(OpCom8)
  3757  			v0 := b.NewValue0(v.Pos, OpOr8, t)
  3758  			v0.AddArg2(x, y)
  3759  			v.AddArg(v0)
  3760  			return true
  3761  		}
  3762  		break
  3763  	}
  3764  	// match: (And8 (Const8 [m]) (Rsh8Ux64 _ (Const64 [c])))
  3765  	// cond: c >= int64(8-ntz8(m))
  3766  	// result: (Const8 [0])
  3767  	for {
  3768  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3769  			if v_0.Op != OpConst8 {
  3770  				continue
  3771  			}
  3772  			m := auxIntToInt8(v_0.AuxInt)
  3773  			if v_1.Op != OpRsh8Ux64 {
  3774  				continue
  3775  			}
  3776  			_ = v_1.Args[1]
  3777  			v_1_1 := v_1.Args[1]
  3778  			if v_1_1.Op != OpConst64 {
  3779  				continue
  3780  			}
  3781  			c := auxIntToInt64(v_1_1.AuxInt)
  3782  			if !(c >= int64(8-ntz8(m))) {
  3783  				continue
  3784  			}
  3785  			v.reset(OpConst8)
  3786  			v.AuxInt = int8ToAuxInt(0)
  3787  			return true
  3788  		}
  3789  		break
  3790  	}
  3791  	// match: (And8 (Const8 [m]) (Lsh8x64 _ (Const64 [c])))
  3792  	// cond: c >= int64(8-nlz8(m))
  3793  	// result: (Const8 [0])
  3794  	for {
  3795  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3796  			if v_0.Op != OpConst8 {
  3797  				continue
  3798  			}
  3799  			m := auxIntToInt8(v_0.AuxInt)
  3800  			if v_1.Op != OpLsh8x64 {
  3801  				continue
  3802  			}
  3803  			_ = v_1.Args[1]
  3804  			v_1_1 := v_1.Args[1]
  3805  			if v_1_1.Op != OpConst64 {
  3806  				continue
  3807  			}
  3808  			c := auxIntToInt64(v_1_1.AuxInt)
  3809  			if !(c >= int64(8-nlz8(m))) {
  3810  				continue
  3811  			}
  3812  			v.reset(OpConst8)
  3813  			v.AuxInt = int8ToAuxInt(0)
  3814  			return true
  3815  		}
  3816  		break
  3817  	}
  3818  	// match: (And8 x x)
  3819  	// result: x
  3820  	for {
  3821  		x := v_0
  3822  		if x != v_1 {
  3823  			break
  3824  		}
  3825  		v.copyOf(x)
  3826  		return true
  3827  	}
  3828  	// match: (And8 (Const8 [-1]) x)
  3829  	// result: x
  3830  	for {
  3831  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3832  			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != -1 {
  3833  				continue
  3834  			}
  3835  			x := v_1
  3836  			v.copyOf(x)
  3837  			return true
  3838  		}
  3839  		break
  3840  	}
  3841  	// match: (And8 (Const8 [0]) _)
  3842  	// result: (Const8 [0])
  3843  	for {
  3844  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3845  			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
  3846  				continue
  3847  			}
  3848  			v.reset(OpConst8)
  3849  			v.AuxInt = int8ToAuxInt(0)
  3850  			return true
  3851  		}
  3852  		break
  3853  	}
  3854  	// match: (And8 (Com8 x) x)
  3855  	// result: (Const8 [0])
  3856  	for {
  3857  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3858  			if v_0.Op != OpCom8 {
  3859  				continue
  3860  			}
  3861  			x := v_0.Args[0]
  3862  			if x != v_1 {
  3863  				continue
  3864  			}
  3865  			v.reset(OpConst8)
  3866  			v.AuxInt = int8ToAuxInt(0)
  3867  			return true
  3868  		}
  3869  		break
  3870  	}
  3871  	// match: (And8 x (And8 x y))
  3872  	// result: (And8 x y)
  3873  	for {
  3874  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3875  			x := v_0
  3876  			if v_1.Op != OpAnd8 {
  3877  				continue
  3878  			}
  3879  			_ = v_1.Args[1]
  3880  			v_1_0 := v_1.Args[0]
  3881  			v_1_1 := v_1.Args[1]
  3882  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  3883  				if x != v_1_0 {
  3884  					continue
  3885  				}
  3886  				y := v_1_1
  3887  				v.reset(OpAnd8)
  3888  				v.AddArg2(x, y)
  3889  				return true
  3890  			}
  3891  		}
  3892  		break
  3893  	}
  3894  	// match: (And8 (And8 i:(Const8 <t>) z) x)
  3895  	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
  3896  	// result: (And8 i (And8 <t> z x))
  3897  	for {
  3898  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3899  			if v_0.Op != OpAnd8 {
  3900  				continue
  3901  			}
  3902  			_ = v_0.Args[1]
  3903  			v_0_0 := v_0.Args[0]
  3904  			v_0_1 := v_0.Args[1]
  3905  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
  3906  				i := v_0_0
  3907  				if i.Op != OpConst8 {
  3908  					continue
  3909  				}
  3910  				t := i.Type
  3911  				z := v_0_1
  3912  				x := v_1
  3913  				if !(z.Op != OpConst8 && x.Op != OpConst8) {
  3914  					continue
  3915  				}
  3916  				v.reset(OpAnd8)
  3917  				v0 := b.NewValue0(v.Pos, OpAnd8, t)
  3918  				v0.AddArg2(z, x)
  3919  				v.AddArg2(i, v0)
  3920  				return true
  3921  			}
  3922  		}
  3923  		break
  3924  	}
  3925  	// match: (And8 (Const8 <t> [c]) (And8 (Const8 <t> [d]) x))
  3926  	// result: (And8 (Const8 <t> [c&d]) x)
  3927  	for {
  3928  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3929  			if v_0.Op != OpConst8 {
  3930  				continue
  3931  			}
  3932  			t := v_0.Type
  3933  			c := auxIntToInt8(v_0.AuxInt)
  3934  			if v_1.Op != OpAnd8 {
  3935  				continue
  3936  			}
  3937  			_ = v_1.Args[1]
  3938  			v_1_0 := v_1.Args[0]
  3939  			v_1_1 := v_1.Args[1]
  3940  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  3941  				if v_1_0.Op != OpConst8 || v_1_0.Type != t {
  3942  					continue
  3943  				}
  3944  				d := auxIntToInt8(v_1_0.AuxInt)
  3945  				x := v_1_1
  3946  				v.reset(OpAnd8)
  3947  				v0 := b.NewValue0(v.Pos, OpConst8, t)
  3948  				v0.AuxInt = int8ToAuxInt(c & d)
  3949  				v.AddArg2(v0, x)
  3950  				return true
  3951  			}
  3952  		}
  3953  		break
  3954  	}
  3955  	return false
  3956  }
  3957  func rewriteValuegeneric_OpAndB(v *Value) bool {
  3958  	v_1 := v.Args[1]
  3959  	v_0 := v.Args[0]
  3960  	b := v.Block
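        	// The rules below fuse a pair of range checks such as c <= x && x < d
        	// into one unsigned comparison: after computing x-c, the test becomes
        	// x-c <u d-c, which also rejects every x below c via wraparound.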
  3961  	// match: (AndB (Leq64 (Const64 [c]) x) (Less64 x (Const64 [d])))
  3962  	// cond: d >= c
  3963  	// result: (Less64U (Sub64 <x.Type> x (Const64 <x.Type> [c])) (Const64 <x.Type> [d-c]))
  3964  	for {
  3965  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  3966  			if v_0.Op != OpLeq64 {
  3967  				continue
  3968  			}
  3969  			x := v_0.Args[1]
  3970  			v_0_0 := v_0.Args[0]
  3971  			if v_0_0.Op != OpConst64 {
  3972  				continue
  3973  			}
  3974  			c := auxIntToInt64(v_0_0.AuxInt)
  3975  			if v_1.Op != OpLess64 {
  3976  				continue
  3977  			}
  3978  			_ = v_1.Args[1]
  3979  			if x != v_1.Args[0] {
  3980  				continue
  3981  			}
  3982  			v_1_1 := v_1.Args[1]
  3983  			if v_1_1.Op != OpConst64 {
  3984  				continue
  3985  			}
  3986  			d := auxIntToInt64(v_1_1.AuxInt)
  3987  			if !(d >= c) {
  3988  				continue
  3989  			}
  3990  			v.reset(OpLess64U)
  3991  			v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
  3992  			v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
  3993  			v1.AuxInt = int64ToAuxInt(c)
  3994  			v0.AddArg2(x, v1)
  3995  			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
  3996  			v2.AuxInt = int64ToAuxInt(d - c)
  3997  			v.AddArg2(v0, v2)
  3998  			return true
  3999  		}
  4000  		break
  4001  	}
  4002  	// match: (AndB (Leq64 (Const64 [c]) x) (Leq64 x (Const64 [d])))
  4003  	// cond: d >= c
  4004  	// result: (Leq64U (Sub64 <x.Type> x (Const64 <x.Type> [c])) (Const64 <x.Type> [d-c]))
  4005  	for {
  4006  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4007  			if v_0.Op != OpLeq64 {
  4008  				continue
  4009  			}
  4010  			x := v_0.Args[1]
  4011  			v_0_0 := v_0.Args[0]
  4012  			if v_0_0.Op != OpConst64 {
  4013  				continue
  4014  			}
  4015  			c := auxIntToInt64(v_0_0.AuxInt)
  4016  			if v_1.Op != OpLeq64 {
  4017  				continue
  4018  			}
  4019  			_ = v_1.Args[1]
  4020  			if x != v_1.Args[0] {
  4021  				continue
  4022  			}
  4023  			v_1_1 := v_1.Args[1]
  4024  			if v_1_1.Op != OpConst64 {
  4025  				continue
  4026  			}
  4027  			d := auxIntToInt64(v_1_1.AuxInt)
  4028  			if !(d >= c) {
  4029  				continue
  4030  			}
  4031  			v.reset(OpLeq64U)
  4032  			v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
  4033  			v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
  4034  			v1.AuxInt = int64ToAuxInt(c)
  4035  			v0.AddArg2(x, v1)
  4036  			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
  4037  			v2.AuxInt = int64ToAuxInt(d - c)
  4038  			v.AddArg2(v0, v2)
  4039  			return true
  4040  		}
  4041  		break
  4042  	}
  4043  	// match: (AndB (Leq32 (Const32 [c]) x) (Less32 x (Const32 [d])))
  4044  	// cond: d >= c
  4045  	// result: (Less32U (Sub32 <x.Type> x (Const32 <x.Type> [c])) (Const32 <x.Type> [d-c]))
  4046  	for {
  4047  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4048  			if v_0.Op != OpLeq32 {
  4049  				continue
  4050  			}
  4051  			x := v_0.Args[1]
  4052  			v_0_0 := v_0.Args[0]
  4053  			if v_0_0.Op != OpConst32 {
  4054  				continue
  4055  			}
  4056  			c := auxIntToInt32(v_0_0.AuxInt)
  4057  			if v_1.Op != OpLess32 {
  4058  				continue
  4059  			}
  4060  			_ = v_1.Args[1]
  4061  			if x != v_1.Args[0] {
  4062  				continue
  4063  			}
  4064  			v_1_1 := v_1.Args[1]
  4065  			if v_1_1.Op != OpConst32 {
  4066  				continue
  4067  			}
  4068  			d := auxIntToInt32(v_1_1.AuxInt)
  4069  			if !(d >= c) {
  4070  				continue
  4071  			}
  4072  			v.reset(OpLess32U)
  4073  			v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
  4074  			v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
  4075  			v1.AuxInt = int32ToAuxInt(c)
  4076  			v0.AddArg2(x, v1)
  4077  			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
  4078  			v2.AuxInt = int32ToAuxInt(d - c)
  4079  			v.AddArg2(v0, v2)
  4080  			return true
  4081  		}
  4082  		break
  4083  	}
  4084  	// match: (AndB (Leq32 (Const32 [c]) x) (Leq32 x (Const32 [d])))
  4085  	// cond: d >= c
  4086  	// result: (Leq32U (Sub32 <x.Type> x (Const32 <x.Type> [c])) (Const32 <x.Type> [d-c]))
  4087  	for {
  4088  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4089  			if v_0.Op != OpLeq32 {
  4090  				continue
  4091  			}
  4092  			x := v_0.Args[1]
  4093  			v_0_0 := v_0.Args[0]
  4094  			if v_0_0.Op != OpConst32 {
  4095  				continue
  4096  			}
  4097  			c := auxIntToInt32(v_0_0.AuxInt)
  4098  			if v_1.Op != OpLeq32 {
  4099  				continue
  4100  			}
  4101  			_ = v_1.Args[1]
  4102  			if x != v_1.Args[0] {
  4103  				continue
  4104  			}
  4105  			v_1_1 := v_1.Args[1]
  4106  			if v_1_1.Op != OpConst32 {
  4107  				continue
  4108  			}
  4109  			d := auxIntToInt32(v_1_1.AuxInt)
  4110  			if !(d >= c) {
  4111  				continue
  4112  			}
  4113  			v.reset(OpLeq32U)
  4114  			v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
  4115  			v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
  4116  			v1.AuxInt = int32ToAuxInt(c)
  4117  			v0.AddArg2(x, v1)
  4118  			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
  4119  			v2.AuxInt = int32ToAuxInt(d - c)
  4120  			v.AddArg2(v0, v2)
  4121  			return true
  4122  		}
  4123  		break
  4124  	}
  4125  	// match: (AndB (Leq16 (Const16 [c]) x) (Less16 x (Const16 [d])))
  4126  	// cond: d >= c
  4127  	// result: (Less16U (Sub16 <x.Type> x (Const16 <x.Type> [c])) (Const16 <x.Type> [d-c]))
  4128  	for {
  4129  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4130  			if v_0.Op != OpLeq16 {
  4131  				continue
  4132  			}
  4133  			x := v_0.Args[1]
  4134  			v_0_0 := v_0.Args[0]
  4135  			if v_0_0.Op != OpConst16 {
  4136  				continue
  4137  			}
  4138  			c := auxIntToInt16(v_0_0.AuxInt)
  4139  			if v_1.Op != OpLess16 {
  4140  				continue
  4141  			}
  4142  			_ = v_1.Args[1]
  4143  			if x != v_1.Args[0] {
  4144  				continue
  4145  			}
  4146  			v_1_1 := v_1.Args[1]
  4147  			if v_1_1.Op != OpConst16 {
  4148  				continue
  4149  			}
  4150  			d := auxIntToInt16(v_1_1.AuxInt)
  4151  			if !(d >= c) {
  4152  				continue
  4153  			}
  4154  			v.reset(OpLess16U)
  4155  			v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
  4156  			v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
  4157  			v1.AuxInt = int16ToAuxInt(c)
  4158  			v0.AddArg2(x, v1)
  4159  			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
  4160  			v2.AuxInt = int16ToAuxInt(d - c)
  4161  			v.AddArg2(v0, v2)
  4162  			return true
  4163  		}
  4164  		break
  4165  	}
  4166  	// match: (AndB (Leq16 (Const16 [c]) x) (Leq16 x (Const16 [d])))
  4167  	// cond: d >= c
  4168  	// result: (Leq16U (Sub16 <x.Type> x (Const16 <x.Type> [c])) (Const16 <x.Type> [d-c]))
  4169  	for {
  4170  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4171  			if v_0.Op != OpLeq16 {
  4172  				continue
  4173  			}
  4174  			x := v_0.Args[1]
  4175  			v_0_0 := v_0.Args[0]
  4176  			if v_0_0.Op != OpConst16 {
  4177  				continue
  4178  			}
  4179  			c := auxIntToInt16(v_0_0.AuxInt)
  4180  			if v_1.Op != OpLeq16 {
  4181  				continue
  4182  			}
  4183  			_ = v_1.Args[1]
  4184  			if x != v_1.Args[0] {
  4185  				continue
  4186  			}
  4187  			v_1_1 := v_1.Args[1]
  4188  			if v_1_1.Op != OpConst16 {
  4189  				continue
  4190  			}
  4191  			d := auxIntToInt16(v_1_1.AuxInt)
  4192  			if !(d >= c) {
  4193  				continue
  4194  			}
  4195  			v.reset(OpLeq16U)
  4196  			v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
  4197  			v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
  4198  			v1.AuxInt = int16ToAuxInt(c)
  4199  			v0.AddArg2(x, v1)
  4200  			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
  4201  			v2.AuxInt = int16ToAuxInt(d - c)
  4202  			v.AddArg2(v0, v2)
  4203  			return true
  4204  		}
  4205  		break
  4206  	}
  4207  	// match: (AndB (Leq8 (Const8 [c]) x) (Less8 x (Const8 [d])))
  4208  	// cond: d >= c
  4209  	// result: (Less8U (Sub8 <x.Type> x (Const8 <x.Type> [c])) (Const8 <x.Type> [d-c]))
  4210  	for {
  4211  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4212  			if v_0.Op != OpLeq8 {
  4213  				continue
  4214  			}
  4215  			x := v_0.Args[1]
  4216  			v_0_0 := v_0.Args[0]
  4217  			if v_0_0.Op != OpConst8 {
  4218  				continue
  4219  			}
  4220  			c := auxIntToInt8(v_0_0.AuxInt)
  4221  			if v_1.Op != OpLess8 {
  4222  				continue
  4223  			}
  4224  			_ = v_1.Args[1]
  4225  			if x != v_1.Args[0] {
  4226  				continue
  4227  			}
  4228  			v_1_1 := v_1.Args[1]
  4229  			if v_1_1.Op != OpConst8 {
  4230  				continue
  4231  			}
  4232  			d := auxIntToInt8(v_1_1.AuxInt)
  4233  			if !(d >= c) {
  4234  				continue
  4235  			}
  4236  			v.reset(OpLess8U)
  4237  			v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
  4238  			v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
  4239  			v1.AuxInt = int8ToAuxInt(c)
  4240  			v0.AddArg2(x, v1)
  4241  			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
  4242  			v2.AuxInt = int8ToAuxInt(d - c)
  4243  			v.AddArg2(v0, v2)
  4244  			return true
  4245  		}
  4246  		break
  4247  	}
  4248  	// match: (AndB (Leq8 (Const8 [c]) x) (Leq8 x (Const8 [d])))
  4249  	// cond: d >= c
  4250  	// result: (Leq8U (Sub8 <x.Type> x (Const8 <x.Type> [c])) (Const8 <x.Type> [d-c]))
  4251  	for {
  4252  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4253  			if v_0.Op != OpLeq8 {
  4254  				continue
  4255  			}
  4256  			x := v_0.Args[1]
  4257  			v_0_0 := v_0.Args[0]
  4258  			if v_0_0.Op != OpConst8 {
  4259  				continue
  4260  			}
  4261  			c := auxIntToInt8(v_0_0.AuxInt)
  4262  			if v_1.Op != OpLeq8 {
  4263  				continue
  4264  			}
  4265  			_ = v_1.Args[1]
  4266  			if x != v_1.Args[0] {
  4267  				continue
  4268  			}
  4269  			v_1_1 := v_1.Args[1]
  4270  			if v_1_1.Op != OpConst8 {
  4271  				continue
  4272  			}
  4273  			d := auxIntToInt8(v_1_1.AuxInt)
  4274  			if !(d >= c) {
  4275  				continue
  4276  			}
  4277  			v.reset(OpLeq8U)
  4278  			v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
  4279  			v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
  4280  			v1.AuxInt = int8ToAuxInt(c)
  4281  			v0.AddArg2(x, v1)
  4282  			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
  4283  			v2.AuxInt = int8ToAuxInt(d - c)
  4284  			v.AddArg2(v0, v2)
  4285  			return true
  4286  		}
  4287  		break
  4288  	}
  4289  	// match: (AndB (Less64 (Const64 [c]) x) (Less64 x (Const64 [d])))
  4290  	// cond: d >= c+1 && c+1 > c
  4291  	// result: (Less64U (Sub64 <x.Type> x (Const64 <x.Type> [c+1])) (Const64 <x.Type> [d-c-1]))
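        	// A strict lower bound c < x is handled as c+1 <= x; the extra
        	// condition c+1 > c rejects the case where c+1 would overflow.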
  4292  	for {
  4293  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4294  			if v_0.Op != OpLess64 {
  4295  				continue
  4296  			}
  4297  			x := v_0.Args[1]
  4298  			v_0_0 := v_0.Args[0]
  4299  			if v_0_0.Op != OpConst64 {
  4300  				continue
  4301  			}
  4302  			c := auxIntToInt64(v_0_0.AuxInt)
  4303  			if v_1.Op != OpLess64 {
  4304  				continue
  4305  			}
  4306  			_ = v_1.Args[1]
  4307  			if x != v_1.Args[0] {
  4308  				continue
  4309  			}
  4310  			v_1_1 := v_1.Args[1]
  4311  			if v_1_1.Op != OpConst64 {
  4312  				continue
  4313  			}
  4314  			d := auxIntToInt64(v_1_1.AuxInt)
  4315  			if !(d >= c+1 && c+1 > c) {
  4316  				continue
  4317  			}
  4318  			v.reset(OpLess64U)
  4319  			v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
  4320  			v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
  4321  			v1.AuxInt = int64ToAuxInt(c + 1)
  4322  			v0.AddArg2(x, v1)
  4323  			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
  4324  			v2.AuxInt = int64ToAuxInt(d - c - 1)
  4325  			v.AddArg2(v0, v2)
  4326  			return true
  4327  		}
  4328  		break
  4329  	}
  4330  	// match: (AndB (Less64 (Const64 [c]) x) (Leq64 x (Const64 [d])))
  4331  	// cond: d >= c+1 && c+1 > c
  4332  	// result: (Leq64U (Sub64 <x.Type> x (Const64 <x.Type> [c+1])) (Const64 <x.Type> [d-c-1]))
  4333  	for {
  4334  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4335  			if v_0.Op != OpLess64 {
  4336  				continue
  4337  			}
  4338  			x := v_0.Args[1]
  4339  			v_0_0 := v_0.Args[0]
  4340  			if v_0_0.Op != OpConst64 {
  4341  				continue
  4342  			}
  4343  			c := auxIntToInt64(v_0_0.AuxInt)
  4344  			if v_1.Op != OpLeq64 {
  4345  				continue
  4346  			}
  4347  			_ = v_1.Args[1]
  4348  			if x != v_1.Args[0] {
  4349  				continue
  4350  			}
  4351  			v_1_1 := v_1.Args[1]
  4352  			if v_1_1.Op != OpConst64 {
  4353  				continue
  4354  			}
  4355  			d := auxIntToInt64(v_1_1.AuxInt)
  4356  			if !(d >= c+1 && c+1 > c) {
  4357  				continue
  4358  			}
  4359  			v.reset(OpLeq64U)
  4360  			v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
  4361  			v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
  4362  			v1.AuxInt = int64ToAuxInt(c + 1)
  4363  			v0.AddArg2(x, v1)
  4364  			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
  4365  			v2.AuxInt = int64ToAuxInt(d - c - 1)
  4366  			v.AddArg2(v0, v2)
  4367  			return true
  4368  		}
  4369  		break
  4370  	}
  4371  	// match: (AndB (Less32 (Const32 [c]) x) (Less32 x (Const32 [d])))
  4372  	// cond: d >= c+1 && c+1 > c
  4373  	// result: (Less32U (Sub32 <x.Type> x (Const32 <x.Type> [c+1])) (Const32 <x.Type> [d-c-1]))
  4374  	for {
  4375  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4376  			if v_0.Op != OpLess32 {
  4377  				continue
  4378  			}
  4379  			x := v_0.Args[1]
  4380  			v_0_0 := v_0.Args[0]
  4381  			if v_0_0.Op != OpConst32 {
  4382  				continue
  4383  			}
  4384  			c := auxIntToInt32(v_0_0.AuxInt)
  4385  			if v_1.Op != OpLess32 {
  4386  				continue
  4387  			}
  4388  			_ = v_1.Args[1]
  4389  			if x != v_1.Args[0] {
  4390  				continue
  4391  			}
  4392  			v_1_1 := v_1.Args[1]
  4393  			if v_1_1.Op != OpConst32 {
  4394  				continue
  4395  			}
  4396  			d := auxIntToInt32(v_1_1.AuxInt)
  4397  			if !(d >= c+1 && c+1 > c) {
  4398  				continue
  4399  			}
  4400  			v.reset(OpLess32U)
  4401  			v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
  4402  			v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
  4403  			v1.AuxInt = int32ToAuxInt(c + 1)
  4404  			v0.AddArg2(x, v1)
  4405  			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
  4406  			v2.AuxInt = int32ToAuxInt(d - c - 1)
  4407  			v.AddArg2(v0, v2)
  4408  			return true
  4409  		}
  4410  		break
  4411  	}
  4412  	// match: (AndB (Less32 (Const32 [c]) x) (Leq32 x (Const32 [d])))
  4413  	// cond: d >= c+1 && c+1 > c
  4414  	// result: (Leq32U (Sub32 <x.Type> x (Const32 <x.Type> [c+1])) (Const32 <x.Type> [d-c-1]))
  4415  	for {
  4416  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4417  			if v_0.Op != OpLess32 {
  4418  				continue
  4419  			}
  4420  			x := v_0.Args[1]
  4421  			v_0_0 := v_0.Args[0]
  4422  			if v_0_0.Op != OpConst32 {
  4423  				continue
  4424  			}
  4425  			c := auxIntToInt32(v_0_0.AuxInt)
  4426  			if v_1.Op != OpLeq32 {
  4427  				continue
  4428  			}
  4429  			_ = v_1.Args[1]
  4430  			if x != v_1.Args[0] {
  4431  				continue
  4432  			}
  4433  			v_1_1 := v_1.Args[1]
  4434  			if v_1_1.Op != OpConst32 {
  4435  				continue
  4436  			}
  4437  			d := auxIntToInt32(v_1_1.AuxInt)
  4438  			if !(d >= c+1 && c+1 > c) {
  4439  				continue
  4440  			}
  4441  			v.reset(OpLeq32U)
  4442  			v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
  4443  			v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
  4444  			v1.AuxInt = int32ToAuxInt(c + 1)
  4445  			v0.AddArg2(x, v1)
  4446  			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
  4447  			v2.AuxInt = int32ToAuxInt(d - c - 1)
  4448  			v.AddArg2(v0, v2)
  4449  			return true
  4450  		}
  4451  		break
  4452  	}
  4453  	// match: (AndB (Less16 (Const16 [c]) x) (Less16 x (Const16 [d])))
  4454  	// cond: d >= c+1 && c+1 > c
  4455  	// result: (Less16U (Sub16 <x.Type> x (Const16 <x.Type> [c+1])) (Const16 <x.Type> [d-c-1]))
  4456  	for {
  4457  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4458  			if v_0.Op != OpLess16 {
  4459  				continue
  4460  			}
  4461  			x := v_0.Args[1]
  4462  			v_0_0 := v_0.Args[0]
  4463  			if v_0_0.Op != OpConst16 {
  4464  				continue
  4465  			}
  4466  			c := auxIntToInt16(v_0_0.AuxInt)
  4467  			if v_1.Op != OpLess16 {
  4468  				continue
  4469  			}
  4470  			_ = v_1.Args[1]
  4471  			if x != v_1.Args[0] {
  4472  				continue
  4473  			}
  4474  			v_1_1 := v_1.Args[1]
  4475  			if v_1_1.Op != OpConst16 {
  4476  				continue
  4477  			}
  4478  			d := auxIntToInt16(v_1_1.AuxInt)
  4479  			if !(d >= c+1 && c+1 > c) {
  4480  				continue
  4481  			}
  4482  			v.reset(OpLess16U)
  4483  			v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
  4484  			v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
  4485  			v1.AuxInt = int16ToAuxInt(c + 1)
  4486  			v0.AddArg2(x, v1)
  4487  			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
  4488  			v2.AuxInt = int16ToAuxInt(d - c - 1)
  4489  			v.AddArg2(v0, v2)
  4490  			return true
  4491  		}
  4492  		break
  4493  	}
  4494  	// match: (AndB (Less16 (Const16 [c]) x) (Leq16 x (Const16 [d])))
  4495  	// cond: d >= c+1 && c+1 > c
  4496  	// result: (Leq16U (Sub16 <x.Type> x (Const16 <x.Type> [c+1])) (Const16 <x.Type> [d-c-1]))
  4497  	for {
  4498  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4499  			if v_0.Op != OpLess16 {
  4500  				continue
  4501  			}
  4502  			x := v_0.Args[1]
  4503  			v_0_0 := v_0.Args[0]
  4504  			if v_0_0.Op != OpConst16 {
  4505  				continue
  4506  			}
  4507  			c := auxIntToInt16(v_0_0.AuxInt)
  4508  			if v_1.Op != OpLeq16 {
  4509  				continue
  4510  			}
  4511  			_ = v_1.Args[1]
  4512  			if x != v_1.Args[0] {
  4513  				continue
  4514  			}
  4515  			v_1_1 := v_1.Args[1]
  4516  			if v_1_1.Op != OpConst16 {
  4517  				continue
  4518  			}
  4519  			d := auxIntToInt16(v_1_1.AuxInt)
  4520  			if !(d >= c+1 && c+1 > c) {
  4521  				continue
  4522  			}
  4523  			v.reset(OpLeq16U)
  4524  			v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
  4525  			v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
  4526  			v1.AuxInt = int16ToAuxInt(c + 1)
  4527  			v0.AddArg2(x, v1)
  4528  			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
  4529  			v2.AuxInt = int16ToAuxInt(d - c - 1)
  4530  			v.AddArg2(v0, v2)
  4531  			return true
  4532  		}
  4533  		break
  4534  	}
  4535  	// match: (AndB (Less8 (Const8 [c]) x) (Less8 x (Const8 [d])))
  4536  	// cond: d >= c+1 && c+1 > c
  4537  	// result: (Less8U (Sub8 <x.Type> x (Const8 <x.Type> [c+1])) (Const8 <x.Type> [d-c-1]))
  4538  	for {
  4539  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4540  			if v_0.Op != OpLess8 {
  4541  				continue
  4542  			}
  4543  			x := v_0.Args[1]
  4544  			v_0_0 := v_0.Args[0]
  4545  			if v_0_0.Op != OpConst8 {
  4546  				continue
  4547  			}
  4548  			c := auxIntToInt8(v_0_0.AuxInt)
  4549  			if v_1.Op != OpLess8 {
  4550  				continue
  4551  			}
  4552  			_ = v_1.Args[1]
  4553  			if x != v_1.Args[0] {
  4554  				continue
  4555  			}
  4556  			v_1_1 := v_1.Args[1]
  4557  			if v_1_1.Op != OpConst8 {
  4558  				continue
  4559  			}
  4560  			d := auxIntToInt8(v_1_1.AuxInt)
  4561  			if !(d >= c+1 && c+1 > c) {
  4562  				continue
  4563  			}
  4564  			v.reset(OpLess8U)
  4565  			v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
  4566  			v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
  4567  			v1.AuxInt = int8ToAuxInt(c + 1)
  4568  			v0.AddArg2(x, v1)
  4569  			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
  4570  			v2.AuxInt = int8ToAuxInt(d - c - 1)
  4571  			v.AddArg2(v0, v2)
  4572  			return true
  4573  		}
  4574  		break
  4575  	}
  4576  	// match: (AndB (Less8 (Const8 [c]) x) (Leq8 x (Const8 [d])))
  4577  	// cond: d >= c+1 && c+1 > c
  4578  	// result: (Leq8U (Sub8 <x.Type> x (Const8 <x.Type> [c+1])) (Const8 <x.Type> [d-c-1]))
  4579  	for {
  4580  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4581  			if v_0.Op != OpLess8 {
  4582  				continue
  4583  			}
  4584  			x := v_0.Args[1]
  4585  			v_0_0 := v_0.Args[0]
  4586  			if v_0_0.Op != OpConst8 {
  4587  				continue
  4588  			}
  4589  			c := auxIntToInt8(v_0_0.AuxInt)
  4590  			if v_1.Op != OpLeq8 {
  4591  				continue
  4592  			}
  4593  			_ = v_1.Args[1]
  4594  			if x != v_1.Args[0] {
  4595  				continue
  4596  			}
  4597  			v_1_1 := v_1.Args[1]
  4598  			if v_1_1.Op != OpConst8 {
  4599  				continue
  4600  			}
  4601  			d := auxIntToInt8(v_1_1.AuxInt)
  4602  			if !(d >= c+1 && c+1 > c) {
  4603  				continue
  4604  			}
  4605  			v.reset(OpLeq8U)
  4606  			v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
  4607  			v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
  4608  			v1.AuxInt = int8ToAuxInt(c + 1)
  4609  			v0.AddArg2(x, v1)
  4610  			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
  4611  			v2.AuxInt = int8ToAuxInt(d - c - 1)
  4612  			v.AddArg2(v0, v2)
  4613  			return true
  4614  		}
  4615  		break
  4616  	}
  4617  	// match: (AndB (Leq64U (Const64 [c]) x) (Less64U x (Const64 [d])))
  4618  	// cond: uint64(d) >= uint64(c)
  4619  	// result: (Less64U (Sub64 <x.Type> x (Const64 <x.Type> [c])) (Const64 <x.Type> [d-c]))
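        	// Same fusion for unsigned bounds: the constants are compared as
        	// unsigned values, and x-c <u d-c captures c <=u x && x <u d directly.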
  4620  	for {
  4621  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4622  			if v_0.Op != OpLeq64U {
  4623  				continue
  4624  			}
  4625  			x := v_0.Args[1]
  4626  			v_0_0 := v_0.Args[0]
  4627  			if v_0_0.Op != OpConst64 {
  4628  				continue
  4629  			}
  4630  			c := auxIntToInt64(v_0_0.AuxInt)
  4631  			if v_1.Op != OpLess64U {
  4632  				continue
  4633  			}
  4634  			_ = v_1.Args[1]
  4635  			if x != v_1.Args[0] {
  4636  				continue
  4637  			}
  4638  			v_1_1 := v_1.Args[1]
  4639  			if v_1_1.Op != OpConst64 {
  4640  				continue
  4641  			}
  4642  			d := auxIntToInt64(v_1_1.AuxInt)
  4643  			if !(uint64(d) >= uint64(c)) {
  4644  				continue
  4645  			}
  4646  			v.reset(OpLess64U)
  4647  			v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
  4648  			v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
  4649  			v1.AuxInt = int64ToAuxInt(c)
  4650  			v0.AddArg2(x, v1)
  4651  			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
  4652  			v2.AuxInt = int64ToAuxInt(d - c)
  4653  			v.AddArg2(v0, v2)
  4654  			return true
  4655  		}
  4656  		break
  4657  	}
  4658  	// match: (AndB (Leq64U (Const64 [c]) x) (Leq64U x (Const64 [d])))
  4659  	// cond: uint64(d) >= uint64(c)
  4660  	// result: (Leq64U (Sub64 <x.Type> x (Const64 <x.Type> [c])) (Const64 <x.Type> [d-c]))
  4661  	for {
  4662  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4663  			if v_0.Op != OpLeq64U {
  4664  				continue
  4665  			}
  4666  			x := v_0.Args[1]
  4667  			v_0_0 := v_0.Args[0]
  4668  			if v_0_0.Op != OpConst64 {
  4669  				continue
  4670  			}
  4671  			c := auxIntToInt64(v_0_0.AuxInt)
  4672  			if v_1.Op != OpLeq64U {
  4673  				continue
  4674  			}
  4675  			_ = v_1.Args[1]
  4676  			if x != v_1.Args[0] {
  4677  				continue
  4678  			}
  4679  			v_1_1 := v_1.Args[1]
  4680  			if v_1_1.Op != OpConst64 {
  4681  				continue
  4682  			}
  4683  			d := auxIntToInt64(v_1_1.AuxInt)
  4684  			if !(uint64(d) >= uint64(c)) {
  4685  				continue
  4686  			}
  4687  			v.reset(OpLeq64U)
  4688  			v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
  4689  			v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
  4690  			v1.AuxInt = int64ToAuxInt(c)
  4691  			v0.AddArg2(x, v1)
  4692  			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
  4693  			v2.AuxInt = int64ToAuxInt(d - c)
  4694  			v.AddArg2(v0, v2)
  4695  			return true
  4696  		}
  4697  		break
  4698  	}
  4699  	// match: (AndB (Leq32U (Const32 [c]) x) (Less32U x (Const32 [d])))
  4700  	// cond: uint32(d) >= uint32(c)
  4701  	// result: (Less32U (Sub32 <x.Type> x (Const32 <x.Type> [c])) (Const32 <x.Type> [d-c]))
  4702  	for {
  4703  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4704  			if v_0.Op != OpLeq32U {
  4705  				continue
  4706  			}
  4707  			x := v_0.Args[1]
  4708  			v_0_0 := v_0.Args[0]
  4709  			if v_0_0.Op != OpConst32 {
  4710  				continue
  4711  			}
  4712  			c := auxIntToInt32(v_0_0.AuxInt)
  4713  			if v_1.Op != OpLess32U {
  4714  				continue
  4715  			}
  4716  			_ = v_1.Args[1]
  4717  			if x != v_1.Args[0] {
  4718  				continue
  4719  			}
  4720  			v_1_1 := v_1.Args[1]
  4721  			if v_1_1.Op != OpConst32 {
  4722  				continue
  4723  			}
  4724  			d := auxIntToInt32(v_1_1.AuxInt)
  4725  			if !(uint32(d) >= uint32(c)) {
  4726  				continue
  4727  			}
  4728  			v.reset(OpLess32U)
  4729  			v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
  4730  			v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
  4731  			v1.AuxInt = int32ToAuxInt(c)
  4732  			v0.AddArg2(x, v1)
  4733  			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
  4734  			v2.AuxInt = int32ToAuxInt(d - c)
  4735  			v.AddArg2(v0, v2)
  4736  			return true
  4737  		}
  4738  		break
  4739  	}
  4740  	// match: (AndB (Leq32U (Const32 [c]) x) (Leq32U x (Const32 [d])))
  4741  	// cond: uint32(d) >= uint32(c)
  4742  	// result: (Leq32U (Sub32 <x.Type> x (Const32 <x.Type> [c])) (Const32 <x.Type> [d-c]))
  4743  	for {
  4744  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4745  			if v_0.Op != OpLeq32U {
  4746  				continue
  4747  			}
  4748  			x := v_0.Args[1]
  4749  			v_0_0 := v_0.Args[0]
  4750  			if v_0_0.Op != OpConst32 {
  4751  				continue
  4752  			}
  4753  			c := auxIntToInt32(v_0_0.AuxInt)
  4754  			if v_1.Op != OpLeq32U {
  4755  				continue
  4756  			}
  4757  			_ = v_1.Args[1]
  4758  			if x != v_1.Args[0] {
  4759  				continue
  4760  			}
  4761  			v_1_1 := v_1.Args[1]
  4762  			if v_1_1.Op != OpConst32 {
  4763  				continue
  4764  			}
  4765  			d := auxIntToInt32(v_1_1.AuxInt)
  4766  			if !(uint32(d) >= uint32(c)) {
  4767  				continue
  4768  			}
  4769  			v.reset(OpLeq32U)
  4770  			v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
  4771  			v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
  4772  			v1.AuxInt = int32ToAuxInt(c)
  4773  			v0.AddArg2(x, v1)
  4774  			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
  4775  			v2.AuxInt = int32ToAuxInt(d - c)
  4776  			v.AddArg2(v0, v2)
  4777  			return true
  4778  		}
  4779  		break
  4780  	}
  4781  	// match: (AndB (Leq16U (Const16 [c]) x) (Less16U x (Const16 [d])))
  4782  	// cond: uint16(d) >= uint16(c)
  4783  	// result: (Less16U (Sub16 <x.Type> x (Const16 <x.Type> [c])) (Const16 <x.Type> [d-c]))
  4784  	for {
  4785  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4786  			if v_0.Op != OpLeq16U {
  4787  				continue
  4788  			}
  4789  			x := v_0.Args[1]
  4790  			v_0_0 := v_0.Args[0]
  4791  			if v_0_0.Op != OpConst16 {
  4792  				continue
  4793  			}
  4794  			c := auxIntToInt16(v_0_0.AuxInt)
  4795  			if v_1.Op != OpLess16U {
  4796  				continue
  4797  			}
  4798  			_ = v_1.Args[1]
  4799  			if x != v_1.Args[0] {
  4800  				continue
  4801  			}
  4802  			v_1_1 := v_1.Args[1]
  4803  			if v_1_1.Op != OpConst16 {
  4804  				continue
  4805  			}
  4806  			d := auxIntToInt16(v_1_1.AuxInt)
  4807  			if !(uint16(d) >= uint16(c)) {
  4808  				continue
  4809  			}
  4810  			v.reset(OpLess16U)
  4811  			v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
  4812  			v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
  4813  			v1.AuxInt = int16ToAuxInt(c)
  4814  			v0.AddArg2(x, v1)
  4815  			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
  4816  			v2.AuxInt = int16ToAuxInt(d - c)
  4817  			v.AddArg2(v0, v2)
  4818  			return true
  4819  		}
  4820  		break
  4821  	}
  4822  	// match: (AndB (Leq16U (Const16 [c]) x) (Leq16U x (Const16 [d])))
  4823  	// cond: uint16(d) >= uint16(c)
  4824  	// result: (Leq16U (Sub16 <x.Type> x (Const16 <x.Type> [c])) (Const16 <x.Type> [d-c]))
  4825  	for {
  4826  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4827  			if v_0.Op != OpLeq16U {
  4828  				continue
  4829  			}
  4830  			x := v_0.Args[1]
  4831  			v_0_0 := v_0.Args[0]
  4832  			if v_0_0.Op != OpConst16 {
  4833  				continue
  4834  			}
  4835  			c := auxIntToInt16(v_0_0.AuxInt)
  4836  			if v_1.Op != OpLeq16U {
  4837  				continue
  4838  			}
  4839  			_ = v_1.Args[1]
  4840  			if x != v_1.Args[0] {
  4841  				continue
  4842  			}
  4843  			v_1_1 := v_1.Args[1]
  4844  			if v_1_1.Op != OpConst16 {
  4845  				continue
  4846  			}
  4847  			d := auxIntToInt16(v_1_1.AuxInt)
  4848  			if !(uint16(d) >= uint16(c)) {
  4849  				continue
  4850  			}
  4851  			v.reset(OpLeq16U)
  4852  			v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
  4853  			v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
  4854  			v1.AuxInt = int16ToAuxInt(c)
  4855  			v0.AddArg2(x, v1)
  4856  			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
  4857  			v2.AuxInt = int16ToAuxInt(d - c)
  4858  			v.AddArg2(v0, v2)
  4859  			return true
  4860  		}
  4861  		break
  4862  	}
  4863  	// match: (AndB (Leq8U (Const8 [c]) x) (Less8U x (Const8 [d])))
  4864  	// cond: uint8(d) >= uint8(c)
  4865  	// result: (Less8U (Sub8 <x.Type> x (Const8 <x.Type> [c])) (Const8 <x.Type> [d-c]))
  4866  	for {
  4867  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4868  			if v_0.Op != OpLeq8U {
  4869  				continue
  4870  			}
  4871  			x := v_0.Args[1]
  4872  			v_0_0 := v_0.Args[0]
  4873  			if v_0_0.Op != OpConst8 {
  4874  				continue
  4875  			}
  4876  			c := auxIntToInt8(v_0_0.AuxInt)
  4877  			if v_1.Op != OpLess8U {
  4878  				continue
  4879  			}
  4880  			_ = v_1.Args[1]
  4881  			if x != v_1.Args[0] {
  4882  				continue
  4883  			}
  4884  			v_1_1 := v_1.Args[1]
  4885  			if v_1_1.Op != OpConst8 {
  4886  				continue
  4887  			}
  4888  			d := auxIntToInt8(v_1_1.AuxInt)
  4889  			if !(uint8(d) >= uint8(c)) {
  4890  				continue
  4891  			}
  4892  			v.reset(OpLess8U)
  4893  			v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
  4894  			v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
  4895  			v1.AuxInt = int8ToAuxInt(c)
  4896  			v0.AddArg2(x, v1)
  4897  			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
  4898  			v2.AuxInt = int8ToAuxInt(d - c)
  4899  			v.AddArg2(v0, v2)
  4900  			return true
  4901  		}
  4902  		break
  4903  	}
  4904  	// match: (AndB (Leq8U (Const8 [c]) x) (Leq8U x (Const8 [d])))
  4905  	// cond: uint8(d) >= uint8(c)
  4906  	// result: (Leq8U (Sub8 <x.Type> x (Const8 <x.Type> [c])) (Const8 <x.Type> [d-c]))
  4907  	for {
  4908  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4909  			if v_0.Op != OpLeq8U {
  4910  				continue
  4911  			}
  4912  			x := v_0.Args[1]
  4913  			v_0_0 := v_0.Args[0]
  4914  			if v_0_0.Op != OpConst8 {
  4915  				continue
  4916  			}
  4917  			c := auxIntToInt8(v_0_0.AuxInt)
  4918  			if v_1.Op != OpLeq8U {
  4919  				continue
  4920  			}
  4921  			_ = v_1.Args[1]
  4922  			if x != v_1.Args[0] {
  4923  				continue
  4924  			}
  4925  			v_1_1 := v_1.Args[1]
  4926  			if v_1_1.Op != OpConst8 {
  4927  				continue
  4928  			}
  4929  			d := auxIntToInt8(v_1_1.AuxInt)
  4930  			if !(uint8(d) >= uint8(c)) {
  4931  				continue
  4932  			}
  4933  			v.reset(OpLeq8U)
  4934  			v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
  4935  			v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
  4936  			v1.AuxInt = int8ToAuxInt(c)
  4937  			v0.AddArg2(x, v1)
  4938  			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
  4939  			v2.AuxInt = int8ToAuxInt(d - c)
  4940  			v.AddArg2(v0, v2)
  4941  			return true
  4942  		}
  4943  		break
  4944  	}
  4945  	// match: (AndB (Less64U (Const64 [c]) x) (Less64U x (Const64 [d])))
  4946  	// cond: uint64(d) >= uint64(c+1) && uint64(c+1) > uint64(c)
  4947  	// result: (Less64U (Sub64 <x.Type> x (Const64 <x.Type> [c+1])) (Const64 <x.Type> [d-c-1]))
  4948  	for {
  4949  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4950  			if v_0.Op != OpLess64U {
  4951  				continue
  4952  			}
  4953  			x := v_0.Args[1]
  4954  			v_0_0 := v_0.Args[0]
  4955  			if v_0_0.Op != OpConst64 {
  4956  				continue
  4957  			}
  4958  			c := auxIntToInt64(v_0_0.AuxInt)
  4959  			if v_1.Op != OpLess64U {
  4960  				continue
  4961  			}
  4962  			_ = v_1.Args[1]
  4963  			if x != v_1.Args[0] {
  4964  				continue
  4965  			}
  4966  			v_1_1 := v_1.Args[1]
  4967  			if v_1_1.Op != OpConst64 {
  4968  				continue
  4969  			}
  4970  			d := auxIntToInt64(v_1_1.AuxInt)
  4971  			if !(uint64(d) >= uint64(c+1) && uint64(c+1) > uint64(c)) {
  4972  				continue
  4973  			}
  4974  			v.reset(OpLess64U)
  4975  			v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
  4976  			v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
  4977  			v1.AuxInt = int64ToAuxInt(c + 1)
  4978  			v0.AddArg2(x, v1)
  4979  			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
  4980  			v2.AuxInt = int64ToAuxInt(d - c - 1)
  4981  			v.AddArg2(v0, v2)
  4982  			return true
  4983  		}
  4984  		break
  4985  	}
  4986  	// match: (AndB (Less64U (Const64 [c]) x) (Leq64U x (Const64 [d])))
  4987  	// cond: uint64(d) >= uint64(c+1) && uint64(c+1) > uint64(c)
  4988  	// result: (Leq64U (Sub64 <x.Type> x (Const64 <x.Type> [c+1])) (Const64 <x.Type> [d-c-1]))
  4989  	for {
  4990  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  4991  			if v_0.Op != OpLess64U {
  4992  				continue
  4993  			}
  4994  			x := v_0.Args[1]
  4995  			v_0_0 := v_0.Args[0]
  4996  			if v_0_0.Op != OpConst64 {
  4997  				continue
  4998  			}
  4999  			c := auxIntToInt64(v_0_0.AuxInt)
  5000  			if v_1.Op != OpLeq64U {
  5001  				continue
  5002  			}
  5003  			_ = v_1.Args[1]
  5004  			if x != v_1.Args[0] {
  5005  				continue
  5006  			}
  5007  			v_1_1 := v_1.Args[1]
  5008  			if v_1_1.Op != OpConst64 {
  5009  				continue
  5010  			}
  5011  			d := auxIntToInt64(v_1_1.AuxInt)
  5012  			if !(uint64(d) >= uint64(c+1) && uint64(c+1) > uint64(c)) {
  5013  				continue
  5014  			}
  5015  			v.reset(OpLeq64U)
  5016  			v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
  5017  			v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
  5018  			v1.AuxInt = int64ToAuxInt(c + 1)
  5019  			v0.AddArg2(x, v1)
  5020  			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
  5021  			v2.AuxInt = int64ToAuxInt(d - c - 1)
  5022  			v.AddArg2(v0, v2)
  5023  			return true
  5024  		}
  5025  		break
  5026  	}
  5027  	// match: (AndB (Less32U (Const32 [c]) x) (Less32U x (Const32 [d])))
  5028  	// cond: uint32(d) >= uint32(c+1) && uint32(c+1) > uint32(c)
  5029  	// result: (Less32U (Sub32 <x.Type> x (Const32 <x.Type> [c+1])) (Const32 <x.Type> [d-c-1]))
  5030  	for {
  5031  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  5032  			if v_0.Op != OpLess32U {
  5033  				continue
  5034  			}
  5035  			x := v_0.Args[1]
  5036  			v_0_0 := v_0.Args[0]
  5037  			if v_0_0.Op != OpConst32 {
  5038  				continue
  5039  			}
  5040  			c := auxIntToInt32(v_0_0.AuxInt)
  5041  			if v_1.Op != OpLess32U {
  5042  				continue
  5043  			}
  5044  			_ = v_1.Args[1]
  5045  			if x != v_1.Args[0] {
  5046  				continue
  5047  			}
  5048  			v_1_1 := v_1.Args[1]
  5049  			if v_1_1.Op != OpConst32 {
  5050  				continue
  5051  			}
  5052  			d := auxIntToInt32(v_1_1.AuxInt)
  5053  			if !(uint32(d) >= uint32(c+1) && uint32(c+1) > uint32(c)) {
  5054  				continue
  5055  			}
  5056  			v.reset(OpLess32U)
  5057  			v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
  5058  			v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
  5059  			v1.AuxInt = int32ToAuxInt(c + 1)
  5060  			v0.AddArg2(x, v1)
  5061  			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
  5062  			v2.AuxInt = int32ToAuxInt(d - c - 1)
  5063  			v.AddArg2(v0, v2)
  5064  			return true
  5065  		}
  5066  		break
  5067  	}
  5068  	// match: (AndB (Less32U (Const32 [c]) x) (Leq32U x (Const32 [d])))
  5069  	// cond: uint32(d) >= uint32(c+1) && uint32(c+1) > uint32(c)
  5070  	// result: (Leq32U (Sub32 <x.Type> x (Const32 <x.Type> [c+1])) (Const32 <x.Type> [d-c-1]))
  5071  	for {
  5072  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  5073  			if v_0.Op != OpLess32U {
  5074  				continue
  5075  			}
  5076  			x := v_0.Args[1]
  5077  			v_0_0 := v_0.Args[0]
  5078  			if v_0_0.Op != OpConst32 {
  5079  				continue
  5080  			}
  5081  			c := auxIntToInt32(v_0_0.AuxInt)
  5082  			if v_1.Op != OpLeq32U {
  5083  				continue
  5084  			}
  5085  			_ = v_1.Args[1]
  5086  			if x != v_1.Args[0] {
  5087  				continue
  5088  			}
  5089  			v_1_1 := v_1.Args[1]
  5090  			if v_1_1.Op != OpConst32 {
  5091  				continue
  5092  			}
  5093  			d := auxIntToInt32(v_1_1.AuxInt)
  5094  			if !(uint32(d) >= uint32(c+1) && uint32(c+1) > uint32(c)) {
  5095  				continue
  5096  			}
  5097  			v.reset(OpLeq32U)
  5098  			v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
  5099  			v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
  5100  			v1.AuxInt = int32ToAuxInt(c + 1)
  5101  			v0.AddArg2(x, v1)
  5102  			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
  5103  			v2.AuxInt = int32ToAuxInt(d - c - 1)
  5104  			v.AddArg2(v0, v2)
  5105  			return true
  5106  		}
  5107  		break
  5108  	}
  5109  	// match: (AndB (Less16U (Const16 [c]) x) (Less16U x (Const16 [d])))
  5110  	// cond: uint16(d) >= uint16(c+1) && uint16(c+1) > uint16(c)
  5111  	// result: (Less16U (Sub16 <x.Type> x (Const16 <x.Type> [c+1])) (Const16 <x.Type> [d-c-1]))
  5112  	for {
  5113  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  5114  			if v_0.Op != OpLess16U {
  5115  				continue
  5116  			}
  5117  			x := v_0.Args[1]
  5118  			v_0_0 := v_0.Args[0]
  5119  			if v_0_0.Op != OpConst16 {
  5120  				continue
  5121  			}
  5122  			c := auxIntToInt16(v_0_0.AuxInt)
  5123  			if v_1.Op != OpLess16U {
  5124  				continue
  5125  			}
  5126  			_ = v_1.Args[1]
  5127  			if x != v_1.Args[0] {
  5128  				continue
  5129  			}
  5130  			v_1_1 := v_1.Args[1]
  5131  			if v_1_1.Op != OpConst16 {
  5132  				continue
  5133  			}
  5134  			d := auxIntToInt16(v_1_1.AuxInt)
  5135  			if !(uint16(d) >= uint16(c+1) && uint16(c+1) > uint16(c)) {
  5136  				continue
  5137  			}
  5138  			v.reset(OpLess16U)
  5139  			v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
  5140  			v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
  5141  			v1.AuxInt = int16ToAuxInt(c + 1)
  5142  			v0.AddArg2(x, v1)
  5143  			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
  5144  			v2.AuxInt = int16ToAuxInt(d - c - 1)
  5145  			v.AddArg2(v0, v2)
  5146  			return true
  5147  		}
  5148  		break
  5149  	}
  5150  	// match: (AndB (Less16U (Const16 [c]) x) (Leq16U x (Const16 [d])))
  5151  	// cond: uint16(d) >= uint16(c+1) && uint16(c+1) > uint16(c)
  5152  	// result: (Leq16U (Sub16 <x.Type> x (Const16 <x.Type> [c+1])) (Const16 <x.Type> [d-c-1]))
  5153  	for {
  5154  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  5155  			if v_0.Op != OpLess16U {
  5156  				continue
  5157  			}
  5158  			x := v_0.Args[1]
  5159  			v_0_0 := v_0.Args[0]
  5160  			if v_0_0.Op != OpConst16 {
  5161  				continue
  5162  			}
  5163  			c := auxIntToInt16(v_0_0.AuxInt)
  5164  			if v_1.Op != OpLeq16U {
  5165  				continue
  5166  			}
  5167  			_ = v_1.Args[1]
  5168  			if x != v_1.Args[0] {
  5169  				continue
  5170  			}
  5171  			v_1_1 := v_1.Args[1]
  5172  			if v_1_1.Op != OpConst16 {
  5173  				continue
  5174  			}
  5175  			d := auxIntToInt16(v_1_1.AuxInt)
  5176  			if !(uint16(d) >= uint16(c+1) && uint16(c+1) > uint16(c)) {
  5177  				continue
  5178  			}
  5179  			v.reset(OpLeq16U)
  5180  			v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
  5181  			v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
  5182  			v1.AuxInt = int16ToAuxInt(c + 1)
  5183  			v0.AddArg2(x, v1)
  5184  			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
  5185  			v2.AuxInt = int16ToAuxInt(d - c - 1)
  5186  			v.AddArg2(v0, v2)
  5187  			return true
  5188  		}
  5189  		break
  5190  	}
  5191  	// match: (AndB (Less8U (Const8 [c]) x) (Less8U x (Const8 [d])))
  5192  	// cond: uint8(d) >= uint8(c+1) && uint8(c+1) > uint8(c)
  5193  	// result: (Less8U (Sub8 <x.Type> x (Const8 <x.Type> [c+1])) (Const8 <x.Type> [d-c-1]))
  5194  	for {
  5195  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  5196  			if v_0.Op != OpLess8U {
  5197  				continue
  5198  			}
  5199  			x := v_0.Args[1]
  5200  			v_0_0 := v_0.Args[0]
  5201  			if v_0_0.Op != OpConst8 {
  5202  				continue
  5203  			}
  5204  			c := auxIntToInt8(v_0_0.AuxInt)
  5205  			if v_1.Op != OpLess8U {
  5206  				continue
  5207  			}
  5208  			_ = v_1.Args[1]
  5209  			if x != v_1.Args[0] {
  5210  				continue
  5211  			}
  5212  			v_1_1 := v_1.Args[1]
  5213  			if v_1_1.Op != OpConst8 {
  5214  				continue
  5215  			}
  5216  			d := auxIntToInt8(v_1_1.AuxInt)
  5217  			if !(uint8(d) >= uint8(c+1) && uint8(c+1) > uint8(c)) {
  5218  				continue
  5219  			}
  5220  			v.reset(OpLess8U)
  5221  			v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
  5222  			v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
  5223  			v1.AuxInt = int8ToAuxInt(c + 1)
  5224  			v0.AddArg2(x, v1)
  5225  			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
  5226  			v2.AuxInt = int8ToAuxInt(d - c - 1)
  5227  			v.AddArg2(v0, v2)
  5228  			return true
  5229  		}
  5230  		break
  5231  	}
  5232  	// match: (AndB (Less8U (Const8 [c]) x) (Leq8U x (Const8 [d])))
  5233  	// cond: uint8(d) >= uint8(c+1) && uint8(c+1) > uint8(c)
  5234  	// result: (Leq8U (Sub8 <x.Type> x (Const8 <x.Type> [c+1])) (Const8 <x.Type> [d-c-1]))
  5235  	for {
  5236  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  5237  			if v_0.Op != OpLess8U {
  5238  				continue
  5239  			}
  5240  			x := v_0.Args[1]
  5241  			v_0_0 := v_0.Args[0]
  5242  			if v_0_0.Op != OpConst8 {
  5243  				continue
  5244  			}
  5245  			c := auxIntToInt8(v_0_0.AuxInt)
  5246  			if v_1.Op != OpLeq8U {
  5247  				continue
  5248  			}
  5249  			_ = v_1.Args[1]
  5250  			if x != v_1.Args[0] {
  5251  				continue
  5252  			}
  5253  			v_1_1 := v_1.Args[1]
  5254  			if v_1_1.Op != OpConst8 {
  5255  				continue
  5256  			}
  5257  			d := auxIntToInt8(v_1_1.AuxInt)
  5258  			if !(uint8(d) >= uint8(c+1) && uint8(c+1) > uint8(c)) {
  5259  				continue
  5260  			}
  5261  			v.reset(OpLeq8U)
  5262  			v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
  5263  			v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
  5264  			v1.AuxInt = int8ToAuxInt(c + 1)
  5265  			v0.AddArg2(x, v1)
  5266  			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
  5267  			v2.AuxInt = int8ToAuxInt(d - c - 1)
  5268  			v.AddArg2(v0, v2)
  5269  			return true
  5270  		}
  5271  		break
  5272  	}
  5273  	return false
  5274  }
  5275  func rewriteValuegeneric_OpArraySelect(v *Value) bool {
  5276  	v_0 := v.Args[0]
  5277  	// match: (ArraySelect (ArrayMake1 x))
  5278  	// result: x
  5279  	for {
  5280  		if v_0.Op != OpArrayMake1 {
  5281  			break
  5282  		}
  5283  		x := v_0.Args[0]
  5284  		v.copyOf(x)
  5285  		return true
  5286  	}
  5287  	// match: (ArraySelect [0] (IData x))
  5288  	// result: (IData x)
  5289  	for {
  5290  		if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpIData {
  5291  			break
  5292  		}
  5293  		x := v_0.Args[0]
  5294  		v.reset(OpIData)
  5295  		v.AddArg(x)
  5296  		return true
  5297  	}
  5298  	return false
  5299  }
  5300  func rewriteValuegeneric_OpBitLen16(v *Value) bool {
  5301  	v_0 := v.Args[0]
  5302  	b := v.Block
  5303  	config := b.Func.Config
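        	// Constant-fold BitLen. The folded constant's width follows the
        	// target's int size, selected here via config.PtrSize (8 or 4 bytes).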
  5304  	// match: (BitLen16 (Const16 [c]))
  5305  	// cond: config.PtrSize == 8
  5306  	// result: (Const64 [int64(bits.Len16(uint16(c)))])
  5307  	for {
  5308  		if v_0.Op != OpConst16 {
  5309  			break
  5310  		}
  5311  		c := auxIntToInt16(v_0.AuxInt)
  5312  		if !(config.PtrSize == 8) {
  5313  			break
  5314  		}
  5315  		v.reset(OpConst64)
  5316  		v.AuxInt = int64ToAuxInt(int64(bits.Len16(uint16(c))))
  5317  		return true
  5318  	}
  5319  	// match: (BitLen16 (Const16 [c]))
  5320  	// cond: config.PtrSize == 4
  5321  	// result: (Const32 [int32(bits.Len16(uint16(c)))])
  5322  	for {
  5323  		if v_0.Op != OpConst16 {
  5324  			break
  5325  		}
  5326  		c := auxIntToInt16(v_0.AuxInt)
  5327  		if !(config.PtrSize == 4) {
  5328  			break
  5329  		}
  5330  		v.reset(OpConst32)
  5331  		v.AuxInt = int32ToAuxInt(int32(bits.Len16(uint16(c))))
  5332  		return true
  5333  	}
  5334  	return false
  5335  }
  5336  func rewriteValuegeneric_OpBitLen32(v *Value) bool {
  5337  	v_0 := v.Args[0]
  5338  	b := v.Block
  5339  	config := b.Func.Config
  5340  	// match: (BitLen32 (Const32 [c]))
  5341  	// cond: config.PtrSize == 8
  5342  	// result: (Const64 [int64(bits.Len32(uint32(c)))])
  5343  	for {
  5344  		if v_0.Op != OpConst32 {
  5345  			break
  5346  		}
  5347  		c := auxIntToInt32(v_0.AuxInt)
  5348  		if !(config.PtrSize == 8) {
  5349  			break
  5350  		}
  5351  		v.reset(OpConst64)
  5352  		v.AuxInt = int64ToAuxInt(int64(bits.Len32(uint32(c))))
  5353  		return true
  5354  	}
  5355  	// match: (BitLen32 (Const32 [c]))
  5356  	// cond: config.PtrSize == 4
  5357  	// result: (Const32 [int32(bits.Len32(uint32(c)))])
  5358  	for {
  5359  		if v_0.Op != OpConst32 {
  5360  			break
  5361  		}
  5362  		c := auxIntToInt32(v_0.AuxInt)
  5363  		if !(config.PtrSize == 4) {
  5364  			break
  5365  		}
  5366  		v.reset(OpConst32)
  5367  		v.AuxInt = int32ToAuxInt(int32(bits.Len32(uint32(c))))
  5368  		return true
  5369  	}
  5370  	return false
  5371  }
  5372  func rewriteValuegeneric_OpBitLen64(v *Value) bool {
  5373  	v_0 := v.Args[0]
  5374  	b := v.Block
  5375  	config := b.Func.Config
  5376  	// match: (BitLen64 (Const64 [c]))
  5377  	// cond: config.PtrSize == 8
  5378  	// result: (Const64 [int64(bits.Len64(uint64(c)))])
  5379  	for {
  5380  		if v_0.Op != OpConst64 {
  5381  			break
  5382  		}
  5383  		c := auxIntToInt64(v_0.AuxInt)
  5384  		if !(config.PtrSize == 8) {
  5385  			break
  5386  		}
  5387  		v.reset(OpConst64)
  5388  		v.AuxInt = int64ToAuxInt(int64(bits.Len64(uint64(c))))
  5389  		return true
  5390  	}
  5391  	// match: (BitLen64 (Const64 [c]))
  5392  	// cond: config.PtrSize == 4
  5393  	// result: (Const32 [int32(bits.Len64(uint64(c)))])
  5394  	for {
  5395  		if v_0.Op != OpConst64 {
  5396  			break
  5397  		}
  5398  		c := auxIntToInt64(v_0.AuxInt)
  5399  		if !(config.PtrSize == 4) {
  5400  			break
  5401  		}
  5402  		v.reset(OpConst32)
  5403  		v.AuxInt = int32ToAuxInt(int32(bits.Len64(uint64(c))))
  5404  		return true
  5405  	}
  5406  	return false
  5407  }
  5408  func rewriteValuegeneric_OpBitLen8(v *Value) bool {
  5409  	v_0 := v.Args[0]
  5410  	b := v.Block
  5411  	config := b.Func.Config
  5412  	// match: (BitLen8 (Const8 [c]))
  5413  	// cond: config.PtrSize == 8
  5414  	// result: (Const64 [int64(bits.Len8(uint8(c)))])
  5415  	for {
  5416  		if v_0.Op != OpConst8 {
  5417  			break
  5418  		}
  5419  		c := auxIntToInt8(v_0.AuxInt)
  5420  		if !(config.PtrSize == 8) {
  5421  			break
  5422  		}
  5423  		v.reset(OpConst64)
  5424  		v.AuxInt = int64ToAuxInt(int64(bits.Len8(uint8(c))))
  5425  		return true
  5426  	}
  5427  	// match: (BitLen8 (Const8 [c]))
  5428  	// cond: config.PtrSize == 4
  5429  	// result: (Const32 [int32(bits.Len8(uint8(c)))])
  5430  	for {
  5431  		if v_0.Op != OpConst8 {
  5432  			break
  5433  		}
  5434  		c := auxIntToInt8(v_0.AuxInt)
  5435  		if !(config.PtrSize == 4) {
  5436  			break
  5437  		}
  5438  		v.reset(OpConst32)
  5439  		v.AuxInt = int32ToAuxInt(int32(bits.Len8(uint8(c))))
  5440  		return true
  5441  	}
  5442  	return false
  5443  }
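// The four BitLen rules above (and the Ctz rules further below) fold a
// constant operand into a constant result; bits.Len reports how many bits
// are needed to represent the value, e.g. bits.Len16(uint16(5)) == 3.
// The result has the target's int width: Const64 when config.PtrSize == 8,
// Const32 when config.PtrSize == 4.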
  5444  func rewriteValuegeneric_OpCeil(v *Value) bool {
  5445  	v_0 := v.Args[0]
  5446  	// match: (Ceil (Const64F [c]))
  5447  	// result: (Const64F [math.Ceil(c)])
  5448  	for {
  5449  		if v_0.Op != OpConst64F {
  5450  			break
  5451  		}
  5452  		c := auxIntToFloat64(v_0.AuxInt)
  5453  		v.reset(OpConst64F)
  5454  		v.AuxInt = float64ToAuxInt(math.Ceil(c))
  5455  		return true
  5456  	}
  5457  	return false
  5458  }
  5459  func rewriteValuegeneric_OpCom16(v *Value) bool {
  5460  	v_0 := v.Args[0]
  5461  	// match: (Com16 (Com16 x))
  5462  	// result: x
  5463  	for {
  5464  		if v_0.Op != OpCom16 {
  5465  			break
  5466  		}
  5467  		x := v_0.Args[0]
  5468  		v.copyOf(x)
  5469  		return true
  5470  	}
  5471  	// match: (Com16 (Const16 [c]))
  5472  	// result: (Const16 [^c])
  5473  	for {
  5474  		if v_0.Op != OpConst16 {
  5475  			break
  5476  		}
  5477  		c := auxIntToInt16(v_0.AuxInt)
  5478  		v.reset(OpConst16)
  5479  		v.AuxInt = int16ToAuxInt(^c)
  5480  		return true
  5481  	}
  5482  	// match: (Com16 (Add16 (Const16 [-1]) x))
  5483  	// result: (Neg16 x)
  5484  	for {
  5485  		if v_0.Op != OpAdd16 {
  5486  			break
  5487  		}
  5488  		_ = v_0.Args[1]
  5489  		v_0_0 := v_0.Args[0]
  5490  		v_0_1 := v_0.Args[1]
  5491  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  5492  			if v_0_0.Op != OpConst16 || auxIntToInt16(v_0_0.AuxInt) != -1 {
  5493  				continue
  5494  			}
  5495  			x := v_0_1
  5496  			v.reset(OpNeg16)
  5497  			v.AddArg(x)
  5498  			return true
  5499  		}
  5500  		break
  5501  	}
  5502  	return false
  5503  }
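// The last Com16 rule uses the two's-complement identity ^x == -x-1:
// with the matched constant -1, ^(x + -1) == -(x-1) - 1 == -x, so the
// complement of a decrement is a negation. For example, x == 5 gives
// ^(5-1) == ^4 == -5. Com32, Com64, and Com8 below repeat the same
// identities at the other widths.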
  5504  func rewriteValuegeneric_OpCom32(v *Value) bool {
  5505  	v_0 := v.Args[0]
  5506  	// match: (Com32 (Com32 x))
  5507  	// result: x
  5508  	for {
  5509  		if v_0.Op != OpCom32 {
  5510  			break
  5511  		}
  5512  		x := v_0.Args[0]
  5513  		v.copyOf(x)
  5514  		return true
  5515  	}
  5516  	// match: (Com32 (Const32 [c]))
  5517  	// result: (Const32 [^c])
  5518  	for {
  5519  		if v_0.Op != OpConst32 {
  5520  			break
  5521  		}
  5522  		c := auxIntToInt32(v_0.AuxInt)
  5523  		v.reset(OpConst32)
  5524  		v.AuxInt = int32ToAuxInt(^c)
  5525  		return true
  5526  	}
  5527  	// match: (Com32 (Add32 (Const32 [-1]) x))
  5528  	// result: (Neg32 x)
  5529  	for {
  5530  		if v_0.Op != OpAdd32 {
  5531  			break
  5532  		}
  5533  		_ = v_0.Args[1]
  5534  		v_0_0 := v_0.Args[0]
  5535  		v_0_1 := v_0.Args[1]
  5536  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  5537  			if v_0_0.Op != OpConst32 || auxIntToInt32(v_0_0.AuxInt) != -1 {
  5538  				continue
  5539  			}
  5540  			x := v_0_1
  5541  			v.reset(OpNeg32)
  5542  			v.AddArg(x)
  5543  			return true
  5544  		}
  5545  		break
  5546  	}
  5547  	return false
  5548  }
  5549  func rewriteValuegeneric_OpCom64(v *Value) bool {
  5550  	v_0 := v.Args[0]
  5551  	// match: (Com64 (Com64 x))
  5552  	// result: x
  5553  	for {
  5554  		if v_0.Op != OpCom64 {
  5555  			break
  5556  		}
  5557  		x := v_0.Args[0]
  5558  		v.copyOf(x)
  5559  		return true
  5560  	}
  5561  	// match: (Com64 (Const64 [c]))
  5562  	// result: (Const64 [^c])
  5563  	for {
  5564  		if v_0.Op != OpConst64 {
  5565  			break
  5566  		}
  5567  		c := auxIntToInt64(v_0.AuxInt)
  5568  		v.reset(OpConst64)
  5569  		v.AuxInt = int64ToAuxInt(^c)
  5570  		return true
  5571  	}
  5572  	// match: (Com64 (Add64 (Const64 [-1]) x))
  5573  	// result: (Neg64 x)
  5574  	for {
  5575  		if v_0.Op != OpAdd64 {
  5576  			break
  5577  		}
  5578  		_ = v_0.Args[1]
  5579  		v_0_0 := v_0.Args[0]
  5580  		v_0_1 := v_0.Args[1]
  5581  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  5582  			if v_0_0.Op != OpConst64 || auxIntToInt64(v_0_0.AuxInt) != -1 {
  5583  				continue
  5584  			}
  5585  			x := v_0_1
  5586  			v.reset(OpNeg64)
  5587  			v.AddArg(x)
  5588  			return true
  5589  		}
  5590  		break
  5591  	}
  5592  	return false
  5593  }
  5594  func rewriteValuegeneric_OpCom8(v *Value) bool {
  5595  	v_0 := v.Args[0]
  5596  	// match: (Com8 (Com8 x))
  5597  	// result: x
  5598  	for {
  5599  		if v_0.Op != OpCom8 {
  5600  			break
  5601  		}
  5602  		x := v_0.Args[0]
  5603  		v.copyOf(x)
  5604  		return true
  5605  	}
  5606  	// match: (Com8 (Const8 [c]))
  5607  	// result: (Const8 [^c])
  5608  	for {
  5609  		if v_0.Op != OpConst8 {
  5610  			break
  5611  		}
  5612  		c := auxIntToInt8(v_0.AuxInt)
  5613  		v.reset(OpConst8)
  5614  		v.AuxInt = int8ToAuxInt(^c)
  5615  		return true
  5616  	}
  5617  	// match: (Com8 (Add8 (Const8 [-1]) x))
  5618  	// result: (Neg8 x)
  5619  	for {
  5620  		if v_0.Op != OpAdd8 {
  5621  			break
  5622  		}
  5623  		_ = v_0.Args[1]
  5624  		v_0_0 := v_0.Args[0]
  5625  		v_0_1 := v_0.Args[1]
  5626  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  5627  			if v_0_0.Op != OpConst8 || auxIntToInt8(v_0_0.AuxInt) != -1 {
  5628  				continue
  5629  			}
  5630  			x := v_0_1
  5631  			v.reset(OpNeg8)
  5632  			v.AddArg(x)
  5633  			return true
  5634  		}
  5635  		break
  5636  	}
  5637  	return false
  5638  }
  5639  func rewriteValuegeneric_OpConstInterface(v *Value) bool {
  5640  	b := v.Block
  5641  	typ := &b.Func.Config.Types
  5642  	// match: (ConstInterface)
  5643  	// result: (IMake (ConstNil <typ.Uintptr>) (ConstNil <typ.BytePtr>))
  5644  	for {
  5645  		v.reset(OpIMake)
  5646  		v0 := b.NewValue0(v.Pos, OpConstNil, typ.Uintptr)
  5647  		v1 := b.NewValue0(v.Pos, OpConstNil, typ.BytePtr)
  5648  		v.AddArg2(v0, v1)
  5649  		return true
  5650  	}
  5651  }
  5652  func rewriteValuegeneric_OpConstSlice(v *Value) bool {
  5653  	b := v.Block
  5654  	config := b.Func.Config
  5655  	typ := &b.Func.Config.Types
  5656  	// match: (ConstSlice)
  5657  	// cond: config.PtrSize == 4
  5658  	// result: (SliceMake (ConstNil <v.Type.Elem().PtrTo()>) (Const32 <typ.Int> [0]) (Const32 <typ.Int> [0]))
  5659  	for {
  5660  		if !(config.PtrSize == 4) {
  5661  			break
  5662  		}
  5663  		v.reset(OpSliceMake)
  5664  		v0 := b.NewValue0(v.Pos, OpConstNil, v.Type.Elem().PtrTo())
  5665  		v1 := b.NewValue0(v.Pos, OpConst32, typ.Int)
  5666  		v1.AuxInt = int32ToAuxInt(0)
  5667  		v.AddArg3(v0, v1, v1)
  5668  		return true
  5669  	}
  5670  	// match: (ConstSlice)
  5671  	// cond: config.PtrSize == 8
  5672  	// result: (SliceMake (ConstNil <v.Type.Elem().PtrTo()>) (Const64 <typ.Int> [0]) (Const64 <typ.Int> [0]))
  5673  	for {
  5674  		if !(config.PtrSize == 8) {
  5675  			break
  5676  		}
  5677  		v.reset(OpSliceMake)
  5678  		v0 := b.NewValue0(v.Pos, OpConstNil, v.Type.Elem().PtrTo())
  5679  		v1 := b.NewValue0(v.Pos, OpConst64, typ.Int)
  5680  		v1.AuxInt = int64ToAuxInt(0)
  5681  		v.AddArg3(v0, v1, v1)
  5682  		return true
  5683  	}
  5684  	return false
  5685  }
  5686  func rewriteValuegeneric_OpConstString(v *Value) bool {
  5687  	b := v.Block
  5688  	config := b.Func.Config
  5689  	fe := b.Func.fe
  5690  	typ := &b.Func.Config.Types
  5691  	// match: (ConstString {str})
  5692  	// cond: config.PtrSize == 4 && str == ""
  5693  	// result: (StringMake (ConstNil) (Const32 <typ.Int> [0]))
  5694  	for {
  5695  		str := auxToString(v.Aux)
  5696  		if !(config.PtrSize == 4 && str == "") {
  5697  			break
  5698  		}
  5699  		v.reset(OpStringMake)
  5700  		v0 := b.NewValue0(v.Pos, OpConstNil, typ.BytePtr)
  5701  		v1 := b.NewValue0(v.Pos, OpConst32, typ.Int)
  5702  		v1.AuxInt = int32ToAuxInt(0)
  5703  		v.AddArg2(v0, v1)
  5704  		return true
  5705  	}
  5706  	// match: (ConstString {str})
  5707  	// cond: config.PtrSize == 8 && str == ""
  5708  	// result: (StringMake (ConstNil) (Const64 <typ.Int> [0]))
  5709  	for {
  5710  		str := auxToString(v.Aux)
  5711  		if !(config.PtrSize == 8 && str == "") {
  5712  			break
  5713  		}
  5714  		v.reset(OpStringMake)
  5715  		v0 := b.NewValue0(v.Pos, OpConstNil, typ.BytePtr)
  5716  		v1 := b.NewValue0(v.Pos, OpConst64, typ.Int)
  5717  		v1.AuxInt = int64ToAuxInt(0)
  5718  		v.AddArg2(v0, v1)
  5719  		return true
  5720  	}
  5721  	// match: (ConstString {str})
  5722  	// cond: config.PtrSize == 4 && str != ""
  5723  	// result: (StringMake (Addr <typ.BytePtr> {fe.StringData(str)} (SB)) (Const32 <typ.Int> [int32(len(str))]))
  5724  	for {
  5725  		str := auxToString(v.Aux)
  5726  		if !(config.PtrSize == 4 && str != "") {
  5727  			break
  5728  		}
  5729  		v.reset(OpStringMake)
  5730  		v0 := b.NewValue0(v.Pos, OpAddr, typ.BytePtr)
  5731  		v0.Aux = symToAux(fe.StringData(str))
  5732  		v1 := b.NewValue0(v.Pos, OpSB, typ.Uintptr)
  5733  		v0.AddArg(v1)
  5734  		v2 := b.NewValue0(v.Pos, OpConst32, typ.Int)
  5735  		v2.AuxInt = int32ToAuxInt(int32(len(str)))
  5736  		v.AddArg2(v0, v2)
  5737  		return true
  5738  	}
  5739  	// match: (ConstString {str})
  5740  	// cond: config.PtrSize == 8 && str != ""
  5741  	// result: (StringMake (Addr <typ.BytePtr> {fe.StringData(str)} (SB)) (Const64 <typ.Int> [int64(len(str))]))
  5742  	for {
  5743  		str := auxToString(v.Aux)
  5744  		if !(config.PtrSize == 8 && str != "") {
  5745  			break
  5746  		}
  5747  		v.reset(OpStringMake)
  5748  		v0 := b.NewValue0(v.Pos, OpAddr, typ.BytePtr)
  5749  		v0.Aux = symToAux(fe.StringData(str))
  5750  		v1 := b.NewValue0(v.Pos, OpSB, typ.Uintptr)
  5751  		v0.AddArg(v1)
  5752  		v2 := b.NewValue0(v.Pos, OpConst64, typ.Int)
  5753  		v2.AuxInt = int64ToAuxInt(int64(len(str)))
  5754  		v.AddArg2(v0, v2)
  5755  		return true
  5756  	}
  5757  	return false
  5758  }
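// Empty constant strings become a nil data pointer and a zero length;
// non-empty ones become an Addr of the symbol returned by fe.StringData,
// which holds the string's read-only contents, paired with its length.
// In both cases the length constant is Const32 or Const64 to match the
// target's int width (config.PtrSize).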
  5759  func rewriteValuegeneric_OpConvert(v *Value) bool {
  5760  	v_1 := v.Args[1]
  5761  	v_0 := v.Args[0]
  5762  	// match: (Convert (Add64 (Convert ptr mem) off) mem)
  5763  	// result: (AddPtr ptr off)
  5764  	for {
  5765  		if v_0.Op != OpAdd64 {
  5766  			break
  5767  		}
  5768  		_ = v_0.Args[1]
  5769  		v_0_0 := v_0.Args[0]
  5770  		v_0_1 := v_0.Args[1]
  5771  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  5772  			if v_0_0.Op != OpConvert {
  5773  				continue
  5774  			}
  5775  			mem := v_0_0.Args[1]
  5776  			ptr := v_0_0.Args[0]
  5777  			off := v_0_1
  5778  			if mem != v_1 {
  5779  				continue
  5780  			}
  5781  			v.reset(OpAddPtr)
  5782  			v.AddArg2(ptr, off)
  5783  			return true
  5784  		}
  5785  		break
  5786  	}
  5787  	// match: (Convert (Add32 (Convert ptr mem) off) mem)
  5788  	// result: (AddPtr ptr off)
  5789  	for {
  5790  		if v_0.Op != OpAdd32 {
  5791  			break
  5792  		}
  5793  		_ = v_0.Args[1]
  5794  		v_0_0 := v_0.Args[0]
  5795  		v_0_1 := v_0.Args[1]
  5796  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  5797  			if v_0_0.Op != OpConvert {
  5798  				continue
  5799  			}
  5800  			mem := v_0_0.Args[1]
  5801  			ptr := v_0_0.Args[0]
  5802  			off := v_0_1
  5803  			if mem != v_1 {
  5804  				continue
  5805  			}
  5806  			v.reset(OpAddPtr)
  5807  			v.AddArg2(ptr, off)
  5808  			return true
  5809  		}
  5810  		break
  5811  	}
  5812  	// match: (Convert (Convert ptr mem) mem)
  5813  	// result: ptr
  5814  	for {
  5815  		if v_0.Op != OpConvert {
  5816  			break
  5817  		}
  5818  		mem := v_0.Args[1]
  5819  		ptr := v_0.Args[0]
  5820  		if mem != v_1 {
  5821  			break
  5822  		}
  5823  		v.copyOf(ptr)
  5824  		return true
  5825  	}
  5826  	return false
  5827  }
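// Convert marks a pointer/uintptr round trip and carries a memory argument.
// The rules above fold pointer arithmetic done in the uintptr domain,
// Convert(Convert(ptr, mem) + off, mem), into a single AddPtr, and cancel a
// direct double conversion, but only when the inner and outer Convert see
// the same memory value; if the memory arguments differ, the match fails
// and the conversions are kept.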
  5828  func rewriteValuegeneric_OpCtz16(v *Value) bool {
  5829  	v_0 := v.Args[0]
  5830  	b := v.Block
  5831  	config := b.Func.Config
  5832  	// match: (Ctz16 (Const16 [c]))
  5833  	// cond: config.PtrSize == 4
  5834  	// result: (Const32 [int32(ntz16(c))])
  5835  	for {
  5836  		if v_0.Op != OpConst16 {
  5837  			break
  5838  		}
  5839  		c := auxIntToInt16(v_0.AuxInt)
  5840  		if !(config.PtrSize == 4) {
  5841  			break
  5842  		}
  5843  		v.reset(OpConst32)
  5844  		v.AuxInt = int32ToAuxInt(int32(ntz16(c)))
  5845  		return true
  5846  	}
  5847  	// match: (Ctz16 (Const16 [c]))
  5848  	// cond: config.PtrSize == 8
  5849  	// result: (Const64 [int64(ntz16(c))])
  5850  	for {
  5851  		if v_0.Op != OpConst16 {
  5852  			break
  5853  		}
  5854  		c := auxIntToInt16(v_0.AuxInt)
  5855  		if !(config.PtrSize == 8) {
  5856  			break
  5857  		}
  5858  		v.reset(OpConst64)
  5859  		v.AuxInt = int64ToAuxInt(int64(ntz16(c)))
  5860  		return true
  5861  	}
  5862  	return false
  5863  }
  5864  func rewriteValuegeneric_OpCtz32(v *Value) bool {
  5865  	v_0 := v.Args[0]
  5866  	b := v.Block
  5867  	config := b.Func.Config
  5868  	// match: (Ctz32 (Const32 [c]))
  5869  	// cond: config.PtrSize == 4
  5870  	// result: (Const32 [int32(ntz32(c))])
  5871  	for {
  5872  		if v_0.Op != OpConst32 {
  5873  			break
  5874  		}
  5875  		c := auxIntToInt32(v_0.AuxInt)
  5876  		if !(config.PtrSize == 4) {
  5877  			break
  5878  		}
  5879  		v.reset(OpConst32)
  5880  		v.AuxInt = int32ToAuxInt(int32(ntz32(c)))
  5881  		return true
  5882  	}
  5883  	// match: (Ctz32 (Const32 [c]))
  5884  	// cond: config.PtrSize == 8
  5885  	// result: (Const64 [int64(ntz32(c))])
  5886  	for {
  5887  		if v_0.Op != OpConst32 {
  5888  			break
  5889  		}
  5890  		c := auxIntToInt32(v_0.AuxInt)
  5891  		if !(config.PtrSize == 8) {
  5892  			break
  5893  		}
  5894  		v.reset(OpConst64)
  5895  		v.AuxInt = int64ToAuxInt(int64(ntz32(c)))
  5896  		return true
  5897  	}
  5898  	return false
  5899  }
  5900  func rewriteValuegeneric_OpCtz64(v *Value) bool {
  5901  	v_0 := v.Args[0]
  5902  	b := v.Block
  5903  	config := b.Func.Config
  5904  	// match: (Ctz64 (Const64 [c]))
  5905  	// cond: config.PtrSize == 4
  5906  	// result: (Const32 [int32(ntz64(c))])
  5907  	for {
  5908  		if v_0.Op != OpConst64 {
  5909  			break
  5910  		}
  5911  		c := auxIntToInt64(v_0.AuxInt)
  5912  		if !(config.PtrSize == 4) {
  5913  			break
  5914  		}
  5915  		v.reset(OpConst32)
  5916  		v.AuxInt = int32ToAuxInt(int32(ntz64(c)))
  5917  		return true
  5918  	}
  5919  	// match: (Ctz64 (Const64 [c]))
  5920  	// cond: config.PtrSize == 8
  5921  	// result: (Const64 [int64(ntz64(c))])
  5922  	for {
  5923  		if v_0.Op != OpConst64 {
  5924  			break
  5925  		}
  5926  		c := auxIntToInt64(v_0.AuxInt)
  5927  		if !(config.PtrSize == 8) {
  5928  			break
  5929  		}
  5930  		v.reset(OpConst64)
  5931  		v.AuxInt = int64ToAuxInt(int64(ntz64(c)))
  5932  		return true
  5933  	}
  5934  	return false
  5935  }
  5936  func rewriteValuegeneric_OpCtz8(v *Value) bool {
  5937  	v_0 := v.Args[0]
  5938  	b := v.Block
  5939  	config := b.Func.Config
  5940  	// match: (Ctz8 (Const8 [c]))
  5941  	// cond: config.PtrSize == 4
  5942  	// result: (Const32 [int32(ntz8(c))])
  5943  	for {
  5944  		if v_0.Op != OpConst8 {
  5945  			break
  5946  		}
  5947  		c := auxIntToInt8(v_0.AuxInt)
  5948  		if !(config.PtrSize == 4) {
  5949  			break
  5950  		}
  5951  		v.reset(OpConst32)
  5952  		v.AuxInt = int32ToAuxInt(int32(ntz8(c)))
  5953  		return true
  5954  	}
  5955  	// match: (Ctz8 (Const8 [c]))
  5956  	// cond: config.PtrSize == 8
  5957  	// result: (Const64 [int64(ntz8(c))])
  5958  	for {
  5959  		if v_0.Op != OpConst8 {
  5960  			break
  5961  		}
  5962  		c := auxIntToInt8(v_0.AuxInt)
  5963  		if !(config.PtrSize == 8) {
  5964  			break
  5965  		}
  5966  		v.reset(OpConst64)
  5967  		v.AuxInt = int64ToAuxInt(int64(ntz8(c)))
  5968  		return true
  5969  	}
  5970  	return false
  5971  }
  5972  func rewriteValuegeneric_OpCvt32Fto32(v *Value) bool {
  5973  	v_0 := v.Args[0]
  5974  	// match: (Cvt32Fto32 (Const32F [c]))
  5975  	// result: (Const32 [int32(c)])
  5976  	for {
  5977  		if v_0.Op != OpConst32F {
  5978  			break
  5979  		}
  5980  		c := auxIntToFloat32(v_0.AuxInt)
  5981  		v.reset(OpConst32)
  5982  		v.AuxInt = int32ToAuxInt(int32(c))
  5983  		return true
  5984  	}
  5985  	return false
  5986  }
  5987  func rewriteValuegeneric_OpCvt32Fto64(v *Value) bool {
  5988  	v_0 := v.Args[0]
  5989  	// match: (Cvt32Fto64 (Const32F [c]))
  5990  	// result: (Const64 [int64(c)])
  5991  	for {
  5992  		if v_0.Op != OpConst32F {
  5993  			break
  5994  		}
  5995  		c := auxIntToFloat32(v_0.AuxInt)
  5996  		v.reset(OpConst64)
  5997  		v.AuxInt = int64ToAuxInt(int64(c))
  5998  		return true
  5999  	}
  6000  	return false
  6001  }
  6002  func rewriteValuegeneric_OpCvt32Fto64F(v *Value) bool {
  6003  	v_0 := v.Args[0]
  6004  	// match: (Cvt32Fto64F (Const32F [c]))
  6005  	// result: (Const64F [float64(c)])
  6006  	for {
  6007  		if v_0.Op != OpConst32F {
  6008  			break
  6009  		}
  6010  		c := auxIntToFloat32(v_0.AuxInt)
  6011  		v.reset(OpConst64F)
  6012  		v.AuxInt = float64ToAuxInt(float64(c))
  6013  		return true
  6014  	}
  6015  	return false
  6016  }
  6017  func rewriteValuegeneric_OpCvt32to32F(v *Value) bool {
  6018  	v_0 := v.Args[0]
  6019  	// match: (Cvt32to32F (Const32 [c]))
  6020  	// result: (Const32F [float32(c)])
  6021  	for {
  6022  		if v_0.Op != OpConst32 {
  6023  			break
  6024  		}
  6025  		c := auxIntToInt32(v_0.AuxInt)
  6026  		v.reset(OpConst32F)
  6027  		v.AuxInt = float32ToAuxInt(float32(c))
  6028  		return true
  6029  	}
  6030  	return false
  6031  }
  6032  func rewriteValuegeneric_OpCvt32to64F(v *Value) bool {
  6033  	v_0 := v.Args[0]
  6034  	// match: (Cvt32to64F (Const32 [c]))
  6035  	// result: (Const64F [float64(c)])
  6036  	for {
  6037  		if v_0.Op != OpConst32 {
  6038  			break
  6039  		}
  6040  		c := auxIntToInt32(v_0.AuxInt)
  6041  		v.reset(OpConst64F)
  6042  		v.AuxInt = float64ToAuxInt(float64(c))
  6043  		return true
  6044  	}
  6045  	return false
  6046  }
  6047  func rewriteValuegeneric_OpCvt64Fto32(v *Value) bool {
  6048  	v_0 := v.Args[0]
  6049  	// match: (Cvt64Fto32 (Const64F [c]))
  6050  	// result: (Const32 [int32(c)])
  6051  	for {
  6052  		if v_0.Op != OpConst64F {
  6053  			break
  6054  		}
  6055  		c := auxIntToFloat64(v_0.AuxInt)
  6056  		v.reset(OpConst32)
  6057  		v.AuxInt = int32ToAuxInt(int32(c))
  6058  		return true
  6059  	}
  6060  	return false
  6061  }
  6062  func rewriteValuegeneric_OpCvt64Fto32F(v *Value) bool {
  6063  	v_0 := v.Args[0]
  6064  	// match: (Cvt64Fto32F (Const64F [c]))
  6065  	// result: (Const32F [float32(c)])
  6066  	for {
  6067  		if v_0.Op != OpConst64F {
  6068  			break
  6069  		}
  6070  		c := auxIntToFloat64(v_0.AuxInt)
  6071  		v.reset(OpConst32F)
  6072  		v.AuxInt = float32ToAuxInt(float32(c))
  6073  		return true
  6074  	}
  6075  	// match: (Cvt64Fto32F sqrt0:(Sqrt (Cvt32Fto64F x)))
  6076  	// cond: sqrt0.Uses==1
  6077  	// result: (Sqrt32 x)
  6078  	for {
  6079  		sqrt0 := v_0
  6080  		if sqrt0.Op != OpSqrt {
  6081  			break
  6082  		}
  6083  		sqrt0_0 := sqrt0.Args[0]
  6084  		if sqrt0_0.Op != OpCvt32Fto64F {
  6085  			break
  6086  		}
  6087  		x := sqrt0_0.Args[0]
  6088  		if !(sqrt0.Uses == 1) {
  6089  			break
  6090  		}
  6091  		v.reset(OpSqrt32)
  6092  		v.AddArg(x)
  6093  		return true
  6094  	}
  6095  	return false
  6096  }
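// The Sqrt rule above matches the pattern generated for
// float32(math.Sqrt(float64(x))). Sqrt is correctly rounded and float64
// carries more than twice float32's precision, so rounding the float64
// result back to float32 gives the same answer as a single-precision
// sqrt, and the expression can become Sqrt32 x. The sqrt0.Uses == 1 check
// avoids dropping a float64 result that other values still need.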
  6097  func rewriteValuegeneric_OpCvt64Fto64(v *Value) bool {
  6098  	v_0 := v.Args[0]
  6099  	// match: (Cvt64Fto64 (Const64F [c]))
  6100  	// result: (Const64 [int64(c)])
  6101  	for {
  6102  		if v_0.Op != OpConst64F {
  6103  			break
  6104  		}
  6105  		c := auxIntToFloat64(v_0.AuxInt)
  6106  		v.reset(OpConst64)
  6107  		v.AuxInt = int64ToAuxInt(int64(c))
  6108  		return true
  6109  	}
  6110  	return false
  6111  }
  6112  func rewriteValuegeneric_OpCvt64to32F(v *Value) bool {
  6113  	v_0 := v.Args[0]
  6114  	// match: (Cvt64to32F (Const64 [c]))
  6115  	// result: (Const32F [float32(c)])
  6116  	for {
  6117  		if v_0.Op != OpConst64 {
  6118  			break
  6119  		}
  6120  		c := auxIntToInt64(v_0.AuxInt)
  6121  		v.reset(OpConst32F)
  6122  		v.AuxInt = float32ToAuxInt(float32(c))
  6123  		return true
  6124  	}
  6125  	return false
  6126  }
  6127  func rewriteValuegeneric_OpCvt64to64F(v *Value) bool {
  6128  	v_0 := v.Args[0]
  6129  	// match: (Cvt64to64F (Const64 [c]))
  6130  	// result: (Const64F [float64(c)])
  6131  	for {
  6132  		if v_0.Op != OpConst64 {
  6133  			break
  6134  		}
  6135  		c := auxIntToInt64(v_0.AuxInt)
  6136  		v.reset(OpConst64F)
  6137  		v.AuxInt = float64ToAuxInt(float64(c))
  6138  		return true
  6139  	}
  6140  	return false
  6141  }
  6142  func rewriteValuegeneric_OpCvtBoolToUint8(v *Value) bool {
  6143  	v_0 := v.Args[0]
  6144  	// match: (CvtBoolToUint8 (ConstBool [false]))
  6145  	// result: (Const8 [0])
  6146  	for {
  6147  		if v_0.Op != OpConstBool || auxIntToBool(v_0.AuxInt) != false {
  6148  			break
  6149  		}
  6150  		v.reset(OpConst8)
  6151  		v.AuxInt = int8ToAuxInt(0)
  6152  		return true
  6153  	}
  6154  	// match: (CvtBoolToUint8 (ConstBool [true]))
  6155  	// result: (Const8 [1])
  6156  	for {
  6157  		if v_0.Op != OpConstBool || auxIntToBool(v_0.AuxInt) != true {
  6158  			break
  6159  		}
  6160  		v.reset(OpConst8)
  6161  		v.AuxInt = int8ToAuxInt(1)
  6162  		return true
  6163  	}
  6164  	return false
  6165  }
  6166  func rewriteValuegeneric_OpDiv16(v *Value) bool {
  6167  	v_1 := v.Args[1]
  6168  	v_0 := v.Args[0]
  6169  	b := v.Block
  6170  	typ := &b.Func.Config.Types
  6171  	// match: (Div16 (Const16 [c]) (Const16 [d]))
  6172  	// cond: d != 0
  6173  	// result: (Const16 [c/d])
  6174  	for {
  6175  		if v_0.Op != OpConst16 {
  6176  			break
  6177  		}
  6178  		c := auxIntToInt16(v_0.AuxInt)
  6179  		if v_1.Op != OpConst16 {
  6180  			break
  6181  		}
  6182  		d := auxIntToInt16(v_1.AuxInt)
  6183  		if !(d != 0) {
  6184  			break
  6185  		}
  6186  		v.reset(OpConst16)
  6187  		v.AuxInt = int16ToAuxInt(c / d)
  6188  		return true
  6189  	}
  6190  	// match: (Div16 n (Const16 [c]))
  6191  	// cond: isNonNegative(n) && isPowerOfTwo(c)
  6192  	// result: (Rsh16Ux64 n (Const64 <typ.UInt64> [log16(c)]))
  6193  	for {
  6194  		n := v_0
  6195  		if v_1.Op != OpConst16 {
  6196  			break
  6197  		}
  6198  		c := auxIntToInt16(v_1.AuxInt)
  6199  		if !(isNonNegative(n) && isPowerOfTwo(c)) {
  6200  			break
  6201  		}
  6202  		v.reset(OpRsh16Ux64)
  6203  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6204  		v0.AuxInt = int64ToAuxInt(log16(c))
  6205  		v.AddArg2(n, v0)
  6206  		return true
  6207  	}
  6208  	// match: (Div16 <t> n (Const16 [c]))
  6209  	// cond: c < 0 && c != -1<<15
  6210  	// result: (Neg16 (Div16 <t> n (Const16 <t> [-c])))
  6211  	for {
  6212  		t := v.Type
  6213  		n := v_0
  6214  		if v_1.Op != OpConst16 {
  6215  			break
  6216  		}
  6217  		c := auxIntToInt16(v_1.AuxInt)
  6218  		if !(c < 0 && c != -1<<15) {
  6219  			break
  6220  		}
  6221  		v.reset(OpNeg16)
  6222  		v0 := b.NewValue0(v.Pos, OpDiv16, t)
  6223  		v1 := b.NewValue0(v.Pos, OpConst16, t)
  6224  		v1.AuxInt = int16ToAuxInt(-c)
  6225  		v0.AddArg2(n, v1)
  6226  		v.AddArg(v0)
  6227  		return true
  6228  	}
  6229  	// match: (Div16 <t> x (Const16 [-1<<15]))
  6230  	// result: (Rsh16Ux64 (And16 <t> x (Neg16 <t> x)) (Const64 <typ.UInt64> [15]))
  6231  	for {
  6232  		t := v.Type
  6233  		x := v_0
  6234  		if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != -1<<15 {
  6235  			break
  6236  		}
  6237  		v.reset(OpRsh16Ux64)
  6238  		v0 := b.NewValue0(v.Pos, OpAnd16, t)
  6239  		v1 := b.NewValue0(v.Pos, OpNeg16, t)
  6240  		v1.AddArg(x)
  6241  		v0.AddArg2(x, v1)
  6242  		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6243  		v2.AuxInt = int64ToAuxInt(15)
  6244  		v.AddArg2(v0, v2)
  6245  		return true
  6246  	}
  6247  	// match: (Div16 <t> n (Const16 [c]))
  6248  	// cond: isPowerOfTwo(c)
  6249  	// result: (Rsh16x64 (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [int64(16-log16(c))]))) (Const64 <typ.UInt64> [int64(log16(c))]))
  6250  	for {
  6251  		t := v.Type
  6252  		n := v_0
  6253  		if v_1.Op != OpConst16 {
  6254  			break
  6255  		}
  6256  		c := auxIntToInt16(v_1.AuxInt)
  6257  		if !(isPowerOfTwo(c)) {
  6258  			break
  6259  		}
  6260  		v.reset(OpRsh16x64)
  6261  		v0 := b.NewValue0(v.Pos, OpAdd16, t)
  6262  		v1 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
  6263  		v2 := b.NewValue0(v.Pos, OpRsh16x64, t)
  6264  		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6265  		v3.AuxInt = int64ToAuxInt(15)
  6266  		v2.AddArg2(n, v3)
  6267  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6268  		v4.AuxInt = int64ToAuxInt(int64(16 - log16(c)))
  6269  		v1.AddArg2(v2, v4)
  6270  		v0.AddArg2(n, v1)
  6271  		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6272  		v5.AuxInt = int64ToAuxInt(int64(log16(c)))
  6273  		v.AddArg2(v0, v5)
  6274  		return true
  6275  	}
  6276  	// match: (Div16 <t> x (Const16 [c]))
  6277  	// cond: smagicOK16(c)
  6278  	// result: (Sub16 <t> (Rsh32x64 <t> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(smagic16(c).m)]) (SignExt16to32 x)) (Const64 <typ.UInt64> [16+smagic16(c).s])) (Rsh32x64 <t> (SignExt16to32 x) (Const64 <typ.UInt64> [31])))
  6279  	for {
  6280  		t := v.Type
  6281  		x := v_0
  6282  		if v_1.Op != OpConst16 {
  6283  			break
  6284  		}
  6285  		c := auxIntToInt16(v_1.AuxInt)
  6286  		if !(smagicOK16(c)) {
  6287  			break
  6288  		}
  6289  		v.reset(OpSub16)
  6290  		v.Type = t
  6291  		v0 := b.NewValue0(v.Pos, OpRsh32x64, t)
  6292  		v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
  6293  		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  6294  		v2.AuxInt = int32ToAuxInt(int32(smagic16(c).m))
  6295  		v3 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  6296  		v3.AddArg(x)
  6297  		v1.AddArg2(v2, v3)
  6298  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6299  		v4.AuxInt = int64ToAuxInt(16 + smagic16(c).s)
  6300  		v0.AddArg2(v1, v4)
  6301  		v5 := b.NewValue0(v.Pos, OpRsh32x64, t)
  6302  		v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6303  		v6.AuxInt = int64ToAuxInt(31)
  6304  		v5.AddArg2(v3, v6)
  6305  		v.AddArg2(v0, v5)
  6306  		return true
  6307  	}
  6308  	return false
  6309  }
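// Signed division must truncate toward zero, so the isPowerOfTwo rule for
// Div16 adds c-1 to negative dividends before the arithmetic shift:
// n >> 15 is 0 or -1, and shifting that right unsigned by 16-log16(c)
// yields 0 or c-1. For example, with c == 4 and n == -7 this computes
// (-7 + 3) >> 2 == -1, matching Go's -7/4, where a bare arithmetic shift
// would give -2. The final smagic16 rule handles the remaining constants
// with a widening multiply; magic.go describes how m and s are chosen.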
  6310  func rewriteValuegeneric_OpDiv16u(v *Value) bool {
  6311  	v_1 := v.Args[1]
  6312  	v_0 := v.Args[0]
  6313  	b := v.Block
  6314  	config := b.Func.Config
  6315  	typ := &b.Func.Config.Types
  6316  	// match: (Div16u (Const16 [c]) (Const16 [d]))
  6317  	// cond: d != 0
  6318  	// result: (Const16 [int16(uint16(c)/uint16(d))])
  6319  	for {
  6320  		if v_0.Op != OpConst16 {
  6321  			break
  6322  		}
  6323  		c := auxIntToInt16(v_0.AuxInt)
  6324  		if v_1.Op != OpConst16 {
  6325  			break
  6326  		}
  6327  		d := auxIntToInt16(v_1.AuxInt)
  6328  		if !(d != 0) {
  6329  			break
  6330  		}
  6331  		v.reset(OpConst16)
  6332  		v.AuxInt = int16ToAuxInt(int16(uint16(c) / uint16(d)))
  6333  		return true
  6334  	}
  6335  	// match: (Div16u n (Const16 [c]))
  6336  	// cond: isPowerOfTwo(c)
  6337  	// result: (Rsh16Ux64 n (Const64 <typ.UInt64> [log16(c)]))
  6338  	for {
  6339  		n := v_0
  6340  		if v_1.Op != OpConst16 {
  6341  			break
  6342  		}
  6343  		c := auxIntToInt16(v_1.AuxInt)
  6344  		if !(isPowerOfTwo(c)) {
  6345  			break
  6346  		}
  6347  		v.reset(OpRsh16Ux64)
  6348  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6349  		v0.AuxInt = int64ToAuxInt(log16(c))
  6350  		v.AddArg2(n, v0)
  6351  		return true
  6352  	}
  6353  	// match: (Div16u x (Const16 [c]))
  6354  	// cond: umagicOK16(c) && config.RegSize == 8
  6355  	// result: (Trunc64to16 (Rsh64Ux64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(1<<16+umagic16(c).m)]) (ZeroExt16to64 x)) (Const64 <typ.UInt64> [16+umagic16(c).s])))
  6356  	for {
  6357  		x := v_0
  6358  		if v_1.Op != OpConst16 {
  6359  			break
  6360  		}
  6361  		c := auxIntToInt16(v_1.AuxInt)
  6362  		if !(umagicOK16(c) && config.RegSize == 8) {
  6363  			break
  6364  		}
  6365  		v.reset(OpTrunc64to16)
  6366  		v0 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
  6367  		v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
  6368  		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6369  		v2.AuxInt = int64ToAuxInt(int64(1<<16 + umagic16(c).m))
  6370  		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  6371  		v3.AddArg(x)
  6372  		v1.AddArg2(v2, v3)
  6373  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6374  		v4.AuxInt = int64ToAuxInt(16 + umagic16(c).s)
  6375  		v0.AddArg2(v1, v4)
  6376  		v.AddArg(v0)
  6377  		return true
  6378  	}
  6379  	// match: (Div16u x (Const16 [c]))
  6380  	// cond: umagicOK16(c) && config.RegSize == 4 && umagic16(c).m&1 == 0
  6381  	// result: (Trunc32to16 (Rsh32Ux64 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(1<<15+umagic16(c).m/2)]) (ZeroExt16to32 x)) (Const64 <typ.UInt64> [16+umagic16(c).s-1])))
  6382  	for {
  6383  		x := v_0
  6384  		if v_1.Op != OpConst16 {
  6385  			break
  6386  		}
  6387  		c := auxIntToInt16(v_1.AuxInt)
  6388  		if !(umagicOK16(c) && config.RegSize == 4 && umagic16(c).m&1 == 0) {
  6389  			break
  6390  		}
  6391  		v.reset(OpTrunc32to16)
  6392  		v0 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32)
  6393  		v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
  6394  		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  6395  		v2.AuxInt = int32ToAuxInt(int32(1<<15 + umagic16(c).m/2))
  6396  		v3 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  6397  		v3.AddArg(x)
  6398  		v1.AddArg2(v2, v3)
  6399  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6400  		v4.AuxInt = int64ToAuxInt(16 + umagic16(c).s - 1)
  6401  		v0.AddArg2(v1, v4)
  6402  		v.AddArg(v0)
  6403  		return true
  6404  	}
  6405  	// match: (Div16u x (Const16 [c]))
  6406  	// cond: umagicOK16(c) && config.RegSize == 4 && c&1 == 0
  6407  	// result: (Trunc32to16 (Rsh32Ux64 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(1<<15+(umagic16(c).m+1)/2)]) (Rsh32Ux64 <typ.UInt32> (ZeroExt16to32 x) (Const64 <typ.UInt64> [1]))) (Const64 <typ.UInt64> [16+umagic16(c).s-2])))
  6408  	for {
  6409  		x := v_0
  6410  		if v_1.Op != OpConst16 {
  6411  			break
  6412  		}
  6413  		c := auxIntToInt16(v_1.AuxInt)
  6414  		if !(umagicOK16(c) && config.RegSize == 4 && c&1 == 0) {
  6415  			break
  6416  		}
  6417  		v.reset(OpTrunc32to16)
  6418  		v0 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32)
  6419  		v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
  6420  		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  6421  		v2.AuxInt = int32ToAuxInt(int32(1<<15 + (umagic16(c).m+1)/2))
  6422  		v3 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32)
  6423  		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  6424  		v4.AddArg(x)
  6425  		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6426  		v5.AuxInt = int64ToAuxInt(1)
  6427  		v3.AddArg2(v4, v5)
  6428  		v1.AddArg2(v2, v3)
  6429  		v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6430  		v6.AuxInt = int64ToAuxInt(16 + umagic16(c).s - 2)
  6431  		v0.AddArg2(v1, v6)
  6432  		v.AddArg(v0)
  6433  		return true
  6434  	}
  6435  	// match: (Div16u x (Const16 [c]))
  6436  	// cond: umagicOK16(c) && config.RegSize == 4 && config.useAvg
  6437  	// result: (Trunc32to16 (Rsh32Ux64 <typ.UInt32> (Avg32u (Lsh32x64 <typ.UInt32> (ZeroExt16to32 x) (Const64 <typ.UInt64> [16])) (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(umagic16(c).m)]) (ZeroExt16to32 x))) (Const64 <typ.UInt64> [16+umagic16(c).s-1])))
  6438  	for {
  6439  		x := v_0
  6440  		if v_1.Op != OpConst16 {
  6441  			break
  6442  		}
  6443  		c := auxIntToInt16(v_1.AuxInt)
  6444  		if !(umagicOK16(c) && config.RegSize == 4 && config.useAvg) {
  6445  			break
  6446  		}
  6447  		v.reset(OpTrunc32to16)
  6448  		v0 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32)
  6449  		v1 := b.NewValue0(v.Pos, OpAvg32u, typ.UInt32)
  6450  		v2 := b.NewValue0(v.Pos, OpLsh32x64, typ.UInt32)
  6451  		v3 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  6452  		v3.AddArg(x)
  6453  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6454  		v4.AuxInt = int64ToAuxInt(16)
  6455  		v2.AddArg2(v3, v4)
  6456  		v5 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
  6457  		v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  6458  		v6.AuxInt = int32ToAuxInt(int32(umagic16(c).m))
  6459  		v5.AddArg2(v6, v3)
  6460  		v1.AddArg2(v2, v5)
  6461  		v7 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6462  		v7.AuxInt = int64ToAuxInt(16 + umagic16(c).s - 1)
  6463  		v0.AddArg2(v1, v7)
  6464  		v.AddArg(v0)
  6465  		return true
  6466  	}
  6467  	return false
  6468  }
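// Unsigned division by a constant becomes a multiply by a precomputed
// reciprocal. The config.RegSize == 8 rule computes
// (x * (1<<16 + umagic16(c).m)) >> (16 + umagic16(c).s) in 64 bits, which
// equals x/c for every uint16 x when umagicOK16(c) holds. On 32-bit
// targets the product (1<<16+m)*x can need 33 bits, so the remaining
// rules either halve the multiplier (when m or c is even) or use Avg32u,
// which computes (a+b)/2 without overflow, to keep the computation in
// 32 bits.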
  6469  func rewriteValuegeneric_OpDiv32(v *Value) bool {
  6470  	v_1 := v.Args[1]
  6471  	v_0 := v.Args[0]
  6472  	b := v.Block
  6473  	config := b.Func.Config
  6474  	typ := &b.Func.Config.Types
  6475  	// match: (Div32 (Const32 [c]) (Const32 [d]))
  6476  	// cond: d != 0
  6477  	// result: (Const32 [c/d])
  6478  	for {
  6479  		if v_0.Op != OpConst32 {
  6480  			break
  6481  		}
  6482  		c := auxIntToInt32(v_0.AuxInt)
  6483  		if v_1.Op != OpConst32 {
  6484  			break
  6485  		}
  6486  		d := auxIntToInt32(v_1.AuxInt)
  6487  		if !(d != 0) {
  6488  			break
  6489  		}
  6490  		v.reset(OpConst32)
  6491  		v.AuxInt = int32ToAuxInt(c / d)
  6492  		return true
  6493  	}
  6494  	// match: (Div32 n (Const32 [c]))
  6495  	// cond: isNonNegative(n) && isPowerOfTwo(c)
  6496  	// result: (Rsh32Ux64 n (Const64 <typ.UInt64> [log32(c)]))
  6497  	for {
  6498  		n := v_0
  6499  		if v_1.Op != OpConst32 {
  6500  			break
  6501  		}
  6502  		c := auxIntToInt32(v_1.AuxInt)
  6503  		if !(isNonNegative(n) && isPowerOfTwo(c)) {
  6504  			break
  6505  		}
  6506  		v.reset(OpRsh32Ux64)
  6507  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6508  		v0.AuxInt = int64ToAuxInt(log32(c))
  6509  		v.AddArg2(n, v0)
  6510  		return true
  6511  	}
  6512  	// match: (Div32 <t> n (Const32 [c]))
  6513  	// cond: c < 0 && c != -1<<31
  6514  	// result: (Neg32 (Div32 <t> n (Const32 <t> [-c])))
  6515  	for {
  6516  		t := v.Type
  6517  		n := v_0
  6518  		if v_1.Op != OpConst32 {
  6519  			break
  6520  		}
  6521  		c := auxIntToInt32(v_1.AuxInt)
  6522  		if !(c < 0 && c != -1<<31) {
  6523  			break
  6524  		}
  6525  		v.reset(OpNeg32)
  6526  		v0 := b.NewValue0(v.Pos, OpDiv32, t)
  6527  		v1 := b.NewValue0(v.Pos, OpConst32, t)
  6528  		v1.AuxInt = int32ToAuxInt(-c)
  6529  		v0.AddArg2(n, v1)
  6530  		v.AddArg(v0)
  6531  		return true
  6532  	}
  6533  	// match: (Div32 <t> x (Const32 [-1<<31]))
  6534  	// result: (Rsh32Ux64 (And32 <t> x (Neg32 <t> x)) (Const64 <typ.UInt64> [31]))
  6535  	for {
  6536  		t := v.Type
  6537  		x := v_0
  6538  		if v_1.Op != OpConst32 || auxIntToInt32(v_1.AuxInt) != -1<<31 {
  6539  			break
  6540  		}
  6541  		v.reset(OpRsh32Ux64)
  6542  		v0 := b.NewValue0(v.Pos, OpAnd32, t)
  6543  		v1 := b.NewValue0(v.Pos, OpNeg32, t)
  6544  		v1.AddArg(x)
  6545  		v0.AddArg2(x, v1)
  6546  		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6547  		v2.AuxInt = int64ToAuxInt(31)
  6548  		v.AddArg2(v0, v2)
  6549  		return true
  6550  	}
  6551  	// match: (Div32 <t> n (Const32 [c]))
  6552  	// cond: isPowerOfTwo(c)
  6553  	// result: (Rsh32x64 (Add32 <t> n (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [int64(32-log32(c))]))) (Const64 <typ.UInt64> [int64(log32(c))]))
  6554  	for {
  6555  		t := v.Type
  6556  		n := v_0
  6557  		if v_1.Op != OpConst32 {
  6558  			break
  6559  		}
  6560  		c := auxIntToInt32(v_1.AuxInt)
  6561  		if !(isPowerOfTwo(c)) {
  6562  			break
  6563  		}
  6564  		v.reset(OpRsh32x64)
  6565  		v0 := b.NewValue0(v.Pos, OpAdd32, t)
  6566  		v1 := b.NewValue0(v.Pos, OpRsh32Ux64, t)
  6567  		v2 := b.NewValue0(v.Pos, OpRsh32x64, t)
  6568  		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6569  		v3.AuxInt = int64ToAuxInt(31)
  6570  		v2.AddArg2(n, v3)
  6571  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6572  		v4.AuxInt = int64ToAuxInt(int64(32 - log32(c)))
  6573  		v1.AddArg2(v2, v4)
  6574  		v0.AddArg2(n, v1)
  6575  		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6576  		v5.AuxInt = int64ToAuxInt(int64(log32(c)))
  6577  		v.AddArg2(v0, v5)
  6578  		return true
  6579  	}
  6580  	// match: (Div32 <t> x (Const32 [c]))
  6581  	// cond: smagicOK32(c) && config.RegSize == 8
  6582  	// result: (Sub32 <t> (Rsh64x64 <t> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(smagic32(c).m)]) (SignExt32to64 x)) (Const64 <typ.UInt64> [32+smagic32(c).s])) (Rsh64x64 <t> (SignExt32to64 x) (Const64 <typ.UInt64> [63])))
  6583  	for {
  6584  		t := v.Type
  6585  		x := v_0
  6586  		if v_1.Op != OpConst32 {
  6587  			break
  6588  		}
  6589  		c := auxIntToInt32(v_1.AuxInt)
  6590  		if !(smagicOK32(c) && config.RegSize == 8) {
  6591  			break
  6592  		}
  6593  		v.reset(OpSub32)
  6594  		v.Type = t
  6595  		v0 := b.NewValue0(v.Pos, OpRsh64x64, t)
  6596  		v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
  6597  		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6598  		v2.AuxInt = int64ToAuxInt(int64(smagic32(c).m))
  6599  		v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  6600  		v3.AddArg(x)
  6601  		v1.AddArg2(v2, v3)
  6602  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6603  		v4.AuxInt = int64ToAuxInt(32 + smagic32(c).s)
  6604  		v0.AddArg2(v1, v4)
  6605  		v5 := b.NewValue0(v.Pos, OpRsh64x64, t)
  6606  		v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6607  		v6.AuxInt = int64ToAuxInt(63)
  6608  		v5.AddArg2(v3, v6)
  6609  		v.AddArg2(v0, v5)
  6610  		return true
  6611  	}
  6612  	// match: (Div32 <t> x (Const32 [c]))
  6613  	// cond: smagicOK32(c) && config.RegSize == 4 && smagic32(c).m&1 == 0 && config.useHmul
  6614  	// result: (Sub32 <t> (Rsh32x64 <t> (Hmul32 <t> (Const32 <typ.UInt32> [int32(smagic32(c).m/2)]) x) (Const64 <typ.UInt64> [smagic32(c).s-1])) (Rsh32x64 <t> x (Const64 <typ.UInt64> [31])))
  6615  	for {
  6616  		t := v.Type
  6617  		x := v_0
  6618  		if v_1.Op != OpConst32 {
  6619  			break
  6620  		}
  6621  		c := auxIntToInt32(v_1.AuxInt)
  6622  		if !(smagicOK32(c) && config.RegSize == 4 && smagic32(c).m&1 == 0 && config.useHmul) {
  6623  			break
  6624  		}
  6625  		v.reset(OpSub32)
  6626  		v.Type = t
  6627  		v0 := b.NewValue0(v.Pos, OpRsh32x64, t)
  6628  		v1 := b.NewValue0(v.Pos, OpHmul32, t)
  6629  		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  6630  		v2.AuxInt = int32ToAuxInt(int32(smagic32(c).m / 2))
  6631  		v1.AddArg2(v2, x)
  6632  		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6633  		v3.AuxInt = int64ToAuxInt(smagic32(c).s - 1)
  6634  		v0.AddArg2(v1, v3)
  6635  		v4 := b.NewValue0(v.Pos, OpRsh32x64, t)
  6636  		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6637  		v5.AuxInt = int64ToAuxInt(31)
  6638  		v4.AddArg2(x, v5)
  6639  		v.AddArg2(v0, v4)
  6640  		return true
  6641  	}
  6642  	// match: (Div32 <t> x (Const32 [c]))
  6643  	// cond: smagicOK32(c) && config.RegSize == 4 && smagic32(c).m&1 != 0 && config.useHmul
  6644  	// result: (Sub32 <t> (Rsh32x64 <t> (Add32 <t> (Hmul32 <t> (Const32 <typ.UInt32> [int32(smagic32(c).m)]) x) x) (Const64 <typ.UInt64> [smagic32(c).s])) (Rsh32x64 <t> x (Const64 <typ.UInt64> [31])))
  6645  	for {
  6646  		t := v.Type
  6647  		x := v_0
  6648  		if v_1.Op != OpConst32 {
  6649  			break
  6650  		}
  6651  		c := auxIntToInt32(v_1.AuxInt)
  6652  		if !(smagicOK32(c) && config.RegSize == 4 && smagic32(c).m&1 != 0 && config.useHmul) {
  6653  			break
  6654  		}
  6655  		v.reset(OpSub32)
  6656  		v.Type = t
  6657  		v0 := b.NewValue0(v.Pos, OpRsh32x64, t)
  6658  		v1 := b.NewValue0(v.Pos, OpAdd32, t)
  6659  		v2 := b.NewValue0(v.Pos, OpHmul32, t)
  6660  		v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  6661  		v3.AuxInt = int32ToAuxInt(int32(smagic32(c).m))
  6662  		v2.AddArg2(v3, x)
  6663  		v1.AddArg2(v2, x)
  6664  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6665  		v4.AuxInt = int64ToAuxInt(smagic32(c).s)
  6666  		v0.AddArg2(v1, v4)
  6667  		v5 := b.NewValue0(v.Pos, OpRsh32x64, t)
  6668  		v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6669  		v6.AuxInt = int64ToAuxInt(31)
  6670  		v5.AddArg2(x, v6)
  6671  		v.AddArg2(v0, v5)
  6672  		return true
  6673  	}
  6674  	return false
  6675  }
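// Div32 mirrors the Div16 rules one width up. With 64-bit registers the
// signed magic-number path widens to a 64-bit multiply; with 32-bit
// registers it needs the high half of a 32x32 multiply, so those rules
// require config.useHmul and use Hmul32. In every magic path the final
// subtraction of the dividend's sign (0 or -1, produced by an arithmetic
// shift) adds 1 for negative dividends, turning the floored quotient from
// the shift into the truncated quotient Go requires.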
  6676  func rewriteValuegeneric_OpDiv32F(v *Value) bool {
  6677  	v_1 := v.Args[1]
  6678  	v_0 := v.Args[0]
  6679  	b := v.Block
  6680  	// match: (Div32F (Const32F [c]) (Const32F [d]))
  6681  	// cond: c/d == c/d
  6682  	// result: (Const32F [c/d])
  6683  	for {
  6684  		if v_0.Op != OpConst32F {
  6685  			break
  6686  		}
  6687  		c := auxIntToFloat32(v_0.AuxInt)
  6688  		if v_1.Op != OpConst32F {
  6689  			break
  6690  		}
  6691  		d := auxIntToFloat32(v_1.AuxInt)
  6692  		if !(c/d == c/d) {
  6693  			break
  6694  		}
  6695  		v.reset(OpConst32F)
  6696  		v.AuxInt = float32ToAuxInt(c / d)
  6697  		return true
  6698  	}
  6699  	// match: (Div32F x (Const32F <t> [c]))
  6700  	// cond: reciprocalExact32(c)
  6701  	// result: (Mul32F x (Const32F <t> [1/c]))
  6702  	for {
  6703  		x := v_0
  6704  		if v_1.Op != OpConst32F {
  6705  			break
  6706  		}
  6707  		t := v_1.Type
  6708  		c := auxIntToFloat32(v_1.AuxInt)
  6709  		if !(reciprocalExact32(c)) {
  6710  			break
  6711  		}
  6712  		v.reset(OpMul32F)
  6713  		v0 := b.NewValue0(v.Pos, OpConst32F, t)
  6714  		v0.AuxInt = float32ToAuxInt(1 / c)
  6715  		v.AddArg2(x, v0)
  6716  		return true
  6717  	}
  6718  	return false
  6719  }
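// Float division folds only when the constant quotient is well defined:
// c/d == c/d is false exactly when the result is NaN, so NaN-producing
// divisions are left for the runtime to perform. Division by a constant
// whose reciprocal is exactly representable (reciprocalExact32, e.g.
// suitable powers of two) is turned into a multiply, which rounds
// identically and is usually cheaper. Div64F below applies the same two
// rules with float64 helpers.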
  6720  func rewriteValuegeneric_OpDiv32u(v *Value) bool {
  6721  	v_1 := v.Args[1]
  6722  	v_0 := v.Args[0]
  6723  	b := v.Block
  6724  	config := b.Func.Config
  6725  	typ := &b.Func.Config.Types
  6726  	// match: (Div32u (Const32 [c]) (Const32 [d]))
  6727  	// cond: d != 0
  6728  	// result: (Const32 [int32(uint32(c)/uint32(d))])
  6729  	for {
  6730  		if v_0.Op != OpConst32 {
  6731  			break
  6732  		}
  6733  		c := auxIntToInt32(v_0.AuxInt)
  6734  		if v_1.Op != OpConst32 {
  6735  			break
  6736  		}
  6737  		d := auxIntToInt32(v_1.AuxInt)
  6738  		if !(d != 0) {
  6739  			break
  6740  		}
  6741  		v.reset(OpConst32)
  6742  		v.AuxInt = int32ToAuxInt(int32(uint32(c) / uint32(d)))
  6743  		return true
  6744  	}
  6745  	// match: (Div32u n (Const32 [c]))
  6746  	// cond: isPowerOfTwo(c)
  6747  	// result: (Rsh32Ux64 n (Const64 <typ.UInt64> [log32(c)]))
  6748  	for {
  6749  		n := v_0
  6750  		if v_1.Op != OpConst32 {
  6751  			break
  6752  		}
  6753  		c := auxIntToInt32(v_1.AuxInt)
  6754  		if !(isPowerOfTwo(c)) {
  6755  			break
  6756  		}
  6757  		v.reset(OpRsh32Ux64)
  6758  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6759  		v0.AuxInt = int64ToAuxInt(log32(c))
  6760  		v.AddArg2(n, v0)
  6761  		return true
  6762  	}
  6763  	// match: (Div32u x (Const32 [c]))
  6764  	// cond: umagicOK32(c) && config.RegSize == 4 && umagic32(c).m&1 == 0 && config.useHmul
  6765  	// result: (Rsh32Ux64 <typ.UInt32> (Hmul32u <typ.UInt32> (Const32 <typ.UInt32> [int32(1<<31+umagic32(c).m/2)]) x) (Const64 <typ.UInt64> [umagic32(c).s-1]))
  6766  	for {
  6767  		x := v_0
  6768  		if v_1.Op != OpConst32 {
  6769  			break
  6770  		}
  6771  		c := auxIntToInt32(v_1.AuxInt)
  6772  		if !(umagicOK32(c) && config.RegSize == 4 && umagic32(c).m&1 == 0 && config.useHmul) {
  6773  			break
  6774  		}
  6775  		v.reset(OpRsh32Ux64)
  6776  		v.Type = typ.UInt32
  6777  		v0 := b.NewValue0(v.Pos, OpHmul32u, typ.UInt32)
  6778  		v1 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  6779  		v1.AuxInt = int32ToAuxInt(int32(1<<31 + umagic32(c).m/2))
  6780  		v0.AddArg2(v1, x)
  6781  		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6782  		v2.AuxInt = int64ToAuxInt(umagic32(c).s - 1)
  6783  		v.AddArg2(v0, v2)
  6784  		return true
  6785  	}
  6786  	// match: (Div32u x (Const32 [c]))
  6787  	// cond: umagicOK32(c) && config.RegSize == 4 && c&1 == 0 && config.useHmul
  6788  	// result: (Rsh32Ux64 <typ.UInt32> (Hmul32u <typ.UInt32> (Const32 <typ.UInt32> [int32(1<<31+(umagic32(c).m+1)/2)]) (Rsh32Ux64 <typ.UInt32> x (Const64 <typ.UInt64> [1]))) (Const64 <typ.UInt64> [umagic32(c).s-2]))
  6789  	for {
  6790  		x := v_0
  6791  		if v_1.Op != OpConst32 {
  6792  			break
  6793  		}
  6794  		c := auxIntToInt32(v_1.AuxInt)
  6795  		if !(umagicOK32(c) && config.RegSize == 4 && c&1 == 0 && config.useHmul) {
  6796  			break
  6797  		}
  6798  		v.reset(OpRsh32Ux64)
  6799  		v.Type = typ.UInt32
  6800  		v0 := b.NewValue0(v.Pos, OpHmul32u, typ.UInt32)
  6801  		v1 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  6802  		v1.AuxInt = int32ToAuxInt(int32(1<<31 + (umagic32(c).m+1)/2))
  6803  		v2 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32)
  6804  		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6805  		v3.AuxInt = int64ToAuxInt(1)
  6806  		v2.AddArg2(x, v3)
  6807  		v0.AddArg2(v1, v2)
  6808  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6809  		v4.AuxInt = int64ToAuxInt(umagic32(c).s - 2)
  6810  		v.AddArg2(v0, v4)
  6811  		return true
  6812  	}
  6813  	// match: (Div32u x (Const32 [c]))
  6814  	// cond: umagicOK32(c) && config.RegSize == 4 && config.useAvg && config.useHmul
  6815  	// result: (Rsh32Ux64 <typ.UInt32> (Avg32u x (Hmul32u <typ.UInt32> (Const32 <typ.UInt32> [int32(umagic32(c).m)]) x)) (Const64 <typ.UInt64> [umagic32(c).s-1]))
  6816  	for {
  6817  		x := v_0
  6818  		if v_1.Op != OpConst32 {
  6819  			break
  6820  		}
  6821  		c := auxIntToInt32(v_1.AuxInt)
  6822  		if !(umagicOK32(c) && config.RegSize == 4 && config.useAvg && config.useHmul) {
  6823  			break
  6824  		}
  6825  		v.reset(OpRsh32Ux64)
  6826  		v.Type = typ.UInt32
  6827  		v0 := b.NewValue0(v.Pos, OpAvg32u, typ.UInt32)
  6828  		v1 := b.NewValue0(v.Pos, OpHmul32u, typ.UInt32)
  6829  		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  6830  		v2.AuxInt = int32ToAuxInt(int32(umagic32(c).m))
  6831  		v1.AddArg2(v2, x)
  6832  		v0.AddArg2(x, v1)
  6833  		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6834  		v3.AuxInt = int64ToAuxInt(umagic32(c).s - 1)
  6835  		v.AddArg2(v0, v3)
  6836  		return true
  6837  	}
  6838  	// match: (Div32u x (Const32 [c]))
  6839  	// cond: umagicOK32(c) && config.RegSize == 8 && umagic32(c).m&1 == 0
  6840  	// result: (Trunc64to32 (Rsh64Ux64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(1<<31+umagic32(c).m/2)]) (ZeroExt32to64 x)) (Const64 <typ.UInt64> [32+umagic32(c).s-1])))
  6841  	for {
  6842  		x := v_0
  6843  		if v_1.Op != OpConst32 {
  6844  			break
  6845  		}
  6846  		c := auxIntToInt32(v_1.AuxInt)
  6847  		if !(umagicOK32(c) && config.RegSize == 8 && umagic32(c).m&1 == 0) {
  6848  			break
  6849  		}
  6850  		v.reset(OpTrunc64to32)
  6851  		v0 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
  6852  		v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
  6853  		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6854  		v2.AuxInt = int64ToAuxInt(int64(1<<31 + umagic32(c).m/2))
  6855  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  6856  		v3.AddArg(x)
  6857  		v1.AddArg2(v2, v3)
  6858  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6859  		v4.AuxInt = int64ToAuxInt(32 + umagic32(c).s - 1)
  6860  		v0.AddArg2(v1, v4)
  6861  		v.AddArg(v0)
  6862  		return true
  6863  	}
  6864  	// match: (Div32u x (Const32 [c]))
  6865  	// cond: umagicOK32(c) && config.RegSize == 8 && c&1 == 0
  6866  	// result: (Trunc64to32 (Rsh64Ux64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(1<<31+(umagic32(c).m+1)/2)]) (Rsh64Ux64 <typ.UInt64> (ZeroExt32to64 x) (Const64 <typ.UInt64> [1]))) (Const64 <typ.UInt64> [32+umagic32(c).s-2])))
  6867  	for {
  6868  		x := v_0
  6869  		if v_1.Op != OpConst32 {
  6870  			break
  6871  		}
  6872  		c := auxIntToInt32(v_1.AuxInt)
  6873  		if !(umagicOK32(c) && config.RegSize == 8 && c&1 == 0) {
  6874  			break
  6875  		}
  6876  		v.reset(OpTrunc64to32)
  6877  		v0 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
  6878  		v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
  6879  		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6880  		v2.AuxInt = int64ToAuxInt(int64(1<<31 + (umagic32(c).m+1)/2))
  6881  		v3 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
  6882  		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  6883  		v4.AddArg(x)
  6884  		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6885  		v5.AuxInt = int64ToAuxInt(1)
  6886  		v3.AddArg2(v4, v5)
  6887  		v1.AddArg2(v2, v3)
  6888  		v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6889  		v6.AuxInt = int64ToAuxInt(32 + umagic32(c).s - 2)
  6890  		v0.AddArg2(v1, v6)
  6891  		v.AddArg(v0)
  6892  		return true
  6893  	}
  6894  	// match: (Div32u x (Const32 [c]))
  6895  	// cond: umagicOK32(c) && config.RegSize == 8 && config.useAvg
  6896  	// result: (Trunc64to32 (Rsh64Ux64 <typ.UInt64> (Avg64u (Lsh64x64 <typ.UInt64> (ZeroExt32to64 x) (Const64 <typ.UInt64> [32])) (Mul64 <typ.UInt64> (Const64 <typ.UInt32> [int64(umagic32(c).m)]) (ZeroExt32to64 x))) (Const64 <typ.UInt64> [32+umagic32(c).s-1])))
  6897  	for {
  6898  		x := v_0
  6899  		if v_1.Op != OpConst32 {
  6900  			break
  6901  		}
  6902  		c := auxIntToInt32(v_1.AuxInt)
  6903  		if !(umagicOK32(c) && config.RegSize == 8 && config.useAvg) {
  6904  			break
  6905  		}
  6906  		v.reset(OpTrunc64to32)
  6907  		v0 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
  6908  		v1 := b.NewValue0(v.Pos, OpAvg64u, typ.UInt64)
  6909  		v2 := b.NewValue0(v.Pos, OpLsh64x64, typ.UInt64)
  6910  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  6911  		v3.AddArg(x)
  6912  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6913  		v4.AuxInt = int64ToAuxInt(32)
  6914  		v2.AddArg2(v3, v4)
  6915  		v5 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
  6916  		v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt32)
  6917  		v6.AuxInt = int64ToAuxInt(int64(umagic32(c).m))
  6918  		v5.AddArg2(v6, v3)
  6919  		v1.AddArg2(v2, v5)
  6920  		v7 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6921  		v7.AuxInt = int64ToAuxInt(32 + umagic32(c).s - 1)
  6922  		v0.AddArg2(v1, v7)
  6923  		v.AddArg(v0)
  6924  		return true
  6925  	}
  6926  	return false
  6927  }
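// Div32u has two families of magic-number rules. With 32-bit registers
// the high half of the product comes from Hmul32u, with the same three
// cases as Div16u: even m, even c, or Avg32u to absorb the top bit of the
// 33-bit multiplier 1<<32+m. With 64-bit registers the product is formed
// directly with Mul64, and Avg64u plays the same overflow-avoiding role
// for the widest multipliers.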
  6928  func rewriteValuegeneric_OpDiv64(v *Value) bool {
  6929  	v_1 := v.Args[1]
  6930  	v_0 := v.Args[0]
  6931  	b := v.Block
  6932  	config := b.Func.Config
  6933  	typ := &b.Func.Config.Types
  6934  	// match: (Div64 (Const64 [c]) (Const64 [d]))
  6935  	// cond: d != 0
  6936  	// result: (Const64 [c/d])
  6937  	for {
  6938  		if v_0.Op != OpConst64 {
  6939  			break
  6940  		}
  6941  		c := auxIntToInt64(v_0.AuxInt)
  6942  		if v_1.Op != OpConst64 {
  6943  			break
  6944  		}
  6945  		d := auxIntToInt64(v_1.AuxInt)
  6946  		if !(d != 0) {
  6947  			break
  6948  		}
  6949  		v.reset(OpConst64)
  6950  		v.AuxInt = int64ToAuxInt(c / d)
  6951  		return true
  6952  	}
  6953  	// match: (Div64 n (Const64 [c]))
  6954  	// cond: isNonNegative(n) && isPowerOfTwo(c)
  6955  	// result: (Rsh64Ux64 n (Const64 <typ.UInt64> [log64(c)]))
  6956  	for {
  6957  		n := v_0
  6958  		if v_1.Op != OpConst64 {
  6959  			break
  6960  		}
  6961  		c := auxIntToInt64(v_1.AuxInt)
  6962  		if !(isNonNegative(n) && isPowerOfTwo(c)) {
  6963  			break
  6964  		}
  6965  		v.reset(OpRsh64Ux64)
  6966  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  6967  		v0.AuxInt = int64ToAuxInt(log64(c))
  6968  		v.AddArg2(n, v0)
  6969  		return true
  6970  	}
  6971  	// match: (Div64 n (Const64 [-1<<63]))
  6972  	// cond: isNonNegative(n)
  6973  	// result: (Const64 [0])
  6974  	for {
  6975  		n := v_0
  6976  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != -1<<63 || !(isNonNegative(n)) {
  6977  			break
  6978  		}
  6979  		v.reset(OpConst64)
  6980  		v.AuxInt = int64ToAuxInt(0)
  6981  		return true
  6982  	}
  6983  	// match: (Div64 <t> n (Const64 [c]))
  6984  	// cond: c < 0 && c != -1<<63
  6985  	// result: (Neg64 (Div64 <t> n (Const64 <t> [-c])))
  6986  	for {
  6987  		t := v.Type
  6988  		n := v_0
  6989  		if v_1.Op != OpConst64 {
  6990  			break
  6991  		}
  6992  		c := auxIntToInt64(v_1.AuxInt)
  6993  		if !(c < 0 && c != -1<<63) {
  6994  			break
  6995  		}
  6996  		v.reset(OpNeg64)
  6997  		v0 := b.NewValue0(v.Pos, OpDiv64, t)
  6998  		v1 := b.NewValue0(v.Pos, OpConst64, t)
  6999  		v1.AuxInt = int64ToAuxInt(-c)
  7000  		v0.AddArg2(n, v1)
  7001  		v.AddArg(v0)
  7002  		return true
  7003  	}
  7004  	// match: (Div64 <t> x (Const64 [-1<<63]))
  7005  	// result: (Rsh64Ux64 (And64 <t> x (Neg64 <t> x)) (Const64 <typ.UInt64> [63]))
  7006  	for {
  7007  		t := v.Type
  7008  		x := v_0
  7009  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != -1<<63 {
  7010  			break
  7011  		}
  7012  		v.reset(OpRsh64Ux64)
  7013  		v0 := b.NewValue0(v.Pos, OpAnd64, t)
  7014  		v1 := b.NewValue0(v.Pos, OpNeg64, t)
  7015  		v1.AddArg(x)
  7016  		v0.AddArg2(x, v1)
  7017  		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7018  		v2.AuxInt = int64ToAuxInt(63)
  7019  		v.AddArg2(v0, v2)
  7020  		return true
  7021  	}
  7022  	// match: (Div64 <t> n (Const64 [c]))
  7023  	// cond: isPowerOfTwo(c)
  7024  	// result: (Rsh64x64 (Add64 <t> n (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [int64(64-log64(c))]))) (Const64 <typ.UInt64> [int64(log64(c))]))
  7025  	for {
  7026  		t := v.Type
  7027  		n := v_0
  7028  		if v_1.Op != OpConst64 {
  7029  			break
  7030  		}
  7031  		c := auxIntToInt64(v_1.AuxInt)
  7032  		if !(isPowerOfTwo(c)) {
  7033  			break
  7034  		}
  7035  		v.reset(OpRsh64x64)
  7036  		v0 := b.NewValue0(v.Pos, OpAdd64, t)
  7037  		v1 := b.NewValue0(v.Pos, OpRsh64Ux64, t)
  7038  		v2 := b.NewValue0(v.Pos, OpRsh64x64, t)
  7039  		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7040  		v3.AuxInt = int64ToAuxInt(63)
  7041  		v2.AddArg2(n, v3)
  7042  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7043  		v4.AuxInt = int64ToAuxInt(int64(64 - log64(c)))
  7044  		v1.AddArg2(v2, v4)
  7045  		v0.AddArg2(n, v1)
  7046  		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7047  		v5.AuxInt = int64ToAuxInt(int64(log64(c)))
  7048  		v.AddArg2(v0, v5)
  7049  		return true
  7050  	}
  7051  	// match: (Div64 <t> x (Const64 [c]))
  7052  	// cond: smagicOK64(c) && smagic64(c).m&1 == 0 && config.useHmul
  7053  	// result: (Sub64 <t> (Rsh64x64 <t> (Hmul64 <t> (Const64 <typ.UInt64> [int64(smagic64(c).m/2)]) x) (Const64 <typ.UInt64> [smagic64(c).s-1])) (Rsh64x64 <t> x (Const64 <typ.UInt64> [63])))
  7054  	for {
  7055  		t := v.Type
  7056  		x := v_0
  7057  		if v_1.Op != OpConst64 {
  7058  			break
  7059  		}
  7060  		c := auxIntToInt64(v_1.AuxInt)
  7061  		if !(smagicOK64(c) && smagic64(c).m&1 == 0 && config.useHmul) {
  7062  			break
  7063  		}
  7064  		v.reset(OpSub64)
  7065  		v.Type = t
  7066  		v0 := b.NewValue0(v.Pos, OpRsh64x64, t)
  7067  		v1 := b.NewValue0(v.Pos, OpHmul64, t)
  7068  		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7069  		v2.AuxInt = int64ToAuxInt(int64(smagic64(c).m / 2))
  7070  		v1.AddArg2(v2, x)
  7071  		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7072  		v3.AuxInt = int64ToAuxInt(smagic64(c).s - 1)
  7073  		v0.AddArg2(v1, v3)
  7074  		v4 := b.NewValue0(v.Pos, OpRsh64x64, t)
  7075  		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7076  		v5.AuxInt = int64ToAuxInt(63)
  7077  		v4.AddArg2(x, v5)
  7078  		v.AddArg2(v0, v4)
  7079  		return true
  7080  	}
  7081  	// match: (Div64 <t> x (Const64 [c]))
  7082  	// cond: smagicOK64(c) && smagic64(c).m&1 != 0 && config.useHmul
  7083  	// result: (Sub64 <t> (Rsh64x64 <t> (Add64 <t> (Hmul64 <t> (Const64 <typ.UInt64> [int64(smagic64(c).m)]) x) x) (Const64 <typ.UInt64> [smagic64(c).s])) (Rsh64x64 <t> x (Const64 <typ.UInt64> [63])))
  7084  	for {
  7085  		t := v.Type
  7086  		x := v_0
  7087  		if v_1.Op != OpConst64 {
  7088  			break
  7089  		}
  7090  		c := auxIntToInt64(v_1.AuxInt)
  7091  		if !(smagicOK64(c) && smagic64(c).m&1 != 0 && config.useHmul) {
  7092  			break
  7093  		}
  7094  		v.reset(OpSub64)
  7095  		v.Type = t
  7096  		v0 := b.NewValue0(v.Pos, OpRsh64x64, t)
  7097  		v1 := b.NewValue0(v.Pos, OpAdd64, t)
  7098  		v2 := b.NewValue0(v.Pos, OpHmul64, t)
  7099  		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7100  		v3.AuxInt = int64ToAuxInt(int64(smagic64(c).m))
  7101  		v2.AddArg2(v3, x)
  7102  		v1.AddArg2(v2, x)
  7103  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7104  		v4.AuxInt = int64ToAuxInt(smagic64(c).s)
  7105  		v0.AddArg2(v1, v4)
  7106  		v5 := b.NewValue0(v.Pos, OpRsh64x64, t)
  7107  		v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7108  		v6.AuxInt = int64ToAuxInt(63)
  7109  		v5.AddArg2(x, v6)
  7110  		v.AddArg2(v0, v5)
  7111  		return true
  7112  	}
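	// Note (illustrative, added for exposition; not generated from the rules):
	// the two smagic rules above implement signed magic-number division,
	// computing q = x/c roughly as
	//   q = (Hmul64(M, x) >> s) - (x >> 63), with M ~ 2^(64+s)/c
	// (the odd-m variant adds x once more because its multiplier does not fit
	// the signed high-multiply on its own). Subtracting x>>63 adds 1 when x is
	// negative, so the quotient truncates toward zero as Go's / requires.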
  7113  	return false
  7114  }
  7115  func rewriteValuegeneric_OpDiv64F(v *Value) bool {
  7116  	v_1 := v.Args[1]
  7117  	v_0 := v.Args[0]
  7118  	b := v.Block
  7119  	// match: (Div64F (Const64F [c]) (Const64F [d]))
  7120  	// cond: c/d == c/d
  7121  	// result: (Const64F [c/d])
  7122  	for {
  7123  		if v_0.Op != OpConst64F {
  7124  			break
  7125  		}
  7126  		c := auxIntToFloat64(v_0.AuxInt)
  7127  		if v_1.Op != OpConst64F {
  7128  			break
  7129  		}
  7130  		d := auxIntToFloat64(v_1.AuxInt)
  7131  		if !(c/d == c/d) {
  7132  			break
  7133  		}
  7134  		v.reset(OpConst64F)
  7135  		v.AuxInt = float64ToAuxInt(c / d)
  7136  		return true
  7137  	}
  7138  	// match: (Div64F x (Const64F <t> [c]))
  7139  	// cond: reciprocalExact64(c)
  7140  	// result: (Mul64F x (Const64F <t> [1/c]))
  7141  	for {
  7142  		x := v_0
  7143  		if v_1.Op != OpConst64F {
  7144  			break
  7145  		}
  7146  		t := v_1.Type
  7147  		c := auxIntToFloat64(v_1.AuxInt)
  7148  		if !(reciprocalExact64(c)) {
  7149  			break
  7150  		}
  7151  		v.reset(OpMul64F)
  7152  		v0 := b.NewValue0(v.Pos, OpConst64F, t)
  7153  		v0.AuxInt = float64ToAuxInt(1 / c)
  7154  		v.AddArg2(x, v0)
  7155  		return true
  7156  	}
  7157  	return false
  7158  }
  7159  func rewriteValuegeneric_OpDiv64u(v *Value) bool {
  7160  	v_1 := v.Args[1]
  7161  	v_0 := v.Args[0]
  7162  	b := v.Block
  7163  	config := b.Func.Config
  7164  	typ := &b.Func.Config.Types
  7165  	// match: (Div64u (Const64 [c]) (Const64 [d]))
  7166  	// cond: d != 0
  7167  	// result: (Const64 [int64(uint64(c)/uint64(d))])
  7168  	for {
  7169  		if v_0.Op != OpConst64 {
  7170  			break
  7171  		}
  7172  		c := auxIntToInt64(v_0.AuxInt)
  7173  		if v_1.Op != OpConst64 {
  7174  			break
  7175  		}
  7176  		d := auxIntToInt64(v_1.AuxInt)
  7177  		if !(d != 0) {
  7178  			break
  7179  		}
  7180  		v.reset(OpConst64)
  7181  		v.AuxInt = int64ToAuxInt(int64(uint64(c) / uint64(d)))
  7182  		return true
  7183  	}
  7184  	// match: (Div64u n (Const64 [c]))
  7185  	// cond: isPowerOfTwo(c)
  7186  	// result: (Rsh64Ux64 n (Const64 <typ.UInt64> [log64(c)]))
  7187  	for {
  7188  		n := v_0
  7189  		if v_1.Op != OpConst64 {
  7190  			break
  7191  		}
  7192  		c := auxIntToInt64(v_1.AuxInt)
  7193  		if !(isPowerOfTwo(c)) {
  7194  			break
  7195  		}
  7196  		v.reset(OpRsh64Ux64)
  7197  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7198  		v0.AuxInt = int64ToAuxInt(log64(c))
  7199  		v.AddArg2(n, v0)
  7200  		return true
  7201  	}
  7202  	// match: (Div64u n (Const64 [-1<<63]))
  7203  	// result: (Rsh64Ux64 n (Const64 <typ.UInt64> [63]))
  7204  	for {
  7205  		n := v_0
  7206  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != -1<<63 {
  7207  			break
  7208  		}
  7209  		v.reset(OpRsh64Ux64)
  7210  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7211  		v0.AuxInt = int64ToAuxInt(63)
  7212  		v.AddArg2(n, v0)
  7213  		return true
  7214  	}
  7215  	// match: (Div64u x (Const64 [c]))
  7216  	// cond: c > 0 && c <= 0xFFFF && umagicOK32(int32(c)) && config.RegSize == 4 && config.useHmul
  7217  	// result: (Add64 (Add64 <typ.UInt64> (Add64 <typ.UInt64> (Lsh64x64 <typ.UInt64> (ZeroExt32to64 (Div32u <typ.UInt32> (Trunc64to32 <typ.UInt32> (Rsh64Ux64 <typ.UInt64> x (Const64 <typ.UInt64> [32]))) (Const32 <typ.UInt32> [int32(c)]))) (Const64 <typ.UInt64> [32])) (ZeroExt32to64 (Div32u <typ.UInt32> (Trunc64to32 <typ.UInt32> x) (Const32 <typ.UInt32> [int32(c)])))) (Mul64 <typ.UInt64> (ZeroExt32to64 <typ.UInt64> (Mod32u <typ.UInt32> (Trunc64to32 <typ.UInt32> (Rsh64Ux64 <typ.UInt64> x (Const64 <typ.UInt64> [32]))) (Const32 <typ.UInt32> [int32(c)]))) (Const64 <typ.UInt64> [int64((1<<32)/c)]))) (ZeroExt32to64 (Div32u <typ.UInt32> (Add32 <typ.UInt32> (Mod32u <typ.UInt32> (Trunc64to32 <typ.UInt32> x) (Const32 <typ.UInt32> [int32(c)])) (Mul32 <typ.UInt32> (Mod32u <typ.UInt32> (Trunc64to32 <typ.UInt32> (Rsh64Ux64 <typ.UInt64> x (Const64 <typ.UInt64> [32]))) (Const32 <typ.UInt32> [int32(c)])) (Const32 <typ.UInt32> [int32((1<<32)%c)]))) (Const32 <typ.UInt32> [int32(c)]))))
  7218  	for {
  7219  		x := v_0
  7220  		if v_1.Op != OpConst64 {
  7221  			break
  7222  		}
  7223  		c := auxIntToInt64(v_1.AuxInt)
  7224  		if !(c > 0 && c <= 0xFFFF && umagicOK32(int32(c)) && config.RegSize == 4 && config.useHmul) {
  7225  			break
  7226  		}
  7227  		v.reset(OpAdd64)
  7228  		v0 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
  7229  		v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
  7230  		v2 := b.NewValue0(v.Pos, OpLsh64x64, typ.UInt64)
  7231  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  7232  		v4 := b.NewValue0(v.Pos, OpDiv32u, typ.UInt32)
  7233  		v5 := b.NewValue0(v.Pos, OpTrunc64to32, typ.UInt32)
  7234  		v6 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
  7235  		v7 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7236  		v7.AuxInt = int64ToAuxInt(32)
  7237  		v6.AddArg2(x, v7)
  7238  		v5.AddArg(v6)
  7239  		v8 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  7240  		v8.AuxInt = int32ToAuxInt(int32(c))
  7241  		v4.AddArg2(v5, v8)
  7242  		v3.AddArg(v4)
  7243  		v2.AddArg2(v3, v7)
  7244  		v9 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  7245  		v10 := b.NewValue0(v.Pos, OpDiv32u, typ.UInt32)
  7246  		v11 := b.NewValue0(v.Pos, OpTrunc64to32, typ.UInt32)
  7247  		v11.AddArg(x)
  7248  		v10.AddArg2(v11, v8)
  7249  		v9.AddArg(v10)
  7250  		v1.AddArg2(v2, v9)
  7251  		v12 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
  7252  		v13 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  7253  		v14 := b.NewValue0(v.Pos, OpMod32u, typ.UInt32)
  7254  		v14.AddArg2(v5, v8)
  7255  		v13.AddArg(v14)
  7256  		v15 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7257  		v15.AuxInt = int64ToAuxInt(int64((1 << 32) / c))
  7258  		v12.AddArg2(v13, v15)
  7259  		v0.AddArg2(v1, v12)
  7260  		v16 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  7261  		v17 := b.NewValue0(v.Pos, OpDiv32u, typ.UInt32)
  7262  		v18 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
  7263  		v19 := b.NewValue0(v.Pos, OpMod32u, typ.UInt32)
  7264  		v19.AddArg2(v11, v8)
  7265  		v20 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
  7266  		v21 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  7267  		v21.AuxInt = int32ToAuxInt(int32((1 << 32) % c))
  7268  		v20.AddArg2(v14, v21)
  7269  		v18.AddArg2(v19, v20)
  7270  		v17.AddArg2(v18, v8)
  7271  		v16.AddArg(v17)
  7272  		v.AddArg2(v0, v16)
  7273  		return true
  7274  	}
  7275  	// match: (Div64u x (Const64 [c]))
  7276  	// cond: umagicOK64(c) && config.RegSize == 8 && umagic64(c).m&1 == 0 && config.useHmul
  7277  	// result: (Rsh64Ux64 <typ.UInt64> (Hmul64u <typ.UInt64> (Const64 <typ.UInt64> [int64(1<<63+umagic64(c).m/2)]) x) (Const64 <typ.UInt64> [umagic64(c).s-1]))
  7278  	for {
  7279  		x := v_0
  7280  		if v_1.Op != OpConst64 {
  7281  			break
  7282  		}
  7283  		c := auxIntToInt64(v_1.AuxInt)
  7284  		if !(umagicOK64(c) && config.RegSize == 8 && umagic64(c).m&1 == 0 && config.useHmul) {
  7285  			break
  7286  		}
  7287  		v.reset(OpRsh64Ux64)
  7288  		v.Type = typ.UInt64
  7289  		v0 := b.NewValue0(v.Pos, OpHmul64u, typ.UInt64)
  7290  		v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7291  		v1.AuxInt = int64ToAuxInt(int64(1<<63 + umagic64(c).m/2))
  7292  		v0.AddArg2(v1, x)
  7293  		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7294  		v2.AuxInt = int64ToAuxInt(umagic64(c).s - 1)
  7295  		v.AddArg2(v0, v2)
  7296  		return true
  7297  	}
  7298  	// match: (Div64u x (Const64 [c]))
  7299  	// cond: umagicOK64(c) && config.RegSize == 8 && c&1 == 0 && config.useHmul
  7300  	// result: (Rsh64Ux64 <typ.UInt64> (Hmul64u <typ.UInt64> (Const64 <typ.UInt64> [int64(1<<63+(umagic64(c).m+1)/2)]) (Rsh64Ux64 <typ.UInt64> x (Const64 <typ.UInt64> [1]))) (Const64 <typ.UInt64> [umagic64(c).s-2]))
  7301  	for {
  7302  		x := v_0
  7303  		if v_1.Op != OpConst64 {
  7304  			break
  7305  		}
  7306  		c := auxIntToInt64(v_1.AuxInt)
  7307  		if !(umagicOK64(c) && config.RegSize == 8 && c&1 == 0 && config.useHmul) {
  7308  			break
  7309  		}
  7310  		v.reset(OpRsh64Ux64)
  7311  		v.Type = typ.UInt64
  7312  		v0 := b.NewValue0(v.Pos, OpHmul64u, typ.UInt64)
  7313  		v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7314  		v1.AuxInt = int64ToAuxInt(int64(1<<63 + (umagic64(c).m+1)/2))
  7315  		v2 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
  7316  		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7317  		v3.AuxInt = int64ToAuxInt(1)
  7318  		v2.AddArg2(x, v3)
  7319  		v0.AddArg2(v1, v2)
  7320  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7321  		v4.AuxInt = int64ToAuxInt(umagic64(c).s - 2)
  7322  		v.AddArg2(v0, v4)
  7323  		return true
  7324  	}
  7325  	// match: (Div64u x (Const64 [c]))
  7326  	// cond: umagicOK64(c) && config.RegSize == 8 && config.useAvg && config.useHmul
  7327  	// result: (Rsh64Ux64 <typ.UInt64> (Avg64u x (Hmul64u <typ.UInt64> (Const64 <typ.UInt64> [int64(umagic64(c).m)]) x)) (Const64 <typ.UInt64> [umagic64(c).s-1]))
  7328  	for {
  7329  		x := v_0
  7330  		if v_1.Op != OpConst64 {
  7331  			break
  7332  		}
  7333  		c := auxIntToInt64(v_1.AuxInt)
  7334  		if !(umagicOK64(c) && config.RegSize == 8 && config.useAvg && config.useHmul) {
  7335  			break
  7336  		}
  7337  		v.reset(OpRsh64Ux64)
  7338  		v.Type = typ.UInt64
  7339  		v0 := b.NewValue0(v.Pos, OpAvg64u, typ.UInt64)
  7340  		v1 := b.NewValue0(v.Pos, OpHmul64u, typ.UInt64)
  7341  		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7342  		v2.AuxInt = int64ToAuxInt(int64(umagic64(c).m))
  7343  		v1.AddArg2(v2, x)
  7344  		v0.AddArg2(x, v1)
  7345  		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7346  		v3.AuxInt = int64ToAuxInt(umagic64(c).s - 1)
  7347  		v.AddArg2(v0, v3)
  7348  		return true
  7349  	}
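	// Note (illustrative, added for exposition; not generated from the rules):
	// the three rules above are the unsigned magic-number forms of x/c,
	// all variations of
	//   q = (x * M) >> (64 + s), with M ~ ceil(2^(64+s)/c),
	// realized with Hmul64u because M needs 65 bits: if M is even its half is
	// used directly; if c is even, x is pre-shifted right by one; otherwise
	// Avg64u(x, Hmul64u(m, x)) supplies the missing 65th bit without overflow.
	// Example: for c = 3, x/3 == Hmul64u(0xAAAAAAAAAAAAAAAB, x) >> 1.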
  7350  	return false
  7351  }
  7352  func rewriteValuegeneric_OpDiv8(v *Value) bool {
  7353  	v_1 := v.Args[1]
  7354  	v_0 := v.Args[0]
  7355  	b := v.Block
  7356  	typ := &b.Func.Config.Types
  7357  	// match: (Div8 (Const8 [c]) (Const8 [d]))
  7358  	// cond: d != 0
  7359  	// result: (Const8 [c/d])
  7360  	for {
  7361  		if v_0.Op != OpConst8 {
  7362  			break
  7363  		}
  7364  		c := auxIntToInt8(v_0.AuxInt)
  7365  		if v_1.Op != OpConst8 {
  7366  			break
  7367  		}
  7368  		d := auxIntToInt8(v_1.AuxInt)
  7369  		if !(d != 0) {
  7370  			break
  7371  		}
  7372  		v.reset(OpConst8)
  7373  		v.AuxInt = int8ToAuxInt(c / d)
  7374  		return true
  7375  	}
  7376  	// match: (Div8 n (Const8 [c]))
  7377  	// cond: isNonNegative(n) && isPowerOfTwo(c)
  7378  	// result: (Rsh8Ux64 n (Const64 <typ.UInt64> [log8(c)]))
  7379  	for {
  7380  		n := v_0
  7381  		if v_1.Op != OpConst8 {
  7382  			break
  7383  		}
  7384  		c := auxIntToInt8(v_1.AuxInt)
  7385  		if !(isNonNegative(n) && isPowerOfTwo(c)) {
  7386  			break
  7387  		}
  7388  		v.reset(OpRsh8Ux64)
  7389  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7390  		v0.AuxInt = int64ToAuxInt(log8(c))
  7391  		v.AddArg2(n, v0)
  7392  		return true
  7393  	}
  7394  	// match: (Div8 <t> n (Const8 [c]))
  7395  	// cond: c < 0 && c != -1<<7
  7396  	// result: (Neg8 (Div8 <t> n (Const8 <t> [-c])))
  7397  	for {
  7398  		t := v.Type
  7399  		n := v_0
  7400  		if v_1.Op != OpConst8 {
  7401  			break
  7402  		}
  7403  		c := auxIntToInt8(v_1.AuxInt)
  7404  		if !(c < 0 && c != -1<<7) {
  7405  			break
  7406  		}
  7407  		v.reset(OpNeg8)
  7408  		v0 := b.NewValue0(v.Pos, OpDiv8, t)
  7409  		v1 := b.NewValue0(v.Pos, OpConst8, t)
  7410  		v1.AuxInt = int8ToAuxInt(-c)
  7411  		v0.AddArg2(n, v1)
  7412  		v.AddArg(v0)
  7413  		return true
  7414  	}
  7415  	// match: (Div8 <t> x (Const8 [-1<<7 ]))
  7416  	// result: (Rsh8Ux64 (And8 <t> x (Neg8 <t> x)) (Const64 <typ.UInt64> [7 ]))
  7417  	for {
  7418  		t := v.Type
  7419  		x := v_0
  7420  		if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != -1<<7 {
  7421  			break
  7422  		}
  7423  		v.reset(OpRsh8Ux64)
  7424  		v0 := b.NewValue0(v.Pos, OpAnd8, t)
  7425  		v1 := b.NewValue0(v.Pos, OpNeg8, t)
  7426  		v1.AddArg(x)
  7427  		v0.AddArg2(x, v1)
  7428  		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7429  		v2.AuxInt = int64ToAuxInt(7)
  7430  		v.AddArg2(v0, v2)
  7431  		return true
  7432  	}
  7433  	// match: (Div8 <t> n (Const8 [c]))
  7434  	// cond: isPowerOfTwo(c)
  7435  	// result: (Rsh8x64 (Add8 <t> n (Rsh8Ux64 <t> (Rsh8x64 <t> n (Const64 <typ.UInt64> [ 7])) (Const64 <typ.UInt64> [int64( 8-log8(c))]))) (Const64 <typ.UInt64> [int64(log8(c))]))
  7436  	for {
  7437  		t := v.Type
  7438  		n := v_0
  7439  		if v_1.Op != OpConst8 {
  7440  			break
  7441  		}
  7442  		c := auxIntToInt8(v_1.AuxInt)
  7443  		if !(isPowerOfTwo(c)) {
  7444  			break
  7445  		}
  7446  		v.reset(OpRsh8x64)
  7447  		v0 := b.NewValue0(v.Pos, OpAdd8, t)
  7448  		v1 := b.NewValue0(v.Pos, OpRsh8Ux64, t)
  7449  		v2 := b.NewValue0(v.Pos, OpRsh8x64, t)
  7450  		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7451  		v3.AuxInt = int64ToAuxInt(7)
  7452  		v2.AddArg2(n, v3)
  7453  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7454  		v4.AuxInt = int64ToAuxInt(int64(8 - log8(c)))
  7455  		v1.AddArg2(v2, v4)
  7456  		v0.AddArg2(n, v1)
  7457  		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7458  		v5.AuxInt = int64ToAuxInt(int64(log8(c)))
  7459  		v.AddArg2(v0, v5)
  7460  		return true
  7461  	}
  7462  	// match: (Div8 <t> x (Const8 [c]))
  7463  	// cond: smagicOK8(c)
  7464  	// result: (Sub8 <t> (Rsh32x64 <t> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(smagic8(c).m)]) (SignExt8to32 x)) (Const64 <typ.UInt64> [8+smagic8(c).s])) (Rsh32x64 <t> (SignExt8to32 x) (Const64 <typ.UInt64> [31])))
  7465  	for {
  7466  		t := v.Type
  7467  		x := v_0
  7468  		if v_1.Op != OpConst8 {
  7469  			break
  7470  		}
  7471  		c := auxIntToInt8(v_1.AuxInt)
  7472  		if !(smagicOK8(c)) {
  7473  			break
  7474  		}
  7475  		v.reset(OpSub8)
  7476  		v.Type = t
  7477  		v0 := b.NewValue0(v.Pos, OpRsh32x64, t)
  7478  		v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
  7479  		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  7480  		v2.AuxInt = int32ToAuxInt(int32(smagic8(c).m))
  7481  		v3 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  7482  		v3.AddArg(x)
  7483  		v1.AddArg2(v2, v3)
  7484  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7485  		v4.AuxInt = int64ToAuxInt(8 + smagic8(c).s)
  7486  		v0.AddArg2(v1, v4)
  7487  		v5 := b.NewValue0(v.Pos, OpRsh32x64, t)
  7488  		v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7489  		v6.AuxInt = int64ToAuxInt(31)
  7490  		v5.AddArg2(v3, v6)
  7491  		v.AddArg2(v0, v5)
  7492  		return true
  7493  	}
  7494  	return false
  7495  }
  7496  func rewriteValuegeneric_OpDiv8u(v *Value) bool {
  7497  	v_1 := v.Args[1]
  7498  	v_0 := v.Args[0]
  7499  	b := v.Block
  7500  	typ := &b.Func.Config.Types
  7501  	// match: (Div8u (Const8 [c]) (Const8 [d]))
  7502  	// cond: d != 0
  7503  	// result: (Const8 [int8(uint8(c)/uint8(d))])
  7504  	for {
  7505  		if v_0.Op != OpConst8 {
  7506  			break
  7507  		}
  7508  		c := auxIntToInt8(v_0.AuxInt)
  7509  		if v_1.Op != OpConst8 {
  7510  			break
  7511  		}
  7512  		d := auxIntToInt8(v_1.AuxInt)
  7513  		if !(d != 0) {
  7514  			break
  7515  		}
  7516  		v.reset(OpConst8)
  7517  		v.AuxInt = int8ToAuxInt(int8(uint8(c) / uint8(d)))
  7518  		return true
  7519  	}
  7520  	// match: (Div8u n (Const8 [c]))
  7521  	// cond: isPowerOfTwo(c)
  7522  	// result: (Rsh8Ux64 n (Const64 <typ.UInt64> [log8(c)]))
  7523  	for {
  7524  		n := v_0
  7525  		if v_1.Op != OpConst8 {
  7526  			break
  7527  		}
  7528  		c := auxIntToInt8(v_1.AuxInt)
  7529  		if !(isPowerOfTwo(c)) {
  7530  			break
  7531  		}
  7532  		v.reset(OpRsh8Ux64)
  7533  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7534  		v0.AuxInt = int64ToAuxInt(log8(c))
  7535  		v.AddArg2(n, v0)
  7536  		return true
  7537  	}
  7538  	// match: (Div8u x (Const8 [c]))
  7539  	// cond: umagicOK8(c)
  7540  	// result: (Trunc32to8 (Rsh32Ux64 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(1<<8+umagic8(c).m)]) (ZeroExt8to32 x)) (Const64 <typ.UInt64> [8+umagic8(c).s])))
  7541  	for {
  7542  		x := v_0
  7543  		if v_1.Op != OpConst8 {
  7544  			break
  7545  		}
  7546  		c := auxIntToInt8(v_1.AuxInt)
  7547  		if !(umagicOK8(c)) {
  7548  			break
  7549  		}
  7550  		v.reset(OpTrunc32to8)
  7551  		v0 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32)
  7552  		v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
  7553  		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  7554  		v2.AuxInt = int32ToAuxInt(int32(1<<8 + umagic8(c).m))
  7555  		v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7556  		v3.AddArg(x)
  7557  		v1.AddArg2(v2, v3)
  7558  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7559  		v4.AuxInt = int64ToAuxInt(8 + umagic8(c).s)
  7560  		v0.AddArg2(v1, v4)
  7561  		v.AddArg(v0)
  7562  		return true
  7563  	}
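	// Note (illustrative, added for exposition; not generated from the rules):
	// for 8-bit operands the magic multiply fits comfortably in 32 bits, so no
	// high-multiply is needed: x is zero-extended to 32 bits, multiplied by
	// 2^8+m ~ ceil(2^(8+s)/c), and shifted right by 8+s.
	// Example: for c = 3 this amounts to Trunc32to8((ZeroExt8to32(x)*342) >> 10),
	// since ceil(2^10/3) = 342.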
  7564  	return false
  7565  }
  7566  func rewriteValuegeneric_OpEq16(v *Value) bool {
  7567  	v_1 := v.Args[1]
  7568  	v_0 := v.Args[0]
  7569  	b := v.Block
  7570  	config := b.Func.Config
  7571  	typ := &b.Func.Config.Types
  7572  	// match: (Eq16 x x)
  7573  	// result: (ConstBool [true])
  7574  	for {
  7575  		x := v_0
  7576  		if x != v_1 {
  7577  			break
  7578  		}
  7579  		v.reset(OpConstBool)
  7580  		v.AuxInt = boolToAuxInt(true)
  7581  		return true
  7582  	}
  7583  	// match: (Eq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x))
  7584  	// result: (Eq16 (Const16 <t> [c-d]) x)
  7585  	for {
  7586  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7587  			if v_0.Op != OpConst16 {
  7588  				continue
  7589  			}
  7590  			t := v_0.Type
  7591  			c := auxIntToInt16(v_0.AuxInt)
  7592  			if v_1.Op != OpAdd16 {
  7593  				continue
  7594  			}
  7595  			_ = v_1.Args[1]
  7596  			v_1_0 := v_1.Args[0]
  7597  			v_1_1 := v_1.Args[1]
  7598  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  7599  				if v_1_0.Op != OpConst16 || v_1_0.Type != t {
  7600  					continue
  7601  				}
  7602  				d := auxIntToInt16(v_1_0.AuxInt)
  7603  				x := v_1_1
  7604  				v.reset(OpEq16)
  7605  				v0 := b.NewValue0(v.Pos, OpConst16, t)
  7606  				v0.AuxInt = int16ToAuxInt(c - d)
  7607  				v.AddArg2(v0, x)
  7608  				return true
  7609  			}
  7610  		}
  7611  		break
  7612  	}
  7613  	// match: (Eq16 (Const16 [c]) (Const16 [d]))
  7614  	// result: (ConstBool [c == d])
  7615  	for {
  7616  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7617  			if v_0.Op != OpConst16 {
  7618  				continue
  7619  			}
  7620  			c := auxIntToInt16(v_0.AuxInt)
  7621  			if v_1.Op != OpConst16 {
  7622  				continue
  7623  			}
  7624  			d := auxIntToInt16(v_1.AuxInt)
  7625  			v.reset(OpConstBool)
  7626  			v.AuxInt = boolToAuxInt(c == d)
  7627  			return true
  7628  		}
  7629  		break
  7630  	}
  7631  	// match: (Eq16 (Mod16u x (Const16 [c])) (Const16 [0]))
  7632  	// cond: x.Op != OpConst16 && udivisibleOK16(c) && !hasSmallRotate(config)
  7633  	// result: (Eq32 (Mod32u <typ.UInt32> (ZeroExt16to32 <typ.UInt32> x) (Const32 <typ.UInt32> [int32(uint16(c))])) (Const32 <typ.UInt32> [0]))
  7634  	for {
  7635  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7636  			if v_0.Op != OpMod16u {
  7637  				continue
  7638  			}
  7639  			_ = v_0.Args[1]
  7640  			x := v_0.Args[0]
  7641  			v_0_1 := v_0.Args[1]
  7642  			if v_0_1.Op != OpConst16 {
  7643  				continue
  7644  			}
  7645  			c := auxIntToInt16(v_0_1.AuxInt)
  7646  			if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != 0 || !(x.Op != OpConst16 && udivisibleOK16(c) && !hasSmallRotate(config)) {
  7647  				continue
  7648  			}
  7649  			v.reset(OpEq32)
  7650  			v0 := b.NewValue0(v.Pos, OpMod32u, typ.UInt32)
  7651  			v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7652  			v1.AddArg(x)
  7653  			v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  7654  			v2.AuxInt = int32ToAuxInt(int32(uint16(c)))
  7655  			v0.AddArg2(v1, v2)
  7656  			v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  7657  			v3.AuxInt = int32ToAuxInt(0)
  7658  			v.AddArg2(v0, v3)
  7659  			return true
  7660  		}
  7661  		break
  7662  	}
  7663  	// match: (Eq16 (Mod16 x (Const16 [c])) (Const16 [0]))
  7664  	// cond: x.Op != OpConst16 && sdivisibleOK16(c) && !hasSmallRotate(config)
  7665  	// result: (Eq32 (Mod32 <typ.Int32> (SignExt16to32 <typ.Int32> x) (Const32 <typ.Int32> [int32(c)])) (Const32 <typ.Int32> [0]))
  7666  	for {
  7667  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7668  			if v_0.Op != OpMod16 {
  7669  				continue
  7670  			}
  7671  			_ = v_0.Args[1]
  7672  			x := v_0.Args[0]
  7673  			v_0_1 := v_0.Args[1]
  7674  			if v_0_1.Op != OpConst16 {
  7675  				continue
  7676  			}
  7677  			c := auxIntToInt16(v_0_1.AuxInt)
  7678  			if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != 0 || !(x.Op != OpConst16 && sdivisibleOK16(c) && !hasSmallRotate(config)) {
  7679  				continue
  7680  			}
  7681  			v.reset(OpEq32)
  7682  			v0 := b.NewValue0(v.Pos, OpMod32, typ.Int32)
  7683  			v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  7684  			v1.AddArg(x)
  7685  			v2 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
  7686  			v2.AuxInt = int32ToAuxInt(int32(c))
  7687  			v0.AddArg2(v1, v2)
  7688  			v3 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
  7689  			v3.AuxInt = int32ToAuxInt(0)
  7690  			v.AddArg2(v0, v3)
  7691  			return true
  7692  		}
  7693  		break
  7694  	}
  7695  	// match: (Eq16 x (Mul16 (Const16 [c]) (Trunc64to16 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt16to64 x)) (Const64 [s]))) ) )
  7696  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic16(c).m) && s == 16+umagic16(c).s && x.Op != OpConst16 && udivisibleOK16(c)
  7697  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int16(udivisible16(c).m)]) x) (Const16 <typ.UInt16> [int16(16-udivisible16(c).k)]) ) (Const16 <typ.UInt16> [int16(udivisible16(c).max)]) )
  7698  	for {
  7699  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7700  			x := v_0
  7701  			if v_1.Op != OpMul16 {
  7702  				continue
  7703  			}
  7704  			_ = v_1.Args[1]
  7705  			v_1_0 := v_1.Args[0]
  7706  			v_1_1 := v_1.Args[1]
  7707  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  7708  				if v_1_0.Op != OpConst16 {
  7709  					continue
  7710  				}
  7711  				c := auxIntToInt16(v_1_0.AuxInt)
  7712  				if v_1_1.Op != OpTrunc64to16 {
  7713  					continue
  7714  				}
  7715  				v_1_1_0 := v_1_1.Args[0]
  7716  				if v_1_1_0.Op != OpRsh64Ux64 {
  7717  					continue
  7718  				}
  7719  				_ = v_1_1_0.Args[1]
  7720  				mul := v_1_1_0.Args[0]
  7721  				if mul.Op != OpMul64 {
  7722  					continue
  7723  				}
  7724  				_ = mul.Args[1]
  7725  				mul_0 := mul.Args[0]
  7726  				mul_1 := mul.Args[1]
  7727  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  7728  					if mul_0.Op != OpConst64 {
  7729  						continue
  7730  					}
  7731  					m := auxIntToInt64(mul_0.AuxInt)
  7732  					if mul_1.Op != OpZeroExt16to64 || x != mul_1.Args[0] {
  7733  						continue
  7734  					}
  7735  					v_1_1_0_1 := v_1_1_0.Args[1]
  7736  					if v_1_1_0_1.Op != OpConst64 {
  7737  						continue
  7738  					}
  7739  					s := auxIntToInt64(v_1_1_0_1.AuxInt)
  7740  					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic16(c).m) && s == 16+umagic16(c).s && x.Op != OpConst16 && udivisibleOK16(c)) {
  7741  						continue
  7742  					}
  7743  					v.reset(OpLeq16U)
  7744  					v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
  7745  					v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
  7746  					v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  7747  					v2.AuxInt = int16ToAuxInt(int16(udivisible16(c).m))
  7748  					v1.AddArg2(v2, x)
  7749  					v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  7750  					v3.AuxInt = int16ToAuxInt(int16(16 - udivisible16(c).k))
  7751  					v0.AddArg2(v1, v3)
  7752  					v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  7753  					v4.AuxInt = int16ToAuxInt(int16(udivisible16(c).max))
  7754  					v.AddArg2(v0, v4)
  7755  					return true
  7756  				}
  7757  			}
  7758  		}
  7759  		break
  7760  	}
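	// Note (illustrative, added for exposition; not generated from the rules):
	// this rule and the variants below recognize "x == c*(x/c)" (the expanded
	// form of x%c == 0 after the Div16u rules have fired) and replace the
	// division with a divisibility test:
	//   rotate-right-by-k(x * m') <= max
	// where m' is the multiplicative inverse of c>>k modulo 2^16, k is the
	// number of trailing zero bits of c, and max = (2^16 - 1) / c. The
	// RotateLeft16 by 16-k in the result is that rotate-right by k.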
  7761  	// match: (Eq16 x (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt16to32 x)) (Const64 [s]))) ) )
  7762  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<15+umagic16(c).m/2) && s == 16+umagic16(c).s-1 && x.Op != OpConst16 && udivisibleOK16(c)
  7763  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int16(udivisible16(c).m)]) x) (Const16 <typ.UInt16> [int16(16-udivisible16(c).k)]) ) (Const16 <typ.UInt16> [int16(udivisible16(c).max)]) )
  7764  	for {
  7765  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7766  			x := v_0
  7767  			if v_1.Op != OpMul16 {
  7768  				continue
  7769  			}
  7770  			_ = v_1.Args[1]
  7771  			v_1_0 := v_1.Args[0]
  7772  			v_1_1 := v_1.Args[1]
  7773  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  7774  				if v_1_0.Op != OpConst16 {
  7775  					continue
  7776  				}
  7777  				c := auxIntToInt16(v_1_0.AuxInt)
  7778  				if v_1_1.Op != OpTrunc32to16 {
  7779  					continue
  7780  				}
  7781  				v_1_1_0 := v_1_1.Args[0]
  7782  				if v_1_1_0.Op != OpRsh32Ux64 {
  7783  					continue
  7784  				}
  7785  				_ = v_1_1_0.Args[1]
  7786  				mul := v_1_1_0.Args[0]
  7787  				if mul.Op != OpMul32 {
  7788  					continue
  7789  				}
  7790  				_ = mul.Args[1]
  7791  				mul_0 := mul.Args[0]
  7792  				mul_1 := mul.Args[1]
  7793  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  7794  					if mul_0.Op != OpConst32 {
  7795  						continue
  7796  					}
  7797  					m := auxIntToInt32(mul_0.AuxInt)
  7798  					if mul_1.Op != OpZeroExt16to32 || x != mul_1.Args[0] {
  7799  						continue
  7800  					}
  7801  					v_1_1_0_1 := v_1_1_0.Args[1]
  7802  					if v_1_1_0_1.Op != OpConst64 {
  7803  						continue
  7804  					}
  7805  					s := auxIntToInt64(v_1_1_0_1.AuxInt)
  7806  					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<15+umagic16(c).m/2) && s == 16+umagic16(c).s-1 && x.Op != OpConst16 && udivisibleOK16(c)) {
  7807  						continue
  7808  					}
  7809  					v.reset(OpLeq16U)
  7810  					v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
  7811  					v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
  7812  					v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  7813  					v2.AuxInt = int16ToAuxInt(int16(udivisible16(c).m))
  7814  					v1.AddArg2(v2, x)
  7815  					v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  7816  					v3.AuxInt = int16ToAuxInt(int16(16 - udivisible16(c).k))
  7817  					v0.AddArg2(v1, v3)
  7818  					v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  7819  					v4.AuxInt = int16ToAuxInt(int16(udivisible16(c).max))
  7820  					v.AddArg2(v0, v4)
  7821  					return true
  7822  				}
  7823  			}
  7824  		}
  7825  		break
  7826  	}
  7827  	// match: (Eq16 x (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (Rsh32Ux64 (ZeroExt16to32 x) (Const64 [1]))) (Const64 [s]))) ) )
  7828  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<15+(umagic16(c).m+1)/2) && s == 16+umagic16(c).s-2 && x.Op != OpConst16 && udivisibleOK16(c)
  7829  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int16(udivisible16(c).m)]) x) (Const16 <typ.UInt16> [int16(16-udivisible16(c).k)]) ) (Const16 <typ.UInt16> [int16(udivisible16(c).max)]) )
  7830  	for {
  7831  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7832  			x := v_0
  7833  			if v_1.Op != OpMul16 {
  7834  				continue
  7835  			}
  7836  			_ = v_1.Args[1]
  7837  			v_1_0 := v_1.Args[0]
  7838  			v_1_1 := v_1.Args[1]
  7839  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  7840  				if v_1_0.Op != OpConst16 {
  7841  					continue
  7842  				}
  7843  				c := auxIntToInt16(v_1_0.AuxInt)
  7844  				if v_1_1.Op != OpTrunc32to16 {
  7845  					continue
  7846  				}
  7847  				v_1_1_0 := v_1_1.Args[0]
  7848  				if v_1_1_0.Op != OpRsh32Ux64 {
  7849  					continue
  7850  				}
  7851  				_ = v_1_1_0.Args[1]
  7852  				mul := v_1_1_0.Args[0]
  7853  				if mul.Op != OpMul32 {
  7854  					continue
  7855  				}
  7856  				_ = mul.Args[1]
  7857  				mul_0 := mul.Args[0]
  7858  				mul_1 := mul.Args[1]
  7859  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  7860  					if mul_0.Op != OpConst32 {
  7861  						continue
  7862  					}
  7863  					m := auxIntToInt32(mul_0.AuxInt)
  7864  					if mul_1.Op != OpRsh32Ux64 {
  7865  						continue
  7866  					}
  7867  					_ = mul_1.Args[1]
  7868  					mul_1_0 := mul_1.Args[0]
  7869  					if mul_1_0.Op != OpZeroExt16to32 || x != mul_1_0.Args[0] {
  7870  						continue
  7871  					}
  7872  					mul_1_1 := mul_1.Args[1]
  7873  					if mul_1_1.Op != OpConst64 || auxIntToInt64(mul_1_1.AuxInt) != 1 {
  7874  						continue
  7875  					}
  7876  					v_1_1_0_1 := v_1_1_0.Args[1]
  7877  					if v_1_1_0_1.Op != OpConst64 {
  7878  						continue
  7879  					}
  7880  					s := auxIntToInt64(v_1_1_0_1.AuxInt)
  7881  					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<15+(umagic16(c).m+1)/2) && s == 16+umagic16(c).s-2 && x.Op != OpConst16 && udivisibleOK16(c)) {
  7882  						continue
  7883  					}
  7884  					v.reset(OpLeq16U)
  7885  					v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
  7886  					v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
  7887  					v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  7888  					v2.AuxInt = int16ToAuxInt(int16(udivisible16(c).m))
  7889  					v1.AddArg2(v2, x)
  7890  					v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  7891  					v3.AuxInt = int16ToAuxInt(int16(16 - udivisible16(c).k))
  7892  					v0.AddArg2(v1, v3)
  7893  					v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  7894  					v4.AuxInt = int16ToAuxInt(int16(udivisible16(c).max))
  7895  					v.AddArg2(v0, v4)
  7896  					return true
  7897  				}
  7898  			}
  7899  		}
  7900  		break
  7901  	}
  7902  	// match: (Eq16 x (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 (Avg32u (Lsh32x64 (ZeroExt16to32 x) (Const64 [16])) mul:(Mul32 (Const32 [m]) (ZeroExt16to32 x))) (Const64 [s]))) ) )
  7903  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(umagic16(c).m) && s == 16+umagic16(c).s-1 && x.Op != OpConst16 && udivisibleOK16(c)
  7904  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int16(udivisible16(c).m)]) x) (Const16 <typ.UInt16> [int16(16-udivisible16(c).k)]) ) (Const16 <typ.UInt16> [int16(udivisible16(c).max)]) )
  7905  	for {
  7906  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7907  			x := v_0
  7908  			if v_1.Op != OpMul16 {
  7909  				continue
  7910  			}
  7911  			_ = v_1.Args[1]
  7912  			v_1_0 := v_1.Args[0]
  7913  			v_1_1 := v_1.Args[1]
  7914  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  7915  				if v_1_0.Op != OpConst16 {
  7916  					continue
  7917  				}
  7918  				c := auxIntToInt16(v_1_0.AuxInt)
  7919  				if v_1_1.Op != OpTrunc32to16 {
  7920  					continue
  7921  				}
  7922  				v_1_1_0 := v_1_1.Args[0]
  7923  				if v_1_1_0.Op != OpRsh32Ux64 {
  7924  					continue
  7925  				}
  7926  				_ = v_1_1_0.Args[1]
  7927  				v_1_1_0_0 := v_1_1_0.Args[0]
  7928  				if v_1_1_0_0.Op != OpAvg32u {
  7929  					continue
  7930  				}
  7931  				_ = v_1_1_0_0.Args[1]
  7932  				v_1_1_0_0_0 := v_1_1_0_0.Args[0]
  7933  				if v_1_1_0_0_0.Op != OpLsh32x64 {
  7934  					continue
  7935  				}
  7936  				_ = v_1_1_0_0_0.Args[1]
  7937  				v_1_1_0_0_0_0 := v_1_1_0_0_0.Args[0]
  7938  				if v_1_1_0_0_0_0.Op != OpZeroExt16to32 || x != v_1_1_0_0_0_0.Args[0] {
  7939  					continue
  7940  				}
  7941  				v_1_1_0_0_0_1 := v_1_1_0_0_0.Args[1]
  7942  				if v_1_1_0_0_0_1.Op != OpConst64 || auxIntToInt64(v_1_1_0_0_0_1.AuxInt) != 16 {
  7943  					continue
  7944  				}
  7945  				mul := v_1_1_0_0.Args[1]
  7946  				if mul.Op != OpMul32 {
  7947  					continue
  7948  				}
  7949  				_ = mul.Args[1]
  7950  				mul_0 := mul.Args[0]
  7951  				mul_1 := mul.Args[1]
  7952  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  7953  					if mul_0.Op != OpConst32 {
  7954  						continue
  7955  					}
  7956  					m := auxIntToInt32(mul_0.AuxInt)
  7957  					if mul_1.Op != OpZeroExt16to32 || x != mul_1.Args[0] {
  7958  						continue
  7959  					}
  7960  					v_1_1_0_1 := v_1_1_0.Args[1]
  7961  					if v_1_1_0_1.Op != OpConst64 {
  7962  						continue
  7963  					}
  7964  					s := auxIntToInt64(v_1_1_0_1.AuxInt)
  7965  					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(umagic16(c).m) && s == 16+umagic16(c).s-1 && x.Op != OpConst16 && udivisibleOK16(c)) {
  7966  						continue
  7967  					}
  7968  					v.reset(OpLeq16U)
  7969  					v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
  7970  					v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
  7971  					v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  7972  					v2.AuxInt = int16ToAuxInt(int16(udivisible16(c).m))
  7973  					v1.AddArg2(v2, x)
  7974  					v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  7975  					v3.AuxInt = int16ToAuxInt(int16(16 - udivisible16(c).k))
  7976  					v0.AddArg2(v1, v3)
  7977  					v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  7978  					v4.AuxInt = int16ToAuxInt(int16(udivisible16(c).max))
  7979  					v.AddArg2(v0, v4)
  7980  					return true
  7981  				}
  7982  			}
  7983  		}
  7984  		break
  7985  	}
  7986  	// match: (Eq16 x (Mul16 (Const16 [c]) (Sub16 (Rsh32x64 mul:(Mul32 (Const32 [m]) (SignExt16to32 x)) (Const64 [s])) (Rsh32x64 (SignExt16to32 x) (Const64 [31]))) ) )
  7987  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(smagic16(c).m) && s == 16+smagic16(c).s && x.Op != OpConst16 && sdivisibleOK16(c)
  7988  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Add16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int16(sdivisible16(c).m)]) x) (Const16 <typ.UInt16> [int16(sdivisible16(c).a)]) ) (Const16 <typ.UInt16> [int16(16-sdivisible16(c).k)]) ) (Const16 <typ.UInt16> [int16(sdivisible16(c).max)]) )
  7989  	for {
  7990  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  7991  			x := v_0
  7992  			if v_1.Op != OpMul16 {
  7993  				continue
  7994  			}
  7995  			_ = v_1.Args[1]
  7996  			v_1_0 := v_1.Args[0]
  7997  			v_1_1 := v_1.Args[1]
  7998  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  7999  				if v_1_0.Op != OpConst16 {
  8000  					continue
  8001  				}
  8002  				c := auxIntToInt16(v_1_0.AuxInt)
  8003  				if v_1_1.Op != OpSub16 {
  8004  					continue
  8005  				}
  8006  				_ = v_1_1.Args[1]
  8007  				v_1_1_0 := v_1_1.Args[0]
  8008  				if v_1_1_0.Op != OpRsh32x64 {
  8009  					continue
  8010  				}
  8011  				_ = v_1_1_0.Args[1]
  8012  				mul := v_1_1_0.Args[0]
  8013  				if mul.Op != OpMul32 {
  8014  					continue
  8015  				}
  8016  				_ = mul.Args[1]
  8017  				mul_0 := mul.Args[0]
  8018  				mul_1 := mul.Args[1]
  8019  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  8020  					if mul_0.Op != OpConst32 {
  8021  						continue
  8022  					}
  8023  					m := auxIntToInt32(mul_0.AuxInt)
  8024  					if mul_1.Op != OpSignExt16to32 || x != mul_1.Args[0] {
  8025  						continue
  8026  					}
  8027  					v_1_1_0_1 := v_1_1_0.Args[1]
  8028  					if v_1_1_0_1.Op != OpConst64 {
  8029  						continue
  8030  					}
  8031  					s := auxIntToInt64(v_1_1_0_1.AuxInt)
  8032  					v_1_1_1 := v_1_1.Args[1]
  8033  					if v_1_1_1.Op != OpRsh32x64 {
  8034  						continue
  8035  					}
  8036  					_ = v_1_1_1.Args[1]
  8037  					v_1_1_1_0 := v_1_1_1.Args[0]
  8038  					if v_1_1_1_0.Op != OpSignExt16to32 || x != v_1_1_1_0.Args[0] {
  8039  						continue
  8040  					}
  8041  					v_1_1_1_1 := v_1_1_1.Args[1]
  8042  					if v_1_1_1_1.Op != OpConst64 || auxIntToInt64(v_1_1_1_1.AuxInt) != 31 || !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(smagic16(c).m) && s == 16+smagic16(c).s && x.Op != OpConst16 && sdivisibleOK16(c)) {
  8043  						continue
  8044  					}
  8045  					v.reset(OpLeq16U)
  8046  					v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
  8047  					v1 := b.NewValue0(v.Pos, OpAdd16, typ.UInt16)
  8048  					v2 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
  8049  					v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  8050  					v3.AuxInt = int16ToAuxInt(int16(sdivisible16(c).m))
  8051  					v2.AddArg2(v3, x)
  8052  					v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  8053  					v4.AuxInt = int16ToAuxInt(int16(sdivisible16(c).a))
  8054  					v1.AddArg2(v2, v4)
  8055  					v5 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  8056  					v5.AuxInt = int16ToAuxInt(int16(16 - sdivisible16(c).k))
  8057  					v0.AddArg2(v1, v5)
  8058  					v6 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  8059  					v6.AuxInt = int16ToAuxInt(int16(sdivisible16(c).max))
  8060  					v.AddArg2(v0, v6)
  8061  					return true
  8062  				}
  8063  			}
  8064  		}
  8065  		break
  8066  	}
  8067  	// match: (Eq16 n (Lsh16x64 (Rsh16x64 (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) )
  8068  	// cond: k > 0 && k < 15 && kbar == 16 - k
  8069  	// result: (Eq16 (And16 <t> n (Const16 <t> [1<<uint(k)-1])) (Const16 <t> [0]))
  8070  	for {
  8071  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  8072  			n := v_0
  8073  			if v_1.Op != OpLsh16x64 {
  8074  				continue
  8075  			}
  8076  			_ = v_1.Args[1]
  8077  			v_1_0 := v_1.Args[0]
  8078  			if v_1_0.Op != OpRsh16x64 {
  8079  				continue
  8080  			}
  8081  			_ = v_1_0.Args[1]
  8082  			v_1_0_0 := v_1_0.Args[0]
  8083  			if v_1_0_0.Op != OpAdd16 {
  8084  				continue
  8085  			}
  8086  			t := v_1_0_0.Type
  8087  			_ = v_1_0_0.Args[1]
  8088  			v_1_0_0_0 := v_1_0_0.Args[0]
  8089  			v_1_0_0_1 := v_1_0_0.Args[1]
  8090  			for _i1 := 0; _i1 <= 1; _i1, v_1_0_0_0, v_1_0_0_1 = _i1+1, v_1_0_0_1, v_1_0_0_0 {
  8091  				if n != v_1_0_0_0 || v_1_0_0_1.Op != OpRsh16Ux64 || v_1_0_0_1.Type != t {
  8092  					continue
  8093  				}
  8094  				_ = v_1_0_0_1.Args[1]
  8095  				v_1_0_0_1_0 := v_1_0_0_1.Args[0]
  8096  				if v_1_0_0_1_0.Op != OpRsh16x64 || v_1_0_0_1_0.Type != t {
  8097  					continue
  8098  				}
  8099  				_ = v_1_0_0_1_0.Args[1]
  8100  				if n != v_1_0_0_1_0.Args[0] {
  8101  					continue
  8102  				}
  8103  				v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
  8104  				if v_1_0_0_1_0_1.Op != OpConst64 || v_1_0_0_1_0_1.Type != typ.UInt64 || auxIntToInt64(v_1_0_0_1_0_1.AuxInt) != 15 {
  8105  					continue
  8106  				}
  8107  				v_1_0_0_1_1 := v_1_0_0_1.Args[1]
  8108  				if v_1_0_0_1_1.Op != OpConst64 || v_1_0_0_1_1.Type != typ.UInt64 {
  8109  					continue
  8110  				}
  8111  				kbar := auxIntToInt64(v_1_0_0_1_1.AuxInt)
  8112  				v_1_0_1 := v_1_0.Args[1]
  8113  				if v_1_0_1.Op != OpConst64 || v_1_0_1.Type != typ.UInt64 {
  8114  					continue
  8115  				}
  8116  				k := auxIntToInt64(v_1_0_1.AuxInt)
  8117  				v_1_1 := v_1.Args[1]
  8118  				if v_1_1.Op != OpConst64 || v_1_1.Type != typ.UInt64 || auxIntToInt64(v_1_1.AuxInt) != k || !(k > 0 && k < 15 && kbar == 16-k) {
  8119  					continue
  8120  				}
  8121  				v.reset(OpEq16)
  8122  				v0 := b.NewValue0(v.Pos, OpAnd16, t)
  8123  				v1 := b.NewValue0(v.Pos, OpConst16, t)
  8124  				v1.AuxInt = int16ToAuxInt(1<<uint(k) - 1)
  8125  				v0.AddArg2(n, v1)
  8126  				v2 := b.NewValue0(v.Pos, OpConst16, t)
  8127  				v2.AuxInt = int16ToAuxInt(0)
  8128  				v.AddArg2(v0, v2)
  8129  				return true
  8130  			}
  8131  		}
  8132  		break
  8133  	}
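	// Note (illustrative, added for exposition; not generated from the rules):
	// the rule above matches the expanded form of "n == (n / (1<<k)) * (1<<k)"
	// for signed n (the Add/Rsh chain is the rounding bias produced by the
	// signed power-of-two Div16 rule) and reduces the whole check to a mask
	// test: n & (1<<k - 1) == 0.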
  8134  	// match: (Eq16 s:(Sub16 x y) (Const16 [0]))
  8135  	// cond: s.Uses == 1
  8136  	// result: (Eq16 x y)
  8137  	for {
  8138  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  8139  			s := v_0
  8140  			if s.Op != OpSub16 {
  8141  				continue
  8142  			}
  8143  			y := s.Args[1]
  8144  			x := s.Args[0]
  8145  			if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != 0 || !(s.Uses == 1) {
  8146  				continue
  8147  			}
  8148  			v.reset(OpEq16)
  8149  			v.AddArg2(x, y)
  8150  			return true
  8151  		}
  8152  		break
  8153  	}
  8154  	// match: (Eq16 (And16 <t> x (Const16 <t> [y])) (Const16 <t> [y]))
  8155  	// cond: oneBit16(y)
  8156  	// result: (Neq16 (And16 <t> x (Const16 <t> [y])) (Const16 <t> [0]))
  8157  	for {
  8158  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  8159  			if v_0.Op != OpAnd16 {
  8160  				continue
  8161  			}
  8162  			t := v_0.Type
  8163  			_ = v_0.Args[1]
  8164  			v_0_0 := v_0.Args[0]
  8165  			v_0_1 := v_0.Args[1]
  8166  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
  8167  				x := v_0_0
  8168  				if v_0_1.Op != OpConst16 || v_0_1.Type != t {
  8169  					continue
  8170  				}
  8171  				y := auxIntToInt16(v_0_1.AuxInt)
  8172  				if v_1.Op != OpConst16 || v_1.Type != t || auxIntToInt16(v_1.AuxInt) != y || !(oneBit16(y)) {
  8173  					continue
  8174  				}
  8175  				v.reset(OpNeq16)
  8176  				v0 := b.NewValue0(v.Pos, OpAnd16, t)
  8177  				v1 := b.NewValue0(v.Pos, OpConst16, t)
  8178  				v1.AuxInt = int16ToAuxInt(y)
  8179  				v0.AddArg2(x, v1)
  8180  				v2 := b.NewValue0(v.Pos, OpConst16, t)
  8181  				v2.AuxInt = int16ToAuxInt(0)
  8182  				v.AddArg2(v0, v2)
  8183  				return true
  8184  			}
  8185  		}
  8186  		break
  8187  	}
  8188  	return false
  8189  }
  8190  func rewriteValuegeneric_OpEq32(v *Value) bool {
  8191  	v_1 := v.Args[1]
  8192  	v_0 := v.Args[0]
  8193  	b := v.Block
  8194  	typ := &b.Func.Config.Types
  8195  	// match: (Eq32 x x)
  8196  	// result: (ConstBool [true])
  8197  	for {
  8198  		x := v_0
  8199  		if x != v_1 {
  8200  			break
  8201  		}
  8202  		v.reset(OpConstBool)
  8203  		v.AuxInt = boolToAuxInt(true)
  8204  		return true
  8205  	}
  8206  	// match: (Eq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
  8207  	// result: (Eq32 (Const32 <t> [c-d]) x)
  8208  	for {
  8209  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  8210  			if v_0.Op != OpConst32 {
  8211  				continue
  8212  			}
  8213  			t := v_0.Type
  8214  			c := auxIntToInt32(v_0.AuxInt)
  8215  			if v_1.Op != OpAdd32 {
  8216  				continue
  8217  			}
  8218  			_ = v_1.Args[1]
  8219  			v_1_0 := v_1.Args[0]
  8220  			v_1_1 := v_1.Args[1]
  8221  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  8222  				if v_1_0.Op != OpConst32 || v_1_0.Type != t {
  8223  					continue
  8224  				}
  8225  				d := auxIntToInt32(v_1_0.AuxInt)
  8226  				x := v_1_1
  8227  				v.reset(OpEq32)
  8228  				v0 := b.NewValue0(v.Pos, OpConst32, t)
  8229  				v0.AuxInt = int32ToAuxInt(c - d)
  8230  				v.AddArg2(v0, x)
  8231  				return true
  8232  			}
  8233  		}
  8234  		break
  8235  	}
  8236  	// match: (Eq32 (Const32 [c]) (Const32 [d]))
  8237  	// result: (ConstBool [c == d])
  8238  	for {
  8239  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  8240  			if v_0.Op != OpConst32 {
  8241  				continue
  8242  			}
  8243  			c := auxIntToInt32(v_0.AuxInt)
  8244  			if v_1.Op != OpConst32 {
  8245  				continue
  8246  			}
  8247  			d := auxIntToInt32(v_1.AuxInt)
  8248  			v.reset(OpConstBool)
  8249  			v.AuxInt = boolToAuxInt(c == d)
  8250  			return true
  8251  		}
  8252  		break
  8253  	}
  8254  	// match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Const32 [m]) x) (Const64 [s])) ) )
  8255  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<31+umagic32(c).m/2) && s == umagic32(c).s-1 && x.Op != OpConst32 && udivisibleOK32(c)
  8256  	// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(udivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(32-udivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(udivisible32(c).max)]) )
  8257  	for {
  8258  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  8259  			x := v_0
  8260  			if v_1.Op != OpMul32 {
  8261  				continue
  8262  			}
  8263  			_ = v_1.Args[1]
  8264  			v_1_0 := v_1.Args[0]
  8265  			v_1_1 := v_1.Args[1]
  8266  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  8267  				if v_1_0.Op != OpConst32 {
  8268  					continue
  8269  				}
  8270  				c := auxIntToInt32(v_1_0.AuxInt)
  8271  				if v_1_1.Op != OpRsh32Ux64 {
  8272  					continue
  8273  				}
  8274  				_ = v_1_1.Args[1]
  8275  				mul := v_1_1.Args[0]
  8276  				if mul.Op != OpHmul32u {
  8277  					continue
  8278  				}
  8279  				_ = mul.Args[1]
  8280  				mul_0 := mul.Args[0]
  8281  				mul_1 := mul.Args[1]
  8282  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  8283  					if mul_0.Op != OpConst32 {
  8284  						continue
  8285  					}
  8286  					m := auxIntToInt32(mul_0.AuxInt)
  8287  					if x != mul_1 {
  8288  						continue
  8289  					}
  8290  					v_1_1_1 := v_1_1.Args[1]
  8291  					if v_1_1_1.Op != OpConst64 {
  8292  						continue
  8293  					}
  8294  					s := auxIntToInt64(v_1_1_1.AuxInt)
  8295  					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<31+umagic32(c).m/2) && s == umagic32(c).s-1 && x.Op != OpConst32 && udivisibleOK32(c)) {
  8296  						continue
  8297  					}
  8298  					v.reset(OpLeq32U)
  8299  					v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
  8300  					v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
  8301  					v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8302  					v2.AuxInt = int32ToAuxInt(int32(udivisible32(c).m))
  8303  					v1.AddArg2(v2, x)
  8304  					v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8305  					v3.AuxInt = int32ToAuxInt(int32(32 - udivisible32(c).k))
  8306  					v0.AddArg2(v1, v3)
  8307  					v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8308  					v4.AuxInt = int32ToAuxInt(int32(udivisible32(c).max))
  8309  					v.AddArg2(v0, v4)
  8310  					return true
  8311  				}
  8312  			}
  8313  		}
  8314  		break
  8315  	}
  8316  	// match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Const32 <typ.UInt32> [m]) (Rsh32Ux64 x (Const64 [1]))) (Const64 [s])) ) )
  8317  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<31+(umagic32(c).m+1)/2) && s == umagic32(c).s-2 && x.Op != OpConst32 && udivisibleOK32(c)
  8318  	// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(udivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(32-udivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(udivisible32(c).max)]) )
  8319  	for {
  8320  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  8321  			x := v_0
  8322  			if v_1.Op != OpMul32 {
  8323  				continue
  8324  			}
  8325  			_ = v_1.Args[1]
  8326  			v_1_0 := v_1.Args[0]
  8327  			v_1_1 := v_1.Args[1]
  8328  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  8329  				if v_1_0.Op != OpConst32 {
  8330  					continue
  8331  				}
  8332  				c := auxIntToInt32(v_1_0.AuxInt)
  8333  				if v_1_1.Op != OpRsh32Ux64 {
  8334  					continue
  8335  				}
  8336  				_ = v_1_1.Args[1]
  8337  				mul := v_1_1.Args[0]
  8338  				if mul.Op != OpHmul32u {
  8339  					continue
  8340  				}
  8341  				_ = mul.Args[1]
  8342  				mul_0 := mul.Args[0]
  8343  				mul_1 := mul.Args[1]
  8344  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  8345  					if mul_0.Op != OpConst32 || mul_0.Type != typ.UInt32 {
  8346  						continue
  8347  					}
  8348  					m := auxIntToInt32(mul_0.AuxInt)
  8349  					if mul_1.Op != OpRsh32Ux64 {
  8350  						continue
  8351  					}
  8352  					_ = mul_1.Args[1]
  8353  					if x != mul_1.Args[0] {
  8354  						continue
  8355  					}
  8356  					mul_1_1 := mul_1.Args[1]
  8357  					if mul_1_1.Op != OpConst64 || auxIntToInt64(mul_1_1.AuxInt) != 1 {
  8358  						continue
  8359  					}
  8360  					v_1_1_1 := v_1_1.Args[1]
  8361  					if v_1_1_1.Op != OpConst64 {
  8362  						continue
  8363  					}
  8364  					s := auxIntToInt64(v_1_1_1.AuxInt)
  8365  					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<31+(umagic32(c).m+1)/2) && s == umagic32(c).s-2 && x.Op != OpConst32 && udivisibleOK32(c)) {
  8366  						continue
  8367  					}
  8368  					v.reset(OpLeq32U)
  8369  					v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
  8370  					v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
  8371  					v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8372  					v2.AuxInt = int32ToAuxInt(int32(udivisible32(c).m))
  8373  					v1.AddArg2(v2, x)
  8374  					v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8375  					v3.AuxInt = int32ToAuxInt(int32(32 - udivisible32(c).k))
  8376  					v0.AddArg2(v1, v3)
  8377  					v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8378  					v4.AuxInt = int32ToAuxInt(int32(udivisible32(c).max))
  8379  					v.AddArg2(v0, v4)
  8380  					return true
  8381  				}
  8382  			}
  8383  		}
  8384  		break
  8385  	}
  8386  	// match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 (Avg32u x mul:(Hmul32u (Const32 [m]) x)) (Const64 [s])) ) )
  8387  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(umagic32(c).m) && s == umagic32(c).s-1 && x.Op != OpConst32 && udivisibleOK32(c)
  8388  	// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(udivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(32-udivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(udivisible32(c).max)]) )
  8389  	for {
  8390  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  8391  			x := v_0
  8392  			if v_1.Op != OpMul32 {
  8393  				continue
  8394  			}
  8395  			_ = v_1.Args[1]
  8396  			v_1_0 := v_1.Args[0]
  8397  			v_1_1 := v_1.Args[1]
  8398  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  8399  				if v_1_0.Op != OpConst32 {
  8400  					continue
  8401  				}
  8402  				c := auxIntToInt32(v_1_0.AuxInt)
  8403  				if v_1_1.Op != OpRsh32Ux64 {
  8404  					continue
  8405  				}
  8406  				_ = v_1_1.Args[1]
  8407  				v_1_1_0 := v_1_1.Args[0]
  8408  				if v_1_1_0.Op != OpAvg32u {
  8409  					continue
  8410  				}
  8411  				_ = v_1_1_0.Args[1]
  8412  				if x != v_1_1_0.Args[0] {
  8413  					continue
  8414  				}
  8415  				mul := v_1_1_0.Args[1]
  8416  				if mul.Op != OpHmul32u {
  8417  					continue
  8418  				}
  8419  				_ = mul.Args[1]
  8420  				mul_0 := mul.Args[0]
  8421  				mul_1 := mul.Args[1]
  8422  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  8423  					if mul_0.Op != OpConst32 {
  8424  						continue
  8425  					}
  8426  					m := auxIntToInt32(mul_0.AuxInt)
  8427  					if x != mul_1 {
  8428  						continue
  8429  					}
  8430  					v_1_1_1 := v_1_1.Args[1]
  8431  					if v_1_1_1.Op != OpConst64 {
  8432  						continue
  8433  					}
  8434  					s := auxIntToInt64(v_1_1_1.AuxInt)
  8435  					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(umagic32(c).m) && s == umagic32(c).s-1 && x.Op != OpConst32 && udivisibleOK32(c)) {
  8436  						continue
  8437  					}
  8438  					v.reset(OpLeq32U)
  8439  					v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
  8440  					v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
  8441  					v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8442  					v2.AuxInt = int32ToAuxInt(int32(udivisible32(c).m))
  8443  					v1.AddArg2(v2, x)
  8444  					v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8445  					v3.AuxInt = int32ToAuxInt(int32(32 - udivisible32(c).k))
  8446  					v0.AddArg2(v1, v3)
  8447  					v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8448  					v4.AuxInt = int32ToAuxInt(int32(udivisible32(c).max))
  8449  					v.AddArg2(v0, v4)
  8450  					return true
  8451  				}
  8452  			}
  8453  		}
  8454  		break
  8455  	}
  8456  	// match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x)) (Const64 [s]))) ) )
  8457  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic32(c).m/2) && s == 32+umagic32(c).s-1 && x.Op != OpConst32 && udivisibleOK32(c)
  8458  	// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(udivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(32-udivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(udivisible32(c).max)]) )
  8459  	for {
  8460  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  8461  			x := v_0
  8462  			if v_1.Op != OpMul32 {
  8463  				continue
  8464  			}
  8465  			_ = v_1.Args[1]
  8466  			v_1_0 := v_1.Args[0]
  8467  			v_1_1 := v_1.Args[1]
  8468  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  8469  				if v_1_0.Op != OpConst32 {
  8470  					continue
  8471  				}
  8472  				c := auxIntToInt32(v_1_0.AuxInt)
  8473  				if v_1_1.Op != OpTrunc64to32 {
  8474  					continue
  8475  				}
  8476  				v_1_1_0 := v_1_1.Args[0]
  8477  				if v_1_1_0.Op != OpRsh64Ux64 {
  8478  					continue
  8479  				}
  8480  				_ = v_1_1_0.Args[1]
  8481  				mul := v_1_1_0.Args[0]
  8482  				if mul.Op != OpMul64 {
  8483  					continue
  8484  				}
  8485  				_ = mul.Args[1]
  8486  				mul_0 := mul.Args[0]
  8487  				mul_1 := mul.Args[1]
  8488  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  8489  					if mul_0.Op != OpConst64 {
  8490  						continue
  8491  					}
  8492  					m := auxIntToInt64(mul_0.AuxInt)
  8493  					if mul_1.Op != OpZeroExt32to64 || x != mul_1.Args[0] {
  8494  						continue
  8495  					}
  8496  					v_1_1_0_1 := v_1_1_0.Args[1]
  8497  					if v_1_1_0_1.Op != OpConst64 {
  8498  						continue
  8499  					}
  8500  					s := auxIntToInt64(v_1_1_0_1.AuxInt)
  8501  					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic32(c).m/2) && s == 32+umagic32(c).s-1 && x.Op != OpConst32 && udivisibleOK32(c)) {
  8502  						continue
  8503  					}
  8504  					v.reset(OpLeq32U)
  8505  					v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
  8506  					v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
  8507  					v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8508  					v2.AuxInt = int32ToAuxInt(int32(udivisible32(c).m))
  8509  					v1.AddArg2(v2, x)
  8510  					v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8511  					v3.AuxInt = int32ToAuxInt(int32(32 - udivisible32(c).k))
  8512  					v0.AddArg2(v1, v3)
  8513  					v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8514  					v4.AuxInt = int32ToAuxInt(int32(udivisible32(c).max))
  8515  					v.AddArg2(v0, v4)
  8516  					return true
  8517  				}
  8518  			}
  8519  		}
  8520  		break
  8521  	}
  8522  	// match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1]))) (Const64 [s]))) ) )
  8523  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic32(c).m+1)/2) && s == 32+umagic32(c).s-2 && x.Op != OpConst32 && udivisibleOK32(c)
  8524  	// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(udivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(32-udivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(udivisible32(c).max)]) )
  8525  	for {
  8526  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  8527  			x := v_0
  8528  			if v_1.Op != OpMul32 {
  8529  				continue
  8530  			}
  8531  			_ = v_1.Args[1]
  8532  			v_1_0 := v_1.Args[0]
  8533  			v_1_1 := v_1.Args[1]
  8534  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  8535  				if v_1_0.Op != OpConst32 {
  8536  					continue
  8537  				}
  8538  				c := auxIntToInt32(v_1_0.AuxInt)
  8539  				if v_1_1.Op != OpTrunc64to32 {
  8540  					continue
  8541  				}
  8542  				v_1_1_0 := v_1_1.Args[0]
  8543  				if v_1_1_0.Op != OpRsh64Ux64 {
  8544  					continue
  8545  				}
  8546  				_ = v_1_1_0.Args[1]
  8547  				mul := v_1_1_0.Args[0]
  8548  				if mul.Op != OpMul64 {
  8549  					continue
  8550  				}
  8551  				_ = mul.Args[1]
  8552  				mul_0 := mul.Args[0]
  8553  				mul_1 := mul.Args[1]
  8554  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  8555  					if mul_0.Op != OpConst64 {
  8556  						continue
  8557  					}
  8558  					m := auxIntToInt64(mul_0.AuxInt)
  8559  					if mul_1.Op != OpRsh64Ux64 {
  8560  						continue
  8561  					}
  8562  					_ = mul_1.Args[1]
  8563  					mul_1_0 := mul_1.Args[0]
  8564  					if mul_1_0.Op != OpZeroExt32to64 || x != mul_1_0.Args[0] {
  8565  						continue
  8566  					}
  8567  					mul_1_1 := mul_1.Args[1]
  8568  					if mul_1_1.Op != OpConst64 || auxIntToInt64(mul_1_1.AuxInt) != 1 {
  8569  						continue
  8570  					}
  8571  					v_1_1_0_1 := v_1_1_0.Args[1]
  8572  					if v_1_1_0_1.Op != OpConst64 {
  8573  						continue
  8574  					}
  8575  					s := auxIntToInt64(v_1_1_0_1.AuxInt)
  8576  					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic32(c).m+1)/2) && s == 32+umagic32(c).s-2 && x.Op != OpConst32 && udivisibleOK32(c)) {
  8577  						continue
  8578  					}
  8579  					v.reset(OpLeq32U)
  8580  					v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
  8581  					v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
  8582  					v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8583  					v2.AuxInt = int32ToAuxInt(int32(udivisible32(c).m))
  8584  					v1.AddArg2(v2, x)
  8585  					v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8586  					v3.AuxInt = int32ToAuxInt(int32(32 - udivisible32(c).k))
  8587  					v0.AddArg2(v1, v3)
  8588  					v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8589  					v4.AuxInt = int32ToAuxInt(int32(udivisible32(c).max))
  8590  					v.AddArg2(v0, v4)
  8591  					return true
  8592  				}
  8593  			}
  8594  		}
  8595  		break
  8596  	}
  8597  	// match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x))) (Const64 [s]))) ) )
  8598  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic32(c).m) && s == 32+umagic32(c).s-1 && x.Op != OpConst32 && udivisibleOK32(c)
  8599  	// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(udivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(32-udivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(udivisible32(c).max)]) )
  8600  	for {
  8601  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  8602  			x := v_0
  8603  			if v_1.Op != OpMul32 {
  8604  				continue
  8605  			}
  8606  			_ = v_1.Args[1]
  8607  			v_1_0 := v_1.Args[0]
  8608  			v_1_1 := v_1.Args[1]
  8609  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  8610  				if v_1_0.Op != OpConst32 {
  8611  					continue
  8612  				}
  8613  				c := auxIntToInt32(v_1_0.AuxInt)
  8614  				if v_1_1.Op != OpTrunc64to32 {
  8615  					continue
  8616  				}
  8617  				v_1_1_0 := v_1_1.Args[0]
  8618  				if v_1_1_0.Op != OpRsh64Ux64 {
  8619  					continue
  8620  				}
  8621  				_ = v_1_1_0.Args[1]
  8622  				v_1_1_0_0 := v_1_1_0.Args[0]
  8623  				if v_1_1_0_0.Op != OpAvg64u {
  8624  					continue
  8625  				}
  8626  				_ = v_1_1_0_0.Args[1]
  8627  				v_1_1_0_0_0 := v_1_1_0_0.Args[0]
  8628  				if v_1_1_0_0_0.Op != OpLsh64x64 {
  8629  					continue
  8630  				}
  8631  				_ = v_1_1_0_0_0.Args[1]
  8632  				v_1_1_0_0_0_0 := v_1_1_0_0_0.Args[0]
  8633  				if v_1_1_0_0_0_0.Op != OpZeroExt32to64 || x != v_1_1_0_0_0_0.Args[0] {
  8634  					continue
  8635  				}
  8636  				v_1_1_0_0_0_1 := v_1_1_0_0_0.Args[1]
  8637  				if v_1_1_0_0_0_1.Op != OpConst64 || auxIntToInt64(v_1_1_0_0_0_1.AuxInt) != 32 {
  8638  					continue
  8639  				}
  8640  				mul := v_1_1_0_0.Args[1]
  8641  				if mul.Op != OpMul64 {
  8642  					continue
  8643  				}
  8644  				_ = mul.Args[1]
  8645  				mul_0 := mul.Args[0]
  8646  				mul_1 := mul.Args[1]
  8647  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  8648  					if mul_0.Op != OpConst64 {
  8649  						continue
  8650  					}
  8651  					m := auxIntToInt64(mul_0.AuxInt)
  8652  					if mul_1.Op != OpZeroExt32to64 || x != mul_1.Args[0] {
  8653  						continue
  8654  					}
  8655  					v_1_1_0_1 := v_1_1_0.Args[1]
  8656  					if v_1_1_0_1.Op != OpConst64 {
  8657  						continue
  8658  					}
  8659  					s := auxIntToInt64(v_1_1_0_1.AuxInt)
  8660  					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic32(c).m) && s == 32+umagic32(c).s-1 && x.Op != OpConst32 && udivisibleOK32(c)) {
  8661  						continue
  8662  					}
  8663  					v.reset(OpLeq32U)
  8664  					v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
  8665  					v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
  8666  					v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8667  					v2.AuxInt = int32ToAuxInt(int32(udivisible32(c).m))
  8668  					v1.AddArg2(v2, x)
  8669  					v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8670  					v3.AuxInt = int32ToAuxInt(int32(32 - udivisible32(c).k))
  8671  					v0.AddArg2(v1, v3)
  8672  					v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8673  					v4.AuxInt = int32ToAuxInt(int32(udivisible32(c).max))
  8674  					v.AddArg2(v0, v4)
  8675  					return true
  8676  				}
  8677  			}
  8678  		}
  8679  		break
  8680  	}
  8681  	// match: (Eq32 x (Mul32 (Const32 [c]) (Sub32 (Rsh64x64 mul:(Mul64 (Const64 [m]) (SignExt32to64 x)) (Const64 [s])) (Rsh64x64 (SignExt32to64 x) (Const64 [63]))) ) )
  8682  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic32(c).m) && s == 32+smagic32(c).s && x.Op != OpConst32 && sdivisibleOK32(c)
  8683  	// result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(sdivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(sdivisible32(c).a)]) ) (Const32 <typ.UInt32> [int32(32-sdivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(sdivisible32(c).max)]) )
  8684  	for {
  8685  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  8686  			x := v_0
  8687  			if v_1.Op != OpMul32 {
  8688  				continue
  8689  			}
  8690  			_ = v_1.Args[1]
  8691  			v_1_0 := v_1.Args[0]
  8692  			v_1_1 := v_1.Args[1]
  8693  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  8694  				if v_1_0.Op != OpConst32 {
  8695  					continue
  8696  				}
  8697  				c := auxIntToInt32(v_1_0.AuxInt)
  8698  				if v_1_1.Op != OpSub32 {
  8699  					continue
  8700  				}
  8701  				_ = v_1_1.Args[1]
  8702  				v_1_1_0 := v_1_1.Args[0]
  8703  				if v_1_1_0.Op != OpRsh64x64 {
  8704  					continue
  8705  				}
  8706  				_ = v_1_1_0.Args[1]
  8707  				mul := v_1_1_0.Args[0]
  8708  				if mul.Op != OpMul64 {
  8709  					continue
  8710  				}
  8711  				_ = mul.Args[1]
  8712  				mul_0 := mul.Args[0]
  8713  				mul_1 := mul.Args[1]
  8714  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  8715  					if mul_0.Op != OpConst64 {
  8716  						continue
  8717  					}
  8718  					m := auxIntToInt64(mul_0.AuxInt)
  8719  					if mul_1.Op != OpSignExt32to64 || x != mul_1.Args[0] {
  8720  						continue
  8721  					}
  8722  					v_1_1_0_1 := v_1_1_0.Args[1]
  8723  					if v_1_1_0_1.Op != OpConst64 {
  8724  						continue
  8725  					}
  8726  					s := auxIntToInt64(v_1_1_0_1.AuxInt)
  8727  					v_1_1_1 := v_1_1.Args[1]
  8728  					if v_1_1_1.Op != OpRsh64x64 {
  8729  						continue
  8730  					}
  8731  					_ = v_1_1_1.Args[1]
  8732  					v_1_1_1_0 := v_1_1_1.Args[0]
  8733  					if v_1_1_1_0.Op != OpSignExt32to64 || x != v_1_1_1_0.Args[0] {
  8734  						continue
  8735  					}
  8736  					v_1_1_1_1 := v_1_1_1.Args[1]
  8737  					if v_1_1_1_1.Op != OpConst64 || auxIntToInt64(v_1_1_1_1.AuxInt) != 63 || !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic32(c).m) && s == 32+smagic32(c).s && x.Op != OpConst32 && sdivisibleOK32(c)) {
  8738  						continue
  8739  					}
  8740  					v.reset(OpLeq32U)
  8741  					v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
  8742  					v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
  8743  					v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
  8744  					v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8745  					v3.AuxInt = int32ToAuxInt(int32(sdivisible32(c).m))
  8746  					v2.AddArg2(v3, x)
  8747  					v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8748  					v4.AuxInt = int32ToAuxInt(int32(sdivisible32(c).a))
  8749  					v1.AddArg2(v2, v4)
  8750  					v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8751  					v5.AuxInt = int32ToAuxInt(int32(32 - sdivisible32(c).k))
  8752  					v0.AddArg2(v1, v5)
  8753  					v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8754  					v6.AuxInt = int32ToAuxInt(int32(sdivisible32(c).max))
  8755  					v.AddArg2(v0, v6)
  8756  					return true
  8757  				}
  8758  			}
  8759  		}
  8760  		break
  8761  	}
  8762  	// match: (Eq32 x (Mul32 (Const32 [c]) (Sub32 (Rsh32x64 mul:(Hmul32 (Const32 [m]) x) (Const64 [s])) (Rsh32x64 x (Const64 [31]))) ) )
  8763  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(smagic32(c).m/2) && s == smagic32(c).s-1 && x.Op != OpConst32 && sdivisibleOK32(c)
  8764  	// result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(sdivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(sdivisible32(c).a)]) ) (Const32 <typ.UInt32> [int32(32-sdivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(sdivisible32(c).max)]) )
  8765  	for {
  8766  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  8767  			x := v_0
  8768  			if v_1.Op != OpMul32 {
  8769  				continue
  8770  			}
  8771  			_ = v_1.Args[1]
  8772  			v_1_0 := v_1.Args[0]
  8773  			v_1_1 := v_1.Args[1]
  8774  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  8775  				if v_1_0.Op != OpConst32 {
  8776  					continue
  8777  				}
  8778  				c := auxIntToInt32(v_1_0.AuxInt)
  8779  				if v_1_1.Op != OpSub32 {
  8780  					continue
  8781  				}
  8782  				_ = v_1_1.Args[1]
  8783  				v_1_1_0 := v_1_1.Args[0]
  8784  				if v_1_1_0.Op != OpRsh32x64 {
  8785  					continue
  8786  				}
  8787  				_ = v_1_1_0.Args[1]
  8788  				mul := v_1_1_0.Args[0]
  8789  				if mul.Op != OpHmul32 {
  8790  					continue
  8791  				}
  8792  				_ = mul.Args[1]
  8793  				mul_0 := mul.Args[0]
  8794  				mul_1 := mul.Args[1]
  8795  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  8796  					if mul_0.Op != OpConst32 {
  8797  						continue
  8798  					}
  8799  					m := auxIntToInt32(mul_0.AuxInt)
  8800  					if x != mul_1 {
  8801  						continue
  8802  					}
  8803  					v_1_1_0_1 := v_1_1_0.Args[1]
  8804  					if v_1_1_0_1.Op != OpConst64 {
  8805  						continue
  8806  					}
  8807  					s := auxIntToInt64(v_1_1_0_1.AuxInt)
  8808  					v_1_1_1 := v_1_1.Args[1]
  8809  					if v_1_1_1.Op != OpRsh32x64 {
  8810  						continue
  8811  					}
  8812  					_ = v_1_1_1.Args[1]
  8813  					if x != v_1_1_1.Args[0] {
  8814  						continue
  8815  					}
  8816  					v_1_1_1_1 := v_1_1_1.Args[1]
  8817  					if v_1_1_1_1.Op != OpConst64 || auxIntToInt64(v_1_1_1_1.AuxInt) != 31 || !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(smagic32(c).m/2) && s == smagic32(c).s-1 && x.Op != OpConst32 && sdivisibleOK32(c)) {
  8818  						continue
  8819  					}
  8820  					v.reset(OpLeq32U)
  8821  					v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
  8822  					v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
  8823  					v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
  8824  					v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8825  					v3.AuxInt = int32ToAuxInt(int32(sdivisible32(c).m))
  8826  					v2.AddArg2(v3, x)
  8827  					v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8828  					v4.AuxInt = int32ToAuxInt(int32(sdivisible32(c).a))
  8829  					v1.AddArg2(v2, v4)
  8830  					v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8831  					v5.AuxInt = int32ToAuxInt(int32(32 - sdivisible32(c).k))
  8832  					v0.AddArg2(v1, v5)
  8833  					v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8834  					v6.AuxInt = int32ToAuxInt(int32(sdivisible32(c).max))
  8835  					v.AddArg2(v0, v6)
  8836  					return true
  8837  				}
  8838  			}
  8839  		}
  8840  		break
  8841  	}
  8842  	// match: (Eq32 x (Mul32 (Const32 [c]) (Sub32 (Rsh32x64 (Add32 mul:(Hmul32 (Const32 [m]) x) x) (Const64 [s])) (Rsh32x64 x (Const64 [31]))) ) )
  8843  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(smagic32(c).m) && s == smagic32(c).s && x.Op != OpConst32 && sdivisibleOK32(c)
  8844  	// result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(sdivisible32(c).m)]) x) (Const32 <typ.UInt32> [int32(sdivisible32(c).a)]) ) (Const32 <typ.UInt32> [int32(32-sdivisible32(c).k)]) ) (Const32 <typ.UInt32> [int32(sdivisible32(c).max)]) )
  8845  	for {
  8846  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  8847  			x := v_0
  8848  			if v_1.Op != OpMul32 {
  8849  				continue
  8850  			}
  8851  			_ = v_1.Args[1]
  8852  			v_1_0 := v_1.Args[0]
  8853  			v_1_1 := v_1.Args[1]
  8854  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  8855  				if v_1_0.Op != OpConst32 {
  8856  					continue
  8857  				}
  8858  				c := auxIntToInt32(v_1_0.AuxInt)
  8859  				if v_1_1.Op != OpSub32 {
  8860  					continue
  8861  				}
  8862  				_ = v_1_1.Args[1]
  8863  				v_1_1_0 := v_1_1.Args[0]
  8864  				if v_1_1_0.Op != OpRsh32x64 {
  8865  					continue
  8866  				}
  8867  				_ = v_1_1_0.Args[1]
  8868  				v_1_1_0_0 := v_1_1_0.Args[0]
  8869  				if v_1_1_0_0.Op != OpAdd32 {
  8870  					continue
  8871  				}
  8872  				_ = v_1_1_0_0.Args[1]
  8873  				v_1_1_0_0_0 := v_1_1_0_0.Args[0]
  8874  				v_1_1_0_0_1 := v_1_1_0_0.Args[1]
  8875  				for _i2 := 0; _i2 <= 1; _i2, v_1_1_0_0_0, v_1_1_0_0_1 = _i2+1, v_1_1_0_0_1, v_1_1_0_0_0 {
  8876  					mul := v_1_1_0_0_0
  8877  					if mul.Op != OpHmul32 {
  8878  						continue
  8879  					}
  8880  					_ = mul.Args[1]
  8881  					mul_0 := mul.Args[0]
  8882  					mul_1 := mul.Args[1]
  8883  					for _i3 := 0; _i3 <= 1; _i3, mul_0, mul_1 = _i3+1, mul_1, mul_0 {
  8884  						if mul_0.Op != OpConst32 {
  8885  							continue
  8886  						}
  8887  						m := auxIntToInt32(mul_0.AuxInt)
  8888  						if x != mul_1 || x != v_1_1_0_0_1 {
  8889  							continue
  8890  						}
  8891  						v_1_1_0_1 := v_1_1_0.Args[1]
  8892  						if v_1_1_0_1.Op != OpConst64 {
  8893  							continue
  8894  						}
  8895  						s := auxIntToInt64(v_1_1_0_1.AuxInt)
  8896  						v_1_1_1 := v_1_1.Args[1]
  8897  						if v_1_1_1.Op != OpRsh32x64 {
  8898  							continue
  8899  						}
  8900  						_ = v_1_1_1.Args[1]
  8901  						if x != v_1_1_1.Args[0] {
  8902  							continue
  8903  						}
  8904  						v_1_1_1_1 := v_1_1_1.Args[1]
  8905  						if v_1_1_1_1.Op != OpConst64 || auxIntToInt64(v_1_1_1_1.AuxInt) != 31 || !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(smagic32(c).m) && s == smagic32(c).s && x.Op != OpConst32 && sdivisibleOK32(c)) {
  8906  							continue
  8907  						}
  8908  						v.reset(OpLeq32U)
  8909  						v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
  8910  						v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
  8911  						v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
  8912  						v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8913  						v3.AuxInt = int32ToAuxInt(int32(sdivisible32(c).m))
  8914  						v2.AddArg2(v3, x)
  8915  						v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8916  						v4.AuxInt = int32ToAuxInt(int32(sdivisible32(c).a))
  8917  						v1.AddArg2(v2, v4)
  8918  						v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8919  						v5.AuxInt = int32ToAuxInt(int32(32 - sdivisible32(c).k))
  8920  						v0.AddArg2(v1, v5)
  8921  						v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8922  						v6.AuxInt = int32ToAuxInt(int32(sdivisible32(c).max))
  8923  						v.AddArg2(v0, v6)
  8924  						return true
  8925  					}
  8926  				}
  8927  			}
  8928  		}
  8929  		break
  8930  	}
  8931  	// match: (Eq32 n (Lsh32x64 (Rsh32x64 (Add32 <t> n (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) )
  8932  	// cond: k > 0 && k < 31 && kbar == 32 - k
  8933  	// result: (Eq32 (And32 <t> n (Const32 <t> [1<<uint(k)-1])) (Const32 <t> [0]))
  8934  	for {
  8935  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  8936  			n := v_0
  8937  			if v_1.Op != OpLsh32x64 {
  8938  				continue
  8939  			}
  8940  			_ = v_1.Args[1]
  8941  			v_1_0 := v_1.Args[0]
  8942  			if v_1_0.Op != OpRsh32x64 {
  8943  				continue
  8944  			}
  8945  			_ = v_1_0.Args[1]
  8946  			v_1_0_0 := v_1_0.Args[0]
  8947  			if v_1_0_0.Op != OpAdd32 {
  8948  				continue
  8949  			}
  8950  			t := v_1_0_0.Type
  8951  			_ = v_1_0_0.Args[1]
  8952  			v_1_0_0_0 := v_1_0_0.Args[0]
  8953  			v_1_0_0_1 := v_1_0_0.Args[1]
  8954  			for _i1 := 0; _i1 <= 1; _i1, v_1_0_0_0, v_1_0_0_1 = _i1+1, v_1_0_0_1, v_1_0_0_0 {
  8955  				if n != v_1_0_0_0 || v_1_0_0_1.Op != OpRsh32Ux64 || v_1_0_0_1.Type != t {
  8956  					continue
  8957  				}
  8958  				_ = v_1_0_0_1.Args[1]
  8959  				v_1_0_0_1_0 := v_1_0_0_1.Args[0]
  8960  				if v_1_0_0_1_0.Op != OpRsh32x64 || v_1_0_0_1_0.Type != t {
  8961  					continue
  8962  				}
  8963  				_ = v_1_0_0_1_0.Args[1]
  8964  				if n != v_1_0_0_1_0.Args[0] {
  8965  					continue
  8966  				}
  8967  				v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
  8968  				if v_1_0_0_1_0_1.Op != OpConst64 || v_1_0_0_1_0_1.Type != typ.UInt64 || auxIntToInt64(v_1_0_0_1_0_1.AuxInt) != 31 {
  8969  					continue
  8970  				}
  8971  				v_1_0_0_1_1 := v_1_0_0_1.Args[1]
  8972  				if v_1_0_0_1_1.Op != OpConst64 || v_1_0_0_1_1.Type != typ.UInt64 {
  8973  					continue
  8974  				}
  8975  				kbar := auxIntToInt64(v_1_0_0_1_1.AuxInt)
  8976  				v_1_0_1 := v_1_0.Args[1]
  8977  				if v_1_0_1.Op != OpConst64 || v_1_0_1.Type != typ.UInt64 {
  8978  					continue
  8979  				}
  8980  				k := auxIntToInt64(v_1_0_1.AuxInt)
  8981  				v_1_1 := v_1.Args[1]
  8982  				if v_1_1.Op != OpConst64 || v_1_1.Type != typ.UInt64 || auxIntToInt64(v_1_1.AuxInt) != k || !(k > 0 && k < 31 && kbar == 32-k) {
  8983  					continue
  8984  				}
  8985  				v.reset(OpEq32)
  8986  				v0 := b.NewValue0(v.Pos, OpAnd32, t)
  8987  				v1 := b.NewValue0(v.Pos, OpConst32, t)
  8988  				v1.AuxInt = int32ToAuxInt(1<<uint(k) - 1)
  8989  				v0.AddArg2(n, v1)
  8990  				v2 := b.NewValue0(v.Pos, OpConst32, t)
  8991  				v2.AuxInt = int32ToAuxInt(0)
  8992  				v.AddArg2(v0, v2)
  8993  				return true
  8994  			}
  8995  		}
  8996  		break
  8997  	}
  8998  	// match: (Eq32 s:(Sub32 x y) (Const32 [0]))
  8999  	// cond: s.Uses == 1
  9000  	// result: (Eq32 x y)
  9001  	for {
  9002  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9003  			s := v_0
  9004  			if s.Op != OpSub32 {
  9005  				continue
  9006  			}
  9007  			y := s.Args[1]
  9008  			x := s.Args[0]
  9009  			if v_1.Op != OpConst32 || auxIntToInt32(v_1.AuxInt) != 0 || !(s.Uses == 1) {
  9010  				continue
  9011  			}
  9012  			v.reset(OpEq32)
  9013  			v.AddArg2(x, y)
  9014  			return true
  9015  		}
  9016  		break
  9017  	}
  9018  	// match: (Eq32 (And32 <t> x (Const32 <t> [y])) (Const32 <t> [y]))
  9019  	// cond: oneBit32(y)
  9020  	// result: (Neq32 (And32 <t> x (Const32 <t> [y])) (Const32 <t> [0]))
  9021  	for {
  9022  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9023  			if v_0.Op != OpAnd32 {
  9024  				continue
  9025  			}
  9026  			t := v_0.Type
  9027  			_ = v_0.Args[1]
  9028  			v_0_0 := v_0.Args[0]
  9029  			v_0_1 := v_0.Args[1]
  9030  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
  9031  				x := v_0_0
  9032  				if v_0_1.Op != OpConst32 || v_0_1.Type != t {
  9033  					continue
  9034  				}
  9035  				y := auxIntToInt32(v_0_1.AuxInt)
  9036  				if v_1.Op != OpConst32 || v_1.Type != t || auxIntToInt32(v_1.AuxInt) != y || !(oneBit32(y)) {
  9037  					continue
  9038  				}
  9039  				v.reset(OpNeq32)
  9040  				v0 := b.NewValue0(v.Pos, OpAnd32, t)
  9041  				v1 := b.NewValue0(v.Pos, OpConst32, t)
  9042  				v1.AuxInt = int32ToAuxInt(y)
  9043  				v0.AddArg2(x, v1)
  9044  				v2 := b.NewValue0(v.Pos, OpConst32, t)
  9045  				v2.AuxInt = int32ToAuxInt(0)
  9046  				v.AddArg2(v0, v2)
  9047  				return true
  9048  			}
  9049  		}
  9050  		break
  9051  	}
  9052  	return false
  9053  }
  9054  func rewriteValuegeneric_OpEq32F(v *Value) bool {
  9055  	v_1 := v.Args[1]
  9056  	v_0 := v.Args[0]
  9057  	// match: (Eq32F (Const32F [c]) (Const32F [d]))
  9058  	// result: (ConstBool [c == d])
  9059  	for {
  9060  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9061  			if v_0.Op != OpConst32F {
  9062  				continue
  9063  			}
  9064  			c := auxIntToFloat32(v_0.AuxInt)
  9065  			if v_1.Op != OpConst32F {
  9066  				continue
  9067  			}
  9068  			d := auxIntToFloat32(v_1.AuxInt)
  9069  			v.reset(OpConstBool)
  9070  			v.AuxInt = boolToAuxInt(c == d)
  9071  			return true
  9072  		}
  9073  		break
  9074  	}
  9075  	return false
  9076  }
  9077  func rewriteValuegeneric_OpEq64(v *Value) bool {
  9078  	v_1 := v.Args[1]
  9079  	v_0 := v.Args[0]
  9080  	b := v.Block
  9081  	typ := &b.Func.Config.Types
  9082  	// match: (Eq64 x x)
  9083  	// result: (ConstBool [true])
  9084  	for {
  9085  		x := v_0
  9086  		if x != v_1 {
  9087  			break
  9088  		}
  9089  		v.reset(OpConstBool)
  9090  		v.AuxInt = boolToAuxInt(true)
  9091  		return true
  9092  	}
  9093  	// match: (Eq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
  9094  	// result: (Eq64 (Const64 <t> [c-d]) x)
  9095  	for {
  9096  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9097  			if v_0.Op != OpConst64 {
  9098  				continue
  9099  			}
  9100  			t := v_0.Type
  9101  			c := auxIntToInt64(v_0.AuxInt)
  9102  			if v_1.Op != OpAdd64 {
  9103  				continue
  9104  			}
  9105  			_ = v_1.Args[1]
  9106  			v_1_0 := v_1.Args[0]
  9107  			v_1_1 := v_1.Args[1]
  9108  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  9109  				if v_1_0.Op != OpConst64 || v_1_0.Type != t {
  9110  					continue
  9111  				}
  9112  				d := auxIntToInt64(v_1_0.AuxInt)
  9113  				x := v_1_1
  9114  				v.reset(OpEq64)
  9115  				v0 := b.NewValue0(v.Pos, OpConst64, t)
  9116  				v0.AuxInt = int64ToAuxInt(c - d)
  9117  				v.AddArg2(v0, x)
  9118  				return true
  9119  			}
  9120  		}
  9121  		break
  9122  	}
  9123  	// match: (Eq64 (Const64 [c]) (Const64 [d]))
  9124  	// result: (ConstBool [c == d])
  9125  	for {
  9126  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9127  			if v_0.Op != OpConst64 {
  9128  				continue
  9129  			}
  9130  			c := auxIntToInt64(v_0.AuxInt)
  9131  			if v_1.Op != OpConst64 {
  9132  				continue
  9133  			}
  9134  			d := auxIntToInt64(v_1.AuxInt)
  9135  			v.reset(OpConstBool)
  9136  			v.AuxInt = boolToAuxInt(c == d)
  9137  			return true
  9138  		}
  9139  		break
  9140  	}
  9141  	// match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) x) (Const64 [s])) ) )
  9142  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic64(c).m/2) && s == umagic64(c).s-1 && x.Op != OpConst64 && udivisibleOK64(c)
  9143  	// result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible64(c).m)]) x) (Const64 <typ.UInt64> [64-udivisible64(c).k]) ) (Const64 <typ.UInt64> [int64(udivisible64(c).max)]) )
  9144  	for {
  9145  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9146  			x := v_0
  9147  			if v_1.Op != OpMul64 {
  9148  				continue
  9149  			}
  9150  			_ = v_1.Args[1]
  9151  			v_1_0 := v_1.Args[0]
  9152  			v_1_1 := v_1.Args[1]
  9153  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  9154  				if v_1_0.Op != OpConst64 {
  9155  					continue
  9156  				}
  9157  				c := auxIntToInt64(v_1_0.AuxInt)
  9158  				if v_1_1.Op != OpRsh64Ux64 {
  9159  					continue
  9160  				}
  9161  				_ = v_1_1.Args[1]
  9162  				mul := v_1_1.Args[0]
  9163  				if mul.Op != OpHmul64u {
  9164  					continue
  9165  				}
  9166  				_ = mul.Args[1]
  9167  				mul_0 := mul.Args[0]
  9168  				mul_1 := mul.Args[1]
  9169  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  9170  					if mul_0.Op != OpConst64 {
  9171  						continue
  9172  					}
  9173  					m := auxIntToInt64(mul_0.AuxInt)
  9174  					if x != mul_1 {
  9175  						continue
  9176  					}
  9177  					v_1_1_1 := v_1_1.Args[1]
  9178  					if v_1_1_1.Op != OpConst64 {
  9179  						continue
  9180  					}
  9181  					s := auxIntToInt64(v_1_1_1.AuxInt)
  9182  					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic64(c).m/2) && s == umagic64(c).s-1 && x.Op != OpConst64 && udivisibleOK64(c)) {
  9183  						continue
  9184  					}
  9185  					v.reset(OpLeq64U)
  9186  					v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
  9187  					v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
  9188  					v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  9189  					v2.AuxInt = int64ToAuxInt(int64(udivisible64(c).m))
  9190  					v1.AddArg2(v2, x)
  9191  					v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  9192  					v3.AuxInt = int64ToAuxInt(64 - udivisible64(c).k)
  9193  					v0.AddArg2(v1, v3)
  9194  					v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  9195  					v4.AuxInt = int64ToAuxInt(int64(udivisible64(c).max))
  9196  					v.AddArg2(v0, v4)
  9197  					return true
  9198  				}
  9199  			}
  9200  		}
  9201  		break
  9202  	}
  9203  	// match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) (Rsh64Ux64 x (Const64 [1]))) (Const64 [s])) ) )
  9204  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic64(c).m+1)/2) && s == umagic64(c).s-2 && x.Op != OpConst64 && udivisibleOK64(c)
  9205  	// result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible64(c).m)]) x) (Const64 <typ.UInt64> [64-udivisible64(c).k]) ) (Const64 <typ.UInt64> [int64(udivisible64(c).max)]) )
  9206  	for {
  9207  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9208  			x := v_0
  9209  			if v_1.Op != OpMul64 {
  9210  				continue
  9211  			}
  9212  			_ = v_1.Args[1]
  9213  			v_1_0 := v_1.Args[0]
  9214  			v_1_1 := v_1.Args[1]
  9215  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  9216  				if v_1_0.Op != OpConst64 {
  9217  					continue
  9218  				}
  9219  				c := auxIntToInt64(v_1_0.AuxInt)
  9220  				if v_1_1.Op != OpRsh64Ux64 {
  9221  					continue
  9222  				}
  9223  				_ = v_1_1.Args[1]
  9224  				mul := v_1_1.Args[0]
  9225  				if mul.Op != OpHmul64u {
  9226  					continue
  9227  				}
  9228  				_ = mul.Args[1]
  9229  				mul_0 := mul.Args[0]
  9230  				mul_1 := mul.Args[1]
  9231  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  9232  					if mul_0.Op != OpConst64 {
  9233  						continue
  9234  					}
  9235  					m := auxIntToInt64(mul_0.AuxInt)
  9236  					if mul_1.Op != OpRsh64Ux64 {
  9237  						continue
  9238  					}
  9239  					_ = mul_1.Args[1]
  9240  					if x != mul_1.Args[0] {
  9241  						continue
  9242  					}
  9243  					mul_1_1 := mul_1.Args[1]
  9244  					if mul_1_1.Op != OpConst64 || auxIntToInt64(mul_1_1.AuxInt) != 1 {
  9245  						continue
  9246  					}
  9247  					v_1_1_1 := v_1_1.Args[1]
  9248  					if v_1_1_1.Op != OpConst64 {
  9249  						continue
  9250  					}
  9251  					s := auxIntToInt64(v_1_1_1.AuxInt)
  9252  					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic64(c).m+1)/2) && s == umagic64(c).s-2 && x.Op != OpConst64 && udivisibleOK64(c)) {
  9253  						continue
  9254  					}
  9255  					v.reset(OpLeq64U)
  9256  					v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
  9257  					v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
  9258  					v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  9259  					v2.AuxInt = int64ToAuxInt(int64(udivisible64(c).m))
  9260  					v1.AddArg2(v2, x)
  9261  					v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  9262  					v3.AuxInt = int64ToAuxInt(64 - udivisible64(c).k)
  9263  					v0.AddArg2(v1, v3)
  9264  					v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  9265  					v4.AuxInt = int64ToAuxInt(int64(udivisible64(c).max))
  9266  					v.AddArg2(v0, v4)
  9267  					return true
  9268  				}
  9269  			}
  9270  		}
  9271  		break
  9272  	}
  9273  	// match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 (Avg64u x mul:(Hmul64u (Const64 [m]) x)) (Const64 [s])) ) )
  9274  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic64(c).m) && s == umagic64(c).s-1 && x.Op != OpConst64 && udivisibleOK64(c)
  9275  	// result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible64(c).m)]) x) (Const64 <typ.UInt64> [64-udivisible64(c).k]) ) (Const64 <typ.UInt64> [int64(udivisible64(c).max)]) )
  9276  	for {
  9277  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9278  			x := v_0
  9279  			if v_1.Op != OpMul64 {
  9280  				continue
  9281  			}
  9282  			_ = v_1.Args[1]
  9283  			v_1_0 := v_1.Args[0]
  9284  			v_1_1 := v_1.Args[1]
  9285  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  9286  				if v_1_0.Op != OpConst64 {
  9287  					continue
  9288  				}
  9289  				c := auxIntToInt64(v_1_0.AuxInt)
  9290  				if v_1_1.Op != OpRsh64Ux64 {
  9291  					continue
  9292  				}
  9293  				_ = v_1_1.Args[1]
  9294  				v_1_1_0 := v_1_1.Args[0]
  9295  				if v_1_1_0.Op != OpAvg64u {
  9296  					continue
  9297  				}
  9298  				_ = v_1_1_0.Args[1]
  9299  				if x != v_1_1_0.Args[0] {
  9300  					continue
  9301  				}
  9302  				mul := v_1_1_0.Args[1]
  9303  				if mul.Op != OpHmul64u {
  9304  					continue
  9305  				}
  9306  				_ = mul.Args[1]
  9307  				mul_0 := mul.Args[0]
  9308  				mul_1 := mul.Args[1]
  9309  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  9310  					if mul_0.Op != OpConst64 {
  9311  						continue
  9312  					}
  9313  					m := auxIntToInt64(mul_0.AuxInt)
  9314  					if x != mul_1 {
  9315  						continue
  9316  					}
  9317  					v_1_1_1 := v_1_1.Args[1]
  9318  					if v_1_1_1.Op != OpConst64 {
  9319  						continue
  9320  					}
  9321  					s := auxIntToInt64(v_1_1_1.AuxInt)
  9322  					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic64(c).m) && s == umagic64(c).s-1 && x.Op != OpConst64 && udivisibleOK64(c)) {
  9323  						continue
  9324  					}
  9325  					v.reset(OpLeq64U)
  9326  					v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
  9327  					v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
  9328  					v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  9329  					v2.AuxInt = int64ToAuxInt(int64(udivisible64(c).m))
  9330  					v1.AddArg2(v2, x)
  9331  					v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  9332  					v3.AuxInt = int64ToAuxInt(64 - udivisible64(c).k)
  9333  					v0.AddArg2(v1, v3)
  9334  					v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  9335  					v4.AuxInt = int64ToAuxInt(int64(udivisible64(c).max))
  9336  					v.AddArg2(v0, v4)
  9337  					return true
  9338  				}
  9339  			}
  9340  		}
  9341  		break
  9342  	}
  9343  	// match: (Eq64 x (Mul64 (Const64 [c]) (Sub64 (Rsh64x64 mul:(Hmul64 (Const64 [m]) x) (Const64 [s])) (Rsh64x64 x (Const64 [63]))) ) )
  9344  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic64(c).m/2) && s == smagic64(c).s-1 && x.Op != OpConst64 && sdivisibleOK64(c)
  9345  	// result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible64(c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible64(c).a)]) ) (Const64 <typ.UInt64> [64-sdivisible64(c).k]) ) (Const64 <typ.UInt64> [int64(sdivisible64(c).max)]) )
  9346  	for {
  9347  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9348  			x := v_0
  9349  			if v_1.Op != OpMul64 {
  9350  				continue
  9351  			}
  9352  			_ = v_1.Args[1]
  9353  			v_1_0 := v_1.Args[0]
  9354  			v_1_1 := v_1.Args[1]
  9355  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  9356  				if v_1_0.Op != OpConst64 {
  9357  					continue
  9358  				}
  9359  				c := auxIntToInt64(v_1_0.AuxInt)
  9360  				if v_1_1.Op != OpSub64 {
  9361  					continue
  9362  				}
  9363  				_ = v_1_1.Args[1]
  9364  				v_1_1_0 := v_1_1.Args[0]
  9365  				if v_1_1_0.Op != OpRsh64x64 {
  9366  					continue
  9367  				}
  9368  				_ = v_1_1_0.Args[1]
  9369  				mul := v_1_1_0.Args[0]
  9370  				if mul.Op != OpHmul64 {
  9371  					continue
  9372  				}
  9373  				_ = mul.Args[1]
  9374  				mul_0 := mul.Args[0]
  9375  				mul_1 := mul.Args[1]
  9376  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  9377  					if mul_0.Op != OpConst64 {
  9378  						continue
  9379  					}
  9380  					m := auxIntToInt64(mul_0.AuxInt)
  9381  					if x != mul_1 {
  9382  						continue
  9383  					}
  9384  					v_1_1_0_1 := v_1_1_0.Args[1]
  9385  					if v_1_1_0_1.Op != OpConst64 {
  9386  						continue
  9387  					}
  9388  					s := auxIntToInt64(v_1_1_0_1.AuxInt)
  9389  					v_1_1_1 := v_1_1.Args[1]
  9390  					if v_1_1_1.Op != OpRsh64x64 {
  9391  						continue
  9392  					}
  9393  					_ = v_1_1_1.Args[1]
  9394  					if x != v_1_1_1.Args[0] {
  9395  						continue
  9396  					}
  9397  					v_1_1_1_1 := v_1_1_1.Args[1]
  9398  					if v_1_1_1_1.Op != OpConst64 || auxIntToInt64(v_1_1_1_1.AuxInt) != 63 || !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic64(c).m/2) && s == smagic64(c).s-1 && x.Op != OpConst64 && sdivisibleOK64(c)) {
  9399  						continue
  9400  					}
  9401  					v.reset(OpLeq64U)
  9402  					v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
  9403  					v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
  9404  					v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
  9405  					v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  9406  					v3.AuxInt = int64ToAuxInt(int64(sdivisible64(c).m))
  9407  					v2.AddArg2(v3, x)
  9408  					v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  9409  					v4.AuxInt = int64ToAuxInt(int64(sdivisible64(c).a))
  9410  					v1.AddArg2(v2, v4)
  9411  					v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  9412  					v5.AuxInt = int64ToAuxInt(64 - sdivisible64(c).k)
  9413  					v0.AddArg2(v1, v5)
  9414  					v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  9415  					v6.AuxInt = int64ToAuxInt(int64(sdivisible64(c).max))
  9416  					v.AddArg2(v0, v6)
  9417  					return true
  9418  				}
  9419  			}
  9420  		}
  9421  		break
  9422  	}
  9423  	// match: (Eq64 x (Mul64 (Const64 [c]) (Sub64 (Rsh64x64 (Add64 mul:(Hmul64 (Const64 [m]) x) x) (Const64 [s])) (Rsh64x64 x (Const64 [63]))) ) )
  9424  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic64(c).m) && s == smagic64(c).s && x.Op != OpConst64 && sdivisibleOK64(c)
  9425  	// result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible64(c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible64(c).a)]) ) (Const64 <typ.UInt64> [64-sdivisible64(c).k]) ) (Const64 <typ.UInt64> [int64(sdivisible64(c).max)]) )
  9426  	for {
  9427  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9428  			x := v_0
  9429  			if v_1.Op != OpMul64 {
  9430  				continue
  9431  			}
  9432  			_ = v_1.Args[1]
  9433  			v_1_0 := v_1.Args[0]
  9434  			v_1_1 := v_1.Args[1]
  9435  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  9436  				if v_1_0.Op != OpConst64 {
  9437  					continue
  9438  				}
  9439  				c := auxIntToInt64(v_1_0.AuxInt)
  9440  				if v_1_1.Op != OpSub64 {
  9441  					continue
  9442  				}
  9443  				_ = v_1_1.Args[1]
  9444  				v_1_1_0 := v_1_1.Args[0]
  9445  				if v_1_1_0.Op != OpRsh64x64 {
  9446  					continue
  9447  				}
  9448  				_ = v_1_1_0.Args[1]
  9449  				v_1_1_0_0 := v_1_1_0.Args[0]
  9450  				if v_1_1_0_0.Op != OpAdd64 {
  9451  					continue
  9452  				}
  9453  				_ = v_1_1_0_0.Args[1]
  9454  				v_1_1_0_0_0 := v_1_1_0_0.Args[0]
  9455  				v_1_1_0_0_1 := v_1_1_0_0.Args[1]
  9456  				for _i2 := 0; _i2 <= 1; _i2, v_1_1_0_0_0, v_1_1_0_0_1 = _i2+1, v_1_1_0_0_1, v_1_1_0_0_0 {
  9457  					mul := v_1_1_0_0_0
  9458  					if mul.Op != OpHmul64 {
  9459  						continue
  9460  					}
  9461  					_ = mul.Args[1]
  9462  					mul_0 := mul.Args[0]
  9463  					mul_1 := mul.Args[1]
  9464  					for _i3 := 0; _i3 <= 1; _i3, mul_0, mul_1 = _i3+1, mul_1, mul_0 {
  9465  						if mul_0.Op != OpConst64 {
  9466  							continue
  9467  						}
  9468  						m := auxIntToInt64(mul_0.AuxInt)
  9469  						if x != mul_1 || x != v_1_1_0_0_1 {
  9470  							continue
  9471  						}
  9472  						v_1_1_0_1 := v_1_1_0.Args[1]
  9473  						if v_1_1_0_1.Op != OpConst64 {
  9474  							continue
  9475  						}
  9476  						s := auxIntToInt64(v_1_1_0_1.AuxInt)
  9477  						v_1_1_1 := v_1_1.Args[1]
  9478  						if v_1_1_1.Op != OpRsh64x64 {
  9479  							continue
  9480  						}
  9481  						_ = v_1_1_1.Args[1]
  9482  						if x != v_1_1_1.Args[0] {
  9483  							continue
  9484  						}
  9485  						v_1_1_1_1 := v_1_1_1.Args[1]
  9486  						if v_1_1_1_1.Op != OpConst64 || auxIntToInt64(v_1_1_1_1.AuxInt) != 63 || !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic64(c).m) && s == smagic64(c).s && x.Op != OpConst64 && sdivisibleOK64(c)) {
  9487  							continue
  9488  						}
  9489  						v.reset(OpLeq64U)
  9490  						v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
  9491  						v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
  9492  						v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
  9493  						v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  9494  						v3.AuxInt = int64ToAuxInt(int64(sdivisible64(c).m))
  9495  						v2.AddArg2(v3, x)
  9496  						v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  9497  						v4.AuxInt = int64ToAuxInt(int64(sdivisible64(c).a))
  9498  						v1.AddArg2(v2, v4)
  9499  						v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  9500  						v5.AuxInt = int64ToAuxInt(64 - sdivisible64(c).k)
  9501  						v0.AddArg2(v1, v5)
  9502  						v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  9503  						v6.AuxInt = int64ToAuxInt(int64(sdivisible64(c).max))
  9504  						v.AddArg2(v0, v6)
  9505  						return true
  9506  					}
  9507  				}
  9508  			}
  9509  		}
  9510  		break
  9511  	}
  9512  	// match: (Eq64 n (Lsh64x64 (Rsh64x64 (Add64 <t> n (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) )
  9513  	// cond: k > 0 && k < 63 && kbar == 64 - k
  9514  	// result: (Eq64 (And64 <t> n (Const64 <t> [1<<uint(k)-1])) (Const64 <t> [0]))
  9515  	for {
  9516  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9517  			n := v_0
  9518  			if v_1.Op != OpLsh64x64 {
  9519  				continue
  9520  			}
  9521  			_ = v_1.Args[1]
  9522  			v_1_0 := v_1.Args[0]
  9523  			if v_1_0.Op != OpRsh64x64 {
  9524  				continue
  9525  			}
  9526  			_ = v_1_0.Args[1]
  9527  			v_1_0_0 := v_1_0.Args[0]
  9528  			if v_1_0_0.Op != OpAdd64 {
  9529  				continue
  9530  			}
  9531  			t := v_1_0_0.Type
  9532  			_ = v_1_0_0.Args[1]
  9533  			v_1_0_0_0 := v_1_0_0.Args[0]
  9534  			v_1_0_0_1 := v_1_0_0.Args[1]
  9535  			for _i1 := 0; _i1 <= 1; _i1, v_1_0_0_0, v_1_0_0_1 = _i1+1, v_1_0_0_1, v_1_0_0_0 {
  9536  				if n != v_1_0_0_0 || v_1_0_0_1.Op != OpRsh64Ux64 || v_1_0_0_1.Type != t {
  9537  					continue
  9538  				}
  9539  				_ = v_1_0_0_1.Args[1]
  9540  				v_1_0_0_1_0 := v_1_0_0_1.Args[0]
  9541  				if v_1_0_0_1_0.Op != OpRsh64x64 || v_1_0_0_1_0.Type != t {
  9542  					continue
  9543  				}
  9544  				_ = v_1_0_0_1_0.Args[1]
  9545  				if n != v_1_0_0_1_0.Args[0] {
  9546  					continue
  9547  				}
  9548  				v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
  9549  				if v_1_0_0_1_0_1.Op != OpConst64 || v_1_0_0_1_0_1.Type != typ.UInt64 || auxIntToInt64(v_1_0_0_1_0_1.AuxInt) != 63 {
  9550  					continue
  9551  				}
  9552  				v_1_0_0_1_1 := v_1_0_0_1.Args[1]
  9553  				if v_1_0_0_1_1.Op != OpConst64 || v_1_0_0_1_1.Type != typ.UInt64 {
  9554  					continue
  9555  				}
  9556  				kbar := auxIntToInt64(v_1_0_0_1_1.AuxInt)
  9557  				v_1_0_1 := v_1_0.Args[1]
  9558  				if v_1_0_1.Op != OpConst64 || v_1_0_1.Type != typ.UInt64 {
  9559  					continue
  9560  				}
  9561  				k := auxIntToInt64(v_1_0_1.AuxInt)
  9562  				v_1_1 := v_1.Args[1]
  9563  				if v_1_1.Op != OpConst64 || v_1_1.Type != typ.UInt64 || auxIntToInt64(v_1_1.AuxInt) != k || !(k > 0 && k < 63 && kbar == 64-k) {
  9564  					continue
  9565  				}
  9566  				v.reset(OpEq64)
  9567  				v0 := b.NewValue0(v.Pos, OpAnd64, t)
  9568  				v1 := b.NewValue0(v.Pos, OpConst64, t)
  9569  				v1.AuxInt = int64ToAuxInt(1<<uint(k) - 1)
  9570  				v0.AddArg2(n, v1)
  9571  				v2 := b.NewValue0(v.Pos, OpConst64, t)
  9572  				v2.AuxInt = int64ToAuxInt(0)
  9573  				v.AddArg2(v0, v2)
  9574  				return true
  9575  			}
  9576  		}
  9577  		break
  9578  	}
  9579  	// match: (Eq64 s:(Sub64 x y) (Const64 [0]))
  9580  	// cond: s.Uses == 1
  9581  	// result: (Eq64 x y)
  9582  	for {
  9583  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9584  			s := v_0
  9585  			if s.Op != OpSub64 {
  9586  				continue
  9587  			}
  9588  			y := s.Args[1]
  9589  			x := s.Args[0]
  9590  			if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 || !(s.Uses == 1) {
  9591  				continue
  9592  			}
  9593  			v.reset(OpEq64)
  9594  			v.AddArg2(x, y)
  9595  			return true
  9596  		}
  9597  		break
  9598  	}
  9599  	// match: (Eq64 (And64 <t> x (Const64 <t> [y])) (Const64 <t> [y]))
  9600  	// cond: oneBit64(y)
  9601  	// result: (Neq64 (And64 <t> x (Const64 <t> [y])) (Const64 <t> [0]))
  9602  	for {
  9603  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9604  			if v_0.Op != OpAnd64 {
  9605  				continue
  9606  			}
  9607  			t := v_0.Type
  9608  			_ = v_0.Args[1]
  9609  			v_0_0 := v_0.Args[0]
  9610  			v_0_1 := v_0.Args[1]
  9611  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
  9612  				x := v_0_0
  9613  				if v_0_1.Op != OpConst64 || v_0_1.Type != t {
  9614  					continue
  9615  				}
  9616  				y := auxIntToInt64(v_0_1.AuxInt)
  9617  				if v_1.Op != OpConst64 || v_1.Type != t || auxIntToInt64(v_1.AuxInt) != y || !(oneBit64(y)) {
  9618  					continue
  9619  				}
  9620  				v.reset(OpNeq64)
  9621  				v0 := b.NewValue0(v.Pos, OpAnd64, t)
  9622  				v1 := b.NewValue0(v.Pos, OpConst64, t)
  9623  				v1.AuxInt = int64ToAuxInt(y)
  9624  				v0.AddArg2(x, v1)
  9625  				v2 := b.NewValue0(v.Pos, OpConst64, t)
  9626  				v2.AuxInt = int64ToAuxInt(0)
  9627  				v.AddArg2(v0, v2)
  9628  				return true
  9629  			}
  9630  		}
  9631  		break
  9632  	}
  9633  	return false
  9634  }
  9635  func rewriteValuegeneric_OpEq64F(v *Value) bool {
  9636  	v_1 := v.Args[1]
  9637  	v_0 := v.Args[0]
  9638  	// match: (Eq64F (Const64F [c]) (Const64F [d]))
  9639  	// result: (ConstBool [c == d])
  9640  	for {
  9641  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9642  			if v_0.Op != OpConst64F {
  9643  				continue
  9644  			}
  9645  			c := auxIntToFloat64(v_0.AuxInt)
  9646  			if v_1.Op != OpConst64F {
  9647  				continue
  9648  			}
  9649  			d := auxIntToFloat64(v_1.AuxInt)
  9650  			v.reset(OpConstBool)
  9651  			v.AuxInt = boolToAuxInt(c == d)
  9652  			return true
  9653  		}
  9654  		break
  9655  	}
  9656  	return false
  9657  }
  9658  func rewriteValuegeneric_OpEq8(v *Value) bool {
  9659  	v_1 := v.Args[1]
  9660  	v_0 := v.Args[0]
  9661  	b := v.Block
  9662  	config := b.Func.Config
  9663  	typ := &b.Func.Config.Types
  9664  	// match: (Eq8 x x)
  9665  	// result: (ConstBool [true])
  9666  	for {
  9667  		x := v_0
  9668  		if x != v_1 {
  9669  			break
  9670  		}
  9671  		v.reset(OpConstBool)
  9672  		v.AuxInt = boolToAuxInt(true)
  9673  		return true
  9674  	}
  9675  	// match: (Eq8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x))
  9676  	// result: (Eq8 (Const8 <t> [c-d]) x)
  9677  	for {
  9678  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9679  			if v_0.Op != OpConst8 {
  9680  				continue
  9681  			}
  9682  			t := v_0.Type
  9683  			c := auxIntToInt8(v_0.AuxInt)
  9684  			if v_1.Op != OpAdd8 {
  9685  				continue
  9686  			}
  9687  			_ = v_1.Args[1]
  9688  			v_1_0 := v_1.Args[0]
  9689  			v_1_1 := v_1.Args[1]
  9690  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  9691  				if v_1_0.Op != OpConst8 || v_1_0.Type != t {
  9692  					continue
  9693  				}
  9694  				d := auxIntToInt8(v_1_0.AuxInt)
  9695  				x := v_1_1
  9696  				v.reset(OpEq8)
  9697  				v0 := b.NewValue0(v.Pos, OpConst8, t)
  9698  				v0.AuxInt = int8ToAuxInt(c - d)
  9699  				v.AddArg2(v0, x)
  9700  				return true
  9701  			}
  9702  		}
  9703  		break
  9704  	}
  9705  	// match: (Eq8 (Const8 [c]) (Const8 [d]))
  9706  	// result: (ConstBool [c == d])
  9707  	for {
  9708  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9709  			if v_0.Op != OpConst8 {
  9710  				continue
  9711  			}
  9712  			c := auxIntToInt8(v_0.AuxInt)
  9713  			if v_1.Op != OpConst8 {
  9714  				continue
  9715  			}
  9716  			d := auxIntToInt8(v_1.AuxInt)
  9717  			v.reset(OpConstBool)
  9718  			v.AuxInt = boolToAuxInt(c == d)
  9719  			return true
  9720  		}
  9721  		break
  9722  	}
  9723  	// match: (Eq8 (Mod8u x (Const8 [c])) (Const8 [0]))
  9724  	// cond: x.Op != OpConst8 && udivisibleOK8(c) && !hasSmallRotate(config)
  9725  	// result: (Eq32 (Mod32u <typ.UInt32> (ZeroExt8to32 <typ.UInt32> x) (Const32 <typ.UInt32> [int32(uint8(c))])) (Const32 <typ.UInt32> [0]))
  9726  	for {
  9727  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9728  			if v_0.Op != OpMod8u {
  9729  				continue
  9730  			}
  9731  			_ = v_0.Args[1]
  9732  			x := v_0.Args[0]
  9733  			v_0_1 := v_0.Args[1]
  9734  			if v_0_1.Op != OpConst8 {
  9735  				continue
  9736  			}
  9737  			c := auxIntToInt8(v_0_1.AuxInt)
  9738  			if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != 0 || !(x.Op != OpConst8 && udivisibleOK8(c) && !hasSmallRotate(config)) {
  9739  				continue
  9740  			}
  9741  			v.reset(OpEq32)
  9742  			v0 := b.NewValue0(v.Pos, OpMod32u, typ.UInt32)
  9743  			v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  9744  			v1.AddArg(x)
  9745  			v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  9746  			v2.AuxInt = int32ToAuxInt(int32(uint8(c)))
  9747  			v0.AddArg2(v1, v2)
  9748  			v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  9749  			v3.AuxInt = int32ToAuxInt(0)
  9750  			v.AddArg2(v0, v3)
  9751  			return true
  9752  		}
  9753  		break
  9754  	}
  9755  	// match: (Eq8 (Mod8 x (Const8 [c])) (Const8 [0]))
  9756  	// cond: x.Op != OpConst8 && sdivisibleOK8(c) && !hasSmallRotate(config)
  9757  	// result: (Eq32 (Mod32 <typ.Int32> (SignExt8to32 <typ.Int32> x) (Const32 <typ.Int32> [int32(c)])) (Const32 <typ.Int32> [0]))
  9758  	for {
  9759  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9760  			if v_0.Op != OpMod8 {
  9761  				continue
  9762  			}
  9763  			_ = v_0.Args[1]
  9764  			x := v_0.Args[0]
  9765  			v_0_1 := v_0.Args[1]
  9766  			if v_0_1.Op != OpConst8 {
  9767  				continue
  9768  			}
  9769  			c := auxIntToInt8(v_0_1.AuxInt)
  9770  			if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != 0 || !(x.Op != OpConst8 && sdivisibleOK8(c) && !hasSmallRotate(config)) {
  9771  				continue
  9772  			}
  9773  			v.reset(OpEq32)
  9774  			v0 := b.NewValue0(v.Pos, OpMod32, typ.Int32)
  9775  			v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  9776  			v1.AddArg(x)
  9777  			v2 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
  9778  			v2.AuxInt = int32ToAuxInt(int32(c))
  9779  			v0.AddArg2(v1, v2)
  9780  			v3 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
  9781  			v3.AuxInt = int32ToAuxInt(0)
  9782  			v.AddArg2(v0, v3)
  9783  			return true
  9784  		}
  9785  		break
  9786  	}
  9787  	// match: (Eq8 x (Mul8 (Const8 [c]) (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt8to32 x)) (Const64 [s]))) ) )
  9788  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<8+umagic8(c).m) && s == 8+umagic8(c).s && x.Op != OpConst8 && udivisibleOK8(c)
  9789  	// result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int8(udivisible8(c).m)]) x) (Const8 <typ.UInt8> [int8(8-udivisible8(c).k)]) ) (Const8 <typ.UInt8> [int8(udivisible8(c).max)]) )
  9790  	for {
  9791  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9792  			x := v_0
  9793  			if v_1.Op != OpMul8 {
  9794  				continue
  9795  			}
  9796  			_ = v_1.Args[1]
  9797  			v_1_0 := v_1.Args[0]
  9798  			v_1_1 := v_1.Args[1]
  9799  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  9800  				if v_1_0.Op != OpConst8 {
  9801  					continue
  9802  				}
  9803  				c := auxIntToInt8(v_1_0.AuxInt)
  9804  				if v_1_1.Op != OpTrunc32to8 {
  9805  					continue
  9806  				}
  9807  				v_1_1_0 := v_1_1.Args[0]
  9808  				if v_1_1_0.Op != OpRsh32Ux64 {
  9809  					continue
  9810  				}
  9811  				_ = v_1_1_0.Args[1]
  9812  				mul := v_1_1_0.Args[0]
  9813  				if mul.Op != OpMul32 {
  9814  					continue
  9815  				}
  9816  				_ = mul.Args[1]
  9817  				mul_0 := mul.Args[0]
  9818  				mul_1 := mul.Args[1]
  9819  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  9820  					if mul_0.Op != OpConst32 {
  9821  						continue
  9822  					}
  9823  					m := auxIntToInt32(mul_0.AuxInt)
  9824  					if mul_1.Op != OpZeroExt8to32 || x != mul_1.Args[0] {
  9825  						continue
  9826  					}
  9827  					v_1_1_0_1 := v_1_1_0.Args[1]
  9828  					if v_1_1_0_1.Op != OpConst64 {
  9829  						continue
  9830  					}
  9831  					s := auxIntToInt64(v_1_1_0_1.AuxInt)
  9832  					if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(1<<8+umagic8(c).m) && s == 8+umagic8(c).s && x.Op != OpConst8 && udivisibleOK8(c)) {
  9833  						continue
  9834  					}
  9835  					v.reset(OpLeq8U)
  9836  					v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
  9837  					v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
  9838  					v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
  9839  					v2.AuxInt = int8ToAuxInt(int8(udivisible8(c).m))
  9840  					v1.AddArg2(v2, x)
  9841  					v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
  9842  					v3.AuxInt = int8ToAuxInt(int8(8 - udivisible8(c).k))
  9843  					v0.AddArg2(v1, v3)
  9844  					v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
  9845  					v4.AuxInt = int8ToAuxInt(int8(udivisible8(c).max))
  9846  					v.AddArg2(v0, v4)
  9847  					return true
  9848  				}
  9849  			}
  9850  		}
  9851  		break
  9852  	}
  9853  	// match: (Eq8 x (Mul8 (Const8 [c]) (Sub8 (Rsh32x64 mul:(Mul32 (Const32 [m]) (SignExt8to32 x)) (Const64 [s])) (Rsh32x64 (SignExt8to32 x) (Const64 [31]))) ) )
  9854  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(smagic8(c).m) && s == 8+smagic8(c).s && x.Op != OpConst8 && sdivisibleOK8(c)
  9855  	// result: (Leq8U (RotateLeft8 <typ.UInt8> (Add8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int8(sdivisible8(c).m)]) x) (Const8 <typ.UInt8> [int8(sdivisible8(c).a)]) ) (Const8 <typ.UInt8> [int8(8-sdivisible8(c).k)]) ) (Const8 <typ.UInt8> [int8(sdivisible8(c).max)]) )
  9856  	for {
  9857  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9858  			x := v_0
  9859  			if v_1.Op != OpMul8 {
  9860  				continue
  9861  			}
  9862  			_ = v_1.Args[1]
  9863  			v_1_0 := v_1.Args[0]
  9864  			v_1_1 := v_1.Args[1]
  9865  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
  9866  				if v_1_0.Op != OpConst8 {
  9867  					continue
  9868  				}
  9869  				c := auxIntToInt8(v_1_0.AuxInt)
  9870  				if v_1_1.Op != OpSub8 {
  9871  					continue
  9872  				}
  9873  				_ = v_1_1.Args[1]
  9874  				v_1_1_0 := v_1_1.Args[0]
  9875  				if v_1_1_0.Op != OpRsh32x64 {
  9876  					continue
  9877  				}
  9878  				_ = v_1_1_0.Args[1]
  9879  				mul := v_1_1_0.Args[0]
  9880  				if mul.Op != OpMul32 {
  9881  					continue
  9882  				}
  9883  				_ = mul.Args[1]
  9884  				mul_0 := mul.Args[0]
  9885  				mul_1 := mul.Args[1]
  9886  				for _i2 := 0; _i2 <= 1; _i2, mul_0, mul_1 = _i2+1, mul_1, mul_0 {
  9887  					if mul_0.Op != OpConst32 {
  9888  						continue
  9889  					}
  9890  					m := auxIntToInt32(mul_0.AuxInt)
  9891  					if mul_1.Op != OpSignExt8to32 || x != mul_1.Args[0] {
  9892  						continue
  9893  					}
  9894  					v_1_1_0_1 := v_1_1_0.Args[1]
  9895  					if v_1_1_0_1.Op != OpConst64 {
  9896  						continue
  9897  					}
  9898  					s := auxIntToInt64(v_1_1_0_1.AuxInt)
  9899  					v_1_1_1 := v_1_1.Args[1]
  9900  					if v_1_1_1.Op != OpRsh32x64 {
  9901  						continue
  9902  					}
  9903  					_ = v_1_1_1.Args[1]
  9904  					v_1_1_1_0 := v_1_1_1.Args[0]
  9905  					if v_1_1_1_0.Op != OpSignExt8to32 || x != v_1_1_1_0.Args[0] {
  9906  						continue
  9907  					}
  9908  					v_1_1_1_1 := v_1_1_1.Args[1]
  9909  					if v_1_1_1_1.Op != OpConst64 || auxIntToInt64(v_1_1_1_1.AuxInt) != 31 || !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int32(smagic8(c).m) && s == 8+smagic8(c).s && x.Op != OpConst8 && sdivisibleOK8(c)) {
  9910  						continue
  9911  					}
  9912  					v.reset(OpLeq8U)
  9913  					v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
  9914  					v1 := b.NewValue0(v.Pos, OpAdd8, typ.UInt8)
  9915  					v2 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
  9916  					v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
  9917  					v3.AuxInt = int8ToAuxInt(int8(sdivisible8(c).m))
  9918  					v2.AddArg2(v3, x)
  9919  					v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
  9920  					v4.AuxInt = int8ToAuxInt(int8(sdivisible8(c).a))
  9921  					v1.AddArg2(v2, v4)
  9922  					v5 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
  9923  					v5.AuxInt = int8ToAuxInt(int8(8 - sdivisible8(c).k))
  9924  					v0.AddArg2(v1, v5)
  9925  					v6 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
  9926  					v6.AuxInt = int8ToAuxInt(int8(sdivisible8(c).max))
  9927  					v.AddArg2(v0, v6)
  9928  					return true
  9929  				}
  9930  			}
  9931  		}
  9932  		break
  9933  	}
  9934  	// match: (Eq8 n (Lsh8x64 (Rsh8x64 (Add8 <t> n (Rsh8Ux64 <t> (Rsh8x64 <t> n (Const64 <typ.UInt64> [ 7])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) )
  9935  	// cond: k > 0 && k < 7 && kbar == 8 - k
  9936  	// result: (Eq8 (And8 <t> n (Const8 <t> [1<<uint(k)-1])) (Const8 <t> [0]))
  9937  	for {
  9938  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
  9939  			n := v_0
  9940  			if v_1.Op != OpLsh8x64 {
  9941  				continue
  9942  			}
  9943  			_ = v_1.Args[1]
  9944  			v_1_0 := v_1.Args[0]
  9945  			if v_1_0.Op != OpRsh8x64 {
  9946  				continue
  9947  			}
  9948  			_ = v_1_0.Args[1]
  9949  			v_1_0_0 := v_1_0.Args[0]
  9950  			if v_1_0_0.Op != OpAdd8 {
  9951  				continue
  9952  			}
  9953  			t := v_1_0_0.Type
  9954  			_ = v_1_0_0.Args[1]
  9955  			v_1_0_0_0 := v_1_0_0.Args[0]
  9956  			v_1_0_0_1 := v_1_0_0.Args[1]
  9957  			for _i1 := 0; _i1 <= 1; _i1, v_1_0_0_0, v_1_0_0_1 = _i1+1, v_1_0_0_1, v_1_0_0_0 {
  9958  				if n != v_1_0_0_0 || v_1_0_0_1.Op != OpRsh8Ux64 || v_1_0_0_1.Type != t {
  9959  					continue
  9960  				}
  9961  				_ = v_1_0_0_1.Args[1]
  9962  				v_1_0_0_1_0 := v_1_0_0_1.Args[0]
  9963  				if v_1_0_0_1_0.Op != OpRsh8x64 || v_1_0_0_1_0.Type != t {
  9964  					continue
  9965  				}
  9966  				_ = v_1_0_0_1_0.Args[1]
  9967  				if n != v_1_0_0_1_0.Args[0] {
  9968  					continue
  9969  				}
  9970  				v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
  9971  				if v_1_0_0_1_0_1.Op != OpConst64 || v_1_0_0_1_0_1.Type != typ.UInt64 || auxIntToInt64(v_1_0_0_1_0_1.AuxInt) != 7 {
  9972  					continue
  9973  				}
  9974  				v_1_0_0_1_1 := v_1_0_0_1.Args[1]
  9975  				if v_1_0_0_1_1.Op != OpConst64 || v_1_0_0_1_1.Type != typ.UInt64 {
  9976  					continue
  9977  				}
  9978  				kbar := auxIntToInt64(v_1_0_0_1_1.AuxInt)
  9979  				v_1_0_1 := v_1_0.Args[1]
  9980  				if v_1_0_1.Op != OpConst64 || v_1_0_1.Type != typ.UInt64 {
  9981  					continue
  9982  				}
  9983  				k := auxIntToInt64(v_1_0_1.AuxInt)
  9984  				v_1_1 := v_1.Args[1]
  9985  				if v_1_1.Op != OpConst64 || v_1_1.Type != typ.UInt64 || auxIntToInt64(v_1_1.AuxInt) != k || !(k > 0 && k < 7 && kbar == 8-k) {
  9986  					continue
  9987  				}
  9988  				v.reset(OpEq8)
  9989  				v0 := b.NewValue0(v.Pos, OpAnd8, t)
  9990  				v1 := b.NewValue0(v.Pos, OpConst8, t)
  9991  				v1.AuxInt = int8ToAuxInt(1<<uint(k) - 1)
  9992  				v0.AddArg2(n, v1)
  9993  				v2 := b.NewValue0(v.Pos, OpConst8, t)
  9994  				v2.AuxInt = int8ToAuxInt(0)
  9995  				v.AddArg2(v0, v2)
  9996  				return true
  9997  			}
  9998  		}
  9999  		break
 10000  	}
 10001  	// match: (Eq8 s:(Sub8 x y) (Const8 [0]))
 10002  	// cond: s.Uses == 1
 10003  	// result: (Eq8 x y)
 10004  	for {
 10005  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 10006  			s := v_0
 10007  			if s.Op != OpSub8 {
 10008  				continue
 10009  			}
 10010  			y := s.Args[1]
 10011  			x := s.Args[0]
 10012  			if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != 0 || !(s.Uses == 1) {
 10013  				continue
 10014  			}
 10015  			v.reset(OpEq8)
 10016  			v.AddArg2(x, y)
 10017  			return true
 10018  		}
 10019  		break
 10020  	}
 10021  	// match: (Eq8 (And8 <t> x (Const8 <t> [y])) (Const8 <t> [y]))
 10022  	// cond: oneBit8(y)
 10023  	// result: (Neq8 (And8 <t> x (Const8 <t> [y])) (Const8 <t> [0]))
 10024  	for {
 10025  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 10026  			if v_0.Op != OpAnd8 {
 10027  				continue
 10028  			}
 10029  			t := v_0.Type
 10030  			_ = v_0.Args[1]
 10031  			v_0_0 := v_0.Args[0]
 10032  			v_0_1 := v_0.Args[1]
 10033  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 10034  				x := v_0_0
 10035  				if v_0_1.Op != OpConst8 || v_0_1.Type != t {
 10036  					continue
 10037  				}
 10038  				y := auxIntToInt8(v_0_1.AuxInt)
 10039  				if v_1.Op != OpConst8 || v_1.Type != t || auxIntToInt8(v_1.AuxInt) != y || !(oneBit8(y)) {
 10040  					continue
 10041  				}
 10042  				v.reset(OpNeq8)
 10043  				v0 := b.NewValue0(v.Pos, OpAnd8, t)
 10044  				v1 := b.NewValue0(v.Pos, OpConst8, t)
 10045  				v1.AuxInt = int8ToAuxInt(y)
 10046  				v0.AddArg2(x, v1)
 10047  				v2 := b.NewValue0(v.Pos, OpConst8, t)
 10048  				v2.AuxInt = int8ToAuxInt(0)
 10049  				v.AddArg2(v0, v2)
 10050  				return true
 10051  			}
 10052  		}
 10053  		break
 10054  	}
 10055  	return false
 10056  }
 10057  func rewriteValuegeneric_OpEqB(v *Value) bool {
 10058  	v_1 := v.Args[1]
 10059  	v_0 := v.Args[0]
 10060  	// match: (EqB (ConstBool [c]) (ConstBool [d]))
 10061  	// result: (ConstBool [c == d])
 10062  	for {
 10063  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 10064  			if v_0.Op != OpConstBool {
 10065  				continue
 10066  			}
 10067  			c := auxIntToBool(v_0.AuxInt)
 10068  			if v_1.Op != OpConstBool {
 10069  				continue
 10070  			}
 10071  			d := auxIntToBool(v_1.AuxInt)
 10072  			v.reset(OpConstBool)
 10073  			v.AuxInt = boolToAuxInt(c == d)
 10074  			return true
 10075  		}
 10076  		break
 10077  	}
 10078  	// match: (EqB (ConstBool [false]) x)
 10079  	// result: (Not x)
 10080  	for {
 10081  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 10082  			if v_0.Op != OpConstBool || auxIntToBool(v_0.AuxInt) != false {
 10083  				continue
 10084  			}
 10085  			x := v_1
 10086  			v.reset(OpNot)
 10087  			v.AddArg(x)
 10088  			return true
 10089  		}
 10090  		break
 10091  	}
 10092  	// match: (EqB (ConstBool [true]) x)
 10093  	// result: x
 10094  	for {
 10095  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 10096  			if v_0.Op != OpConstBool || auxIntToBool(v_0.AuxInt) != true {
 10097  				continue
 10098  			}
 10099  			x := v_1
 10100  			v.copyOf(x)
 10101  			return true
 10102  		}
 10103  		break
 10104  	}
 10105  	return false
 10106  }
 10107  func rewriteValuegeneric_OpEqInter(v *Value) bool {
 10108  	v_1 := v.Args[1]
 10109  	v_0 := v.Args[0]
 10110  	b := v.Block
 10111  	typ := &b.Func.Config.Types
 10112  	// match: (EqInter x y)
 10113  	// result: (EqPtr (ITab x) (ITab y))
 10114  	for {
 10115  		x := v_0
 10116  		y := v_1
 10117  		v.reset(OpEqPtr)
 10118  		v0 := b.NewValue0(v.Pos, OpITab, typ.Uintptr)
 10119  		v0.AddArg(x)
 10120  		v1 := b.NewValue0(v.Pos, OpITab, typ.Uintptr)
 10121  		v1.AddArg(y)
 10122  		v.AddArg2(v0, v1)
 10123  		return true
 10124  	}
 10125  }
 10126  func rewriteValuegeneric_OpEqPtr(v *Value) bool {
 10127  	v_1 := v.Args[1]
 10128  	v_0 := v.Args[0]
 10129  	b := v.Block
 10130  	typ := &b.Func.Config.Types
 10131  	// match: (EqPtr x x)
 10132  	// result: (ConstBool [true])
 10133  	for {
 10134  		x := v_0
 10135  		if x != v_1 {
 10136  			break
 10137  		}
 10138  		v.reset(OpConstBool)
 10139  		v.AuxInt = boolToAuxInt(true)
 10140  		return true
 10141  	}
 10142  	// match: (EqPtr (Addr {x} _) (Addr {y} _))
 10143  	// result: (ConstBool [x == y])
 10144  	for {
 10145  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 10146  			if v_0.Op != OpAddr {
 10147  				continue
 10148  			}
 10149  			x := auxToSym(v_0.Aux)
 10150  			if v_1.Op != OpAddr {
 10151  				continue
 10152  			}
 10153  			y := auxToSym(v_1.Aux)
 10154  			v.reset(OpConstBool)
 10155  			v.AuxInt = boolToAuxInt(x == y)
 10156  			return true
 10157  		}
 10158  		break
 10159  	}
 10160  	// match: (EqPtr (Addr {x} _) (OffPtr [o] (Addr {y} _)))
 10161  	// result: (ConstBool [x == y && o == 0])
 10162  	for {
 10163  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 10164  			if v_0.Op != OpAddr {
 10165  				continue
 10166  			}
 10167  			x := auxToSym(v_0.Aux)
 10168  			if v_1.Op != OpOffPtr {
 10169  				continue
 10170  			}
 10171  			o := auxIntToInt64(v_1.AuxInt)
 10172  			v_1_0 := v_1.Args[0]
 10173  			if v_1_0.Op != OpAddr {
 10174  				continue
 10175  			}
 10176  			y := auxToSym(v_1_0.Aux)
 10177  			v.reset(OpConstBool)
 10178  			v.AuxInt = boolToAuxInt(x == y && o == 0)
 10179  			return true
 10180  		}
 10181  		break
 10182  	}
 10183  	// match: (EqPtr (OffPtr [o1] (Addr {x} _)) (OffPtr [o2] (Addr {y} _)))
 10184  	// result: (ConstBool [x == y && o1 == o2])
 10185  	for {
 10186  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 10187  			if v_0.Op != OpOffPtr {
 10188  				continue
 10189  			}
 10190  			o1 := auxIntToInt64(v_0.AuxInt)
 10191  			v_0_0 := v_0.Args[0]
 10192  			if v_0_0.Op != OpAddr {
 10193  				continue
 10194  			}
 10195  			x := auxToSym(v_0_0.Aux)
 10196  			if v_1.Op != OpOffPtr {
 10197  				continue
 10198  			}
 10199  			o2 := auxIntToInt64(v_1.AuxInt)
 10200  			v_1_0 := v_1.Args[0]
 10201  			if v_1_0.Op != OpAddr {
 10202  				continue
 10203  			}
 10204  			y := auxToSym(v_1_0.Aux)
 10205  			v.reset(OpConstBool)
 10206  			v.AuxInt = boolToAuxInt(x == y && o1 == o2)
 10207  			return true
 10208  		}
 10209  		break
 10210  	}
 10211  	// match: (EqPtr (LocalAddr {x} _ _) (LocalAddr {y} _ _))
 10212  	// result: (ConstBool [x == y])
 10213  	for {
 10214  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 10215  			if v_0.Op != OpLocalAddr {
 10216  				continue
 10217  			}
 10218  			x := auxToSym(v_0.Aux)
 10219  			if v_1.Op != OpLocalAddr {
 10220  				continue
 10221  			}
 10222  			y := auxToSym(v_1.Aux)
 10223  			v.reset(OpConstBool)
 10224  			v.AuxInt = boolToAuxInt(x == y)
 10225  			return true
 10226  		}
 10227  		break
 10228  	}
 10229  	// match: (EqPtr (LocalAddr {x} _ _) (OffPtr [o] (LocalAddr {y} _ _)))
 10230  	// result: (ConstBool [x == y && o == 0])
 10231  	for {
 10232  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 10233  			if v_0.Op != OpLocalAddr {
 10234  				continue
 10235  			}
 10236  			x := auxToSym(v_0.Aux)
 10237  			if v_1.Op != OpOffPtr {
 10238  				continue
 10239  			}
 10240  			o := auxIntToInt64(v_1.AuxInt)
 10241  			v_1_0 := v_1.Args[0]
 10242  			if v_1_0.Op != OpLocalAddr {
 10243  				continue
 10244  			}
 10245  			y := auxToSym(v_1_0.Aux)
 10246  			v.reset(OpConstBool)
 10247  			v.AuxInt = boolToAuxInt(x == y && o == 0)
 10248  			return true
 10249  		}
 10250  		break
 10251  	}
 10252  	// match: (EqPtr (OffPtr [o1] (LocalAddr {x} _ _)) (OffPtr [o2] (LocalAddr {y} _ _)))
 10253  	// result: (ConstBool [x == y && o1 == o2])
 10254  	for {
 10255  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 10256  			if v_0.Op != OpOffPtr {
 10257  				continue
 10258  			}
 10259  			o1 := auxIntToInt64(v_0.AuxInt)
 10260  			v_0_0 := v_0.Args[0]
 10261  			if v_0_0.Op != OpLocalAddr {
 10262  				continue
 10263  			}
 10264  			x := auxToSym(v_0_0.Aux)
 10265  			if v_1.Op != OpOffPtr {
 10266  				continue
 10267  			}
 10268  			o2 := auxIntToInt64(v_1.AuxInt)
 10269  			v_1_0 := v_1.Args[0]
 10270  			if v_1_0.Op != OpLocalAddr {
 10271  				continue
 10272  			}
 10273  			y := auxToSym(v_1_0.Aux)
 10274  			v.reset(OpConstBool)
 10275  			v.AuxInt = boolToAuxInt(x == y && o1 == o2)
 10276  			return true
 10277  		}
 10278  		break
 10279  	}
 10280  	// match: (EqPtr (OffPtr [o1] p1) p2)
 10281  	// cond: isSamePtr(p1, p2)
 10282  	// result: (ConstBool [o1 == 0])
 10283  	for {
 10284  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 10285  			if v_0.Op != OpOffPtr {
 10286  				continue
 10287  			}
 10288  			o1 := auxIntToInt64(v_0.AuxInt)
 10289  			p1 := v_0.Args[0]
 10290  			p2 := v_1
 10291  			if !(isSamePtr(p1, p2)) {
 10292  				continue
 10293  			}
 10294  			v.reset(OpConstBool)
 10295  			v.AuxInt = boolToAuxInt(o1 == 0)
 10296  			return true
 10297  		}
 10298  		break
 10299  	}
 10300  	// match: (EqPtr (OffPtr [o1] p1) (OffPtr [o2] p2))
 10301  	// cond: isSamePtr(p1, p2)
 10302  	// result: (ConstBool [o1 == o2])
 10303  	for {
 10304  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 10305  			if v_0.Op != OpOffPtr {
 10306  				continue
 10307  			}
 10308  			o1 := auxIntToInt64(v_0.AuxInt)
 10309  			p1 := v_0.Args[0]
 10310  			if v_1.Op != OpOffPtr {
 10311  				continue
 10312  			}
 10313  			o2 := auxIntToInt64(v_1.AuxInt)
 10314  			p2 := v_1.Args[0]
 10315  			if !(isSamePtr(p1, p2)) {
 10316  				continue
 10317  			}
 10318  			v.reset(OpConstBool)
 10319  			v.AuxInt = boolToAuxInt(o1 == o2)
 10320  			return true
 10321  		}
 10322  		break
 10323  	}
 10324  	// match: (EqPtr (Const32 [c]) (Const32 [d]))
 10325  	// result: (ConstBool [c == d])
 10326  	for {
 10327  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 10328  			if v_0.Op != OpConst32 {
 10329  				continue
 10330  			}
 10331  			c := auxIntToInt32(v_0.AuxInt)
 10332  			if v_1.Op != OpConst32 {
 10333  				continue
 10334  			}
 10335  			d := auxIntToInt32(v_1.AuxInt)
 10336  			v.reset(OpConstBool)
 10337  			v.AuxInt = boolToAuxInt(c == d)
 10338  			return true
 10339  		}
 10340  		break
 10341  	}
 10342  	// match: (EqPtr (Const64 [c]) (Const64 [d]))
 10343  	// result: (ConstBool [c == d])
 10344  	for {
 10345  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 10346  			if v_0.Op != OpConst64 {
 10347  				continue
 10348  			}
 10349  			c := auxIntToInt64(v_0.AuxInt)
 10350  			if v_1.Op != OpConst64 {
 10351  				continue
 10352  			}
 10353  			d := auxIntToInt64(v_1.AuxInt)
 10354  			v.reset(OpConstBool)
 10355  			v.AuxInt = boolToAuxInt(c == d)
 10356  			return true
 10357  		}
 10358  		break
 10359  	}
 10360  	// match: (EqPtr (Convert (Addr {x} _) _) (Addr {y} _))
 10361  	// result: (ConstBool [x==y])
 10362  	for {
 10363  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 10364  			if v_0.Op != OpConvert {
 10365  				continue
 10366  			}
 10367  			v_0_0 := v_0.Args[0]
 10368  			if v_0_0.Op != OpAddr {
 10369  				continue
 10370  			}
 10371  			x := auxToSym(v_0_0.Aux)
 10372  			if v_1.Op != OpAddr {
 10373  				continue
 10374  			}
 10375  			y := auxToSym(v_1.Aux)
 10376  			v.reset(OpConstBool)
 10377  			v.AuxInt = boolToAuxInt(x == y)
 10378  			return true
 10379  		}
 10380  		break
 10381  	}
 10382  	// match: (EqPtr (LocalAddr _ _) (Addr _))
 10383  	// result: (ConstBool [false])
 10384  	for {
 10385  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 10386  			if v_0.Op != OpLocalAddr || v_1.Op != OpAddr {
 10387  				continue
 10388  			}
 10389  			v.reset(OpConstBool)
 10390  			v.AuxInt = boolToAuxInt(false)
 10391  			return true
 10392  		}
 10393  		break
 10394  	}
 10395  	// match: (EqPtr (OffPtr (LocalAddr _ _)) (Addr _))
 10396  	// result: (ConstBool [false])
 10397  	for {
 10398  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 10399  			if v_0.Op != OpOffPtr {
 10400  				continue
 10401  			}
 10402  			v_0_0 := v_0.Args[0]
 10403  			if v_0_0.Op != OpLocalAddr || v_1.Op != OpAddr {
 10404  				continue
 10405  			}
 10406  			v.reset(OpConstBool)
 10407  			v.AuxInt = boolToAuxInt(false)
 10408  			return true
 10409  		}
 10410  		break
 10411  	}
 10412  	// match: (EqPtr (LocalAddr _ _) (OffPtr (Addr _)))
 10413  	// result: (ConstBool [false])
 10414  	for {
 10415  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 10416  			if v_0.Op != OpLocalAddr || v_1.Op != OpOffPtr {
 10417  				continue
 10418  			}
 10419  			v_1_0 := v_1.Args[0]
 10420  			if v_1_0.Op != OpAddr {
 10421  				continue
 10422  			}
 10423  			v.reset(OpConstBool)
 10424  			v.AuxInt = boolToAuxInt(false)
 10425  			return true
 10426  		}
 10427  		break
 10428  	}
 10429  	// match: (EqPtr (OffPtr (LocalAddr _ _)) (OffPtr (Addr _)))
 10430  	// result: (ConstBool [false])
 10431  	for {
 10432  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 10433  			if v_0.Op != OpOffPtr {
 10434  				continue
 10435  			}
 10436  			v_0_0 := v_0.Args[0]
 10437  			if v_0_0.Op != OpLocalAddr || v_1.Op != OpOffPtr {
 10438  				continue
 10439  			}
 10440  			v_1_0 := v_1.Args[0]
 10441  			if v_1_0.Op != OpAddr {
 10442  				continue
 10443  			}
 10444  			v.reset(OpConstBool)
 10445  			v.AuxInt = boolToAuxInt(false)
 10446  			return true
 10447  		}
 10448  		break
 10449  	}
 10450  	// match: (EqPtr (AddPtr p1 o1) p2)
 10451  	// cond: isSamePtr(p1, p2)
 10452  	// result: (Not (IsNonNil o1))
 10453  	for {
 10454  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 10455  			if v_0.Op != OpAddPtr {
 10456  				continue
 10457  			}
 10458  			o1 := v_0.Args[1]
 10459  			p1 := v_0.Args[0]
 10460  			p2 := v_1
 10461  			if !(isSamePtr(p1, p2)) {
 10462  				continue
 10463  			}
 10464  			v.reset(OpNot)
 10465  			v0 := b.NewValue0(v.Pos, OpIsNonNil, typ.Bool)
 10466  			v0.AddArg(o1)
 10467  			v.AddArg(v0)
 10468  			return true
 10469  		}
 10470  		break
 10471  	}
 10472  	// match: (EqPtr (Const32 [0]) p)
 10473  	// result: (Not (IsNonNil p))
 10474  	for {
 10475  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 10476  			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 10477  				continue
 10478  			}
 10479  			p := v_1
 10480  			v.reset(OpNot)
 10481  			v0 := b.NewValue0(v.Pos, OpIsNonNil, typ.Bool)
 10482  			v0.AddArg(p)
 10483  			v.AddArg(v0)
 10484  			return true
 10485  		}
 10486  		break
 10487  	}
 10488  	// match: (EqPtr (Const64 [0]) p)
 10489  	// result: (Not (IsNonNil p))
 10490  	for {
 10491  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 10492  			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 10493  				continue
 10494  			}
 10495  			p := v_1
 10496  			v.reset(OpNot)
 10497  			v0 := b.NewValue0(v.Pos, OpIsNonNil, typ.Bool)
 10498  			v0.AddArg(p)
 10499  			v.AddArg(v0)
 10500  			return true
 10501  		}
 10502  		break
 10503  	}
 10504  	// match: (EqPtr (ConstNil) p)
 10505  	// result: (Not (IsNonNil p))
 10506  	for {
 10507  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 10508  			if v_0.Op != OpConstNil {
 10509  				continue
 10510  			}
 10511  			p := v_1
 10512  			v.reset(OpNot)
 10513  			v0 := b.NewValue0(v.Pos, OpIsNonNil, typ.Bool)
 10514  			v0.AddArg(p)
 10515  			v.AddArg(v0)
 10516  			return true
 10517  		}
 10518  		break
 10519  	}
 10520  	return false
 10521  }
 10522  func rewriteValuegeneric_OpEqSlice(v *Value) bool {
 10523  	v_1 := v.Args[1]
 10524  	v_0 := v.Args[0]
 10525  	b := v.Block
 10526  	typ := &b.Func.Config.Types
 10527  	// match: (EqSlice x y)
 10528  	// result: (EqPtr (SlicePtr x) (SlicePtr y))
 10529  	for {
 10530  		x := v_0
 10531  		y := v_1
 10532  		v.reset(OpEqPtr)
 10533  		v0 := b.NewValue0(v.Pos, OpSlicePtr, typ.BytePtr)
 10534  		v0.AddArg(x)
 10535  		v1 := b.NewValue0(v.Pos, OpSlicePtr, typ.BytePtr)
 10536  		v1.AddArg(y)
 10537  		v.AddArg2(v0, v1)
 10538  		return true
 10539  	}
 10540  }
 10541  func rewriteValuegeneric_OpFloor(v *Value) bool {
 10542  	v_0 := v.Args[0]
 10543  	// match: (Floor (Const64F [c]))
 10544  	// result: (Const64F [math.Floor(c)])
 10545  	for {
 10546  		if v_0.Op != OpConst64F {
 10547  			break
 10548  		}
 10549  		c := auxIntToFloat64(v_0.AuxInt)
 10550  		v.reset(OpConst64F)
 10551  		v.AuxInt = float64ToAuxInt(math.Floor(c))
 10552  		return true
 10553  	}
 10554  	return false
 10555  }
 10556  func rewriteValuegeneric_OpIMake(v *Value) bool {
 10557  	v_1 := v.Args[1]
 10558  	v_0 := v.Args[0]
 10559  	// match: (IMake _typ (StructMake val))
 10560  	// result: (IMake _typ val)
 10561  	for {
 10562  		_typ := v_0
 10563  		if v_1.Op != OpStructMake || len(v_1.Args) != 1 {
 10564  			break
 10565  		}
 10566  		val := v_1.Args[0]
 10567  		v.reset(OpIMake)
 10568  		v.AddArg2(_typ, val)
 10569  		return true
 10570  	}
 10571  	// match: (IMake _typ (ArrayMake1 val))
 10572  	// result: (IMake _typ val)
 10573  	for {
 10574  		_typ := v_0
 10575  		if v_1.Op != OpArrayMake1 {
 10576  			break
 10577  		}
 10578  		val := v_1.Args[0]
 10579  		v.reset(OpIMake)
 10580  		v.AddArg2(_typ, val)
 10581  		return true
 10582  	}
 10583  	return false
 10584  }
 10585  func rewriteValuegeneric_OpInterLECall(v *Value) bool {
 10586  	// match: (InterLECall [argsize] {auxCall} (Addr {fn} (SB)) ___)
 10587  	// result: devirtLECall(v, fn.(*obj.LSym))
 10588  	for {
 10589  		if len(v.Args) < 1 {
 10590  			break
 10591  		}
 10592  		v_0 := v.Args[0]
 10593  		if v_0.Op != OpAddr {
 10594  			break
 10595  		}
 10596  		fn := auxToSym(v_0.Aux)
 10597  		v_0_0 := v_0.Args[0]
 10598  		if v_0_0.Op != OpSB {
 10599  			break
 10600  		}
 10601  		v.copyOf(devirtLECall(v, fn.(*obj.LSym)))
 10602  		return true
 10603  	}
 10604  	return false
 10605  }
 10606  func rewriteValuegeneric_OpIsInBounds(v *Value) bool {
 10607  	v_1 := v.Args[1]
 10608  	v_0 := v.Args[0]
 10609  	// match: (IsInBounds (ZeroExt8to32 _) (Const32 [c]))
 10610  	// cond: (1 << 8) <= c
 10611  	// result: (ConstBool [true])
 10612  	for {
 10613  		if v_0.Op != OpZeroExt8to32 || v_1.Op != OpConst32 {
 10614  			break
 10615  		}
 10616  		c := auxIntToInt32(v_1.AuxInt)
 10617  		if !((1 << 8) <= c) {
 10618  			break
 10619  		}
 10620  		v.reset(OpConstBool)
 10621  		v.AuxInt = boolToAuxInt(true)
 10622  		return true
 10623  	}
 10624  	// match: (IsInBounds (ZeroExt8to64 _) (Const64 [c]))
 10625  	// cond: (1 << 8) <= c
 10626  	// result: (ConstBool [true])
 10627  	for {
 10628  		if v_0.Op != OpZeroExt8to64 || v_1.Op != OpConst64 {
 10629  			break
 10630  		}
 10631  		c := auxIntToInt64(v_1.AuxInt)
 10632  		if !((1 << 8) <= c) {
 10633  			break
 10634  		}
 10635  		v.reset(OpConstBool)
 10636  		v.AuxInt = boolToAuxInt(true)
 10637  		return true
 10638  	}
 10639  	// match: (IsInBounds (ZeroExt16to32 _) (Const32 [c]))
 10640  	// cond: (1 << 16) <= c
 10641  	// result: (ConstBool [true])
 10642  	for {
 10643  		if v_0.Op != OpZeroExt16to32 || v_1.Op != OpConst32 {
 10644  			break
 10645  		}
 10646  		c := auxIntToInt32(v_1.AuxInt)
 10647  		if !((1 << 16) <= c) {
 10648  			break
 10649  		}
 10650  		v.reset(OpConstBool)
 10651  		v.AuxInt = boolToAuxInt(true)
 10652  		return true
 10653  	}
 10654  	// match: (IsInBounds (ZeroExt16to64 _) (Const64 [c]))
 10655  	// cond: (1 << 16) <= c
 10656  	// result: (ConstBool [true])
 10657  	for {
 10658  		if v_0.Op != OpZeroExt16to64 || v_1.Op != OpConst64 {
 10659  			break
 10660  		}
 10661  		c := auxIntToInt64(v_1.AuxInt)
 10662  		if !((1 << 16) <= c) {
 10663  			break
 10664  		}
 10665  		v.reset(OpConstBool)
 10666  		v.AuxInt = boolToAuxInt(true)
 10667  		return true
 10668  	}
 10669  	// match: (IsInBounds x x)
 10670  	// result: (ConstBool [false])
 10671  	for {
 10672  		x := v_0
 10673  		if x != v_1 {
 10674  			break
 10675  		}
 10676  		v.reset(OpConstBool)
 10677  		v.AuxInt = boolToAuxInt(false)
 10678  		return true
 10679  	}
 10680  	// match: (IsInBounds (And8 (Const8 [c]) _) (Const8 [d]))
 10681  	// cond: 0 <= c && c < d
 10682  	// result: (ConstBool [true])
 10683  	for {
 10684  		if v_0.Op != OpAnd8 {
 10685  			break
 10686  		}
 10687  		v_0_0 := v_0.Args[0]
 10688  		v_0_1 := v_0.Args[1]
 10689  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 10690  			if v_0_0.Op != OpConst8 {
 10691  				continue
 10692  			}
 10693  			c := auxIntToInt8(v_0_0.AuxInt)
 10694  			if v_1.Op != OpConst8 {
 10695  				continue
 10696  			}
 10697  			d := auxIntToInt8(v_1.AuxInt)
 10698  			if !(0 <= c && c < d) {
 10699  				continue
 10700  			}
 10701  			v.reset(OpConstBool)
 10702  			v.AuxInt = boolToAuxInt(true)
 10703  			return true
 10704  		}
 10705  		break
 10706  	}
 10707  	// match: (IsInBounds (ZeroExt8to16 (And8 (Const8 [c]) _)) (Const16 [d]))
 10708  	// cond: 0 <= c && int16(c) < d
 10709  	// result: (ConstBool [true])
 10710  	for {
 10711  		if v_0.Op != OpZeroExt8to16 {
 10712  			break
 10713  		}
 10714  		v_0_0 := v_0.Args[0]
 10715  		if v_0_0.Op != OpAnd8 {
 10716  			break
 10717  		}
 10718  		v_0_0_0 := v_0_0.Args[0]
 10719  		v_0_0_1 := v_0_0.Args[1]
 10720  		for _i0 := 0; _i0 <= 1; _i0, v_0_0_0, v_0_0_1 = _i0+1, v_0_0_1, v_0_0_0 {
 10721  			if v_0_0_0.Op != OpConst8 {
 10722  				continue
 10723  			}
 10724  			c := auxIntToInt8(v_0_0_0.AuxInt)
 10725  			if v_1.Op != OpConst16 {
 10726  				continue
 10727  			}
 10728  			d := auxIntToInt16(v_1.AuxInt)
 10729  			if !(0 <= c && int16(c) < d) {
 10730  				continue
 10731  			}
 10732  			v.reset(OpConstBool)
 10733  			v.AuxInt = boolToAuxInt(true)
 10734  			return true
 10735  		}
 10736  		break
 10737  	}
 10738  	// match: (IsInBounds (ZeroExt8to32 (And8 (Const8 [c]) _)) (Const32 [d]))
 10739  	// cond: 0 <= c && int32(c) < d
 10740  	// result: (ConstBool [true])
 10741  	for {
 10742  		if v_0.Op != OpZeroExt8to32 {
 10743  			break
 10744  		}
 10745  		v_0_0 := v_0.Args[0]
 10746  		if v_0_0.Op != OpAnd8 {
 10747  			break
 10748  		}
 10749  		v_0_0_0 := v_0_0.Args[0]
 10750  		v_0_0_1 := v_0_0.Args[1]
 10751  		for _i0 := 0; _i0 <= 1; _i0, v_0_0_0, v_0_0_1 = _i0+1, v_0_0_1, v_0_0_0 {
 10752  			if v_0_0_0.Op != OpConst8 {
 10753  				continue
 10754  			}
 10755  			c := auxIntToInt8(v_0_0_0.AuxInt)
 10756  			if v_1.Op != OpConst32 {
 10757  				continue
 10758  			}
 10759  			d := auxIntToInt32(v_1.AuxInt)
 10760  			if !(0 <= c && int32(c) < d) {
 10761  				continue
 10762  			}
 10763  			v.reset(OpConstBool)
 10764  			v.AuxInt = boolToAuxInt(true)
 10765  			return true
 10766  		}
 10767  		break
 10768  	}
 10769  	// match: (IsInBounds (ZeroExt8to64 (And8 (Const8 [c]) _)) (Const64 [d]))
 10770  	// cond: 0 <= c && int64(c) < d
 10771  	// result: (ConstBool [true])
 10772  	for {
 10773  		if v_0.Op != OpZeroExt8to64 {
 10774  			break
 10775  		}
 10776  		v_0_0 := v_0.Args[0]
 10777  		if v_0_0.Op != OpAnd8 {
 10778  			break
 10779  		}
 10780  		v_0_0_0 := v_0_0.Args[0]
 10781  		v_0_0_1 := v_0_0.Args[1]
 10782  		for _i0 := 0; _i0 <= 1; _i0, v_0_0_0, v_0_0_1 = _i0+1, v_0_0_1, v_0_0_0 {
 10783  			if v_0_0_0.Op != OpConst8 {
 10784  				continue
 10785  			}
 10786  			c := auxIntToInt8(v_0_0_0.AuxInt)
 10787  			if v_1.Op != OpConst64 {
 10788  				continue
 10789  			}
 10790  			d := auxIntToInt64(v_1.AuxInt)
 10791  			if !(0 <= c && int64(c) < d) {
 10792  				continue
 10793  			}
 10794  			v.reset(OpConstBool)
 10795  			v.AuxInt = boolToAuxInt(true)
 10796  			return true
 10797  		}
 10798  		break
 10799  	}
 10800  	// match: (IsInBounds (And16 (Const16 [c]) _) (Const16 [d]))
 10801  	// cond: 0 <= c && c < d
 10802  	// result: (ConstBool [true])
 10803  	for {
 10804  		if v_0.Op != OpAnd16 {
 10805  			break
 10806  		}
 10807  		v_0_0 := v_0.Args[0]
 10808  		v_0_1 := v_0.Args[1]
 10809  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 10810  			if v_0_0.Op != OpConst16 {
 10811  				continue
 10812  			}
 10813  			c := auxIntToInt16(v_0_0.AuxInt)
 10814  			if v_1.Op != OpConst16 {
 10815  				continue
 10816  			}
 10817  			d := auxIntToInt16(v_1.AuxInt)
 10818  			if !(0 <= c && c < d) {
 10819  				continue
 10820  			}
 10821  			v.reset(OpConstBool)
 10822  			v.AuxInt = boolToAuxInt(true)
 10823  			return true
 10824  		}
 10825  		break
 10826  	}
 10827  	// match: (IsInBounds (ZeroExt16to32 (And16 (Const16 [c]) _)) (Const32 [d]))
 10828  	// cond: 0 <= c && int32(c) < d
 10829  	// result: (ConstBool [true])
 10830  	for {
 10831  		if v_0.Op != OpZeroExt16to32 {
 10832  			break
 10833  		}
 10834  		v_0_0 := v_0.Args[0]
 10835  		if v_0_0.Op != OpAnd16 {
 10836  			break
 10837  		}
 10838  		v_0_0_0 := v_0_0.Args[0]
 10839  		v_0_0_1 := v_0_0.Args[1]
 10840  		for _i0 := 0; _i0 <= 1; _i0, v_0_0_0, v_0_0_1 = _i0+1, v_0_0_1, v_0_0_0 {
 10841  			if v_0_0_0.Op != OpConst16 {
 10842  				continue
 10843  			}
 10844  			c := auxIntToInt16(v_0_0_0.AuxInt)
 10845  			if v_1.Op != OpConst32 {
 10846  				continue
 10847  			}
 10848  			d := auxIntToInt32(v_1.AuxInt)
 10849  			if !(0 <= c && int32(c) < d) {
 10850  				continue
 10851  			}
 10852  			v.reset(OpConstBool)
 10853  			v.AuxInt = boolToAuxInt(true)
 10854  			return true
 10855  		}
 10856  		break
 10857  	}
 10858  	// match: (IsInBounds (ZeroExt16to64 (And16 (Const16 [c]) _)) (Const64 [d]))
 10859  	// cond: 0 <= c && int64(c) < d
 10860  	// result: (ConstBool [true])
 10861  	for {
 10862  		if v_0.Op != OpZeroExt16to64 {
 10863  			break
 10864  		}
 10865  		v_0_0 := v_0.Args[0]
 10866  		if v_0_0.Op != OpAnd16 {
 10867  			break
 10868  		}
 10869  		v_0_0_0 := v_0_0.Args[0]
 10870  		v_0_0_1 := v_0_0.Args[1]
 10871  		for _i0 := 0; _i0 <= 1; _i0, v_0_0_0, v_0_0_1 = _i0+1, v_0_0_1, v_0_0_0 {
 10872  			if v_0_0_0.Op != OpConst16 {
 10873  				continue
 10874  			}
 10875  			c := auxIntToInt16(v_0_0_0.AuxInt)
 10876  			if v_1.Op != OpConst64 {
 10877  				continue
 10878  			}
 10879  			d := auxIntToInt64(v_1.AuxInt)
 10880  			if !(0 <= c && int64(c) < d) {
 10881  				continue
 10882  			}
 10883  			v.reset(OpConstBool)
 10884  			v.AuxInt = boolToAuxInt(true)
 10885  			return true
 10886  		}
 10887  		break
 10888  	}
 10889  	// match: (IsInBounds (And32 (Const32 [c]) _) (Const32 [d]))
 10890  	// cond: 0 <= c && c < d
 10891  	// result: (ConstBool [true])
 10892  	for {
 10893  		if v_0.Op != OpAnd32 {
 10894  			break
 10895  		}
 10896  		v_0_0 := v_0.Args[0]
 10897  		v_0_1 := v_0.Args[1]
 10898  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 10899  			if v_0_0.Op != OpConst32 {
 10900  				continue
 10901  			}
 10902  			c := auxIntToInt32(v_0_0.AuxInt)
 10903  			if v_1.Op != OpConst32 {
 10904  				continue
 10905  			}
 10906  			d := auxIntToInt32(v_1.AuxInt)
 10907  			if !(0 <= c && c < d) {
 10908  				continue
 10909  			}
 10910  			v.reset(OpConstBool)
 10911  			v.AuxInt = boolToAuxInt(true)
 10912  			return true
 10913  		}
 10914  		break
 10915  	}
 10916  	// match: (IsInBounds (ZeroExt32to64 (And32 (Const32 [c]) _)) (Const64 [d]))
 10917  	// cond: 0 <= c && int64(c) < d
 10918  	// result: (ConstBool [true])
 10919  	for {
 10920  		if v_0.Op != OpZeroExt32to64 {
 10921  			break
 10922  		}
 10923  		v_0_0 := v_0.Args[0]
 10924  		if v_0_0.Op != OpAnd32 {
 10925  			break
 10926  		}
 10927  		v_0_0_0 := v_0_0.Args[0]
 10928  		v_0_0_1 := v_0_0.Args[1]
 10929  		for _i0 := 0; _i0 <= 1; _i0, v_0_0_0, v_0_0_1 = _i0+1, v_0_0_1, v_0_0_0 {
 10930  			if v_0_0_0.Op != OpConst32 {
 10931  				continue
 10932  			}
 10933  			c := auxIntToInt32(v_0_0_0.AuxInt)
 10934  			if v_1.Op != OpConst64 {
 10935  				continue
 10936  			}
 10937  			d := auxIntToInt64(v_1.AuxInt)
 10938  			if !(0 <= c && int64(c) < d) {
 10939  				continue
 10940  			}
 10941  			v.reset(OpConstBool)
 10942  			v.AuxInt = boolToAuxInt(true)
 10943  			return true
 10944  		}
 10945  		break
 10946  	}
 10947  	// match: (IsInBounds (And64 (Const64 [c]) _) (Const64 [d]))
 10948  	// cond: 0 <= c && c < d
 10949  	// result: (ConstBool [true])
 10950  	for {
 10951  		if v_0.Op != OpAnd64 {
 10952  			break
 10953  		}
 10954  		v_0_0 := v_0.Args[0]
 10955  		v_0_1 := v_0.Args[1]
 10956  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 10957  			if v_0_0.Op != OpConst64 {
 10958  				continue
 10959  			}
 10960  			c := auxIntToInt64(v_0_0.AuxInt)
 10961  			if v_1.Op != OpConst64 {
 10962  				continue
 10963  			}
 10964  			d := auxIntToInt64(v_1.AuxInt)
 10965  			if !(0 <= c && c < d) {
 10966  				continue
 10967  			}
 10968  			v.reset(OpConstBool)
 10969  			v.AuxInt = boolToAuxInt(true)
 10970  			return true
 10971  		}
 10972  		break
 10973  	}
 10974  	// match: (IsInBounds (Const32 [c]) (Const32 [d]))
 10975  	// result: (ConstBool [0 <= c && c < d])
 10976  	for {
 10977  		if v_0.Op != OpConst32 {
 10978  			break
 10979  		}
 10980  		c := auxIntToInt32(v_0.AuxInt)
 10981  		if v_1.Op != OpConst32 {
 10982  			break
 10983  		}
 10984  		d := auxIntToInt32(v_1.AuxInt)
 10985  		v.reset(OpConstBool)
 10986  		v.AuxInt = boolToAuxInt(0 <= c && c < d)
 10987  		return true
 10988  	}
 10989  	// match: (IsInBounds (Const64 [c]) (Const64 [d]))
 10990  	// result: (ConstBool [0 <= c && c < d])
 10991  	for {
 10992  		if v_0.Op != OpConst64 {
 10993  			break
 10994  		}
 10995  		c := auxIntToInt64(v_0.AuxInt)
 10996  		if v_1.Op != OpConst64 {
 10997  			break
 10998  		}
 10999  		d := auxIntToInt64(v_1.AuxInt)
 11000  		v.reset(OpConstBool)
 11001  		v.AuxInt = boolToAuxInt(0 <= c && c < d)
 11002  		return true
 11003  	}
 11004  	// match: (IsInBounds (Mod32u _ y) y)
 11005  	// result: (ConstBool [true])
 11006  	for {
 11007  		if v_0.Op != OpMod32u {
 11008  			break
 11009  		}
 11010  		y := v_0.Args[1]
 11011  		if y != v_1 {
 11012  			break
 11013  		}
 11014  		v.reset(OpConstBool)
 11015  		v.AuxInt = boolToAuxInt(true)
 11016  		return true
 11017  	}
 11018  	// match: (IsInBounds (Mod64u _ y) y)
 11019  	// result: (ConstBool [true])
 11020  	for {
 11021  		if v_0.Op != OpMod64u {
 11022  			break
 11023  		}
 11024  		y := v_0.Args[1]
 11025  		if y != v_1 {
 11026  			break
 11027  		}
 11028  		v.reset(OpConstBool)
 11029  		v.AuxInt = boolToAuxInt(true)
 11030  		return true
 11031  	}
 11032  	// match: (IsInBounds (ZeroExt8to64 (Rsh8Ux64 _ (Const64 [c]))) (Const64 [d]))
 11033  	// cond: 0 < c && c < 8 && 1<<uint( 8-c)-1 < d
 11034  	// result: (ConstBool [true])
 11035  	for {
 11036  		if v_0.Op != OpZeroExt8to64 {
 11037  			break
 11038  		}
 11039  		v_0_0 := v_0.Args[0]
 11040  		if v_0_0.Op != OpRsh8Ux64 {
 11041  			break
 11042  		}
 11043  		_ = v_0_0.Args[1]
 11044  		v_0_0_1 := v_0_0.Args[1]
 11045  		if v_0_0_1.Op != OpConst64 {
 11046  			break
 11047  		}
 11048  		c := auxIntToInt64(v_0_0_1.AuxInt)
 11049  		if v_1.Op != OpConst64 {
 11050  			break
 11051  		}
 11052  		d := auxIntToInt64(v_1.AuxInt)
 11053  		if !(0 < c && c < 8 && 1<<uint(8-c)-1 < d) {
 11054  			break
 11055  		}
 11056  		v.reset(OpConstBool)
 11057  		v.AuxInt = boolToAuxInt(true)
 11058  		return true
 11059  	}
 11060  	// match: (IsInBounds (ZeroExt8to32 (Rsh8Ux64 _ (Const64 [c]))) (Const32 [d]))
 11061  	// cond: 0 < c && c < 8 && 1<<uint( 8-c)-1 < d
 11062  	// result: (ConstBool [true])
 11063  	for {
 11064  		if v_0.Op != OpZeroExt8to32 {
 11065  			break
 11066  		}
 11067  		v_0_0 := v_0.Args[0]
 11068  		if v_0_0.Op != OpRsh8Ux64 {
 11069  			break
 11070  		}
 11071  		_ = v_0_0.Args[1]
 11072  		v_0_0_1 := v_0_0.Args[1]
 11073  		if v_0_0_1.Op != OpConst64 {
 11074  			break
 11075  		}
 11076  		c := auxIntToInt64(v_0_0_1.AuxInt)
 11077  		if v_1.Op != OpConst32 {
 11078  			break
 11079  		}
 11080  		d := auxIntToInt32(v_1.AuxInt)
 11081  		if !(0 < c && c < 8 && 1<<uint(8-c)-1 < d) {
 11082  			break
 11083  		}
 11084  		v.reset(OpConstBool)
 11085  		v.AuxInt = boolToAuxInt(true)
 11086  		return true
 11087  	}
 11088  	// match: (IsInBounds (ZeroExt8to16 (Rsh8Ux64 _ (Const64 [c]))) (Const16 [d]))
 11089  	// cond: 0 < c && c < 8 && 1<<uint( 8-c)-1 < d
 11090  	// result: (ConstBool [true])
 11091  	for {
 11092  		if v_0.Op != OpZeroExt8to16 {
 11093  			break
 11094  		}
 11095  		v_0_0 := v_0.Args[0]
 11096  		if v_0_0.Op != OpRsh8Ux64 {
 11097  			break
 11098  		}
 11099  		_ = v_0_0.Args[1]
 11100  		v_0_0_1 := v_0_0.Args[1]
 11101  		if v_0_0_1.Op != OpConst64 {
 11102  			break
 11103  		}
 11104  		c := auxIntToInt64(v_0_0_1.AuxInt)
 11105  		if v_1.Op != OpConst16 {
 11106  			break
 11107  		}
 11108  		d := auxIntToInt16(v_1.AuxInt)
 11109  		if !(0 < c && c < 8 && 1<<uint(8-c)-1 < d) {
 11110  			break
 11111  		}
 11112  		v.reset(OpConstBool)
 11113  		v.AuxInt = boolToAuxInt(true)
 11114  		return true
 11115  	}
 11116  	// match: (IsInBounds (Rsh8Ux64 _ (Const64 [c])) (Const64 [d]))
 11117  	// cond: 0 < c && c < 8 && 1<<uint( 8-c)-1 < d
 11118  	// result: (ConstBool [true])
 11119  	for {
 11120  		if v_0.Op != OpRsh8Ux64 {
 11121  			break
 11122  		}
 11123  		_ = v_0.Args[1]
 11124  		v_0_1 := v_0.Args[1]
 11125  		if v_0_1.Op != OpConst64 {
 11126  			break
 11127  		}
 11128  		c := auxIntToInt64(v_0_1.AuxInt)
 11129  		if v_1.Op != OpConst64 {
 11130  			break
 11131  		}
 11132  		d := auxIntToInt64(v_1.AuxInt)
 11133  		if !(0 < c && c < 8 && 1<<uint(8-c)-1 < d) {
 11134  			break
 11135  		}
 11136  		v.reset(OpConstBool)
 11137  		v.AuxInt = boolToAuxInt(true)
 11138  		return true
 11139  	}
 11140  	// match: (IsInBounds (ZeroExt16to64 (Rsh16Ux64 _ (Const64 [c]))) (Const64 [d]))
 11141  	// cond: 0 < c && c < 16 && 1<<uint(16-c)-1 < d
 11142  	// result: (ConstBool [true])
 11143  	for {
 11144  		if v_0.Op != OpZeroExt16to64 {
 11145  			break
 11146  		}
 11147  		v_0_0 := v_0.Args[0]
 11148  		if v_0_0.Op != OpRsh16Ux64 {
 11149  			break
 11150  		}
 11151  		_ = v_0_0.Args[1]
 11152  		v_0_0_1 := v_0_0.Args[1]
 11153  		if v_0_0_1.Op != OpConst64 {
 11154  			break
 11155  		}
 11156  		c := auxIntToInt64(v_0_0_1.AuxInt)
 11157  		if v_1.Op != OpConst64 {
 11158  			break
 11159  		}
 11160  		d := auxIntToInt64(v_1.AuxInt)
 11161  		if !(0 < c && c < 16 && 1<<uint(16-c)-1 < d) {
 11162  			break
 11163  		}
 11164  		v.reset(OpConstBool)
 11165  		v.AuxInt = boolToAuxInt(true)
 11166  		return true
 11167  	}
 11168  	// match: (IsInBounds (ZeroExt16to32 (Rsh16Ux64 _ (Const64 [c]))) (Const64 [d]))
 11169  	// cond: 0 < c && c < 16 && 1<<uint(16-c)-1 < d
 11170  	// result: (ConstBool [true])
 11171  	for {
 11172  		if v_0.Op != OpZeroExt16to32 {
 11173  			break
 11174  		}
 11175  		v_0_0 := v_0.Args[0]
 11176  		if v_0_0.Op != OpRsh16Ux64 {
 11177  			break
 11178  		}
 11179  		_ = v_0_0.Args[1]
 11180  		v_0_0_1 := v_0_0.Args[1]
 11181  		if v_0_0_1.Op != OpConst64 {
 11182  			break
 11183  		}
 11184  		c := auxIntToInt64(v_0_0_1.AuxInt)
 11185  		if v_1.Op != OpConst64 {
 11186  			break
 11187  		}
 11188  		d := auxIntToInt64(v_1.AuxInt)
 11189  		if !(0 < c && c < 16 && 1<<uint(16-c)-1 < d) {
 11190  			break
 11191  		}
 11192  		v.reset(OpConstBool)
 11193  		v.AuxInt = boolToAuxInt(true)
 11194  		return true
 11195  	}
 11196  	// match: (IsInBounds (Rsh16Ux64 _ (Const64 [c])) (Const64 [d]))
 11197  	// cond: 0 < c && c < 16 && 1<<uint(16-c)-1 < d
 11198  	// result: (ConstBool [true])
 11199  	for {
 11200  		if v_0.Op != OpRsh16Ux64 {
 11201  			break
 11202  		}
 11203  		_ = v_0.Args[1]
 11204  		v_0_1 := v_0.Args[1]
 11205  		if v_0_1.Op != OpConst64 {
 11206  			break
 11207  		}
 11208  		c := auxIntToInt64(v_0_1.AuxInt)
 11209  		if v_1.Op != OpConst64 {
 11210  			break
 11211  		}
 11212  		d := auxIntToInt64(v_1.AuxInt)
 11213  		if !(0 < c && c < 16 && 1<<uint(16-c)-1 < d) {
 11214  			break
 11215  		}
 11216  		v.reset(OpConstBool)
 11217  		v.AuxInt = boolToAuxInt(true)
 11218  		return true
 11219  	}
 11220  	// match: (IsInBounds (ZeroExt32to64 (Rsh32Ux64 _ (Const64 [c]))) (Const64 [d]))
 11221  	// cond: 0 < c && c < 32 && 1<<uint(32-c)-1 < d
 11222  	// result: (ConstBool [true])
 11223  	for {
 11224  		if v_0.Op != OpZeroExt32to64 {
 11225  			break
 11226  		}
 11227  		v_0_0 := v_0.Args[0]
 11228  		if v_0_0.Op != OpRsh32Ux64 {
 11229  			break
 11230  		}
 11231  		_ = v_0_0.Args[1]
 11232  		v_0_0_1 := v_0_0.Args[1]
 11233  		if v_0_0_1.Op != OpConst64 {
 11234  			break
 11235  		}
 11236  		c := auxIntToInt64(v_0_0_1.AuxInt)
 11237  		if v_1.Op != OpConst64 {
 11238  			break
 11239  		}
 11240  		d := auxIntToInt64(v_1.AuxInt)
 11241  		if !(0 < c && c < 32 && 1<<uint(32-c)-1 < d) {
 11242  			break
 11243  		}
 11244  		v.reset(OpConstBool)
 11245  		v.AuxInt = boolToAuxInt(true)
 11246  		return true
 11247  	}
 11248  	// match: (IsInBounds (Rsh32Ux64 _ (Const64 [c])) (Const64 [d]))
 11249  	// cond: 0 < c && c < 32 && 1<<uint(32-c)-1 < d
 11250  	// result: (ConstBool [true])
 11251  	for {
 11252  		if v_0.Op != OpRsh32Ux64 {
 11253  			break
 11254  		}
 11255  		_ = v_0.Args[1]
 11256  		v_0_1 := v_0.Args[1]
 11257  		if v_0_1.Op != OpConst64 {
 11258  			break
 11259  		}
 11260  		c := auxIntToInt64(v_0_1.AuxInt)
 11261  		if v_1.Op != OpConst64 {
 11262  			break
 11263  		}
 11264  		d := auxIntToInt64(v_1.AuxInt)
 11265  		if !(0 < c && c < 32 && 1<<uint(32-c)-1 < d) {
 11266  			break
 11267  		}
 11268  		v.reset(OpConstBool)
 11269  		v.AuxInt = boolToAuxInt(true)
 11270  		return true
 11271  	}
 11272  	// match: (IsInBounds (Rsh64Ux64 _ (Const64 [c])) (Const64 [d]))
 11273  	// cond: 0 < c && c < 64 && 1<<uint(64-c)-1 < d
 11274  	// result: (ConstBool [true])
 11275  	for {
 11276  		if v_0.Op != OpRsh64Ux64 {
 11277  			break
 11278  		}
 11279  		_ = v_0.Args[1]
 11280  		v_0_1 := v_0.Args[1]
 11281  		if v_0_1.Op != OpConst64 {
 11282  			break
 11283  		}
 11284  		c := auxIntToInt64(v_0_1.AuxInt)
 11285  		if v_1.Op != OpConst64 {
 11286  			break
 11287  		}
 11288  		d := auxIntToInt64(v_1.AuxInt)
 11289  		if !(0 < c && c < 64 && 1<<uint(64-c)-1 < d) {
 11290  			break
 11291  		}
 11292  		v.reset(OpConstBool)
 11293  		v.AuxInt = boolToAuxInt(true)
 11294  		return true
 11295  	}
 11296  	return false
 11297  }
 11298  func rewriteValuegeneric_OpIsNonNil(v *Value) bool {
 11299  	v_0 := v.Args[0]
 11300  	// match: (IsNonNil (ConstNil))
 11301  	// result: (ConstBool [false])
 11302  	for {
 11303  		if v_0.Op != OpConstNil {
 11304  			break
 11305  		}
 11306  		v.reset(OpConstBool)
 11307  		v.AuxInt = boolToAuxInt(false)
 11308  		return true
 11309  	}
 11310  	// match: (IsNonNil (Const32 [c]))
 11311  	// result: (ConstBool [c != 0])
 11312  	for {
 11313  		if v_0.Op != OpConst32 {
 11314  			break
 11315  		}
 11316  		c := auxIntToInt32(v_0.AuxInt)
 11317  		v.reset(OpConstBool)
 11318  		v.AuxInt = boolToAuxInt(c != 0)
 11319  		return true
 11320  	}
 11321  	// match: (IsNonNil (Const64 [c]))
 11322  	// result: (ConstBool [c != 0])
 11323  	for {
 11324  		if v_0.Op != OpConst64 {
 11325  			break
 11326  		}
 11327  		c := auxIntToInt64(v_0.AuxInt)
 11328  		v.reset(OpConstBool)
 11329  		v.AuxInt = boolToAuxInt(c != 0)
 11330  		return true
 11331  	}
 11332  	// match: (IsNonNil (Addr _) )
 11333  	// result: (ConstBool [true])
 11334  	for {
 11335  		if v_0.Op != OpAddr {
 11336  			break
 11337  		}
 11338  		v.reset(OpConstBool)
 11339  		v.AuxInt = boolToAuxInt(true)
 11340  		return true
 11341  	}
 11342  	// match: (IsNonNil (Convert (Addr _) _))
 11343  	// result: (ConstBool [true])
 11344  	for {
 11345  		if v_0.Op != OpConvert {
 11346  			break
 11347  		}
 11348  		v_0_0 := v_0.Args[0]
 11349  		if v_0_0.Op != OpAddr {
 11350  			break
 11351  		}
 11352  		v.reset(OpConstBool)
 11353  		v.AuxInt = boolToAuxInt(true)
 11354  		return true
 11355  	}
 11356  	// match: (IsNonNil (LocalAddr _ _))
 11357  	// result: (ConstBool [true])
 11358  	for {
 11359  		if v_0.Op != OpLocalAddr {
 11360  			break
 11361  		}
 11362  		v.reset(OpConstBool)
 11363  		v.AuxInt = boolToAuxInt(true)
 11364  		return true
 11365  	}
 11366  	return false
 11367  }
 11368  func rewriteValuegeneric_OpIsSliceInBounds(v *Value) bool {
 11369  	v_1 := v.Args[1]
 11370  	v_0 := v.Args[0]
 11371  	// match: (IsSliceInBounds x x)
 11372  	// result: (ConstBool [true])
 11373  	for {
 11374  		x := v_0
 11375  		if x != v_1 {
 11376  			break
 11377  		}
 11378  		v.reset(OpConstBool)
 11379  		v.AuxInt = boolToAuxInt(true)
 11380  		return true
 11381  	}
 11382  	// match: (IsSliceInBounds (And32 (Const32 [c]) _) (Const32 [d]))
 11383  	// cond: 0 <= c && c <= d
 11384  	// result: (ConstBool [true])
 11385  	for {
 11386  		if v_0.Op != OpAnd32 {
 11387  			break
 11388  		}
 11389  		v_0_0 := v_0.Args[0]
 11390  		v_0_1 := v_0.Args[1]
 11391  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 11392  			if v_0_0.Op != OpConst32 {
 11393  				continue
 11394  			}
 11395  			c := auxIntToInt32(v_0_0.AuxInt)
 11396  			if v_1.Op != OpConst32 {
 11397  				continue
 11398  			}
 11399  			d := auxIntToInt32(v_1.AuxInt)
 11400  			if !(0 <= c && c <= d) {
 11401  				continue
 11402  			}
 11403  			v.reset(OpConstBool)
 11404  			v.AuxInt = boolToAuxInt(true)
 11405  			return true
 11406  		}
 11407  		break
 11408  	}
 11409  	// match: (IsSliceInBounds (And64 (Const64 [c]) _) (Const64 [d]))
 11410  	// cond: 0 <= c && c <= d
 11411  	// result: (ConstBool [true])
 11412  	for {
 11413  		if v_0.Op != OpAnd64 {
 11414  			break
 11415  		}
 11416  		v_0_0 := v_0.Args[0]
 11417  		v_0_1 := v_0.Args[1]
 11418  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 11419  			if v_0_0.Op != OpConst64 {
 11420  				continue
 11421  			}
 11422  			c := auxIntToInt64(v_0_0.AuxInt)
 11423  			if v_1.Op != OpConst64 {
 11424  				continue
 11425  			}
 11426  			d := auxIntToInt64(v_1.AuxInt)
 11427  			if !(0 <= c && c <= d) {
 11428  				continue
 11429  			}
 11430  			v.reset(OpConstBool)
 11431  			v.AuxInt = boolToAuxInt(true)
 11432  			return true
 11433  		}
 11434  		break
 11435  	}
 11436  	// match: (IsSliceInBounds (Const32 [0]) _)
 11437  	// result: (ConstBool [true])
 11438  	for {
 11439  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 11440  			break
 11441  		}
 11442  		v.reset(OpConstBool)
 11443  		v.AuxInt = boolToAuxInt(true)
 11444  		return true
 11445  	}
 11446  	// match: (IsSliceInBounds (Const64 [0]) _)
 11447  	// result: (ConstBool [true])
 11448  	for {
 11449  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 11450  			break
 11451  		}
 11452  		v.reset(OpConstBool)
 11453  		v.AuxInt = boolToAuxInt(true)
 11454  		return true
 11455  	}
 11456  	// match: (IsSliceInBounds (Const32 [c]) (Const32 [d]))
 11457  	// result: (ConstBool [0 <= c && c <= d])
 11458  	for {
 11459  		if v_0.Op != OpConst32 {
 11460  			break
 11461  		}
 11462  		c := auxIntToInt32(v_0.AuxInt)
 11463  		if v_1.Op != OpConst32 {
 11464  			break
 11465  		}
 11466  		d := auxIntToInt32(v_1.AuxInt)
 11467  		v.reset(OpConstBool)
 11468  		v.AuxInt = boolToAuxInt(0 <= c && c <= d)
 11469  		return true
 11470  	}
 11471  	// match: (IsSliceInBounds (Const64 [c]) (Const64 [d]))
 11472  	// result: (ConstBool [0 <= c && c <= d])
 11473  	for {
 11474  		if v_0.Op != OpConst64 {
 11475  			break
 11476  		}
 11477  		c := auxIntToInt64(v_0.AuxInt)
 11478  		if v_1.Op != OpConst64 {
 11479  			break
 11480  		}
 11481  		d := auxIntToInt64(v_1.AuxInt)
 11482  		v.reset(OpConstBool)
 11483  		v.AuxInt = boolToAuxInt(0 <= c && c <= d)
 11484  		return true
 11485  	}
 11486  	// match: (IsSliceInBounds (SliceLen x) (SliceCap x))
 11487  	// result: (ConstBool [true])
 11488  	for {
 11489  		if v_0.Op != OpSliceLen {
 11490  			break
 11491  		}
 11492  		x := v_0.Args[0]
 11493  		if v_1.Op != OpSliceCap || x != v_1.Args[0] {
 11494  			break
 11495  		}
 11496  		v.reset(OpConstBool)
 11497  		v.AuxInt = boolToAuxInt(true)
 11498  		return true
 11499  	}
 11500  	return false
 11501  }
 11502  func rewriteValuegeneric_OpLeq16(v *Value) bool {
 11503  	v_1 := v.Args[1]
 11504  	v_0 := v.Args[0]
 11505  	b := v.Block
 11506  	// match: (Leq16 (Const16 [c]) (Const16 [d]))
 11507  	// result: (ConstBool [c <= d])
 11508  	for {
 11509  		if v_0.Op != OpConst16 {
 11510  			break
 11511  		}
 11512  		c := auxIntToInt16(v_0.AuxInt)
 11513  		if v_1.Op != OpConst16 {
 11514  			break
 11515  		}
 11516  		d := auxIntToInt16(v_1.AuxInt)
 11517  		v.reset(OpConstBool)
 11518  		v.AuxInt = boolToAuxInt(c <= d)
 11519  		return true
 11520  	}
 11521  	// match: (Leq16 (Const16 [0]) (And16 _ (Const16 [c])))
 11522  	// cond: c >= 0
 11523  	// result: (ConstBool [true])
 11524  	for {
 11525  		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 || v_1.Op != OpAnd16 {
 11526  			break
 11527  		}
 11528  		_ = v_1.Args[1]
 11529  		v_1_0 := v_1.Args[0]
 11530  		v_1_1 := v_1.Args[1]
 11531  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 11532  			if v_1_1.Op != OpConst16 {
 11533  				continue
 11534  			}
 11535  			c := auxIntToInt16(v_1_1.AuxInt)
 11536  			if !(c >= 0) {
 11537  				continue
 11538  			}
 11539  			v.reset(OpConstBool)
 11540  			v.AuxInt = boolToAuxInt(true)
 11541  			return true
 11542  		}
 11543  		break
 11544  	}
 11545  	// match: (Leq16 (Const16 [0]) (Rsh16Ux64 _ (Const64 [c])))
 11546  	// cond: c > 0
 11547  	// result: (ConstBool [true])
 11548  	for {
 11549  		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 || v_1.Op != OpRsh16Ux64 {
 11550  			break
 11551  		}
 11552  		_ = v_1.Args[1]
 11553  		v_1_1 := v_1.Args[1]
 11554  		if v_1_1.Op != OpConst64 {
 11555  			break
 11556  		}
 11557  		c := auxIntToInt64(v_1_1.AuxInt)
 11558  		if !(c > 0) {
 11559  			break
 11560  		}
 11561  		v.reset(OpConstBool)
 11562  		v.AuxInt = boolToAuxInt(true)
 11563  		return true
 11564  	}
 11565  	// match: (Leq16 x (Const16 <t> [-1]))
 11566  	// result: (Less16 x (Const16 <t> [0]))
 11567  	for {
 11568  		x := v_0
 11569  		if v_1.Op != OpConst16 {
 11570  			break
 11571  		}
 11572  		t := v_1.Type
 11573  		if auxIntToInt16(v_1.AuxInt) != -1 {
 11574  			break
 11575  		}
 11576  		v.reset(OpLess16)
 11577  		v0 := b.NewValue0(v.Pos, OpConst16, t)
 11578  		v0.AuxInt = int16ToAuxInt(0)
 11579  		v.AddArg2(x, v0)
 11580  		return true
 11581  	}
 11582  	// match: (Leq16 (Const16 <t> [1]) x)
 11583  	// result: (Less16 (Const16 <t> [0]) x)
 11584  	for {
 11585  		if v_0.Op != OpConst16 {
 11586  			break
 11587  		}
 11588  		t := v_0.Type
 11589  		if auxIntToInt16(v_0.AuxInt) != 1 {
 11590  			break
 11591  		}
 11592  		x := v_1
 11593  		v.reset(OpLess16)
 11594  		v0 := b.NewValue0(v.Pos, OpConst16, t)
 11595  		v0.AuxInt = int16ToAuxInt(0)
 11596  		v.AddArg2(v0, x)
 11597  		return true
 11598  	}
 11599  	// match: (Leq16 (Const16 [math.MinInt16]) _)
 11600  	// result: (ConstBool [true])
 11601  	for {
 11602  		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != math.MinInt16 {
 11603  			break
 11604  		}
 11605  		v.reset(OpConstBool)
 11606  		v.AuxInt = boolToAuxInt(true)
 11607  		return true
 11608  	}
 11609  	// match: (Leq16 _ (Const16 [math.MaxInt16]))
 11610  	// result: (ConstBool [true])
 11611  	for {
 11612  		if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != math.MaxInt16 {
 11613  			break
 11614  		}
 11615  		v.reset(OpConstBool)
 11616  		v.AuxInt = boolToAuxInt(true)
 11617  		return true
 11618  	}
 11619  	// match: (Leq16 x c:(Const16 [math.MinInt16]))
 11620  	// result: (Eq16 x c)
 11621  	for {
 11622  		x := v_0
 11623  		c := v_1
 11624  		if c.Op != OpConst16 || auxIntToInt16(c.AuxInt) != math.MinInt16 {
 11625  			break
 11626  		}
 11627  		v.reset(OpEq16)
 11628  		v.AddArg2(x, c)
 11629  		return true
 11630  	}
 11631  	// match: (Leq16 c:(Const16 [math.MaxInt16]) x)
 11632  	// result: (Eq16 x c)
 11633  	for {
 11634  		c := v_0
 11635  		if c.Op != OpConst16 || auxIntToInt16(c.AuxInt) != math.MaxInt16 {
 11636  			break
 11637  		}
 11638  		x := v_1
 11639  		v.reset(OpEq16)
 11640  		v.AddArg2(x, c)
 11641  		return true
 11642  	}
 11643  	return false
 11644  }
 11645  func rewriteValuegeneric_OpLeq16U(v *Value) bool {
 11646  	v_1 := v.Args[1]
 11647  	v_0 := v.Args[0]
 11648  	b := v.Block
 11649  	// match: (Leq16U (Const16 [c]) (Const16 [d]))
 11650  	// result: (ConstBool [uint16(c) <= uint16(d)])
 11651  	for {
 11652  		if v_0.Op != OpConst16 {
 11653  			break
 11654  		}
 11655  		c := auxIntToInt16(v_0.AuxInt)
 11656  		if v_1.Op != OpConst16 {
 11657  			break
 11658  		}
 11659  		d := auxIntToInt16(v_1.AuxInt)
 11660  		v.reset(OpConstBool)
 11661  		v.AuxInt = boolToAuxInt(uint16(c) <= uint16(d))
 11662  		return true
 11663  	}
 11664  	// match: (Leq16U (Const16 <t> [1]) x)
 11665  	// result: (Neq16 (Const16 <t> [0]) x)
 11666  	for {
 11667  		if v_0.Op != OpConst16 {
 11668  			break
 11669  		}
 11670  		t := v_0.Type
 11671  		if auxIntToInt16(v_0.AuxInt) != 1 {
 11672  			break
 11673  		}
 11674  		x := v_1
 11675  		v.reset(OpNeq16)
 11676  		v0 := b.NewValue0(v.Pos, OpConst16, t)
 11677  		v0.AuxInt = int16ToAuxInt(0)
 11678  		v.AddArg2(v0, x)
 11679  		return true
 11680  	}
 11681  	// match: (Leq16U (Const16 [0]) _)
 11682  	// result: (ConstBool [true])
 11683  	for {
 11684  		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
 11685  			break
 11686  		}
 11687  		v.reset(OpConstBool)
 11688  		v.AuxInt = boolToAuxInt(true)
 11689  		return true
 11690  	}
 11691  	// match: (Leq16U _ (Const16 [-1]))
 11692  	// result: (ConstBool [true])
 11693  	for {
 11694  		if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != -1 {
 11695  			break
 11696  		}
 11697  		v.reset(OpConstBool)
 11698  		v.AuxInt = boolToAuxInt(true)
 11699  		return true
 11700  	}
 11701  	// match: (Leq16U x c:(Const16 [0]))
 11702  	// result: (Eq16 x c)
 11703  	for {
 11704  		x := v_0
 11705  		c := v_1
 11706  		if c.Op != OpConst16 || auxIntToInt16(c.AuxInt) != 0 {
 11707  			break
 11708  		}
 11709  		v.reset(OpEq16)
 11710  		v.AddArg2(x, c)
 11711  		return true
 11712  	}
 11713  	// match: (Leq16U c:(Const16 [-1]) x)
 11714  	// result: (Eq16 x c)
 11715  	for {
 11716  		c := v_0
 11717  		if c.Op != OpConst16 || auxIntToInt16(c.AuxInt) != -1 {
 11718  			break
 11719  		}
 11720  		x := v_1
 11721  		v.reset(OpEq16)
 11722  		v.AddArg2(x, c)
 11723  		return true
 11724  	}
 11725  	return false
 11726  }
 11727  func rewriteValuegeneric_OpLeq32(v *Value) bool {
 11728  	v_1 := v.Args[1]
 11729  	v_0 := v.Args[0]
 11730  	b := v.Block
 11731  	// match: (Leq32 (Const32 [c]) (Const32 [d]))
 11732  	// result: (ConstBool [c <= d])
 11733  	for {
 11734  		if v_0.Op != OpConst32 {
 11735  			break
 11736  		}
 11737  		c := auxIntToInt32(v_0.AuxInt)
 11738  		if v_1.Op != OpConst32 {
 11739  			break
 11740  		}
 11741  		d := auxIntToInt32(v_1.AuxInt)
 11742  		v.reset(OpConstBool)
 11743  		v.AuxInt = boolToAuxInt(c <= d)
 11744  		return true
 11745  	}
 11746  	// match: (Leq32 (Const32 [0]) (And32 _ (Const32 [c])))
 11747  	// cond: c >= 0
 11748  	// result: (ConstBool [true])
 11749  	for {
 11750  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 || v_1.Op != OpAnd32 {
 11751  			break
 11752  		}
 11753  		_ = v_1.Args[1]
 11754  		v_1_0 := v_1.Args[0]
 11755  		v_1_1 := v_1.Args[1]
 11756  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 11757  			if v_1_1.Op != OpConst32 {
 11758  				continue
 11759  			}
 11760  			c := auxIntToInt32(v_1_1.AuxInt)
 11761  			if !(c >= 0) {
 11762  				continue
 11763  			}
 11764  			v.reset(OpConstBool)
 11765  			v.AuxInt = boolToAuxInt(true)
 11766  			return true
 11767  		}
 11768  		break
 11769  	}
 11770  	// match: (Leq32 (Const32 [0]) (Rsh32Ux64 _ (Const64 [c])))
 11771  	// cond: c > 0
 11772  	// result: (ConstBool [true])
 11773  	for {
 11774  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 || v_1.Op != OpRsh32Ux64 {
 11775  			break
 11776  		}
 11777  		_ = v_1.Args[1]
 11778  		v_1_1 := v_1.Args[1]
 11779  		if v_1_1.Op != OpConst64 {
 11780  			break
 11781  		}
 11782  		c := auxIntToInt64(v_1_1.AuxInt)
 11783  		if !(c > 0) {
 11784  			break
 11785  		}
 11786  		v.reset(OpConstBool)
 11787  		v.AuxInt = boolToAuxInt(true)
 11788  		return true
 11789  	}
 11790  	// match: (Leq32 x (Const32 <t> [-1]))
 11791  	// result: (Less32 x (Const32 <t> [0]))
 11792  	for {
 11793  		x := v_0
 11794  		if v_1.Op != OpConst32 {
 11795  			break
 11796  		}
 11797  		t := v_1.Type
 11798  		if auxIntToInt32(v_1.AuxInt) != -1 {
 11799  			break
 11800  		}
 11801  		v.reset(OpLess32)
 11802  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 11803  		v0.AuxInt = int32ToAuxInt(0)
 11804  		v.AddArg2(x, v0)
 11805  		return true
 11806  	}
 11807  	// match: (Leq32 (Const32 <t> [1]) x)
 11808  	// result: (Less32 (Const32 <t> [0]) x)
 11809  	for {
 11810  		if v_0.Op != OpConst32 {
 11811  			break
 11812  		}
 11813  		t := v_0.Type
 11814  		if auxIntToInt32(v_0.AuxInt) != 1 {
 11815  			break
 11816  		}
 11817  		x := v_1
 11818  		v.reset(OpLess32)
 11819  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 11820  		v0.AuxInt = int32ToAuxInt(0)
 11821  		v.AddArg2(v0, x)
 11822  		return true
 11823  	}
 11824  	// match: (Leq32 (Const32 [math.MinInt32]) _)
 11825  	// result: (ConstBool [true])
 11826  	for {
 11827  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != math.MinInt32 {
 11828  			break
 11829  		}
 11830  		v.reset(OpConstBool)
 11831  		v.AuxInt = boolToAuxInt(true)
 11832  		return true
 11833  	}
 11834  	// match: (Leq32 _ (Const32 [math.MaxInt32]))
 11835  	// result: (ConstBool [true])
 11836  	for {
 11837  		if v_1.Op != OpConst32 || auxIntToInt32(v_1.AuxInt) != math.MaxInt32 {
 11838  			break
 11839  		}
 11840  		v.reset(OpConstBool)
 11841  		v.AuxInt = boolToAuxInt(true)
 11842  		return true
 11843  	}
 11844  	// match: (Leq32 x c:(Const32 [math.MinInt32]))
 11845  	// result: (Eq32 x c)
 11846  	for {
 11847  		x := v_0
 11848  		c := v_1
 11849  		if c.Op != OpConst32 || auxIntToInt32(c.AuxInt) != math.MinInt32 {
 11850  			break
 11851  		}
 11852  		v.reset(OpEq32)
 11853  		v.AddArg2(x, c)
 11854  		return true
 11855  	}
 11856  	// match: (Leq32 c:(Const32 [math.MaxInt32]) x)
 11857  	// result: (Eq32 x c)
 11858  	for {
 11859  		c := v_0
 11860  		if c.Op != OpConst32 || auxIntToInt32(c.AuxInt) != math.MaxInt32 {
 11861  			break
 11862  		}
 11863  		x := v_1
 11864  		v.reset(OpEq32)
 11865  		v.AddArg2(x, c)
 11866  		return true
 11867  	}
 11868  	return false
 11869  }
 11870  func rewriteValuegeneric_OpLeq32F(v *Value) bool {
 11871  	v_1 := v.Args[1]
 11872  	v_0 := v.Args[0]
 11873  	// match: (Leq32F (Const32F [c]) (Const32F [d]))
 11874  	// result: (ConstBool [c <= d])
 11875  	for {
 11876  		if v_0.Op != OpConst32F {
 11877  			break
 11878  		}
 11879  		c := auxIntToFloat32(v_0.AuxInt)
 11880  		if v_1.Op != OpConst32F {
 11881  			break
 11882  		}
 11883  		d := auxIntToFloat32(v_1.AuxInt)
 11884  		v.reset(OpConstBool)
 11885  		v.AuxInt = boolToAuxInt(c <= d)
 11886  		return true
 11887  	}
 11888  	return false
 11889  }
 11890  func rewriteValuegeneric_OpLeq32U(v *Value) bool {
 11891  	v_1 := v.Args[1]
 11892  	v_0 := v.Args[0]
 11893  	b := v.Block
 11894  	// match: (Leq32U (Const32 [c]) (Const32 [d]))
 11895  	// result: (ConstBool [uint32(c) <= uint32(d)])
 11896  	for {
 11897  		if v_0.Op != OpConst32 {
 11898  			break
 11899  		}
 11900  		c := auxIntToInt32(v_0.AuxInt)
 11901  		if v_1.Op != OpConst32 {
 11902  			break
 11903  		}
 11904  		d := auxIntToInt32(v_1.AuxInt)
 11905  		v.reset(OpConstBool)
 11906  		v.AuxInt = boolToAuxInt(uint32(c) <= uint32(d))
 11907  		return true
 11908  	}
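        	// In unsigned arithmetic, 1 <= x holds exactly when x is nonzero.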
 11909  	// match: (Leq32U (Const32 <t> [1]) x)
 11910  	// result: (Neq32 (Const32 <t> [0]) x)
 11911  	for {
 11912  		if v_0.Op != OpConst32 {
 11913  			break
 11914  		}
 11915  		t := v_0.Type
 11916  		if auxIntToInt32(v_0.AuxInt) != 1 {
 11917  			break
 11918  		}
 11919  		x := v_1
 11920  		v.reset(OpNeq32)
 11921  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 11922  		v0.AuxInt = int32ToAuxInt(0)
 11923  		v.AddArg2(v0, x)
 11924  		return true
 11925  	}
 11926  	// match: (Leq32U (Const32 [0]) _)
 11927  	// result: (ConstBool [true])
 11928  	for {
 11929  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 11930  			break
 11931  		}
 11932  		v.reset(OpConstBool)
 11933  		v.AuxInt = boolToAuxInt(true)
 11934  		return true
 11935  	}
 11936  	// match: (Leq32U _ (Const32 [-1]))
 11937  	// result: (ConstBool [true])
 11938  	for {
 11939  		if v_1.Op != OpConst32 || auxIntToInt32(v_1.AuxInt) != -1 {
 11940  			break
 11941  		}
 11942  		v.reset(OpConstBool)
 11943  		v.AuxInt = boolToAuxInt(true)
 11944  		return true
 11945  	}
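        	// Unsigned x <= 0 forces x == 0; dually, -1 is the largest unsigned value,
        	// so -1 <= x forces x == -1.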
 11946  	// match: (Leq32U x c:(Const32 [0]))
 11947  	// result: (Eq32 x c)
 11948  	for {
 11949  		x := v_0
 11950  		c := v_1
 11951  		if c.Op != OpConst32 || auxIntToInt32(c.AuxInt) != 0 {
 11952  			break
 11953  		}
 11954  		v.reset(OpEq32)
 11955  		v.AddArg2(x, c)
 11956  		return true
 11957  	}
 11958  	// match: (Leq32U c:(Const32 [-1]) x)
 11959  	// result: (Eq32 x c)
 11960  	for {
 11961  		c := v_0
 11962  		if c.Op != OpConst32 || auxIntToInt32(c.AuxInt) != -1 {
 11963  			break
 11964  		}
 11965  		x := v_1
 11966  		v.reset(OpEq32)
 11967  		v.AddArg2(x, c)
 11968  		return true
 11969  	}
 11970  	return false
 11971  }
 11972  func rewriteValuegeneric_OpLeq64(v *Value) bool {
 11973  	v_1 := v.Args[1]
 11974  	v_0 := v.Args[0]
 11975  	b := v.Block
 11976  	// match: (Leq64 (Const64 [c]) (Const64 [d]))
 11977  	// result: (ConstBool [c <= d])
 11978  	for {
 11979  		if v_0.Op != OpConst64 {
 11980  			break
 11981  		}
 11982  		c := auxIntToInt64(v_0.AuxInt)
 11983  		if v_1.Op != OpConst64 {
 11984  			break
 11985  		}
 11986  		d := auxIntToInt64(v_1.AuxInt)
 11987  		v.reset(OpConstBool)
 11988  		v.AuxInt = boolToAuxInt(c <= d)
 11989  		return true
 11990  	}
 11991  	// match: (Leq64 (Const64 [0]) (And64 _ (Const64 [c])))
 11992  	// cond: c >= 0
 11993  	// result: (ConstBool [true])
 11994  	for {
 11995  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 || v_1.Op != OpAnd64 {
 11996  			break
 11997  		}
 11998  		_ = v_1.Args[1]
 11999  		v_1_0 := v_1.Args[0]
 12000  		v_1_1 := v_1.Args[1]
 12001  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 12002  			if v_1_1.Op != OpConst64 {
 12003  				continue
 12004  			}
 12005  			c := auxIntToInt64(v_1_1.AuxInt)
 12006  			if !(c >= 0) {
 12007  				continue
 12008  			}
 12009  			v.reset(OpConstBool)
 12010  			v.AuxInt = boolToAuxInt(true)
 12011  			return true
 12012  		}
 12013  		break
 12014  	}
 12015  	// match: (Leq64 (Const64 [0]) (Rsh64Ux64 _ (Const64 [c])))
 12016  	// cond: c > 0
 12017  	// result: (ConstBool [true])
 12018  	for {
 12019  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 || v_1.Op != OpRsh64Ux64 {
 12020  			break
 12021  		}
 12022  		_ = v_1.Args[1]
 12023  		v_1_1 := v_1.Args[1]
 12024  		if v_1_1.Op != OpConst64 {
 12025  			break
 12026  		}
 12027  		c := auxIntToInt64(v_1_1.AuxInt)
 12028  		if !(c > 0) {
 12029  			break
 12030  		}
 12031  		v.reset(OpConstBool)
 12032  		v.AuxInt = boolToAuxInt(true)
 12033  		return true
 12034  	}
 12035  	// match: (Leq64 x (Const64 <t> [-1]))
 12036  	// result: (Less64 x (Const64 <t> [0]))
 12037  	for {
 12038  		x := v_0
 12039  		if v_1.Op != OpConst64 {
 12040  			break
 12041  		}
 12042  		t := v_1.Type
 12043  		if auxIntToInt64(v_1.AuxInt) != -1 {
 12044  			break
 12045  		}
 12046  		v.reset(OpLess64)
 12047  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 12048  		v0.AuxInt = int64ToAuxInt(0)
 12049  		v.AddArg2(x, v0)
 12050  		return true
 12051  	}
 12052  	// match: (Leq64 (Const64 <t> [1]) x)
 12053  	// result: (Less64 (Const64 <t> [0]) x)
 12054  	for {
 12055  		if v_0.Op != OpConst64 {
 12056  			break
 12057  		}
 12058  		t := v_0.Type
 12059  		if auxIntToInt64(v_0.AuxInt) != 1 {
 12060  			break
 12061  		}
 12062  		x := v_1
 12063  		v.reset(OpLess64)
 12064  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 12065  		v0.AuxInt = int64ToAuxInt(0)
 12066  		v.AddArg2(v0, x)
 12067  		return true
 12068  	}
 12069  	// match: (Leq64 (Const64 [math.MinInt64]) _)
 12070  	// result: (ConstBool [true])
 12071  	for {
 12072  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != math.MinInt64 {
 12073  			break
 12074  		}
 12075  		v.reset(OpConstBool)
 12076  		v.AuxInt = boolToAuxInt(true)
 12077  		return true
 12078  	}
 12079  	// match: (Leq64 _ (Const64 [math.MaxInt64]))
 12080  	// result: (ConstBool [true])
 12081  	for {
 12082  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != math.MaxInt64 {
 12083  			break
 12084  		}
 12085  		v.reset(OpConstBool)
 12086  		v.AuxInt = boolToAuxInt(true)
 12087  		return true
 12088  	}
 12089  	// match: (Leq64 x c:(Const64 [math.MinInt64]))
 12090  	// result: (Eq64 x c)
 12091  	for {
 12092  		x := v_0
 12093  		c := v_1
 12094  		if c.Op != OpConst64 || auxIntToInt64(c.AuxInt) != math.MinInt64 {
 12095  			break
 12096  		}
 12097  		v.reset(OpEq64)
 12098  		v.AddArg2(x, c)
 12099  		return true
 12100  	}
 12101  	// match: (Leq64 c:(Const64 [math.MaxInt64]) x)
 12102  	// result: (Eq64 x c)
 12103  	for {
 12104  		c := v_0
 12105  		if c.Op != OpConst64 || auxIntToInt64(c.AuxInt) != math.MaxInt64 {
 12106  			break
 12107  		}
 12108  		x := v_1
 12109  		v.reset(OpEq64)
 12110  		v.AddArg2(x, c)
 12111  		return true
 12112  	}
 12113  	return false
 12114  }
 12115  func rewriteValuegeneric_OpLeq64F(v *Value) bool {
 12116  	v_1 := v.Args[1]
 12117  	v_0 := v.Args[0]
 12118  	// match: (Leq64F (Const64F [c]) (Const64F [d]))
 12119  	// result: (ConstBool [c <= d])
 12120  	for {
 12121  		if v_0.Op != OpConst64F {
 12122  			break
 12123  		}
 12124  		c := auxIntToFloat64(v_0.AuxInt)
 12125  		if v_1.Op != OpConst64F {
 12126  			break
 12127  		}
 12128  		d := auxIntToFloat64(v_1.AuxInt)
 12129  		v.reset(OpConstBool)
 12130  		v.AuxInt = boolToAuxInt(c <= d)
 12131  		return true
 12132  	}
 12133  	return false
 12134  }
 12135  func rewriteValuegeneric_OpLeq64U(v *Value) bool {
 12136  	v_1 := v.Args[1]
 12137  	v_0 := v.Args[0]
 12138  	b := v.Block
 12139  	// match: (Leq64U (Const64 [c]) (Const64 [d]))
 12140  	// result: (ConstBool [uint64(c) <= uint64(d)])
 12141  	for {
 12142  		if v_0.Op != OpConst64 {
 12143  			break
 12144  		}
 12145  		c := auxIntToInt64(v_0.AuxInt)
 12146  		if v_1.Op != OpConst64 {
 12147  			break
 12148  		}
 12149  		d := auxIntToInt64(v_1.AuxInt)
 12150  		v.reset(OpConstBool)
 12151  		v.AuxInt = boolToAuxInt(uint64(c) <= uint64(d))
 12152  		return true
 12153  	}
 12154  	// match: (Leq64U (Const64 <t> [1]) x)
 12155  	// result: (Neq64 (Const64 <t> [0]) x)
 12156  	for {
 12157  		if v_0.Op != OpConst64 {
 12158  			break
 12159  		}
 12160  		t := v_0.Type
 12161  		if auxIntToInt64(v_0.AuxInt) != 1 {
 12162  			break
 12163  		}
 12164  		x := v_1
 12165  		v.reset(OpNeq64)
 12166  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 12167  		v0.AuxInt = int64ToAuxInt(0)
 12168  		v.AddArg2(v0, x)
 12169  		return true
 12170  	}
 12171  	// match: (Leq64U (Const64 [0]) _)
 12172  	// result: (ConstBool [true])
 12173  	for {
 12174  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 12175  			break
 12176  		}
 12177  		v.reset(OpConstBool)
 12178  		v.AuxInt = boolToAuxInt(true)
 12179  		return true
 12180  	}
 12181  	// match: (Leq64U _ (Const64 [-1]))
 12182  	// result: (ConstBool [true])
 12183  	for {
 12184  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != -1 {
 12185  			break
 12186  		}
 12187  		v.reset(OpConstBool)
 12188  		v.AuxInt = boolToAuxInt(true)
 12189  		return true
 12190  	}
 12191  	// match: (Leq64U x c:(Const64 [0]))
 12192  	// result: (Eq64 x c)
 12193  	for {
 12194  		x := v_0
 12195  		c := v_1
 12196  		if c.Op != OpConst64 || auxIntToInt64(c.AuxInt) != 0 {
 12197  			break
 12198  		}
 12199  		v.reset(OpEq64)
 12200  		v.AddArg2(x, c)
 12201  		return true
 12202  	}
 12203  	// match: (Leq64U c:(Const64 [-1]) x)
 12204  	// result: (Eq64 x c)
 12205  	for {
 12206  		c := v_0
 12207  		if c.Op != OpConst64 || auxIntToInt64(c.AuxInt) != -1 {
 12208  			break
 12209  		}
 12210  		x := v_1
 12211  		v.reset(OpEq64)
 12212  		v.AddArg2(x, c)
 12213  		return true
 12214  	}
 12215  	return false
 12216  }
 12217  func rewriteValuegeneric_OpLeq8(v *Value) bool {
 12218  	v_1 := v.Args[1]
 12219  	v_0 := v.Args[0]
 12220  	b := v.Block
 12221  	// match: (Leq8 (Const8 [c]) (Const8 [d]))
 12222  	// result: (ConstBool [c <= d])
 12223  	for {
 12224  		if v_0.Op != OpConst8 {
 12225  			break
 12226  		}
 12227  		c := auxIntToInt8(v_0.AuxInt)
 12228  		if v_1.Op != OpConst8 {
 12229  			break
 12230  		}
 12231  		d := auxIntToInt8(v_1.AuxInt)
 12232  		v.reset(OpConstBool)
 12233  		v.AuxInt = boolToAuxInt(c <= d)
 12234  		return true
 12235  	}
 12236  	// match: (Leq8 (Const8 [0]) (And8 _ (Const8 [c])))
 12237  	// cond: c >= 0
 12238  	// result: (ConstBool [true])
 12239  	for {
 12240  		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 || v_1.Op != OpAnd8 {
 12241  			break
 12242  		}
 12243  		_ = v_1.Args[1]
 12244  		v_1_0 := v_1.Args[0]
 12245  		v_1_1 := v_1.Args[1]
 12246  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 12247  			if v_1_1.Op != OpConst8 {
 12248  				continue
 12249  			}
 12250  			c := auxIntToInt8(v_1_1.AuxInt)
 12251  			if !(c >= 0) {
 12252  				continue
 12253  			}
 12254  			v.reset(OpConstBool)
 12255  			v.AuxInt = boolToAuxInt(true)
 12256  			return true
 12257  		}
 12258  		break
 12259  	}
 12260  	// match: (Leq8 (Const8 [0]) (Rsh8Ux64 _ (Const64 [c])))
 12261  	// cond: c > 0
 12262  	// result: (ConstBool [true])
 12263  	for {
 12264  		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 || v_1.Op != OpRsh8Ux64 {
 12265  			break
 12266  		}
 12267  		_ = v_1.Args[1]
 12268  		v_1_1 := v_1.Args[1]
 12269  		if v_1_1.Op != OpConst64 {
 12270  			break
 12271  		}
 12272  		c := auxIntToInt64(v_1_1.AuxInt)
 12273  		if !(c > 0) {
 12274  			break
 12275  		}
 12276  		v.reset(OpConstBool)
 12277  		v.AuxInt = boolToAuxInt(true)
 12278  		return true
 12279  	}
 12280  	// match: (Leq8 x (Const8 <t> [-1]))
 12281  	// result: (Less8 x (Const8 <t> [0]))
 12282  	for {
 12283  		x := v_0
 12284  		if v_1.Op != OpConst8 {
 12285  			break
 12286  		}
 12287  		t := v_1.Type
 12288  		if auxIntToInt8(v_1.AuxInt) != -1 {
 12289  			break
 12290  		}
 12291  		v.reset(OpLess8)
 12292  		v0 := b.NewValue0(v.Pos, OpConst8, t)
 12293  		v0.AuxInt = int8ToAuxInt(0)
 12294  		v.AddArg2(x, v0)
 12295  		return true
 12296  	}
 12297  	// match: (Leq8 (Const8 <t> [1]) x)
 12298  	// result: (Less8 (Const8 <t> [0]) x)
 12299  	for {
 12300  		if v_0.Op != OpConst8 {
 12301  			break
 12302  		}
 12303  		t := v_0.Type
 12304  		if auxIntToInt8(v_0.AuxInt) != 1 {
 12305  			break
 12306  		}
 12307  		x := v_1
 12308  		v.reset(OpLess8)
 12309  		v0 := b.NewValue0(v.Pos, OpConst8, t)
 12310  		v0.AuxInt = int8ToAuxInt(0)
 12311  		v.AddArg2(v0, x)
 12312  		return true
 12313  	}
 12314  	// match: (Leq8 (Const8 [math.MinInt8]) _)
 12315  	// result: (ConstBool [true])
 12316  	for {
 12317  		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != math.MinInt8 {
 12318  			break
 12319  		}
 12320  		v.reset(OpConstBool)
 12321  		v.AuxInt = boolToAuxInt(true)
 12322  		return true
 12323  	}
 12324  	// match: (Leq8 _ (Const8 [math.MaxInt8]))
 12325  	// result: (ConstBool [true])
 12326  	for {
 12327  		if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != math.MaxInt8 {
 12328  			break
 12329  		}
 12330  		v.reset(OpConstBool)
 12331  		v.AuxInt = boolToAuxInt(true)
 12332  		return true
 12333  	}
 12334  	// match: (Leq8 x c:(Const8 [math.MinInt8]))
 12335  	// result: (Eq8 x c)
 12336  	for {
 12337  		x := v_0
 12338  		c := v_1
 12339  		if c.Op != OpConst8 || auxIntToInt8(c.AuxInt) != math.MinInt8 {
 12340  			break
 12341  		}
 12342  		v.reset(OpEq8)
 12343  		v.AddArg2(x, c)
 12344  		return true
 12345  	}
 12346  	// match: (Leq8 c:(Const8 [math.MaxInt8]) x)
 12347  	// result: (Eq8 x c)
 12348  	for {
 12349  		c := v_0
 12350  		if c.Op != OpConst8 || auxIntToInt8(c.AuxInt) != math.MaxInt8 {
 12351  			break
 12352  		}
 12353  		x := v_1
 12354  		v.reset(OpEq8)
 12355  		v.AddArg2(x, c)
 12356  		return true
 12357  	}
 12358  	return false
 12359  }
 12360  func rewriteValuegeneric_OpLeq8U(v *Value) bool {
 12361  	v_1 := v.Args[1]
 12362  	v_0 := v.Args[0]
 12363  	b := v.Block
 12364  	// match: (Leq8U (Const8 [c]) (Const8 [d]))
 12365  	// result: (ConstBool [uint8(c) <= uint8(d)])
 12366  	for {
 12367  		if v_0.Op != OpConst8 {
 12368  			break
 12369  		}
 12370  		c := auxIntToInt8(v_0.AuxInt)
 12371  		if v_1.Op != OpConst8 {
 12372  			break
 12373  		}
 12374  		d := auxIntToInt8(v_1.AuxInt)
 12375  		v.reset(OpConstBool)
 12376  		v.AuxInt = boolToAuxInt(uint8(c) <= uint8(d))
 12377  		return true
 12378  	}
 12379  	// match: (Leq8U (Const8 <t> [1]) x)
 12380  	// result: (Neq8 (Const8 <t> [0]) x)
 12381  	for {
 12382  		if v_0.Op != OpConst8 {
 12383  			break
 12384  		}
 12385  		t := v_0.Type
 12386  		if auxIntToInt8(v_0.AuxInt) != 1 {
 12387  			break
 12388  		}
 12389  		x := v_1
 12390  		v.reset(OpNeq8)
 12391  		v0 := b.NewValue0(v.Pos, OpConst8, t)
 12392  		v0.AuxInt = int8ToAuxInt(0)
 12393  		v.AddArg2(v0, x)
 12394  		return true
 12395  	}
 12396  	// match: (Leq8U (Const8 [0]) _)
 12397  	// result: (ConstBool [true])
 12398  	for {
 12399  		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 12400  			break
 12401  		}
 12402  		v.reset(OpConstBool)
 12403  		v.AuxInt = boolToAuxInt(true)
 12404  		return true
 12405  	}
 12406  	// match: (Leq8U _ (Const8 [-1]))
 12407  	// result: (ConstBool [true])
 12408  	for {
 12409  		if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != -1 {
 12410  			break
 12411  		}
 12412  		v.reset(OpConstBool)
 12413  		v.AuxInt = boolToAuxInt(true)
 12414  		return true
 12415  	}
 12416  	// match: (Leq8U x c:(Const8 [0]))
 12417  	// result: (Eq8 x c)
 12418  	for {
 12419  		x := v_0
 12420  		c := v_1
 12421  		if c.Op != OpConst8 || auxIntToInt8(c.AuxInt) != 0 {
 12422  			break
 12423  		}
 12424  		v.reset(OpEq8)
 12425  		v.AddArg2(x, c)
 12426  		return true
 12427  	}
 12428  	// match: (Leq8U c:(Const8 [-1]) x)
 12429  	// result: (Eq8 x c)
 12430  	for {
 12431  		c := v_0
 12432  		if c.Op != OpConst8 || auxIntToInt8(c.AuxInt) != -1 {
 12433  			break
 12434  		}
 12435  		x := v_1
 12436  		v.reset(OpEq8)
 12437  		v.AddArg2(x, c)
 12438  		return true
 12439  	}
 12440  	return false
 12441  }
 12442  func rewriteValuegeneric_OpLess16(v *Value) bool {
 12443  	v_1 := v.Args[1]
 12444  	v_0 := v.Args[0]
 12445  	b := v.Block
 12446  	// match: (Less16 (Const16 [c]) (Const16 [d]))
 12447  	// result: (ConstBool [c < d])
 12448  	for {
 12449  		if v_0.Op != OpConst16 {
 12450  			break
 12451  		}
 12452  		c := auxIntToInt16(v_0.AuxInt)
 12453  		if v_1.Op != OpConst16 {
 12454  			break
 12455  		}
 12456  		d := auxIntToInt16(v_1.AuxInt)
 12457  		v.reset(OpConstBool)
 12458  		v.AuxInt = boolToAuxInt(c < d)
 12459  		return true
 12460  	}
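        	// When x is known to be non-negative, 0 < x is equivalent to x != 0 and
        	// x < 1 is equivalent to x == 0.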
 12461  	// match: (Less16 (Const16 <t> [0]) x)
 12462  	// cond: isNonNegative(x)
 12463  	// result: (Neq16 (Const16 <t> [0]) x)
 12464  	for {
 12465  		if v_0.Op != OpConst16 {
 12466  			break
 12467  		}
 12468  		t := v_0.Type
 12469  		if auxIntToInt16(v_0.AuxInt) != 0 {
 12470  			break
 12471  		}
 12472  		x := v_1
 12473  		if !(isNonNegative(x)) {
 12474  			break
 12475  		}
 12476  		v.reset(OpNeq16)
 12477  		v0 := b.NewValue0(v.Pos, OpConst16, t)
 12478  		v0.AuxInt = int16ToAuxInt(0)
 12479  		v.AddArg2(v0, x)
 12480  		return true
 12481  	}
 12482  	// match: (Less16 x (Const16 <t> [1]))
 12483  	// cond: isNonNegative(x)
 12484  	// result: (Eq16 (Const16 <t> [0]) x)
 12485  	for {
 12486  		x := v_0
 12487  		if v_1.Op != OpConst16 {
 12488  			break
 12489  		}
 12490  		t := v_1.Type
 12491  		if auxIntToInt16(v_1.AuxInt) != 1 || !(isNonNegative(x)) {
 12492  			break
 12493  		}
 12494  		v.reset(OpEq16)
 12495  		v0 := b.NewValue0(v.Pos, OpConst16, t)
 12496  		v0.AuxInt = int16ToAuxInt(0)
 12497  		v.AddArg2(v0, x)
 12498  		return true
 12499  	}
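        	// Without range information, fall back to the inclusive form:
        	// x < 1 becomes x <= 0, and -1 < x becomes 0 <= x.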
 12500  	// match: (Less16 x (Const16 <t> [1]))
 12501  	// result: (Leq16 x (Const16 <t> [0]))
 12502  	for {
 12503  		x := v_0
 12504  		if v_1.Op != OpConst16 {
 12505  			break
 12506  		}
 12507  		t := v_1.Type
 12508  		if auxIntToInt16(v_1.AuxInt) != 1 {
 12509  			break
 12510  		}
 12511  		v.reset(OpLeq16)
 12512  		v0 := b.NewValue0(v.Pos, OpConst16, t)
 12513  		v0.AuxInt = int16ToAuxInt(0)
 12514  		v.AddArg2(x, v0)
 12515  		return true
 12516  	}
 12517  	// match: (Less16 (Const16 <t> [-1]) x)
 12518  	// result: (Leq16 (Const16 <t> [0]) x)
 12519  	for {
 12520  		if v_0.Op != OpConst16 {
 12521  			break
 12522  		}
 12523  		t := v_0.Type
 12524  		if auxIntToInt16(v_0.AuxInt) != -1 {
 12525  			break
 12526  		}
 12527  		x := v_1
 12528  		v.reset(OpLeq16)
 12529  		v0 := b.NewValue0(v.Pos, OpConst16, t)
 12530  		v0.AuxInt = int16ToAuxInt(0)
 12531  		v.AddArg2(v0, x)
 12532  		return true
 12533  	}
 12534  	// match: (Less16 _ (Const16 [math.MinInt16]))
 12535  	// result: (ConstBool [false])
 12536  	for {
 12537  		if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != math.MinInt16 {
 12538  			break
 12539  		}
 12540  		v.reset(OpConstBool)
 12541  		v.AuxInt = boolToAuxInt(false)
 12542  		return true
 12543  	}
 12544  	// match: (Less16 (Const16 [math.MaxInt16]) _)
 12545  	// result: (ConstBool [false])
 12546  	for {
 12547  		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != math.MaxInt16 {
 12548  			break
 12549  		}
 12550  		v.reset(OpConstBool)
 12551  		v.AuxInt = boolToAuxInt(false)
 12552  		return true
 12553  	}
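        	// The only value strictly below math.MinInt16+1 is math.MinInt16 itself
        	// (and symmetrically for math.MaxInt16-1), so these become equality tests.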
 12554  	// match: (Less16 x (Const16 <t> [math.MinInt16+1]))
 12555  	// result: (Eq16 x (Const16 <t> [math.MinInt16]))
 12556  	for {
 12557  		x := v_0
 12558  		if v_1.Op != OpConst16 {
 12559  			break
 12560  		}
 12561  		t := v_1.Type
 12562  		if auxIntToInt16(v_1.AuxInt) != math.MinInt16+1 {
 12563  			break
 12564  		}
 12565  		v.reset(OpEq16)
 12566  		v0 := b.NewValue0(v.Pos, OpConst16, t)
 12567  		v0.AuxInt = int16ToAuxInt(math.MinInt16)
 12568  		v.AddArg2(x, v0)
 12569  		return true
 12570  	}
 12571  	// match: (Less16 (Const16 <t> [math.MaxInt16-1]) x)
 12572  	// result: (Eq16 x (Const16 <t> [math.MaxInt16]))
 12573  	for {
 12574  		if v_0.Op != OpConst16 {
 12575  			break
 12576  		}
 12577  		t := v_0.Type
 12578  		if auxIntToInt16(v_0.AuxInt) != math.MaxInt16-1 {
 12579  			break
 12580  		}
 12581  		x := v_1
 12582  		v.reset(OpEq16)
 12583  		v0 := b.NewValue0(v.Pos, OpConst16, t)
 12584  		v0.AuxInt = int16ToAuxInt(math.MaxInt16)
 12585  		v.AddArg2(x, v0)
 12586  		return true
 12587  	}
 12588  	return false
 12589  }
 12590  func rewriteValuegeneric_OpLess16U(v *Value) bool {
 12591  	v_1 := v.Args[1]
 12592  	v_0 := v.Args[0]
 12593  	b := v.Block
 12594  	// match: (Less16U (Const16 [c]) (Const16 [d]))
 12595  	// result: (ConstBool [uint16(c) < uint16(d)])
 12596  	for {
 12597  		if v_0.Op != OpConst16 {
 12598  			break
 12599  		}
 12600  		c := auxIntToInt16(v_0.AuxInt)
 12601  		if v_1.Op != OpConst16 {
 12602  			break
 12603  		}
 12604  		d := auxIntToInt16(v_1.AuxInt)
 12605  		v.reset(OpConstBool)
 12606  		v.AuxInt = boolToAuxInt(uint16(c) < uint16(d))
 12607  		return true
 12608  	}
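        	// Unsigned x < 1 can only mean x == 0.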
 12609  	// match: (Less16U x (Const16 <t> [1]))
 12610  	// result: (Eq16 (Const16 <t> [0]) x)
 12611  	for {
 12612  		x := v_0
 12613  		if v_1.Op != OpConst16 {
 12614  			break
 12615  		}
 12616  		t := v_1.Type
 12617  		if auxIntToInt16(v_1.AuxInt) != 1 {
 12618  			break
 12619  		}
 12620  		v.reset(OpEq16)
 12621  		v0 := b.NewValue0(v.Pos, OpConst16, t)
 12622  		v0.AuxInt = int16ToAuxInt(0)
 12623  		v.AddArg2(v0, x)
 12624  		return true
 12625  	}
 12626  	// match: (Less16U _ (Const16 [0]))
 12627  	// result: (ConstBool [false])
 12628  	for {
 12629  		if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != 0 {
 12630  			break
 12631  		}
 12632  		v.reset(OpConstBool)
 12633  		v.AuxInt = boolToAuxInt(false)
 12634  		return true
 12635  	}
 12636  	// match: (Less16U (Const16 [-1]) _)
 12637  	// result: (ConstBool [false])
 12638  	for {
 12639  		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != -1 {
 12640  			break
 12641  		}
 12642  		v.reset(OpConstBool)
 12643  		v.AuxInt = boolToAuxInt(false)
 12644  		return true
 12645  	}
 12646  	// match: (Less16U x (Const16 <t> [1]))
 12647  	// result: (Eq16 x (Const16 <t> [0]))
 12648  	for {
 12649  		x := v_0
 12650  		if v_1.Op != OpConst16 {
 12651  			break
 12652  		}
 12653  		t := v_1.Type
 12654  		if auxIntToInt16(v_1.AuxInt) != 1 {
 12655  			break
 12656  		}
 12657  		v.reset(OpEq16)
 12658  		v0 := b.NewValue0(v.Pos, OpConst16, t)
 12659  		v0.AuxInt = int16ToAuxInt(0)
 12660  		v.AddArg2(x, v0)
 12661  		return true
 12662  	}
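        	// -2 is the second-largest unsigned value, so -2 < x holds only for
        	// x == -1 (all bits set).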
 12663  	// match: (Less16U (Const16 <t> [-2]) x)
 12664  	// result: (Eq16 x (Const16 <t> [-1]))
 12665  	for {
 12666  		if v_0.Op != OpConst16 {
 12667  			break
 12668  		}
 12669  		t := v_0.Type
 12670  		if auxIntToInt16(v_0.AuxInt) != -2 {
 12671  			break
 12672  		}
 12673  		x := v_1
 12674  		v.reset(OpEq16)
 12675  		v0 := b.NewValue0(v.Pos, OpConst16, t)
 12676  		v0.AuxInt = int16ToAuxInt(-1)
 12677  		v.AddArg2(x, v0)
 12678  		return true
 12679  	}
 12680  	return false
 12681  }
 12682  func rewriteValuegeneric_OpLess32(v *Value) bool {
 12683  	v_1 := v.Args[1]
 12684  	v_0 := v.Args[0]
 12685  	b := v.Block
 12686  	// match: (Less32 (Const32 [c]) (Const32 [d]))
 12687  	// result: (ConstBool [c < d])
 12688  	for {
 12689  		if v_0.Op != OpConst32 {
 12690  			break
 12691  		}
 12692  		c := auxIntToInt32(v_0.AuxInt)
 12693  		if v_1.Op != OpConst32 {
 12694  			break
 12695  		}
 12696  		d := auxIntToInt32(v_1.AuxInt)
 12697  		v.reset(OpConstBool)
 12698  		v.AuxInt = boolToAuxInt(c < d)
 12699  		return true
 12700  	}
 12701  	// match: (Less32 (Const32 <t> [0]) x)
 12702  	// cond: isNonNegative(x)
 12703  	// result: (Neq32 (Const32 <t> [0]) x)
 12704  	for {
 12705  		if v_0.Op != OpConst32 {
 12706  			break
 12707  		}
 12708  		t := v_0.Type
 12709  		if auxIntToInt32(v_0.AuxInt) != 0 {
 12710  			break
 12711  		}
 12712  		x := v_1
 12713  		if !(isNonNegative(x)) {
 12714  			break
 12715  		}
 12716  		v.reset(OpNeq32)
 12717  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 12718  		v0.AuxInt = int32ToAuxInt(0)
 12719  		v.AddArg2(v0, x)
 12720  		return true
 12721  	}
 12722  	// match: (Less32 x (Const32 <t> [1]))
 12723  	// cond: isNonNegative(x)
 12724  	// result: (Eq32 (Const32 <t> [0]) x)
 12725  	for {
 12726  		x := v_0
 12727  		if v_1.Op != OpConst32 {
 12728  			break
 12729  		}
 12730  		t := v_1.Type
 12731  		if auxIntToInt32(v_1.AuxInt) != 1 || !(isNonNegative(x)) {
 12732  			break
 12733  		}
 12734  		v.reset(OpEq32)
 12735  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 12736  		v0.AuxInt = int32ToAuxInt(0)
 12737  		v.AddArg2(v0, x)
 12738  		return true
 12739  	}
 12740  	// match: (Less32 x (Const32 <t> [1]))
 12741  	// result: (Leq32 x (Const32 <t> [0]))
 12742  	for {
 12743  		x := v_0
 12744  		if v_1.Op != OpConst32 {
 12745  			break
 12746  		}
 12747  		t := v_1.Type
 12748  		if auxIntToInt32(v_1.AuxInt) != 1 {
 12749  			break
 12750  		}
 12751  		v.reset(OpLeq32)
 12752  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 12753  		v0.AuxInt = int32ToAuxInt(0)
 12754  		v.AddArg2(x, v0)
 12755  		return true
 12756  	}
 12757  	// match: (Less32 (Const32 <t> [-1]) x)
 12758  	// result: (Leq32 (Const32 <t> [0]) x)
 12759  	for {
 12760  		if v_0.Op != OpConst32 {
 12761  			break
 12762  		}
 12763  		t := v_0.Type
 12764  		if auxIntToInt32(v_0.AuxInt) != -1 {
 12765  			break
 12766  		}
 12767  		x := v_1
 12768  		v.reset(OpLeq32)
 12769  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 12770  		v0.AuxInt = int32ToAuxInt(0)
 12771  		v.AddArg2(v0, x)
 12772  		return true
 12773  	}
 12774  	// match: (Less32 _ (Const32 [math.MinInt32]))
 12775  	// result: (ConstBool [false])
 12776  	for {
 12777  		if v_1.Op != OpConst32 || auxIntToInt32(v_1.AuxInt) != math.MinInt32 {
 12778  			break
 12779  		}
 12780  		v.reset(OpConstBool)
 12781  		v.AuxInt = boolToAuxInt(false)
 12782  		return true
 12783  	}
 12784  	// match: (Less32 (Const32 [math.MaxInt32]) _)
 12785  	// result: (ConstBool [false])
 12786  	for {
 12787  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != math.MaxInt32 {
 12788  			break
 12789  		}
 12790  		v.reset(OpConstBool)
 12791  		v.AuxInt = boolToAuxInt(false)
 12792  		return true
 12793  	}
 12794  	// match: (Less32 x (Const32 <t> [math.MinInt32+1]))
 12795  	// result: (Eq32 x (Const32 <t> [math.MinInt32]))
 12796  	for {
 12797  		x := v_0
 12798  		if v_1.Op != OpConst32 {
 12799  			break
 12800  		}
 12801  		t := v_1.Type
 12802  		if auxIntToInt32(v_1.AuxInt) != math.MinInt32+1 {
 12803  			break
 12804  		}
 12805  		v.reset(OpEq32)
 12806  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 12807  		v0.AuxInt = int32ToAuxInt(math.MinInt32)
 12808  		v.AddArg2(x, v0)
 12809  		return true
 12810  	}
 12811  	// match: (Less32 (Const32 <t> [math.MaxInt32-1]) x)
 12812  	// result: (Eq32 x (Const32 <t> [math.MaxInt32]))
 12813  	for {
 12814  		if v_0.Op != OpConst32 {
 12815  			break
 12816  		}
 12817  		t := v_0.Type
 12818  		if auxIntToInt32(v_0.AuxInt) != math.MaxInt32-1 {
 12819  			break
 12820  		}
 12821  		x := v_1
 12822  		v.reset(OpEq32)
 12823  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 12824  		v0.AuxInt = int32ToAuxInt(math.MaxInt32)
 12825  		v.AddArg2(x, v0)
 12826  		return true
 12827  	}
 12828  	return false
 12829  }
 12830  func rewriteValuegeneric_OpLess32F(v *Value) bool {
 12831  	v_1 := v.Args[1]
 12832  	v_0 := v.Args[0]
 12833  	// match: (Less32F (Const32F [c]) (Const32F [d]))
 12834  	// result: (ConstBool [c < d])
 12835  	for {
 12836  		if v_0.Op != OpConst32F {
 12837  			break
 12838  		}
 12839  		c := auxIntToFloat32(v_0.AuxInt)
 12840  		if v_1.Op != OpConst32F {
 12841  			break
 12842  		}
 12843  		d := auxIntToFloat32(v_1.AuxInt)
 12844  		v.reset(OpConstBool)
 12845  		v.AuxInt = boolToAuxInt(c < d)
 12846  		return true
 12847  	}
 12848  	return false
 12849  }
 12850  func rewriteValuegeneric_OpLess32U(v *Value) bool {
 12851  	v_1 := v.Args[1]
 12852  	v_0 := v.Args[0]
 12853  	b := v.Block
 12854  	// match: (Less32U (Const32 [c]) (Const32 [d]))
 12855  	// result: (ConstBool [uint32(c) < uint32(d)])
 12856  	for {
 12857  		if v_0.Op != OpConst32 {
 12858  			break
 12859  		}
 12860  		c := auxIntToInt32(v_0.AuxInt)
 12861  		if v_1.Op != OpConst32 {
 12862  			break
 12863  		}
 12864  		d := auxIntToInt32(v_1.AuxInt)
 12865  		v.reset(OpConstBool)
 12866  		v.AuxInt = boolToAuxInt(uint32(c) < uint32(d))
 12867  		return true
 12868  	}
 12869  	// match: (Less32U x (Const32 <t> [1]))
 12870  	// result: (Eq32 (Const32 <t> [0]) x)
 12871  	for {
 12872  		x := v_0
 12873  		if v_1.Op != OpConst32 {
 12874  			break
 12875  		}
 12876  		t := v_1.Type
 12877  		if auxIntToInt32(v_1.AuxInt) != 1 {
 12878  			break
 12879  		}
 12880  		v.reset(OpEq32)
 12881  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 12882  		v0.AuxInt = int32ToAuxInt(0)
 12883  		v.AddArg2(v0, x)
 12884  		return true
 12885  	}
 12886  	// match: (Less32U _ (Const32 [0]))
 12887  	// result: (ConstBool [false])
 12888  	for {
 12889  		if v_1.Op != OpConst32 || auxIntToInt32(v_1.AuxInt) != 0 {
 12890  			break
 12891  		}
 12892  		v.reset(OpConstBool)
 12893  		v.AuxInt = boolToAuxInt(false)
 12894  		return true
 12895  	}
 12896  	// match: (Less32U (Const32 [-1]) _)
 12897  	// result: (ConstBool [false])
 12898  	for {
 12899  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != -1 {
 12900  			break
 12901  		}
 12902  		v.reset(OpConstBool)
 12903  		v.AuxInt = boolToAuxInt(false)
 12904  		return true
 12905  	}
 12906  	// match: (Less32U x (Const32 <t> [1]))
 12907  	// result: (Eq32 x (Const32 <t> [0]))
 12908  	for {
 12909  		x := v_0
 12910  		if v_1.Op != OpConst32 {
 12911  			break
 12912  		}
 12913  		t := v_1.Type
 12914  		if auxIntToInt32(v_1.AuxInt) != 1 {
 12915  			break
 12916  		}
 12917  		v.reset(OpEq32)
 12918  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 12919  		v0.AuxInt = int32ToAuxInt(0)
 12920  		v.AddArg2(x, v0)
 12921  		return true
 12922  	}
 12923  	// match: (Less32U (Const32 <t> [-2]) x)
 12924  	// result: (Eq32 x (Const32 <t> [-1]))
 12925  	for {
 12926  		if v_0.Op != OpConst32 {
 12927  			break
 12928  		}
 12929  		t := v_0.Type
 12930  		if auxIntToInt32(v_0.AuxInt) != -2 {
 12931  			break
 12932  		}
 12933  		x := v_1
 12934  		v.reset(OpEq32)
 12935  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 12936  		v0.AuxInt = int32ToAuxInt(-1)
 12937  		v.AddArg2(x, v0)
 12938  		return true
 12939  	}
 12940  	return false
 12941  }
 12942  func rewriteValuegeneric_OpLess64(v *Value) bool {
 12943  	v_1 := v.Args[1]
 12944  	v_0 := v.Args[0]
 12945  	b := v.Block
 12946  	// match: (Less64 (Const64 [c]) (Const64 [d]))
 12947  	// result: (ConstBool [c < d])
 12948  	for {
 12949  		if v_0.Op != OpConst64 {
 12950  			break
 12951  		}
 12952  		c := auxIntToInt64(v_0.AuxInt)
 12953  		if v_1.Op != OpConst64 {
 12954  			break
 12955  		}
 12956  		d := auxIntToInt64(v_1.AuxInt)
 12957  		v.reset(OpConstBool)
 12958  		v.AuxInt = boolToAuxInt(c < d)
 12959  		return true
 12960  	}
 12961  	// match: (Less64 (Const64 <t> [0]) x)
 12962  	// cond: isNonNegative(x)
 12963  	// result: (Neq64 (Const64 <t> [0]) x)
 12964  	for {
 12965  		if v_0.Op != OpConst64 {
 12966  			break
 12967  		}
 12968  		t := v_0.Type
 12969  		if auxIntToInt64(v_0.AuxInt) != 0 {
 12970  			break
 12971  		}
 12972  		x := v_1
 12973  		if !(isNonNegative(x)) {
 12974  			break
 12975  		}
 12976  		v.reset(OpNeq64)
 12977  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 12978  		v0.AuxInt = int64ToAuxInt(0)
 12979  		v.AddArg2(v0, x)
 12980  		return true
 12981  	}
 12982  	// match: (Less64 x (Const64 <t> [1]))
 12983  	// cond: isNonNegative(x)
 12984  	// result: (Eq64 (Const64 <t> [0]) x)
 12985  	for {
 12986  		x := v_0
 12987  		if v_1.Op != OpConst64 {
 12988  			break
 12989  		}
 12990  		t := v_1.Type
 12991  		if auxIntToInt64(v_1.AuxInt) != 1 || !(isNonNegative(x)) {
 12992  			break
 12993  		}
 12994  		v.reset(OpEq64)
 12995  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 12996  		v0.AuxInt = int64ToAuxInt(0)
 12997  		v.AddArg2(v0, x)
 12998  		return true
 12999  	}
 13000  	// match: (Less64 x (Const64 <t> [1]))
 13001  	// result: (Leq64 x (Const64 <t> [0]))
 13002  	for {
 13003  		x := v_0
 13004  		if v_1.Op != OpConst64 {
 13005  			break
 13006  		}
 13007  		t := v_1.Type
 13008  		if auxIntToInt64(v_1.AuxInt) != 1 {
 13009  			break
 13010  		}
 13011  		v.reset(OpLeq64)
 13012  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 13013  		v0.AuxInt = int64ToAuxInt(0)
 13014  		v.AddArg2(x, v0)
 13015  		return true
 13016  	}
 13017  	// match: (Less64 (Const64 <t> [-1]) x)
 13018  	// result: (Leq64 (Const64 <t> [0]) x)
 13019  	for {
 13020  		if v_0.Op != OpConst64 {
 13021  			break
 13022  		}
 13023  		t := v_0.Type
 13024  		if auxIntToInt64(v_0.AuxInt) != -1 {
 13025  			break
 13026  		}
 13027  		x := v_1
 13028  		v.reset(OpLeq64)
 13029  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 13030  		v0.AuxInt = int64ToAuxInt(0)
 13031  		v.AddArg2(v0, x)
 13032  		return true
 13033  	}
 13034  	// match: (Less64 _ (Const64 [math.MinInt64]))
 13035  	// result: (ConstBool [false])
 13036  	for {
 13037  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != math.MinInt64 {
 13038  			break
 13039  		}
 13040  		v.reset(OpConstBool)
 13041  		v.AuxInt = boolToAuxInt(false)
 13042  		return true
 13043  	}
 13044  	// match: (Less64 (Const64 [math.MaxInt64]) _)
 13045  	// result: (ConstBool [false])
 13046  	for {
 13047  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != math.MaxInt64 {
 13048  			break
 13049  		}
 13050  		v.reset(OpConstBool)
 13051  		v.AuxInt = boolToAuxInt(false)
 13052  		return true
 13053  	}
 13054  	// match: (Less64 x (Const64 <t> [math.MinInt64+1]))
 13055  	// result: (Eq64 x (Const64 <t> [math.MinInt64]))
 13056  	for {
 13057  		x := v_0
 13058  		if v_1.Op != OpConst64 {
 13059  			break
 13060  		}
 13061  		t := v_1.Type
 13062  		if auxIntToInt64(v_1.AuxInt) != math.MinInt64+1 {
 13063  			break
 13064  		}
 13065  		v.reset(OpEq64)
 13066  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 13067  		v0.AuxInt = int64ToAuxInt(math.MinInt64)
 13068  		v.AddArg2(x, v0)
 13069  		return true
 13070  	}
 13071  	// match: (Less64 (Const64 <t> [math.MaxInt64-1]) x)
 13072  	// result: (Eq64 x (Const64 <t> [math.MaxInt64]))
 13073  	for {
 13074  		if v_0.Op != OpConst64 {
 13075  			break
 13076  		}
 13077  		t := v_0.Type
 13078  		if auxIntToInt64(v_0.AuxInt) != math.MaxInt64-1 {
 13079  			break
 13080  		}
 13081  		x := v_1
 13082  		v.reset(OpEq64)
 13083  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 13084  		v0.AuxInt = int64ToAuxInt(math.MaxInt64)
 13085  		v.AddArg2(x, v0)
 13086  		return true
 13087  	}
 13088  	return false
 13089  }
 13090  func rewriteValuegeneric_OpLess64F(v *Value) bool {
 13091  	v_1 := v.Args[1]
 13092  	v_0 := v.Args[0]
 13093  	// match: (Less64F (Const64F [c]) (Const64F [d]))
 13094  	// result: (ConstBool [c < d])
 13095  	for {
 13096  		if v_0.Op != OpConst64F {
 13097  			break
 13098  		}
 13099  		c := auxIntToFloat64(v_0.AuxInt)
 13100  		if v_1.Op != OpConst64F {
 13101  			break
 13102  		}
 13103  		d := auxIntToFloat64(v_1.AuxInt)
 13104  		v.reset(OpConstBool)
 13105  		v.AuxInt = boolToAuxInt(c < d)
 13106  		return true
 13107  	}
 13108  	return false
 13109  }
 13110  func rewriteValuegeneric_OpLess64U(v *Value) bool {
 13111  	v_1 := v.Args[1]
 13112  	v_0 := v.Args[0]
 13113  	b := v.Block
 13114  	// match: (Less64U (Const64 [c]) (Const64 [d]))
 13115  	// result: (ConstBool [uint64(c) < uint64(d)])
 13116  	for {
 13117  		if v_0.Op != OpConst64 {
 13118  			break
 13119  		}
 13120  		c := auxIntToInt64(v_0.AuxInt)
 13121  		if v_1.Op != OpConst64 {
 13122  			break
 13123  		}
 13124  		d := auxIntToInt64(v_1.AuxInt)
 13125  		v.reset(OpConstBool)
 13126  		v.AuxInt = boolToAuxInt(uint64(c) < uint64(d))
 13127  		return true
 13128  	}
 13129  	// match: (Less64U x (Const64 <t> [1]))
 13130  	// result: (Eq64 (Const64 <t> [0]) x)
 13131  	for {
 13132  		x := v_0
 13133  		if v_1.Op != OpConst64 {
 13134  			break
 13135  		}
 13136  		t := v_1.Type
 13137  		if auxIntToInt64(v_1.AuxInt) != 1 {
 13138  			break
 13139  		}
 13140  		v.reset(OpEq64)
 13141  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 13142  		v0.AuxInt = int64ToAuxInt(0)
 13143  		v.AddArg2(v0, x)
 13144  		return true
 13145  	}
 13146  	// match: (Less64U _ (Const64 [0]))
 13147  	// result: (ConstBool [false])
 13148  	for {
 13149  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
 13150  			break
 13151  		}
 13152  		v.reset(OpConstBool)
 13153  		v.AuxInt = boolToAuxInt(false)
 13154  		return true
 13155  	}
 13156  	// match: (Less64U (Const64 [-1]) _)
 13157  	// result: (ConstBool [false])
 13158  	for {
 13159  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != -1 {
 13160  			break
 13161  		}
 13162  		v.reset(OpConstBool)
 13163  		v.AuxInt = boolToAuxInt(false)
 13164  		return true
 13165  	}
 13166  	// match: (Less64U x (Const64 <t> [1]))
 13167  	// result: (Eq64 x (Const64 <t> [0]))
 13168  	for {
 13169  		x := v_0
 13170  		if v_1.Op != OpConst64 {
 13171  			break
 13172  		}
 13173  		t := v_1.Type
 13174  		if auxIntToInt64(v_1.AuxInt) != 1 {
 13175  			break
 13176  		}
 13177  		v.reset(OpEq64)
 13178  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 13179  		v0.AuxInt = int64ToAuxInt(0)
 13180  		v.AddArg2(x, v0)
 13181  		return true
 13182  	}
 13183  	// match: (Less64U (Const64 <t> [-2]) x)
 13184  	// result: (Eq64 x (Const64 <t> [-1]))
 13185  	for {
 13186  		if v_0.Op != OpConst64 {
 13187  			break
 13188  		}
 13189  		t := v_0.Type
 13190  		if auxIntToInt64(v_0.AuxInt) != -2 {
 13191  			break
 13192  		}
 13193  		x := v_1
 13194  		v.reset(OpEq64)
 13195  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 13196  		v0.AuxInt = int64ToAuxInt(-1)
 13197  		v.AddArg2(x, v0)
 13198  		return true
 13199  	}
 13200  	return false
 13201  }
 13202  func rewriteValuegeneric_OpLess8(v *Value) bool {
 13203  	v_1 := v.Args[1]
 13204  	v_0 := v.Args[0]
 13205  	b := v.Block
 13206  	// match: (Less8 (Const8 [c]) (Const8 [d]))
 13207  	// result: (ConstBool [c < d])
 13208  	for {
 13209  		if v_0.Op != OpConst8 {
 13210  			break
 13211  		}
 13212  		c := auxIntToInt8(v_0.AuxInt)
 13213  		if v_1.Op != OpConst8 {
 13214  			break
 13215  		}
 13216  		d := auxIntToInt8(v_1.AuxInt)
 13217  		v.reset(OpConstBool)
 13218  		v.AuxInt = boolToAuxInt(c < d)
 13219  		return true
 13220  	}
 13221  	// match: (Less8 (Const8 <t> [0]) x)
 13222  	// cond: isNonNegative(x)
 13223  	// result: (Neq8 (Const8 <t> [0]) x)
 13224  	for {
 13225  		if v_0.Op != OpConst8 {
 13226  			break
 13227  		}
 13228  		t := v_0.Type
 13229  		if auxIntToInt8(v_0.AuxInt) != 0 {
 13230  			break
 13231  		}
 13232  		x := v_1
 13233  		if !(isNonNegative(x)) {
 13234  			break
 13235  		}
 13236  		v.reset(OpNeq8)
 13237  		v0 := b.NewValue0(v.Pos, OpConst8, t)
 13238  		v0.AuxInt = int8ToAuxInt(0)
 13239  		v.AddArg2(v0, x)
 13240  		return true
 13241  	}
 13242  	// match: (Less8 x (Const8 <t> [1]))
 13243  	// cond: isNonNegative(x)
 13244  	// result: (Eq8 (Const8 <t> [0]) x)
 13245  	for {
 13246  		x := v_0
 13247  		if v_1.Op != OpConst8 {
 13248  			break
 13249  		}
 13250  		t := v_1.Type
 13251  		if auxIntToInt8(v_1.AuxInt) != 1 || !(isNonNegative(x)) {
 13252  			break
 13253  		}
 13254  		v.reset(OpEq8)
 13255  		v0 := b.NewValue0(v.Pos, OpConst8, t)
 13256  		v0.AuxInt = int8ToAuxInt(0)
 13257  		v.AddArg2(v0, x)
 13258  		return true
 13259  	}
 13260  	// match: (Less8 x (Const8 <t> [1]))
 13261  	// result: (Leq8 x (Const8 <t> [0]))
 13262  	for {
 13263  		x := v_0
 13264  		if v_1.Op != OpConst8 {
 13265  			break
 13266  		}
 13267  		t := v_1.Type
 13268  		if auxIntToInt8(v_1.AuxInt) != 1 {
 13269  			break
 13270  		}
 13271  		v.reset(OpLeq8)
 13272  		v0 := b.NewValue0(v.Pos, OpConst8, t)
 13273  		v0.AuxInt = int8ToAuxInt(0)
 13274  		v.AddArg2(x, v0)
 13275  		return true
 13276  	}
 13277  	// match: (Less8 (Const8 <t> [-1]) x)
 13278  	// result: (Leq8 (Const8 <t> [0]) x)
 13279  	for {
 13280  		if v_0.Op != OpConst8 {
 13281  			break
 13282  		}
 13283  		t := v_0.Type
 13284  		if auxIntToInt8(v_0.AuxInt) != -1 {
 13285  			break
 13286  		}
 13287  		x := v_1
 13288  		v.reset(OpLeq8)
 13289  		v0 := b.NewValue0(v.Pos, OpConst8, t)
 13290  		v0.AuxInt = int8ToAuxInt(0)
 13291  		v.AddArg2(v0, x)
 13292  		return true
 13293  	}
 13294  	// match: (Less8 _ (Const8 [math.MinInt8]))
 13295  	// result: (ConstBool [false])
 13296  	for {
 13297  		if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != math.MinInt8 {
 13298  			break
 13299  		}
 13300  		v.reset(OpConstBool)
 13301  		v.AuxInt = boolToAuxInt(false)
 13302  		return true
 13303  	}
 13304  	// match: (Less8 (Const8 [math.MaxInt8]) _)
 13305  	// result: (ConstBool [false])
 13306  	for {
 13307  		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != math.MaxInt8 {
 13308  			break
 13309  		}
 13310  		v.reset(OpConstBool)
 13311  		v.AuxInt = boolToAuxInt(false)
 13312  		return true
 13313  	}
 13314  	// match: (Less8 x (Const8 <t> [math.MinInt8+1]))
 13315  	// result: (Eq8 x (Const8 <t> [math.MinInt8]))
 13316  	for {
 13317  		x := v_0
 13318  		if v_1.Op != OpConst8 {
 13319  			break
 13320  		}
 13321  		t := v_1.Type
 13322  		if auxIntToInt8(v_1.AuxInt) != math.MinInt8+1 {
 13323  			break
 13324  		}
 13325  		v.reset(OpEq8)
 13326  		v0 := b.NewValue0(v.Pos, OpConst8, t)
 13327  		v0.AuxInt = int8ToAuxInt(math.MinInt8)
 13328  		v.AddArg2(x, v0)
 13329  		return true
 13330  	}
 13331  	// match: (Less8 (Const8 <t> [math.MaxInt8-1]) x)
 13332  	// result: (Eq8 x (Const8 <t> [math.MaxInt8]))
 13333  	for {
 13334  		if v_0.Op != OpConst8 {
 13335  			break
 13336  		}
 13337  		t := v_0.Type
 13338  		if auxIntToInt8(v_0.AuxInt) != math.MaxInt8-1 {
 13339  			break
 13340  		}
 13341  		x := v_1
 13342  		v.reset(OpEq8)
 13343  		v0 := b.NewValue0(v.Pos, OpConst8, t)
 13344  		v0.AuxInt = int8ToAuxInt(math.MaxInt8)
 13345  		v.AddArg2(x, v0)
 13346  		return true
 13347  	}
 13348  	return false
 13349  }
 13350  func rewriteValuegeneric_OpLess8U(v *Value) bool {
 13351  	v_1 := v.Args[1]
 13352  	v_0 := v.Args[0]
 13353  	b := v.Block
 13354  	// match: (Less8U (Const8 [c]) (Const8 [d]))
 13355  	// result: (ConstBool [uint8(c) < uint8(d)])
 13356  	for {
 13357  		if v_0.Op != OpConst8 {
 13358  			break
 13359  		}
 13360  		c := auxIntToInt8(v_0.AuxInt)
 13361  		if v_1.Op != OpConst8 {
 13362  			break
 13363  		}
 13364  		d := auxIntToInt8(v_1.AuxInt)
 13365  		v.reset(OpConstBool)
 13366  		v.AuxInt = boolToAuxInt(uint8(c) < uint8(d))
 13367  		return true
 13368  	}
 13369  	// match: (Less8U x (Const8 <t> [1]))
 13370  	// result: (Eq8 (Const8 <t> [0]) x)
 13371  	for {
 13372  		x := v_0
 13373  		if v_1.Op != OpConst8 {
 13374  			break
 13375  		}
 13376  		t := v_1.Type
 13377  		if auxIntToInt8(v_1.AuxInt) != 1 {
 13378  			break
 13379  		}
 13380  		v.reset(OpEq8)
 13381  		v0 := b.NewValue0(v.Pos, OpConst8, t)
 13382  		v0.AuxInt = int8ToAuxInt(0)
 13383  		v.AddArg2(v0, x)
 13384  		return true
 13385  	}
 13386  	// match: (Less8U _ (Const8 [0]))
 13387  	// result: (ConstBool [false])
 13388  	for {
 13389  		if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != 0 {
 13390  			break
 13391  		}
 13392  		v.reset(OpConstBool)
 13393  		v.AuxInt = boolToAuxInt(false)
 13394  		return true
 13395  	}
 13396  	// match: (Less8U (Const8 [-1]) _)
 13397  	// result: (ConstBool [false])
 13398  	for {
 13399  		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != -1 {
 13400  			break
 13401  		}
 13402  		v.reset(OpConstBool)
 13403  		v.AuxInt = boolToAuxInt(false)
 13404  		return true
 13405  	}
 13406  	// match: (Less8U x (Const8 <t> [1]))
 13407  	// result: (Eq8 x (Const8 <t> [0]))
 13408  	for {
 13409  		x := v_0
 13410  		if v_1.Op != OpConst8 {
 13411  			break
 13412  		}
 13413  		t := v_1.Type
 13414  		if auxIntToInt8(v_1.AuxInt) != 1 {
 13415  			break
 13416  		}
 13417  		v.reset(OpEq8)
 13418  		v0 := b.NewValue0(v.Pos, OpConst8, t)
 13419  		v0.AuxInt = int8ToAuxInt(0)
 13420  		v.AddArg2(x, v0)
 13421  		return true
 13422  	}
 13423  	// match: (Less8U (Const8 <t> [-2]) x)
 13424  	// result: (Eq8 x (Const8 <t> [-1]))
 13425  	for {
 13426  		if v_0.Op != OpConst8 {
 13427  			break
 13428  		}
 13429  		t := v_0.Type
 13430  		if auxIntToInt8(v_0.AuxInt) != -2 {
 13431  			break
 13432  		}
 13433  		x := v_1
 13434  		v.reset(OpEq8)
 13435  		v0 := b.NewValue0(v.Pos, OpConst8, t)
 13436  		v0.AuxInt = int8ToAuxInt(-1)
 13437  		v.AddArg2(x, v0)
 13438  		return true
 13439  	}
 13440  	return false
 13441  }
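        // rewriteValuegeneric_OpLoad forwards stored (or zeroed) values to loads from the
        // same address, removing the memory round trip when the pointers, types, and sizes
        // provably match.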
 13442  func rewriteValuegeneric_OpLoad(v *Value) bool {
 13443  	v_1 := v.Args[1]
 13444  	v_0 := v.Args[0]
 13445  	b := v.Block
 13446  	config := b.Func.Config
 13447  	// match: (Load <t1> p1 (Store {t2} p2 x _))
 13448  	// cond: isSamePtr(p1, p2) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size()
 13449  	// result: x
 13450  	for {
 13451  		t1 := v.Type
 13452  		p1 := v_0
 13453  		if v_1.Op != OpStore {
 13454  			break
 13455  		}
 13456  		t2 := auxToType(v_1.Aux)
 13457  		x := v_1.Args[1]
 13458  		p2 := v_1.Args[0]
 13459  		if !(isSamePtr(p1, p2) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size()) {
 13460  			break
 13461  		}
 13462  		v.copyOf(x)
 13463  		return true
 13464  	}
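        	// The variants below look through chains of up to four intervening stores,
        	// provided each one writes memory disjoint from the location being loaded.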
 13465  	// match: (Load <t1> p1 (Store {t2} p2 _ (Store {t3} p3 x _)))
 13466  	// cond: isSamePtr(p1, p3) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size() && disjoint(p3, t3.Size(), p2, t2.Size())
 13467  	// result: x
 13468  	for {
 13469  		t1 := v.Type
 13470  		p1 := v_0
 13471  		if v_1.Op != OpStore {
 13472  			break
 13473  		}
 13474  		t2 := auxToType(v_1.Aux)
 13475  		_ = v_1.Args[2]
 13476  		p2 := v_1.Args[0]
 13477  		v_1_2 := v_1.Args[2]
 13478  		if v_1_2.Op != OpStore {
 13479  			break
 13480  		}
 13481  		t3 := auxToType(v_1_2.Aux)
 13482  		x := v_1_2.Args[1]
 13483  		p3 := v_1_2.Args[0]
 13484  		if !(isSamePtr(p1, p3) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size() && disjoint(p3, t3.Size(), p2, t2.Size())) {
 13485  			break
 13486  		}
 13487  		v.copyOf(x)
 13488  		return true
 13489  	}
 13490  	// match: (Load <t1> p1 (Store {t2} p2 _ (Store {t3} p3 _ (Store {t4} p4 x _))))
 13491  	// cond: isSamePtr(p1, p4) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size() && disjoint(p4, t4.Size(), p2, t2.Size()) && disjoint(p4, t4.Size(), p3, t3.Size())
 13492  	// result: x
 13493  	for {
 13494  		t1 := v.Type
 13495  		p1 := v_0
 13496  		if v_1.Op != OpStore {
 13497  			break
 13498  		}
 13499  		t2 := auxToType(v_1.Aux)
 13500  		_ = v_1.Args[2]
 13501  		p2 := v_1.Args[0]
 13502  		v_1_2 := v_1.Args[2]
 13503  		if v_1_2.Op != OpStore {
 13504  			break
 13505  		}
 13506  		t3 := auxToType(v_1_2.Aux)
 13507  		_ = v_1_2.Args[2]
 13508  		p3 := v_1_2.Args[0]
 13509  		v_1_2_2 := v_1_2.Args[2]
 13510  		if v_1_2_2.Op != OpStore {
 13511  			break
 13512  		}
 13513  		t4 := auxToType(v_1_2_2.Aux)
 13514  		x := v_1_2_2.Args[1]
 13515  		p4 := v_1_2_2.Args[0]
 13516  		if !(isSamePtr(p1, p4) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size() && disjoint(p4, t4.Size(), p2, t2.Size()) && disjoint(p4, t4.Size(), p3, t3.Size())) {
 13517  			break
 13518  		}
 13519  		v.copyOf(x)
 13520  		return true
 13521  	}
 13522  	// match: (Load <t1> p1 (Store {t2} p2 _ (Store {t3} p3 _ (Store {t4} p4 _ (Store {t5} p5 x _)))))
 13523  	// cond: isSamePtr(p1, p5) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size() && disjoint(p5, t5.Size(), p2, t2.Size()) && disjoint(p5, t5.Size(), p3, t3.Size()) && disjoint(p5, t5.Size(), p4, t4.Size())
 13524  	// result: x
 13525  	for {
 13526  		t1 := v.Type
 13527  		p1 := v_0
 13528  		if v_1.Op != OpStore {
 13529  			break
 13530  		}
 13531  		t2 := auxToType(v_1.Aux)
 13532  		_ = v_1.Args[2]
 13533  		p2 := v_1.Args[0]
 13534  		v_1_2 := v_1.Args[2]
 13535  		if v_1_2.Op != OpStore {
 13536  			break
 13537  		}
 13538  		t3 := auxToType(v_1_2.Aux)
 13539  		_ = v_1_2.Args[2]
 13540  		p3 := v_1_2.Args[0]
 13541  		v_1_2_2 := v_1_2.Args[2]
 13542  		if v_1_2_2.Op != OpStore {
 13543  			break
 13544  		}
 13545  		t4 := auxToType(v_1_2_2.Aux)
 13546  		_ = v_1_2_2.Args[2]
 13547  		p4 := v_1_2_2.Args[0]
 13548  		v_1_2_2_2 := v_1_2_2.Args[2]
 13549  		if v_1_2_2_2.Op != OpStore {
 13550  			break
 13551  		}
 13552  		t5 := auxToType(v_1_2_2_2.Aux)
 13553  		x := v_1_2_2_2.Args[1]
 13554  		p5 := v_1_2_2_2.Args[0]
 13555  		if !(isSamePtr(p1, p5) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == t2.Size() && disjoint(p5, t5.Size(), p2, t2.Size()) && disjoint(p5, t5.Size(), p3, t3.Size()) && disjoint(p5, t5.Size(), p4, t4.Size())) {
 13556  			break
 13557  		}
 13558  		v.copyOf(x)
 13559  		return true
 13560  	}
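        	// A load that reinterprets a stored integer constant as a float (or a stored
        	// float constant as an integer) folds to a constant of the loaded type using
        	// math.Float64frombits/Float64bits and their 32-bit counterparts; the
        	// integer-to-float direction is skipped when the bits encode a NaN.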
 13561  	// match: (Load <t1> p1 (Store {t2} p2 (Const64 [x]) _))
 13562  	// cond: isSamePtr(p1,p2) && t2.Size() == 8 && is64BitFloat(t1) && !math.IsNaN(math.Float64frombits(uint64(x)))
 13563  	// result: (Const64F [math.Float64frombits(uint64(x))])
 13564  	for {
 13565  		t1 := v.Type
 13566  		p1 := v_0
 13567  		if v_1.Op != OpStore {
 13568  			break
 13569  		}
 13570  		t2 := auxToType(v_1.Aux)
 13571  		_ = v_1.Args[1]
 13572  		p2 := v_1.Args[0]
 13573  		v_1_1 := v_1.Args[1]
 13574  		if v_1_1.Op != OpConst64 {
 13575  			break
 13576  		}
 13577  		x := auxIntToInt64(v_1_1.AuxInt)
 13578  		if !(isSamePtr(p1, p2) && t2.Size() == 8 && is64BitFloat(t1) && !math.IsNaN(math.Float64frombits(uint64(x)))) {
 13579  			break
 13580  		}
 13581  		v.reset(OpConst64F)
 13582  		v.AuxInt = float64ToAuxInt(math.Float64frombits(uint64(x)))
 13583  		return true
 13584  	}
 13585  	// match: (Load <t1> p1 (Store {t2} p2 (Const32 [x]) _))
 13586  	// cond: isSamePtr(p1,p2) && t2.Size() == 4 && is32BitFloat(t1) && !math.IsNaN(float64(math.Float32frombits(uint32(x))))
 13587  	// result: (Const32F [math.Float32frombits(uint32(x))])
 13588  	for {
 13589  		t1 := v.Type
 13590  		p1 := v_0
 13591  		if v_1.Op != OpStore {
 13592  			break
 13593  		}
 13594  		t2 := auxToType(v_1.Aux)
 13595  		_ = v_1.Args[1]
 13596  		p2 := v_1.Args[0]
 13597  		v_1_1 := v_1.Args[1]
 13598  		if v_1_1.Op != OpConst32 {
 13599  			break
 13600  		}
 13601  		x := auxIntToInt32(v_1_1.AuxInt)
 13602  		if !(isSamePtr(p1, p2) && t2.Size() == 4 && is32BitFloat(t1) && !math.IsNaN(float64(math.Float32frombits(uint32(x))))) {
 13603  			break
 13604  		}
 13605  		v.reset(OpConst32F)
 13606  		v.AuxInt = float32ToAuxInt(math.Float32frombits(uint32(x)))
 13607  		return true
 13608  	}
 13609  	// match: (Load <t1> p1 (Store {t2} p2 (Const64F [x]) _))
 13610  	// cond: isSamePtr(p1,p2) && t2.Size() == 8 && is64BitInt(t1)
 13611  	// result: (Const64 [int64(math.Float64bits(x))])
 13612  	for {
 13613  		t1 := v.Type
 13614  		p1 := v_0
 13615  		if v_1.Op != OpStore {
 13616  			break
 13617  		}
 13618  		t2 := auxToType(v_1.Aux)
 13619  		_ = v_1.Args[1]
 13620  		p2 := v_1.Args[0]
 13621  		v_1_1 := v_1.Args[1]
 13622  		if v_1_1.Op != OpConst64F {
 13623  			break
 13624  		}
 13625  		x := auxIntToFloat64(v_1_1.AuxInt)
 13626  		if !(isSamePtr(p1, p2) && t2.Size() == 8 && is64BitInt(t1)) {
 13627  			break
 13628  		}
 13629  		v.reset(OpConst64)
 13630  		v.AuxInt = int64ToAuxInt(int64(math.Float64bits(x)))
 13631  		return true
 13632  	}
 13633  	// match: (Load <t1> p1 (Store {t2} p2 (Const32F [x]) _))
 13634  	// cond: isSamePtr(p1,p2) && t2.Size() == 4 && is32BitInt(t1)
 13635  	// result: (Const32 [int32(math.Float32bits(x))])
 13636  	for {
 13637  		t1 := v.Type
 13638  		p1 := v_0
 13639  		if v_1.Op != OpStore {
 13640  			break
 13641  		}
 13642  		t2 := auxToType(v_1.Aux)
 13643  		_ = v_1.Args[1]
 13644  		p2 := v_1.Args[0]
 13645  		v_1_1 := v_1.Args[1]
 13646  		if v_1_1.Op != OpConst32F {
 13647  			break
 13648  		}
 13649  		x := auxIntToFloat32(v_1_1.AuxInt)
 13650  		if !(isSamePtr(p1, p2) && t2.Size() == 4 && is32BitInt(t1)) {
 13651  			break
 13652  		}
 13653  		v.reset(OpConst32)
 13654  		v.AuxInt = int32ToAuxInt(int32(math.Float32bits(x)))
 13655  		return true
 13656  	}
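        	// When the address is an offset into memory cleared by a Zero and every
        	// intervening store is disjoint from the loaded range, the load is re-issued in
        	// the Zero's block directly against the Zero's memory state.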
 13657  	// match: (Load <t1> op:(OffPtr [o1] p1) (Store {t2} p2 _ mem:(Zero [n] p3 _)))
 13658  	// cond: o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p3) && CanSSA(t1) && disjoint(op, t1.Size(), p2, t2.Size())
 13659  	// result: @mem.Block (Load <t1> (OffPtr <op.Type> [o1] p3) mem)
 13660  	for {
 13661  		t1 := v.Type
 13662  		op := v_0
 13663  		if op.Op != OpOffPtr {
 13664  			break
 13665  		}
 13666  		o1 := auxIntToInt64(op.AuxInt)
 13667  		p1 := op.Args[0]
 13668  		if v_1.Op != OpStore {
 13669  			break
 13670  		}
 13671  		t2 := auxToType(v_1.Aux)
 13672  		_ = v_1.Args[2]
 13673  		p2 := v_1.Args[0]
 13674  		mem := v_1.Args[2]
 13675  		if mem.Op != OpZero {
 13676  			break
 13677  		}
 13678  		n := auxIntToInt64(mem.AuxInt)
 13679  		p3 := mem.Args[0]
 13680  		if !(o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p3) && CanSSA(t1) && disjoint(op, t1.Size(), p2, t2.Size())) {
 13681  			break
 13682  		}
 13683  		b = mem.Block
 13684  		v0 := b.NewValue0(v.Pos, OpLoad, t1)
 13685  		v.copyOf(v0)
 13686  		v1 := b.NewValue0(v.Pos, OpOffPtr, op.Type)
 13687  		v1.AuxInt = int64ToAuxInt(o1)
 13688  		v1.AddArg(p3)
 13689  		v0.AddArg2(v1, mem)
 13690  		return true
 13691  	}
 13692  	// match: (Load <t1> op:(OffPtr [o1] p1) (Store {t2} p2 _ (Store {t3} p3 _ mem:(Zero [n] p4 _))))
 13693  	// cond: o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p4) && CanSSA(t1) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size())
 13694  	// result: @mem.Block (Load <t1> (OffPtr <op.Type> [o1] p4) mem)
 13695  	for {
 13696  		t1 := v.Type
 13697  		op := v_0
 13698  		if op.Op != OpOffPtr {
 13699  			break
 13700  		}
 13701  		o1 := auxIntToInt64(op.AuxInt)
 13702  		p1 := op.Args[0]
 13703  		if v_1.Op != OpStore {
 13704  			break
 13705  		}
 13706  		t2 := auxToType(v_1.Aux)
 13707  		_ = v_1.Args[2]
 13708  		p2 := v_1.Args[0]
 13709  		v_1_2 := v_1.Args[2]
 13710  		if v_1_2.Op != OpStore {
 13711  			break
 13712  		}
 13713  		t3 := auxToType(v_1_2.Aux)
 13714  		_ = v_1_2.Args[2]
 13715  		p3 := v_1_2.Args[0]
 13716  		mem := v_1_2.Args[2]
 13717  		if mem.Op != OpZero {
 13718  			break
 13719  		}
 13720  		n := auxIntToInt64(mem.AuxInt)
 13721  		p4 := mem.Args[0]
 13722  		if !(o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p4) && CanSSA(t1) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size())) {
 13723  			break
 13724  		}
 13725  		b = mem.Block
 13726  		v0 := b.NewValue0(v.Pos, OpLoad, t1)
 13727  		v.copyOf(v0)
 13728  		v1 := b.NewValue0(v.Pos, OpOffPtr, op.Type)
 13729  		v1.AuxInt = int64ToAuxInt(o1)
 13730  		v1.AddArg(p4)
 13731  		v0.AddArg2(v1, mem)
 13732  		return true
 13733  	}
 13734  	// match: (Load <t1> op:(OffPtr [o1] p1) (Store {t2} p2 _ (Store {t3} p3 _ (Store {t4} p4 _ mem:(Zero [n] p5 _)))))
 13735  	// cond: o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p5) && CanSSA(t1) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size()) && disjoint(op, t1.Size(), p4, t4.Size())
 13736  	// result: @mem.Block (Load <t1> (OffPtr <op.Type> [o1] p5) mem)
 13737  	for {
 13738  		t1 := v.Type
 13739  		op := v_0
 13740  		if op.Op != OpOffPtr {
 13741  			break
 13742  		}
 13743  		o1 := auxIntToInt64(op.AuxInt)
 13744  		p1 := op.Args[0]
 13745  		if v_1.Op != OpStore {
 13746  			break
 13747  		}
 13748  		t2 := auxToType(v_1.Aux)
 13749  		_ = v_1.Args[2]
 13750  		p2 := v_1.Args[0]
 13751  		v_1_2 := v_1.Args[2]
 13752  		if v_1_2.Op != OpStore {
 13753  			break
 13754  		}
 13755  		t3 := auxToType(v_1_2.Aux)
 13756  		_ = v_1_2.Args[2]
 13757  		p3 := v_1_2.Args[0]
 13758  		v_1_2_2 := v_1_2.Args[2]
 13759  		if v_1_2_2.Op != OpStore {
 13760  			break
 13761  		}
 13762  		t4 := auxToType(v_1_2_2.Aux)
 13763  		_ = v_1_2_2.Args[2]
 13764  		p4 := v_1_2_2.Args[0]
 13765  		mem := v_1_2_2.Args[2]
 13766  		if mem.Op != OpZero {
 13767  			break
 13768  		}
 13769  		n := auxIntToInt64(mem.AuxInt)
 13770  		p5 := mem.Args[0]
 13771  		if !(o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p5) && CanSSA(t1) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size()) && disjoint(op, t1.Size(), p4, t4.Size())) {
 13772  			break
 13773  		}
 13774  		b = mem.Block
 13775  		v0 := b.NewValue0(v.Pos, OpLoad, t1)
 13776  		v.copyOf(v0)
 13777  		v1 := b.NewValue0(v.Pos, OpOffPtr, op.Type)
 13778  		v1.AuxInt = int64ToAuxInt(o1)
 13779  		v1.AddArg(p5)
 13780  		v0.AddArg2(v1, mem)
 13781  		return true
 13782  	}
 13783  	// match: (Load <t1> op:(OffPtr [o1] p1) (Store {t2} p2 _ (Store {t3} p3 _ (Store {t4} p4 _ (Store {t5} p5 _ mem:(Zero [n] p6 _))))))
 13784  	// cond: o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p6) && CanSSA(t1) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size()) && disjoint(op, t1.Size(), p4, t4.Size()) && disjoint(op, t1.Size(), p5, t5.Size())
 13785  	// result: @mem.Block (Load <t1> (OffPtr <op.Type> [o1] p6) mem)
 13786  	for {
 13787  		t1 := v.Type
 13788  		op := v_0
 13789  		if op.Op != OpOffPtr {
 13790  			break
 13791  		}
 13792  		o1 := auxIntToInt64(op.AuxInt)
 13793  		p1 := op.Args[0]
 13794  		if v_1.Op != OpStore {
 13795  			break
 13796  		}
 13797  		t2 := auxToType(v_1.Aux)
 13798  		_ = v_1.Args[2]
 13799  		p2 := v_1.Args[0]
 13800  		v_1_2 := v_1.Args[2]
 13801  		if v_1_2.Op != OpStore {
 13802  			break
 13803  		}
 13804  		t3 := auxToType(v_1_2.Aux)
 13805  		_ = v_1_2.Args[2]
 13806  		p3 := v_1_2.Args[0]
 13807  		v_1_2_2 := v_1_2.Args[2]
 13808  		if v_1_2_2.Op != OpStore {
 13809  			break
 13810  		}
 13811  		t4 := auxToType(v_1_2_2.Aux)
 13812  		_ = v_1_2_2.Args[2]
 13813  		p4 := v_1_2_2.Args[0]
 13814  		v_1_2_2_2 := v_1_2_2.Args[2]
 13815  		if v_1_2_2_2.Op != OpStore {
 13816  			break
 13817  		}
 13818  		t5 := auxToType(v_1_2_2_2.Aux)
 13819  		_ = v_1_2_2_2.Args[2]
 13820  		p5 := v_1_2_2_2.Args[0]
 13821  		mem := v_1_2_2_2.Args[2]
 13822  		if mem.Op != OpZero {
 13823  			break
 13824  		}
 13825  		n := auxIntToInt64(mem.AuxInt)
 13826  		p6 := mem.Args[0]
 13827  		if !(o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p6) && CanSSA(t1) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size()) && disjoint(op, t1.Size(), p4, t4.Size()) && disjoint(op, t1.Size(), p5, t5.Size())) {
 13828  			break
 13829  		}
 13830  		b = mem.Block
 13831  		v0 := b.NewValue0(v.Pos, OpLoad, t1)
 13832  		v.copyOf(v0)
 13833  		v1 := b.NewValue0(v.Pos, OpOffPtr, op.Type)
 13834  		v1.AuxInt = int64ToAuxInt(o1)
 13835  		v1.AddArg(p6)
 13836  		v0.AddArg2(v1, mem)
 13837  		return true
 13838  	}
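       	// Illustrative note: the chain-of-stores rules above let a load look through
       	// up to four intervening stores to provably disjoint addresses and read
       	// directly from the memory state produced by the underlying Zero.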
 13839  	// match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
 13840  	// cond: t1.IsBoolean() && isSamePtr(p1, p2) && n >= o + 1
 13841  	// result: (ConstBool [false])
 13842  	for {
 13843  		t1 := v.Type
 13844  		if v_0.Op != OpOffPtr {
 13845  			break
 13846  		}
 13847  		o := auxIntToInt64(v_0.AuxInt)
 13848  		p1 := v_0.Args[0]
 13849  		if v_1.Op != OpZero {
 13850  			break
 13851  		}
 13852  		n := auxIntToInt64(v_1.AuxInt)
 13853  		p2 := v_1.Args[0]
 13854  		if !(t1.IsBoolean() && isSamePtr(p1, p2) && n >= o+1) {
 13855  			break
 13856  		}
 13857  		v.reset(OpConstBool)
 13858  		v.AuxInt = boolToAuxInt(false)
 13859  		return true
 13860  	}
 13861  	// match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
 13862  	// cond: is8BitInt(t1) && isSamePtr(p1, p2) && n >= o + 1
 13863  	// result: (Const8 [0])
 13864  	for {
 13865  		t1 := v.Type
 13866  		if v_0.Op != OpOffPtr {
 13867  			break
 13868  		}
 13869  		o := auxIntToInt64(v_0.AuxInt)
 13870  		p1 := v_0.Args[0]
 13871  		if v_1.Op != OpZero {
 13872  			break
 13873  		}
 13874  		n := auxIntToInt64(v_1.AuxInt)
 13875  		p2 := v_1.Args[0]
 13876  		if !(is8BitInt(t1) && isSamePtr(p1, p2) && n >= o+1) {
 13877  			break
 13878  		}
 13879  		v.reset(OpConst8)
 13880  		v.AuxInt = int8ToAuxInt(0)
 13881  		return true
 13882  	}
 13883  	// match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
 13884  	// cond: is16BitInt(t1) && isSamePtr(p1, p2) && n >= o + 2
 13885  	// result: (Const16 [0])
 13886  	for {
 13887  		t1 := v.Type
 13888  		if v_0.Op != OpOffPtr {
 13889  			break
 13890  		}
 13891  		o := auxIntToInt64(v_0.AuxInt)
 13892  		p1 := v_0.Args[0]
 13893  		if v_1.Op != OpZero {
 13894  			break
 13895  		}
 13896  		n := auxIntToInt64(v_1.AuxInt)
 13897  		p2 := v_1.Args[0]
 13898  		if !(is16BitInt(t1) && isSamePtr(p1, p2) && n >= o+2) {
 13899  			break
 13900  		}
 13901  		v.reset(OpConst16)
 13902  		v.AuxInt = int16ToAuxInt(0)
 13903  		return true
 13904  	}
 13905  	// match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
 13906  	// cond: is32BitInt(t1) && isSamePtr(p1, p2) && n >= o + 4
 13907  	// result: (Const32 [0])
 13908  	for {
 13909  		t1 := v.Type
 13910  		if v_0.Op != OpOffPtr {
 13911  			break
 13912  		}
 13913  		o := auxIntToInt64(v_0.AuxInt)
 13914  		p1 := v_0.Args[0]
 13915  		if v_1.Op != OpZero {
 13916  			break
 13917  		}
 13918  		n := auxIntToInt64(v_1.AuxInt)
 13919  		p2 := v_1.Args[0]
 13920  		if !(is32BitInt(t1) && isSamePtr(p1, p2) && n >= o+4) {
 13921  			break
 13922  		}
 13923  		v.reset(OpConst32)
 13924  		v.AuxInt = int32ToAuxInt(0)
 13925  		return true
 13926  	}
 13927  	// match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
 13928  	// cond: is64BitInt(t1) && isSamePtr(p1, p2) && n >= o + 8
 13929  	// result: (Const64 [0])
 13930  	for {
 13931  		t1 := v.Type
 13932  		if v_0.Op != OpOffPtr {
 13933  			break
 13934  		}
 13935  		o := auxIntToInt64(v_0.AuxInt)
 13936  		p1 := v_0.Args[0]
 13937  		if v_1.Op != OpZero {
 13938  			break
 13939  		}
 13940  		n := auxIntToInt64(v_1.AuxInt)
 13941  		p2 := v_1.Args[0]
 13942  		if !(is64BitInt(t1) && isSamePtr(p1, p2) && n >= o+8) {
 13943  			break
 13944  		}
 13945  		v.reset(OpConst64)
 13946  		v.AuxInt = int64ToAuxInt(0)
 13947  		return true
 13948  	}
 13949  	// match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
 13950  	// cond: is32BitFloat(t1) && isSamePtr(p1, p2) && n >= o + 4
 13951  	// result: (Const32F [0])
 13952  	for {
 13953  		t1 := v.Type
 13954  		if v_0.Op != OpOffPtr {
 13955  			break
 13956  		}
 13957  		o := auxIntToInt64(v_0.AuxInt)
 13958  		p1 := v_0.Args[0]
 13959  		if v_1.Op != OpZero {
 13960  			break
 13961  		}
 13962  		n := auxIntToInt64(v_1.AuxInt)
 13963  		p2 := v_1.Args[0]
 13964  		if !(is32BitFloat(t1) && isSamePtr(p1, p2) && n >= o+4) {
 13965  			break
 13966  		}
 13967  		v.reset(OpConst32F)
 13968  		v.AuxInt = float32ToAuxInt(0)
 13969  		return true
 13970  	}
 13971  	// match: (Load <t1> (OffPtr [o] p1) (Zero [n] p2 _))
 13972  	// cond: is64BitFloat(t1) && isSamePtr(p1, p2) && n >= o + 8
 13973  	// result: (Const64F [0])
 13974  	for {
 13975  		t1 := v.Type
 13976  		if v_0.Op != OpOffPtr {
 13977  			break
 13978  		}
 13979  		o := auxIntToInt64(v_0.AuxInt)
 13980  		p1 := v_0.Args[0]
 13981  		if v_1.Op != OpZero {
 13982  			break
 13983  		}
 13984  		n := auxIntToInt64(v_1.AuxInt)
 13985  		p2 := v_1.Args[0]
 13986  		if !(is64BitFloat(t1) && isSamePtr(p1, p2) && n >= o+8) {
 13987  			break
 13988  		}
 13989  		v.reset(OpConst64F)
 13990  		v.AuxInt = float64ToAuxInt(0)
 13991  		return true
 13992  	}
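       	// Illustrative note: the rules above fold a load whose entire extent lies
       	// inside a Zero'd region into the corresponding zero constant; for example,
       	// an 8-byte integer load at offset o from (Zero [n] p) with o+8 <= n
       	// becomes (Const64 [0]).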
 13993  	// match: (Load <t> _ _)
 13994  	// cond: t.IsStruct() && CanSSA(t)
 13995  	// result: rewriteStructLoad(v)
 13996  	for {
 13997  		t := v.Type
 13998  		if !(t.IsStruct() && CanSSA(t)) {
 13999  			break
 14000  		}
 14001  		v.copyOf(rewriteStructLoad(v))
 14002  		return true
 14003  	}
 14004  	// match: (Load <t> _ _)
 14005  	// cond: t.IsArray() && t.NumElem() == 0
 14006  	// result: (ArrayMake0)
 14007  	for {
 14008  		t := v.Type
 14009  		if !(t.IsArray() && t.NumElem() == 0) {
 14010  			break
 14011  		}
 14012  		v.reset(OpArrayMake0)
 14013  		return true
 14014  	}
 14015  	// match: (Load <t> ptr mem)
 14016  	// cond: t.IsArray() && t.NumElem() == 1 && CanSSA(t)
 14017  	// result: (ArrayMake1 (Load <t.Elem()> ptr mem))
 14018  	for {
 14019  		t := v.Type
 14020  		ptr := v_0
 14021  		mem := v_1
 14022  		if !(t.IsArray() && t.NumElem() == 1 && CanSSA(t)) {
 14023  			break
 14024  		}
 14025  		v.reset(OpArrayMake1)
 14026  		v0 := b.NewValue0(v.Pos, OpLoad, t.Elem())
 14027  		v0.AddArg2(ptr, mem)
 14028  		v.AddArg(v0)
 14029  		return true
 14030  	}
 14031  	// match: (Load <t> (OffPtr [off] (Addr {s} sb) ) _)
 14032  	// cond: t.IsUintptr() && isFixedSym(s, off)
 14033  	// result: (Addr {fixedSym(b.Func, s, off)} sb)
 14034  	for {
 14035  		t := v.Type
 14036  		if v_0.Op != OpOffPtr {
 14037  			break
 14038  		}
 14039  		off := auxIntToInt64(v_0.AuxInt)
 14040  		v_0_0 := v_0.Args[0]
 14041  		if v_0_0.Op != OpAddr {
 14042  			break
 14043  		}
 14044  		s := auxToSym(v_0_0.Aux)
 14045  		sb := v_0_0.Args[0]
 14046  		if !(t.IsUintptr() && isFixedSym(s, off)) {
 14047  			break
 14048  		}
 14049  		v.reset(OpAddr)
 14050  		v.Aux = symToAux(fixedSym(b.Func, s, off))
 14051  		v.AddArg(sb)
 14052  		return true
 14053  	}
 14054  	// match: (Load <t> (OffPtr [off] (Convert (Addr {s} sb) _) ) _)
 14055  	// cond: t.IsUintptr() && isFixedSym(s, off)
 14056  	// result: (Addr {fixedSym(b.Func, s, off)} sb)
 14057  	for {
 14058  		t := v.Type
 14059  		if v_0.Op != OpOffPtr {
 14060  			break
 14061  		}
 14062  		off := auxIntToInt64(v_0.AuxInt)
 14063  		v_0_0 := v_0.Args[0]
 14064  		if v_0_0.Op != OpConvert {
 14065  			break
 14066  		}
 14067  		v_0_0_0 := v_0_0.Args[0]
 14068  		if v_0_0_0.Op != OpAddr {
 14069  			break
 14070  		}
 14071  		s := auxToSym(v_0_0_0.Aux)
 14072  		sb := v_0_0_0.Args[0]
 14073  		if !(t.IsUintptr() && isFixedSym(s, off)) {
 14074  			break
 14075  		}
 14076  		v.reset(OpAddr)
 14077  		v.Aux = symToAux(fixedSym(b.Func, s, off))
 14078  		v.AddArg(sb)
 14079  		return true
 14080  	}
 14081  	// match: (Load <t> (OffPtr [off] (ITab (IMake (Addr {s} sb) _))) _)
 14082  	// cond: t.IsUintptr() && isFixedSym(s, off)
 14083  	// result: (Addr {fixedSym(b.Func, s, off)} sb)
 14084  	for {
 14085  		t := v.Type
 14086  		if v_0.Op != OpOffPtr {
 14087  			break
 14088  		}
 14089  		off := auxIntToInt64(v_0.AuxInt)
 14090  		v_0_0 := v_0.Args[0]
 14091  		if v_0_0.Op != OpITab {
 14092  			break
 14093  		}
 14094  		v_0_0_0 := v_0_0.Args[0]
 14095  		if v_0_0_0.Op != OpIMake {
 14096  			break
 14097  		}
 14098  		v_0_0_0_0 := v_0_0_0.Args[0]
 14099  		if v_0_0_0_0.Op != OpAddr {
 14100  			break
 14101  		}
 14102  		s := auxToSym(v_0_0_0_0.Aux)
 14103  		sb := v_0_0_0_0.Args[0]
 14104  		if !(t.IsUintptr() && isFixedSym(s, off)) {
 14105  			break
 14106  		}
 14107  		v.reset(OpAddr)
 14108  		v.Aux = symToAux(fixedSym(b.Func, s, off))
 14109  		v.AddArg(sb)
 14110  		return true
 14111  	}
 14112  	// match: (Load <t> (OffPtr [off] (ITab (IMake (Convert (Addr {s} sb) _) _))) _)
 14113  	// cond: t.IsUintptr() && isFixedSym(s, off)
 14114  	// result: (Addr {fixedSym(b.Func, s, off)} sb)
 14115  	for {
 14116  		t := v.Type
 14117  		if v_0.Op != OpOffPtr {
 14118  			break
 14119  		}
 14120  		off := auxIntToInt64(v_0.AuxInt)
 14121  		v_0_0 := v_0.Args[0]
 14122  		if v_0_0.Op != OpITab {
 14123  			break
 14124  		}
 14125  		v_0_0_0 := v_0_0.Args[0]
 14126  		if v_0_0_0.Op != OpIMake {
 14127  			break
 14128  		}
 14129  		v_0_0_0_0 := v_0_0_0.Args[0]
 14130  		if v_0_0_0_0.Op != OpConvert {
 14131  			break
 14132  		}
 14133  		v_0_0_0_0_0 := v_0_0_0_0.Args[0]
 14134  		if v_0_0_0_0_0.Op != OpAddr {
 14135  			break
 14136  		}
 14137  		s := auxToSym(v_0_0_0_0_0.Aux)
 14138  		sb := v_0_0_0_0_0.Args[0]
 14139  		if !(t.IsUintptr() && isFixedSym(s, off)) {
 14140  			break
 14141  		}
 14142  		v.reset(OpAddr)
 14143  		v.Aux = symToAux(fixedSym(b.Func, s, off))
 14144  		v.AddArg(sb)
 14145  		return true
 14146  	}
 14147  	// match: (Load <t> (OffPtr [off] (Addr {sym} _) ) _)
 14148  	// cond: t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)
 14149  	// result: (Const32 [fixed32(config, sym, off)])
 14150  	for {
 14151  		t := v.Type
 14152  		if v_0.Op != OpOffPtr {
 14153  			break
 14154  		}
 14155  		off := auxIntToInt64(v_0.AuxInt)
 14156  		v_0_0 := v_0.Args[0]
 14157  		if v_0_0.Op != OpAddr {
 14158  			break
 14159  		}
 14160  		sym := auxToSym(v_0_0.Aux)
 14161  		if !(t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)) {
 14162  			break
 14163  		}
 14164  		v.reset(OpConst32)
 14165  		v.AuxInt = int32ToAuxInt(fixed32(config, sym, off))
 14166  		return true
 14167  	}
 14168  	// match: (Load <t> (OffPtr [off] (Convert (Addr {sym} _) _) ) _)
 14169  	// cond: t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)
 14170  	// result: (Const32 [fixed32(config, sym, off)])
 14171  	for {
 14172  		t := v.Type
 14173  		if v_0.Op != OpOffPtr {
 14174  			break
 14175  		}
 14176  		off := auxIntToInt64(v_0.AuxInt)
 14177  		v_0_0 := v_0.Args[0]
 14178  		if v_0_0.Op != OpConvert {
 14179  			break
 14180  		}
 14181  		v_0_0_0 := v_0_0.Args[0]
 14182  		if v_0_0_0.Op != OpAddr {
 14183  			break
 14184  		}
 14185  		sym := auxToSym(v_0_0_0.Aux)
 14186  		if !(t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)) {
 14187  			break
 14188  		}
 14189  		v.reset(OpConst32)
 14190  		v.AuxInt = int32ToAuxInt(fixed32(config, sym, off))
 14191  		return true
 14192  	}
 14193  	// match: (Load <t> (OffPtr [off] (ITab (IMake (Addr {sym} _) _))) _)
 14194  	// cond: t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)
 14195  	// result: (Const32 [fixed32(config, sym, off)])
 14196  	for {
 14197  		t := v.Type
 14198  		if v_0.Op != OpOffPtr {
 14199  			break
 14200  		}
 14201  		off := auxIntToInt64(v_0.AuxInt)
 14202  		v_0_0 := v_0.Args[0]
 14203  		if v_0_0.Op != OpITab {
 14204  			break
 14205  		}
 14206  		v_0_0_0 := v_0_0.Args[0]
 14207  		if v_0_0_0.Op != OpIMake {
 14208  			break
 14209  		}
 14210  		v_0_0_0_0 := v_0_0_0.Args[0]
 14211  		if v_0_0_0_0.Op != OpAddr {
 14212  			break
 14213  		}
 14214  		sym := auxToSym(v_0_0_0_0.Aux)
 14215  		if !(t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)) {
 14216  			break
 14217  		}
 14218  		v.reset(OpConst32)
 14219  		v.AuxInt = int32ToAuxInt(fixed32(config, sym, off))
 14220  		return true
 14221  	}
 14222  	// match: (Load <t> (OffPtr [off] (ITab (IMake (Convert (Addr {sym} _) _) _))) _)
 14223  	// cond: t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)
 14224  	// result: (Const32 [fixed32(config, sym, off)])
 14225  	for {
 14226  		t := v.Type
 14227  		if v_0.Op != OpOffPtr {
 14228  			break
 14229  		}
 14230  		off := auxIntToInt64(v_0.AuxInt)
 14231  		v_0_0 := v_0.Args[0]
 14232  		if v_0_0.Op != OpITab {
 14233  			break
 14234  		}
 14235  		v_0_0_0 := v_0_0.Args[0]
 14236  		if v_0_0_0.Op != OpIMake {
 14237  			break
 14238  		}
 14239  		v_0_0_0_0 := v_0_0_0.Args[0]
 14240  		if v_0_0_0_0.Op != OpConvert {
 14241  			break
 14242  		}
 14243  		v_0_0_0_0_0 := v_0_0_0_0.Args[0]
 14244  		if v_0_0_0_0_0.Op != OpAddr {
 14245  			break
 14246  		}
 14247  		sym := auxToSym(v_0_0_0_0_0.Aux)
 14248  		if !(t.IsInteger() && t.Size() == 4 && isFixed32(config, sym, off)) {
 14249  			break
 14250  		}
 14251  		v.reset(OpConst32)
 14252  		v.AuxInt = int32ToAuxInt(fixed32(config, sym, off))
 14253  		return true
 14254  	}
 14255  	return false
 14256  }
 14257  func rewriteValuegeneric_OpLsh16x16(v *Value) bool {
 14258  	v_1 := v.Args[1]
 14259  	v_0 := v.Args[0]
 14260  	b := v.Block
 14261  	// match: (Lsh16x16 <t> x (Const16 [c]))
 14262  	// result: (Lsh16x64 x (Const64 <t> [int64(uint16(c))]))
 14263  	for {
 14264  		t := v.Type
 14265  		x := v_0
 14266  		if v_1.Op != OpConst16 {
 14267  			break
 14268  		}
 14269  		c := auxIntToInt16(v_1.AuxInt)
 14270  		v.reset(OpLsh16x64)
 14271  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 14272  		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
 14273  		v.AddArg2(x, v0)
 14274  		return true
 14275  	}
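       	// Illustrative note: the rule above canonicalizes the shift count to a
       	// 64-bit constant, e.g. (Lsh16x16 x (Const16 [3])) becomes
       	// (Lsh16x64 x (Const64 <t> [3])), so the remaining constant-shift rules
       	// only need to match the x64 forms.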
 14276  	// match: (Lsh16x16 (Const16 [0]) _)
 14277  	// result: (Const16 [0])
 14278  	for {
 14279  		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
 14280  			break
 14281  		}
 14282  		v.reset(OpConst16)
 14283  		v.AuxInt = int16ToAuxInt(0)
 14284  		return true
 14285  	}
 14286  	return false
 14287  }
 14288  func rewriteValuegeneric_OpLsh16x32(v *Value) bool {
 14289  	v_1 := v.Args[1]
 14290  	v_0 := v.Args[0]
 14291  	b := v.Block
 14292  	// match: (Lsh16x32 <t> x (Const32 [c]))
 14293  	// result: (Lsh16x64 x (Const64 <t> [int64(uint32(c))]))
 14294  	for {
 14295  		t := v.Type
 14296  		x := v_0
 14297  		if v_1.Op != OpConst32 {
 14298  			break
 14299  		}
 14300  		c := auxIntToInt32(v_1.AuxInt)
 14301  		v.reset(OpLsh16x64)
 14302  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 14303  		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
 14304  		v.AddArg2(x, v0)
 14305  		return true
 14306  	}
 14307  	// match: (Lsh16x32 (Const16 [0]) _)
 14308  	// result: (Const16 [0])
 14309  	for {
 14310  		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
 14311  			break
 14312  		}
 14313  		v.reset(OpConst16)
 14314  		v.AuxInt = int16ToAuxInt(0)
 14315  		return true
 14316  	}
 14317  	return false
 14318  }
 14319  func rewriteValuegeneric_OpLsh16x64(v *Value) bool {
 14320  	v_1 := v.Args[1]
 14321  	v_0 := v.Args[0]
 14322  	b := v.Block
 14323  	typ := &b.Func.Config.Types
 14324  	// match: (Lsh16x64 (Const16 [c]) (Const64 [d]))
 14325  	// result: (Const16 [c << uint64(d)])
 14326  	for {
 14327  		if v_0.Op != OpConst16 {
 14328  			break
 14329  		}
 14330  		c := auxIntToInt16(v_0.AuxInt)
 14331  		if v_1.Op != OpConst64 {
 14332  			break
 14333  		}
 14334  		d := auxIntToInt64(v_1.AuxInt)
 14335  		v.reset(OpConst16)
 14336  		v.AuxInt = int16ToAuxInt(c << uint64(d))
 14337  		return true
 14338  	}
 14339  	// match: (Lsh16x64 x (Const64 [0]))
 14340  	// result: x
 14341  	for {
 14342  		x := v_0
 14343  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
 14344  			break
 14345  		}
 14346  		v.copyOf(x)
 14347  		return true
 14348  	}
 14349  	// match: (Lsh16x64 (Const16 [0]) _)
 14350  	// result: (Const16 [0])
 14351  	for {
 14352  		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
 14353  			break
 14354  		}
 14355  		v.reset(OpConst16)
 14356  		v.AuxInt = int16ToAuxInt(0)
 14357  		return true
 14358  	}
 14359  	// match: (Lsh16x64 _ (Const64 [c]))
 14360  	// cond: uint64(c) >= 16
 14361  	// result: (Const16 [0])
 14362  	for {
 14363  		if v_1.Op != OpConst64 {
 14364  			break
 14365  		}
 14366  		c := auxIntToInt64(v_1.AuxInt)
 14367  		if !(uint64(c) >= 16) {
 14368  			break
 14369  		}
 14370  		v.reset(OpConst16)
 14371  		v.AuxInt = int16ToAuxInt(0)
 14372  		return true
 14373  	}
 14374  	// match: (Lsh16x64 <t> (Lsh16x64 x (Const64 [c])) (Const64 [d]))
 14375  	// cond: !uaddOvf(c,d)
 14376  	// result: (Lsh16x64 x (Const64 <t> [c+d]))
 14377  	for {
 14378  		t := v.Type
 14379  		if v_0.Op != OpLsh16x64 {
 14380  			break
 14381  		}
 14382  		_ = v_0.Args[1]
 14383  		x := v_0.Args[0]
 14384  		v_0_1 := v_0.Args[1]
 14385  		if v_0_1.Op != OpConst64 {
 14386  			break
 14387  		}
 14388  		c := auxIntToInt64(v_0_1.AuxInt)
 14389  		if v_1.Op != OpConst64 {
 14390  			break
 14391  		}
 14392  		d := auxIntToInt64(v_1.AuxInt)
 14393  		if !(!uaddOvf(c, d)) {
 14394  			break
 14395  		}
 14396  		v.reset(OpLsh16x64)
 14397  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 14398  		v0.AuxInt = int64ToAuxInt(c + d)
 14399  		v.AddArg2(x, v0)
 14400  		return true
 14401  	}
 14402  	// match: (Lsh16x64 i:(Rsh16x64 x (Const64 [c])) (Const64 [c]))
 14403  	// cond: c >= 0 && c < 16 && i.Uses == 1
 14404  	// result: (And16 x (Const16 <v.Type> [int16(-1) << c]))
 14405  	for {
 14406  		i := v_0
 14407  		if i.Op != OpRsh16x64 {
 14408  			break
 14409  		}
 14410  		_ = i.Args[1]
 14411  		x := i.Args[0]
 14412  		i_1 := i.Args[1]
 14413  		if i_1.Op != OpConst64 {
 14414  			break
 14415  		}
 14416  		c := auxIntToInt64(i_1.AuxInt)
 14417  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 16 && i.Uses == 1) {
 14418  			break
 14419  		}
 14420  		v.reset(OpAnd16)
 14421  		v0 := b.NewValue0(v.Pos, OpConst16, v.Type)
 14422  		v0.AuxInt = int16ToAuxInt(int16(-1) << c)
 14423  		v.AddArg2(x, v0)
 14424  		return true
 14425  	}
 14426  	// match: (Lsh16x64 i:(Rsh16Ux64 x (Const64 [c])) (Const64 [c]))
 14427  	// cond: c >= 0 && c < 16 && i.Uses == 1
 14428  	// result: (And16 x (Const16 <v.Type> [int16(-1) << c]))
 14429  	for {
 14430  		i := v_0
 14431  		if i.Op != OpRsh16Ux64 {
 14432  			break
 14433  		}
 14434  		_ = i.Args[1]
 14435  		x := i.Args[0]
 14436  		i_1 := i.Args[1]
 14437  		if i_1.Op != OpConst64 {
 14438  			break
 14439  		}
 14440  		c := auxIntToInt64(i_1.AuxInt)
 14441  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 16 && i.Uses == 1) {
 14442  			break
 14443  		}
 14444  		v.reset(OpAnd16)
 14445  		v0 := b.NewValue0(v.Pos, OpConst16, v.Type)
 14446  		v0.AuxInt = int16ToAuxInt(int16(-1) << c)
 14447  		v.AddArg2(x, v0)
 14448  		return true
 14449  	}
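       	// Illustrative note: the two rules above use the identity
       	// (x >> c) << c == x & (int16(-1) << c) for 0 <= c < 16. Shifting right and
       	// then left by the same amount just clears the low c bits; with c = 4 the
       	// result is x with its low 4 bits zeroed (mask bit pattern 0xfff0).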
 14450  	// match: (Lsh16x64 (Rsh16Ux64 (Lsh16x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
 14451  	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
 14452  	// result: (Lsh16x64 x (Const64 <typ.UInt64> [c1-c2+c3]))
 14453  	for {
 14454  		if v_0.Op != OpRsh16Ux64 {
 14455  			break
 14456  		}
 14457  		_ = v_0.Args[1]
 14458  		v_0_0 := v_0.Args[0]
 14459  		if v_0_0.Op != OpLsh16x64 {
 14460  			break
 14461  		}
 14462  		_ = v_0_0.Args[1]
 14463  		x := v_0_0.Args[0]
 14464  		v_0_0_1 := v_0_0.Args[1]
 14465  		if v_0_0_1.Op != OpConst64 {
 14466  			break
 14467  		}
 14468  		c1 := auxIntToInt64(v_0_0_1.AuxInt)
 14469  		v_0_1 := v_0.Args[1]
 14470  		if v_0_1.Op != OpConst64 {
 14471  			break
 14472  		}
 14473  		c2 := auxIntToInt64(v_0_1.AuxInt)
 14474  		if v_1.Op != OpConst64 {
 14475  			break
 14476  		}
 14477  		c3 := auxIntToInt64(v_1.AuxInt)
 14478  		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
 14479  			break
 14480  		}
 14481  		v.reset(OpLsh16x64)
 14482  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
 14483  		v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
 14484  		v.AddArg2(x, v0)
 14485  		return true
 14486  	}
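       	// Illustrative note: the rule above merges three shifts into one. The inner
       	// left shift by c1 already zeroes the low c1 >= c2 bits, so the unsigned
       	// right shift by c2 followed by a left shift by c3 >= c2 reduces to a single
       	// left shift by c1-c2+c3; e.g. with c1=5, c2=3, c3=4 the net effect on a
       	// 16-bit x is x<<6.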
 14487  	// match: (Lsh16x64 (And16 (Rsh16x64 <t> x (Const64 <t2> [c])) (Const16 [d])) (Const64 [e]))
 14488  	// cond: c >= e
 14489  	// result: (And16 (Rsh16x64 <t> x (Const64 <t2> [c-e])) (Const16 <t> [d<<e]))
 14490  	for {
 14491  		if v_0.Op != OpAnd16 {
 14492  			break
 14493  		}
 14494  		_ = v_0.Args[1]
 14495  		v_0_0 := v_0.Args[0]
 14496  		v_0_1 := v_0.Args[1]
 14497  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 14498  			if v_0_0.Op != OpRsh16x64 {
 14499  				continue
 14500  			}
 14501  			t := v_0_0.Type
 14502  			_ = v_0_0.Args[1]
 14503  			x := v_0_0.Args[0]
 14504  			v_0_0_1 := v_0_0.Args[1]
 14505  			if v_0_0_1.Op != OpConst64 {
 14506  				continue
 14507  			}
 14508  			t2 := v_0_0_1.Type
 14509  			c := auxIntToInt64(v_0_0_1.AuxInt)
 14510  			if v_0_1.Op != OpConst16 {
 14511  				continue
 14512  			}
 14513  			d := auxIntToInt16(v_0_1.AuxInt)
 14514  			if v_1.Op != OpConst64 {
 14515  				continue
 14516  			}
 14517  			e := auxIntToInt64(v_1.AuxInt)
 14518  			if !(c >= e) {
 14519  				continue
 14520  			}
 14521  			v.reset(OpAnd16)
 14522  			v0 := b.NewValue0(v.Pos, OpRsh16x64, t)
 14523  			v1 := b.NewValue0(v.Pos, OpConst64, t2)
 14524  			v1.AuxInt = int64ToAuxInt(c - e)
 14525  			v0.AddArg2(x, v1)
 14526  			v2 := b.NewValue0(v.Pos, OpConst16, t)
 14527  			v2.AuxInt = int16ToAuxInt(d << e)
 14528  			v.AddArg2(v0, v2)
 14529  			return true
 14530  		}
 14531  		break
 14532  	}
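       	// Illustrative note: this rule and the three variants that follow push a
       	// constant left shift through an And with a shifted operand, using
       	// ((x>>c) & d) << e == (x>>(c-e)) & (d<<e) when c >= e, and
       	// == (x<<(e-c)) & (d<<e) when c < e; bits that differ between the two sides
       	// are masked away by d<<e, whose low e bits are zero.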
 14533  	// match: (Lsh16x64 (And16 (Rsh16Ux64 <t> x (Const64 <t2> [c])) (Const16 [d])) (Const64 [e]))
 14534  	// cond: c >= e
 14535  	// result: (And16 (Rsh16Ux64 <t> x (Const64 <t2> [c-e])) (Const16 <t> [d<<e]))
 14536  	for {
 14537  		if v_0.Op != OpAnd16 {
 14538  			break
 14539  		}
 14540  		_ = v_0.Args[1]
 14541  		v_0_0 := v_0.Args[0]
 14542  		v_0_1 := v_0.Args[1]
 14543  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 14544  			if v_0_0.Op != OpRsh16Ux64 {
 14545  				continue
 14546  			}
 14547  			t := v_0_0.Type
 14548  			_ = v_0_0.Args[1]
 14549  			x := v_0_0.Args[0]
 14550  			v_0_0_1 := v_0_0.Args[1]
 14551  			if v_0_0_1.Op != OpConst64 {
 14552  				continue
 14553  			}
 14554  			t2 := v_0_0_1.Type
 14555  			c := auxIntToInt64(v_0_0_1.AuxInt)
 14556  			if v_0_1.Op != OpConst16 {
 14557  				continue
 14558  			}
 14559  			d := auxIntToInt16(v_0_1.AuxInt)
 14560  			if v_1.Op != OpConst64 {
 14561  				continue
 14562  			}
 14563  			e := auxIntToInt64(v_1.AuxInt)
 14564  			if !(c >= e) {
 14565  				continue
 14566  			}
 14567  			v.reset(OpAnd16)
 14568  			v0 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
 14569  			v1 := b.NewValue0(v.Pos, OpConst64, t2)
 14570  			v1.AuxInt = int64ToAuxInt(c - e)
 14571  			v0.AddArg2(x, v1)
 14572  			v2 := b.NewValue0(v.Pos, OpConst16, t)
 14573  			v2.AuxInt = int16ToAuxInt(d << e)
 14574  			v.AddArg2(v0, v2)
 14575  			return true
 14576  		}
 14577  		break
 14578  	}
 14579  	// match: (Lsh16x64 (And16 (Rsh16x64 <t> x (Const64 <t2> [c])) (Const16 [d])) (Const64 [e]))
 14580  	// cond: c < e
 14581  	// result: (And16 (Lsh16x64 <t> x (Const64 <t2> [e-c])) (Const16 <t> [d<<e]))
 14582  	for {
 14583  		if v_0.Op != OpAnd16 {
 14584  			break
 14585  		}
 14586  		_ = v_0.Args[1]
 14587  		v_0_0 := v_0.Args[0]
 14588  		v_0_1 := v_0.Args[1]
 14589  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 14590  			if v_0_0.Op != OpRsh16x64 {
 14591  				continue
 14592  			}
 14593  			t := v_0_0.Type
 14594  			_ = v_0_0.Args[1]
 14595  			x := v_0_0.Args[0]
 14596  			v_0_0_1 := v_0_0.Args[1]
 14597  			if v_0_0_1.Op != OpConst64 {
 14598  				continue
 14599  			}
 14600  			t2 := v_0_0_1.Type
 14601  			c := auxIntToInt64(v_0_0_1.AuxInt)
 14602  			if v_0_1.Op != OpConst16 {
 14603  				continue
 14604  			}
 14605  			d := auxIntToInt16(v_0_1.AuxInt)
 14606  			if v_1.Op != OpConst64 {
 14607  				continue
 14608  			}
 14609  			e := auxIntToInt64(v_1.AuxInt)
 14610  			if !(c < e) {
 14611  				continue
 14612  			}
 14613  			v.reset(OpAnd16)
 14614  			v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
 14615  			v1 := b.NewValue0(v.Pos, OpConst64, t2)
 14616  			v1.AuxInt = int64ToAuxInt(e - c)
 14617  			v0.AddArg2(x, v1)
 14618  			v2 := b.NewValue0(v.Pos, OpConst16, t)
 14619  			v2.AuxInt = int16ToAuxInt(d << e)
 14620  			v.AddArg2(v0, v2)
 14621  			return true
 14622  		}
 14623  		break
 14624  	}
 14625  	// match: (Lsh16x64 (And16 (Rsh16Ux64 <t> x (Const64 <t2> [c])) (Const16 [d])) (Const64 [e]))
 14626  	// cond: c < e
 14627  	// result: (And16 (Lsh16x64 <t> x (Const64 <t2> [e-c])) (Const16 <t> [d<<e]))
 14628  	for {
 14629  		if v_0.Op != OpAnd16 {
 14630  			break
 14631  		}
 14632  		_ = v_0.Args[1]
 14633  		v_0_0 := v_0.Args[0]
 14634  		v_0_1 := v_0.Args[1]
 14635  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 14636  			if v_0_0.Op != OpRsh16Ux64 {
 14637  				continue
 14638  			}
 14639  			t := v_0_0.Type
 14640  			_ = v_0_0.Args[1]
 14641  			x := v_0_0.Args[0]
 14642  			v_0_0_1 := v_0_0.Args[1]
 14643  			if v_0_0_1.Op != OpConst64 {
 14644  				continue
 14645  			}
 14646  			t2 := v_0_0_1.Type
 14647  			c := auxIntToInt64(v_0_0_1.AuxInt)
 14648  			if v_0_1.Op != OpConst16 {
 14649  				continue
 14650  			}
 14651  			d := auxIntToInt16(v_0_1.AuxInt)
 14652  			if v_1.Op != OpConst64 {
 14653  				continue
 14654  			}
 14655  			e := auxIntToInt64(v_1.AuxInt)
 14656  			if !(c < e) {
 14657  				continue
 14658  			}
 14659  			v.reset(OpAnd16)
 14660  			v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
 14661  			v1 := b.NewValue0(v.Pos, OpConst64, t2)
 14662  			v1.AuxInt = int64ToAuxInt(e - c)
 14663  			v0.AddArg2(x, v1)
 14664  			v2 := b.NewValue0(v.Pos, OpConst16, t)
 14665  			v2.AuxInt = int16ToAuxInt(d << e)
 14666  			v.AddArg2(v0, v2)
 14667  			return true
 14668  		}
 14669  		break
 14670  	}
 14671  	return false
 14672  }
 14673  func rewriteValuegeneric_OpLsh16x8(v *Value) bool {
 14674  	v_1 := v.Args[1]
 14675  	v_0 := v.Args[0]
 14676  	b := v.Block
 14677  	// match: (Lsh16x8 <t> x (Const8 [c]))
 14678  	// result: (Lsh16x64 x (Const64 <t> [int64(uint8(c))]))
 14679  	for {
 14680  		t := v.Type
 14681  		x := v_0
 14682  		if v_1.Op != OpConst8 {
 14683  			break
 14684  		}
 14685  		c := auxIntToInt8(v_1.AuxInt)
 14686  		v.reset(OpLsh16x64)
 14687  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 14688  		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
 14689  		v.AddArg2(x, v0)
 14690  		return true
 14691  	}
 14692  	// match: (Lsh16x8 (Const16 [0]) _)
 14693  	// result: (Const16 [0])
 14694  	for {
 14695  		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
 14696  			break
 14697  		}
 14698  		v.reset(OpConst16)
 14699  		v.AuxInt = int16ToAuxInt(0)
 14700  		return true
 14701  	}
 14702  	return false
 14703  }
 14704  func rewriteValuegeneric_OpLsh32x16(v *Value) bool {
 14705  	v_1 := v.Args[1]
 14706  	v_0 := v.Args[0]
 14707  	b := v.Block
 14708  	// match: (Lsh32x16 <t> x (Const16 [c]))
 14709  	// result: (Lsh32x64 x (Const64 <t> [int64(uint16(c))]))
 14710  	for {
 14711  		t := v.Type
 14712  		x := v_0
 14713  		if v_1.Op != OpConst16 {
 14714  			break
 14715  		}
 14716  		c := auxIntToInt16(v_1.AuxInt)
 14717  		v.reset(OpLsh32x64)
 14718  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 14719  		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
 14720  		v.AddArg2(x, v0)
 14721  		return true
 14722  	}
 14723  	// match: (Lsh32x16 (Const32 [0]) _)
 14724  	// result: (Const32 [0])
 14725  	for {
 14726  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 14727  			break
 14728  		}
 14729  		v.reset(OpConst32)
 14730  		v.AuxInt = int32ToAuxInt(0)
 14731  		return true
 14732  	}
 14733  	return false
 14734  }
 14735  func rewriteValuegeneric_OpLsh32x32(v *Value) bool {
 14736  	v_1 := v.Args[1]
 14737  	v_0 := v.Args[0]
 14738  	b := v.Block
 14739  	// match: (Lsh32x32 <t> x (Const32 [c]))
 14740  	// result: (Lsh32x64 x (Const64 <t> [int64(uint32(c))]))
 14741  	for {
 14742  		t := v.Type
 14743  		x := v_0
 14744  		if v_1.Op != OpConst32 {
 14745  			break
 14746  		}
 14747  		c := auxIntToInt32(v_1.AuxInt)
 14748  		v.reset(OpLsh32x64)
 14749  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 14750  		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
 14751  		v.AddArg2(x, v0)
 14752  		return true
 14753  	}
 14754  	// match: (Lsh32x32 (Const32 [0]) _)
 14755  	// result: (Const32 [0])
 14756  	for {
 14757  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 14758  			break
 14759  		}
 14760  		v.reset(OpConst32)
 14761  		v.AuxInt = int32ToAuxInt(0)
 14762  		return true
 14763  	}
 14764  	return false
 14765  }
 14766  func rewriteValuegeneric_OpLsh32x64(v *Value) bool {
 14767  	v_1 := v.Args[1]
 14768  	v_0 := v.Args[0]
 14769  	b := v.Block
 14770  	typ := &b.Func.Config.Types
 14771  	// match: (Lsh32x64 (Const32 [c]) (Const64 [d]))
 14772  	// result: (Const32 [c << uint64(d)])
 14773  	for {
 14774  		if v_0.Op != OpConst32 {
 14775  			break
 14776  		}
 14777  		c := auxIntToInt32(v_0.AuxInt)
 14778  		if v_1.Op != OpConst64 {
 14779  			break
 14780  		}
 14781  		d := auxIntToInt64(v_1.AuxInt)
 14782  		v.reset(OpConst32)
 14783  		v.AuxInt = int32ToAuxInt(c << uint64(d))
 14784  		return true
 14785  	}
 14786  	// match: (Lsh32x64 x (Const64 [0]))
 14787  	// result: x
 14788  	for {
 14789  		x := v_0
 14790  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
 14791  			break
 14792  		}
 14793  		v.copyOf(x)
 14794  		return true
 14795  	}
 14796  	// match: (Lsh32x64 (Const32 [0]) _)
 14797  	// result: (Const32 [0])
 14798  	for {
 14799  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 14800  			break
 14801  		}
 14802  		v.reset(OpConst32)
 14803  		v.AuxInt = int32ToAuxInt(0)
 14804  		return true
 14805  	}
 14806  	// match: (Lsh32x64 _ (Const64 [c]))
 14807  	// cond: uint64(c) >= 32
 14808  	// result: (Const32 [0])
 14809  	for {
 14810  		if v_1.Op != OpConst64 {
 14811  			break
 14812  		}
 14813  		c := auxIntToInt64(v_1.AuxInt)
 14814  		if !(uint64(c) >= 32) {
 14815  			break
 14816  		}
 14817  		v.reset(OpConst32)
 14818  		v.AuxInt = int32ToAuxInt(0)
 14819  		return true
 14820  	}
 14821  	// match: (Lsh32x64 <t> (Lsh32x64 x (Const64 [c])) (Const64 [d]))
 14822  	// cond: !uaddOvf(c,d)
 14823  	// result: (Lsh32x64 x (Const64 <t> [c+d]))
 14824  	for {
 14825  		t := v.Type
 14826  		if v_0.Op != OpLsh32x64 {
 14827  			break
 14828  		}
 14829  		_ = v_0.Args[1]
 14830  		x := v_0.Args[0]
 14831  		v_0_1 := v_0.Args[1]
 14832  		if v_0_1.Op != OpConst64 {
 14833  			break
 14834  		}
 14835  		c := auxIntToInt64(v_0_1.AuxInt)
 14836  		if v_1.Op != OpConst64 {
 14837  			break
 14838  		}
 14839  		d := auxIntToInt64(v_1.AuxInt)
 14840  		if !(!uaddOvf(c, d)) {
 14841  			break
 14842  		}
 14843  		v.reset(OpLsh32x64)
 14844  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 14845  		v0.AuxInt = int64ToAuxInt(c + d)
 14846  		v.AddArg2(x, v0)
 14847  		return true
 14848  	}
 14849  	// match: (Lsh32x64 i:(Rsh32x64 x (Const64 [c])) (Const64 [c]))
 14850  	// cond: c >= 0 && c < 32 && i.Uses == 1
 14851  	// result: (And32 x (Const32 <v.Type> [int32(-1) << c]))
 14852  	for {
 14853  		i := v_0
 14854  		if i.Op != OpRsh32x64 {
 14855  			break
 14856  		}
 14857  		_ = i.Args[1]
 14858  		x := i.Args[0]
 14859  		i_1 := i.Args[1]
 14860  		if i_1.Op != OpConst64 {
 14861  			break
 14862  		}
 14863  		c := auxIntToInt64(i_1.AuxInt)
 14864  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 32 && i.Uses == 1) {
 14865  			break
 14866  		}
 14867  		v.reset(OpAnd32)
 14868  		v0 := b.NewValue0(v.Pos, OpConst32, v.Type)
 14869  		v0.AuxInt = int32ToAuxInt(int32(-1) << c)
 14870  		v.AddArg2(x, v0)
 14871  		return true
 14872  	}
 14873  	// match: (Lsh32x64 i:(Rsh32Ux64 x (Const64 [c])) (Const64 [c]))
 14874  	// cond: c >= 0 && c < 32 && i.Uses == 1
 14875  	// result: (And32 x (Const32 <v.Type> [int32(-1) << c]))
 14876  	for {
 14877  		i := v_0
 14878  		if i.Op != OpRsh32Ux64 {
 14879  			break
 14880  		}
 14881  		_ = i.Args[1]
 14882  		x := i.Args[0]
 14883  		i_1 := i.Args[1]
 14884  		if i_1.Op != OpConst64 {
 14885  			break
 14886  		}
 14887  		c := auxIntToInt64(i_1.AuxInt)
 14888  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 32 && i.Uses == 1) {
 14889  			break
 14890  		}
 14891  		v.reset(OpAnd32)
 14892  		v0 := b.NewValue0(v.Pos, OpConst32, v.Type)
 14893  		v0.AuxInt = int32ToAuxInt(int32(-1) << c)
 14894  		v.AddArg2(x, v0)
 14895  		return true
 14896  	}
 14897  	// match: (Lsh32x64 (Rsh32Ux64 (Lsh32x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
 14898  	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
 14899  	// result: (Lsh32x64 x (Const64 <typ.UInt64> [c1-c2+c3]))
 14900  	for {
 14901  		if v_0.Op != OpRsh32Ux64 {
 14902  			break
 14903  		}
 14904  		_ = v_0.Args[1]
 14905  		v_0_0 := v_0.Args[0]
 14906  		if v_0_0.Op != OpLsh32x64 {
 14907  			break
 14908  		}
 14909  		_ = v_0_0.Args[1]
 14910  		x := v_0_0.Args[0]
 14911  		v_0_0_1 := v_0_0.Args[1]
 14912  		if v_0_0_1.Op != OpConst64 {
 14913  			break
 14914  		}
 14915  		c1 := auxIntToInt64(v_0_0_1.AuxInt)
 14916  		v_0_1 := v_0.Args[1]
 14917  		if v_0_1.Op != OpConst64 {
 14918  			break
 14919  		}
 14920  		c2 := auxIntToInt64(v_0_1.AuxInt)
 14921  		if v_1.Op != OpConst64 {
 14922  			break
 14923  		}
 14924  		c3 := auxIntToInt64(v_1.AuxInt)
 14925  		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
 14926  			break
 14927  		}
 14928  		v.reset(OpLsh32x64)
 14929  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
 14930  		v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
 14931  		v.AddArg2(x, v0)
 14932  		return true
 14933  	}
 14934  	// match: (Lsh32x64 (And32 (Rsh32x64 <t> x (Const64 <t2> [c])) (Const32 [d])) (Const64 [e]))
 14935  	// cond: c >= e
 14936  	// result: (And32 (Rsh32x64 <t> x (Const64 <t2> [c-e])) (Const32 <t> [d<<e]))
 14937  	for {
 14938  		if v_0.Op != OpAnd32 {
 14939  			break
 14940  		}
 14941  		_ = v_0.Args[1]
 14942  		v_0_0 := v_0.Args[0]
 14943  		v_0_1 := v_0.Args[1]
 14944  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 14945  			if v_0_0.Op != OpRsh32x64 {
 14946  				continue
 14947  			}
 14948  			t := v_0_0.Type
 14949  			_ = v_0_0.Args[1]
 14950  			x := v_0_0.Args[0]
 14951  			v_0_0_1 := v_0_0.Args[1]
 14952  			if v_0_0_1.Op != OpConst64 {
 14953  				continue
 14954  			}
 14955  			t2 := v_0_0_1.Type
 14956  			c := auxIntToInt64(v_0_0_1.AuxInt)
 14957  			if v_0_1.Op != OpConst32 {
 14958  				continue
 14959  			}
 14960  			d := auxIntToInt32(v_0_1.AuxInt)
 14961  			if v_1.Op != OpConst64 {
 14962  				continue
 14963  			}
 14964  			e := auxIntToInt64(v_1.AuxInt)
 14965  			if !(c >= e) {
 14966  				continue
 14967  			}
 14968  			v.reset(OpAnd32)
 14969  			v0 := b.NewValue0(v.Pos, OpRsh32x64, t)
 14970  			v1 := b.NewValue0(v.Pos, OpConst64, t2)
 14971  			v1.AuxInt = int64ToAuxInt(c - e)
 14972  			v0.AddArg2(x, v1)
 14973  			v2 := b.NewValue0(v.Pos, OpConst32, t)
 14974  			v2.AuxInt = int32ToAuxInt(d << e)
 14975  			v.AddArg2(v0, v2)
 14976  			return true
 14977  		}
 14978  		break
 14979  	}
 14980  	// match: (Lsh32x64 (And32 (Rsh32Ux64 <t> x (Const64 <t2> [c])) (Const32 [d])) (Const64 [e]))
 14981  	// cond: c >= e
 14982  	// result: (And32 (Rsh32Ux64 <t> x (Const64 <t2> [c-e])) (Const32 <t> [d<<e]))
 14983  	for {
 14984  		if v_0.Op != OpAnd32 {
 14985  			break
 14986  		}
 14987  		_ = v_0.Args[1]
 14988  		v_0_0 := v_0.Args[0]
 14989  		v_0_1 := v_0.Args[1]
 14990  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 14991  			if v_0_0.Op != OpRsh32Ux64 {
 14992  				continue
 14993  			}
 14994  			t := v_0_0.Type
 14995  			_ = v_0_0.Args[1]
 14996  			x := v_0_0.Args[0]
 14997  			v_0_0_1 := v_0_0.Args[1]
 14998  			if v_0_0_1.Op != OpConst64 {
 14999  				continue
 15000  			}
 15001  			t2 := v_0_0_1.Type
 15002  			c := auxIntToInt64(v_0_0_1.AuxInt)
 15003  			if v_0_1.Op != OpConst32 {
 15004  				continue
 15005  			}
 15006  			d := auxIntToInt32(v_0_1.AuxInt)
 15007  			if v_1.Op != OpConst64 {
 15008  				continue
 15009  			}
 15010  			e := auxIntToInt64(v_1.AuxInt)
 15011  			if !(c >= e) {
 15012  				continue
 15013  			}
 15014  			v.reset(OpAnd32)
 15015  			v0 := b.NewValue0(v.Pos, OpRsh32Ux64, t)
 15016  			v1 := b.NewValue0(v.Pos, OpConst64, t2)
 15017  			v1.AuxInt = int64ToAuxInt(c - e)
 15018  			v0.AddArg2(x, v1)
 15019  			v2 := b.NewValue0(v.Pos, OpConst32, t)
 15020  			v2.AuxInt = int32ToAuxInt(d << e)
 15021  			v.AddArg2(v0, v2)
 15022  			return true
 15023  		}
 15024  		break
 15025  	}
 15026  	// match: (Lsh32x64 (And32 (Rsh32x64 <t> x (Const64 <t2> [c])) (Const32 [d])) (Const64 [e]))
 15027  	// cond: c < e
 15028  	// result: (And32 (Lsh32x64 <t> x (Const64 <t2> [e-c])) (Const32 <t> [d<<e]))
 15029  	for {
 15030  		if v_0.Op != OpAnd32 {
 15031  			break
 15032  		}
 15033  		_ = v_0.Args[1]
 15034  		v_0_0 := v_0.Args[0]
 15035  		v_0_1 := v_0.Args[1]
 15036  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 15037  			if v_0_0.Op != OpRsh32x64 {
 15038  				continue
 15039  			}
 15040  			t := v_0_0.Type
 15041  			_ = v_0_0.Args[1]
 15042  			x := v_0_0.Args[0]
 15043  			v_0_0_1 := v_0_0.Args[1]
 15044  			if v_0_0_1.Op != OpConst64 {
 15045  				continue
 15046  			}
 15047  			t2 := v_0_0_1.Type
 15048  			c := auxIntToInt64(v_0_0_1.AuxInt)
 15049  			if v_0_1.Op != OpConst32 {
 15050  				continue
 15051  			}
 15052  			d := auxIntToInt32(v_0_1.AuxInt)
 15053  			if v_1.Op != OpConst64 {
 15054  				continue
 15055  			}
 15056  			e := auxIntToInt64(v_1.AuxInt)
 15057  			if !(c < e) {
 15058  				continue
 15059  			}
 15060  			v.reset(OpAnd32)
 15061  			v0 := b.NewValue0(v.Pos, OpLsh32x64, t)
 15062  			v1 := b.NewValue0(v.Pos, OpConst64, t2)
 15063  			v1.AuxInt = int64ToAuxInt(e - c)
 15064  			v0.AddArg2(x, v1)
 15065  			v2 := b.NewValue0(v.Pos, OpConst32, t)
 15066  			v2.AuxInt = int32ToAuxInt(d << e)
 15067  			v.AddArg2(v0, v2)
 15068  			return true
 15069  		}
 15070  		break
 15071  	}
 15072  	// match: (Lsh32x64 (And32 (Rsh32Ux64 <t> x (Const64 <t2> [c])) (Const32 [d])) (Const64 [e]))
 15073  	// cond: c < e
 15074  	// result: (And32 (Lsh32x64 <t> x (Const64 <t2> [e-c])) (Const32 <t> [d<<e]))
 15075  	for {
 15076  		if v_0.Op != OpAnd32 {
 15077  			break
 15078  		}
 15079  		_ = v_0.Args[1]
 15080  		v_0_0 := v_0.Args[0]
 15081  		v_0_1 := v_0.Args[1]
 15082  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 15083  			if v_0_0.Op != OpRsh32Ux64 {
 15084  				continue
 15085  			}
 15086  			t := v_0_0.Type
 15087  			_ = v_0_0.Args[1]
 15088  			x := v_0_0.Args[0]
 15089  			v_0_0_1 := v_0_0.Args[1]
 15090  			if v_0_0_1.Op != OpConst64 {
 15091  				continue
 15092  			}
 15093  			t2 := v_0_0_1.Type
 15094  			c := auxIntToInt64(v_0_0_1.AuxInt)
 15095  			if v_0_1.Op != OpConst32 {
 15096  				continue
 15097  			}
 15098  			d := auxIntToInt32(v_0_1.AuxInt)
 15099  			if v_1.Op != OpConst64 {
 15100  				continue
 15101  			}
 15102  			e := auxIntToInt64(v_1.AuxInt)
 15103  			if !(c < e) {
 15104  				continue
 15105  			}
 15106  			v.reset(OpAnd32)
 15107  			v0 := b.NewValue0(v.Pos, OpLsh32x64, t)
 15108  			v1 := b.NewValue0(v.Pos, OpConst64, t2)
 15109  			v1.AuxInt = int64ToAuxInt(e - c)
 15110  			v0.AddArg2(x, v1)
 15111  			v2 := b.NewValue0(v.Pos, OpConst32, t)
 15112  			v2.AuxInt = int32ToAuxInt(d << e)
 15113  			v.AddArg2(v0, v2)
 15114  			return true
 15115  		}
 15116  		break
 15117  	}
 15118  	return false
 15119  }
 15120  func rewriteValuegeneric_OpLsh32x8(v *Value) bool {
 15121  	v_1 := v.Args[1]
 15122  	v_0 := v.Args[0]
 15123  	b := v.Block
 15124  	// match: (Lsh32x8 <t> x (Const8 [c]))
 15125  	// result: (Lsh32x64 x (Const64 <t> [int64(uint8(c))]))
 15126  	for {
 15127  		t := v.Type
 15128  		x := v_0
 15129  		if v_1.Op != OpConst8 {
 15130  			break
 15131  		}
 15132  		c := auxIntToInt8(v_1.AuxInt)
 15133  		v.reset(OpLsh32x64)
 15134  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 15135  		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
 15136  		v.AddArg2(x, v0)
 15137  		return true
 15138  	}
 15139  	// match: (Lsh32x8 (Const32 [0]) _)
 15140  	// result: (Const32 [0])
 15141  	for {
 15142  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 15143  			break
 15144  		}
 15145  		v.reset(OpConst32)
 15146  		v.AuxInt = int32ToAuxInt(0)
 15147  		return true
 15148  	}
 15149  	return false
 15150  }
 15151  func rewriteValuegeneric_OpLsh64x16(v *Value) bool {
 15152  	v_1 := v.Args[1]
 15153  	v_0 := v.Args[0]
 15154  	b := v.Block
 15155  	// match: (Lsh64x16 <t> x (Const16 [c]))
 15156  	// result: (Lsh64x64 x (Const64 <t> [int64(uint16(c))]))
 15157  	for {
 15158  		t := v.Type
 15159  		x := v_0
 15160  		if v_1.Op != OpConst16 {
 15161  			break
 15162  		}
 15163  		c := auxIntToInt16(v_1.AuxInt)
 15164  		v.reset(OpLsh64x64)
 15165  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 15166  		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
 15167  		v.AddArg2(x, v0)
 15168  		return true
 15169  	}
 15170  	// match: (Lsh64x16 (Const64 [0]) _)
 15171  	// result: (Const64 [0])
 15172  	for {
 15173  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 15174  			break
 15175  		}
 15176  		v.reset(OpConst64)
 15177  		v.AuxInt = int64ToAuxInt(0)
 15178  		return true
 15179  	}
 15180  	return false
 15181  }
 15182  func rewriteValuegeneric_OpLsh64x32(v *Value) bool {
 15183  	v_1 := v.Args[1]
 15184  	v_0 := v.Args[0]
 15185  	b := v.Block
 15186  	// match: (Lsh64x32 <t> x (Const32 [c]))
 15187  	// result: (Lsh64x64 x (Const64 <t> [int64(uint32(c))]))
 15188  	for {
 15189  		t := v.Type
 15190  		x := v_0
 15191  		if v_1.Op != OpConst32 {
 15192  			break
 15193  		}
 15194  		c := auxIntToInt32(v_1.AuxInt)
 15195  		v.reset(OpLsh64x64)
 15196  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 15197  		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
 15198  		v.AddArg2(x, v0)
 15199  		return true
 15200  	}
 15201  	// match: (Lsh64x32 (Const64 [0]) _)
 15202  	// result: (Const64 [0])
 15203  	for {
 15204  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 15205  			break
 15206  		}
 15207  		v.reset(OpConst64)
 15208  		v.AuxInt = int64ToAuxInt(0)
 15209  		return true
 15210  	}
 15211  	return false
 15212  }
 15213  func rewriteValuegeneric_OpLsh64x64(v *Value) bool {
 15214  	v_1 := v.Args[1]
 15215  	v_0 := v.Args[0]
 15216  	b := v.Block
 15217  	typ := &b.Func.Config.Types
 15218  	// match: (Lsh64x64 (Const64 [c]) (Const64 [d]))
 15219  	// result: (Const64 [c << uint64(d)])
 15220  	for {
 15221  		if v_0.Op != OpConst64 {
 15222  			break
 15223  		}
 15224  		c := auxIntToInt64(v_0.AuxInt)
 15225  		if v_1.Op != OpConst64 {
 15226  			break
 15227  		}
 15228  		d := auxIntToInt64(v_1.AuxInt)
 15229  		v.reset(OpConst64)
 15230  		v.AuxInt = int64ToAuxInt(c << uint64(d))
 15231  		return true
 15232  	}
 15233  	// match: (Lsh64x64 x (Const64 [0]))
 15234  	// result: x
 15235  	for {
 15236  		x := v_0
 15237  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
 15238  			break
 15239  		}
 15240  		v.copyOf(x)
 15241  		return true
 15242  	}
 15243  	// match: (Lsh64x64 (Const64 [0]) _)
 15244  	// result: (Const64 [0])
 15245  	for {
 15246  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 15247  			break
 15248  		}
 15249  		v.reset(OpConst64)
 15250  		v.AuxInt = int64ToAuxInt(0)
 15251  		return true
 15252  	}
 15253  	// match: (Lsh64x64 _ (Const64 [c]))
 15254  	// cond: uint64(c) >= 64
 15255  	// result: (Const64 [0])
 15256  	for {
 15257  		if v_1.Op != OpConst64 {
 15258  			break
 15259  		}
 15260  		c := auxIntToInt64(v_1.AuxInt)
 15261  		if !(uint64(c) >= 64) {
 15262  			break
 15263  		}
 15264  		v.reset(OpConst64)
 15265  		v.AuxInt = int64ToAuxInt(0)
 15266  		return true
 15267  	}
 15268  	// match: (Lsh64x64 <t> (Lsh64x64 x (Const64 [c])) (Const64 [d]))
 15269  	// cond: !uaddOvf(c,d)
 15270  	// result: (Lsh64x64 x (Const64 <t> [c+d]))
 15271  	for {
 15272  		t := v.Type
 15273  		if v_0.Op != OpLsh64x64 {
 15274  			break
 15275  		}
 15276  		_ = v_0.Args[1]
 15277  		x := v_0.Args[0]
 15278  		v_0_1 := v_0.Args[1]
 15279  		if v_0_1.Op != OpConst64 {
 15280  			break
 15281  		}
 15282  		c := auxIntToInt64(v_0_1.AuxInt)
 15283  		if v_1.Op != OpConst64 {
 15284  			break
 15285  		}
 15286  		d := auxIntToInt64(v_1.AuxInt)
 15287  		if !(!uaddOvf(c, d)) {
 15288  			break
 15289  		}
 15290  		v.reset(OpLsh64x64)
 15291  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 15292  		v0.AuxInt = int64ToAuxInt(c + d)
 15293  		v.AddArg2(x, v0)
 15294  		return true
 15295  	}
 15296  	// match: (Lsh64x64 i:(Rsh64x64 x (Const64 [c])) (Const64 [c]))
 15297  	// cond: c >= 0 && c < 64 && i.Uses == 1
 15298  	// result: (And64 x (Const64 <v.Type> [int64(-1) << c]))
 15299  	for {
 15300  		i := v_0
 15301  		if i.Op != OpRsh64x64 {
 15302  			break
 15303  		}
 15304  		_ = i.Args[1]
 15305  		x := i.Args[0]
 15306  		i_1 := i.Args[1]
 15307  		if i_1.Op != OpConst64 {
 15308  			break
 15309  		}
 15310  		c := auxIntToInt64(i_1.AuxInt)
 15311  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 64 && i.Uses == 1) {
 15312  			break
 15313  		}
 15314  		v.reset(OpAnd64)
 15315  		v0 := b.NewValue0(v.Pos, OpConst64, v.Type)
 15316  		v0.AuxInt = int64ToAuxInt(int64(-1) << c)
 15317  		v.AddArg2(x, v0)
 15318  		return true
 15319  	}
 15320  	// match: (Lsh64x64 i:(Rsh64Ux64 x (Const64 [c])) (Const64 [c]))
 15321  	// cond: c >= 0 && c < 64 && i.Uses == 1
 15322  	// result: (And64 x (Const64 <v.Type> [int64(-1) << c]))
 15323  	for {
 15324  		i := v_0
 15325  		if i.Op != OpRsh64Ux64 {
 15326  			break
 15327  		}
 15328  		_ = i.Args[1]
 15329  		x := i.Args[0]
 15330  		i_1 := i.Args[1]
 15331  		if i_1.Op != OpConst64 {
 15332  			break
 15333  		}
 15334  		c := auxIntToInt64(i_1.AuxInt)
 15335  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 64 && i.Uses == 1) {
 15336  			break
 15337  		}
 15338  		v.reset(OpAnd64)
 15339  		v0 := b.NewValue0(v.Pos, OpConst64, v.Type)
 15340  		v0.AuxInt = int64ToAuxInt(int64(-1) << c)
 15341  		v.AddArg2(x, v0)
 15342  		return true
 15343  	}
 15344  	// match: (Lsh64x64 (Rsh64Ux64 (Lsh64x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
 15345  	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
 15346  	// result: (Lsh64x64 x (Const64 <typ.UInt64> [c1-c2+c3]))
 15347  	for {
 15348  		if v_0.Op != OpRsh64Ux64 {
 15349  			break
 15350  		}
 15351  		_ = v_0.Args[1]
 15352  		v_0_0 := v_0.Args[0]
 15353  		if v_0_0.Op != OpLsh64x64 {
 15354  			break
 15355  		}
 15356  		_ = v_0_0.Args[1]
 15357  		x := v_0_0.Args[0]
 15358  		v_0_0_1 := v_0_0.Args[1]
 15359  		if v_0_0_1.Op != OpConst64 {
 15360  			break
 15361  		}
 15362  		c1 := auxIntToInt64(v_0_0_1.AuxInt)
 15363  		v_0_1 := v_0.Args[1]
 15364  		if v_0_1.Op != OpConst64 {
 15365  			break
 15366  		}
 15367  		c2 := auxIntToInt64(v_0_1.AuxInt)
 15368  		if v_1.Op != OpConst64 {
 15369  			break
 15370  		}
 15371  		c3 := auxIntToInt64(v_1.AuxInt)
 15372  		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
 15373  			break
 15374  		}
 15375  		v.reset(OpLsh64x64)
 15376  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
 15377  		v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
 15378  		v.AddArg2(x, v0)
 15379  		return true
 15380  	}
 15381  	// match: (Lsh64x64 (And64 (Rsh64x64 <t> x (Const64 <t2> [c])) (Const64 [d])) (Const64 [e]))
 15382  	// cond: c >= e
 15383  	// result: (And64 (Rsh64x64 <t> x (Const64 <t2> [c-e])) (Const64 <t> [d<<e]))
 15384  	for {
 15385  		if v_0.Op != OpAnd64 {
 15386  			break
 15387  		}
 15388  		_ = v_0.Args[1]
 15389  		v_0_0 := v_0.Args[0]
 15390  		v_0_1 := v_0.Args[1]
 15391  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 15392  			if v_0_0.Op != OpRsh64x64 {
 15393  				continue
 15394  			}
 15395  			t := v_0_0.Type
 15396  			_ = v_0_0.Args[1]
 15397  			x := v_0_0.Args[0]
 15398  			v_0_0_1 := v_0_0.Args[1]
 15399  			if v_0_0_1.Op != OpConst64 {
 15400  				continue
 15401  			}
 15402  			t2 := v_0_0_1.Type
 15403  			c := auxIntToInt64(v_0_0_1.AuxInt)
 15404  			if v_0_1.Op != OpConst64 {
 15405  				continue
 15406  			}
 15407  			d := auxIntToInt64(v_0_1.AuxInt)
 15408  			if v_1.Op != OpConst64 {
 15409  				continue
 15410  			}
 15411  			e := auxIntToInt64(v_1.AuxInt)
 15412  			if !(c >= e) {
 15413  				continue
 15414  			}
 15415  			v.reset(OpAnd64)
 15416  			v0 := b.NewValue0(v.Pos, OpRsh64x64, t)
 15417  			v1 := b.NewValue0(v.Pos, OpConst64, t2)
 15418  			v1.AuxInt = int64ToAuxInt(c - e)
 15419  			v0.AddArg2(x, v1)
 15420  			v2 := b.NewValue0(v.Pos, OpConst64, t)
 15421  			v2.AuxInt = int64ToAuxInt(d << e)
 15422  			v.AddArg2(v0, v2)
 15423  			return true
 15424  		}
 15425  		break
 15426  	}
 15427  	// match: (Lsh64x64 (And64 (Rsh64Ux64 <t> x (Const64 <t2> [c])) (Const64 [d])) (Const64 [e]))
 15428  	// cond: c >= e
 15429  	// result: (And64 (Rsh64Ux64 <t> x (Const64 <t2> [c-e])) (Const64 <t> [d<<e]))
 15430  	for {
 15431  		if v_0.Op != OpAnd64 {
 15432  			break
 15433  		}
 15434  		_ = v_0.Args[1]
 15435  		v_0_0 := v_0.Args[0]
 15436  		v_0_1 := v_0.Args[1]
 15437  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 15438  			if v_0_0.Op != OpRsh64Ux64 {
 15439  				continue
 15440  			}
 15441  			t := v_0_0.Type
 15442  			_ = v_0_0.Args[1]
 15443  			x := v_0_0.Args[0]
 15444  			v_0_0_1 := v_0_0.Args[1]
 15445  			if v_0_0_1.Op != OpConst64 {
 15446  				continue
 15447  			}
 15448  			t2 := v_0_0_1.Type
 15449  			c := auxIntToInt64(v_0_0_1.AuxInt)
 15450  			if v_0_1.Op != OpConst64 {
 15451  				continue
 15452  			}
 15453  			d := auxIntToInt64(v_0_1.AuxInt)
 15454  			if v_1.Op != OpConst64 {
 15455  				continue
 15456  			}
 15457  			e := auxIntToInt64(v_1.AuxInt)
 15458  			if !(c >= e) {
 15459  				continue
 15460  			}
 15461  			v.reset(OpAnd64)
 15462  			v0 := b.NewValue0(v.Pos, OpRsh64Ux64, t)
 15463  			v1 := b.NewValue0(v.Pos, OpConst64, t2)
 15464  			v1.AuxInt = int64ToAuxInt(c - e)
 15465  			v0.AddArg2(x, v1)
 15466  			v2 := b.NewValue0(v.Pos, OpConst64, t)
 15467  			v2.AuxInt = int64ToAuxInt(d << e)
 15468  			v.AddArg2(v0, v2)
 15469  			return true
 15470  		}
 15471  		break
 15472  	}
 15473  	// match: (Lsh64x64 (And64 (Rsh64x64 <t> x (Const64 <t2> [c])) (Const64 [d])) (Const64 [e]))
 15474  	// cond: c < e
 15475  	// result: (And64 (Lsh64x64 <t> x (Const64 <t2> [e-c])) (Const64 <t> [d<<e]))
 15476  	for {
 15477  		if v_0.Op != OpAnd64 {
 15478  			break
 15479  		}
 15480  		_ = v_0.Args[1]
 15481  		v_0_0 := v_0.Args[0]
 15482  		v_0_1 := v_0.Args[1]
 15483  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 15484  			if v_0_0.Op != OpRsh64x64 {
 15485  				continue
 15486  			}
 15487  			t := v_0_0.Type
 15488  			_ = v_0_0.Args[1]
 15489  			x := v_0_0.Args[0]
 15490  			v_0_0_1 := v_0_0.Args[1]
 15491  			if v_0_0_1.Op != OpConst64 {
 15492  				continue
 15493  			}
 15494  			t2 := v_0_0_1.Type
 15495  			c := auxIntToInt64(v_0_0_1.AuxInt)
 15496  			if v_0_1.Op != OpConst64 {
 15497  				continue
 15498  			}
 15499  			d := auxIntToInt64(v_0_1.AuxInt)
 15500  			if v_1.Op != OpConst64 {
 15501  				continue
 15502  			}
 15503  			e := auxIntToInt64(v_1.AuxInt)
 15504  			if !(c < e) {
 15505  				continue
 15506  			}
 15507  			v.reset(OpAnd64)
 15508  			v0 := b.NewValue0(v.Pos, OpLsh64x64, t)
 15509  			v1 := b.NewValue0(v.Pos, OpConst64, t2)
 15510  			v1.AuxInt = int64ToAuxInt(e - c)
 15511  			v0.AddArg2(x, v1)
 15512  			v2 := b.NewValue0(v.Pos, OpConst64, t)
 15513  			v2.AuxInt = int64ToAuxInt(d << e)
 15514  			v.AddArg2(v0, v2)
 15515  			return true
 15516  		}
 15517  		break
 15518  	}
 15519  	// match: (Lsh64x64 (And64 (Rsh64Ux64 <t> x (Const64 <t2> [c])) (Const64 [d])) (Const64 [e]))
 15520  	// cond: c < e
 15521  	// result: (And64 (Lsh64x64 <t> x (Const64 <t2> [e-c])) (Const64 <t> [d<<e]))
 15522  	for {
 15523  		if v_0.Op != OpAnd64 {
 15524  			break
 15525  		}
 15526  		_ = v_0.Args[1]
 15527  		v_0_0 := v_0.Args[0]
 15528  		v_0_1 := v_0.Args[1]
 15529  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 15530  			if v_0_0.Op != OpRsh64Ux64 {
 15531  				continue
 15532  			}
 15533  			t := v_0_0.Type
 15534  			_ = v_0_0.Args[1]
 15535  			x := v_0_0.Args[0]
 15536  			v_0_0_1 := v_0_0.Args[1]
 15537  			if v_0_0_1.Op != OpConst64 {
 15538  				continue
 15539  			}
 15540  			t2 := v_0_0_1.Type
 15541  			c := auxIntToInt64(v_0_0_1.AuxInt)
 15542  			if v_0_1.Op != OpConst64 {
 15543  				continue
 15544  			}
 15545  			d := auxIntToInt64(v_0_1.AuxInt)
 15546  			if v_1.Op != OpConst64 {
 15547  				continue
 15548  			}
 15549  			e := auxIntToInt64(v_1.AuxInt)
 15550  			if !(c < e) {
 15551  				continue
 15552  			}
 15553  			v.reset(OpAnd64)
 15554  			v0 := b.NewValue0(v.Pos, OpLsh64x64, t)
 15555  			v1 := b.NewValue0(v.Pos, OpConst64, t2)
 15556  			v1.AuxInt = int64ToAuxInt(e - c)
 15557  			v0.AddArg2(x, v1)
 15558  			v2 := b.NewValue0(v.Pos, OpConst64, t)
 15559  			v2.AuxInt = int64ToAuxInt(d << e)
 15560  			v.AddArg2(v0, v2)
 15561  			return true
 15562  		}
 15563  		break
 15564  	}
 15565  	return false
 15566  }
 15567  func rewriteValuegeneric_OpLsh64x8(v *Value) bool {
 15568  	v_1 := v.Args[1]
 15569  	v_0 := v.Args[0]
 15570  	b := v.Block
 15571  	// match: (Lsh64x8 <t> x (Const8 [c]))
 15572  	// result: (Lsh64x64 x (Const64 <t> [int64(uint8(c))]))
 15573  	for {
 15574  		t := v.Type
 15575  		x := v_0
 15576  		if v_1.Op != OpConst8 {
 15577  			break
 15578  		}
 15579  		c := auxIntToInt8(v_1.AuxInt)
 15580  		v.reset(OpLsh64x64)
 15581  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 15582  		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
 15583  		v.AddArg2(x, v0)
 15584  		return true
 15585  	}
 15586  	// match: (Lsh64x8 (Const64 [0]) _)
 15587  	// result: (Const64 [0])
 15588  	for {
 15589  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 15590  			break
 15591  		}
 15592  		v.reset(OpConst64)
 15593  		v.AuxInt = int64ToAuxInt(0)
 15594  		return true
 15595  	}
 15596  	return false
 15597  }
 15598  func rewriteValuegeneric_OpLsh8x16(v *Value) bool {
 15599  	v_1 := v.Args[1]
 15600  	v_0 := v.Args[0]
 15601  	b := v.Block
 15602  	// match: (Lsh8x16 <t> x (Const16 [c]))
 15603  	// result: (Lsh8x64 x (Const64 <t> [int64(uint16(c))]))
 15604  	for {
 15605  		t := v.Type
 15606  		x := v_0
 15607  		if v_1.Op != OpConst16 {
 15608  			break
 15609  		}
 15610  		c := auxIntToInt16(v_1.AuxInt)
 15611  		v.reset(OpLsh8x64)
 15612  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 15613  		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
 15614  		v.AddArg2(x, v0)
 15615  		return true
 15616  	}
 15617  	// match: (Lsh8x16 (Const8 [0]) _)
 15618  	// result: (Const8 [0])
 15619  	for {
 15620  		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 15621  			break
 15622  		}
 15623  		v.reset(OpConst8)
 15624  		v.AuxInt = int8ToAuxInt(0)
 15625  		return true
 15626  	}
 15627  	return false
 15628  }
 15629  func rewriteValuegeneric_OpLsh8x32(v *Value) bool {
 15630  	v_1 := v.Args[1]
 15631  	v_0 := v.Args[0]
 15632  	b := v.Block
 15633  	// match: (Lsh8x32 <t> x (Const32 [c]))
 15634  	// result: (Lsh8x64 x (Const64 <t> [int64(uint32(c))]))
 15635  	for {
 15636  		t := v.Type
 15637  		x := v_0
 15638  		if v_1.Op != OpConst32 {
 15639  			break
 15640  		}
 15641  		c := auxIntToInt32(v_1.AuxInt)
 15642  		v.reset(OpLsh8x64)
 15643  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 15644  		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
 15645  		v.AddArg2(x, v0)
 15646  		return true
 15647  	}
 15648  	// match: (Lsh8x32 (Const8 [0]) _)
 15649  	// result: (Const8 [0])
 15650  	for {
 15651  		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 15652  			break
 15653  		}
 15654  		v.reset(OpConst8)
 15655  		v.AuxInt = int8ToAuxInt(0)
 15656  		return true
 15657  	}
 15658  	return false
 15659  }
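       // rewriteValuegeneric_OpLsh8x64 carries the substantive 8-bit left-shift
       // rules: constant folding, dropping a shift by zero, producing zero when
       // the shifted value is a constant zero or the count is at least 8,
       // merging consecutive left shifts, turning a right shift followed by a
       // left shift by the same amount into a mask (x>>c<<c == x & (-1<<c)),
       // and redistributing the shift across an And of a right-shifted value
       // and a constant.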
 15660  func rewriteValuegeneric_OpLsh8x64(v *Value) bool {
 15661  	v_1 := v.Args[1]
 15662  	v_0 := v.Args[0]
 15663  	b := v.Block
 15664  	typ := &b.Func.Config.Types
 15665  	// match: (Lsh8x64 (Const8 [c]) (Const64 [d]))
 15666  	// result: (Const8 [c << uint64(d)])
 15667  	for {
 15668  		if v_0.Op != OpConst8 {
 15669  			break
 15670  		}
 15671  		c := auxIntToInt8(v_0.AuxInt)
 15672  		if v_1.Op != OpConst64 {
 15673  			break
 15674  		}
 15675  		d := auxIntToInt64(v_1.AuxInt)
 15676  		v.reset(OpConst8)
 15677  		v.AuxInt = int8ToAuxInt(c << uint64(d))
 15678  		return true
 15679  	}
 15680  	// match: (Lsh8x64 x (Const64 [0]))
 15681  	// result: x
 15682  	for {
 15683  		x := v_0
 15684  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
 15685  			break
 15686  		}
 15687  		v.copyOf(x)
 15688  		return true
 15689  	}
 15690  	// match: (Lsh8x64 (Const8 [0]) _)
 15691  	// result: (Const8 [0])
 15692  	for {
 15693  		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 15694  			break
 15695  		}
 15696  		v.reset(OpConst8)
 15697  		v.AuxInt = int8ToAuxInt(0)
 15698  		return true
 15699  	}
 15700  	// match: (Lsh8x64 _ (Const64 [c]))
 15701  	// cond: uint64(c) >= 8
 15702  	// result: (Const8 [0])
 15703  	for {
 15704  		if v_1.Op != OpConst64 {
 15705  			break
 15706  		}
 15707  		c := auxIntToInt64(v_1.AuxInt)
 15708  		if !(uint64(c) >= 8) {
 15709  			break
 15710  		}
 15711  		v.reset(OpConst8)
 15712  		v.AuxInt = int8ToAuxInt(0)
 15713  		return true
 15714  	}
 15715  	// match: (Lsh8x64 <t> (Lsh8x64 x (Const64 [c])) (Const64 [d]))
 15716  	// cond: !uaddOvf(c,d)
 15717  	// result: (Lsh8x64 x (Const64 <t> [c+d]))
 15718  	for {
 15719  		t := v.Type
 15720  		if v_0.Op != OpLsh8x64 {
 15721  			break
 15722  		}
 15723  		_ = v_0.Args[1]
 15724  		x := v_0.Args[0]
 15725  		v_0_1 := v_0.Args[1]
 15726  		if v_0_1.Op != OpConst64 {
 15727  			break
 15728  		}
 15729  		c := auxIntToInt64(v_0_1.AuxInt)
 15730  		if v_1.Op != OpConst64 {
 15731  			break
 15732  		}
 15733  		d := auxIntToInt64(v_1.AuxInt)
 15734  		if !(!uaddOvf(c, d)) {
 15735  			break
 15736  		}
 15737  		v.reset(OpLsh8x64)
 15738  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 15739  		v0.AuxInt = int64ToAuxInt(c + d)
 15740  		v.AddArg2(x, v0)
 15741  		return true
 15742  	}
 15743  	// match: (Lsh8x64 i:(Rsh8x64 x (Const64 [c])) (Const64 [c]))
 15744  	// cond: c >= 0 && c < 8 && i.Uses == 1
 15745  	// result: (And8 x (Const8 <v.Type> [int8(-1) << c]))
 15746  	for {
 15747  		i := v_0
 15748  		if i.Op != OpRsh8x64 {
 15749  			break
 15750  		}
 15751  		_ = i.Args[1]
 15752  		x := i.Args[0]
 15753  		i_1 := i.Args[1]
 15754  		if i_1.Op != OpConst64 {
 15755  			break
 15756  		}
 15757  		c := auxIntToInt64(i_1.AuxInt)
 15758  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 8 && i.Uses == 1) {
 15759  			break
 15760  		}
 15761  		v.reset(OpAnd8)
 15762  		v0 := b.NewValue0(v.Pos, OpConst8, v.Type)
 15763  		v0.AuxInt = int8ToAuxInt(int8(-1) << c)
 15764  		v.AddArg2(x, v0)
 15765  		return true
 15766  	}
 15767  	// match: (Lsh8x64 i:(Rsh8Ux64 x (Const64 [c])) (Const64 [c]))
 15768  	// cond: c >= 0 && c < 8 && i.Uses == 1
 15769  	// result: (And8 x (Const8 <v.Type> [int8(-1) << c]))
 15770  	for {
 15771  		i := v_0
 15772  		if i.Op != OpRsh8Ux64 {
 15773  			break
 15774  		}
 15775  		_ = i.Args[1]
 15776  		x := i.Args[0]
 15777  		i_1 := i.Args[1]
 15778  		if i_1.Op != OpConst64 {
 15779  			break
 15780  		}
 15781  		c := auxIntToInt64(i_1.AuxInt)
 15782  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 8 && i.Uses == 1) {
 15783  			break
 15784  		}
 15785  		v.reset(OpAnd8)
 15786  		v0 := b.NewValue0(v.Pos, OpConst8, v.Type)
 15787  		v0.AuxInt = int8ToAuxInt(int8(-1) << c)
 15788  		v.AddArg2(x, v0)
 15789  		return true
 15790  	}
 15791  	// match: (Lsh8x64 (Rsh8Ux64 (Lsh8x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
 15792  	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
 15793  	// result: (Lsh8x64 x (Const64 <typ.UInt64> [c1-c2+c3]))
 15794  	for {
 15795  		if v_0.Op != OpRsh8Ux64 {
 15796  			break
 15797  		}
 15798  		_ = v_0.Args[1]
 15799  		v_0_0 := v_0.Args[0]
 15800  		if v_0_0.Op != OpLsh8x64 {
 15801  			break
 15802  		}
 15803  		_ = v_0_0.Args[1]
 15804  		x := v_0_0.Args[0]
 15805  		v_0_0_1 := v_0_0.Args[1]
 15806  		if v_0_0_1.Op != OpConst64 {
 15807  			break
 15808  		}
 15809  		c1 := auxIntToInt64(v_0_0_1.AuxInt)
 15810  		v_0_1 := v_0.Args[1]
 15811  		if v_0_1.Op != OpConst64 {
 15812  			break
 15813  		}
 15814  		c2 := auxIntToInt64(v_0_1.AuxInt)
 15815  		if v_1.Op != OpConst64 {
 15816  			break
 15817  		}
 15818  		c3 := auxIntToInt64(v_1.AuxInt)
 15819  		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
 15820  			break
 15821  		}
 15822  		v.reset(OpLsh8x64)
 15823  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
 15824  		v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
 15825  		v.AddArg2(x, v0)
 15826  		return true
 15827  	}
 15828  	// match: (Lsh8x64 (And8 (Rsh8x64 <t> x (Const64 <t2> [c])) (Const8 [d])) (Const64 [e]))
 15829  	// cond: c >= e
 15830  	// result: (And8 (Rsh8x64 <t> x (Const64 <t2> [c-e])) (Const8 <t> [d<<e]))
 15831  	for {
 15832  		if v_0.Op != OpAnd8 {
 15833  			break
 15834  		}
 15835  		_ = v_0.Args[1]
 15836  		v_0_0 := v_0.Args[0]
 15837  		v_0_1 := v_0.Args[1]
 15838  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 15839  			if v_0_0.Op != OpRsh8x64 {
 15840  				continue
 15841  			}
 15842  			t := v_0_0.Type
 15843  			_ = v_0_0.Args[1]
 15844  			x := v_0_0.Args[0]
 15845  			v_0_0_1 := v_0_0.Args[1]
 15846  			if v_0_0_1.Op != OpConst64 {
 15847  				continue
 15848  			}
 15849  			t2 := v_0_0_1.Type
 15850  			c := auxIntToInt64(v_0_0_1.AuxInt)
 15851  			if v_0_1.Op != OpConst8 {
 15852  				continue
 15853  			}
 15854  			d := auxIntToInt8(v_0_1.AuxInt)
 15855  			if v_1.Op != OpConst64 {
 15856  				continue
 15857  			}
 15858  			e := auxIntToInt64(v_1.AuxInt)
 15859  			if !(c >= e) {
 15860  				continue
 15861  			}
 15862  			v.reset(OpAnd8)
 15863  			v0 := b.NewValue0(v.Pos, OpRsh8x64, t)
 15864  			v1 := b.NewValue0(v.Pos, OpConst64, t2)
 15865  			v1.AuxInt = int64ToAuxInt(c - e)
 15866  			v0.AddArg2(x, v1)
 15867  			v2 := b.NewValue0(v.Pos, OpConst8, t)
 15868  			v2.AuxInt = int8ToAuxInt(d << e)
 15869  			v.AddArg2(v0, v2)
 15870  			return true
 15871  		}
 15872  		break
 15873  	}
 15874  	// match: (Lsh8x64 (And8 (Rsh8Ux64 <t> x (Const64 <t2> [c])) (Const8 [d])) (Const64 [e]))
 15875  	// cond: c >= e
 15876  	// result: (And8 (Rsh8Ux64 <t> x (Const64 <t2> [c-e])) (Const8 <t> [d<<e]))
 15877  	for {
 15878  		if v_0.Op != OpAnd8 {
 15879  			break
 15880  		}
 15881  		_ = v_0.Args[1]
 15882  		v_0_0 := v_0.Args[0]
 15883  		v_0_1 := v_0.Args[1]
 15884  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 15885  			if v_0_0.Op != OpRsh8Ux64 {
 15886  				continue
 15887  			}
 15888  			t := v_0_0.Type
 15889  			_ = v_0_0.Args[1]
 15890  			x := v_0_0.Args[0]
 15891  			v_0_0_1 := v_0_0.Args[1]
 15892  			if v_0_0_1.Op != OpConst64 {
 15893  				continue
 15894  			}
 15895  			t2 := v_0_0_1.Type
 15896  			c := auxIntToInt64(v_0_0_1.AuxInt)
 15897  			if v_0_1.Op != OpConst8 {
 15898  				continue
 15899  			}
 15900  			d := auxIntToInt8(v_0_1.AuxInt)
 15901  			if v_1.Op != OpConst64 {
 15902  				continue
 15903  			}
 15904  			e := auxIntToInt64(v_1.AuxInt)
 15905  			if !(c >= e) {
 15906  				continue
 15907  			}
 15908  			v.reset(OpAnd8)
 15909  			v0 := b.NewValue0(v.Pos, OpRsh8Ux64, t)
 15910  			v1 := b.NewValue0(v.Pos, OpConst64, t2)
 15911  			v1.AuxInt = int64ToAuxInt(c - e)
 15912  			v0.AddArg2(x, v1)
 15913  			v2 := b.NewValue0(v.Pos, OpConst8, t)
 15914  			v2.AuxInt = int8ToAuxInt(d << e)
 15915  			v.AddArg2(v0, v2)
 15916  			return true
 15917  		}
 15918  		break
 15919  	}
 15920  	// match: (Lsh8x64 (And8 (Rsh8x64 <t> x (Const64 <t2> [c])) (Const8 [d])) (Const64 [e]))
 15921  	// cond: c < e
 15922  	// result: (And8 (Lsh8x64 <t> x (Const64 <t2> [e-c])) (Const8 <t> [d<<e]))
 15923  	for {
 15924  		if v_0.Op != OpAnd8 {
 15925  			break
 15926  		}
 15927  		_ = v_0.Args[1]
 15928  		v_0_0 := v_0.Args[0]
 15929  		v_0_1 := v_0.Args[1]
 15930  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 15931  			if v_0_0.Op != OpRsh8x64 {
 15932  				continue
 15933  			}
 15934  			t := v_0_0.Type
 15935  			_ = v_0_0.Args[1]
 15936  			x := v_0_0.Args[0]
 15937  			v_0_0_1 := v_0_0.Args[1]
 15938  			if v_0_0_1.Op != OpConst64 {
 15939  				continue
 15940  			}
 15941  			t2 := v_0_0_1.Type
 15942  			c := auxIntToInt64(v_0_0_1.AuxInt)
 15943  			if v_0_1.Op != OpConst8 {
 15944  				continue
 15945  			}
 15946  			d := auxIntToInt8(v_0_1.AuxInt)
 15947  			if v_1.Op != OpConst64 {
 15948  				continue
 15949  			}
 15950  			e := auxIntToInt64(v_1.AuxInt)
 15951  			if !(c < e) {
 15952  				continue
 15953  			}
 15954  			v.reset(OpAnd8)
 15955  			v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
 15956  			v1 := b.NewValue0(v.Pos, OpConst64, t2)
 15957  			v1.AuxInt = int64ToAuxInt(e - c)
 15958  			v0.AddArg2(x, v1)
 15959  			v2 := b.NewValue0(v.Pos, OpConst8, t)
 15960  			v2.AuxInt = int8ToAuxInt(d << e)
 15961  			v.AddArg2(v0, v2)
 15962  			return true
 15963  		}
 15964  		break
 15965  	}
 15966  	// match: (Lsh8x64 (And8 (Rsh8Ux64 <t> x (Const64 <t2> [c])) (Const8 [d])) (Const64 [e]))
 15967  	// cond: c < e
 15968  	// result: (And8 (Lsh8x64 <t> x (Const64 <t2> [e-c])) (Const8 <t> [d<<e]))
 15969  	for {
 15970  		if v_0.Op != OpAnd8 {
 15971  			break
 15972  		}
 15973  		_ = v_0.Args[1]
 15974  		v_0_0 := v_0.Args[0]
 15975  		v_0_1 := v_0.Args[1]
 15976  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 15977  			if v_0_0.Op != OpRsh8Ux64 {
 15978  				continue
 15979  			}
 15980  			t := v_0_0.Type
 15981  			_ = v_0_0.Args[1]
 15982  			x := v_0_0.Args[0]
 15983  			v_0_0_1 := v_0_0.Args[1]
 15984  			if v_0_0_1.Op != OpConst64 {
 15985  				continue
 15986  			}
 15987  			t2 := v_0_0_1.Type
 15988  			c := auxIntToInt64(v_0_0_1.AuxInt)
 15989  			if v_0_1.Op != OpConst8 {
 15990  				continue
 15991  			}
 15992  			d := auxIntToInt8(v_0_1.AuxInt)
 15993  			if v_1.Op != OpConst64 {
 15994  				continue
 15995  			}
 15996  			e := auxIntToInt64(v_1.AuxInt)
 15997  			if !(c < e) {
 15998  				continue
 15999  			}
 16000  			v.reset(OpAnd8)
 16001  			v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
 16002  			v1 := b.NewValue0(v.Pos, OpConst64, t2)
 16003  			v1.AuxInt = int64ToAuxInt(e - c)
 16004  			v0.AddArg2(x, v1)
 16005  			v2 := b.NewValue0(v.Pos, OpConst8, t)
 16006  			v2.AuxInt = int8ToAuxInt(d << e)
 16007  			v.AddArg2(v0, v2)
 16008  			return true
 16009  		}
 16010  		break
 16011  	}
 16012  	return false
 16013  }
 16014  func rewriteValuegeneric_OpLsh8x8(v *Value) bool {
 16015  	v_1 := v.Args[1]
 16016  	v_0 := v.Args[0]
 16017  	b := v.Block
 16018  	// match: (Lsh8x8 <t> x (Const8 [c]))
 16019  	// result: (Lsh8x64 x (Const64 <t> [int64(uint8(c))]))
 16020  	for {
 16021  		t := v.Type
 16022  		x := v_0
 16023  		if v_1.Op != OpConst8 {
 16024  			break
 16025  		}
 16026  		c := auxIntToInt8(v_1.AuxInt)
 16027  		v.reset(OpLsh8x64)
 16028  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 16029  		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
 16030  		v.AddArg2(x, v0)
 16031  		return true
 16032  	}
 16033  	// match: (Lsh8x8 (Const8 [0]) _)
 16034  	// result: (Const8 [0])
 16035  	for {
 16036  		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 16037  			break
 16038  		}
 16039  		v.reset(OpConst8)
 16040  		v.AuxInt = int8ToAuxInt(0)
 16041  		return true
 16042  	}
 16043  	return false
 16044  }
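       // rewriteValuegeneric_OpMod16 strength-reduces signed 16-bit modulus: it
       // folds constant operands, rewrites n % c as n & (c-1) when n is known
       // non-negative and c is a power of two (for example n % 8 becomes n & 7),
       // replaces a negative divisor with its absolute value (the result depends
       // only on the divisor's magnitude), and otherwise lowers x % c to
       // x - (x/c)*c so the Div16 rules can take over.
       // rewriteValuegeneric_OpMod32, rewriteValuegeneric_OpMod64 and
       // rewriteValuegeneric_OpMod8 follow the same scheme.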
 16045  func rewriteValuegeneric_OpMod16(v *Value) bool {
 16046  	v_1 := v.Args[1]
 16047  	v_0 := v.Args[0]
 16048  	b := v.Block
 16049  	// match: (Mod16 (Const16 [c]) (Const16 [d]))
 16050  	// cond: d != 0
 16051  	// result: (Const16 [c % d])
 16052  	for {
 16053  		if v_0.Op != OpConst16 {
 16054  			break
 16055  		}
 16056  		c := auxIntToInt16(v_0.AuxInt)
 16057  		if v_1.Op != OpConst16 {
 16058  			break
 16059  		}
 16060  		d := auxIntToInt16(v_1.AuxInt)
 16061  		if !(d != 0) {
 16062  			break
 16063  		}
 16064  		v.reset(OpConst16)
 16065  		v.AuxInt = int16ToAuxInt(c % d)
 16066  		return true
 16067  	}
 16068  	// match: (Mod16 <t> n (Const16 [c]))
 16069  	// cond: isNonNegative(n) && isPowerOfTwo(c)
 16070  	// result: (And16 n (Const16 <t> [c-1]))
 16071  	for {
 16072  		t := v.Type
 16073  		n := v_0
 16074  		if v_1.Op != OpConst16 {
 16075  			break
 16076  		}
 16077  		c := auxIntToInt16(v_1.AuxInt)
 16078  		if !(isNonNegative(n) && isPowerOfTwo(c)) {
 16079  			break
 16080  		}
 16081  		v.reset(OpAnd16)
 16082  		v0 := b.NewValue0(v.Pos, OpConst16, t)
 16083  		v0.AuxInt = int16ToAuxInt(c - 1)
 16084  		v.AddArg2(n, v0)
 16085  		return true
 16086  	}
 16087  	// match: (Mod16 <t> n (Const16 [c]))
 16088  	// cond: c < 0 && c != -1<<15
 16089  	// result: (Mod16 <t> n (Const16 <t> [-c]))
 16090  	for {
 16091  		t := v.Type
 16092  		n := v_0
 16093  		if v_1.Op != OpConst16 {
 16094  			break
 16095  		}
 16096  		c := auxIntToInt16(v_1.AuxInt)
 16097  		if !(c < 0 && c != -1<<15) {
 16098  			break
 16099  		}
 16100  		v.reset(OpMod16)
 16101  		v.Type = t
 16102  		v0 := b.NewValue0(v.Pos, OpConst16, t)
 16103  		v0.AuxInt = int16ToAuxInt(-c)
 16104  		v.AddArg2(n, v0)
 16105  		return true
 16106  	}
 16107  	// match: (Mod16 <t> x (Const16 [c]))
 16108  	// cond: x.Op != OpConst16 && (c > 0 || c == -1<<15)
 16109  	// result: (Sub16 x (Mul16 <t> (Div16 <t> x (Const16 <t> [c])) (Const16 <t> [c])))
 16110  	for {
 16111  		t := v.Type
 16112  		x := v_0
 16113  		if v_1.Op != OpConst16 {
 16114  			break
 16115  		}
 16116  		c := auxIntToInt16(v_1.AuxInt)
 16117  		if !(x.Op != OpConst16 && (c > 0 || c == -1<<15)) {
 16118  			break
 16119  		}
 16120  		v.reset(OpSub16)
 16121  		v0 := b.NewValue0(v.Pos, OpMul16, t)
 16122  		v1 := b.NewValue0(v.Pos, OpDiv16, t)
 16123  		v2 := b.NewValue0(v.Pos, OpConst16, t)
 16124  		v2.AuxInt = int16ToAuxInt(c)
 16125  		v1.AddArg2(x, v2)
 16126  		v0.AddArg2(v1, v2)
 16127  		v.AddArg2(x, v0)
 16128  		return true
 16129  	}
 16130  	return false
 16131  }
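       // rewriteValuegeneric_OpMod16u is the unsigned counterpart: any
       // power-of-two divisor becomes an And mask (no non-negativity check is
       // needed), and other divisors are lowered to x - (x/c)*c only when
       // umagicOK16 says the resulting Div16u can itself be rewritten with a
       // magic-number multiply. The wider unsigned variants below follow the
       // same pattern.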
 16132  func rewriteValuegeneric_OpMod16u(v *Value) bool {
 16133  	v_1 := v.Args[1]
 16134  	v_0 := v.Args[0]
 16135  	b := v.Block
 16136  	// match: (Mod16u (Const16 [c]) (Const16 [d]))
 16137  	// cond: d != 0
 16138  	// result: (Const16 [int16(uint16(c) % uint16(d))])
 16139  	for {
 16140  		if v_0.Op != OpConst16 {
 16141  			break
 16142  		}
 16143  		c := auxIntToInt16(v_0.AuxInt)
 16144  		if v_1.Op != OpConst16 {
 16145  			break
 16146  		}
 16147  		d := auxIntToInt16(v_1.AuxInt)
 16148  		if !(d != 0) {
 16149  			break
 16150  		}
 16151  		v.reset(OpConst16)
 16152  		v.AuxInt = int16ToAuxInt(int16(uint16(c) % uint16(d)))
 16153  		return true
 16154  	}
 16155  	// match: (Mod16u <t> n (Const16 [c]))
 16156  	// cond: isPowerOfTwo(c)
 16157  	// result: (And16 n (Const16 <t> [c-1]))
 16158  	for {
 16159  		t := v.Type
 16160  		n := v_0
 16161  		if v_1.Op != OpConst16 {
 16162  			break
 16163  		}
 16164  		c := auxIntToInt16(v_1.AuxInt)
 16165  		if !(isPowerOfTwo(c)) {
 16166  			break
 16167  		}
 16168  		v.reset(OpAnd16)
 16169  		v0 := b.NewValue0(v.Pos, OpConst16, t)
 16170  		v0.AuxInt = int16ToAuxInt(c - 1)
 16171  		v.AddArg2(n, v0)
 16172  		return true
 16173  	}
 16174  	// match: (Mod16u <t> x (Const16 [c]))
 16175  	// cond: x.Op != OpConst16 && c > 0 && umagicOK16(c)
 16176  	// result: (Sub16 x (Mul16 <t> (Div16u <t> x (Const16 <t> [c])) (Const16 <t> [c])))
 16177  	for {
 16178  		t := v.Type
 16179  		x := v_0
 16180  		if v_1.Op != OpConst16 {
 16181  			break
 16182  		}
 16183  		c := auxIntToInt16(v_1.AuxInt)
 16184  		if !(x.Op != OpConst16 && c > 0 && umagicOK16(c)) {
 16185  			break
 16186  		}
 16187  		v.reset(OpSub16)
 16188  		v0 := b.NewValue0(v.Pos, OpMul16, t)
 16189  		v1 := b.NewValue0(v.Pos, OpDiv16u, t)
 16190  		v2 := b.NewValue0(v.Pos, OpConst16, t)
 16191  		v2.AuxInt = int16ToAuxInt(c)
 16192  		v1.AddArg2(x, v2)
 16193  		v0.AddArg2(v1, v2)
 16194  		v.AddArg2(x, v0)
 16195  		return true
 16196  	}
 16197  	return false
 16198  }
 16199  func rewriteValuegeneric_OpMod32(v *Value) bool {
 16200  	v_1 := v.Args[1]
 16201  	v_0 := v.Args[0]
 16202  	b := v.Block
 16203  	// match: (Mod32 (Const32 [c]) (Const32 [d]))
 16204  	// cond: d != 0
 16205  	// result: (Const32 [c % d])
 16206  	for {
 16207  		if v_0.Op != OpConst32 {
 16208  			break
 16209  		}
 16210  		c := auxIntToInt32(v_0.AuxInt)
 16211  		if v_1.Op != OpConst32 {
 16212  			break
 16213  		}
 16214  		d := auxIntToInt32(v_1.AuxInt)
 16215  		if !(d != 0) {
 16216  			break
 16217  		}
 16218  		v.reset(OpConst32)
 16219  		v.AuxInt = int32ToAuxInt(c % d)
 16220  		return true
 16221  	}
 16222  	// match: (Mod32 <t> n (Const32 [c]))
 16223  	// cond: isNonNegative(n) && isPowerOfTwo(c)
 16224  	// result: (And32 n (Const32 <t> [c-1]))
 16225  	for {
 16226  		t := v.Type
 16227  		n := v_0
 16228  		if v_1.Op != OpConst32 {
 16229  			break
 16230  		}
 16231  		c := auxIntToInt32(v_1.AuxInt)
 16232  		if !(isNonNegative(n) && isPowerOfTwo(c)) {
 16233  			break
 16234  		}
 16235  		v.reset(OpAnd32)
 16236  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 16237  		v0.AuxInt = int32ToAuxInt(c - 1)
 16238  		v.AddArg2(n, v0)
 16239  		return true
 16240  	}
 16241  	// match: (Mod32 <t> n (Const32 [c]))
 16242  	// cond: c < 0 && c != -1<<31
 16243  	// result: (Mod32 <t> n (Const32 <t> [-c]))
 16244  	for {
 16245  		t := v.Type
 16246  		n := v_0
 16247  		if v_1.Op != OpConst32 {
 16248  			break
 16249  		}
 16250  		c := auxIntToInt32(v_1.AuxInt)
 16251  		if !(c < 0 && c != -1<<31) {
 16252  			break
 16253  		}
 16254  		v.reset(OpMod32)
 16255  		v.Type = t
 16256  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 16257  		v0.AuxInt = int32ToAuxInt(-c)
 16258  		v.AddArg2(n, v0)
 16259  		return true
 16260  	}
 16261  	// match: (Mod32 <t> x (Const32 [c]))
 16262  	// cond: x.Op != OpConst32 && (c > 0 || c == -1<<31)
 16263  	// result: (Sub32 x (Mul32 <t> (Div32 <t> x (Const32 <t> [c])) (Const32 <t> [c])))
 16264  	for {
 16265  		t := v.Type
 16266  		x := v_0
 16267  		if v_1.Op != OpConst32 {
 16268  			break
 16269  		}
 16270  		c := auxIntToInt32(v_1.AuxInt)
 16271  		if !(x.Op != OpConst32 && (c > 0 || c == -1<<31)) {
 16272  			break
 16273  		}
 16274  		v.reset(OpSub32)
 16275  		v0 := b.NewValue0(v.Pos, OpMul32, t)
 16276  		v1 := b.NewValue0(v.Pos, OpDiv32, t)
 16277  		v2 := b.NewValue0(v.Pos, OpConst32, t)
 16278  		v2.AuxInt = int32ToAuxInt(c)
 16279  		v1.AddArg2(x, v2)
 16280  		v0.AddArg2(v1, v2)
 16281  		v.AddArg2(x, v0)
 16282  		return true
 16283  	}
 16284  	return false
 16285  }
 16286  func rewriteValuegeneric_OpMod32u(v *Value) bool {
 16287  	v_1 := v.Args[1]
 16288  	v_0 := v.Args[0]
 16289  	b := v.Block
 16290  	// match: (Mod32u (Const32 [c]) (Const32 [d]))
 16291  	// cond: d != 0
 16292  	// result: (Const32 [int32(uint32(c) % uint32(d))])
 16293  	for {
 16294  		if v_0.Op != OpConst32 {
 16295  			break
 16296  		}
 16297  		c := auxIntToInt32(v_0.AuxInt)
 16298  		if v_1.Op != OpConst32 {
 16299  			break
 16300  		}
 16301  		d := auxIntToInt32(v_1.AuxInt)
 16302  		if !(d != 0) {
 16303  			break
 16304  		}
 16305  		v.reset(OpConst32)
 16306  		v.AuxInt = int32ToAuxInt(int32(uint32(c) % uint32(d)))
 16307  		return true
 16308  	}
 16309  	// match: (Mod32u <t> n (Const32 [c]))
 16310  	// cond: isPowerOfTwo(c)
 16311  	// result: (And32 n (Const32 <t> [c-1]))
 16312  	for {
 16313  		t := v.Type
 16314  		n := v_0
 16315  		if v_1.Op != OpConst32 {
 16316  			break
 16317  		}
 16318  		c := auxIntToInt32(v_1.AuxInt)
 16319  		if !(isPowerOfTwo(c)) {
 16320  			break
 16321  		}
 16322  		v.reset(OpAnd32)
 16323  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 16324  		v0.AuxInt = int32ToAuxInt(c - 1)
 16325  		v.AddArg2(n, v0)
 16326  		return true
 16327  	}
 16328  	// match: (Mod32u <t> x (Const32 [c]))
 16329  	// cond: x.Op != OpConst32 && c > 0 && umagicOK32(c)
 16330  	// result: (Sub32 x (Mul32 <t> (Div32u <t> x (Const32 <t> [c])) (Const32 <t> [c])))
 16331  	for {
 16332  		t := v.Type
 16333  		x := v_0
 16334  		if v_1.Op != OpConst32 {
 16335  			break
 16336  		}
 16337  		c := auxIntToInt32(v_1.AuxInt)
 16338  		if !(x.Op != OpConst32 && c > 0 && umagicOK32(c)) {
 16339  			break
 16340  		}
 16341  		v.reset(OpSub32)
 16342  		v0 := b.NewValue0(v.Pos, OpMul32, t)
 16343  		v1 := b.NewValue0(v.Pos, OpDiv32u, t)
 16344  		v2 := b.NewValue0(v.Pos, OpConst32, t)
 16345  		v2.AuxInt = int32ToAuxInt(c)
 16346  		v1.AddArg2(x, v2)
 16347  		v0.AddArg2(v1, v2)
 16348  		v.AddArg2(x, v0)
 16349  		return true
 16350  	}
 16351  	return false
 16352  }
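       // rewriteValuegeneric_OpMod64 mirrors the signed Mod16 rules and adds a
       // 64-bit special case: n % (-1<<63) reduces to n when n is known
       // non-negative, because a non-negative int64 is always smaller in
       // magnitude than 1<<63.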
 16353  func rewriteValuegeneric_OpMod64(v *Value) bool {
 16354  	v_1 := v.Args[1]
 16355  	v_0 := v.Args[0]
 16356  	b := v.Block
 16357  	// match: (Mod64 (Const64 [c]) (Const64 [d]))
 16358  	// cond: d != 0
 16359  	// result: (Const64 [c % d])
 16360  	for {
 16361  		if v_0.Op != OpConst64 {
 16362  			break
 16363  		}
 16364  		c := auxIntToInt64(v_0.AuxInt)
 16365  		if v_1.Op != OpConst64 {
 16366  			break
 16367  		}
 16368  		d := auxIntToInt64(v_1.AuxInt)
 16369  		if !(d != 0) {
 16370  			break
 16371  		}
 16372  		v.reset(OpConst64)
 16373  		v.AuxInt = int64ToAuxInt(c % d)
 16374  		return true
 16375  	}
 16376  	// match: (Mod64 <t> n (Const64 [c]))
 16377  	// cond: isNonNegative(n) && isPowerOfTwo(c)
 16378  	// result: (And64 n (Const64 <t> [c-1]))
 16379  	for {
 16380  		t := v.Type
 16381  		n := v_0
 16382  		if v_1.Op != OpConst64 {
 16383  			break
 16384  		}
 16385  		c := auxIntToInt64(v_1.AuxInt)
 16386  		if !(isNonNegative(n) && isPowerOfTwo(c)) {
 16387  			break
 16388  		}
 16389  		v.reset(OpAnd64)
 16390  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 16391  		v0.AuxInt = int64ToAuxInt(c - 1)
 16392  		v.AddArg2(n, v0)
 16393  		return true
 16394  	}
 16395  	// match: (Mod64 n (Const64 [-1<<63]))
 16396  	// cond: isNonNegative(n)
 16397  	// result: n
 16398  	for {
 16399  		n := v_0
 16400  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != -1<<63 || !(isNonNegative(n)) {
 16401  			break
 16402  		}
 16403  		v.copyOf(n)
 16404  		return true
 16405  	}
 16406  	// match: (Mod64 <t> n (Const64 [c]))
 16407  	// cond: c < 0 && c != -1<<63
 16408  	// result: (Mod64 <t> n (Const64 <t> [-c]))
 16409  	for {
 16410  		t := v.Type
 16411  		n := v_0
 16412  		if v_1.Op != OpConst64 {
 16413  			break
 16414  		}
 16415  		c := auxIntToInt64(v_1.AuxInt)
 16416  		if !(c < 0 && c != -1<<63) {
 16417  			break
 16418  		}
 16419  		v.reset(OpMod64)
 16420  		v.Type = t
 16421  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 16422  		v0.AuxInt = int64ToAuxInt(-c)
 16423  		v.AddArg2(n, v0)
 16424  		return true
 16425  	}
 16426  	// match: (Mod64 <t> x (Const64 [c]))
 16427  	// cond: x.Op != OpConst64 && (c > 0 || c == -1<<63)
 16428  	// result: (Sub64 x (Mul64 <t> (Div64 <t> x (Const64 <t> [c])) (Const64 <t> [c])))
 16429  	for {
 16430  		t := v.Type
 16431  		x := v_0
 16432  		if v_1.Op != OpConst64 {
 16433  			break
 16434  		}
 16435  		c := auxIntToInt64(v_1.AuxInt)
 16436  		if !(x.Op != OpConst64 && (c > 0 || c == -1<<63)) {
 16437  			break
 16438  		}
 16439  		v.reset(OpSub64)
 16440  		v0 := b.NewValue0(v.Pos, OpMul64, t)
 16441  		v1 := b.NewValue0(v.Pos, OpDiv64, t)
 16442  		v2 := b.NewValue0(v.Pos, OpConst64, t)
 16443  		v2.AuxInt = int64ToAuxInt(c)
 16444  		v1.AddArg2(x, v2)
 16445  		v0.AddArg2(v1, v2)
 16446  		v.AddArg2(x, v0)
 16447  		return true
 16448  	}
 16449  	return false
 16450  }
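       // rewriteValuegeneric_OpMod64u adds the one case the generic
       // power-of-two rule misses because the constant is negative as an int64:
       // an unsigned modulus by 1<<63 (bit pattern -1<<63) becomes
       // n & (1<<63 - 1).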
 16451  func rewriteValuegeneric_OpMod64u(v *Value) bool {
 16452  	v_1 := v.Args[1]
 16453  	v_0 := v.Args[0]
 16454  	b := v.Block
 16455  	// match: (Mod64u (Const64 [c]) (Const64 [d]))
 16456  	// cond: d != 0
 16457  	// result: (Const64 [int64(uint64(c) % uint64(d))])
 16458  	for {
 16459  		if v_0.Op != OpConst64 {
 16460  			break
 16461  		}
 16462  		c := auxIntToInt64(v_0.AuxInt)
 16463  		if v_1.Op != OpConst64 {
 16464  			break
 16465  		}
 16466  		d := auxIntToInt64(v_1.AuxInt)
 16467  		if !(d != 0) {
 16468  			break
 16469  		}
 16470  		v.reset(OpConst64)
 16471  		v.AuxInt = int64ToAuxInt(int64(uint64(c) % uint64(d)))
 16472  		return true
 16473  	}
 16474  	// match: (Mod64u <t> n (Const64 [c]))
 16475  	// cond: isPowerOfTwo(c)
 16476  	// result: (And64 n (Const64 <t> [c-1]))
 16477  	for {
 16478  		t := v.Type
 16479  		n := v_0
 16480  		if v_1.Op != OpConst64 {
 16481  			break
 16482  		}
 16483  		c := auxIntToInt64(v_1.AuxInt)
 16484  		if !(isPowerOfTwo(c)) {
 16485  			break
 16486  		}
 16487  		v.reset(OpAnd64)
 16488  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 16489  		v0.AuxInt = int64ToAuxInt(c - 1)
 16490  		v.AddArg2(n, v0)
 16491  		return true
 16492  	}
 16493  	// match: (Mod64u <t> n (Const64 [-1<<63]))
 16494  	// result: (And64 n (Const64 <t> [1<<63-1]))
 16495  	for {
 16496  		t := v.Type
 16497  		n := v_0
 16498  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != -1<<63 {
 16499  			break
 16500  		}
 16501  		v.reset(OpAnd64)
 16502  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 16503  		v0.AuxInt = int64ToAuxInt(1<<63 - 1)
 16504  		v.AddArg2(n, v0)
 16505  		return true
 16506  	}
 16507  	// match: (Mod64u <t> x (Const64 [c]))
 16508  	// cond: x.Op != OpConst64 && c > 0 && umagicOK64(c)
 16509  	// result: (Sub64 x (Mul64 <t> (Div64u <t> x (Const64 <t> [c])) (Const64 <t> [c])))
 16510  	for {
 16511  		t := v.Type
 16512  		x := v_0
 16513  		if v_1.Op != OpConst64 {
 16514  			break
 16515  		}
 16516  		c := auxIntToInt64(v_1.AuxInt)
 16517  		if !(x.Op != OpConst64 && c > 0 && umagicOK64(c)) {
 16518  			break
 16519  		}
 16520  		v.reset(OpSub64)
 16521  		v0 := b.NewValue0(v.Pos, OpMul64, t)
 16522  		v1 := b.NewValue0(v.Pos, OpDiv64u, t)
 16523  		v2 := b.NewValue0(v.Pos, OpConst64, t)
 16524  		v2.AuxInt = int64ToAuxInt(c)
 16525  		v1.AddArg2(x, v2)
 16526  		v0.AddArg2(v1, v2)
 16527  		v.AddArg2(x, v0)
 16528  		return true
 16529  	}
 16530  	return false
 16531  }
 16532  func rewriteValuegeneric_OpMod8(v *Value) bool {
 16533  	v_1 := v.Args[1]
 16534  	v_0 := v.Args[0]
 16535  	b := v.Block
 16536  	// match: (Mod8 (Const8 [c]) (Const8 [d]))
 16537  	// cond: d != 0
 16538  	// result: (Const8 [c % d])
 16539  	for {
 16540  		if v_0.Op != OpConst8 {
 16541  			break
 16542  		}
 16543  		c := auxIntToInt8(v_0.AuxInt)
 16544  		if v_1.Op != OpConst8 {
 16545  			break
 16546  		}
 16547  		d := auxIntToInt8(v_1.AuxInt)
 16548  		if !(d != 0) {
 16549  			break
 16550  		}
 16551  		v.reset(OpConst8)
 16552  		v.AuxInt = int8ToAuxInt(c % d)
 16553  		return true
 16554  	}
 16555  	// match: (Mod8 <t> n (Const8 [c]))
 16556  	// cond: isNonNegative(n) && isPowerOfTwo(c)
 16557  	// result: (And8 n (Const8 <t> [c-1]))
 16558  	for {
 16559  		t := v.Type
 16560  		n := v_0
 16561  		if v_1.Op != OpConst8 {
 16562  			break
 16563  		}
 16564  		c := auxIntToInt8(v_1.AuxInt)
 16565  		if !(isNonNegative(n) && isPowerOfTwo(c)) {
 16566  			break
 16567  		}
 16568  		v.reset(OpAnd8)
 16569  		v0 := b.NewValue0(v.Pos, OpConst8, t)
 16570  		v0.AuxInt = int8ToAuxInt(c - 1)
 16571  		v.AddArg2(n, v0)
 16572  		return true
 16573  	}
 16574  	// match: (Mod8 <t> n (Const8 [c]))
 16575  	// cond: c < 0 && c != -1<<7
 16576  	// result: (Mod8 <t> n (Const8 <t> [-c]))
 16577  	for {
 16578  		t := v.Type
 16579  		n := v_0
 16580  		if v_1.Op != OpConst8 {
 16581  			break
 16582  		}
 16583  		c := auxIntToInt8(v_1.AuxInt)
 16584  		if !(c < 0 && c != -1<<7) {
 16585  			break
 16586  		}
 16587  		v.reset(OpMod8)
 16588  		v.Type = t
 16589  		v0 := b.NewValue0(v.Pos, OpConst8, t)
 16590  		v0.AuxInt = int8ToAuxInt(-c)
 16591  		v.AddArg2(n, v0)
 16592  		return true
 16593  	}
 16594  	// match: (Mod8 <t> x (Const8 [c]))
 16595  	// cond: x.Op != OpConst8 && (c > 0 || c == -1<<7)
 16596  	// result: (Sub8 x (Mul8 <t> (Div8 <t> x (Const8 <t> [c])) (Const8 <t> [c])))
 16597  	for {
 16598  		t := v.Type
 16599  		x := v_0
 16600  		if v_1.Op != OpConst8 {
 16601  			break
 16602  		}
 16603  		c := auxIntToInt8(v_1.AuxInt)
 16604  		if !(x.Op != OpConst8 && (c > 0 || c == -1<<7)) {
 16605  			break
 16606  		}
 16607  		v.reset(OpSub8)
 16608  		v0 := b.NewValue0(v.Pos, OpMul8, t)
 16609  		v1 := b.NewValue0(v.Pos, OpDiv8, t)
 16610  		v2 := b.NewValue0(v.Pos, OpConst8, t)
 16611  		v2.AuxInt = int8ToAuxInt(c)
 16612  		v1.AddArg2(x, v2)
 16613  		v0.AddArg2(v1, v2)
 16614  		v.AddArg2(x, v0)
 16615  		return true
 16616  	}
 16617  	return false
 16618  }
 16619  func rewriteValuegeneric_OpMod8u(v *Value) bool {
 16620  	v_1 := v.Args[1]
 16621  	v_0 := v.Args[0]
 16622  	b := v.Block
 16623  	// match: (Mod8u (Const8 [c]) (Const8 [d]))
 16624  	// cond: d != 0
 16625  	// result: (Const8 [int8(uint8(c) % uint8(d))])
 16626  	for {
 16627  		if v_0.Op != OpConst8 {
 16628  			break
 16629  		}
 16630  		c := auxIntToInt8(v_0.AuxInt)
 16631  		if v_1.Op != OpConst8 {
 16632  			break
 16633  		}
 16634  		d := auxIntToInt8(v_1.AuxInt)
 16635  		if !(d != 0) {
 16636  			break
 16637  		}
 16638  		v.reset(OpConst8)
 16639  		v.AuxInt = int8ToAuxInt(int8(uint8(c) % uint8(d)))
 16640  		return true
 16641  	}
 16642  	// match: (Mod8u <t> n (Const8 [c]))
 16643  	// cond: isPowerOfTwo(c)
 16644  	// result: (And8 n (Const8 <t> [c-1]))
 16645  	for {
 16646  		t := v.Type
 16647  		n := v_0
 16648  		if v_1.Op != OpConst8 {
 16649  			break
 16650  		}
 16651  		c := auxIntToInt8(v_1.AuxInt)
 16652  		if !(isPowerOfTwo(c)) {
 16653  			break
 16654  		}
 16655  		v.reset(OpAnd8)
 16656  		v0 := b.NewValue0(v.Pos, OpConst8, t)
 16657  		v0.AuxInt = int8ToAuxInt(c - 1)
 16658  		v.AddArg2(n, v0)
 16659  		return true
 16660  	}
 16661  	// match: (Mod8u <t> x (Const8 [c]))
 16662  	// cond: x.Op != OpConst8 && c > 0 && umagicOK8(c)
 16663  	// result: (Sub8 x (Mul8 <t> (Div8u <t> x (Const8 <t> [c])) (Const8 <t> [c])))
 16664  	for {
 16665  		t := v.Type
 16666  		x := v_0
 16667  		if v_1.Op != OpConst8 {
 16668  			break
 16669  		}
 16670  		c := auxIntToInt8(v_1.AuxInt)
 16671  		if !(x.Op != OpConst8 && c > 0 && umagicOK8(c)) {
 16672  			break
 16673  		}
 16674  		v.reset(OpSub8)
 16675  		v0 := b.NewValue0(v.Pos, OpMul8, t)
 16676  		v1 := b.NewValue0(v.Pos, OpDiv8u, t)
 16677  		v2 := b.NewValue0(v.Pos, OpConst8, t)
 16678  		v2.AuxInt = int8ToAuxInt(c)
 16679  		v1.AddArg2(x, v2)
 16680  		v0.AddArg2(v1, v2)
 16681  		v.AddArg2(x, v0)
 16682  		return true
 16683  	}
 16684  	return false
 16685  }
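       // rewriteValuegeneric_OpMove removes or shrinks memory copies: a copy
       // whose source was just zeroed, or that reads a read-only all-zero
       // symbol, becomes a Zero of the destination; earlier writes to the
       // destination that the copy will completely overwrite are dropped; and a
       // copy whose source was just assembled from a small number of stores
       // (possibly under a VarDef) is replaced by the same stores applied
       // directly to the destination.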
 16686  func rewriteValuegeneric_OpMove(v *Value) bool {
 16687  	v_2 := v.Args[2]
 16688  	v_1 := v.Args[1]
 16689  	v_0 := v.Args[0]
 16690  	b := v.Block
 16691  	config := b.Func.Config
 16692  	// match: (Move {t} [n] dst1 src mem:(Zero {t} [n] dst2 _))
 16693  	// cond: isSamePtr(src, dst2)
 16694  	// result: (Zero {t} [n] dst1 mem)
 16695  	for {
 16696  		n := auxIntToInt64(v.AuxInt)
 16697  		t := auxToType(v.Aux)
 16698  		dst1 := v_0
 16699  		src := v_1
 16700  		mem := v_2
 16701  		if mem.Op != OpZero || auxIntToInt64(mem.AuxInt) != n || auxToType(mem.Aux) != t {
 16702  			break
 16703  		}
 16704  		dst2 := mem.Args[0]
 16705  		if !(isSamePtr(src, dst2)) {
 16706  			break
 16707  		}
 16708  		v.reset(OpZero)
 16709  		v.AuxInt = int64ToAuxInt(n)
 16710  		v.Aux = typeToAux(t)
 16711  		v.AddArg2(dst1, mem)
 16712  		return true
 16713  	}
 16714  	// match: (Move {t} [n] dst1 src mem:(VarDef (Zero {t} [n] dst0 _)))
 16715  	// cond: isSamePtr(src, dst0)
 16716  	// result: (Zero {t} [n] dst1 mem)
 16717  	for {
 16718  		n := auxIntToInt64(v.AuxInt)
 16719  		t := auxToType(v.Aux)
 16720  		dst1 := v_0
 16721  		src := v_1
 16722  		mem := v_2
 16723  		if mem.Op != OpVarDef {
 16724  			break
 16725  		}
 16726  		mem_0 := mem.Args[0]
 16727  		if mem_0.Op != OpZero || auxIntToInt64(mem_0.AuxInt) != n || auxToType(mem_0.Aux) != t {
 16728  			break
 16729  		}
 16730  		dst0 := mem_0.Args[0]
 16731  		if !(isSamePtr(src, dst0)) {
 16732  			break
 16733  		}
 16734  		v.reset(OpZero)
 16735  		v.AuxInt = int64ToAuxInt(n)
 16736  		v.Aux = typeToAux(t)
 16737  		v.AddArg2(dst1, mem)
 16738  		return true
 16739  	}
 16740  	// match: (Move {t} [n] dst (Addr {sym} (SB)) mem)
 16741  	// cond: symIsROZero(sym)
 16742  	// result: (Zero {t} [n] dst mem)
 16743  	for {
 16744  		n := auxIntToInt64(v.AuxInt)
 16745  		t := auxToType(v.Aux)
 16746  		dst := v_0
 16747  		if v_1.Op != OpAddr {
 16748  			break
 16749  		}
 16750  		sym := auxToSym(v_1.Aux)
 16751  		v_1_0 := v_1.Args[0]
 16752  		if v_1_0.Op != OpSB {
 16753  			break
 16754  		}
 16755  		mem := v_2
 16756  		if !(symIsROZero(sym)) {
 16757  			break
 16758  		}
 16759  		v.reset(OpZero)
 16760  		v.AuxInt = int64ToAuxInt(n)
 16761  		v.Aux = typeToAux(t)
 16762  		v.AddArg2(dst, mem)
 16763  		return true
 16764  	}
 16765  	// match: (Move {t1} [n] dst1 src1 store:(Store {t2} op:(OffPtr [o2] dst2) _ mem))
 16766  	// cond: isSamePtr(dst1, dst2) && store.Uses == 1 && n >= o2 + t2.Size() && disjoint(src1, n, op, t2.Size()) && clobber(store)
 16767  	// result: (Move {t1} [n] dst1 src1 mem)
 16768  	for {
 16769  		n := auxIntToInt64(v.AuxInt)
 16770  		t1 := auxToType(v.Aux)
 16771  		dst1 := v_0
 16772  		src1 := v_1
 16773  		store := v_2
 16774  		if store.Op != OpStore {
 16775  			break
 16776  		}
 16777  		t2 := auxToType(store.Aux)
 16778  		mem := store.Args[2]
 16779  		op := store.Args[0]
 16780  		if op.Op != OpOffPtr {
 16781  			break
 16782  		}
 16783  		o2 := auxIntToInt64(op.AuxInt)
 16784  		dst2 := op.Args[0]
 16785  		if !(isSamePtr(dst1, dst2) && store.Uses == 1 && n >= o2+t2.Size() && disjoint(src1, n, op, t2.Size()) && clobber(store)) {
 16786  			break
 16787  		}
 16788  		v.reset(OpMove)
 16789  		v.AuxInt = int64ToAuxInt(n)
 16790  		v.Aux = typeToAux(t1)
 16791  		v.AddArg3(dst1, src1, mem)
 16792  		return true
 16793  	}
 16794  	// match: (Move {t} [n] dst1 src1 move:(Move {t} [n] dst2 _ mem))
 16795  	// cond: move.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(move)
 16796  	// result: (Move {t} [n] dst1 src1 mem)
 16797  	for {
 16798  		n := auxIntToInt64(v.AuxInt)
 16799  		t := auxToType(v.Aux)
 16800  		dst1 := v_0
 16801  		src1 := v_1
 16802  		move := v_2
 16803  		if move.Op != OpMove || auxIntToInt64(move.AuxInt) != n || auxToType(move.Aux) != t {
 16804  			break
 16805  		}
 16806  		mem := move.Args[2]
 16807  		dst2 := move.Args[0]
 16808  		if !(move.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(move)) {
 16809  			break
 16810  		}
 16811  		v.reset(OpMove)
 16812  		v.AuxInt = int64ToAuxInt(n)
 16813  		v.Aux = typeToAux(t)
 16814  		v.AddArg3(dst1, src1, mem)
 16815  		return true
 16816  	}
 16817  	// match: (Move {t} [n] dst1 src1 vardef:(VarDef {x} move:(Move {t} [n] dst2 _ mem)))
 16818  	// cond: move.Uses == 1 && vardef.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(move, vardef)
 16819  	// result: (Move {t} [n] dst1 src1 (VarDef {x} mem))
 16820  	for {
 16821  		n := auxIntToInt64(v.AuxInt)
 16822  		t := auxToType(v.Aux)
 16823  		dst1 := v_0
 16824  		src1 := v_1
 16825  		vardef := v_2
 16826  		if vardef.Op != OpVarDef {
 16827  			break
 16828  		}
 16829  		x := auxToSym(vardef.Aux)
 16830  		move := vardef.Args[0]
 16831  		if move.Op != OpMove || auxIntToInt64(move.AuxInt) != n || auxToType(move.Aux) != t {
 16832  			break
 16833  		}
 16834  		mem := move.Args[2]
 16835  		dst2 := move.Args[0]
 16836  		if !(move.Uses == 1 && vardef.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(move, vardef)) {
 16837  			break
 16838  		}
 16839  		v.reset(OpMove)
 16840  		v.AuxInt = int64ToAuxInt(n)
 16841  		v.Aux = typeToAux(t)
 16842  		v0 := b.NewValue0(v.Pos, OpVarDef, types.TypeMem)
 16843  		v0.Aux = symToAux(x)
 16844  		v0.AddArg(mem)
 16845  		v.AddArg3(dst1, src1, v0)
 16846  		return true
 16847  	}
 16848  	// match: (Move {t} [n] dst1 src1 zero:(Zero {t} [n] dst2 mem))
 16849  	// cond: zero.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(zero)
 16850  	// result: (Move {t} [n] dst1 src1 mem)
 16851  	for {
 16852  		n := auxIntToInt64(v.AuxInt)
 16853  		t := auxToType(v.Aux)
 16854  		dst1 := v_0
 16855  		src1 := v_1
 16856  		zero := v_2
 16857  		if zero.Op != OpZero || auxIntToInt64(zero.AuxInt) != n || auxToType(zero.Aux) != t {
 16858  			break
 16859  		}
 16860  		mem := zero.Args[1]
 16861  		dst2 := zero.Args[0]
 16862  		if !(zero.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(zero)) {
 16863  			break
 16864  		}
 16865  		v.reset(OpMove)
 16866  		v.AuxInt = int64ToAuxInt(n)
 16867  		v.Aux = typeToAux(t)
 16868  		v.AddArg3(dst1, src1, mem)
 16869  		return true
 16870  	}
 16871  	// match: (Move {t} [n] dst1 src1 vardef:(VarDef {x} zero:(Zero {t} [n] dst2 mem)))
 16872  	// cond: zero.Uses == 1 && vardef.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(zero, vardef)
 16873  	// result: (Move {t} [n] dst1 src1 (VarDef {x} mem))
 16874  	for {
 16875  		n := auxIntToInt64(v.AuxInt)
 16876  		t := auxToType(v.Aux)
 16877  		dst1 := v_0
 16878  		src1 := v_1
 16879  		vardef := v_2
 16880  		if vardef.Op != OpVarDef {
 16881  			break
 16882  		}
 16883  		x := auxToSym(vardef.Aux)
 16884  		zero := vardef.Args[0]
 16885  		if zero.Op != OpZero || auxIntToInt64(zero.AuxInt) != n || auxToType(zero.Aux) != t {
 16886  			break
 16887  		}
 16888  		mem := zero.Args[1]
 16889  		dst2 := zero.Args[0]
 16890  		if !(zero.Uses == 1 && vardef.Uses == 1 && isSamePtr(dst1, dst2) && disjoint(src1, n, dst2, n) && clobber(zero, vardef)) {
 16891  			break
 16892  		}
 16893  		v.reset(OpMove)
 16894  		v.AuxInt = int64ToAuxInt(n)
 16895  		v.Aux = typeToAux(t)
 16896  		v0 := b.NewValue0(v.Pos, OpVarDef, types.TypeMem)
 16897  		v0.Aux = symToAux(x)
 16898  		v0.AddArg(mem)
 16899  		v.AddArg3(dst1, src1, v0)
 16900  		return true
 16901  	}
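       	// The next group of rules handles a Move whose source memory state is a
       	// chain of two, three or four stores through the source pointer that
       	// together cover the whole object: the copy is replaced by the same
       	// stores performed directly on the destination.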
 16902  	// match: (Move {t1} [n] dst p1 mem:(Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Store {t3} op3:(OffPtr <tt3> [0] p3) d2 _)))
 16903  	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && o2 == t3.Size() && n == t2.Size() + t3.Size()
 16904  	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [0] dst) d2 mem))
 16905  	for {
 16906  		n := auxIntToInt64(v.AuxInt)
 16907  		t1 := auxToType(v.Aux)
 16908  		dst := v_0
 16909  		p1 := v_1
 16910  		mem := v_2
 16911  		if mem.Op != OpStore {
 16912  			break
 16913  		}
 16914  		t2 := auxToType(mem.Aux)
 16915  		_ = mem.Args[2]
 16916  		op2 := mem.Args[0]
 16917  		if op2.Op != OpOffPtr {
 16918  			break
 16919  		}
 16920  		tt2 := op2.Type
 16921  		o2 := auxIntToInt64(op2.AuxInt)
 16922  		p2 := op2.Args[0]
 16923  		d1 := mem.Args[1]
 16924  		mem_2 := mem.Args[2]
 16925  		if mem_2.Op != OpStore {
 16926  			break
 16927  		}
 16928  		t3 := auxToType(mem_2.Aux)
 16929  		d2 := mem_2.Args[1]
 16930  		op3 := mem_2.Args[0]
 16931  		if op3.Op != OpOffPtr {
 16932  			break
 16933  		}
 16934  		tt3 := op3.Type
 16935  		if auxIntToInt64(op3.AuxInt) != 0 {
 16936  			break
 16937  		}
 16938  		p3 := op3.Args[0]
 16939  		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && o2 == t3.Size() && n == t2.Size()+t3.Size()) {
 16940  			break
 16941  		}
 16942  		v.reset(OpStore)
 16943  		v.Aux = typeToAux(t2)
 16944  		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
 16945  		v0.AuxInt = int64ToAuxInt(o2)
 16946  		v0.AddArg(dst)
 16947  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 16948  		v1.Aux = typeToAux(t3)
 16949  		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
 16950  		v2.AuxInt = int64ToAuxInt(0)
 16951  		v2.AddArg(dst)
 16952  		v1.AddArg3(v2, d2, mem)
 16953  		v.AddArg3(v0, d1, v1)
 16954  		return true
 16955  	}
 16956  	// match: (Move {t1} [n] dst p1 mem:(Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Store {t3} op3:(OffPtr <tt3> [o3] p3) d2 (Store {t4} op4:(OffPtr <tt4> [0] p4) d3 _))))
 16957  	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && o3 == t4.Size() && o2-o3 == t3.Size() && n == t2.Size() + t3.Size() + t4.Size()
 16958  	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [0] dst) d3 mem)))
 16959  	for {
 16960  		n := auxIntToInt64(v.AuxInt)
 16961  		t1 := auxToType(v.Aux)
 16962  		dst := v_0
 16963  		p1 := v_1
 16964  		mem := v_2
 16965  		if mem.Op != OpStore {
 16966  			break
 16967  		}
 16968  		t2 := auxToType(mem.Aux)
 16969  		_ = mem.Args[2]
 16970  		op2 := mem.Args[0]
 16971  		if op2.Op != OpOffPtr {
 16972  			break
 16973  		}
 16974  		tt2 := op2.Type
 16975  		o2 := auxIntToInt64(op2.AuxInt)
 16976  		p2 := op2.Args[0]
 16977  		d1 := mem.Args[1]
 16978  		mem_2 := mem.Args[2]
 16979  		if mem_2.Op != OpStore {
 16980  			break
 16981  		}
 16982  		t3 := auxToType(mem_2.Aux)
 16983  		_ = mem_2.Args[2]
 16984  		op3 := mem_2.Args[0]
 16985  		if op3.Op != OpOffPtr {
 16986  			break
 16987  		}
 16988  		tt3 := op3.Type
 16989  		o3 := auxIntToInt64(op3.AuxInt)
 16990  		p3 := op3.Args[0]
 16991  		d2 := mem_2.Args[1]
 16992  		mem_2_2 := mem_2.Args[2]
 16993  		if mem_2_2.Op != OpStore {
 16994  			break
 16995  		}
 16996  		t4 := auxToType(mem_2_2.Aux)
 16997  		d3 := mem_2_2.Args[1]
 16998  		op4 := mem_2_2.Args[0]
 16999  		if op4.Op != OpOffPtr {
 17000  			break
 17001  		}
 17002  		tt4 := op4.Type
 17003  		if auxIntToInt64(op4.AuxInt) != 0 {
 17004  			break
 17005  		}
 17006  		p4 := op4.Args[0]
 17007  		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && o3 == t4.Size() && o2-o3 == t3.Size() && n == t2.Size()+t3.Size()+t4.Size()) {
 17008  			break
 17009  		}
 17010  		v.reset(OpStore)
 17011  		v.Aux = typeToAux(t2)
 17012  		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
 17013  		v0.AuxInt = int64ToAuxInt(o2)
 17014  		v0.AddArg(dst)
 17015  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 17016  		v1.Aux = typeToAux(t3)
 17017  		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
 17018  		v2.AuxInt = int64ToAuxInt(o3)
 17019  		v2.AddArg(dst)
 17020  		v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 17021  		v3.Aux = typeToAux(t4)
 17022  		v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
 17023  		v4.AuxInt = int64ToAuxInt(0)
 17024  		v4.AddArg(dst)
 17025  		v3.AddArg3(v4, d3, mem)
 17026  		v1.AddArg3(v2, d2, v3)
 17027  		v.AddArg3(v0, d1, v1)
 17028  		return true
 17029  	}
 17030  	// match: (Move {t1} [n] dst p1 mem:(Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Store {t3} op3:(OffPtr <tt3> [o3] p3) d2 (Store {t4} op4:(OffPtr <tt4> [o4] p4) d3 (Store {t5} op5:(OffPtr <tt5> [0] p5) d4 _)))))
 17031  	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && o4 == t5.Size() && o3-o4 == t4.Size() && o2-o3 == t3.Size() && n == t2.Size() + t3.Size() + t4.Size() + t5.Size()
 17032  	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [o4] dst) d3 (Store {t5} (OffPtr <tt5> [0] dst) d4 mem))))
 17033  	for {
 17034  		n := auxIntToInt64(v.AuxInt)
 17035  		t1 := auxToType(v.Aux)
 17036  		dst := v_0
 17037  		p1 := v_1
 17038  		mem := v_2
 17039  		if mem.Op != OpStore {
 17040  			break
 17041  		}
 17042  		t2 := auxToType(mem.Aux)
 17043  		_ = mem.Args[2]
 17044  		op2 := mem.Args[0]
 17045  		if op2.Op != OpOffPtr {
 17046  			break
 17047  		}
 17048  		tt2 := op2.Type
 17049  		o2 := auxIntToInt64(op2.AuxInt)
 17050  		p2 := op2.Args[0]
 17051  		d1 := mem.Args[1]
 17052  		mem_2 := mem.Args[2]
 17053  		if mem_2.Op != OpStore {
 17054  			break
 17055  		}
 17056  		t3 := auxToType(mem_2.Aux)
 17057  		_ = mem_2.Args[2]
 17058  		op3 := mem_2.Args[0]
 17059  		if op3.Op != OpOffPtr {
 17060  			break
 17061  		}
 17062  		tt3 := op3.Type
 17063  		o3 := auxIntToInt64(op3.AuxInt)
 17064  		p3 := op3.Args[0]
 17065  		d2 := mem_2.Args[1]
 17066  		mem_2_2 := mem_2.Args[2]
 17067  		if mem_2_2.Op != OpStore {
 17068  			break
 17069  		}
 17070  		t4 := auxToType(mem_2_2.Aux)
 17071  		_ = mem_2_2.Args[2]
 17072  		op4 := mem_2_2.Args[0]
 17073  		if op4.Op != OpOffPtr {
 17074  			break
 17075  		}
 17076  		tt4 := op4.Type
 17077  		o4 := auxIntToInt64(op4.AuxInt)
 17078  		p4 := op4.Args[0]
 17079  		d3 := mem_2_2.Args[1]
 17080  		mem_2_2_2 := mem_2_2.Args[2]
 17081  		if mem_2_2_2.Op != OpStore {
 17082  			break
 17083  		}
 17084  		t5 := auxToType(mem_2_2_2.Aux)
 17085  		d4 := mem_2_2_2.Args[1]
 17086  		op5 := mem_2_2_2.Args[0]
 17087  		if op5.Op != OpOffPtr {
 17088  			break
 17089  		}
 17090  		tt5 := op5.Type
 17091  		if auxIntToInt64(op5.AuxInt) != 0 {
 17092  			break
 17093  		}
 17094  		p5 := op5.Args[0]
 17095  		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && o4 == t5.Size() && o3-o4 == t4.Size() && o2-o3 == t3.Size() && n == t2.Size()+t3.Size()+t4.Size()+t5.Size()) {
 17096  			break
 17097  		}
 17098  		v.reset(OpStore)
 17099  		v.Aux = typeToAux(t2)
 17100  		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
 17101  		v0.AuxInt = int64ToAuxInt(o2)
 17102  		v0.AddArg(dst)
 17103  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 17104  		v1.Aux = typeToAux(t3)
 17105  		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
 17106  		v2.AuxInt = int64ToAuxInt(o3)
 17107  		v2.AddArg(dst)
 17108  		v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 17109  		v3.Aux = typeToAux(t4)
 17110  		v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
 17111  		v4.AuxInt = int64ToAuxInt(o4)
 17112  		v4.AddArg(dst)
 17113  		v5 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 17114  		v5.Aux = typeToAux(t5)
 17115  		v6 := b.NewValue0(v.Pos, OpOffPtr, tt5)
 17116  		v6.AuxInt = int64ToAuxInt(0)
 17117  		v6.AddArg(dst)
 17118  		v5.AddArg3(v6, d4, mem)
 17119  		v3.AddArg3(v4, d3, v5)
 17120  		v1.AddArg3(v2, d2, v3)
 17121  		v.AddArg3(v0, d1, v1)
 17122  		return true
 17123  	}
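       	// The same store-chain rewrites apply when the stores sit under a VarDef.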
 17124  	// match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Store {t3} op3:(OffPtr <tt3> [0] p3) d2 _))))
 17125  	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && o2 == t3.Size() && n == t2.Size() + t3.Size()
 17126  	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [0] dst) d2 mem))
 17127  	for {
 17128  		n := auxIntToInt64(v.AuxInt)
 17129  		t1 := auxToType(v.Aux)
 17130  		dst := v_0
 17131  		p1 := v_1
 17132  		mem := v_2
 17133  		if mem.Op != OpVarDef {
 17134  			break
 17135  		}
 17136  		mem_0 := mem.Args[0]
 17137  		if mem_0.Op != OpStore {
 17138  			break
 17139  		}
 17140  		t2 := auxToType(mem_0.Aux)
 17141  		_ = mem_0.Args[2]
 17142  		op2 := mem_0.Args[0]
 17143  		if op2.Op != OpOffPtr {
 17144  			break
 17145  		}
 17146  		tt2 := op2.Type
 17147  		o2 := auxIntToInt64(op2.AuxInt)
 17148  		p2 := op2.Args[0]
 17149  		d1 := mem_0.Args[1]
 17150  		mem_0_2 := mem_0.Args[2]
 17151  		if mem_0_2.Op != OpStore {
 17152  			break
 17153  		}
 17154  		t3 := auxToType(mem_0_2.Aux)
 17155  		d2 := mem_0_2.Args[1]
 17156  		op3 := mem_0_2.Args[0]
 17157  		if op3.Op != OpOffPtr {
 17158  			break
 17159  		}
 17160  		tt3 := op3.Type
 17161  		if auxIntToInt64(op3.AuxInt) != 0 {
 17162  			break
 17163  		}
 17164  		p3 := op3.Args[0]
 17165  		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && o2 == t3.Size() && n == t2.Size()+t3.Size()) {
 17166  			break
 17167  		}
 17168  		v.reset(OpStore)
 17169  		v.Aux = typeToAux(t2)
 17170  		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
 17171  		v0.AuxInt = int64ToAuxInt(o2)
 17172  		v0.AddArg(dst)
 17173  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 17174  		v1.Aux = typeToAux(t3)
 17175  		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
 17176  		v2.AuxInt = int64ToAuxInt(0)
 17177  		v2.AddArg(dst)
 17178  		v1.AddArg3(v2, d2, mem)
 17179  		v.AddArg3(v0, d1, v1)
 17180  		return true
 17181  	}
 17182  	// match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Store {t3} op3:(OffPtr <tt3> [o3] p3) d2 (Store {t4} op4:(OffPtr <tt4> [0] p4) d3 _)))))
 17183  	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && o3 == t4.Size() && o2-o3 == t3.Size() && n == t2.Size() + t3.Size() + t4.Size()
 17184  	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [0] dst) d3 mem)))
 17185  	for {
 17186  		n := auxIntToInt64(v.AuxInt)
 17187  		t1 := auxToType(v.Aux)
 17188  		dst := v_0
 17189  		p1 := v_1
 17190  		mem := v_2
 17191  		if mem.Op != OpVarDef {
 17192  			break
 17193  		}
 17194  		mem_0 := mem.Args[0]
 17195  		if mem_0.Op != OpStore {
 17196  			break
 17197  		}
 17198  		t2 := auxToType(mem_0.Aux)
 17199  		_ = mem_0.Args[2]
 17200  		op2 := mem_0.Args[0]
 17201  		if op2.Op != OpOffPtr {
 17202  			break
 17203  		}
 17204  		tt2 := op2.Type
 17205  		o2 := auxIntToInt64(op2.AuxInt)
 17206  		p2 := op2.Args[0]
 17207  		d1 := mem_0.Args[1]
 17208  		mem_0_2 := mem_0.Args[2]
 17209  		if mem_0_2.Op != OpStore {
 17210  			break
 17211  		}
 17212  		t3 := auxToType(mem_0_2.Aux)
 17213  		_ = mem_0_2.Args[2]
 17214  		op3 := mem_0_2.Args[0]
 17215  		if op3.Op != OpOffPtr {
 17216  			break
 17217  		}
 17218  		tt3 := op3.Type
 17219  		o3 := auxIntToInt64(op3.AuxInt)
 17220  		p3 := op3.Args[0]
 17221  		d2 := mem_0_2.Args[1]
 17222  		mem_0_2_2 := mem_0_2.Args[2]
 17223  		if mem_0_2_2.Op != OpStore {
 17224  			break
 17225  		}
 17226  		t4 := auxToType(mem_0_2_2.Aux)
 17227  		d3 := mem_0_2_2.Args[1]
 17228  		op4 := mem_0_2_2.Args[0]
 17229  		if op4.Op != OpOffPtr {
 17230  			break
 17231  		}
 17232  		tt4 := op4.Type
 17233  		if auxIntToInt64(op4.AuxInt) != 0 {
 17234  			break
 17235  		}
 17236  		p4 := op4.Args[0]
 17237  		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && o3 == t4.Size() && o2-o3 == t3.Size() && n == t2.Size()+t3.Size()+t4.Size()) {
 17238  			break
 17239  		}
 17240  		v.reset(OpStore)
 17241  		v.Aux = typeToAux(t2)
 17242  		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
 17243  		v0.AuxInt = int64ToAuxInt(o2)
 17244  		v0.AddArg(dst)
 17245  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 17246  		v1.Aux = typeToAux(t3)
 17247  		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
 17248  		v2.AuxInt = int64ToAuxInt(o3)
 17249  		v2.AddArg(dst)
 17250  		v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 17251  		v3.Aux = typeToAux(t4)
 17252  		v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
 17253  		v4.AuxInt = int64ToAuxInt(0)
 17254  		v4.AddArg(dst)
 17255  		v3.AddArg3(v4, d3, mem)
 17256  		v1.AddArg3(v2, d2, v3)
 17257  		v.AddArg3(v0, d1, v1)
 17258  		return true
 17259  	}
 17260  	// match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Store {t3} op3:(OffPtr <tt3> [o3] p3) d2 (Store {t4} op4:(OffPtr <tt4> [o4] p4) d3 (Store {t5} op5:(OffPtr <tt5> [0] p5) d4 _))))))
 17261  	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && o4 == t5.Size() && o3-o4 == t4.Size() && o2-o3 == t3.Size() && n == t2.Size() + t3.Size() + t4.Size() + t5.Size()
 17262  	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [o4] dst) d3 (Store {t5} (OffPtr <tt5> [0] dst) d4 mem))))
 17263  	for {
 17264  		n := auxIntToInt64(v.AuxInt)
 17265  		t1 := auxToType(v.Aux)
 17266  		dst := v_0
 17267  		p1 := v_1
 17268  		mem := v_2
 17269  		if mem.Op != OpVarDef {
 17270  			break
 17271  		}
 17272  		mem_0 := mem.Args[0]
 17273  		if mem_0.Op != OpStore {
 17274  			break
 17275  		}
 17276  		t2 := auxToType(mem_0.Aux)
 17277  		_ = mem_0.Args[2]
 17278  		op2 := mem_0.Args[0]
 17279  		if op2.Op != OpOffPtr {
 17280  			break
 17281  		}
 17282  		tt2 := op2.Type
 17283  		o2 := auxIntToInt64(op2.AuxInt)
 17284  		p2 := op2.Args[0]
 17285  		d1 := mem_0.Args[1]
 17286  		mem_0_2 := mem_0.Args[2]
 17287  		if mem_0_2.Op != OpStore {
 17288  			break
 17289  		}
 17290  		t3 := auxToType(mem_0_2.Aux)
 17291  		_ = mem_0_2.Args[2]
 17292  		op3 := mem_0_2.Args[0]
 17293  		if op3.Op != OpOffPtr {
 17294  			break
 17295  		}
 17296  		tt3 := op3.Type
 17297  		o3 := auxIntToInt64(op3.AuxInt)
 17298  		p3 := op3.Args[0]
 17299  		d2 := mem_0_2.Args[1]
 17300  		mem_0_2_2 := mem_0_2.Args[2]
 17301  		if mem_0_2_2.Op != OpStore {
 17302  			break
 17303  		}
 17304  		t4 := auxToType(mem_0_2_2.Aux)
 17305  		_ = mem_0_2_2.Args[2]
 17306  		op4 := mem_0_2_2.Args[0]
 17307  		if op4.Op != OpOffPtr {
 17308  			break
 17309  		}
 17310  		tt4 := op4.Type
 17311  		o4 := auxIntToInt64(op4.AuxInt)
 17312  		p4 := op4.Args[0]
 17313  		d3 := mem_0_2_2.Args[1]
 17314  		mem_0_2_2_2 := mem_0_2_2.Args[2]
 17315  		if mem_0_2_2_2.Op != OpStore {
 17316  			break
 17317  		}
 17318  		t5 := auxToType(mem_0_2_2_2.Aux)
 17319  		d4 := mem_0_2_2_2.Args[1]
 17320  		op5 := mem_0_2_2_2.Args[0]
 17321  		if op5.Op != OpOffPtr {
 17322  			break
 17323  		}
 17324  		tt5 := op5.Type
 17325  		if auxIntToInt64(op5.AuxInt) != 0 {
 17326  			break
 17327  		}
 17328  		p5 := op5.Args[0]
 17329  		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && o4 == t5.Size() && o3-o4 == t4.Size() && o2-o3 == t3.Size() && n == t2.Size()+t3.Size()+t4.Size()+t5.Size()) {
 17330  			break
 17331  		}
 17332  		v.reset(OpStore)
 17333  		v.Aux = typeToAux(t2)
 17334  		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
 17335  		v0.AuxInt = int64ToAuxInt(o2)
 17336  		v0.AddArg(dst)
 17337  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 17338  		v1.Aux = typeToAux(t3)
 17339  		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
 17340  		v2.AuxInt = int64ToAuxInt(o3)
 17341  		v2.AddArg(dst)
 17342  		v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 17343  		v3.Aux = typeToAux(t4)
 17344  		v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
 17345  		v4.AuxInt = int64ToAuxInt(o4)
 17346  		v4.AddArg(dst)
 17347  		v5 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 17348  		v5.Aux = typeToAux(t5)
 17349  		v6 := b.NewValue0(v.Pos, OpOffPtr, tt5)
 17350  		v6.AuxInt = int64ToAuxInt(0)
 17351  		v6.AddArg(dst)
 17352  		v5.AddArg3(v6, d4, mem)
 17353  		v3.AddArg3(v4, d3, v5)
 17354  		v1.AddArg3(v2, d2, v3)
 17355  		v.AddArg3(v0, d1, v1)
 17356  		return true
 17357  	}
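	// The next four rules rewrite a Move whose source was just built by a
	// short chain of Stores on top of a Zero of the same size: the same
	// stores are issued directly against dst, followed by a Zero of dst,
	// so the data does not have to pass through the source block.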
 17358  	// match: (Move {t1} [n] dst p1 mem:(Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Zero {t3} [n] p3 _)))
 17359  	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && registerizable(b, t2) && n >= o2 + t2.Size()
 17360  	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Zero {t1} [n] dst mem))
 17361  	for {
 17362  		n := auxIntToInt64(v.AuxInt)
 17363  		t1 := auxToType(v.Aux)
 17364  		dst := v_0
 17365  		p1 := v_1
 17366  		mem := v_2
 17367  		if mem.Op != OpStore {
 17368  			break
 17369  		}
 17370  		t2 := auxToType(mem.Aux)
 17371  		_ = mem.Args[2]
 17372  		op2 := mem.Args[0]
 17373  		if op2.Op != OpOffPtr {
 17374  			break
 17375  		}
 17376  		tt2 := op2.Type
 17377  		o2 := auxIntToInt64(op2.AuxInt)
 17378  		p2 := op2.Args[0]
 17379  		d1 := mem.Args[1]
 17380  		mem_2 := mem.Args[2]
 17381  		if mem_2.Op != OpZero || auxIntToInt64(mem_2.AuxInt) != n {
 17382  			break
 17383  		}
 17384  		t3 := auxToType(mem_2.Aux)
 17385  		p3 := mem_2.Args[0]
 17386  		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && registerizable(b, t2) && n >= o2+t2.Size()) {
 17387  			break
 17388  		}
 17389  		v.reset(OpStore)
 17390  		v.Aux = typeToAux(t2)
 17391  		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
 17392  		v0.AuxInt = int64ToAuxInt(o2)
 17393  		v0.AddArg(dst)
 17394  		v1 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
 17395  		v1.AuxInt = int64ToAuxInt(n)
 17396  		v1.Aux = typeToAux(t1)
 17397  		v1.AddArg2(dst, mem)
 17398  		v.AddArg3(v0, d1, v1)
 17399  		return true
 17400  	}
 17401  	// match: (Move {t1} [n] dst p1 mem:(Store {t2} (OffPtr <tt2> [o2] p2) d1 (Store {t3} (OffPtr <tt3> [o3] p3) d2 (Zero {t4} [n] p4 _))))
 17402  	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && n >= o2 + t2.Size() && n >= o3 + t3.Size()
 17403  	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Zero {t1} [n] dst mem)))
 17404  	for {
 17405  		n := auxIntToInt64(v.AuxInt)
 17406  		t1 := auxToType(v.Aux)
 17407  		dst := v_0
 17408  		p1 := v_1
 17409  		mem := v_2
 17410  		if mem.Op != OpStore {
 17411  			break
 17412  		}
 17413  		t2 := auxToType(mem.Aux)
 17414  		_ = mem.Args[2]
 17415  		mem_0 := mem.Args[0]
 17416  		if mem_0.Op != OpOffPtr {
 17417  			break
 17418  		}
 17419  		tt2 := mem_0.Type
 17420  		o2 := auxIntToInt64(mem_0.AuxInt)
 17421  		p2 := mem_0.Args[0]
 17422  		d1 := mem.Args[1]
 17423  		mem_2 := mem.Args[2]
 17424  		if mem_2.Op != OpStore {
 17425  			break
 17426  		}
 17427  		t3 := auxToType(mem_2.Aux)
 17428  		_ = mem_2.Args[2]
 17429  		mem_2_0 := mem_2.Args[0]
 17430  		if mem_2_0.Op != OpOffPtr {
 17431  			break
 17432  		}
 17433  		tt3 := mem_2_0.Type
 17434  		o3 := auxIntToInt64(mem_2_0.AuxInt)
 17435  		p3 := mem_2_0.Args[0]
 17436  		d2 := mem_2.Args[1]
 17437  		mem_2_2 := mem_2.Args[2]
 17438  		if mem_2_2.Op != OpZero || auxIntToInt64(mem_2_2.AuxInt) != n {
 17439  			break
 17440  		}
 17441  		t4 := auxToType(mem_2_2.Aux)
 17442  		p4 := mem_2_2.Args[0]
 17443  		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && n >= o2+t2.Size() && n >= o3+t3.Size()) {
 17444  			break
 17445  		}
 17446  		v.reset(OpStore)
 17447  		v.Aux = typeToAux(t2)
 17448  		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
 17449  		v0.AuxInt = int64ToAuxInt(o2)
 17450  		v0.AddArg(dst)
 17451  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 17452  		v1.Aux = typeToAux(t3)
 17453  		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
 17454  		v2.AuxInt = int64ToAuxInt(o3)
 17455  		v2.AddArg(dst)
 17456  		v3 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
 17457  		v3.AuxInt = int64ToAuxInt(n)
 17458  		v3.Aux = typeToAux(t1)
 17459  		v3.AddArg2(dst, mem)
 17460  		v1.AddArg3(v2, d2, v3)
 17461  		v.AddArg3(v0, d1, v1)
 17462  		return true
 17463  	}
 17464  	// match: (Move {t1} [n] dst p1 mem:(Store {t2} (OffPtr <tt2> [o2] p2) d1 (Store {t3} (OffPtr <tt3> [o3] p3) d2 (Store {t4} (OffPtr <tt4> [o4] p4) d3 (Zero {t5} [n] p5 _)))))
 17465  	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && n >= o2 + t2.Size() && n >= o3 + t3.Size() && n >= o4 + t4.Size()
 17466  	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [o4] dst) d3 (Zero {t1} [n] dst mem))))
 17467  	for {
 17468  		n := auxIntToInt64(v.AuxInt)
 17469  		t1 := auxToType(v.Aux)
 17470  		dst := v_0
 17471  		p1 := v_1
 17472  		mem := v_2
 17473  		if mem.Op != OpStore {
 17474  			break
 17475  		}
 17476  		t2 := auxToType(mem.Aux)
 17477  		_ = mem.Args[2]
 17478  		mem_0 := mem.Args[0]
 17479  		if mem_0.Op != OpOffPtr {
 17480  			break
 17481  		}
 17482  		tt2 := mem_0.Type
 17483  		o2 := auxIntToInt64(mem_0.AuxInt)
 17484  		p2 := mem_0.Args[0]
 17485  		d1 := mem.Args[1]
 17486  		mem_2 := mem.Args[2]
 17487  		if mem_2.Op != OpStore {
 17488  			break
 17489  		}
 17490  		t3 := auxToType(mem_2.Aux)
 17491  		_ = mem_2.Args[2]
 17492  		mem_2_0 := mem_2.Args[0]
 17493  		if mem_2_0.Op != OpOffPtr {
 17494  			break
 17495  		}
 17496  		tt3 := mem_2_0.Type
 17497  		o3 := auxIntToInt64(mem_2_0.AuxInt)
 17498  		p3 := mem_2_0.Args[0]
 17499  		d2 := mem_2.Args[1]
 17500  		mem_2_2 := mem_2.Args[2]
 17501  		if mem_2_2.Op != OpStore {
 17502  			break
 17503  		}
 17504  		t4 := auxToType(mem_2_2.Aux)
 17505  		_ = mem_2_2.Args[2]
 17506  		mem_2_2_0 := mem_2_2.Args[0]
 17507  		if mem_2_2_0.Op != OpOffPtr {
 17508  			break
 17509  		}
 17510  		tt4 := mem_2_2_0.Type
 17511  		o4 := auxIntToInt64(mem_2_2_0.AuxInt)
 17512  		p4 := mem_2_2_0.Args[0]
 17513  		d3 := mem_2_2.Args[1]
 17514  		mem_2_2_2 := mem_2_2.Args[2]
 17515  		if mem_2_2_2.Op != OpZero || auxIntToInt64(mem_2_2_2.AuxInt) != n {
 17516  			break
 17517  		}
 17518  		t5 := auxToType(mem_2_2_2.Aux)
 17519  		p5 := mem_2_2_2.Args[0]
 17520  		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && n >= o2+t2.Size() && n >= o3+t3.Size() && n >= o4+t4.Size()) {
 17521  			break
 17522  		}
 17523  		v.reset(OpStore)
 17524  		v.Aux = typeToAux(t2)
 17525  		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
 17526  		v0.AuxInt = int64ToAuxInt(o2)
 17527  		v0.AddArg(dst)
 17528  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 17529  		v1.Aux = typeToAux(t3)
 17530  		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
 17531  		v2.AuxInt = int64ToAuxInt(o3)
 17532  		v2.AddArg(dst)
 17533  		v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 17534  		v3.Aux = typeToAux(t4)
 17535  		v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
 17536  		v4.AuxInt = int64ToAuxInt(o4)
 17537  		v4.AddArg(dst)
 17538  		v5 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
 17539  		v5.AuxInt = int64ToAuxInt(n)
 17540  		v5.Aux = typeToAux(t1)
 17541  		v5.AddArg2(dst, mem)
 17542  		v3.AddArg3(v4, d3, v5)
 17543  		v1.AddArg3(v2, d2, v3)
 17544  		v.AddArg3(v0, d1, v1)
 17545  		return true
 17546  	}
 17547  	// match: (Move {t1} [n] dst p1 mem:(Store {t2} (OffPtr <tt2> [o2] p2) d1 (Store {t3} (OffPtr <tt3> [o3] p3) d2 (Store {t4} (OffPtr <tt4> [o4] p4) d3 (Store {t5} (OffPtr <tt5> [o5] p5) d4 (Zero {t6} [n] p6 _))))))
 17548  	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && isSamePtr(p5, p6) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && t6.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && n >= o2 + t2.Size() && n >= o3 + t3.Size() && n >= o4 + t4.Size() && n >= o5 + t5.Size()
 17549  	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [o4] dst) d3 (Store {t5} (OffPtr <tt5> [o5] dst) d4 (Zero {t1} [n] dst mem)))))
 17550  	for {
 17551  		n := auxIntToInt64(v.AuxInt)
 17552  		t1 := auxToType(v.Aux)
 17553  		dst := v_0
 17554  		p1 := v_1
 17555  		mem := v_2
 17556  		if mem.Op != OpStore {
 17557  			break
 17558  		}
 17559  		t2 := auxToType(mem.Aux)
 17560  		_ = mem.Args[2]
 17561  		mem_0 := mem.Args[0]
 17562  		if mem_0.Op != OpOffPtr {
 17563  			break
 17564  		}
 17565  		tt2 := mem_0.Type
 17566  		o2 := auxIntToInt64(mem_0.AuxInt)
 17567  		p2 := mem_0.Args[0]
 17568  		d1 := mem.Args[1]
 17569  		mem_2 := mem.Args[2]
 17570  		if mem_2.Op != OpStore {
 17571  			break
 17572  		}
 17573  		t3 := auxToType(mem_2.Aux)
 17574  		_ = mem_2.Args[2]
 17575  		mem_2_0 := mem_2.Args[0]
 17576  		if mem_2_0.Op != OpOffPtr {
 17577  			break
 17578  		}
 17579  		tt3 := mem_2_0.Type
 17580  		o3 := auxIntToInt64(mem_2_0.AuxInt)
 17581  		p3 := mem_2_0.Args[0]
 17582  		d2 := mem_2.Args[1]
 17583  		mem_2_2 := mem_2.Args[2]
 17584  		if mem_2_2.Op != OpStore {
 17585  			break
 17586  		}
 17587  		t4 := auxToType(mem_2_2.Aux)
 17588  		_ = mem_2_2.Args[2]
 17589  		mem_2_2_0 := mem_2_2.Args[0]
 17590  		if mem_2_2_0.Op != OpOffPtr {
 17591  			break
 17592  		}
 17593  		tt4 := mem_2_2_0.Type
 17594  		o4 := auxIntToInt64(mem_2_2_0.AuxInt)
 17595  		p4 := mem_2_2_0.Args[0]
 17596  		d3 := mem_2_2.Args[1]
 17597  		mem_2_2_2 := mem_2_2.Args[2]
 17598  		if mem_2_2_2.Op != OpStore {
 17599  			break
 17600  		}
 17601  		t5 := auxToType(mem_2_2_2.Aux)
 17602  		_ = mem_2_2_2.Args[2]
 17603  		mem_2_2_2_0 := mem_2_2_2.Args[0]
 17604  		if mem_2_2_2_0.Op != OpOffPtr {
 17605  			break
 17606  		}
 17607  		tt5 := mem_2_2_2_0.Type
 17608  		o5 := auxIntToInt64(mem_2_2_2_0.AuxInt)
 17609  		p5 := mem_2_2_2_0.Args[0]
 17610  		d4 := mem_2_2_2.Args[1]
 17611  		mem_2_2_2_2 := mem_2_2_2.Args[2]
 17612  		if mem_2_2_2_2.Op != OpZero || auxIntToInt64(mem_2_2_2_2.AuxInt) != n {
 17613  			break
 17614  		}
 17615  		t6 := auxToType(mem_2_2_2_2.Aux)
 17616  		p6 := mem_2_2_2_2.Args[0]
 17617  		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && isSamePtr(p5, p6) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && t6.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && n >= o2+t2.Size() && n >= o3+t3.Size() && n >= o4+t4.Size() && n >= o5+t5.Size()) {
 17618  			break
 17619  		}
 17620  		v.reset(OpStore)
 17621  		v.Aux = typeToAux(t2)
 17622  		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
 17623  		v0.AuxInt = int64ToAuxInt(o2)
 17624  		v0.AddArg(dst)
 17625  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 17626  		v1.Aux = typeToAux(t3)
 17627  		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
 17628  		v2.AuxInt = int64ToAuxInt(o3)
 17629  		v2.AddArg(dst)
 17630  		v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 17631  		v3.Aux = typeToAux(t4)
 17632  		v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
 17633  		v4.AuxInt = int64ToAuxInt(o4)
 17634  		v4.AddArg(dst)
 17635  		v5 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 17636  		v5.Aux = typeToAux(t5)
 17637  		v6 := b.NewValue0(v.Pos, OpOffPtr, tt5)
 17638  		v6.AuxInt = int64ToAuxInt(o5)
 17639  		v6.AddArg(dst)
 17640  		v7 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
 17641  		v7.AuxInt = int64ToAuxInt(n)
 17642  		v7.Aux = typeToAux(t1)
 17643  		v7.AddArg2(dst, mem)
 17644  		v5.AddArg3(v6, d4, v7)
 17645  		v3.AddArg3(v4, d3, v5)
 17646  		v1.AddArg3(v2, d2, v3)
 17647  		v.AddArg3(v0, d1, v1)
 17648  		return true
 17649  	}
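	// The next four rules are the same Store+Zero forwarding as above,
	// with the source's store chain additionally wrapped in a VarDef.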
 17650  	// match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Zero {t3} [n] p3 _))))
 17651  	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && registerizable(b, t2) && n >= o2 + t2.Size()
 17652  	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Zero {t1} [n] dst mem))
 17653  	for {
 17654  		n := auxIntToInt64(v.AuxInt)
 17655  		t1 := auxToType(v.Aux)
 17656  		dst := v_0
 17657  		p1 := v_1
 17658  		mem := v_2
 17659  		if mem.Op != OpVarDef {
 17660  			break
 17661  		}
 17662  		mem_0 := mem.Args[0]
 17663  		if mem_0.Op != OpStore {
 17664  			break
 17665  		}
 17666  		t2 := auxToType(mem_0.Aux)
 17667  		_ = mem_0.Args[2]
 17668  		op2 := mem_0.Args[0]
 17669  		if op2.Op != OpOffPtr {
 17670  			break
 17671  		}
 17672  		tt2 := op2.Type
 17673  		o2 := auxIntToInt64(op2.AuxInt)
 17674  		p2 := op2.Args[0]
 17675  		d1 := mem_0.Args[1]
 17676  		mem_0_2 := mem_0.Args[2]
 17677  		if mem_0_2.Op != OpZero || auxIntToInt64(mem_0_2.AuxInt) != n {
 17678  			break
 17679  		}
 17680  		t3 := auxToType(mem_0_2.Aux)
 17681  		p3 := mem_0_2.Args[0]
 17682  		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && registerizable(b, t2) && n >= o2+t2.Size()) {
 17683  			break
 17684  		}
 17685  		v.reset(OpStore)
 17686  		v.Aux = typeToAux(t2)
 17687  		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
 17688  		v0.AuxInt = int64ToAuxInt(o2)
 17689  		v0.AddArg(dst)
 17690  		v1 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
 17691  		v1.AuxInt = int64ToAuxInt(n)
 17692  		v1.Aux = typeToAux(t1)
 17693  		v1.AddArg2(dst, mem)
 17694  		v.AddArg3(v0, d1, v1)
 17695  		return true
 17696  	}
 17697  	// match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} (OffPtr <tt2> [o2] p2) d1 (Store {t3} (OffPtr <tt3> [o3] p3) d2 (Zero {t4} [n] p4 _)))))
 17698  	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && n >= o2 + t2.Size() && n >= o3 + t3.Size()
 17699  	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Zero {t1} [n] dst mem)))
 17700  	for {
 17701  		n := auxIntToInt64(v.AuxInt)
 17702  		t1 := auxToType(v.Aux)
 17703  		dst := v_0
 17704  		p1 := v_1
 17705  		mem := v_2
 17706  		if mem.Op != OpVarDef {
 17707  			break
 17708  		}
 17709  		mem_0 := mem.Args[0]
 17710  		if mem_0.Op != OpStore {
 17711  			break
 17712  		}
 17713  		t2 := auxToType(mem_0.Aux)
 17714  		_ = mem_0.Args[2]
 17715  		mem_0_0 := mem_0.Args[0]
 17716  		if mem_0_0.Op != OpOffPtr {
 17717  			break
 17718  		}
 17719  		tt2 := mem_0_0.Type
 17720  		o2 := auxIntToInt64(mem_0_0.AuxInt)
 17721  		p2 := mem_0_0.Args[0]
 17722  		d1 := mem_0.Args[1]
 17723  		mem_0_2 := mem_0.Args[2]
 17724  		if mem_0_2.Op != OpStore {
 17725  			break
 17726  		}
 17727  		t3 := auxToType(mem_0_2.Aux)
 17728  		_ = mem_0_2.Args[2]
 17729  		mem_0_2_0 := mem_0_2.Args[0]
 17730  		if mem_0_2_0.Op != OpOffPtr {
 17731  			break
 17732  		}
 17733  		tt3 := mem_0_2_0.Type
 17734  		o3 := auxIntToInt64(mem_0_2_0.AuxInt)
 17735  		p3 := mem_0_2_0.Args[0]
 17736  		d2 := mem_0_2.Args[1]
 17737  		mem_0_2_2 := mem_0_2.Args[2]
 17738  		if mem_0_2_2.Op != OpZero || auxIntToInt64(mem_0_2_2.AuxInt) != n {
 17739  			break
 17740  		}
 17741  		t4 := auxToType(mem_0_2_2.Aux)
 17742  		p4 := mem_0_2_2.Args[0]
 17743  		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && n >= o2+t2.Size() && n >= o3+t3.Size()) {
 17744  			break
 17745  		}
 17746  		v.reset(OpStore)
 17747  		v.Aux = typeToAux(t2)
 17748  		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
 17749  		v0.AuxInt = int64ToAuxInt(o2)
 17750  		v0.AddArg(dst)
 17751  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 17752  		v1.Aux = typeToAux(t3)
 17753  		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
 17754  		v2.AuxInt = int64ToAuxInt(o3)
 17755  		v2.AddArg(dst)
 17756  		v3 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
 17757  		v3.AuxInt = int64ToAuxInt(n)
 17758  		v3.Aux = typeToAux(t1)
 17759  		v3.AddArg2(dst, mem)
 17760  		v1.AddArg3(v2, d2, v3)
 17761  		v.AddArg3(v0, d1, v1)
 17762  		return true
 17763  	}
 17764  	// match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} (OffPtr <tt2> [o2] p2) d1 (Store {t3} (OffPtr <tt3> [o3] p3) d2 (Store {t4} (OffPtr <tt4> [o4] p4) d3 (Zero {t5} [n] p5 _))))))
 17765  	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && n >= o2 + t2.Size() && n >= o3 + t3.Size() && n >= o4 + t4.Size()
 17766  	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [o4] dst) d3 (Zero {t1} [n] dst mem))))
 17767  	for {
 17768  		n := auxIntToInt64(v.AuxInt)
 17769  		t1 := auxToType(v.Aux)
 17770  		dst := v_0
 17771  		p1 := v_1
 17772  		mem := v_2
 17773  		if mem.Op != OpVarDef {
 17774  			break
 17775  		}
 17776  		mem_0 := mem.Args[0]
 17777  		if mem_0.Op != OpStore {
 17778  			break
 17779  		}
 17780  		t2 := auxToType(mem_0.Aux)
 17781  		_ = mem_0.Args[2]
 17782  		mem_0_0 := mem_0.Args[0]
 17783  		if mem_0_0.Op != OpOffPtr {
 17784  			break
 17785  		}
 17786  		tt2 := mem_0_0.Type
 17787  		o2 := auxIntToInt64(mem_0_0.AuxInt)
 17788  		p2 := mem_0_0.Args[0]
 17789  		d1 := mem_0.Args[1]
 17790  		mem_0_2 := mem_0.Args[2]
 17791  		if mem_0_2.Op != OpStore {
 17792  			break
 17793  		}
 17794  		t3 := auxToType(mem_0_2.Aux)
 17795  		_ = mem_0_2.Args[2]
 17796  		mem_0_2_0 := mem_0_2.Args[0]
 17797  		if mem_0_2_0.Op != OpOffPtr {
 17798  			break
 17799  		}
 17800  		tt3 := mem_0_2_0.Type
 17801  		o3 := auxIntToInt64(mem_0_2_0.AuxInt)
 17802  		p3 := mem_0_2_0.Args[0]
 17803  		d2 := mem_0_2.Args[1]
 17804  		mem_0_2_2 := mem_0_2.Args[2]
 17805  		if mem_0_2_2.Op != OpStore {
 17806  			break
 17807  		}
 17808  		t4 := auxToType(mem_0_2_2.Aux)
 17809  		_ = mem_0_2_2.Args[2]
 17810  		mem_0_2_2_0 := mem_0_2_2.Args[0]
 17811  		if mem_0_2_2_0.Op != OpOffPtr {
 17812  			break
 17813  		}
 17814  		tt4 := mem_0_2_2_0.Type
 17815  		o4 := auxIntToInt64(mem_0_2_2_0.AuxInt)
 17816  		p4 := mem_0_2_2_0.Args[0]
 17817  		d3 := mem_0_2_2.Args[1]
 17818  		mem_0_2_2_2 := mem_0_2_2.Args[2]
 17819  		if mem_0_2_2_2.Op != OpZero || auxIntToInt64(mem_0_2_2_2.AuxInt) != n {
 17820  			break
 17821  		}
 17822  		t5 := auxToType(mem_0_2_2_2.Aux)
 17823  		p5 := mem_0_2_2_2.Args[0]
 17824  		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && n >= o2+t2.Size() && n >= o3+t3.Size() && n >= o4+t4.Size()) {
 17825  			break
 17826  		}
 17827  		v.reset(OpStore)
 17828  		v.Aux = typeToAux(t2)
 17829  		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
 17830  		v0.AuxInt = int64ToAuxInt(o2)
 17831  		v0.AddArg(dst)
 17832  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 17833  		v1.Aux = typeToAux(t3)
 17834  		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
 17835  		v2.AuxInt = int64ToAuxInt(o3)
 17836  		v2.AddArg(dst)
 17837  		v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 17838  		v3.Aux = typeToAux(t4)
 17839  		v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
 17840  		v4.AuxInt = int64ToAuxInt(o4)
 17841  		v4.AddArg(dst)
 17842  		v5 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
 17843  		v5.AuxInt = int64ToAuxInt(n)
 17844  		v5.Aux = typeToAux(t1)
 17845  		v5.AddArg2(dst, mem)
 17846  		v3.AddArg3(v4, d3, v5)
 17847  		v1.AddArg3(v2, d2, v3)
 17848  		v.AddArg3(v0, d1, v1)
 17849  		return true
 17850  	}
 17851  	// match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} (OffPtr <tt2> [o2] p2) d1 (Store {t3} (OffPtr <tt3> [o3] p3) d2 (Store {t4} (OffPtr <tt4> [o4] p4) d3 (Store {t5} (OffPtr <tt5> [o5] p5) d4 (Zero {t6} [n] p6 _)))))))
 17852  	// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && isSamePtr(p5, p6) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && t6.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && n >= o2 + t2.Size() && n >= o3 + t3.Size() && n >= o4 + t4.Size() && n >= o5 + t5.Size()
 17853  	// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [o4] dst) d3 (Store {t5} (OffPtr <tt5> [o5] dst) d4 (Zero {t1} [n] dst mem)))))
 17854  	for {
 17855  		n := auxIntToInt64(v.AuxInt)
 17856  		t1 := auxToType(v.Aux)
 17857  		dst := v_0
 17858  		p1 := v_1
 17859  		mem := v_2
 17860  		if mem.Op != OpVarDef {
 17861  			break
 17862  		}
 17863  		mem_0 := mem.Args[0]
 17864  		if mem_0.Op != OpStore {
 17865  			break
 17866  		}
 17867  		t2 := auxToType(mem_0.Aux)
 17868  		_ = mem_0.Args[2]
 17869  		mem_0_0 := mem_0.Args[0]
 17870  		if mem_0_0.Op != OpOffPtr {
 17871  			break
 17872  		}
 17873  		tt2 := mem_0_0.Type
 17874  		o2 := auxIntToInt64(mem_0_0.AuxInt)
 17875  		p2 := mem_0_0.Args[0]
 17876  		d1 := mem_0.Args[1]
 17877  		mem_0_2 := mem_0.Args[2]
 17878  		if mem_0_2.Op != OpStore {
 17879  			break
 17880  		}
 17881  		t3 := auxToType(mem_0_2.Aux)
 17882  		_ = mem_0_2.Args[2]
 17883  		mem_0_2_0 := mem_0_2.Args[0]
 17884  		if mem_0_2_0.Op != OpOffPtr {
 17885  			break
 17886  		}
 17887  		tt3 := mem_0_2_0.Type
 17888  		o3 := auxIntToInt64(mem_0_2_0.AuxInt)
 17889  		p3 := mem_0_2_0.Args[0]
 17890  		d2 := mem_0_2.Args[1]
 17891  		mem_0_2_2 := mem_0_2.Args[2]
 17892  		if mem_0_2_2.Op != OpStore {
 17893  			break
 17894  		}
 17895  		t4 := auxToType(mem_0_2_2.Aux)
 17896  		_ = mem_0_2_2.Args[2]
 17897  		mem_0_2_2_0 := mem_0_2_2.Args[0]
 17898  		if mem_0_2_2_0.Op != OpOffPtr {
 17899  			break
 17900  		}
 17901  		tt4 := mem_0_2_2_0.Type
 17902  		o4 := auxIntToInt64(mem_0_2_2_0.AuxInt)
 17903  		p4 := mem_0_2_2_0.Args[0]
 17904  		d3 := mem_0_2_2.Args[1]
 17905  		mem_0_2_2_2 := mem_0_2_2.Args[2]
 17906  		if mem_0_2_2_2.Op != OpStore {
 17907  			break
 17908  		}
 17909  		t5 := auxToType(mem_0_2_2_2.Aux)
 17910  		_ = mem_0_2_2_2.Args[2]
 17911  		mem_0_2_2_2_0 := mem_0_2_2_2.Args[0]
 17912  		if mem_0_2_2_2_0.Op != OpOffPtr {
 17913  			break
 17914  		}
 17915  		tt5 := mem_0_2_2_2_0.Type
 17916  		o5 := auxIntToInt64(mem_0_2_2_2_0.AuxInt)
 17917  		p5 := mem_0_2_2_2_0.Args[0]
 17918  		d4 := mem_0_2_2_2.Args[1]
 17919  		mem_0_2_2_2_2 := mem_0_2_2_2.Args[2]
 17920  		if mem_0_2_2_2_2.Op != OpZero || auxIntToInt64(mem_0_2_2_2_2.AuxInt) != n {
 17921  			break
 17922  		}
 17923  		t6 := auxToType(mem_0_2_2_2_2.Aux)
 17924  		p6 := mem_0_2_2_2_2.Args[0]
 17925  		if !(isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && isSamePtr(p5, p6) && t2.Alignment() <= t1.Alignment() && t3.Alignment() <= t1.Alignment() && t4.Alignment() <= t1.Alignment() && t5.Alignment() <= t1.Alignment() && t6.Alignment() <= t1.Alignment() && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && n >= o2+t2.Size() && n >= o3+t3.Size() && n >= o4+t4.Size() && n >= o5+t5.Size()) {
 17926  			break
 17927  		}
 17928  		v.reset(OpStore)
 17929  		v.Aux = typeToAux(t2)
 17930  		v0 := b.NewValue0(v.Pos, OpOffPtr, tt2)
 17931  		v0.AuxInt = int64ToAuxInt(o2)
 17932  		v0.AddArg(dst)
 17933  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 17934  		v1.Aux = typeToAux(t3)
 17935  		v2 := b.NewValue0(v.Pos, OpOffPtr, tt3)
 17936  		v2.AuxInt = int64ToAuxInt(o3)
 17937  		v2.AddArg(dst)
 17938  		v3 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 17939  		v3.Aux = typeToAux(t4)
 17940  		v4 := b.NewValue0(v.Pos, OpOffPtr, tt4)
 17941  		v4.AuxInt = int64ToAuxInt(o4)
 17942  		v4.AddArg(dst)
 17943  		v5 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 17944  		v5.Aux = typeToAux(t5)
 17945  		v6 := b.NewValue0(v.Pos, OpOffPtr, tt5)
 17946  		v6.AuxInt = int64ToAuxInt(o5)
 17947  		v6.AddArg(dst)
 17948  		v7 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
 17949  		v7.AuxInt = int64ToAuxInt(n)
 17950  		v7.Aux = typeToAux(t1)
 17951  		v7.AddArg2(dst, mem)
 17952  		v5.AddArg3(v6, d4, v7)
 17953  		v3.AddArg3(v4, d3, v5)
 17954  		v1.AddArg3(v2, d2, v3)
 17955  		v.AddArg3(v0, d1, v1)
 17956  		return true
 17957  	}
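	// Forward a copy made through a temporary: when the memory moved into
	// dst was itself just Move'd from src into the same temporary, and src
	// is a non-volatile stack slot disjoint from the temporary (and either
	// disjoint from dst or qualifying as an inlinable memmove), copy
	// directly from src to dst instead.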
 17958  	// match: (Move {t1} [s] dst tmp1 midmem:(Move {t2} [s] tmp2 src _))
 17959  	// cond: t1.Compare(t2) == types.CMPeq && isSamePtr(tmp1, tmp2) && isStackPtr(src) && !isVolatile(src) && disjoint(src, s, tmp2, s) && (disjoint(src, s, dst, s) || isInlinableMemmove(dst, src, s, config))
 17960  	// result: (Move {t1} [s] dst src midmem)
 17961  	for {
 17962  		s := auxIntToInt64(v.AuxInt)
 17963  		t1 := auxToType(v.Aux)
 17964  		dst := v_0
 17965  		tmp1 := v_1
 17966  		midmem := v_2
 17967  		if midmem.Op != OpMove || auxIntToInt64(midmem.AuxInt) != s {
 17968  			break
 17969  		}
 17970  		t2 := auxToType(midmem.Aux)
 17971  		src := midmem.Args[1]
 17972  		tmp2 := midmem.Args[0]
 17973  		if !(t1.Compare(t2) == types.CMPeq && isSamePtr(tmp1, tmp2) && isStackPtr(src) && !isVolatile(src) && disjoint(src, s, tmp2, s) && (disjoint(src, s, dst, s) || isInlinableMemmove(dst, src, s, config))) {
 17974  			break
 17975  		}
 17976  		v.reset(OpMove)
 17977  		v.AuxInt = int64ToAuxInt(s)
 17978  		v.Aux = typeToAux(t1)
 17979  		v.AddArg3(dst, src, midmem)
 17980  		return true
 17981  	}
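	// Same forwarding with the inner Move wrapped in a VarDef.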
 17982  	// match: (Move {t1} [s] dst tmp1 midmem:(VarDef (Move {t2} [s] tmp2 src _)))
 17983  	// cond: t1.Compare(t2) == types.CMPeq && isSamePtr(tmp1, tmp2) && isStackPtr(src) && !isVolatile(src) && disjoint(src, s, tmp2, s) && (disjoint(src, s, dst, s) || isInlinableMemmove(dst, src, s, config))
 17984  	// result: (Move {t1} [s] dst src midmem)
 17985  	for {
 17986  		s := auxIntToInt64(v.AuxInt)
 17987  		t1 := auxToType(v.Aux)
 17988  		dst := v_0
 17989  		tmp1 := v_1
 17990  		midmem := v_2
 17991  		if midmem.Op != OpVarDef {
 17992  			break
 17993  		}
 17994  		midmem_0 := midmem.Args[0]
 17995  		if midmem_0.Op != OpMove || auxIntToInt64(midmem_0.AuxInt) != s {
 17996  			break
 17997  		}
 17998  		t2 := auxToType(midmem_0.Aux)
 17999  		src := midmem_0.Args[1]
 18000  		tmp2 := midmem_0.Args[0]
 18001  		if !(t1.Compare(t2) == types.CMPeq && isSamePtr(tmp1, tmp2) && isStackPtr(src) && !isVolatile(src) && disjoint(src, s, tmp2, s) && (disjoint(src, s, dst, s) || isInlinableMemmove(dst, src, s, config))) {
 18002  			break
 18003  		}
 18004  		v.reset(OpMove)
 18005  		v.AuxInt = int64ToAuxInt(s)
 18006  		v.Aux = typeToAux(t1)
 18007  		v.AddArg3(dst, src, midmem)
 18008  		return true
 18009  	}
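	// A Move whose destination and source are the same pointer copies a
	// block onto itself; it reduces to the incoming memory state.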
 18010  	// match: (Move dst src mem)
 18011  	// cond: isSamePtr(dst, src)
 18012  	// result: mem
 18013  	for {
 18014  		dst := v_0
 18015  		src := v_1
 18016  		mem := v_2
 18017  		if !(isSamePtr(dst, src)) {
 18018  			break
 18019  		}
 18020  		v.copyOf(mem)
 18021  		return true
 18022  	}
 18023  	return false
 18024  }
 18025  func rewriteValuegeneric_OpMul16(v *Value) bool {
 18026  	v_1 := v.Args[1]
 18027  	v_0 := v.Args[0]
 18028  	b := v.Block
 18029  	typ := &b.Func.Config.Types
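	// Constant-fold the product of two constants. The Go int16
	// multiplication below wraps on overflow, matching the 16-bit result.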
 18030  	// match: (Mul16 (Const16 [c]) (Const16 [d]))
 18031  	// result: (Const16 [c*d])
 18032  	for {
 18033  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18034  			if v_0.Op != OpConst16 {
 18035  				continue
 18036  			}
 18037  			c := auxIntToInt16(v_0.AuxInt)
 18038  			if v_1.Op != OpConst16 {
 18039  				continue
 18040  			}
 18041  			d := auxIntToInt16(v_1.AuxInt)
 18042  			v.reset(OpConst16)
 18043  			v.AuxInt = int16ToAuxInt(c * d)
 18044  			return true
 18045  		}
 18046  		break
 18047  	}
 18048  	// match: (Mul16 (Const16 [1]) x)
 18049  	// result: x
 18050  	for {
 18051  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18052  			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 1 {
 18053  				continue
 18054  			}
 18055  			x := v_1
 18056  			v.copyOf(x)
 18057  			return true
 18058  		}
 18059  		break
 18060  	}
 18061  	// match: (Mul16 (Const16 [-1]) x)
 18062  	// result: (Neg16 x)
 18063  	for {
 18064  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18065  			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != -1 {
 18066  				continue
 18067  			}
 18068  			x := v_1
 18069  			v.reset(OpNeg16)
 18070  			v.AddArg(x)
 18071  			return true
 18072  		}
 18073  		break
 18074  	}
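	// Strength-reduce multiplication by a power of two to a shift, e.g.
	// (Mul16 n (Const16 [8])) becomes (Lsh16x64 n (Const64 [3])). The rule
	// after it handles negative powers of two by shifting and then negating.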
 18075  	// match: (Mul16 <t> n (Const16 [c]))
 18076  	// cond: isPowerOfTwo(c)
 18077  	// result: (Lsh16x64 <t> n (Const64 <typ.UInt64> [log16(c)]))
 18078  	for {
 18079  		t := v.Type
 18080  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18081  			n := v_0
 18082  			if v_1.Op != OpConst16 {
 18083  				continue
 18084  			}
 18085  			c := auxIntToInt16(v_1.AuxInt)
 18086  			if !(isPowerOfTwo(c)) {
 18087  				continue
 18088  			}
 18089  			v.reset(OpLsh16x64)
 18090  			v.Type = t
 18091  			v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
 18092  			v0.AuxInt = int64ToAuxInt(log16(c))
 18093  			v.AddArg2(n, v0)
 18094  			return true
 18095  		}
 18096  		break
 18097  	}
 18098  	// match: (Mul16 <t> n (Const16 [c]))
 18099  	// cond: t.IsSigned() && isPowerOfTwo(-c)
 18100  	// result: (Neg16 (Lsh16x64 <t> n (Const64 <typ.UInt64> [log16(-c)])))
 18101  	for {
 18102  		t := v.Type
 18103  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18104  			n := v_0
 18105  			if v_1.Op != OpConst16 {
 18106  				continue
 18107  			}
 18108  			c := auxIntToInt16(v_1.AuxInt)
 18109  			if !(t.IsSigned() && isPowerOfTwo(-c)) {
 18110  				continue
 18111  			}
 18112  			v.reset(OpNeg16)
 18113  			v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
 18114  			v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
 18115  			v1.AuxInt = int64ToAuxInt(log16(-c))
 18116  			v0.AddArg2(n, v1)
 18117  			v.AddArg(v0)
 18118  			return true
 18119  		}
 18120  		break
 18121  	}
 18122  	// match: (Mul16 (Const16 [0]) _)
 18123  	// result: (Const16 [0])
 18124  	for {
 18125  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18126  			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
 18127  				continue
 18128  			}
 18129  			v.reset(OpConst16)
 18130  			v.AuxInt = int16ToAuxInt(0)
 18131  			return true
 18132  		}
 18133  		break
 18134  	}
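	// Re-associate so the constant factor floats to the outer multiply,
	// where the constant-merging rule below can combine it with another
	// constant.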
 18135  	// match: (Mul16 (Mul16 i:(Const16 <t>) z) x)
 18136  	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
 18137  	// result: (Mul16 i (Mul16 <t> x z))
 18138  	for {
 18139  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18140  			if v_0.Op != OpMul16 {
 18141  				continue
 18142  			}
 18143  			_ = v_0.Args[1]
 18144  			v_0_0 := v_0.Args[0]
 18145  			v_0_1 := v_0.Args[1]
 18146  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 18147  				i := v_0_0
 18148  				if i.Op != OpConst16 {
 18149  					continue
 18150  				}
 18151  				t := i.Type
 18152  				z := v_0_1
 18153  				x := v_1
 18154  				if !(z.Op != OpConst16 && x.Op != OpConst16) {
 18155  					continue
 18156  				}
 18157  				v.reset(OpMul16)
 18158  				v0 := b.NewValue0(v.Pos, OpMul16, t)
 18159  				v0.AddArg2(x, z)
 18160  				v.AddArg2(i, v0)
 18161  				return true
 18162  			}
 18163  		}
 18164  		break
 18165  	}
 18166  	// match: (Mul16 (Const16 <t> [c]) (Mul16 (Const16 <t> [d]) x))
 18167  	// result: (Mul16 (Const16 <t> [c*d]) x)
 18168  	for {
 18169  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18170  			if v_0.Op != OpConst16 {
 18171  				continue
 18172  			}
 18173  			t := v_0.Type
 18174  			c := auxIntToInt16(v_0.AuxInt)
 18175  			if v_1.Op != OpMul16 {
 18176  				continue
 18177  			}
 18178  			_ = v_1.Args[1]
 18179  			v_1_0 := v_1.Args[0]
 18180  			v_1_1 := v_1.Args[1]
 18181  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 18182  				if v_1_0.Op != OpConst16 || v_1_0.Type != t {
 18183  					continue
 18184  				}
 18185  				d := auxIntToInt16(v_1_0.AuxInt)
 18186  				x := v_1_1
 18187  				v.reset(OpMul16)
 18188  				v0 := b.NewValue0(v.Pos, OpConst16, t)
 18189  				v0.AuxInt = int16ToAuxInt(c * d)
 18190  				v.AddArg2(v0, x)
 18191  				return true
 18192  			}
 18193  		}
 18194  		break
 18195  	}
 18196  	return false
 18197  }
 18198  func rewriteValuegeneric_OpMul32(v *Value) bool {
 18199  	v_1 := v.Args[1]
 18200  	v_0 := v.Args[0]
 18201  	b := v.Block
 18202  	typ := &b.Func.Config.Types
 18203  	// match: (Mul32 (Const32 [c]) (Const32 [d]))
 18204  	// result: (Const32 [c*d])
 18205  	for {
 18206  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18207  			if v_0.Op != OpConst32 {
 18208  				continue
 18209  			}
 18210  			c := auxIntToInt32(v_0.AuxInt)
 18211  			if v_1.Op != OpConst32 {
 18212  				continue
 18213  			}
 18214  			d := auxIntToInt32(v_1.AuxInt)
 18215  			v.reset(OpConst32)
 18216  			v.AuxInt = int32ToAuxInt(c * d)
 18217  			return true
 18218  		}
 18219  		break
 18220  	}
 18221  	// match: (Mul32 (Const32 [1]) x)
 18222  	// result: x
 18223  	for {
 18224  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18225  			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 1 {
 18226  				continue
 18227  			}
 18228  			x := v_1
 18229  			v.copyOf(x)
 18230  			return true
 18231  		}
 18232  		break
 18233  	}
 18234  	// match: (Mul32 (Const32 [-1]) x)
 18235  	// result: (Neg32 x)
 18236  	for {
 18237  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18238  			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != -1 {
 18239  				continue
 18240  			}
 18241  			x := v_1
 18242  			v.reset(OpNeg32)
 18243  			v.AddArg(x)
 18244  			return true
 18245  		}
 18246  		break
 18247  	}
 18248  	// match: (Mul32 <t> n (Const32 [c]))
 18249  	// cond: isPowerOfTwo(c)
 18250  	// result: (Lsh32x64 <t> n (Const64 <typ.UInt64> [log32(c)]))
 18251  	for {
 18252  		t := v.Type
 18253  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18254  			n := v_0
 18255  			if v_1.Op != OpConst32 {
 18256  				continue
 18257  			}
 18258  			c := auxIntToInt32(v_1.AuxInt)
 18259  			if !(isPowerOfTwo(c)) {
 18260  				continue
 18261  			}
 18262  			v.reset(OpLsh32x64)
 18263  			v.Type = t
 18264  			v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
 18265  			v0.AuxInt = int64ToAuxInt(log32(c))
 18266  			v.AddArg2(n, v0)
 18267  			return true
 18268  		}
 18269  		break
 18270  	}
 18271  	// match: (Mul32 <t> n (Const32 [c]))
 18272  	// cond: t.IsSigned() && isPowerOfTwo(-c)
 18273  	// result: (Neg32 (Lsh32x64 <t> n (Const64 <typ.UInt64> [log32(-c)])))
 18274  	for {
 18275  		t := v.Type
 18276  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18277  			n := v_0
 18278  			if v_1.Op != OpConst32 {
 18279  				continue
 18280  			}
 18281  			c := auxIntToInt32(v_1.AuxInt)
 18282  			if !(t.IsSigned() && isPowerOfTwo(-c)) {
 18283  				continue
 18284  			}
 18285  			v.reset(OpNeg32)
 18286  			v0 := b.NewValue0(v.Pos, OpLsh32x64, t)
 18287  			v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
 18288  			v1.AuxInt = int64ToAuxInt(log32(-c))
 18289  			v0.AddArg2(n, v1)
 18290  			v.AddArg(v0)
 18291  			return true
 18292  		}
 18293  		break
 18294  	}
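	// Distribute a constant multiplier over an addition with a constant:
	// c*(d+x) becomes c*d + c*x, and c*d folds to a new constant.
	// Distributivity holds exactly in wrap-around two's-complement
	// arithmetic.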
 18295  	// match: (Mul32 (Const32 <t> [c]) (Add32 <t> (Const32 <t> [d]) x))
 18296  	// result: (Add32 (Const32 <t> [c*d]) (Mul32 <t> (Const32 <t> [c]) x))
 18297  	for {
 18298  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18299  			if v_0.Op != OpConst32 {
 18300  				continue
 18301  			}
 18302  			t := v_0.Type
 18303  			c := auxIntToInt32(v_0.AuxInt)
 18304  			if v_1.Op != OpAdd32 || v_1.Type != t {
 18305  				continue
 18306  			}
 18307  			_ = v_1.Args[1]
 18308  			v_1_0 := v_1.Args[0]
 18309  			v_1_1 := v_1.Args[1]
 18310  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 18311  				if v_1_0.Op != OpConst32 || v_1_0.Type != t {
 18312  					continue
 18313  				}
 18314  				d := auxIntToInt32(v_1_0.AuxInt)
 18315  				x := v_1_1
 18316  				v.reset(OpAdd32)
 18317  				v0 := b.NewValue0(v.Pos, OpConst32, t)
 18318  				v0.AuxInt = int32ToAuxInt(c * d)
 18319  				v1 := b.NewValue0(v.Pos, OpMul32, t)
 18320  				v2 := b.NewValue0(v.Pos, OpConst32, t)
 18321  				v2.AuxInt = int32ToAuxInt(c)
 18322  				v1.AddArg2(v2, x)
 18323  				v.AddArg2(v0, v1)
 18324  				return true
 18325  			}
 18326  		}
 18327  		break
 18328  	}
 18329  	// match: (Mul32 (Const32 [0]) _)
 18330  	// result: (Const32 [0])
 18331  	for {
 18332  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18333  			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 18334  				continue
 18335  			}
 18336  			v.reset(OpConst32)
 18337  			v.AuxInt = int32ToAuxInt(0)
 18338  			return true
 18339  		}
 18340  		break
 18341  	}
 18342  	// match: (Mul32 (Mul32 i:(Const32 <t>) z) x)
 18343  	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
 18344  	// result: (Mul32 i (Mul32 <t> x z))
 18345  	for {
 18346  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18347  			if v_0.Op != OpMul32 {
 18348  				continue
 18349  			}
 18350  			_ = v_0.Args[1]
 18351  			v_0_0 := v_0.Args[0]
 18352  			v_0_1 := v_0.Args[1]
 18353  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 18354  				i := v_0_0
 18355  				if i.Op != OpConst32 {
 18356  					continue
 18357  				}
 18358  				t := i.Type
 18359  				z := v_0_1
 18360  				x := v_1
 18361  				if !(z.Op != OpConst32 && x.Op != OpConst32) {
 18362  					continue
 18363  				}
 18364  				v.reset(OpMul32)
 18365  				v0 := b.NewValue0(v.Pos, OpMul32, t)
 18366  				v0.AddArg2(x, z)
 18367  				v.AddArg2(i, v0)
 18368  				return true
 18369  			}
 18370  		}
 18371  		break
 18372  	}
 18373  	// match: (Mul32 (Const32 <t> [c]) (Mul32 (Const32 <t> [d]) x))
 18374  	// result: (Mul32 (Const32 <t> [c*d]) x)
 18375  	for {
 18376  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18377  			if v_0.Op != OpConst32 {
 18378  				continue
 18379  			}
 18380  			t := v_0.Type
 18381  			c := auxIntToInt32(v_0.AuxInt)
 18382  			if v_1.Op != OpMul32 {
 18383  				continue
 18384  			}
 18385  			_ = v_1.Args[1]
 18386  			v_1_0 := v_1.Args[0]
 18387  			v_1_1 := v_1.Args[1]
 18388  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 18389  				if v_1_0.Op != OpConst32 || v_1_0.Type != t {
 18390  					continue
 18391  				}
 18392  				d := auxIntToInt32(v_1_0.AuxInt)
 18393  				x := v_1_1
 18394  				v.reset(OpMul32)
 18395  				v0 := b.NewValue0(v.Pos, OpConst32, t)
 18396  				v0.AuxInt = int32ToAuxInt(c * d)
 18397  				v.AddArg2(v0, x)
 18398  				return true
 18399  			}
 18400  		}
 18401  		break
 18402  	}
 18403  	return false
 18404  }
 18405  func rewriteValuegeneric_OpMul32F(v *Value) bool {
 18406  	v_1 := v.Args[1]
 18407  	v_0 := v.Args[0]
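	// The guard c*d == c*d fails only when the product is NaN, so the
	// fold is skipped whenever it would produce a NaN constant.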
 18408  	// match: (Mul32F (Const32F [c]) (Const32F [d]))
 18409  	// cond: c*d == c*d
 18410  	// result: (Const32F [c*d])
 18411  	for {
 18412  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18413  			if v_0.Op != OpConst32F {
 18414  				continue
 18415  			}
 18416  			c := auxIntToFloat32(v_0.AuxInt)
 18417  			if v_1.Op != OpConst32F {
 18418  				continue
 18419  			}
 18420  			d := auxIntToFloat32(v_1.AuxInt)
 18421  			if !(c*d == c*d) {
 18422  				continue
 18423  			}
 18424  			v.reset(OpConst32F)
 18425  			v.AuxInt = float32ToAuxInt(c * d)
 18426  			return true
 18427  		}
 18428  		break
 18429  	}
 18430  	// match: (Mul32F x (Const32F [1]))
 18431  	// result: x
 18432  	for {
 18433  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18434  			x := v_0
 18435  			if v_1.Op != OpConst32F || auxIntToFloat32(v_1.AuxInt) != 1 {
 18436  				continue
 18437  			}
 18438  			v.copyOf(x)
 18439  			return true
 18440  		}
 18441  		break
 18442  	}
 18443  	// match: (Mul32F x (Const32F [-1]))
 18444  	// result: (Neg32F x)
 18445  	for {
 18446  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18447  			x := v_0
 18448  			if v_1.Op != OpConst32F || auxIntToFloat32(v_1.AuxInt) != -1 {
 18449  				continue
 18450  			}
 18451  			v.reset(OpNeg32F)
 18452  			v.AddArg(x)
 18453  			return true
 18454  		}
 18455  		break
 18456  	}
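	// Doubling is exact in IEEE 754 binary floating point, so x*2 can be
	// replaced by the (typically cheaper) addition x+x.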
 18457  	// match: (Mul32F x (Const32F [2]))
 18458  	// result: (Add32F x x)
 18459  	for {
 18460  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18461  			x := v_0
 18462  			if v_1.Op != OpConst32F || auxIntToFloat32(v_1.AuxInt) != 2 {
 18463  				continue
 18464  			}
 18465  			v.reset(OpAdd32F)
 18466  			v.AddArg2(x, x)
 18467  			return true
 18468  		}
 18469  		break
 18470  	}
 18471  	return false
 18472  }
 18473  func rewriteValuegeneric_OpMul64(v *Value) bool {
 18474  	v_1 := v.Args[1]
 18475  	v_0 := v.Args[0]
 18476  	b := v.Block
 18477  	typ := &b.Func.Config.Types
 18478  	// match: (Mul64 (Const64 [c]) (Const64 [d]))
 18479  	// result: (Const64 [c*d])
 18480  	for {
 18481  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18482  			if v_0.Op != OpConst64 {
 18483  				continue
 18484  			}
 18485  			c := auxIntToInt64(v_0.AuxInt)
 18486  			if v_1.Op != OpConst64 {
 18487  				continue
 18488  			}
 18489  			d := auxIntToInt64(v_1.AuxInt)
 18490  			v.reset(OpConst64)
 18491  			v.AuxInt = int64ToAuxInt(c * d)
 18492  			return true
 18493  		}
 18494  		break
 18495  	}
 18496  	// match: (Mul64 (Const64 [1]) x)
 18497  	// result: x
 18498  	for {
 18499  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18500  			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 1 {
 18501  				continue
 18502  			}
 18503  			x := v_1
 18504  			v.copyOf(x)
 18505  			return true
 18506  		}
 18507  		break
 18508  	}
 18509  	// match: (Mul64 (Const64 [-1]) x)
 18510  	// result: (Neg64 x)
 18511  	for {
 18512  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18513  			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != -1 {
 18514  				continue
 18515  			}
 18516  			x := v_1
 18517  			v.reset(OpNeg64)
 18518  			v.AddArg(x)
 18519  			return true
 18520  		}
 18521  		break
 18522  	}
 18523  	// match: (Mul64 <t> n (Const64 [c]))
 18524  	// cond: isPowerOfTwo(c)
 18525  	// result: (Lsh64x64 <t> n (Const64 <typ.UInt64> [log64(c)]))
 18526  	for {
 18527  		t := v.Type
 18528  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18529  			n := v_0
 18530  			if v_1.Op != OpConst64 {
 18531  				continue
 18532  			}
 18533  			c := auxIntToInt64(v_1.AuxInt)
 18534  			if !(isPowerOfTwo(c)) {
 18535  				continue
 18536  			}
 18537  			v.reset(OpLsh64x64)
 18538  			v.Type = t
 18539  			v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
 18540  			v0.AuxInt = int64ToAuxInt(log64(c))
 18541  			v.AddArg2(n, v0)
 18542  			return true
 18543  		}
 18544  		break
 18545  	}
 18546  	// match: (Mul64 <t> n (Const64 [c]))
 18547  	// cond: t.IsSigned() && isPowerOfTwo(-c)
 18548  	// result: (Neg64 (Lsh64x64 <t> n (Const64 <typ.UInt64> [log64(-c)])))
 18549  	for {
 18550  		t := v.Type
 18551  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18552  			n := v_0
 18553  			if v_1.Op != OpConst64 {
 18554  				continue
 18555  			}
 18556  			c := auxIntToInt64(v_1.AuxInt)
 18557  			if !(t.IsSigned() && isPowerOfTwo(-c)) {
 18558  				continue
 18559  			}
 18560  			v.reset(OpNeg64)
 18561  			v0 := b.NewValue0(v.Pos, OpLsh64x64, t)
 18562  			v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
 18563  			v1.AuxInt = int64ToAuxInt(log64(-c))
 18564  			v0.AddArg2(n, v1)
 18565  			v.AddArg(v0)
 18566  			return true
 18567  		}
 18568  		break
 18569  	}
 18570  	// match: (Mul64 (Const64 <t> [c]) (Add64 <t> (Const64 <t> [d]) x))
 18571  	// result: (Add64 (Const64 <t> [c*d]) (Mul64 <t> (Const64 <t> [c]) x))
 18572  	for {
 18573  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18574  			if v_0.Op != OpConst64 {
 18575  				continue
 18576  			}
 18577  			t := v_0.Type
 18578  			c := auxIntToInt64(v_0.AuxInt)
 18579  			if v_1.Op != OpAdd64 || v_1.Type != t {
 18580  				continue
 18581  			}
 18582  			_ = v_1.Args[1]
 18583  			v_1_0 := v_1.Args[0]
 18584  			v_1_1 := v_1.Args[1]
 18585  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 18586  				if v_1_0.Op != OpConst64 || v_1_0.Type != t {
 18587  					continue
 18588  				}
 18589  				d := auxIntToInt64(v_1_0.AuxInt)
 18590  				x := v_1_1
 18591  				v.reset(OpAdd64)
 18592  				v0 := b.NewValue0(v.Pos, OpConst64, t)
 18593  				v0.AuxInt = int64ToAuxInt(c * d)
 18594  				v1 := b.NewValue0(v.Pos, OpMul64, t)
 18595  				v2 := b.NewValue0(v.Pos, OpConst64, t)
 18596  				v2.AuxInt = int64ToAuxInt(c)
 18597  				v1.AddArg2(v2, x)
 18598  				v.AddArg2(v0, v1)
 18599  				return true
 18600  			}
 18601  		}
 18602  		break
 18603  	}
 18604  	// match: (Mul64 (Const64 [0]) _)
 18605  	// result: (Const64 [0])
 18606  	for {
 18607  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18608  			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 18609  				continue
 18610  			}
 18611  			v.reset(OpConst64)
 18612  			v.AuxInt = int64ToAuxInt(0)
 18613  			return true
 18614  		}
 18615  		break
 18616  	}
 18617  	// match: (Mul64 (Mul64 i:(Const64 <t>) z) x)
 18618  	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
 18619  	// result: (Mul64 i (Mul64 <t> x z))
 18620  	for {
 18621  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18622  			if v_0.Op != OpMul64 {
 18623  				continue
 18624  			}
 18625  			_ = v_0.Args[1]
 18626  			v_0_0 := v_0.Args[0]
 18627  			v_0_1 := v_0.Args[1]
 18628  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 18629  				i := v_0_0
 18630  				if i.Op != OpConst64 {
 18631  					continue
 18632  				}
 18633  				t := i.Type
 18634  				z := v_0_1
 18635  				x := v_1
 18636  				if !(z.Op != OpConst64 && x.Op != OpConst64) {
 18637  					continue
 18638  				}
 18639  				v.reset(OpMul64)
 18640  				v0 := b.NewValue0(v.Pos, OpMul64, t)
 18641  				v0.AddArg2(x, z)
 18642  				v.AddArg2(i, v0)
 18643  				return true
 18644  			}
 18645  		}
 18646  		break
 18647  	}
 18648  	// match: (Mul64 (Const64 <t> [c]) (Mul64 (Const64 <t> [d]) x))
 18649  	// result: (Mul64 (Const64 <t> [c*d]) x)
 18650  	for {
 18651  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18652  			if v_0.Op != OpConst64 {
 18653  				continue
 18654  			}
 18655  			t := v_0.Type
 18656  			c := auxIntToInt64(v_0.AuxInt)
 18657  			if v_1.Op != OpMul64 {
 18658  				continue
 18659  			}
 18660  			_ = v_1.Args[1]
 18661  			v_1_0 := v_1.Args[0]
 18662  			v_1_1 := v_1.Args[1]
 18663  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 18664  				if v_1_0.Op != OpConst64 || v_1_0.Type != t {
 18665  					continue
 18666  				}
 18667  				d := auxIntToInt64(v_1_0.AuxInt)
 18668  				x := v_1_1
 18669  				v.reset(OpMul64)
 18670  				v0 := b.NewValue0(v.Pos, OpConst64, t)
 18671  				v0.AuxInt = int64ToAuxInt(c * d)
 18672  				v.AddArg2(v0, x)
 18673  				return true
 18674  			}
 18675  		}
 18676  		break
 18677  	}
 18678  	return false
 18679  }
 18680  func rewriteValuegeneric_OpMul64F(v *Value) bool {
 18681  	v_1 := v.Args[1]
 18682  	v_0 := v.Args[0]
 18683  	// match: (Mul64F (Const64F [c]) (Const64F [d]))
 18684  	// cond: c*d == c*d
 18685  	// result: (Const64F [c*d])
 18686  	for {
 18687  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18688  			if v_0.Op != OpConst64F {
 18689  				continue
 18690  			}
 18691  			c := auxIntToFloat64(v_0.AuxInt)
 18692  			if v_1.Op != OpConst64F {
 18693  				continue
 18694  			}
 18695  			d := auxIntToFloat64(v_1.AuxInt)
 18696  			if !(c*d == c*d) {
 18697  				continue
 18698  			}
 18699  			v.reset(OpConst64F)
 18700  			v.AuxInt = float64ToAuxInt(c * d)
 18701  			return true
 18702  		}
 18703  		break
 18704  	}
 18705  	// match: (Mul64F x (Const64F [1]))
 18706  	// result: x
 18707  	for {
 18708  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18709  			x := v_0
 18710  			if v_1.Op != OpConst64F || auxIntToFloat64(v_1.AuxInt) != 1 {
 18711  				continue
 18712  			}
 18713  			v.copyOf(x)
 18714  			return true
 18715  		}
 18716  		break
 18717  	}
 18718  	// match: (Mul64F x (Const64F [-1]))
 18719  	// result: (Neg64F x)
 18720  	for {
 18721  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18722  			x := v_0
 18723  			if v_1.Op != OpConst64F || auxIntToFloat64(v_1.AuxInt) != -1 {
 18724  				continue
 18725  			}
 18726  			v.reset(OpNeg64F)
 18727  			v.AddArg(x)
 18728  			return true
 18729  		}
 18730  		break
 18731  	}
 18732  	// match: (Mul64F x (Const64F [2]))
 18733  	// result: (Add64F x x)
 18734  	for {
 18735  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18736  			x := v_0
 18737  			if v_1.Op != OpConst64F || auxIntToFloat64(v_1.AuxInt) != 2 {
 18738  				continue
 18739  			}
 18740  			v.reset(OpAdd64F)
 18741  			v.AddArg2(x, x)
 18742  			return true
 18743  		}
 18744  		break
 18745  	}
 18746  	return false
 18747  }
 18748  func rewriteValuegeneric_OpMul8(v *Value) bool {
 18749  	v_1 := v.Args[1]
 18750  	v_0 := v.Args[0]
 18751  	b := v.Block
 18752  	typ := &b.Func.Config.Types
 18753  	// match: (Mul8 (Const8 [c]) (Const8 [d]))
 18754  	// result: (Const8 [c*d])
 18755  	for {
 18756  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18757  			if v_0.Op != OpConst8 {
 18758  				continue
 18759  			}
 18760  			c := auxIntToInt8(v_0.AuxInt)
 18761  			if v_1.Op != OpConst8 {
 18762  				continue
 18763  			}
 18764  			d := auxIntToInt8(v_1.AuxInt)
 18765  			v.reset(OpConst8)
 18766  			v.AuxInt = int8ToAuxInt(c * d)
 18767  			return true
 18768  		}
 18769  		break
 18770  	}
 18771  	// match: (Mul8 (Const8 [1]) x)
 18772  	// result: x
 18773  	for {
 18774  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18775  			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 1 {
 18776  				continue
 18777  			}
 18778  			x := v_1
 18779  			v.copyOf(x)
 18780  			return true
 18781  		}
 18782  		break
 18783  	}
 18784  	// match: (Mul8 (Const8 [-1]) x)
 18785  	// result: (Neg8 x)
 18786  	for {
 18787  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18788  			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != -1 {
 18789  				continue
 18790  			}
 18791  			x := v_1
 18792  			v.reset(OpNeg8)
 18793  			v.AddArg(x)
 18794  			return true
 18795  		}
 18796  		break
 18797  	}
 18798  	// match: (Mul8 <t> n (Const8 [c]))
 18799  	// cond: isPowerOfTwo(c)
 18800  	// result: (Lsh8x64 <t> n (Const64 <typ.UInt64> [log8(c)]))
 18801  	for {
 18802  		t := v.Type
 18803  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18804  			n := v_0
 18805  			if v_1.Op != OpConst8 {
 18806  				continue
 18807  			}
 18808  			c := auxIntToInt8(v_1.AuxInt)
 18809  			if !(isPowerOfTwo(c)) {
 18810  				continue
 18811  			}
 18812  			v.reset(OpLsh8x64)
 18813  			v.Type = t
 18814  			v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
 18815  			v0.AuxInt = int64ToAuxInt(log8(c))
 18816  			v.AddArg2(n, v0)
 18817  			return true
 18818  		}
 18819  		break
 18820  	}
 18821  	// match: (Mul8 <t> n (Const8 [c]))
 18822  	// cond: t.IsSigned() && isPowerOfTwo(-c)
 18823  	// result: (Neg8 (Lsh8x64 <t> n (Const64 <typ.UInt64> [log8(-c)])))
 18824  	for {
 18825  		t := v.Type
 18826  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18827  			n := v_0
 18828  			if v_1.Op != OpConst8 {
 18829  				continue
 18830  			}
 18831  			c := auxIntToInt8(v_1.AuxInt)
 18832  			if !(t.IsSigned() && isPowerOfTwo(-c)) {
 18833  				continue
 18834  			}
 18835  			v.reset(OpNeg8)
 18836  			v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
 18837  			v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
 18838  			v1.AuxInt = int64ToAuxInt(log8(-c))
 18839  			v0.AddArg2(n, v1)
 18840  			v.AddArg(v0)
 18841  			return true
 18842  		}
 18843  		break
 18844  	}
 18845  	// match: (Mul8 (Const8 [0]) _)
 18846  	// result: (Const8 [0])
 18847  	for {
 18848  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18849  			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 18850  				continue
 18851  			}
 18852  			v.reset(OpConst8)
 18853  			v.AuxInt = int8ToAuxInt(0)
 18854  			return true
 18855  		}
 18856  		break
 18857  	}
 18858  	// match: (Mul8 (Mul8 i:(Const8 <t>) z) x)
 18859  	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
 18860  	// result: (Mul8 i (Mul8 <t> x z))
 18861  	for {
 18862  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18863  			if v_0.Op != OpMul8 {
 18864  				continue
 18865  			}
 18866  			_ = v_0.Args[1]
 18867  			v_0_0 := v_0.Args[0]
 18868  			v_0_1 := v_0.Args[1]
 18869  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 18870  				i := v_0_0
 18871  				if i.Op != OpConst8 {
 18872  					continue
 18873  				}
 18874  				t := i.Type
 18875  				z := v_0_1
 18876  				x := v_1
 18877  				if !(z.Op != OpConst8 && x.Op != OpConst8) {
 18878  					continue
 18879  				}
 18880  				v.reset(OpMul8)
 18881  				v0 := b.NewValue0(v.Pos, OpMul8, t)
 18882  				v0.AddArg2(x, z)
 18883  				v.AddArg2(i, v0)
 18884  				return true
 18885  			}
 18886  		}
 18887  		break
 18888  	}
 18889  	// match: (Mul8 (Const8 <t> [c]) (Mul8 (Const8 <t> [d]) x))
 18890  	// result: (Mul8 (Const8 <t> [c*d]) x)
 18891  	for {
 18892  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 18893  			if v_0.Op != OpConst8 {
 18894  				continue
 18895  			}
 18896  			t := v_0.Type
 18897  			c := auxIntToInt8(v_0.AuxInt)
 18898  			if v_1.Op != OpMul8 {
 18899  				continue
 18900  			}
 18901  			_ = v_1.Args[1]
 18902  			v_1_0 := v_1.Args[0]
 18903  			v_1_1 := v_1.Args[1]
 18904  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 18905  				if v_1_0.Op != OpConst8 || v_1_0.Type != t {
 18906  					continue
 18907  				}
 18908  				d := auxIntToInt8(v_1_0.AuxInt)
 18909  				x := v_1_1
 18910  				v.reset(OpMul8)
 18911  				v0 := b.NewValue0(v.Pos, OpConst8, t)
 18912  				v0.AuxInt = int8ToAuxInt(c * d)
 18913  				v.AddArg2(v0, x)
 18914  				return true
 18915  			}
 18916  		}
 18917  		break
 18918  	}
 18919  	return false
 18920  }
 18921  func rewriteValuegeneric_OpNeg16(v *Value) bool {
 18922  	v_0 := v.Args[0]
 18923  	b := v.Block
 18924  	// match: (Neg16 (Const16 [c]))
 18925  	// result: (Const16 [-c])
 18926  	for {
 18927  		if v_0.Op != OpConst16 {
 18928  			break
 18929  		}
 18930  		c := auxIntToInt16(v_0.AuxInt)
 18931  		v.reset(OpConst16)
 18932  		v.AuxInt = int16ToAuxInt(-c)
 18933  		return true
 18934  	}
 18935  	// match: (Neg16 (Sub16 x y))
 18936  	// result: (Sub16 y x)
 18937  	for {
 18938  		if v_0.Op != OpSub16 {
 18939  			break
 18940  		}
 18941  		y := v_0.Args[1]
 18942  		x := v_0.Args[0]
 18943  		v.reset(OpSub16)
 18944  		v.AddArg2(y, x)
 18945  		return true
 18946  	}
 18947  	// match: (Neg16 (Neg16 x))
 18948  	// result: x
 18949  	for {
 18950  		if v_0.Op != OpNeg16 {
 18951  			break
 18952  		}
 18953  		x := v_0.Args[0]
 18954  		v.copyOf(x)
 18955  		return true
 18956  	}
 18957  	// match: (Neg16 <t> (Com16 x))
 18958  	// result: (Add16 (Const16 <t> [1]) x)
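	// Note: two's-complement identity: ^x == -x-1, so -(^x) == x+1.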
 18959  	for {
 18960  		t := v.Type
 18961  		if v_0.Op != OpCom16 {
 18962  			break
 18963  		}
 18964  		x := v_0.Args[0]
 18965  		v.reset(OpAdd16)
 18966  		v0 := b.NewValue0(v.Pos, OpConst16, t)
 18967  		v0.AuxInt = int16ToAuxInt(1)
 18968  		v.AddArg2(v0, x)
 18969  		return true
 18970  	}
 18971  	return false
 18972  }
 18973  func rewriteValuegeneric_OpNeg32(v *Value) bool {
 18974  	v_0 := v.Args[0]
 18975  	b := v.Block
 18976  	// match: (Neg32 (Const32 [c]))
 18977  	// result: (Const32 [-c])
 18978  	for {
 18979  		if v_0.Op != OpConst32 {
 18980  			break
 18981  		}
 18982  		c := auxIntToInt32(v_0.AuxInt)
 18983  		v.reset(OpConst32)
 18984  		v.AuxInt = int32ToAuxInt(-c)
 18985  		return true
 18986  	}
 18987  	// match: (Neg32 (Sub32 x y))
 18988  	// result: (Sub32 y x)
 18989  	for {
 18990  		if v_0.Op != OpSub32 {
 18991  			break
 18992  		}
 18993  		y := v_0.Args[1]
 18994  		x := v_0.Args[0]
 18995  		v.reset(OpSub32)
 18996  		v.AddArg2(y, x)
 18997  		return true
 18998  	}
 18999  	// match: (Neg32 (Neg32 x))
 19000  	// result: x
 19001  	for {
 19002  		if v_0.Op != OpNeg32 {
 19003  			break
 19004  		}
 19005  		x := v_0.Args[0]
 19006  		v.copyOf(x)
 19007  		return true
 19008  	}
 19009  	// match: (Neg32 <t> (Com32 x))
 19010  	// result: (Add32 (Const32 <t> [1]) x)
 19011  	for {
 19012  		t := v.Type
 19013  		if v_0.Op != OpCom32 {
 19014  			break
 19015  		}
 19016  		x := v_0.Args[0]
 19017  		v.reset(OpAdd32)
 19018  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 19019  		v0.AuxInt = int32ToAuxInt(1)
 19020  		v.AddArg2(v0, x)
 19021  		return true
 19022  	}
 19023  	return false
 19024  }
 19025  func rewriteValuegeneric_OpNeg32F(v *Value) bool {
 19026  	v_0 := v.Args[0]
 19027  	// match: (Neg32F (Const32F [c]))
 19028  	// cond: c != 0
 19029  	// result: (Const32F [-c])
 19030  	for {
 19031  		if v_0.Op != OpConst32F {
 19032  			break
 19033  		}
 19034  		c := auxIntToFloat32(v_0.AuxInt)
 19035  		if !(c != 0) {
 19036  			break
 19037  		}
 19038  		v.reset(OpConst32F)
 19039  		v.AuxInt = float32ToAuxInt(-c)
 19040  		return true
 19041  	}
 19042  	return false
 19043  }
 19044  func rewriteValuegeneric_OpNeg64(v *Value) bool {
 19045  	v_0 := v.Args[0]
 19046  	b := v.Block
 19047  	// match: (Neg64 (Const64 [c]))
 19048  	// result: (Const64 [-c])
 19049  	for {
 19050  		if v_0.Op != OpConst64 {
 19051  			break
 19052  		}
 19053  		c := auxIntToInt64(v_0.AuxInt)
 19054  		v.reset(OpConst64)
 19055  		v.AuxInt = int64ToAuxInt(-c)
 19056  		return true
 19057  	}
 19058  	// match: (Neg64 (Sub64 x y))
 19059  	// result: (Sub64 y x)
 19060  	for {
 19061  		if v_0.Op != OpSub64 {
 19062  			break
 19063  		}
 19064  		y := v_0.Args[1]
 19065  		x := v_0.Args[0]
 19066  		v.reset(OpSub64)
 19067  		v.AddArg2(y, x)
 19068  		return true
 19069  	}
 19070  	// match: (Neg64 (Neg64 x))
 19071  	// result: x
 19072  	for {
 19073  		if v_0.Op != OpNeg64 {
 19074  			break
 19075  		}
 19076  		x := v_0.Args[0]
 19077  		v.copyOf(x)
 19078  		return true
 19079  	}
 19080  	// match: (Neg64 <t> (Com64 x))
 19081  	// result: (Add64 (Const64 <t> [1]) x)
 19082  	for {
 19083  		t := v.Type
 19084  		if v_0.Op != OpCom64 {
 19085  			break
 19086  		}
 19087  		x := v_0.Args[0]
 19088  		v.reset(OpAdd64)
 19089  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 19090  		v0.AuxInt = int64ToAuxInt(1)
 19091  		v.AddArg2(v0, x)
 19092  		return true
 19093  	}
 19094  	return false
 19095  }
 19096  func rewriteValuegeneric_OpNeg64F(v *Value) bool {
 19097  	v_0 := v.Args[0]
 19098  	// match: (Neg64F (Const64F [c]))
 19099  	// cond: c != 0
 19100  	// result: (Const64F [-c])
 19101  	for {
 19102  		if v_0.Op != OpConst64F {
 19103  			break
 19104  		}
 19105  		c := auxIntToFloat64(v_0.AuxInt)
 19106  		if !(c != 0) {
 19107  			break
 19108  		}
 19109  		v.reset(OpConst64F)
 19110  		v.AuxInt = float64ToAuxInt(-c)
 19111  		return true
 19112  	}
 19113  	return false
 19114  }
 19115  func rewriteValuegeneric_OpNeg8(v *Value) bool {
 19116  	v_0 := v.Args[0]
 19117  	b := v.Block
 19118  	// match: (Neg8 (Const8 [c]))
 19119  	// result: (Const8 [-c])
 19120  	for {
 19121  		if v_0.Op != OpConst8 {
 19122  			break
 19123  		}
 19124  		c := auxIntToInt8(v_0.AuxInt)
 19125  		v.reset(OpConst8)
 19126  		v.AuxInt = int8ToAuxInt(-c)
 19127  		return true
 19128  	}
 19129  	// match: (Neg8 (Sub8 x y))
 19130  	// result: (Sub8 y x)
 19131  	for {
 19132  		if v_0.Op != OpSub8 {
 19133  			break
 19134  		}
 19135  		y := v_0.Args[1]
 19136  		x := v_0.Args[0]
 19137  		v.reset(OpSub8)
 19138  		v.AddArg2(y, x)
 19139  		return true
 19140  	}
 19141  	// match: (Neg8 (Neg8 x))
 19142  	// result: x
 19143  	for {
 19144  		if v_0.Op != OpNeg8 {
 19145  			break
 19146  		}
 19147  		x := v_0.Args[0]
 19148  		v.copyOf(x)
 19149  		return true
 19150  	}
 19151  	// match: (Neg8 <t> (Com8 x))
 19152  	// result: (Add8 (Const8 <t> [1]) x)
 19153  	for {
 19154  		t := v.Type
 19155  		if v_0.Op != OpCom8 {
 19156  			break
 19157  		}
 19158  		x := v_0.Args[0]
 19159  		v.reset(OpAdd8)
 19160  		v0 := b.NewValue0(v.Pos, OpConst8, t)
 19161  		v0.AuxInt = int8ToAuxInt(1)
 19162  		v.AddArg2(v0, x)
 19163  		return true
 19164  	}
 19165  	return false
 19166  }
 19167  func rewriteValuegeneric_OpNeq16(v *Value) bool {
 19168  	v_1 := v.Args[1]
 19169  	v_0 := v.Args[0]
 19170  	b := v.Block
 19171  	typ := &b.Func.Config.Types
 19172  	// match: (Neq16 x x)
 19173  	// result: (ConstBool [false])
 19174  	for {
 19175  		x := v_0
 19176  		if x != v_1 {
 19177  			break
 19178  		}
 19179  		v.reset(OpConstBool)
 19180  		v.AuxInt = boolToAuxInt(false)
 19181  		return true
 19182  	}
 19183  	// match: (Neq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x))
 19184  	// result: (Neq16 (Const16 <t> [c-d]) x)
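	// Note: the constant moves across the addition: c != x+d is equivalent to
	// c-d != x in wrapping 16-bit arithmetic.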
 19185  	for {
 19186  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19187  			if v_0.Op != OpConst16 {
 19188  				continue
 19189  			}
 19190  			t := v_0.Type
 19191  			c := auxIntToInt16(v_0.AuxInt)
 19192  			if v_1.Op != OpAdd16 {
 19193  				continue
 19194  			}
 19195  			_ = v_1.Args[1]
 19196  			v_1_0 := v_1.Args[0]
 19197  			v_1_1 := v_1.Args[1]
 19198  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 19199  				if v_1_0.Op != OpConst16 || v_1_0.Type != t {
 19200  					continue
 19201  				}
 19202  				d := auxIntToInt16(v_1_0.AuxInt)
 19203  				x := v_1_1
 19204  				v.reset(OpNeq16)
 19205  				v0 := b.NewValue0(v.Pos, OpConst16, t)
 19206  				v0.AuxInt = int16ToAuxInt(c - d)
 19207  				v.AddArg2(v0, x)
 19208  				return true
 19209  			}
 19210  		}
 19211  		break
 19212  	}
 19213  	// match: (Neq16 (Const16 [c]) (Const16 [d]))
 19214  	// result: (ConstBool [c != d])
 19215  	for {
 19216  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19217  			if v_0.Op != OpConst16 {
 19218  				continue
 19219  			}
 19220  			c := auxIntToInt16(v_0.AuxInt)
 19221  			if v_1.Op != OpConst16 {
 19222  				continue
 19223  			}
 19224  			d := auxIntToInt16(v_1.AuxInt)
 19225  			v.reset(OpConstBool)
 19226  			v.AuxInt = boolToAuxInt(c != d)
 19227  			return true
 19228  		}
 19229  		break
 19230  	}
 19231  	// match: (Neq16 n (Lsh16x64 (Rsh16x64 (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) )
 19232  	// cond: k > 0 && k < 15 && kbar == 16 - k
 19233  	// result: (Neq16 (And16 <t> n (Const16 <t> [1<<uint(k)-1])) (Const16 <t> [0]))
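	// Note: this is the lowered form of a signed divisibility test n % (1<<k) != 0;
	// e.g. with k == 3, n%8 != 0 becomes (n & 7) != 0.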
 19234  	for {
 19235  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19236  			n := v_0
 19237  			if v_1.Op != OpLsh16x64 {
 19238  				continue
 19239  			}
 19240  			_ = v_1.Args[1]
 19241  			v_1_0 := v_1.Args[0]
 19242  			if v_1_0.Op != OpRsh16x64 {
 19243  				continue
 19244  			}
 19245  			_ = v_1_0.Args[1]
 19246  			v_1_0_0 := v_1_0.Args[0]
 19247  			if v_1_0_0.Op != OpAdd16 {
 19248  				continue
 19249  			}
 19250  			t := v_1_0_0.Type
 19251  			_ = v_1_0_0.Args[1]
 19252  			v_1_0_0_0 := v_1_0_0.Args[0]
 19253  			v_1_0_0_1 := v_1_0_0.Args[1]
 19254  			for _i1 := 0; _i1 <= 1; _i1, v_1_0_0_0, v_1_0_0_1 = _i1+1, v_1_0_0_1, v_1_0_0_0 {
 19255  				if n != v_1_0_0_0 || v_1_0_0_1.Op != OpRsh16Ux64 || v_1_0_0_1.Type != t {
 19256  					continue
 19257  				}
 19258  				_ = v_1_0_0_1.Args[1]
 19259  				v_1_0_0_1_0 := v_1_0_0_1.Args[0]
 19260  				if v_1_0_0_1_0.Op != OpRsh16x64 || v_1_0_0_1_0.Type != t {
 19261  					continue
 19262  				}
 19263  				_ = v_1_0_0_1_0.Args[1]
 19264  				if n != v_1_0_0_1_0.Args[0] {
 19265  					continue
 19266  				}
 19267  				v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
 19268  				if v_1_0_0_1_0_1.Op != OpConst64 || v_1_0_0_1_0_1.Type != typ.UInt64 || auxIntToInt64(v_1_0_0_1_0_1.AuxInt) != 15 {
 19269  					continue
 19270  				}
 19271  				v_1_0_0_1_1 := v_1_0_0_1.Args[1]
 19272  				if v_1_0_0_1_1.Op != OpConst64 || v_1_0_0_1_1.Type != typ.UInt64 {
 19273  					continue
 19274  				}
 19275  				kbar := auxIntToInt64(v_1_0_0_1_1.AuxInt)
 19276  				v_1_0_1 := v_1_0.Args[1]
 19277  				if v_1_0_1.Op != OpConst64 || v_1_0_1.Type != typ.UInt64 {
 19278  					continue
 19279  				}
 19280  				k := auxIntToInt64(v_1_0_1.AuxInt)
 19281  				v_1_1 := v_1.Args[1]
 19282  				if v_1_1.Op != OpConst64 || v_1_1.Type != typ.UInt64 || auxIntToInt64(v_1_1.AuxInt) != k || !(k > 0 && k < 15 && kbar == 16-k) {
 19283  					continue
 19284  				}
 19285  				v.reset(OpNeq16)
 19286  				v0 := b.NewValue0(v.Pos, OpAnd16, t)
 19287  				v1 := b.NewValue0(v.Pos, OpConst16, t)
 19288  				v1.AuxInt = int16ToAuxInt(1<<uint(k) - 1)
 19289  				v0.AddArg2(n, v1)
 19290  				v2 := b.NewValue0(v.Pos, OpConst16, t)
 19291  				v2.AuxInt = int16ToAuxInt(0)
 19292  				v.AddArg2(v0, v2)
 19293  				return true
 19294  			}
 19295  		}
 19296  		break
 19297  	}
 19298  	// match: (Neq16 s:(Sub16 x y) (Const16 [0]))
 19299  	// cond: s.Uses == 1
 19300  	// result: (Neq16 x y)
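	// Note: x-y != 0 is the same as x != y; the rule only fires when this comparison
	// is the sole use of the Sub16, so the subtraction becomes dead.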
 19301  	for {
 19302  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19303  			s := v_0
 19304  			if s.Op != OpSub16 {
 19305  				continue
 19306  			}
 19307  			y := s.Args[1]
 19308  			x := s.Args[0]
 19309  			if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != 0 || !(s.Uses == 1) {
 19310  				continue
 19311  			}
 19312  			v.reset(OpNeq16)
 19313  			v.AddArg2(x, y)
 19314  			return true
 19315  		}
 19316  		break
 19317  	}
 19318  	// match: (Neq16 (And16 <t> x (Const16 <t> [y])) (Const16 <t> [y]))
 19319  	// cond: oneBit16(y)
 19320  	// result: (Eq16 (And16 <t> x (Const16 <t> [y])) (Const16 <t> [0]))
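	// Note: when y has exactly one bit set, x&y is either 0 or y, so (x&y) != y
	// is equivalent to (x&y) == 0.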
 19321  	for {
 19322  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19323  			if v_0.Op != OpAnd16 {
 19324  				continue
 19325  			}
 19326  			t := v_0.Type
 19327  			_ = v_0.Args[1]
 19328  			v_0_0 := v_0.Args[0]
 19329  			v_0_1 := v_0.Args[1]
 19330  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 19331  				x := v_0_0
 19332  				if v_0_1.Op != OpConst16 || v_0_1.Type != t {
 19333  					continue
 19334  				}
 19335  				y := auxIntToInt16(v_0_1.AuxInt)
 19336  				if v_1.Op != OpConst16 || v_1.Type != t || auxIntToInt16(v_1.AuxInt) != y || !(oneBit16(y)) {
 19337  					continue
 19338  				}
 19339  				v.reset(OpEq16)
 19340  				v0 := b.NewValue0(v.Pos, OpAnd16, t)
 19341  				v1 := b.NewValue0(v.Pos, OpConst16, t)
 19342  				v1.AuxInt = int16ToAuxInt(y)
 19343  				v0.AddArg2(x, v1)
 19344  				v2 := b.NewValue0(v.Pos, OpConst16, t)
 19345  				v2.AuxInt = int16ToAuxInt(0)
 19346  				v.AddArg2(v0, v2)
 19347  				return true
 19348  			}
 19349  		}
 19350  		break
 19351  	}
 19352  	return false
 19353  }
 19354  func rewriteValuegeneric_OpNeq32(v *Value) bool {
 19355  	v_1 := v.Args[1]
 19356  	v_0 := v.Args[0]
 19357  	b := v.Block
 19358  	typ := &b.Func.Config.Types
 19359  	// match: (Neq32 x x)
 19360  	// result: (ConstBool [false])
 19361  	for {
 19362  		x := v_0
 19363  		if x != v_1 {
 19364  			break
 19365  		}
 19366  		v.reset(OpConstBool)
 19367  		v.AuxInt = boolToAuxInt(false)
 19368  		return true
 19369  	}
 19370  	// match: (Neq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
 19371  	// result: (Neq32 (Const32 <t> [c-d]) x)
 19372  	for {
 19373  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19374  			if v_0.Op != OpConst32 {
 19375  				continue
 19376  			}
 19377  			t := v_0.Type
 19378  			c := auxIntToInt32(v_0.AuxInt)
 19379  			if v_1.Op != OpAdd32 {
 19380  				continue
 19381  			}
 19382  			_ = v_1.Args[1]
 19383  			v_1_0 := v_1.Args[0]
 19384  			v_1_1 := v_1.Args[1]
 19385  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 19386  				if v_1_0.Op != OpConst32 || v_1_0.Type != t {
 19387  					continue
 19388  				}
 19389  				d := auxIntToInt32(v_1_0.AuxInt)
 19390  				x := v_1_1
 19391  				v.reset(OpNeq32)
 19392  				v0 := b.NewValue0(v.Pos, OpConst32, t)
 19393  				v0.AuxInt = int32ToAuxInt(c - d)
 19394  				v.AddArg2(v0, x)
 19395  				return true
 19396  			}
 19397  		}
 19398  		break
 19399  	}
 19400  	// match: (Neq32 (Const32 [c]) (Const32 [d]))
 19401  	// result: (ConstBool [c != d])
 19402  	for {
 19403  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19404  			if v_0.Op != OpConst32 {
 19405  				continue
 19406  			}
 19407  			c := auxIntToInt32(v_0.AuxInt)
 19408  			if v_1.Op != OpConst32 {
 19409  				continue
 19410  			}
 19411  			d := auxIntToInt32(v_1.AuxInt)
 19412  			v.reset(OpConstBool)
 19413  			v.AuxInt = boolToAuxInt(c != d)
 19414  			return true
 19415  		}
 19416  		break
 19417  	}
 19418  	// match: (Neq32 n (Lsh32x64 (Rsh32x64 (Add32 <t> n (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) )
 19419  	// cond: k > 0 && k < 31 && kbar == 32 - k
 19420  	// result: (Neq32 (And32 <t> n (Const32 <t> [1<<uint(k)-1])) (Const32 <t> [0]))
 19421  	for {
 19422  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19423  			n := v_0
 19424  			if v_1.Op != OpLsh32x64 {
 19425  				continue
 19426  			}
 19427  			_ = v_1.Args[1]
 19428  			v_1_0 := v_1.Args[0]
 19429  			if v_1_0.Op != OpRsh32x64 {
 19430  				continue
 19431  			}
 19432  			_ = v_1_0.Args[1]
 19433  			v_1_0_0 := v_1_0.Args[0]
 19434  			if v_1_0_0.Op != OpAdd32 {
 19435  				continue
 19436  			}
 19437  			t := v_1_0_0.Type
 19438  			_ = v_1_0_0.Args[1]
 19439  			v_1_0_0_0 := v_1_0_0.Args[0]
 19440  			v_1_0_0_1 := v_1_0_0.Args[1]
 19441  			for _i1 := 0; _i1 <= 1; _i1, v_1_0_0_0, v_1_0_0_1 = _i1+1, v_1_0_0_1, v_1_0_0_0 {
 19442  				if n != v_1_0_0_0 || v_1_0_0_1.Op != OpRsh32Ux64 || v_1_0_0_1.Type != t {
 19443  					continue
 19444  				}
 19445  				_ = v_1_0_0_1.Args[1]
 19446  				v_1_0_0_1_0 := v_1_0_0_1.Args[0]
 19447  				if v_1_0_0_1_0.Op != OpRsh32x64 || v_1_0_0_1_0.Type != t {
 19448  					continue
 19449  				}
 19450  				_ = v_1_0_0_1_0.Args[1]
 19451  				if n != v_1_0_0_1_0.Args[0] {
 19452  					continue
 19453  				}
 19454  				v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
 19455  				if v_1_0_0_1_0_1.Op != OpConst64 || v_1_0_0_1_0_1.Type != typ.UInt64 || auxIntToInt64(v_1_0_0_1_0_1.AuxInt) != 31 {
 19456  					continue
 19457  				}
 19458  				v_1_0_0_1_1 := v_1_0_0_1.Args[1]
 19459  				if v_1_0_0_1_1.Op != OpConst64 || v_1_0_0_1_1.Type != typ.UInt64 {
 19460  					continue
 19461  				}
 19462  				kbar := auxIntToInt64(v_1_0_0_1_1.AuxInt)
 19463  				v_1_0_1 := v_1_0.Args[1]
 19464  				if v_1_0_1.Op != OpConst64 || v_1_0_1.Type != typ.UInt64 {
 19465  					continue
 19466  				}
 19467  				k := auxIntToInt64(v_1_0_1.AuxInt)
 19468  				v_1_1 := v_1.Args[1]
 19469  				if v_1_1.Op != OpConst64 || v_1_1.Type != typ.UInt64 || auxIntToInt64(v_1_1.AuxInt) != k || !(k > 0 && k < 31 && kbar == 32-k) {
 19470  					continue
 19471  				}
 19472  				v.reset(OpNeq32)
 19473  				v0 := b.NewValue0(v.Pos, OpAnd32, t)
 19474  				v1 := b.NewValue0(v.Pos, OpConst32, t)
 19475  				v1.AuxInt = int32ToAuxInt(1<<uint(k) - 1)
 19476  				v0.AddArg2(n, v1)
 19477  				v2 := b.NewValue0(v.Pos, OpConst32, t)
 19478  				v2.AuxInt = int32ToAuxInt(0)
 19479  				v.AddArg2(v0, v2)
 19480  				return true
 19481  			}
 19482  		}
 19483  		break
 19484  	}
 19485  	// match: (Neq32 s:(Sub32 x y) (Const32 [0]))
 19486  	// cond: s.Uses == 1
 19487  	// result: (Neq32 x y)
 19488  	for {
 19489  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19490  			s := v_0
 19491  			if s.Op != OpSub32 {
 19492  				continue
 19493  			}
 19494  			y := s.Args[1]
 19495  			x := s.Args[0]
 19496  			if v_1.Op != OpConst32 || auxIntToInt32(v_1.AuxInt) != 0 || !(s.Uses == 1) {
 19497  				continue
 19498  			}
 19499  			v.reset(OpNeq32)
 19500  			v.AddArg2(x, y)
 19501  			return true
 19502  		}
 19503  		break
 19504  	}
 19505  	// match: (Neq32 (And32 <t> x (Const32 <t> [y])) (Const32 <t> [y]))
 19506  	// cond: oneBit32(y)
 19507  	// result: (Eq32 (And32 <t> x (Const32 <t> [y])) (Const32 <t> [0]))
 19508  	for {
 19509  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19510  			if v_0.Op != OpAnd32 {
 19511  				continue
 19512  			}
 19513  			t := v_0.Type
 19514  			_ = v_0.Args[1]
 19515  			v_0_0 := v_0.Args[0]
 19516  			v_0_1 := v_0.Args[1]
 19517  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 19518  				x := v_0_0
 19519  				if v_0_1.Op != OpConst32 || v_0_1.Type != t {
 19520  					continue
 19521  				}
 19522  				y := auxIntToInt32(v_0_1.AuxInt)
 19523  				if v_1.Op != OpConst32 || v_1.Type != t || auxIntToInt32(v_1.AuxInt) != y || !(oneBit32(y)) {
 19524  					continue
 19525  				}
 19526  				v.reset(OpEq32)
 19527  				v0 := b.NewValue0(v.Pos, OpAnd32, t)
 19528  				v1 := b.NewValue0(v.Pos, OpConst32, t)
 19529  				v1.AuxInt = int32ToAuxInt(y)
 19530  				v0.AddArg2(x, v1)
 19531  				v2 := b.NewValue0(v.Pos, OpConst32, t)
 19532  				v2.AuxInt = int32ToAuxInt(0)
 19533  				v.AddArg2(v0, v2)
 19534  				return true
 19535  			}
 19536  		}
 19537  		break
 19538  	}
 19539  	return false
 19540  }
 19541  func rewriteValuegeneric_OpNeq32F(v *Value) bool {
 19542  	v_1 := v.Args[1]
 19543  	v_0 := v.Args[0]
 19544  	// match: (Neq32F (Const32F [c]) (Const32F [d]))
 19545  	// result: (ConstBool [c != d])
 19546  	for {
 19547  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19548  			if v_0.Op != OpConst32F {
 19549  				continue
 19550  			}
 19551  			c := auxIntToFloat32(v_0.AuxInt)
 19552  			if v_1.Op != OpConst32F {
 19553  				continue
 19554  			}
 19555  			d := auxIntToFloat32(v_1.AuxInt)
 19556  			v.reset(OpConstBool)
 19557  			v.AuxInt = boolToAuxInt(c != d)
 19558  			return true
 19559  		}
 19560  		break
 19561  	}
 19562  	return false
 19563  }
 19564  func rewriteValuegeneric_OpNeq64(v *Value) bool {
 19565  	v_1 := v.Args[1]
 19566  	v_0 := v.Args[0]
 19567  	b := v.Block
 19568  	typ := &b.Func.Config.Types
 19569  	// match: (Neq64 x x)
 19570  	// result: (ConstBool [false])
 19571  	for {
 19572  		x := v_0
 19573  		if x != v_1 {
 19574  			break
 19575  		}
 19576  		v.reset(OpConstBool)
 19577  		v.AuxInt = boolToAuxInt(false)
 19578  		return true
 19579  	}
 19580  	// match: (Neq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
 19581  	// result: (Neq64 (Const64 <t> [c-d]) x)
 19582  	for {
 19583  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19584  			if v_0.Op != OpConst64 {
 19585  				continue
 19586  			}
 19587  			t := v_0.Type
 19588  			c := auxIntToInt64(v_0.AuxInt)
 19589  			if v_1.Op != OpAdd64 {
 19590  				continue
 19591  			}
 19592  			_ = v_1.Args[1]
 19593  			v_1_0 := v_1.Args[0]
 19594  			v_1_1 := v_1.Args[1]
 19595  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 19596  				if v_1_0.Op != OpConst64 || v_1_0.Type != t {
 19597  					continue
 19598  				}
 19599  				d := auxIntToInt64(v_1_0.AuxInt)
 19600  				x := v_1_1
 19601  				v.reset(OpNeq64)
 19602  				v0 := b.NewValue0(v.Pos, OpConst64, t)
 19603  				v0.AuxInt = int64ToAuxInt(c - d)
 19604  				v.AddArg2(v0, x)
 19605  				return true
 19606  			}
 19607  		}
 19608  		break
 19609  	}
 19610  	// match: (Neq64 (Const64 [c]) (Const64 [d]))
 19611  	// result: (ConstBool [c != d])
 19612  	for {
 19613  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19614  			if v_0.Op != OpConst64 {
 19615  				continue
 19616  			}
 19617  			c := auxIntToInt64(v_0.AuxInt)
 19618  			if v_1.Op != OpConst64 {
 19619  				continue
 19620  			}
 19621  			d := auxIntToInt64(v_1.AuxInt)
 19622  			v.reset(OpConstBool)
 19623  			v.AuxInt = boolToAuxInt(c != d)
 19624  			return true
 19625  		}
 19626  		break
 19627  	}
 19628  	// match: (Neq64 n (Lsh64x64 (Rsh64x64 (Add64 <t> n (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) )
 19629  	// cond: k > 0 && k < 63 && kbar == 64 - k
 19630  	// result: (Neq64 (And64 <t> n (Const64 <t> [1<<uint(k)-1])) (Const64 <t> [0]))
 19631  	for {
 19632  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19633  			n := v_0
 19634  			if v_1.Op != OpLsh64x64 {
 19635  				continue
 19636  			}
 19637  			_ = v_1.Args[1]
 19638  			v_1_0 := v_1.Args[0]
 19639  			if v_1_0.Op != OpRsh64x64 {
 19640  				continue
 19641  			}
 19642  			_ = v_1_0.Args[1]
 19643  			v_1_0_0 := v_1_0.Args[0]
 19644  			if v_1_0_0.Op != OpAdd64 {
 19645  				continue
 19646  			}
 19647  			t := v_1_0_0.Type
 19648  			_ = v_1_0_0.Args[1]
 19649  			v_1_0_0_0 := v_1_0_0.Args[0]
 19650  			v_1_0_0_1 := v_1_0_0.Args[1]
 19651  			for _i1 := 0; _i1 <= 1; _i1, v_1_0_0_0, v_1_0_0_1 = _i1+1, v_1_0_0_1, v_1_0_0_0 {
 19652  				if n != v_1_0_0_0 || v_1_0_0_1.Op != OpRsh64Ux64 || v_1_0_0_1.Type != t {
 19653  					continue
 19654  				}
 19655  				_ = v_1_0_0_1.Args[1]
 19656  				v_1_0_0_1_0 := v_1_0_0_1.Args[0]
 19657  				if v_1_0_0_1_0.Op != OpRsh64x64 || v_1_0_0_1_0.Type != t {
 19658  					continue
 19659  				}
 19660  				_ = v_1_0_0_1_0.Args[1]
 19661  				if n != v_1_0_0_1_0.Args[0] {
 19662  					continue
 19663  				}
 19664  				v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
 19665  				if v_1_0_0_1_0_1.Op != OpConst64 || v_1_0_0_1_0_1.Type != typ.UInt64 || auxIntToInt64(v_1_0_0_1_0_1.AuxInt) != 63 {
 19666  					continue
 19667  				}
 19668  				v_1_0_0_1_1 := v_1_0_0_1.Args[1]
 19669  				if v_1_0_0_1_1.Op != OpConst64 || v_1_0_0_1_1.Type != typ.UInt64 {
 19670  					continue
 19671  				}
 19672  				kbar := auxIntToInt64(v_1_0_0_1_1.AuxInt)
 19673  				v_1_0_1 := v_1_0.Args[1]
 19674  				if v_1_0_1.Op != OpConst64 || v_1_0_1.Type != typ.UInt64 {
 19675  					continue
 19676  				}
 19677  				k := auxIntToInt64(v_1_0_1.AuxInt)
 19678  				v_1_1 := v_1.Args[1]
 19679  				if v_1_1.Op != OpConst64 || v_1_1.Type != typ.UInt64 || auxIntToInt64(v_1_1.AuxInt) != k || !(k > 0 && k < 63 && kbar == 64-k) {
 19680  					continue
 19681  				}
 19682  				v.reset(OpNeq64)
 19683  				v0 := b.NewValue0(v.Pos, OpAnd64, t)
 19684  				v1 := b.NewValue0(v.Pos, OpConst64, t)
 19685  				v1.AuxInt = int64ToAuxInt(1<<uint(k) - 1)
 19686  				v0.AddArg2(n, v1)
 19687  				v2 := b.NewValue0(v.Pos, OpConst64, t)
 19688  				v2.AuxInt = int64ToAuxInt(0)
 19689  				v.AddArg2(v0, v2)
 19690  				return true
 19691  			}
 19692  		}
 19693  		break
 19694  	}
 19695  	// match: (Neq64 s:(Sub64 x y) (Const64 [0]))
 19696  	// cond: s.Uses == 1
 19697  	// result: (Neq64 x y)
 19698  	for {
 19699  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19700  			s := v_0
 19701  			if s.Op != OpSub64 {
 19702  				continue
 19703  			}
 19704  			y := s.Args[1]
 19705  			x := s.Args[0]
 19706  			if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 || !(s.Uses == 1) {
 19707  				continue
 19708  			}
 19709  			v.reset(OpNeq64)
 19710  			v.AddArg2(x, y)
 19711  			return true
 19712  		}
 19713  		break
 19714  	}
 19715  	// match: (Neq64 (And64 <t> x (Const64 <t> [y])) (Const64 <t> [y]))
 19716  	// cond: oneBit64(y)
 19717  	// result: (Eq64 (And64 <t> x (Const64 <t> [y])) (Const64 <t> [0]))
 19718  	for {
 19719  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19720  			if v_0.Op != OpAnd64 {
 19721  				continue
 19722  			}
 19723  			t := v_0.Type
 19724  			_ = v_0.Args[1]
 19725  			v_0_0 := v_0.Args[0]
 19726  			v_0_1 := v_0.Args[1]
 19727  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 19728  				x := v_0_0
 19729  				if v_0_1.Op != OpConst64 || v_0_1.Type != t {
 19730  					continue
 19731  				}
 19732  				y := auxIntToInt64(v_0_1.AuxInt)
 19733  				if v_1.Op != OpConst64 || v_1.Type != t || auxIntToInt64(v_1.AuxInt) != y || !(oneBit64(y)) {
 19734  					continue
 19735  				}
 19736  				v.reset(OpEq64)
 19737  				v0 := b.NewValue0(v.Pos, OpAnd64, t)
 19738  				v1 := b.NewValue0(v.Pos, OpConst64, t)
 19739  				v1.AuxInt = int64ToAuxInt(y)
 19740  				v0.AddArg2(x, v1)
 19741  				v2 := b.NewValue0(v.Pos, OpConst64, t)
 19742  				v2.AuxInt = int64ToAuxInt(0)
 19743  				v.AddArg2(v0, v2)
 19744  				return true
 19745  			}
 19746  		}
 19747  		break
 19748  	}
 19749  	return false
 19750  }
 19751  func rewriteValuegeneric_OpNeq64F(v *Value) bool {
 19752  	v_1 := v.Args[1]
 19753  	v_0 := v.Args[0]
 19754  	// match: (Neq64F (Const64F [c]) (Const64F [d]))
 19755  	// result: (ConstBool [c != d])
 19756  	for {
 19757  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19758  			if v_0.Op != OpConst64F {
 19759  				continue
 19760  			}
 19761  			c := auxIntToFloat64(v_0.AuxInt)
 19762  			if v_1.Op != OpConst64F {
 19763  				continue
 19764  			}
 19765  			d := auxIntToFloat64(v_1.AuxInt)
 19766  			v.reset(OpConstBool)
 19767  			v.AuxInt = boolToAuxInt(c != d)
 19768  			return true
 19769  		}
 19770  		break
 19771  	}
 19772  	return false
 19773  }
 19774  func rewriteValuegeneric_OpNeq8(v *Value) bool {
 19775  	v_1 := v.Args[1]
 19776  	v_0 := v.Args[0]
 19777  	b := v.Block
 19778  	typ := &b.Func.Config.Types
 19779  	// match: (Neq8 x x)
 19780  	// result: (ConstBool [false])
 19781  	for {
 19782  		x := v_0
 19783  		if x != v_1 {
 19784  			break
 19785  		}
 19786  		v.reset(OpConstBool)
 19787  		v.AuxInt = boolToAuxInt(false)
 19788  		return true
 19789  	}
 19790  	// match: (Neq8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x))
 19791  	// result: (Neq8 (Const8 <t> [c-d]) x)
 19792  	for {
 19793  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19794  			if v_0.Op != OpConst8 {
 19795  				continue
 19796  			}
 19797  			t := v_0.Type
 19798  			c := auxIntToInt8(v_0.AuxInt)
 19799  			if v_1.Op != OpAdd8 {
 19800  				continue
 19801  			}
 19802  			_ = v_1.Args[1]
 19803  			v_1_0 := v_1.Args[0]
 19804  			v_1_1 := v_1.Args[1]
 19805  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 19806  				if v_1_0.Op != OpConst8 || v_1_0.Type != t {
 19807  					continue
 19808  				}
 19809  				d := auxIntToInt8(v_1_0.AuxInt)
 19810  				x := v_1_1
 19811  				v.reset(OpNeq8)
 19812  				v0 := b.NewValue0(v.Pos, OpConst8, t)
 19813  				v0.AuxInt = int8ToAuxInt(c - d)
 19814  				v.AddArg2(v0, x)
 19815  				return true
 19816  			}
 19817  		}
 19818  		break
 19819  	}
 19820  	// match: (Neq8 (Const8 [c]) (Const8 [d]))
 19821  	// result: (ConstBool [c != d])
 19822  	for {
 19823  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19824  			if v_0.Op != OpConst8 {
 19825  				continue
 19826  			}
 19827  			c := auxIntToInt8(v_0.AuxInt)
 19828  			if v_1.Op != OpConst8 {
 19829  				continue
 19830  			}
 19831  			d := auxIntToInt8(v_1.AuxInt)
 19832  			v.reset(OpConstBool)
 19833  			v.AuxInt = boolToAuxInt(c != d)
 19834  			return true
 19835  		}
 19836  		break
 19837  	}
 19838  	// match: (Neq8 n (Lsh8x64 (Rsh8x64 (Add8 <t> n (Rsh8Ux64 <t> (Rsh8x64 <t> n (Const64 <typ.UInt64> [ 7])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) )
 19839  	// cond: k > 0 && k < 7 && kbar == 8 - k
 19840  	// result: (Neq8 (And8 <t> n (Const8 <t> [1<<uint(k)-1])) (Const8 <t> [0]))
 19841  	for {
 19842  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19843  			n := v_0
 19844  			if v_1.Op != OpLsh8x64 {
 19845  				continue
 19846  			}
 19847  			_ = v_1.Args[1]
 19848  			v_1_0 := v_1.Args[0]
 19849  			if v_1_0.Op != OpRsh8x64 {
 19850  				continue
 19851  			}
 19852  			_ = v_1_0.Args[1]
 19853  			v_1_0_0 := v_1_0.Args[0]
 19854  			if v_1_0_0.Op != OpAdd8 {
 19855  				continue
 19856  			}
 19857  			t := v_1_0_0.Type
 19858  			_ = v_1_0_0.Args[1]
 19859  			v_1_0_0_0 := v_1_0_0.Args[0]
 19860  			v_1_0_0_1 := v_1_0_0.Args[1]
 19861  			for _i1 := 0; _i1 <= 1; _i1, v_1_0_0_0, v_1_0_0_1 = _i1+1, v_1_0_0_1, v_1_0_0_0 {
 19862  				if n != v_1_0_0_0 || v_1_0_0_1.Op != OpRsh8Ux64 || v_1_0_0_1.Type != t {
 19863  					continue
 19864  				}
 19865  				_ = v_1_0_0_1.Args[1]
 19866  				v_1_0_0_1_0 := v_1_0_0_1.Args[0]
 19867  				if v_1_0_0_1_0.Op != OpRsh8x64 || v_1_0_0_1_0.Type != t {
 19868  					continue
 19869  				}
 19870  				_ = v_1_0_0_1_0.Args[1]
 19871  				if n != v_1_0_0_1_0.Args[0] {
 19872  					continue
 19873  				}
 19874  				v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
 19875  				if v_1_0_0_1_0_1.Op != OpConst64 || v_1_0_0_1_0_1.Type != typ.UInt64 || auxIntToInt64(v_1_0_0_1_0_1.AuxInt) != 7 {
 19876  					continue
 19877  				}
 19878  				v_1_0_0_1_1 := v_1_0_0_1.Args[1]
 19879  				if v_1_0_0_1_1.Op != OpConst64 || v_1_0_0_1_1.Type != typ.UInt64 {
 19880  					continue
 19881  				}
 19882  				kbar := auxIntToInt64(v_1_0_0_1_1.AuxInt)
 19883  				v_1_0_1 := v_1_0.Args[1]
 19884  				if v_1_0_1.Op != OpConst64 || v_1_0_1.Type != typ.UInt64 {
 19885  					continue
 19886  				}
 19887  				k := auxIntToInt64(v_1_0_1.AuxInt)
 19888  				v_1_1 := v_1.Args[1]
 19889  				if v_1_1.Op != OpConst64 || v_1_1.Type != typ.UInt64 || auxIntToInt64(v_1_1.AuxInt) != k || !(k > 0 && k < 7 && kbar == 8-k) {
 19890  					continue
 19891  				}
 19892  				v.reset(OpNeq8)
 19893  				v0 := b.NewValue0(v.Pos, OpAnd8, t)
 19894  				v1 := b.NewValue0(v.Pos, OpConst8, t)
 19895  				v1.AuxInt = int8ToAuxInt(1<<uint(k) - 1)
 19896  				v0.AddArg2(n, v1)
 19897  				v2 := b.NewValue0(v.Pos, OpConst8, t)
 19898  				v2.AuxInt = int8ToAuxInt(0)
 19899  				v.AddArg2(v0, v2)
 19900  				return true
 19901  			}
 19902  		}
 19903  		break
 19904  	}
 19905  	// match: (Neq8 s:(Sub8 x y) (Const8 [0]))
 19906  	// cond: s.Uses == 1
 19907  	// result: (Neq8 x y)
 19908  	for {
 19909  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19910  			s := v_0
 19911  			if s.Op != OpSub8 {
 19912  				continue
 19913  			}
 19914  			y := s.Args[1]
 19915  			x := s.Args[0]
 19916  			if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != 0 || !(s.Uses == 1) {
 19917  				continue
 19918  			}
 19919  			v.reset(OpNeq8)
 19920  			v.AddArg2(x, y)
 19921  			return true
 19922  		}
 19923  		break
 19924  	}
 19925  	// match: (Neq8 (And8 <t> x (Const8 <t> [y])) (Const8 <t> [y]))
 19926  	// cond: oneBit8(y)
 19927  	// result: (Eq8 (And8 <t> x (Const8 <t> [y])) (Const8 <t> [0]))
 19928  	for {
 19929  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19930  			if v_0.Op != OpAnd8 {
 19931  				continue
 19932  			}
 19933  			t := v_0.Type
 19934  			_ = v_0.Args[1]
 19935  			v_0_0 := v_0.Args[0]
 19936  			v_0_1 := v_0.Args[1]
 19937  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 19938  				x := v_0_0
 19939  				if v_0_1.Op != OpConst8 || v_0_1.Type != t {
 19940  					continue
 19941  				}
 19942  				y := auxIntToInt8(v_0_1.AuxInt)
 19943  				if v_1.Op != OpConst8 || v_1.Type != t || auxIntToInt8(v_1.AuxInt) != y || !(oneBit8(y)) {
 19944  					continue
 19945  				}
 19946  				v.reset(OpEq8)
 19947  				v0 := b.NewValue0(v.Pos, OpAnd8, t)
 19948  				v1 := b.NewValue0(v.Pos, OpConst8, t)
 19949  				v1.AuxInt = int8ToAuxInt(y)
 19950  				v0.AddArg2(x, v1)
 19951  				v2 := b.NewValue0(v.Pos, OpConst8, t)
 19952  				v2.AuxInt = int8ToAuxInt(0)
 19953  				v.AddArg2(v0, v2)
 19954  				return true
 19955  			}
 19956  		}
 19957  		break
 19958  	}
 19959  	return false
 19960  }
 19961  func rewriteValuegeneric_OpNeqB(v *Value) bool {
 19962  	v_1 := v.Args[1]
 19963  	v_0 := v.Args[0]
 19964  	// match: (NeqB (ConstBool [c]) (ConstBool [d]))
 19965  	// result: (ConstBool [c != d])
 19966  	for {
 19967  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19968  			if v_0.Op != OpConstBool {
 19969  				continue
 19970  			}
 19971  			c := auxIntToBool(v_0.AuxInt)
 19972  			if v_1.Op != OpConstBool {
 19973  				continue
 19974  			}
 19975  			d := auxIntToBool(v_1.AuxInt)
 19976  			v.reset(OpConstBool)
 19977  			v.AuxInt = boolToAuxInt(c != d)
 19978  			return true
 19979  		}
 19980  		break
 19981  	}
 19982  	// match: (NeqB (ConstBool [false]) x)
 19983  	// result: x
 19984  	for {
 19985  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19986  			if v_0.Op != OpConstBool || auxIntToBool(v_0.AuxInt) != false {
 19987  				continue
 19988  			}
 19989  			x := v_1
 19990  			v.copyOf(x)
 19991  			return true
 19992  		}
 19993  		break
 19994  	}
 19995  	// match: (NeqB (ConstBool [true]) x)
 19996  	// result: (Not x)
 19997  	for {
 19998  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 19999  			if v_0.Op != OpConstBool || auxIntToBool(v_0.AuxInt) != true {
 20000  				continue
 20001  			}
 20002  			x := v_1
 20003  			v.reset(OpNot)
 20004  			v.AddArg(x)
 20005  			return true
 20006  		}
 20007  		break
 20008  	}
 20009  	// match: (NeqB (Not x) (Not y))
 20010  	// result: (NeqB x y)
 20011  	for {
 20012  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20013  			if v_0.Op != OpNot {
 20014  				continue
 20015  			}
 20016  			x := v_0.Args[0]
 20017  			if v_1.Op != OpNot {
 20018  				continue
 20019  			}
 20020  			y := v_1.Args[0]
 20021  			v.reset(OpNeqB)
 20022  			v.AddArg2(x, y)
 20023  			return true
 20024  		}
 20025  		break
 20026  	}
 20027  	return false
 20028  }
 20029  func rewriteValuegeneric_OpNeqInter(v *Value) bool {
 20030  	v_1 := v.Args[1]
 20031  	v_0 := v.Args[0]
 20032  	b := v.Block
 20033  	typ := &b.Func.Config.Types
 20034  	// match: (NeqInter x y)
 20035  	// result: (NeqPtr (ITab x) (ITab y))
 20036  	for {
 20037  		x := v_0
 20038  		y := v_1
 20039  		v.reset(OpNeqPtr)
 20040  		v0 := b.NewValue0(v.Pos, OpITab, typ.Uintptr)
 20041  		v0.AddArg(x)
 20042  		v1 := b.NewValue0(v.Pos, OpITab, typ.Uintptr)
 20043  		v1.AddArg(y)
 20044  		v.AddArg2(v0, v1)
 20045  		return true
 20046  	}
 20047  }
 20048  func rewriteValuegeneric_OpNeqPtr(v *Value) bool {
 20049  	v_1 := v.Args[1]
 20050  	v_0 := v.Args[0]
 20051  	// match: (NeqPtr x x)
 20052  	// result: (ConstBool [false])
 20053  	for {
 20054  		x := v_0
 20055  		if x != v_1 {
 20056  			break
 20057  		}
 20058  		v.reset(OpConstBool)
 20059  		v.AuxInt = boolToAuxInt(false)
 20060  		return true
 20061  	}
 20062  	// match: (NeqPtr (Addr {x} _) (Addr {y} _))
 20063  	// result: (ConstBool [x != y])
 20064  	for {
 20065  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20066  			if v_0.Op != OpAddr {
 20067  				continue
 20068  			}
 20069  			x := auxToSym(v_0.Aux)
 20070  			if v_1.Op != OpAddr {
 20071  				continue
 20072  			}
 20073  			y := auxToSym(v_1.Aux)
 20074  			v.reset(OpConstBool)
 20075  			v.AuxInt = boolToAuxInt(x != y)
 20076  			return true
 20077  		}
 20078  		break
 20079  	}
 20080  	// match: (NeqPtr (Addr {x} _) (OffPtr [o] (Addr {y} _)))
 20081  	// result: (ConstBool [x != y || o != 0])
 20082  	for {
 20083  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20084  			if v_0.Op != OpAddr {
 20085  				continue
 20086  			}
 20087  			x := auxToSym(v_0.Aux)
 20088  			if v_1.Op != OpOffPtr {
 20089  				continue
 20090  			}
 20091  			o := auxIntToInt64(v_1.AuxInt)
 20092  			v_1_0 := v_1.Args[0]
 20093  			if v_1_0.Op != OpAddr {
 20094  				continue
 20095  			}
 20096  			y := auxToSym(v_1_0.Aux)
 20097  			v.reset(OpConstBool)
 20098  			v.AuxInt = boolToAuxInt(x != y || o != 0)
 20099  			return true
 20100  		}
 20101  		break
 20102  	}
 20103  	// match: (NeqPtr (OffPtr [o1] (Addr {x} _)) (OffPtr [o2] (Addr {y} _)))
 20104  	// result: (ConstBool [x != y || o1 != o2])
 20105  	for {
 20106  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20107  			if v_0.Op != OpOffPtr {
 20108  				continue
 20109  			}
 20110  			o1 := auxIntToInt64(v_0.AuxInt)
 20111  			v_0_0 := v_0.Args[0]
 20112  			if v_0_0.Op != OpAddr {
 20113  				continue
 20114  			}
 20115  			x := auxToSym(v_0_0.Aux)
 20116  			if v_1.Op != OpOffPtr {
 20117  				continue
 20118  			}
 20119  			o2 := auxIntToInt64(v_1.AuxInt)
 20120  			v_1_0 := v_1.Args[0]
 20121  			if v_1_0.Op != OpAddr {
 20122  				continue
 20123  			}
 20124  			y := auxToSym(v_1_0.Aux)
 20125  			v.reset(OpConstBool)
 20126  			v.AuxInt = boolToAuxInt(x != y || o1 != o2)
 20127  			return true
 20128  		}
 20129  		break
 20130  	}
 20131  	// match: (NeqPtr (LocalAddr {x} _ _) (LocalAddr {y} _ _))
 20132  	// result: (ConstBool [x != y])
 20133  	for {
 20134  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20135  			if v_0.Op != OpLocalAddr {
 20136  				continue
 20137  			}
 20138  			x := auxToSym(v_0.Aux)
 20139  			if v_1.Op != OpLocalAddr {
 20140  				continue
 20141  			}
 20142  			y := auxToSym(v_1.Aux)
 20143  			v.reset(OpConstBool)
 20144  			v.AuxInt = boolToAuxInt(x != y)
 20145  			return true
 20146  		}
 20147  		break
 20148  	}
 20149  	// match: (NeqPtr (LocalAddr {x} _ _) (OffPtr [o] (LocalAddr {y} _ _)))
 20150  	// result: (ConstBool [x != y || o != 0])
 20151  	for {
 20152  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20153  			if v_0.Op != OpLocalAddr {
 20154  				continue
 20155  			}
 20156  			x := auxToSym(v_0.Aux)
 20157  			if v_1.Op != OpOffPtr {
 20158  				continue
 20159  			}
 20160  			o := auxIntToInt64(v_1.AuxInt)
 20161  			v_1_0 := v_1.Args[0]
 20162  			if v_1_0.Op != OpLocalAddr {
 20163  				continue
 20164  			}
 20165  			y := auxToSym(v_1_0.Aux)
 20166  			v.reset(OpConstBool)
 20167  			v.AuxInt = boolToAuxInt(x != y || o != 0)
 20168  			return true
 20169  		}
 20170  		break
 20171  	}
 20172  	// match: (NeqPtr (OffPtr [o1] (LocalAddr {x} _ _)) (OffPtr [o2] (LocalAddr {y} _ _)))
 20173  	// result: (ConstBool [x != y || o1 != o2])
 20174  	for {
 20175  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20176  			if v_0.Op != OpOffPtr {
 20177  				continue
 20178  			}
 20179  			o1 := auxIntToInt64(v_0.AuxInt)
 20180  			v_0_0 := v_0.Args[0]
 20181  			if v_0_0.Op != OpLocalAddr {
 20182  				continue
 20183  			}
 20184  			x := auxToSym(v_0_0.Aux)
 20185  			if v_1.Op != OpOffPtr {
 20186  				continue
 20187  			}
 20188  			o2 := auxIntToInt64(v_1.AuxInt)
 20189  			v_1_0 := v_1.Args[0]
 20190  			if v_1_0.Op != OpLocalAddr {
 20191  				continue
 20192  			}
 20193  			y := auxToSym(v_1_0.Aux)
 20194  			v.reset(OpConstBool)
 20195  			v.AuxInt = boolToAuxInt(x != y || o1 != o2)
 20196  			return true
 20197  		}
 20198  		break
 20199  	}
 20200  	// match: (NeqPtr (OffPtr [o1] p1) p2)
 20201  	// cond: isSamePtr(p1, p2)
 20202  	// result: (ConstBool [o1 != 0])
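	// Note: an offset from a pointer differs from that same pointer exactly when
	// the offset is nonzero.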
 20203  	for {
 20204  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20205  			if v_0.Op != OpOffPtr {
 20206  				continue
 20207  			}
 20208  			o1 := auxIntToInt64(v_0.AuxInt)
 20209  			p1 := v_0.Args[0]
 20210  			p2 := v_1
 20211  			if !(isSamePtr(p1, p2)) {
 20212  				continue
 20213  			}
 20214  			v.reset(OpConstBool)
 20215  			v.AuxInt = boolToAuxInt(o1 != 0)
 20216  			return true
 20217  		}
 20218  		break
 20219  	}
 20220  	// match: (NeqPtr (OffPtr [o1] p1) (OffPtr [o2] p2))
 20221  	// cond: isSamePtr(p1, p2)
 20222  	// result: (ConstBool [o1 != o2])
 20223  	for {
 20224  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20225  			if v_0.Op != OpOffPtr {
 20226  				continue
 20227  			}
 20228  			o1 := auxIntToInt64(v_0.AuxInt)
 20229  			p1 := v_0.Args[0]
 20230  			if v_1.Op != OpOffPtr {
 20231  				continue
 20232  			}
 20233  			o2 := auxIntToInt64(v_1.AuxInt)
 20234  			p2 := v_1.Args[0]
 20235  			if !(isSamePtr(p1, p2)) {
 20236  				continue
 20237  			}
 20238  			v.reset(OpConstBool)
 20239  			v.AuxInt = boolToAuxInt(o1 != o2)
 20240  			return true
 20241  		}
 20242  		break
 20243  	}
 20244  	// match: (NeqPtr (Const32 [c]) (Const32 [d]))
 20245  	// result: (ConstBool [c != d])
 20246  	for {
 20247  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20248  			if v_0.Op != OpConst32 {
 20249  				continue
 20250  			}
 20251  			c := auxIntToInt32(v_0.AuxInt)
 20252  			if v_1.Op != OpConst32 {
 20253  				continue
 20254  			}
 20255  			d := auxIntToInt32(v_1.AuxInt)
 20256  			v.reset(OpConstBool)
 20257  			v.AuxInt = boolToAuxInt(c != d)
 20258  			return true
 20259  		}
 20260  		break
 20261  	}
 20262  	// match: (NeqPtr (Const64 [c]) (Const64 [d]))
 20263  	// result: (ConstBool [c != d])
 20264  	for {
 20265  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20266  			if v_0.Op != OpConst64 {
 20267  				continue
 20268  			}
 20269  			c := auxIntToInt64(v_0.AuxInt)
 20270  			if v_1.Op != OpConst64 {
 20271  				continue
 20272  			}
 20273  			d := auxIntToInt64(v_1.AuxInt)
 20274  			v.reset(OpConstBool)
 20275  			v.AuxInt = boolToAuxInt(c != d)
 20276  			return true
 20277  		}
 20278  		break
 20279  	}
 20280  	// match: (NeqPtr (Convert (Addr {x} _) _) (Addr {y} _))
 20281  	// result: (ConstBool [x!=y])
 20282  	for {
 20283  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20284  			if v_0.Op != OpConvert {
 20285  				continue
 20286  			}
 20287  			v_0_0 := v_0.Args[0]
 20288  			if v_0_0.Op != OpAddr {
 20289  				continue
 20290  			}
 20291  			x := auxToSym(v_0_0.Aux)
 20292  			if v_1.Op != OpAddr {
 20293  				continue
 20294  			}
 20295  			y := auxToSym(v_1.Aux)
 20296  			v.reset(OpConstBool)
 20297  			v.AuxInt = boolToAuxInt(x != y)
 20298  			return true
 20299  		}
 20300  		break
 20301  	}
 20302  	// match: (NeqPtr (LocalAddr _ _) (Addr _))
 20303  	// result: (ConstBool [true])
 20304  	for {
 20305  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20306  			if v_0.Op != OpLocalAddr || v_1.Op != OpAddr {
 20307  				continue
 20308  			}
 20309  			v.reset(OpConstBool)
 20310  			v.AuxInt = boolToAuxInt(true)
 20311  			return true
 20312  		}
 20313  		break
 20314  	}
 20315  	// match: (NeqPtr (OffPtr (LocalAddr _ _)) (Addr _))
 20316  	// result: (ConstBool [true])
 20317  	for {
 20318  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20319  			if v_0.Op != OpOffPtr {
 20320  				continue
 20321  			}
 20322  			v_0_0 := v_0.Args[0]
 20323  			if v_0_0.Op != OpLocalAddr || v_1.Op != OpAddr {
 20324  				continue
 20325  			}
 20326  			v.reset(OpConstBool)
 20327  			v.AuxInt = boolToAuxInt(true)
 20328  			return true
 20329  		}
 20330  		break
 20331  	}
 20332  	// match: (NeqPtr (LocalAddr _ _) (OffPtr (Addr _)))
 20333  	// result: (ConstBool [true])
 20334  	for {
 20335  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20336  			if v_0.Op != OpLocalAddr || v_1.Op != OpOffPtr {
 20337  				continue
 20338  			}
 20339  			v_1_0 := v_1.Args[0]
 20340  			if v_1_0.Op != OpAddr {
 20341  				continue
 20342  			}
 20343  			v.reset(OpConstBool)
 20344  			v.AuxInt = boolToAuxInt(true)
 20345  			return true
 20346  		}
 20347  		break
 20348  	}
 20349  	// match: (NeqPtr (OffPtr (LocalAddr _ _)) (OffPtr (Addr _)))
 20350  	// result: (ConstBool [true])
 20351  	for {
 20352  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20353  			if v_0.Op != OpOffPtr {
 20354  				continue
 20355  			}
 20356  			v_0_0 := v_0.Args[0]
 20357  			if v_0_0.Op != OpLocalAddr || v_1.Op != OpOffPtr {
 20358  				continue
 20359  			}
 20360  			v_1_0 := v_1.Args[0]
 20361  			if v_1_0.Op != OpAddr {
 20362  				continue
 20363  			}
 20364  			v.reset(OpConstBool)
 20365  			v.AuxInt = boolToAuxInt(true)
 20366  			return true
 20367  		}
 20368  		break
 20369  	}
 20370  	// match: (NeqPtr (AddPtr p1 o1) p2)
 20371  	// cond: isSamePtr(p1, p2)
 20372  	// result: (IsNonNil o1)
 20373  	for {
 20374  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20375  			if v_0.Op != OpAddPtr {
 20376  				continue
 20377  			}
 20378  			o1 := v_0.Args[1]
 20379  			p1 := v_0.Args[0]
 20380  			p2 := v_1
 20381  			if !(isSamePtr(p1, p2)) {
 20382  				continue
 20383  			}
 20384  			v.reset(OpIsNonNil)
 20385  			v.AddArg(o1)
 20386  			return true
 20387  		}
 20388  		break
 20389  	}
 20390  	// match: (NeqPtr (Const32 [0]) p)
 20391  	// result: (IsNonNil p)
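	// Note: a zero pointer constant here stands for nil (Const32 on 32-bit targets;
	// the Const64 and ConstNil cases follow), so p != nil becomes an explicit IsNonNil test.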
 20392  	for {
 20393  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20394  			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 20395  				continue
 20396  			}
 20397  			p := v_1
 20398  			v.reset(OpIsNonNil)
 20399  			v.AddArg(p)
 20400  			return true
 20401  		}
 20402  		break
 20403  	}
 20404  	// match: (NeqPtr (Const64 [0]) p)
 20405  	// result: (IsNonNil p)
 20406  	for {
 20407  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20408  			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 20409  				continue
 20410  			}
 20411  			p := v_1
 20412  			v.reset(OpIsNonNil)
 20413  			v.AddArg(p)
 20414  			return true
 20415  		}
 20416  		break
 20417  	}
 20418  	// match: (NeqPtr (ConstNil) p)
 20419  	// result: (IsNonNil p)
 20420  	for {
 20421  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20422  			if v_0.Op != OpConstNil {
 20423  				continue
 20424  			}
 20425  			p := v_1
 20426  			v.reset(OpIsNonNil)
 20427  			v.AddArg(p)
 20428  			return true
 20429  		}
 20430  		break
 20431  	}
 20432  	return false
 20433  }
 20434  func rewriteValuegeneric_OpNeqSlice(v *Value) bool {
 20435  	v_1 := v.Args[1]
 20436  	v_0 := v.Args[0]
 20437  	b := v.Block
 20438  	typ := &b.Func.Config.Types
 20439  	// match: (NeqSlice x y)
 20440  	// result: (NeqPtr (SlicePtr x) (SlicePtr y))
 20441  	for {
 20442  		x := v_0
 20443  		y := v_1
 20444  		v.reset(OpNeqPtr)
 20445  		v0 := b.NewValue0(v.Pos, OpSlicePtr, typ.BytePtr)
 20446  		v0.AddArg(x)
 20447  		v1 := b.NewValue0(v.Pos, OpSlicePtr, typ.BytePtr)
 20448  		v1.AddArg(y)
 20449  		v.AddArg2(v0, v1)
 20450  		return true
 20451  	}
 20452  }
 20453  func rewriteValuegeneric_OpNilCheck(v *Value) bool {
 20454  	v_1 := v.Args[1]
 20455  	v_0 := v.Args[0]
 20456  	b := v.Block
 20457  	fe := b.Func.fe
 20458  	// match: (NilCheck ptr:(GetG mem) mem)
 20459  	// result: ptr
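	// Note: the g pointer produced by GetG is never nil, so the check is redundant.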
 20460  	for {
 20461  		ptr := v_0
 20462  		if ptr.Op != OpGetG {
 20463  			break
 20464  		}
 20465  		mem := ptr.Args[0]
 20466  		if mem != v_1 {
 20467  			break
 20468  		}
 20469  		v.copyOf(ptr)
 20470  		return true
 20471  	}
 20472  	// match: (NilCheck ptr:(SelectN [0] call:(StaticLECall _ _)) _)
 20473  	// cond: isSameCall(call.Aux, "runtime.newobject") && warnRule(fe.Debug_checknil(), v, "removed nil check")
 20474  	// result: ptr
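	// Note: runtime.newobject never returns nil, so the nil check on its result is
	// dropped; warnRule reports "removed nil check" when the checknil debug flag is set.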
 20475  	for {
 20476  		ptr := v_0
 20477  		if ptr.Op != OpSelectN || auxIntToInt64(ptr.AuxInt) != 0 {
 20478  			break
 20479  		}
 20480  		call := ptr.Args[0]
 20481  		if call.Op != OpStaticLECall || len(call.Args) != 2 || !(isSameCall(call.Aux, "runtime.newobject") && warnRule(fe.Debug_checknil(), v, "removed nil check")) {
 20482  			break
 20483  		}
 20484  		v.copyOf(ptr)
 20485  		return true
 20486  	}
 20487  	// match: (NilCheck ptr:(OffPtr (SelectN [0] call:(StaticLECall _ _))) _)
 20488  	// cond: isSameCall(call.Aux, "runtime.newobject") && warnRule(fe.Debug_checknil(), v, "removed nil check")
 20489  	// result: ptr
 20490  	for {
 20491  		ptr := v_0
 20492  		if ptr.Op != OpOffPtr {
 20493  			break
 20494  		}
 20495  		ptr_0 := ptr.Args[0]
 20496  		if ptr_0.Op != OpSelectN || auxIntToInt64(ptr_0.AuxInt) != 0 {
 20497  			break
 20498  		}
 20499  		call := ptr_0.Args[0]
 20500  		if call.Op != OpStaticLECall || len(call.Args) != 2 || !(isSameCall(call.Aux, "runtime.newobject") && warnRule(fe.Debug_checknil(), v, "removed nil check")) {
 20501  			break
 20502  		}
 20503  		v.copyOf(ptr)
 20504  		return true
 20505  	}
 20506  	// match: (NilCheck ptr:(Addr {_} (SB)) _)
 20507  	// result: ptr
 20508  	for {
 20509  		ptr := v_0
 20510  		if ptr.Op != OpAddr {
 20511  			break
 20512  		}
 20513  		ptr_0 := ptr.Args[0]
 20514  		if ptr_0.Op != OpSB {
 20515  			break
 20516  		}
 20517  		v.copyOf(ptr)
 20518  		return true
 20519  	}
 20520  	// match: (NilCheck ptr:(Convert (Addr {_} (SB)) _) _)
 20521  	// result: ptr
 20522  	for {
 20523  		ptr := v_0
 20524  		if ptr.Op != OpConvert {
 20525  			break
 20526  		}
 20527  		ptr_0 := ptr.Args[0]
 20528  		if ptr_0.Op != OpAddr {
 20529  			break
 20530  		}
 20531  		ptr_0_0 := ptr_0.Args[0]
 20532  		if ptr_0_0.Op != OpSB {
 20533  			break
 20534  		}
 20535  		v.copyOf(ptr)
 20536  		return true
 20537  	}
 20538  	return false
 20539  }
 20540  func rewriteValuegeneric_OpNot(v *Value) bool {
 20541  	v_0 := v.Args[0]
 20542  	// match: (Not (ConstBool [c]))
 20543  	// result: (ConstBool [!c])
 20544  	for {
 20545  		if v_0.Op != OpConstBool {
 20546  			break
 20547  		}
 20548  		c := auxIntToBool(v_0.AuxInt)
 20549  		v.reset(OpConstBool)
 20550  		v.AuxInt = boolToAuxInt(!c)
 20551  		return true
 20552  	}
 20553  	// match: (Not (Eq64 x y))
 20554  	// result: (Neq64 x y)
 20555  	for {
 20556  		if v_0.Op != OpEq64 {
 20557  			break
 20558  		}
 20559  		y := v_0.Args[1]
 20560  		x := v_0.Args[0]
 20561  		v.reset(OpNeq64)
 20562  		v.AddArg2(x, y)
 20563  		return true
 20564  	}
 20565  	// match: (Not (Eq32 x y))
 20566  	// result: (Neq32 x y)
 20567  	for {
 20568  		if v_0.Op != OpEq32 {
 20569  			break
 20570  		}
 20571  		y := v_0.Args[1]
 20572  		x := v_0.Args[0]
 20573  		v.reset(OpNeq32)
 20574  		v.AddArg2(x, y)
 20575  		return true
 20576  	}
 20577  	// match: (Not (Eq16 x y))
 20578  	// result: (Neq16 x y)
 20579  	for {
 20580  		if v_0.Op != OpEq16 {
 20581  			break
 20582  		}
 20583  		y := v_0.Args[1]
 20584  		x := v_0.Args[0]
 20585  		v.reset(OpNeq16)
 20586  		v.AddArg2(x, y)
 20587  		return true
 20588  	}
 20589  	// match: (Not (Eq8 x y))
 20590  	// result: (Neq8 x y)
 20591  	for {
 20592  		if v_0.Op != OpEq8 {
 20593  			break
 20594  		}
 20595  		y := v_0.Args[1]
 20596  		x := v_0.Args[0]
 20597  		v.reset(OpNeq8)
 20598  		v.AddArg2(x, y)
 20599  		return true
 20600  	}
 20601  	// match: (Not (EqB x y))
 20602  	// result: (NeqB x y)
 20603  	for {
 20604  		if v_0.Op != OpEqB {
 20605  			break
 20606  		}
 20607  		y := v_0.Args[1]
 20608  		x := v_0.Args[0]
 20609  		v.reset(OpNeqB)
 20610  		v.AddArg2(x, y)
 20611  		return true
 20612  	}
 20613  	// match: (Not (EqPtr x y))
 20614  	// result: (NeqPtr x y)
 20615  	for {
 20616  		if v_0.Op != OpEqPtr {
 20617  			break
 20618  		}
 20619  		y := v_0.Args[1]
 20620  		x := v_0.Args[0]
 20621  		v.reset(OpNeqPtr)
 20622  		v.AddArg2(x, y)
 20623  		return true
 20624  	}
 20625  	// match: (Not (Eq64F x y))
 20626  	// result: (Neq64F x y)
 20627  	for {
 20628  		if v_0.Op != OpEq64F {
 20629  			break
 20630  		}
 20631  		y := v_0.Args[1]
 20632  		x := v_0.Args[0]
 20633  		v.reset(OpNeq64F)
 20634  		v.AddArg2(x, y)
 20635  		return true
 20636  	}
 20637  	// match: (Not (Eq32F x y))
 20638  	// result: (Neq32F x y)
 20639  	for {
 20640  		if v_0.Op != OpEq32F {
 20641  			break
 20642  		}
 20643  		y := v_0.Args[1]
 20644  		x := v_0.Args[0]
 20645  		v.reset(OpNeq32F)
 20646  		v.AddArg2(x, y)
 20647  		return true
 20648  	}
 20649  	// match: (Not (Neq64 x y))
 20650  	// result: (Eq64 x y)
 20651  	for {
 20652  		if v_0.Op != OpNeq64 {
 20653  			break
 20654  		}
 20655  		y := v_0.Args[1]
 20656  		x := v_0.Args[0]
 20657  		v.reset(OpEq64)
 20658  		v.AddArg2(x, y)
 20659  		return true
 20660  	}
 20661  	// match: (Not (Neq32 x y))
 20662  	// result: (Eq32 x y)
 20663  	for {
 20664  		if v_0.Op != OpNeq32 {
 20665  			break
 20666  		}
 20667  		y := v_0.Args[1]
 20668  		x := v_0.Args[0]
 20669  		v.reset(OpEq32)
 20670  		v.AddArg2(x, y)
 20671  		return true
 20672  	}
 20673  	// match: (Not (Neq16 x y))
 20674  	// result: (Eq16 x y)
 20675  	for {
 20676  		if v_0.Op != OpNeq16 {
 20677  			break
 20678  		}
 20679  		y := v_0.Args[1]
 20680  		x := v_0.Args[0]
 20681  		v.reset(OpEq16)
 20682  		v.AddArg2(x, y)
 20683  		return true
 20684  	}
 20685  	// match: (Not (Neq8 x y))
 20686  	// result: (Eq8 x y)
 20687  	for {
 20688  		if v_0.Op != OpNeq8 {
 20689  			break
 20690  		}
 20691  		y := v_0.Args[1]
 20692  		x := v_0.Args[0]
 20693  		v.reset(OpEq8)
 20694  		v.AddArg2(x, y)
 20695  		return true
 20696  	}
 20697  	// match: (Not (NeqB x y))
 20698  	// result: (EqB x y)
 20699  	for {
 20700  		if v_0.Op != OpNeqB {
 20701  			break
 20702  		}
 20703  		y := v_0.Args[1]
 20704  		x := v_0.Args[0]
 20705  		v.reset(OpEqB)
 20706  		v.AddArg2(x, y)
 20707  		return true
 20708  	}
 20709  	// match: (Not (NeqPtr x y))
 20710  	// result: (EqPtr x y)
 20711  	for {
 20712  		if v_0.Op != OpNeqPtr {
 20713  			break
 20714  		}
 20715  		y := v_0.Args[1]
 20716  		x := v_0.Args[0]
 20717  		v.reset(OpEqPtr)
 20718  		v.AddArg2(x, y)
 20719  		return true
 20720  	}
 20721  	// match: (Not (Neq64F x y))
 20722  	// result: (Eq64F x y)
 20723  	for {
 20724  		if v_0.Op != OpNeq64F {
 20725  			break
 20726  		}
 20727  		y := v_0.Args[1]
 20728  		x := v_0.Args[0]
 20729  		v.reset(OpEq64F)
 20730  		v.AddArg2(x, y)
 20731  		return true
 20732  	}
 20733  	// match: (Not (Neq32F x y))
 20734  	// result: (Eq32F x y)
 20735  	for {
 20736  		if v_0.Op != OpNeq32F {
 20737  			break
 20738  		}
 20739  		y := v_0.Args[1]
 20740  		x := v_0.Args[0]
 20741  		v.reset(OpEq32F)
 20742  		v.AddArg2(x, y)
 20743  		return true
 20744  	}
 20745  	// match: (Not (Less64 x y))
 20746  	// result: (Leq64 y x)
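	// Note: negated comparisons flip: !(x < y) is y <= x; the unsigned and
	// narrower-width variants below, and the Leq-to-Less rules after them,
	// follow the same pattern.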
 20747  	for {
 20748  		if v_0.Op != OpLess64 {
 20749  			break
 20750  		}
 20751  		y := v_0.Args[1]
 20752  		x := v_0.Args[0]
 20753  		v.reset(OpLeq64)
 20754  		v.AddArg2(y, x)
 20755  		return true
 20756  	}
 20757  	// match: (Not (Less32 x y))
 20758  	// result: (Leq32 y x)
 20759  	for {
 20760  		if v_0.Op != OpLess32 {
 20761  			break
 20762  		}
 20763  		y := v_0.Args[1]
 20764  		x := v_0.Args[0]
 20765  		v.reset(OpLeq32)
 20766  		v.AddArg2(y, x)
 20767  		return true
 20768  	}
 20769  	// match: (Not (Less16 x y))
 20770  	// result: (Leq16 y x)
 20771  	for {
 20772  		if v_0.Op != OpLess16 {
 20773  			break
 20774  		}
 20775  		y := v_0.Args[1]
 20776  		x := v_0.Args[0]
 20777  		v.reset(OpLeq16)
 20778  		v.AddArg2(y, x)
 20779  		return true
 20780  	}
 20781  	// match: (Not (Less8 x y))
 20782  	// result: (Leq8 y x)
 20783  	for {
 20784  		if v_0.Op != OpLess8 {
 20785  			break
 20786  		}
 20787  		y := v_0.Args[1]
 20788  		x := v_0.Args[0]
 20789  		v.reset(OpLeq8)
 20790  		v.AddArg2(y, x)
 20791  		return true
 20792  	}
 20793  	// match: (Not (Less64U x y))
 20794  	// result: (Leq64U y x)
 20795  	for {
 20796  		if v_0.Op != OpLess64U {
 20797  			break
 20798  		}
 20799  		y := v_0.Args[1]
 20800  		x := v_0.Args[0]
 20801  		v.reset(OpLeq64U)
 20802  		v.AddArg2(y, x)
 20803  		return true
 20804  	}
 20805  	// match: (Not (Less32U x y))
 20806  	// result: (Leq32U y x)
 20807  	for {
 20808  		if v_0.Op != OpLess32U {
 20809  			break
 20810  		}
 20811  		y := v_0.Args[1]
 20812  		x := v_0.Args[0]
 20813  		v.reset(OpLeq32U)
 20814  		v.AddArg2(y, x)
 20815  		return true
 20816  	}
 20817  	// match: (Not (Less16U x y))
 20818  	// result: (Leq16U y x)
 20819  	for {
 20820  		if v_0.Op != OpLess16U {
 20821  			break
 20822  		}
 20823  		y := v_0.Args[1]
 20824  		x := v_0.Args[0]
 20825  		v.reset(OpLeq16U)
 20826  		v.AddArg2(y, x)
 20827  		return true
 20828  	}
 20829  	// match: (Not (Less8U x y))
 20830  	// result: (Leq8U y x)
 20831  	for {
 20832  		if v_0.Op != OpLess8U {
 20833  			break
 20834  		}
 20835  		y := v_0.Args[1]
 20836  		x := v_0.Args[0]
 20837  		v.reset(OpLeq8U)
 20838  		v.AddArg2(y, x)
 20839  		return true
 20840  	}
 20841  	// match: (Not (Leq64 x y))
 20842  	// result: (Less64 y x)
 20843  	for {
 20844  		if v_0.Op != OpLeq64 {
 20845  			break
 20846  		}
 20847  		y := v_0.Args[1]
 20848  		x := v_0.Args[0]
 20849  		v.reset(OpLess64)
 20850  		v.AddArg2(y, x)
 20851  		return true
 20852  	}
 20853  	// match: (Not (Leq32 x y))
 20854  	// result: (Less32 y x)
 20855  	for {
 20856  		if v_0.Op != OpLeq32 {
 20857  			break
 20858  		}
 20859  		y := v_0.Args[1]
 20860  		x := v_0.Args[0]
 20861  		v.reset(OpLess32)
 20862  		v.AddArg2(y, x)
 20863  		return true
 20864  	}
 20865  	// match: (Not (Leq16 x y))
 20866  	// result: (Less16 y x)
 20867  	for {
 20868  		if v_0.Op != OpLeq16 {
 20869  			break
 20870  		}
 20871  		y := v_0.Args[1]
 20872  		x := v_0.Args[0]
 20873  		v.reset(OpLess16)
 20874  		v.AddArg2(y, x)
 20875  		return true
 20876  	}
 20877  	// match: (Not (Leq8 x y))
 20878  	// result: (Less8 y x)
 20879  	for {
 20880  		if v_0.Op != OpLeq8 {
 20881  			break
 20882  		}
 20883  		y := v_0.Args[1]
 20884  		x := v_0.Args[0]
 20885  		v.reset(OpLess8)
 20886  		v.AddArg2(y, x)
 20887  		return true
 20888  	}
 20889  	// match: (Not (Leq64U x y))
 20890  	// result: (Less64U y x)
 20891  	for {
 20892  		if v_0.Op != OpLeq64U {
 20893  			break
 20894  		}
 20895  		y := v_0.Args[1]
 20896  		x := v_0.Args[0]
 20897  		v.reset(OpLess64U)
 20898  		v.AddArg2(y, x)
 20899  		return true
 20900  	}
 20901  	// match: (Not (Leq32U x y))
 20902  	// result: (Less32U y x)
 20903  	for {
 20904  		if v_0.Op != OpLeq32U {
 20905  			break
 20906  		}
 20907  		y := v_0.Args[1]
 20908  		x := v_0.Args[0]
 20909  		v.reset(OpLess32U)
 20910  		v.AddArg2(y, x)
 20911  		return true
 20912  	}
 20913  	// match: (Not (Leq16U x y))
 20914  	// result: (Less16U y x)
 20915  	for {
 20916  		if v_0.Op != OpLeq16U {
 20917  			break
 20918  		}
 20919  		y := v_0.Args[1]
 20920  		x := v_0.Args[0]
 20921  		v.reset(OpLess16U)
 20922  		v.AddArg2(y, x)
 20923  		return true
 20924  	}
 20925  	// match: (Not (Leq8U x y))
 20926  	// result: (Less8U y x)
 20927  	for {
 20928  		if v_0.Op != OpLeq8U {
 20929  			break
 20930  		}
 20931  		y := v_0.Args[1]
 20932  		x := v_0.Args[0]
 20933  		v.reset(OpLess8U)
 20934  		v.AddArg2(y, x)
 20935  		return true
 20936  	}
 20937  	return false
 20938  }
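// rewriteValuegeneric_OpOffPtr folds pointer offsets: nested OffPtr values
// are combined by adding their constant offsets, and an OffPtr by 0 is
// dropped entirely when the result type matches the pointer's type.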
 20939  func rewriteValuegeneric_OpOffPtr(v *Value) bool {
 20940  	v_0 := v.Args[0]
 20941  	// match: (OffPtr (OffPtr p [y]) [x])
 20942  	// result: (OffPtr p [x+y])
 20943  	for {
 20944  		x := auxIntToInt64(v.AuxInt)
 20945  		if v_0.Op != OpOffPtr {
 20946  			break
 20947  		}
 20948  		y := auxIntToInt64(v_0.AuxInt)
 20949  		p := v_0.Args[0]
 20950  		v.reset(OpOffPtr)
 20951  		v.AuxInt = int64ToAuxInt(x + y)
 20952  		v.AddArg(p)
 20953  		return true
 20954  	}
 20955  	// match: (OffPtr p [0])
 20956  	// cond: v.Type.Compare(p.Type) == types.CMPeq
 20957  	// result: p
 20958  	for {
 20959  		if auxIntToInt64(v.AuxInt) != 0 {
 20960  			break
 20961  		}
 20962  		p := v_0
 20963  		if !(v.Type.Compare(p.Type) == types.CMPeq) {
 20964  			break
 20965  		}
 20966  		v.copyOf(p)
 20967  		return true
 20968  	}
 20969  	return false
 20970  }
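// rewriteValuegeneric_OpOr16 simplifies 16-bit bitwise OR. The rules below
// fold constant operands, rewrite (^x | ^y) as ^(x & y), and apply the
// identities x|x = x, x|0 = x, x|-1 = -1, and x|^x = -1. Redundant operands
// in nested ORs are dropped, constants are reassociated toward the front of
// an OR chain, and an OR of a left shift with the matching unsigned right
// shift of the same value is recognized as a rotate (RotateLeft16) when the
// target supports 16-bit rotates.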
 20971  func rewriteValuegeneric_OpOr16(v *Value) bool {
 20972  	v_1 := v.Args[1]
 20973  	v_0 := v.Args[0]
 20974  	b := v.Block
 20975  	config := b.Func.Config
 20976  	// match: (Or16 (Const16 [c]) (Const16 [d]))
 20977  	// result: (Const16 [c|d])
 20978  	for {
 20979  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20980  			if v_0.Op != OpConst16 {
 20981  				continue
 20982  			}
 20983  			c := auxIntToInt16(v_0.AuxInt)
 20984  			if v_1.Op != OpConst16 {
 20985  				continue
 20986  			}
 20987  			d := auxIntToInt16(v_1.AuxInt)
 20988  			v.reset(OpConst16)
 20989  			v.AuxInt = int16ToAuxInt(c | d)
 20990  			return true
 20991  		}
 20992  		break
 20993  	}
 20994  	// match: (Or16 <t> (Com16 x) (Com16 y))
 20995  	// result: (Com16 (And16 <t> x y))
 20996  	for {
 20997  		t := v.Type
 20998  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 20999  			if v_0.Op != OpCom16 {
 21000  				continue
 21001  			}
 21002  			x := v_0.Args[0]
 21003  			if v_1.Op != OpCom16 {
 21004  				continue
 21005  			}
 21006  			y := v_1.Args[0]
 21007  			v.reset(OpCom16)
 21008  			v0 := b.NewValue0(v.Pos, OpAnd16, t)
 21009  			v0.AddArg2(x, y)
 21010  			v.AddArg(v0)
 21011  			return true
 21012  		}
 21013  		break
 21014  	}
 21015  	// match: (Or16 x x)
 21016  	// result: x
 21017  	for {
 21018  		x := v_0
 21019  		if x != v_1 {
 21020  			break
 21021  		}
 21022  		v.copyOf(x)
 21023  		return true
 21024  	}
 21025  	// match: (Or16 (Const16 [0]) x)
 21026  	// result: x
 21027  	for {
 21028  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21029  			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
 21030  				continue
 21031  			}
 21032  			x := v_1
 21033  			v.copyOf(x)
 21034  			return true
 21035  		}
 21036  		break
 21037  	}
 21038  	// match: (Or16 (Const16 [-1]) _)
 21039  	// result: (Const16 [-1])
 21040  	for {
 21041  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21042  			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != -1 {
 21043  				continue
 21044  			}
 21045  			v.reset(OpConst16)
 21046  			v.AuxInt = int16ToAuxInt(-1)
 21047  			return true
 21048  		}
 21049  		break
 21050  	}
 21051  	// match: (Or16 (Com16 x) x)
 21052  	// result: (Const16 [-1])
 21053  	for {
 21054  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21055  			if v_0.Op != OpCom16 {
 21056  				continue
 21057  			}
 21058  			x := v_0.Args[0]
 21059  			if x != v_1 {
 21060  				continue
 21061  			}
 21062  			v.reset(OpConst16)
 21063  			v.AuxInt = int16ToAuxInt(-1)
 21064  			return true
 21065  		}
 21066  		break
 21067  	}
 21068  	// match: (Or16 x (Or16 x y))
 21069  	// result: (Or16 x y)
 21070  	for {
 21071  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21072  			x := v_0
 21073  			if v_1.Op != OpOr16 {
 21074  				continue
 21075  			}
 21076  			_ = v_1.Args[1]
 21077  			v_1_0 := v_1.Args[0]
 21078  			v_1_1 := v_1.Args[1]
 21079  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 21080  				if x != v_1_0 {
 21081  					continue
 21082  				}
 21083  				y := v_1_1
 21084  				v.reset(OpOr16)
 21085  				v.AddArg2(x, y)
 21086  				return true
 21087  			}
 21088  		}
 21089  		break
 21090  	}
 21091  	// match: (Or16 (And16 x (Const16 [c2])) (Const16 <t> [c1]))
 21092  	// cond: ^(c1 | c2) == 0
 21093  	// result: (Or16 (Const16 <t> [c1]) x)
 21094  	for {
 21095  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21096  			if v_0.Op != OpAnd16 {
 21097  				continue
 21098  			}
 21099  			_ = v_0.Args[1]
 21100  			v_0_0 := v_0.Args[0]
 21101  			v_0_1 := v_0.Args[1]
 21102  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 21103  				x := v_0_0
 21104  				if v_0_1.Op != OpConst16 {
 21105  					continue
 21106  				}
 21107  				c2 := auxIntToInt16(v_0_1.AuxInt)
 21108  				if v_1.Op != OpConst16 {
 21109  					continue
 21110  				}
 21111  				t := v_1.Type
 21112  				c1 := auxIntToInt16(v_1.AuxInt)
 21113  				if !(^(c1 | c2) == 0) {
 21114  					continue
 21115  				}
 21116  				v.reset(OpOr16)
 21117  				v0 := b.NewValue0(v.Pos, OpConst16, t)
 21118  				v0.AuxInt = int16ToAuxInt(c1)
 21119  				v.AddArg2(v0, x)
 21120  				return true
 21121  			}
 21122  		}
 21123  		break
 21124  	}
 21125  	// match: (Or16 (Or16 i:(Const16 <t>) z) x)
 21126  	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
 21127  	// result: (Or16 i (Or16 <t> z x))
 21128  	for {
 21129  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21130  			if v_0.Op != OpOr16 {
 21131  				continue
 21132  			}
 21133  			_ = v_0.Args[1]
 21134  			v_0_0 := v_0.Args[0]
 21135  			v_0_1 := v_0.Args[1]
 21136  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 21137  				i := v_0_0
 21138  				if i.Op != OpConst16 {
 21139  					continue
 21140  				}
 21141  				t := i.Type
 21142  				z := v_0_1
 21143  				x := v_1
 21144  				if !(z.Op != OpConst16 && x.Op != OpConst16) {
 21145  					continue
 21146  				}
 21147  				v.reset(OpOr16)
 21148  				v0 := b.NewValue0(v.Pos, OpOr16, t)
 21149  				v0.AddArg2(z, x)
 21150  				v.AddArg2(i, v0)
 21151  				return true
 21152  			}
 21153  		}
 21154  		break
 21155  	}
 21156  	// match: (Or16 (Const16 <t> [c]) (Or16 (Const16 <t> [d]) x))
 21157  	// result: (Or16 (Const16 <t> [c|d]) x)
 21158  	for {
 21159  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21160  			if v_0.Op != OpConst16 {
 21161  				continue
 21162  			}
 21163  			t := v_0.Type
 21164  			c := auxIntToInt16(v_0.AuxInt)
 21165  			if v_1.Op != OpOr16 {
 21166  				continue
 21167  			}
 21168  			_ = v_1.Args[1]
 21169  			v_1_0 := v_1.Args[0]
 21170  			v_1_1 := v_1.Args[1]
 21171  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 21172  				if v_1_0.Op != OpConst16 || v_1_0.Type != t {
 21173  					continue
 21174  				}
 21175  				d := auxIntToInt16(v_1_0.AuxInt)
 21176  				x := v_1_1
 21177  				v.reset(OpOr16)
 21178  				v0 := b.NewValue0(v.Pos, OpConst16, t)
 21179  				v0.AuxInt = int16ToAuxInt(c | d)
 21180  				v.AddArg2(v0, x)
 21181  				return true
 21182  			}
 21183  		}
 21184  		break
 21185  	}
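	// The remaining Or16 rules recognize rotate idioms. An OR of a left
	// shift by c with an unsigned right shift by 16-c of the same value is a
	// 16-bit rotate; for example, (x<<5) | (x>>11) on a 16-bit x becomes
	// (RotateLeft16 x 5). The constant-shift form checks the amounts
	// directly, while the variable-shift forms require that at least one of
	// the shifts is known to be in bounds (shiftIsBounded) and that the
	// target can rotate at this width (canRotate).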
 21186  	// match: (Or16 (Lsh16x64 x z:(Const64 <t> [c])) (Rsh16Ux64 x (Const64 [d])))
 21187  	// cond: c < 16 && d == 16-c && canRotate(config, 16)
 21188  	// result: (RotateLeft16 x z)
 21189  	for {
 21190  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21191  			if v_0.Op != OpLsh16x64 {
 21192  				continue
 21193  			}
 21194  			_ = v_0.Args[1]
 21195  			x := v_0.Args[0]
 21196  			z := v_0.Args[1]
 21197  			if z.Op != OpConst64 {
 21198  				continue
 21199  			}
 21200  			c := auxIntToInt64(z.AuxInt)
 21201  			if v_1.Op != OpRsh16Ux64 {
 21202  				continue
 21203  			}
 21204  			_ = v_1.Args[1]
 21205  			if x != v_1.Args[0] {
 21206  				continue
 21207  			}
 21208  			v_1_1 := v_1.Args[1]
 21209  			if v_1_1.Op != OpConst64 {
 21210  				continue
 21211  			}
 21212  			d := auxIntToInt64(v_1_1.AuxInt)
 21213  			if !(c < 16 && d == 16-c && canRotate(config, 16)) {
 21214  				continue
 21215  			}
 21216  			v.reset(OpRotateLeft16)
 21217  			v.AddArg2(x, z)
 21218  			return true
 21219  		}
 21220  		break
 21221  	}
 21222  	// match: (Or16 left:(Lsh16x64 x y) right:(Rsh16Ux64 x (Sub64 (Const64 [16]) y)))
 21223  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
 21224  	// result: (RotateLeft16 x y)
 21225  	for {
 21226  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21227  			left := v_0
 21228  			if left.Op != OpLsh16x64 {
 21229  				continue
 21230  			}
 21231  			y := left.Args[1]
 21232  			x := left.Args[0]
 21233  			right := v_1
 21234  			if right.Op != OpRsh16Ux64 {
 21235  				continue
 21236  			}
 21237  			_ = right.Args[1]
 21238  			if x != right.Args[0] {
 21239  				continue
 21240  			}
 21241  			right_1 := right.Args[1]
 21242  			if right_1.Op != OpSub64 {
 21243  				continue
 21244  			}
 21245  			_ = right_1.Args[1]
 21246  			right_1_0 := right_1.Args[0]
 21247  			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
 21248  				continue
 21249  			}
 21250  			v.reset(OpRotateLeft16)
 21251  			v.AddArg2(x, y)
 21252  			return true
 21253  		}
 21254  		break
 21255  	}
 21256  	// match: (Or16 left:(Lsh16x32 x y) right:(Rsh16Ux32 x (Sub32 (Const32 [16]) y)))
 21257  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
 21258  	// result: (RotateLeft16 x y)
 21259  	for {
 21260  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21261  			left := v_0
 21262  			if left.Op != OpLsh16x32 {
 21263  				continue
 21264  			}
 21265  			y := left.Args[1]
 21266  			x := left.Args[0]
 21267  			right := v_1
 21268  			if right.Op != OpRsh16Ux32 {
 21269  				continue
 21270  			}
 21271  			_ = right.Args[1]
 21272  			if x != right.Args[0] {
 21273  				continue
 21274  			}
 21275  			right_1 := right.Args[1]
 21276  			if right_1.Op != OpSub32 {
 21277  				continue
 21278  			}
 21279  			_ = right_1.Args[1]
 21280  			right_1_0 := right_1.Args[0]
 21281  			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
 21282  				continue
 21283  			}
 21284  			v.reset(OpRotateLeft16)
 21285  			v.AddArg2(x, y)
 21286  			return true
 21287  		}
 21288  		break
 21289  	}
 21290  	// match: (Or16 left:(Lsh16x16 x y) right:(Rsh16Ux16 x (Sub16 (Const16 [16]) y)))
 21291  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
 21292  	// result: (RotateLeft16 x y)
 21293  	for {
 21294  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21295  			left := v_0
 21296  			if left.Op != OpLsh16x16 {
 21297  				continue
 21298  			}
 21299  			y := left.Args[1]
 21300  			x := left.Args[0]
 21301  			right := v_1
 21302  			if right.Op != OpRsh16Ux16 {
 21303  				continue
 21304  			}
 21305  			_ = right.Args[1]
 21306  			if x != right.Args[0] {
 21307  				continue
 21308  			}
 21309  			right_1 := right.Args[1]
 21310  			if right_1.Op != OpSub16 {
 21311  				continue
 21312  			}
 21313  			_ = right_1.Args[1]
 21314  			right_1_0 := right_1.Args[0]
 21315  			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
 21316  				continue
 21317  			}
 21318  			v.reset(OpRotateLeft16)
 21319  			v.AddArg2(x, y)
 21320  			return true
 21321  		}
 21322  		break
 21323  	}
 21324  	// match: (Or16 left:(Lsh16x8 x y) right:(Rsh16Ux8 x (Sub8 (Const8 [16]) y)))
 21325  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
 21326  	// result: (RotateLeft16 x y)
 21327  	for {
 21328  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21329  			left := v_0
 21330  			if left.Op != OpLsh16x8 {
 21331  				continue
 21332  			}
 21333  			y := left.Args[1]
 21334  			x := left.Args[0]
 21335  			right := v_1
 21336  			if right.Op != OpRsh16Ux8 {
 21337  				continue
 21338  			}
 21339  			_ = right.Args[1]
 21340  			if x != right.Args[0] {
 21341  				continue
 21342  			}
 21343  			right_1 := right.Args[1]
 21344  			if right_1.Op != OpSub8 {
 21345  				continue
 21346  			}
 21347  			_ = right_1.Args[1]
 21348  			right_1_0 := right_1.Args[0]
 21349  			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
 21350  				continue
 21351  			}
 21352  			v.reset(OpRotateLeft16)
 21353  			v.AddArg2(x, y)
 21354  			return true
 21355  		}
 21356  		break
 21357  	}
 21358  	// match: (Or16 right:(Rsh16Ux64 x y) left:(Lsh16x64 x z:(Sub64 (Const64 [16]) y)))
 21359  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
 21360  	// result: (RotateLeft16 x z)
 21361  	for {
 21362  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21363  			right := v_0
 21364  			if right.Op != OpRsh16Ux64 {
 21365  				continue
 21366  			}
 21367  			y := right.Args[1]
 21368  			x := right.Args[0]
 21369  			left := v_1
 21370  			if left.Op != OpLsh16x64 {
 21371  				continue
 21372  			}
 21373  			_ = left.Args[1]
 21374  			if x != left.Args[0] {
 21375  				continue
 21376  			}
 21377  			z := left.Args[1]
 21378  			if z.Op != OpSub64 {
 21379  				continue
 21380  			}
 21381  			_ = z.Args[1]
 21382  			z_0 := z.Args[0]
 21383  			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
 21384  				continue
 21385  			}
 21386  			v.reset(OpRotateLeft16)
 21387  			v.AddArg2(x, z)
 21388  			return true
 21389  		}
 21390  		break
 21391  	}
 21392  	// match: (Or16 right:(Rsh16Ux32 x y) left:(Lsh16x32 x z:(Sub32 (Const32 [16]) y)))
 21393  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
 21394  	// result: (RotateLeft16 x z)
 21395  	for {
 21396  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21397  			right := v_0
 21398  			if right.Op != OpRsh16Ux32 {
 21399  				continue
 21400  			}
 21401  			y := right.Args[1]
 21402  			x := right.Args[0]
 21403  			left := v_1
 21404  			if left.Op != OpLsh16x32 {
 21405  				continue
 21406  			}
 21407  			_ = left.Args[1]
 21408  			if x != left.Args[0] {
 21409  				continue
 21410  			}
 21411  			z := left.Args[1]
 21412  			if z.Op != OpSub32 {
 21413  				continue
 21414  			}
 21415  			_ = z.Args[1]
 21416  			z_0 := z.Args[0]
 21417  			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
 21418  				continue
 21419  			}
 21420  			v.reset(OpRotateLeft16)
 21421  			v.AddArg2(x, z)
 21422  			return true
 21423  		}
 21424  		break
 21425  	}
 21426  	// match: (Or16 right:(Rsh16Ux16 x y) left:(Lsh16x16 x z:(Sub16 (Const16 [16]) y)))
 21427  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
 21428  	// result: (RotateLeft16 x z)
 21429  	for {
 21430  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21431  			right := v_0
 21432  			if right.Op != OpRsh16Ux16 {
 21433  				continue
 21434  			}
 21435  			y := right.Args[1]
 21436  			x := right.Args[0]
 21437  			left := v_1
 21438  			if left.Op != OpLsh16x16 {
 21439  				continue
 21440  			}
 21441  			_ = left.Args[1]
 21442  			if x != left.Args[0] {
 21443  				continue
 21444  			}
 21445  			z := left.Args[1]
 21446  			if z.Op != OpSub16 {
 21447  				continue
 21448  			}
 21449  			_ = z.Args[1]
 21450  			z_0 := z.Args[0]
 21451  			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
 21452  				continue
 21453  			}
 21454  			v.reset(OpRotateLeft16)
 21455  			v.AddArg2(x, z)
 21456  			return true
 21457  		}
 21458  		break
 21459  	}
 21460  	// match: (Or16 right:(Rsh16Ux8 x y) left:(Lsh16x8 x z:(Sub8 (Const8 [16]) y)))
 21461  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
 21462  	// result: (RotateLeft16 x z)
 21463  	for {
 21464  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21465  			right := v_0
 21466  			if right.Op != OpRsh16Ux8 {
 21467  				continue
 21468  			}
 21469  			y := right.Args[1]
 21470  			x := right.Args[0]
 21471  			left := v_1
 21472  			if left.Op != OpLsh16x8 {
 21473  				continue
 21474  			}
 21475  			_ = left.Args[1]
 21476  			if x != left.Args[0] {
 21477  				continue
 21478  			}
 21479  			z := left.Args[1]
 21480  			if z.Op != OpSub8 {
 21481  				continue
 21482  			}
 21483  			_ = z.Args[1]
 21484  			z_0 := z.Args[0]
 21485  			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
 21486  				continue
 21487  			}
 21488  			v.reset(OpRotateLeft16)
 21489  			v.AddArg2(x, z)
 21490  			return true
 21491  		}
 21492  		break
 21493  	}
 21494  	return false
 21495  }
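// rewriteValuegeneric_OpOr32 applies the same OR simplifications at 32-bit
// width: constant folding, the ^x|^y = ^(x&y) rewrite, the x|x, x|0, x|-1,
// and x|^x identities, constant reassociation, and recognition of 32-bit
// rotate idioms built from paired shifts.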
 21496  func rewriteValuegeneric_OpOr32(v *Value) bool {
 21497  	v_1 := v.Args[1]
 21498  	v_0 := v.Args[0]
 21499  	b := v.Block
 21500  	config := b.Func.Config
 21501  	// match: (Or32 (Const32 [c]) (Const32 [d]))
 21502  	// result: (Const32 [c|d])
 21503  	for {
 21504  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21505  			if v_0.Op != OpConst32 {
 21506  				continue
 21507  			}
 21508  			c := auxIntToInt32(v_0.AuxInt)
 21509  			if v_1.Op != OpConst32 {
 21510  				continue
 21511  			}
 21512  			d := auxIntToInt32(v_1.AuxInt)
 21513  			v.reset(OpConst32)
 21514  			v.AuxInt = int32ToAuxInt(c | d)
 21515  			return true
 21516  		}
 21517  		break
 21518  	}
 21519  	// match: (Or32 <t> (Com32 x) (Com32 y))
 21520  	// result: (Com32 (And32 <t> x y))
 21521  	for {
 21522  		t := v.Type
 21523  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21524  			if v_0.Op != OpCom32 {
 21525  				continue
 21526  			}
 21527  			x := v_0.Args[0]
 21528  			if v_1.Op != OpCom32 {
 21529  				continue
 21530  			}
 21531  			y := v_1.Args[0]
 21532  			v.reset(OpCom32)
 21533  			v0 := b.NewValue0(v.Pos, OpAnd32, t)
 21534  			v0.AddArg2(x, y)
 21535  			v.AddArg(v0)
 21536  			return true
 21537  		}
 21538  		break
 21539  	}
 21540  	// match: (Or32 x x)
 21541  	// result: x
 21542  	for {
 21543  		x := v_0
 21544  		if x != v_1 {
 21545  			break
 21546  		}
 21547  		v.copyOf(x)
 21548  		return true
 21549  	}
 21550  	// match: (Or32 (Const32 [0]) x)
 21551  	// result: x
 21552  	for {
 21553  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21554  			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 21555  				continue
 21556  			}
 21557  			x := v_1
 21558  			v.copyOf(x)
 21559  			return true
 21560  		}
 21561  		break
 21562  	}
 21563  	// match: (Or32 (Const32 [-1]) _)
 21564  	// result: (Const32 [-1])
 21565  	for {
 21566  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21567  			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != -1 {
 21568  				continue
 21569  			}
 21570  			v.reset(OpConst32)
 21571  			v.AuxInt = int32ToAuxInt(-1)
 21572  			return true
 21573  		}
 21574  		break
 21575  	}
 21576  	// match: (Or32 (Com32 x) x)
 21577  	// result: (Const32 [-1])
 21578  	for {
 21579  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21580  			if v_0.Op != OpCom32 {
 21581  				continue
 21582  			}
 21583  			x := v_0.Args[0]
 21584  			if x != v_1 {
 21585  				continue
 21586  			}
 21587  			v.reset(OpConst32)
 21588  			v.AuxInt = int32ToAuxInt(-1)
 21589  			return true
 21590  		}
 21591  		break
 21592  	}
 21593  	// match: (Or32 x (Or32 x y))
 21594  	// result: (Or32 x y)
 21595  	for {
 21596  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21597  			x := v_0
 21598  			if v_1.Op != OpOr32 {
 21599  				continue
 21600  			}
 21601  			_ = v_1.Args[1]
 21602  			v_1_0 := v_1.Args[0]
 21603  			v_1_1 := v_1.Args[1]
 21604  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 21605  				if x != v_1_0 {
 21606  					continue
 21607  				}
 21608  				y := v_1_1
 21609  				v.reset(OpOr32)
 21610  				v.AddArg2(x, y)
 21611  				return true
 21612  			}
 21613  		}
 21614  		break
 21615  	}
 21616  	// match: (Or32 (And32 x (Const32 [c2])) (Const32 <t> [c1]))
 21617  	// cond: ^(c1 | c2) == 0
 21618  	// result: (Or32 (Const32 <t> [c1]) x)
 21619  	for {
 21620  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21621  			if v_0.Op != OpAnd32 {
 21622  				continue
 21623  			}
 21624  			_ = v_0.Args[1]
 21625  			v_0_0 := v_0.Args[0]
 21626  			v_0_1 := v_0.Args[1]
 21627  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 21628  				x := v_0_0
 21629  				if v_0_1.Op != OpConst32 {
 21630  					continue
 21631  				}
 21632  				c2 := auxIntToInt32(v_0_1.AuxInt)
 21633  				if v_1.Op != OpConst32 {
 21634  					continue
 21635  				}
 21636  				t := v_1.Type
 21637  				c1 := auxIntToInt32(v_1.AuxInt)
 21638  				if !(^(c1 | c2) == 0) {
 21639  					continue
 21640  				}
 21641  				v.reset(OpOr32)
 21642  				v0 := b.NewValue0(v.Pos, OpConst32, t)
 21643  				v0.AuxInt = int32ToAuxInt(c1)
 21644  				v.AddArg2(v0, x)
 21645  				return true
 21646  			}
 21647  		}
 21648  		break
 21649  	}
 21650  	// match: (Or32 (Or32 i:(Const32 <t>) z) x)
 21651  	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
 21652  	// result: (Or32 i (Or32 <t> z x))
 21653  	for {
 21654  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21655  			if v_0.Op != OpOr32 {
 21656  				continue
 21657  			}
 21658  			_ = v_0.Args[1]
 21659  			v_0_0 := v_0.Args[0]
 21660  			v_0_1 := v_0.Args[1]
 21661  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 21662  				i := v_0_0
 21663  				if i.Op != OpConst32 {
 21664  					continue
 21665  				}
 21666  				t := i.Type
 21667  				z := v_0_1
 21668  				x := v_1
 21669  				if !(z.Op != OpConst32 && x.Op != OpConst32) {
 21670  					continue
 21671  				}
 21672  				v.reset(OpOr32)
 21673  				v0 := b.NewValue0(v.Pos, OpOr32, t)
 21674  				v0.AddArg2(z, x)
 21675  				v.AddArg2(i, v0)
 21676  				return true
 21677  			}
 21678  		}
 21679  		break
 21680  	}
 21681  	// match: (Or32 (Const32 <t> [c]) (Or32 (Const32 <t> [d]) x))
 21682  	// result: (Or32 (Const32 <t> [c|d]) x)
 21683  	for {
 21684  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21685  			if v_0.Op != OpConst32 {
 21686  				continue
 21687  			}
 21688  			t := v_0.Type
 21689  			c := auxIntToInt32(v_0.AuxInt)
 21690  			if v_1.Op != OpOr32 {
 21691  				continue
 21692  			}
 21693  			_ = v_1.Args[1]
 21694  			v_1_0 := v_1.Args[0]
 21695  			v_1_1 := v_1.Args[1]
 21696  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 21697  				if v_1_0.Op != OpConst32 || v_1_0.Type != t {
 21698  					continue
 21699  				}
 21700  				d := auxIntToInt32(v_1_0.AuxInt)
 21701  				x := v_1_1
 21702  				v.reset(OpOr32)
 21703  				v0 := b.NewValue0(v.Pos, OpConst32, t)
 21704  				v0.AuxInt = int32ToAuxInt(c | d)
 21705  				v.AddArg2(v0, x)
 21706  				return true
 21707  			}
 21708  		}
 21709  		break
 21710  	}
 21711  	// match: (Or32 (Lsh32x64 x z:(Const64 <t> [c])) (Rsh32Ux64 x (Const64 [d])))
 21712  	// cond: c < 32 && d == 32-c && canRotate(config, 32)
 21713  	// result: (RotateLeft32 x z)
 21714  	for {
 21715  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21716  			if v_0.Op != OpLsh32x64 {
 21717  				continue
 21718  			}
 21719  			_ = v_0.Args[1]
 21720  			x := v_0.Args[0]
 21721  			z := v_0.Args[1]
 21722  			if z.Op != OpConst64 {
 21723  				continue
 21724  			}
 21725  			c := auxIntToInt64(z.AuxInt)
 21726  			if v_1.Op != OpRsh32Ux64 {
 21727  				continue
 21728  			}
 21729  			_ = v_1.Args[1]
 21730  			if x != v_1.Args[0] {
 21731  				continue
 21732  			}
 21733  			v_1_1 := v_1.Args[1]
 21734  			if v_1_1.Op != OpConst64 {
 21735  				continue
 21736  			}
 21737  			d := auxIntToInt64(v_1_1.AuxInt)
 21738  			if !(c < 32 && d == 32-c && canRotate(config, 32)) {
 21739  				continue
 21740  			}
 21741  			v.reset(OpRotateLeft32)
 21742  			v.AddArg2(x, z)
 21743  			return true
 21744  		}
 21745  		break
 21746  	}
 21747  	// match: (Or32 left:(Lsh32x64 x y) right:(Rsh32Ux64 x (Sub64 (Const64 [32]) y)))
 21748  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
 21749  	// result: (RotateLeft32 x y)
 21750  	for {
 21751  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21752  			left := v_0
 21753  			if left.Op != OpLsh32x64 {
 21754  				continue
 21755  			}
 21756  			y := left.Args[1]
 21757  			x := left.Args[0]
 21758  			right := v_1
 21759  			if right.Op != OpRsh32Ux64 {
 21760  				continue
 21761  			}
 21762  			_ = right.Args[1]
 21763  			if x != right.Args[0] {
 21764  				continue
 21765  			}
 21766  			right_1 := right.Args[1]
 21767  			if right_1.Op != OpSub64 {
 21768  				continue
 21769  			}
 21770  			_ = right_1.Args[1]
 21771  			right_1_0 := right_1.Args[0]
 21772  			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
 21773  				continue
 21774  			}
 21775  			v.reset(OpRotateLeft32)
 21776  			v.AddArg2(x, y)
 21777  			return true
 21778  		}
 21779  		break
 21780  	}
 21781  	// match: (Or32 left:(Lsh32x32 x y) right:(Rsh32Ux32 x (Sub32 (Const32 [32]) y)))
 21782  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
 21783  	// result: (RotateLeft32 x y)
 21784  	for {
 21785  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21786  			left := v_0
 21787  			if left.Op != OpLsh32x32 {
 21788  				continue
 21789  			}
 21790  			y := left.Args[1]
 21791  			x := left.Args[0]
 21792  			right := v_1
 21793  			if right.Op != OpRsh32Ux32 {
 21794  				continue
 21795  			}
 21796  			_ = right.Args[1]
 21797  			if x != right.Args[0] {
 21798  				continue
 21799  			}
 21800  			right_1 := right.Args[1]
 21801  			if right_1.Op != OpSub32 {
 21802  				continue
 21803  			}
 21804  			_ = right_1.Args[1]
 21805  			right_1_0 := right_1.Args[0]
 21806  			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
 21807  				continue
 21808  			}
 21809  			v.reset(OpRotateLeft32)
 21810  			v.AddArg2(x, y)
 21811  			return true
 21812  		}
 21813  		break
 21814  	}
 21815  	// match: (Or32 left:(Lsh32x16 x y) right:(Rsh32Ux16 x (Sub16 (Const16 [32]) y)))
 21816  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
 21817  	// result: (RotateLeft32 x y)
 21818  	for {
 21819  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21820  			left := v_0
 21821  			if left.Op != OpLsh32x16 {
 21822  				continue
 21823  			}
 21824  			y := left.Args[1]
 21825  			x := left.Args[0]
 21826  			right := v_1
 21827  			if right.Op != OpRsh32Ux16 {
 21828  				continue
 21829  			}
 21830  			_ = right.Args[1]
 21831  			if x != right.Args[0] {
 21832  				continue
 21833  			}
 21834  			right_1 := right.Args[1]
 21835  			if right_1.Op != OpSub16 {
 21836  				continue
 21837  			}
 21838  			_ = right_1.Args[1]
 21839  			right_1_0 := right_1.Args[0]
 21840  			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
 21841  				continue
 21842  			}
 21843  			v.reset(OpRotateLeft32)
 21844  			v.AddArg2(x, y)
 21845  			return true
 21846  		}
 21847  		break
 21848  	}
 21849  	// match: (Or32 left:(Lsh32x8 x y) right:(Rsh32Ux8 x (Sub8 (Const8 [32]) y)))
 21850  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
 21851  	// result: (RotateLeft32 x y)
 21852  	for {
 21853  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21854  			left := v_0
 21855  			if left.Op != OpLsh32x8 {
 21856  				continue
 21857  			}
 21858  			y := left.Args[1]
 21859  			x := left.Args[0]
 21860  			right := v_1
 21861  			if right.Op != OpRsh32Ux8 {
 21862  				continue
 21863  			}
 21864  			_ = right.Args[1]
 21865  			if x != right.Args[0] {
 21866  				continue
 21867  			}
 21868  			right_1 := right.Args[1]
 21869  			if right_1.Op != OpSub8 {
 21870  				continue
 21871  			}
 21872  			_ = right_1.Args[1]
 21873  			right_1_0 := right_1.Args[0]
 21874  			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
 21875  				continue
 21876  			}
 21877  			v.reset(OpRotateLeft32)
 21878  			v.AddArg2(x, y)
 21879  			return true
 21880  		}
 21881  		break
 21882  	}
 21883  	// match: (Or32 right:(Rsh32Ux64 x y) left:(Lsh32x64 x z:(Sub64 (Const64 [32]) y)))
 21884  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
 21885  	// result: (RotateLeft32 x z)
 21886  	for {
 21887  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21888  			right := v_0
 21889  			if right.Op != OpRsh32Ux64 {
 21890  				continue
 21891  			}
 21892  			y := right.Args[1]
 21893  			x := right.Args[0]
 21894  			left := v_1
 21895  			if left.Op != OpLsh32x64 {
 21896  				continue
 21897  			}
 21898  			_ = left.Args[1]
 21899  			if x != left.Args[0] {
 21900  				continue
 21901  			}
 21902  			z := left.Args[1]
 21903  			if z.Op != OpSub64 {
 21904  				continue
 21905  			}
 21906  			_ = z.Args[1]
 21907  			z_0 := z.Args[0]
 21908  			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
 21909  				continue
 21910  			}
 21911  			v.reset(OpRotateLeft32)
 21912  			v.AddArg2(x, z)
 21913  			return true
 21914  		}
 21915  		break
 21916  	}
 21917  	// match: (Or32 right:(Rsh32Ux32 x y) left:(Lsh32x32 x z:(Sub32 (Const32 [32]) y)))
 21918  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
 21919  	// result: (RotateLeft32 x z)
 21920  	for {
 21921  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21922  			right := v_0
 21923  			if right.Op != OpRsh32Ux32 {
 21924  				continue
 21925  			}
 21926  			y := right.Args[1]
 21927  			x := right.Args[0]
 21928  			left := v_1
 21929  			if left.Op != OpLsh32x32 {
 21930  				continue
 21931  			}
 21932  			_ = left.Args[1]
 21933  			if x != left.Args[0] {
 21934  				continue
 21935  			}
 21936  			z := left.Args[1]
 21937  			if z.Op != OpSub32 {
 21938  				continue
 21939  			}
 21940  			_ = z.Args[1]
 21941  			z_0 := z.Args[0]
 21942  			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
 21943  				continue
 21944  			}
 21945  			v.reset(OpRotateLeft32)
 21946  			v.AddArg2(x, z)
 21947  			return true
 21948  		}
 21949  		break
 21950  	}
 21951  	// match: (Or32 right:(Rsh32Ux16 x y) left:(Lsh32x16 x z:(Sub16 (Const16 [32]) y)))
 21952  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
 21953  	// result: (RotateLeft32 x z)
 21954  	for {
 21955  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21956  			right := v_0
 21957  			if right.Op != OpRsh32Ux16 {
 21958  				continue
 21959  			}
 21960  			y := right.Args[1]
 21961  			x := right.Args[0]
 21962  			left := v_1
 21963  			if left.Op != OpLsh32x16 {
 21964  				continue
 21965  			}
 21966  			_ = left.Args[1]
 21967  			if x != left.Args[0] {
 21968  				continue
 21969  			}
 21970  			z := left.Args[1]
 21971  			if z.Op != OpSub16 {
 21972  				continue
 21973  			}
 21974  			_ = z.Args[1]
 21975  			z_0 := z.Args[0]
 21976  			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
 21977  				continue
 21978  			}
 21979  			v.reset(OpRotateLeft32)
 21980  			v.AddArg2(x, z)
 21981  			return true
 21982  		}
 21983  		break
 21984  	}
 21985  	// match: (Or32 right:(Rsh32Ux8 x y) left:(Lsh32x8 x z:(Sub8 (Const8 [32]) y)))
 21986  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
 21987  	// result: (RotateLeft32 x z)
 21988  	for {
 21989  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 21990  			right := v_0
 21991  			if right.Op != OpRsh32Ux8 {
 21992  				continue
 21993  			}
 21994  			y := right.Args[1]
 21995  			x := right.Args[0]
 21996  			left := v_1
 21997  			if left.Op != OpLsh32x8 {
 21998  				continue
 21999  			}
 22000  			_ = left.Args[1]
 22001  			if x != left.Args[0] {
 22002  				continue
 22003  			}
 22004  			z := left.Args[1]
 22005  			if z.Op != OpSub8 {
 22006  				continue
 22007  			}
 22008  			_ = z.Args[1]
 22009  			z_0 := z.Args[0]
 22010  			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
 22011  				continue
 22012  			}
 22013  			v.reset(OpRotateLeft32)
 22014  			v.AddArg2(x, z)
 22015  			return true
 22016  		}
 22017  		break
 22018  	}
 22019  	return false
 22020  }
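// rewriteValuegeneric_OpOr64 mirrors the Or16/Or32 rules at 64-bit width,
// including the rotate recognition for (x<<c) | (x>>(64-c)) shift pairs.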
 22021  func rewriteValuegeneric_OpOr64(v *Value) bool {
 22022  	v_1 := v.Args[1]
 22023  	v_0 := v.Args[0]
 22024  	b := v.Block
 22025  	config := b.Func.Config
 22026  	// match: (Or64 (Const64 [c]) (Const64 [d]))
 22027  	// result: (Const64 [c|d])
 22028  	for {
 22029  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22030  			if v_0.Op != OpConst64 {
 22031  				continue
 22032  			}
 22033  			c := auxIntToInt64(v_0.AuxInt)
 22034  			if v_1.Op != OpConst64 {
 22035  				continue
 22036  			}
 22037  			d := auxIntToInt64(v_1.AuxInt)
 22038  			v.reset(OpConst64)
 22039  			v.AuxInt = int64ToAuxInt(c | d)
 22040  			return true
 22041  		}
 22042  		break
 22043  	}
 22044  	// match: (Or64 <t> (Com64 x) (Com64 y))
 22045  	// result: (Com64 (And64 <t> x y))
 22046  	for {
 22047  		t := v.Type
 22048  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22049  			if v_0.Op != OpCom64 {
 22050  				continue
 22051  			}
 22052  			x := v_0.Args[0]
 22053  			if v_1.Op != OpCom64 {
 22054  				continue
 22055  			}
 22056  			y := v_1.Args[0]
 22057  			v.reset(OpCom64)
 22058  			v0 := b.NewValue0(v.Pos, OpAnd64, t)
 22059  			v0.AddArg2(x, y)
 22060  			v.AddArg(v0)
 22061  			return true
 22062  		}
 22063  		break
 22064  	}
 22065  	// match: (Or64 x x)
 22066  	// result: x
 22067  	for {
 22068  		x := v_0
 22069  		if x != v_1 {
 22070  			break
 22071  		}
 22072  		v.copyOf(x)
 22073  		return true
 22074  	}
 22075  	// match: (Or64 (Const64 [0]) x)
 22076  	// result: x
 22077  	for {
 22078  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22079  			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 22080  				continue
 22081  			}
 22082  			x := v_1
 22083  			v.copyOf(x)
 22084  			return true
 22085  		}
 22086  		break
 22087  	}
 22088  	// match: (Or64 (Const64 [-1]) _)
 22089  	// result: (Const64 [-1])
 22090  	for {
 22091  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22092  			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != -1 {
 22093  				continue
 22094  			}
 22095  			v.reset(OpConst64)
 22096  			v.AuxInt = int64ToAuxInt(-1)
 22097  			return true
 22098  		}
 22099  		break
 22100  	}
 22101  	// match: (Or64 (Com64 x) x)
 22102  	// result: (Const64 [-1])
 22103  	for {
 22104  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22105  			if v_0.Op != OpCom64 {
 22106  				continue
 22107  			}
 22108  			x := v_0.Args[0]
 22109  			if x != v_1 {
 22110  				continue
 22111  			}
 22112  			v.reset(OpConst64)
 22113  			v.AuxInt = int64ToAuxInt(-1)
 22114  			return true
 22115  		}
 22116  		break
 22117  	}
 22118  	// match: (Or64 x (Or64 x y))
 22119  	// result: (Or64 x y)
 22120  	for {
 22121  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22122  			x := v_0
 22123  			if v_1.Op != OpOr64 {
 22124  				continue
 22125  			}
 22126  			_ = v_1.Args[1]
 22127  			v_1_0 := v_1.Args[0]
 22128  			v_1_1 := v_1.Args[1]
 22129  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 22130  				if x != v_1_0 {
 22131  					continue
 22132  				}
 22133  				y := v_1_1
 22134  				v.reset(OpOr64)
 22135  				v.AddArg2(x, y)
 22136  				return true
 22137  			}
 22138  		}
 22139  		break
 22140  	}
 22141  	// match: (Or64 (And64 x (Const64 [c2])) (Const64 <t> [c1]))
 22142  	// cond: ^(c1 | c2) == 0
 22143  	// result: (Or64 (Const64 <t> [c1]) x)
 22144  	for {
 22145  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22146  			if v_0.Op != OpAnd64 {
 22147  				continue
 22148  			}
 22149  			_ = v_0.Args[1]
 22150  			v_0_0 := v_0.Args[0]
 22151  			v_0_1 := v_0.Args[1]
 22152  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 22153  				x := v_0_0
 22154  				if v_0_1.Op != OpConst64 {
 22155  					continue
 22156  				}
 22157  				c2 := auxIntToInt64(v_0_1.AuxInt)
 22158  				if v_1.Op != OpConst64 {
 22159  					continue
 22160  				}
 22161  				t := v_1.Type
 22162  				c1 := auxIntToInt64(v_1.AuxInt)
 22163  				if !(^(c1 | c2) == 0) {
 22164  					continue
 22165  				}
 22166  				v.reset(OpOr64)
 22167  				v0 := b.NewValue0(v.Pos, OpConst64, t)
 22168  				v0.AuxInt = int64ToAuxInt(c1)
 22169  				v.AddArg2(v0, x)
 22170  				return true
 22171  			}
 22172  		}
 22173  		break
 22174  	}
 22175  	// match: (Or64 (Or64 i:(Const64 <t>) z) x)
 22176  	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
 22177  	// result: (Or64 i (Or64 <t> z x))
 22178  	for {
 22179  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22180  			if v_0.Op != OpOr64 {
 22181  				continue
 22182  			}
 22183  			_ = v_0.Args[1]
 22184  			v_0_0 := v_0.Args[0]
 22185  			v_0_1 := v_0.Args[1]
 22186  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 22187  				i := v_0_0
 22188  				if i.Op != OpConst64 {
 22189  					continue
 22190  				}
 22191  				t := i.Type
 22192  				z := v_0_1
 22193  				x := v_1
 22194  				if !(z.Op != OpConst64 && x.Op != OpConst64) {
 22195  					continue
 22196  				}
 22197  				v.reset(OpOr64)
 22198  				v0 := b.NewValue0(v.Pos, OpOr64, t)
 22199  				v0.AddArg2(z, x)
 22200  				v.AddArg2(i, v0)
 22201  				return true
 22202  			}
 22203  		}
 22204  		break
 22205  	}
 22206  	// match: (Or64 (Const64 <t> [c]) (Or64 (Const64 <t> [d]) x))
 22207  	// result: (Or64 (Const64 <t> [c|d]) x)
 22208  	for {
 22209  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22210  			if v_0.Op != OpConst64 {
 22211  				continue
 22212  			}
 22213  			t := v_0.Type
 22214  			c := auxIntToInt64(v_0.AuxInt)
 22215  			if v_1.Op != OpOr64 {
 22216  				continue
 22217  			}
 22218  			_ = v_1.Args[1]
 22219  			v_1_0 := v_1.Args[0]
 22220  			v_1_1 := v_1.Args[1]
 22221  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 22222  				if v_1_0.Op != OpConst64 || v_1_0.Type != t {
 22223  					continue
 22224  				}
 22225  				d := auxIntToInt64(v_1_0.AuxInt)
 22226  				x := v_1_1
 22227  				v.reset(OpOr64)
 22228  				v0 := b.NewValue0(v.Pos, OpConst64, t)
 22229  				v0.AuxInt = int64ToAuxInt(c | d)
 22230  				v.AddArg2(v0, x)
 22231  				return true
 22232  			}
 22233  		}
 22234  		break
 22235  	}
 22236  	// match: (Or64 (Lsh64x64 x z:(Const64 <t> [c])) (Rsh64Ux64 x (Const64 [d])))
 22237  	// cond: c < 64 && d == 64-c && canRotate(config, 64)
 22238  	// result: (RotateLeft64 x z)
 22239  	for {
 22240  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22241  			if v_0.Op != OpLsh64x64 {
 22242  				continue
 22243  			}
 22244  			_ = v_0.Args[1]
 22245  			x := v_0.Args[0]
 22246  			z := v_0.Args[1]
 22247  			if z.Op != OpConst64 {
 22248  				continue
 22249  			}
 22250  			c := auxIntToInt64(z.AuxInt)
 22251  			if v_1.Op != OpRsh64Ux64 {
 22252  				continue
 22253  			}
 22254  			_ = v_1.Args[1]
 22255  			if x != v_1.Args[0] {
 22256  				continue
 22257  			}
 22258  			v_1_1 := v_1.Args[1]
 22259  			if v_1_1.Op != OpConst64 {
 22260  				continue
 22261  			}
 22262  			d := auxIntToInt64(v_1_1.AuxInt)
 22263  			if !(c < 64 && d == 64-c && canRotate(config, 64)) {
 22264  				continue
 22265  			}
 22266  			v.reset(OpRotateLeft64)
 22267  			v.AddArg2(x, z)
 22268  			return true
 22269  		}
 22270  		break
 22271  	}
 22272  	// match: (Or64 left:(Lsh64x64 x y) right:(Rsh64Ux64 x (Sub64 (Const64 [64]) y)))
 22273  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
 22274  	// result: (RotateLeft64 x y)
 22275  	for {
 22276  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22277  			left := v_0
 22278  			if left.Op != OpLsh64x64 {
 22279  				continue
 22280  			}
 22281  			y := left.Args[1]
 22282  			x := left.Args[0]
 22283  			right := v_1
 22284  			if right.Op != OpRsh64Ux64 {
 22285  				continue
 22286  			}
 22287  			_ = right.Args[1]
 22288  			if x != right.Args[0] {
 22289  				continue
 22290  			}
 22291  			right_1 := right.Args[1]
 22292  			if right_1.Op != OpSub64 {
 22293  				continue
 22294  			}
 22295  			_ = right_1.Args[1]
 22296  			right_1_0 := right_1.Args[0]
 22297  			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
 22298  				continue
 22299  			}
 22300  			v.reset(OpRotateLeft64)
 22301  			v.AddArg2(x, y)
 22302  			return true
 22303  		}
 22304  		break
 22305  	}
 22306  	// match: (Or64 left:(Lsh64x32 x y) right:(Rsh64Ux32 x (Sub32 (Const32 [64]) y)))
 22307  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
 22308  	// result: (RotateLeft64 x y)
 22309  	for {
 22310  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22311  			left := v_0
 22312  			if left.Op != OpLsh64x32 {
 22313  				continue
 22314  			}
 22315  			y := left.Args[1]
 22316  			x := left.Args[0]
 22317  			right := v_1
 22318  			if right.Op != OpRsh64Ux32 {
 22319  				continue
 22320  			}
 22321  			_ = right.Args[1]
 22322  			if x != right.Args[0] {
 22323  				continue
 22324  			}
 22325  			right_1 := right.Args[1]
 22326  			if right_1.Op != OpSub32 {
 22327  				continue
 22328  			}
 22329  			_ = right_1.Args[1]
 22330  			right_1_0 := right_1.Args[0]
 22331  			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
 22332  				continue
 22333  			}
 22334  			v.reset(OpRotateLeft64)
 22335  			v.AddArg2(x, y)
 22336  			return true
 22337  		}
 22338  		break
 22339  	}
 22340  	// match: (Or64 left:(Lsh64x16 x y) right:(Rsh64Ux16 x (Sub16 (Const16 [64]) y)))
 22341  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
 22342  	// result: (RotateLeft64 x y)
 22343  	for {
 22344  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22345  			left := v_0
 22346  			if left.Op != OpLsh64x16 {
 22347  				continue
 22348  			}
 22349  			y := left.Args[1]
 22350  			x := left.Args[0]
 22351  			right := v_1
 22352  			if right.Op != OpRsh64Ux16 {
 22353  				continue
 22354  			}
 22355  			_ = right.Args[1]
 22356  			if x != right.Args[0] {
 22357  				continue
 22358  			}
 22359  			right_1 := right.Args[1]
 22360  			if right_1.Op != OpSub16 {
 22361  				continue
 22362  			}
 22363  			_ = right_1.Args[1]
 22364  			right_1_0 := right_1.Args[0]
 22365  			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
 22366  				continue
 22367  			}
 22368  			v.reset(OpRotateLeft64)
 22369  			v.AddArg2(x, y)
 22370  			return true
 22371  		}
 22372  		break
 22373  	}
 22374  	// match: (Or64 left:(Lsh64x8 x y) right:(Rsh64Ux8 x (Sub8 (Const8 [64]) y)))
 22375  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
 22376  	// result: (RotateLeft64 x y)
 22377  	for {
 22378  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22379  			left := v_0
 22380  			if left.Op != OpLsh64x8 {
 22381  				continue
 22382  			}
 22383  			y := left.Args[1]
 22384  			x := left.Args[0]
 22385  			right := v_1
 22386  			if right.Op != OpRsh64Ux8 {
 22387  				continue
 22388  			}
 22389  			_ = right.Args[1]
 22390  			if x != right.Args[0] {
 22391  				continue
 22392  			}
 22393  			right_1 := right.Args[1]
 22394  			if right_1.Op != OpSub8 {
 22395  				continue
 22396  			}
 22397  			_ = right_1.Args[1]
 22398  			right_1_0 := right_1.Args[0]
 22399  			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
 22400  				continue
 22401  			}
 22402  			v.reset(OpRotateLeft64)
 22403  			v.AddArg2(x, y)
 22404  			return true
 22405  		}
 22406  		break
 22407  	}
 22408  	// match: (Or64 right:(Rsh64Ux64 x y) left:(Lsh64x64 x z:(Sub64 (Const64 [64]) y)))
 22409  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
 22410  	// result: (RotateLeft64 x z)
 22411  	for {
 22412  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22413  			right := v_0
 22414  			if right.Op != OpRsh64Ux64 {
 22415  				continue
 22416  			}
 22417  			y := right.Args[1]
 22418  			x := right.Args[0]
 22419  			left := v_1
 22420  			if left.Op != OpLsh64x64 {
 22421  				continue
 22422  			}
 22423  			_ = left.Args[1]
 22424  			if x != left.Args[0] {
 22425  				continue
 22426  			}
 22427  			z := left.Args[1]
 22428  			if z.Op != OpSub64 {
 22429  				continue
 22430  			}
 22431  			_ = z.Args[1]
 22432  			z_0 := z.Args[0]
 22433  			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
 22434  				continue
 22435  			}
 22436  			v.reset(OpRotateLeft64)
 22437  			v.AddArg2(x, z)
 22438  			return true
 22439  		}
 22440  		break
 22441  	}
 22442  	// match: (Or64 right:(Rsh64Ux32 x y) left:(Lsh64x32 x z:(Sub32 (Const32 [64]) y)))
 22443  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
 22444  	// result: (RotateLeft64 x z)
 22445  	for {
 22446  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22447  			right := v_0
 22448  			if right.Op != OpRsh64Ux32 {
 22449  				continue
 22450  			}
 22451  			y := right.Args[1]
 22452  			x := right.Args[0]
 22453  			left := v_1
 22454  			if left.Op != OpLsh64x32 {
 22455  				continue
 22456  			}
 22457  			_ = left.Args[1]
 22458  			if x != left.Args[0] {
 22459  				continue
 22460  			}
 22461  			z := left.Args[1]
 22462  			if z.Op != OpSub32 {
 22463  				continue
 22464  			}
 22465  			_ = z.Args[1]
 22466  			z_0 := z.Args[0]
 22467  			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
 22468  				continue
 22469  			}
 22470  			v.reset(OpRotateLeft64)
 22471  			v.AddArg2(x, z)
 22472  			return true
 22473  		}
 22474  		break
 22475  	}
 22476  	// match: (Or64 right:(Rsh64Ux16 x y) left:(Lsh64x16 x z:(Sub16 (Const16 [64]) y)))
 22477  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
 22478  	// result: (RotateLeft64 x z)
 22479  	for {
 22480  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22481  			right := v_0
 22482  			if right.Op != OpRsh64Ux16 {
 22483  				continue
 22484  			}
 22485  			y := right.Args[1]
 22486  			x := right.Args[0]
 22487  			left := v_1
 22488  			if left.Op != OpLsh64x16 {
 22489  				continue
 22490  			}
 22491  			_ = left.Args[1]
 22492  			if x != left.Args[0] {
 22493  				continue
 22494  			}
 22495  			z := left.Args[1]
 22496  			if z.Op != OpSub16 {
 22497  				continue
 22498  			}
 22499  			_ = z.Args[1]
 22500  			z_0 := z.Args[0]
 22501  			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
 22502  				continue
 22503  			}
 22504  			v.reset(OpRotateLeft64)
 22505  			v.AddArg2(x, z)
 22506  			return true
 22507  		}
 22508  		break
 22509  	}
 22510  	// match: (Or64 right:(Rsh64Ux8 x y) left:(Lsh64x8 x z:(Sub8 (Const8 [64]) y)))
 22511  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
 22512  	// result: (RotateLeft64 x z)
 22513  	for {
 22514  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22515  			right := v_0
 22516  			if right.Op != OpRsh64Ux8 {
 22517  				continue
 22518  			}
 22519  			y := right.Args[1]
 22520  			x := right.Args[0]
 22521  			left := v_1
 22522  			if left.Op != OpLsh64x8 {
 22523  				continue
 22524  			}
 22525  			_ = left.Args[1]
 22526  			if x != left.Args[0] {
 22527  				continue
 22528  			}
 22529  			z := left.Args[1]
 22530  			if z.Op != OpSub8 {
 22531  				continue
 22532  			}
 22533  			_ = z.Args[1]
 22534  			z_0 := z.Args[0]
 22535  			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
 22536  				continue
 22537  			}
 22538  			v.reset(OpRotateLeft64)
 22539  			v.AddArg2(x, z)
 22540  			return true
 22541  		}
 22542  		break
 22543  	}
 22544  	return false
 22545  }
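// rewriteValuegeneric_OpOr8 is the 8-bit instance of the same OR rules:
// constant folding, the ^x|^y rewrite, the x|x, x|0, x|-1, and x|^x
// identities, and reassociation of constant operands in OR chains.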
 22546  func rewriteValuegeneric_OpOr8(v *Value) bool {
 22547  	v_1 := v.Args[1]
 22548  	v_0 := v.Args[0]
 22549  	b := v.Block
 22550  	config := b.Func.Config
 22551  	// match: (Or8 (Const8 [c]) (Const8 [d]))
 22552  	// result: (Const8 [c|d])
 22553  	for {
 22554  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22555  			if v_0.Op != OpConst8 {
 22556  				continue
 22557  			}
 22558  			c := auxIntToInt8(v_0.AuxInt)
 22559  			if v_1.Op != OpConst8 {
 22560  				continue
 22561  			}
 22562  			d := auxIntToInt8(v_1.AuxInt)
 22563  			v.reset(OpConst8)
 22564  			v.AuxInt = int8ToAuxInt(c | d)
 22565  			return true
 22566  		}
 22567  		break
 22568  	}
 22569  	// match: (Or8 <t> (Com8 x) (Com8 y))
 22570  	// result: (Com8 (And8 <t> x y))
 22571  	for {
 22572  		t := v.Type
 22573  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22574  			if v_0.Op != OpCom8 {
 22575  				continue
 22576  			}
 22577  			x := v_0.Args[0]
 22578  			if v_1.Op != OpCom8 {
 22579  				continue
 22580  			}
 22581  			y := v_1.Args[0]
 22582  			v.reset(OpCom8)
 22583  			v0 := b.NewValue0(v.Pos, OpAnd8, t)
 22584  			v0.AddArg2(x, y)
 22585  			v.AddArg(v0)
 22586  			return true
 22587  		}
 22588  		break
 22589  	}
 22590  	// match: (Or8 x x)
 22591  	// result: x
 22592  	for {
 22593  		x := v_0
 22594  		if x != v_1 {
 22595  			break
 22596  		}
 22597  		v.copyOf(x)
 22598  		return true
 22599  	}
 22600  	// match: (Or8 (Const8 [0]) x)
 22601  	// result: x
 22602  	for {
 22603  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22604  			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 22605  				continue
 22606  			}
 22607  			x := v_1
 22608  			v.copyOf(x)
 22609  			return true
 22610  		}
 22611  		break
 22612  	}
 22613  	// match: (Or8 (Const8 [-1]) _)
 22614  	// result: (Const8 [-1])
 22615  	for {
 22616  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22617  			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != -1 {
 22618  				continue
 22619  			}
 22620  			v.reset(OpConst8)
 22621  			v.AuxInt = int8ToAuxInt(-1)
 22622  			return true
 22623  		}
 22624  		break
 22625  	}
 22626  	// match: (Or8 (Com8 x) x)
 22627  	// result: (Const8 [-1])
 22628  	for {
 22629  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22630  			if v_0.Op != OpCom8 {
 22631  				continue
 22632  			}
 22633  			x := v_0.Args[0]
 22634  			if x != v_1 {
 22635  				continue
 22636  			}
 22637  			v.reset(OpConst8)
 22638  			v.AuxInt = int8ToAuxInt(-1)
 22639  			return true
 22640  		}
 22641  		break
 22642  	}
 22643  	// match: (Or8 x (Or8 x y))
 22644  	// result: (Or8 x y)
 22645  	for {
 22646  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22647  			x := v_0
 22648  			if v_1.Op != OpOr8 {
 22649  				continue
 22650  			}
 22651  			_ = v_1.Args[1]
 22652  			v_1_0 := v_1.Args[0]
 22653  			v_1_1 := v_1.Args[1]
 22654  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 22655  				if x != v_1_0 {
 22656  					continue
 22657  				}
 22658  				y := v_1_1
 22659  				v.reset(OpOr8)
 22660  				v.AddArg2(x, y)
 22661  				return true
 22662  			}
 22663  		}
 22664  		break
 22665  	}
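	// When c1|c2 == -1, every bit cleared by the And mask c2 is set by c1,
	// so (x & c2) | c1 == x | c1 and the And can be dropped.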
 22666  	// match: (Or8 (And8 x (Const8 [c2])) (Const8 <t> [c1]))
 22667  	// cond: ^(c1 | c2) == 0
 22668  	// result: (Or8 (Const8 <t> [c1]) x)
 22669  	for {
 22670  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22671  			if v_0.Op != OpAnd8 {
 22672  				continue
 22673  			}
 22674  			_ = v_0.Args[1]
 22675  			v_0_0 := v_0.Args[0]
 22676  			v_0_1 := v_0.Args[1]
 22677  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 22678  				x := v_0_0
 22679  				if v_0_1.Op != OpConst8 {
 22680  					continue
 22681  				}
 22682  				c2 := auxIntToInt8(v_0_1.AuxInt)
 22683  				if v_1.Op != OpConst8 {
 22684  					continue
 22685  				}
 22686  				t := v_1.Type
 22687  				c1 := auxIntToInt8(v_1.AuxInt)
 22688  				if !(^(c1 | c2) == 0) {
 22689  					continue
 22690  				}
 22691  				v.reset(OpOr8)
 22692  				v0 := b.NewValue0(v.Pos, OpConst8, t)
 22693  				v0.AuxInt = int8ToAuxInt(c1)
 22694  				v.AddArg2(v0, x)
 22695  				return true
 22696  			}
 22697  		}
 22698  		break
 22699  	}
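	// Reassociate (c | z) | x to c | (z | x) when z and x are not constants,
	// so constants bubble toward the top and can fold with other constants
	// (see the following rule).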
 22700  	// match: (Or8 (Or8 i:(Const8 <t>) z) x)
 22701  	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
 22702  	// result: (Or8 i (Or8 <t> z x))
 22703  	for {
 22704  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22705  			if v_0.Op != OpOr8 {
 22706  				continue
 22707  			}
 22708  			_ = v_0.Args[1]
 22709  			v_0_0 := v_0.Args[0]
 22710  			v_0_1 := v_0.Args[1]
 22711  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 22712  				i := v_0_0
 22713  				if i.Op != OpConst8 {
 22714  					continue
 22715  				}
 22716  				t := i.Type
 22717  				z := v_0_1
 22718  				x := v_1
 22719  				if !(z.Op != OpConst8 && x.Op != OpConst8) {
 22720  					continue
 22721  				}
 22722  				v.reset(OpOr8)
 22723  				v0 := b.NewValue0(v.Pos, OpOr8, t)
 22724  				v0.AddArg2(z, x)
 22725  				v.AddArg2(i, v0)
 22726  				return true
 22727  			}
 22728  		}
 22729  		break
 22730  	}
 22731  	// match: (Or8 (Const8 <t> [c]) (Or8 (Const8 <t> [d]) x))
 22732  	// result: (Or8 (Const8 <t> [c|d]) x)
 22733  	for {
 22734  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22735  			if v_0.Op != OpConst8 {
 22736  				continue
 22737  			}
 22738  			t := v_0.Type
 22739  			c := auxIntToInt8(v_0.AuxInt)
 22740  			if v_1.Op != OpOr8 {
 22741  				continue
 22742  			}
 22743  			_ = v_1.Args[1]
 22744  			v_1_0 := v_1.Args[0]
 22745  			v_1_1 := v_1.Args[1]
 22746  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 22747  				if v_1_0.Op != OpConst8 || v_1_0.Type != t {
 22748  					continue
 22749  				}
 22750  				d := auxIntToInt8(v_1_0.AuxInt)
 22751  				x := v_1_1
 22752  				v.reset(OpOr8)
 22753  				v0 := b.NewValue0(v.Pos, OpConst8, t)
 22754  				v0.AuxInt = int8ToAuxInt(c | d)
 22755  				v.AddArg2(v0, x)
 22756  				return true
 22757  			}
 22758  		}
 22759  		break
 22760  	}
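	// With a constant shift count c (c < 8), x<<c | x>>u(8-c) is exactly a
	// rotate left by c, so it can be replaced by RotateLeft8 when the target
	// can rotate at this width.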
 22761  	// match: (Or8 (Lsh8x64 x z:(Const64 <t> [c])) (Rsh8Ux64 x (Const64 [d])))
 22762  	// cond: c < 8 && d == 8-c && canRotate(config, 8)
 22763  	// result: (RotateLeft8 x z)
 22764  	for {
 22765  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22766  			if v_0.Op != OpLsh8x64 {
 22767  				continue
 22768  			}
 22769  			_ = v_0.Args[1]
 22770  			x := v_0.Args[0]
 22771  			z := v_0.Args[1]
 22772  			if z.Op != OpConst64 {
 22773  				continue
 22774  			}
 22775  			c := auxIntToInt64(z.AuxInt)
 22776  			if v_1.Op != OpRsh8Ux64 {
 22777  				continue
 22778  			}
 22779  			_ = v_1.Args[1]
 22780  			if x != v_1.Args[0] {
 22781  				continue
 22782  			}
 22783  			v_1_1 := v_1.Args[1]
 22784  			if v_1_1.Op != OpConst64 {
 22785  				continue
 22786  			}
 22787  			d := auxIntToInt64(v_1_1.AuxInt)
 22788  			if !(c < 8 && d == 8-c && canRotate(config, 8)) {
 22789  				continue
 22790  			}
 22791  			v.reset(OpRotateLeft8)
 22792  			v.AddArg2(x, z)
 22793  			return true
 22794  		}
 22795  		break
 22796  	}
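	// The next group of rules recognizes variable-amount rotates,
	// x<<y | x>>u(8-y), for every shift-amount width (64/32/16/8 bits) and
	// with the two shifts in either order; all of them reduce to RotateLeft8
	// when at least one shift amount is known to be bounded.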
 22797  	// match: (Or8 left:(Lsh8x64 x y) right:(Rsh8Ux64 x (Sub64 (Const64 [8]) y)))
 22798  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
 22799  	// result: (RotateLeft8 x y)
 22800  	for {
 22801  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22802  			left := v_0
 22803  			if left.Op != OpLsh8x64 {
 22804  				continue
 22805  			}
 22806  			y := left.Args[1]
 22807  			x := left.Args[0]
 22808  			right := v_1
 22809  			if right.Op != OpRsh8Ux64 {
 22810  				continue
 22811  			}
 22812  			_ = right.Args[1]
 22813  			if x != right.Args[0] {
 22814  				continue
 22815  			}
 22816  			right_1 := right.Args[1]
 22817  			if right_1.Op != OpSub64 {
 22818  				continue
 22819  			}
 22820  			_ = right_1.Args[1]
 22821  			right_1_0 := right_1.Args[0]
 22822  			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
 22823  				continue
 22824  			}
 22825  			v.reset(OpRotateLeft8)
 22826  			v.AddArg2(x, y)
 22827  			return true
 22828  		}
 22829  		break
 22830  	}
 22831  	// match: (Or8 left:(Lsh8x32 x y) right:(Rsh8Ux32 x (Sub32 (Const32 [8]) y)))
 22832  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
 22833  	// result: (RotateLeft8 x y)
 22834  	for {
 22835  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22836  			left := v_0
 22837  			if left.Op != OpLsh8x32 {
 22838  				continue
 22839  			}
 22840  			y := left.Args[1]
 22841  			x := left.Args[0]
 22842  			right := v_1
 22843  			if right.Op != OpRsh8Ux32 {
 22844  				continue
 22845  			}
 22846  			_ = right.Args[1]
 22847  			if x != right.Args[0] {
 22848  				continue
 22849  			}
 22850  			right_1 := right.Args[1]
 22851  			if right_1.Op != OpSub32 {
 22852  				continue
 22853  			}
 22854  			_ = right_1.Args[1]
 22855  			right_1_0 := right_1.Args[0]
 22856  			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
 22857  				continue
 22858  			}
 22859  			v.reset(OpRotateLeft8)
 22860  			v.AddArg2(x, y)
 22861  			return true
 22862  		}
 22863  		break
 22864  	}
 22865  	// match: (Or8 left:(Lsh8x16 x y) right:(Rsh8Ux16 x (Sub16 (Const16 [8]) y)))
 22866  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
 22867  	// result: (RotateLeft8 x y)
 22868  	for {
 22869  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22870  			left := v_0
 22871  			if left.Op != OpLsh8x16 {
 22872  				continue
 22873  			}
 22874  			y := left.Args[1]
 22875  			x := left.Args[0]
 22876  			right := v_1
 22877  			if right.Op != OpRsh8Ux16 {
 22878  				continue
 22879  			}
 22880  			_ = right.Args[1]
 22881  			if x != right.Args[0] {
 22882  				continue
 22883  			}
 22884  			right_1 := right.Args[1]
 22885  			if right_1.Op != OpSub16 {
 22886  				continue
 22887  			}
 22888  			_ = right_1.Args[1]
 22889  			right_1_0 := right_1.Args[0]
 22890  			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
 22891  				continue
 22892  			}
 22893  			v.reset(OpRotateLeft8)
 22894  			v.AddArg2(x, y)
 22895  			return true
 22896  		}
 22897  		break
 22898  	}
 22899  	// match: (Or8 left:(Lsh8x8 x y) right:(Rsh8Ux8 x (Sub8 (Const8 [8]) y)))
 22900  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
 22901  	// result: (RotateLeft8 x y)
 22902  	for {
 22903  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22904  			left := v_0
 22905  			if left.Op != OpLsh8x8 {
 22906  				continue
 22907  			}
 22908  			y := left.Args[1]
 22909  			x := left.Args[0]
 22910  			right := v_1
 22911  			if right.Op != OpRsh8Ux8 {
 22912  				continue
 22913  			}
 22914  			_ = right.Args[1]
 22915  			if x != right.Args[0] {
 22916  				continue
 22917  			}
 22918  			right_1 := right.Args[1]
 22919  			if right_1.Op != OpSub8 {
 22920  				continue
 22921  			}
 22922  			_ = right_1.Args[1]
 22923  			right_1_0 := right_1.Args[0]
 22924  			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
 22925  				continue
 22926  			}
 22927  			v.reset(OpRotateLeft8)
 22928  			v.AddArg2(x, y)
 22929  			return true
 22930  		}
 22931  		break
 22932  	}
 22933  	// match: (Or8 right:(Rsh8Ux64 x y) left:(Lsh8x64 x z:(Sub64 (Const64 [8]) y)))
 22934  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
 22935  	// result: (RotateLeft8 x z)
 22936  	for {
 22937  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22938  			right := v_0
 22939  			if right.Op != OpRsh8Ux64 {
 22940  				continue
 22941  			}
 22942  			y := right.Args[1]
 22943  			x := right.Args[0]
 22944  			left := v_1
 22945  			if left.Op != OpLsh8x64 {
 22946  				continue
 22947  			}
 22948  			_ = left.Args[1]
 22949  			if x != left.Args[0] {
 22950  				continue
 22951  			}
 22952  			z := left.Args[1]
 22953  			if z.Op != OpSub64 {
 22954  				continue
 22955  			}
 22956  			_ = z.Args[1]
 22957  			z_0 := z.Args[0]
 22958  			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
 22959  				continue
 22960  			}
 22961  			v.reset(OpRotateLeft8)
 22962  			v.AddArg2(x, z)
 22963  			return true
 22964  		}
 22965  		break
 22966  	}
 22967  	// match: (Or8 right:(Rsh8Ux32 x y) left:(Lsh8x32 x z:(Sub32 (Const32 [8]) y)))
 22968  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
 22969  	// result: (RotateLeft8 x z)
 22970  	for {
 22971  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 22972  			right := v_0
 22973  			if right.Op != OpRsh8Ux32 {
 22974  				continue
 22975  			}
 22976  			y := right.Args[1]
 22977  			x := right.Args[0]
 22978  			left := v_1
 22979  			if left.Op != OpLsh8x32 {
 22980  				continue
 22981  			}
 22982  			_ = left.Args[1]
 22983  			if x != left.Args[0] {
 22984  				continue
 22985  			}
 22986  			z := left.Args[1]
 22987  			if z.Op != OpSub32 {
 22988  				continue
 22989  			}
 22990  			_ = z.Args[1]
 22991  			z_0 := z.Args[0]
 22992  			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
 22993  				continue
 22994  			}
 22995  			v.reset(OpRotateLeft8)
 22996  			v.AddArg2(x, z)
 22997  			return true
 22998  		}
 22999  		break
 23000  	}
 23001  	// match: (Or8 right:(Rsh8Ux16 x y) left:(Lsh8x16 x z:(Sub16 (Const16 [8]) y)))
 23002  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
 23003  	// result: (RotateLeft8 x z)
 23004  	for {
 23005  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 23006  			right := v_0
 23007  			if right.Op != OpRsh8Ux16 {
 23008  				continue
 23009  			}
 23010  			y := right.Args[1]
 23011  			x := right.Args[0]
 23012  			left := v_1
 23013  			if left.Op != OpLsh8x16 {
 23014  				continue
 23015  			}
 23016  			_ = left.Args[1]
 23017  			if x != left.Args[0] {
 23018  				continue
 23019  			}
 23020  			z := left.Args[1]
 23021  			if z.Op != OpSub16 {
 23022  				continue
 23023  			}
 23024  			_ = z.Args[1]
 23025  			z_0 := z.Args[0]
 23026  			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
 23027  				continue
 23028  			}
 23029  			v.reset(OpRotateLeft8)
 23030  			v.AddArg2(x, z)
 23031  			return true
 23032  		}
 23033  		break
 23034  	}
 23035  	// match: (Or8 right:(Rsh8Ux8 x y) left:(Lsh8x8 x z:(Sub8 (Const8 [8]) y)))
 23036  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
 23037  	// result: (RotateLeft8 x z)
 23038  	for {
 23039  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 23040  			right := v_0
 23041  			if right.Op != OpRsh8Ux8 {
 23042  				continue
 23043  			}
 23044  			y := right.Args[1]
 23045  			x := right.Args[0]
 23046  			left := v_1
 23047  			if left.Op != OpLsh8x8 {
 23048  				continue
 23049  			}
 23050  			_ = left.Args[1]
 23051  			if x != left.Args[0] {
 23052  				continue
 23053  			}
 23054  			z := left.Args[1]
 23055  			if z.Op != OpSub8 {
 23056  				continue
 23057  			}
 23058  			_ = z.Args[1]
 23059  			z_0 := z.Args[0]
 23060  			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
 23061  				continue
 23062  			}
 23063  			v.reset(OpRotateLeft8)
 23064  			v.AddArg2(x, z)
 23065  			return true
 23066  		}
 23067  		break
 23068  	}
 23069  	return false
 23070  }
 23071  func rewriteValuegeneric_OpOrB(v *Value) bool {
 23072  	v_1 := v.Args[1]
 23073  	v_0 := v.Args[0]
 23074  	b := v.Block
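	// The OrB rules below merge a pair of range checks into a single
	// unsigned comparison. For example, with c >= d,
	//   c < x || x < d
	// is equivalent to
	//   c-d <u x-d
	// (if x < d the subtraction wraps around to a large unsigned value).
	// E.g. c=10, d=3: x>10 || x<3 becomes 7 <u x-3.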
 23075  	// match: (OrB (Less64 (Const64 [c]) x) (Less64 x (Const64 [d])))
 23076  	// cond: c >= d
 23077  	// result: (Less64U (Const64 <x.Type> [c-d]) (Sub64 <x.Type> x (Const64 <x.Type> [d])))
 23078  	for {
 23079  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 23080  			if v_0.Op != OpLess64 {
 23081  				continue
 23082  			}
 23083  			x := v_0.Args[1]
 23084  			v_0_0 := v_0.Args[0]
 23085  			if v_0_0.Op != OpConst64 {
 23086  				continue
 23087  			}
 23088  			c := auxIntToInt64(v_0_0.AuxInt)
 23089  			if v_1.Op != OpLess64 {
 23090  				continue
 23091  			}
 23092  			_ = v_1.Args[1]
 23093  			if x != v_1.Args[0] {
 23094  				continue
 23095  			}
 23096  			v_1_1 := v_1.Args[1]
 23097  			if v_1_1.Op != OpConst64 {
 23098  				continue
 23099  			}
 23100  			d := auxIntToInt64(v_1_1.AuxInt)
 23101  			if !(c >= d) {
 23102  				continue
 23103  			}
 23104  			v.reset(OpLess64U)
 23105  			v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
 23106  			v0.AuxInt = int64ToAuxInt(c - d)
 23107  			v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
 23108  			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
 23109  			v2.AuxInt = int64ToAuxInt(d)
 23110  			v1.AddArg2(x, v2)
 23111  			v.AddArg2(v0, v1)
 23112  			return true
 23113  		}
 23114  		break
 23115  	}
 23116  	// match: (OrB (Leq64 (Const64 [c]) x) (Less64 x (Const64 [d])))
 23117  	// cond: c >= d
 23118  	// result: (Leq64U (Const64 <x.Type> [c-d]) (Sub64 <x.Type> x (Const64 <x.Type> [d])))
 23119  	for {
 23120  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 23121  			if v_0.Op != OpLeq64 {
 23122  				continue
 23123  			}
 23124  			x := v_0.Args[1]
 23125  			v_0_0 := v_0.Args[0]
 23126  			if v_0_0.Op != OpConst64 {
 23127  				continue
 23128  			}
 23129  			c := auxIntToInt64(v_0_0.AuxInt)
 23130  			if v_1.Op != OpLess64 {
 23131  				continue
 23132  			}
 23133  			_ = v_1.Args[1]
 23134  			if x != v_1.Args[0] {
 23135  				continue
 23136  			}
 23137  			v_1_1 := v_1.Args[1]
 23138  			if v_1_1.Op != OpConst64 {
 23139  				continue
 23140  			}
 23141  			d := auxIntToInt64(v_1_1.AuxInt)
 23142  			if !(c >= d) {
 23143  				continue
 23144  			}
 23145  			v.reset(OpLeq64U)
 23146  			v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
 23147  			v0.AuxInt = int64ToAuxInt(c - d)
 23148  			v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
 23149  			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
 23150  			v2.AuxInt = int64ToAuxInt(d)
 23151  			v1.AddArg2(x, v2)
 23152  			v.AddArg2(v0, v1)
 23153  			return true
 23154  		}
 23155  		break
 23156  	}
 23157  	// match: (OrB (Less32 (Const32 [c]) x) (Less32 x (Const32 [d])))
 23158  	// cond: c >= d
 23159  	// result: (Less32U (Const32 <x.Type> [c-d]) (Sub32 <x.Type> x (Const32 <x.Type> [d])))
 23160  	for {
 23161  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 23162  			if v_0.Op != OpLess32 {
 23163  				continue
 23164  			}
 23165  			x := v_0.Args[1]
 23166  			v_0_0 := v_0.Args[0]
 23167  			if v_0_0.Op != OpConst32 {
 23168  				continue
 23169  			}
 23170  			c := auxIntToInt32(v_0_0.AuxInt)
 23171  			if v_1.Op != OpLess32 {
 23172  				continue
 23173  			}
 23174  			_ = v_1.Args[1]
 23175  			if x != v_1.Args[0] {
 23176  				continue
 23177  			}
 23178  			v_1_1 := v_1.Args[1]
 23179  			if v_1_1.Op != OpConst32 {
 23180  				continue
 23181  			}
 23182  			d := auxIntToInt32(v_1_1.AuxInt)
 23183  			if !(c >= d) {
 23184  				continue
 23185  			}
 23186  			v.reset(OpLess32U)
 23187  			v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
 23188  			v0.AuxInt = int32ToAuxInt(c - d)
 23189  			v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
 23190  			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
 23191  			v2.AuxInt = int32ToAuxInt(d)
 23192  			v1.AddArg2(x, v2)
 23193  			v.AddArg2(v0, v1)
 23194  			return true
 23195  		}
 23196  		break
 23197  	}
 23198  	// match: (OrB (Leq32 (Const32 [c]) x) (Less32 x (Const32 [d])))
 23199  	// cond: c >= d
 23200  	// result: (Leq32U (Const32 <x.Type> [c-d]) (Sub32 <x.Type> x (Const32 <x.Type> [d])))
 23201  	for {
 23202  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 23203  			if v_0.Op != OpLeq32 {
 23204  				continue
 23205  			}
 23206  			x := v_0.Args[1]
 23207  			v_0_0 := v_0.Args[0]
 23208  			if v_0_0.Op != OpConst32 {
 23209  				continue
 23210  			}
 23211  			c := auxIntToInt32(v_0_0.AuxInt)
 23212  			if v_1.Op != OpLess32 {
 23213  				continue
 23214  			}
 23215  			_ = v_1.Args[1]
 23216  			if x != v_1.Args[0] {
 23217  				continue
 23218  			}
 23219  			v_1_1 := v_1.Args[1]
 23220  			if v_1_1.Op != OpConst32 {
 23221  				continue
 23222  			}
 23223  			d := auxIntToInt32(v_1_1.AuxInt)
 23224  			if !(c >= d) {
 23225  				continue
 23226  			}
 23227  			v.reset(OpLeq32U)
 23228  			v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
 23229  			v0.AuxInt = int32ToAuxInt(c - d)
 23230  			v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
 23231  			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
 23232  			v2.AuxInt = int32ToAuxInt(d)
 23233  			v1.AddArg2(x, v2)
 23234  			v.AddArg2(v0, v1)
 23235  			return true
 23236  		}
 23237  		break
 23238  	}
 23239  	// match: (OrB (Less16 (Const16 [c]) x) (Less16 x (Const16 [d])))
 23240  	// cond: c >= d
 23241  	// result: (Less16U (Const16 <x.Type> [c-d]) (Sub16 <x.Type> x (Const16 <x.Type> [d])))
 23242  	for {
 23243  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 23244  			if v_0.Op != OpLess16 {
 23245  				continue
 23246  			}
 23247  			x := v_0.Args[1]
 23248  			v_0_0 := v_0.Args[0]
 23249  			if v_0_0.Op != OpConst16 {
 23250  				continue
 23251  			}
 23252  			c := auxIntToInt16(v_0_0.AuxInt)
 23253  			if v_1.Op != OpLess16 {
 23254  				continue
 23255  			}
 23256  			_ = v_1.Args[1]
 23257  			if x != v_1.Args[0] {
 23258  				continue
 23259  			}
 23260  			v_1_1 := v_1.Args[1]
 23261  			if v_1_1.Op != OpConst16 {
 23262  				continue
 23263  			}
 23264  			d := auxIntToInt16(v_1_1.AuxInt)
 23265  			if !(c >= d) {
 23266  				continue
 23267  			}
 23268  			v.reset(OpLess16U)
 23269  			v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
 23270  			v0.AuxInt = int16ToAuxInt(c - d)
 23271  			v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
 23272  			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
 23273  			v2.AuxInt = int16ToAuxInt(d)
 23274  			v1.AddArg2(x, v2)
 23275  			v.AddArg2(v0, v1)
 23276  			return true
 23277  		}
 23278  		break
 23279  	}
 23280  	// match: (OrB (Leq16 (Const16 [c]) x) (Less16 x (Const16 [d])))
 23281  	// cond: c >= d
 23282  	// result: (Leq16U (Const16 <x.Type> [c-d]) (Sub16 <x.Type> x (Const16 <x.Type> [d])))
 23283  	for {
 23284  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 23285  			if v_0.Op != OpLeq16 {
 23286  				continue
 23287  			}
 23288  			x := v_0.Args[1]
 23289  			v_0_0 := v_0.Args[0]
 23290  			if v_0_0.Op != OpConst16 {
 23291  				continue
 23292  			}
 23293  			c := auxIntToInt16(v_0_0.AuxInt)
 23294  			if v_1.Op != OpLess16 {
 23295  				continue
 23296  			}
 23297  			_ = v_1.Args[1]
 23298  			if x != v_1.Args[0] {
 23299  				continue
 23300  			}
 23301  			v_1_1 := v_1.Args[1]
 23302  			if v_1_1.Op != OpConst16 {
 23303  				continue
 23304  			}
 23305  			d := auxIntToInt16(v_1_1.AuxInt)
 23306  			if !(c >= d) {
 23307  				continue
 23308  			}
 23309  			v.reset(OpLeq16U)
 23310  			v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
 23311  			v0.AuxInt = int16ToAuxInt(c - d)
 23312  			v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
 23313  			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
 23314  			v2.AuxInt = int16ToAuxInt(d)
 23315  			v1.AddArg2(x, v2)
 23316  			v.AddArg2(v0, v1)
 23317  			return true
 23318  		}
 23319  		break
 23320  	}
 23321  	// match: (OrB (Less8 (Const8 [c]) x) (Less8 x (Const8 [d])))
 23322  	// cond: c >= d
 23323  	// result: (Less8U (Const8 <x.Type> [c-d]) (Sub8 <x.Type> x (Const8 <x.Type> [d])))
 23324  	for {
 23325  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 23326  			if v_0.Op != OpLess8 {
 23327  				continue
 23328  			}
 23329  			x := v_0.Args[1]
 23330  			v_0_0 := v_0.Args[0]
 23331  			if v_0_0.Op != OpConst8 {
 23332  				continue
 23333  			}
 23334  			c := auxIntToInt8(v_0_0.AuxInt)
 23335  			if v_1.Op != OpLess8 {
 23336  				continue
 23337  			}
 23338  			_ = v_1.Args[1]
 23339  			if x != v_1.Args[0] {
 23340  				continue
 23341  			}
 23342  			v_1_1 := v_1.Args[1]
 23343  			if v_1_1.Op != OpConst8 {
 23344  				continue
 23345  			}
 23346  			d := auxIntToInt8(v_1_1.AuxInt)
 23347  			if !(c >= d) {
 23348  				continue
 23349  			}
 23350  			v.reset(OpLess8U)
 23351  			v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
 23352  			v0.AuxInt = int8ToAuxInt(c - d)
 23353  			v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
 23354  			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
 23355  			v2.AuxInt = int8ToAuxInt(d)
 23356  			v1.AddArg2(x, v2)
 23357  			v.AddArg2(v0, v1)
 23358  			return true
 23359  		}
 23360  		break
 23361  	}
 23362  	// match: (OrB (Leq8 (Const8 [c]) x) (Less8 x (Const8 [d])))
 23363  	// cond: c >= d
 23364  	// result: (Leq8U (Const8 <x.Type> [c-d]) (Sub8 <x.Type> x (Const8 <x.Type> [d])))
 23365  	for {
 23366  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 23367  			if v_0.Op != OpLeq8 {
 23368  				continue
 23369  			}
 23370  			x := v_0.Args[1]
 23371  			v_0_0 := v_0.Args[0]
 23372  			if v_0_0.Op != OpConst8 {
 23373  				continue
 23374  			}
 23375  			c := auxIntToInt8(v_0_0.AuxInt)
 23376  			if v_1.Op != OpLess8 {
 23377  				continue
 23378  			}
 23379  			_ = v_1.Args[1]
 23380  			if x != v_1.Args[0] {
 23381  				continue
 23382  			}
 23383  			v_1_1 := v_1.Args[1]
 23384  			if v_1_1.Op != OpConst8 {
 23385  				continue
 23386  			}
 23387  			d := auxIntToInt8(v_1_1.AuxInt)
 23388  			if !(c >= d) {
 23389  				continue
 23390  			}
 23391  			v.reset(OpLeq8U)
 23392  			v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
 23393  			v0.AuxInt = int8ToAuxInt(c - d)
 23394  			v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
 23395  			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
 23396  			v2.AuxInt = int8ToAuxInt(d)
 23397  			v1.AddArg2(x, v2)
 23398  			v.AddArg2(v0, v1)
 23399  			return true
 23400  		}
 23401  		break
 23402  	}
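	// The next variants match x <= d instead of x < d; since x <= d is
	// x < d+1, they reduce to the previous form with d replaced by d+1.
	// The d+1 > d condition rules out overflow when incrementing d.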
 23403  	// match: (OrB (Less64 (Const64 [c]) x) (Leq64 x (Const64 [d])))
 23404  	// cond: c >= d+1 && d+1 > d
 23405  	// result: (Less64U (Const64 <x.Type> [c-d-1]) (Sub64 <x.Type> x (Const64 <x.Type> [d+1])))
 23406  	for {
 23407  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 23408  			if v_0.Op != OpLess64 {
 23409  				continue
 23410  			}
 23411  			x := v_0.Args[1]
 23412  			v_0_0 := v_0.Args[0]
 23413  			if v_0_0.Op != OpConst64 {
 23414  				continue
 23415  			}
 23416  			c := auxIntToInt64(v_0_0.AuxInt)
 23417  			if v_1.Op != OpLeq64 {
 23418  				continue
 23419  			}
 23420  			_ = v_1.Args[1]
 23421  			if x != v_1.Args[0] {
 23422  				continue
 23423  			}
 23424  			v_1_1 := v_1.Args[1]
 23425  			if v_1_1.Op != OpConst64 {
 23426  				continue
 23427  			}
 23428  			d := auxIntToInt64(v_1_1.AuxInt)
 23429  			if !(c >= d+1 && d+1 > d) {
 23430  				continue
 23431  			}
 23432  			v.reset(OpLess64U)
 23433  			v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
 23434  			v0.AuxInt = int64ToAuxInt(c - d - 1)
 23435  			v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
 23436  			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
 23437  			v2.AuxInt = int64ToAuxInt(d + 1)
 23438  			v1.AddArg2(x, v2)
 23439  			v.AddArg2(v0, v1)
 23440  			return true
 23441  		}
 23442  		break
 23443  	}
 23444  	// match: (OrB (Leq64 (Const64 [c]) x) (Leq64 x (Const64 [d])))
 23445  	// cond: c >= d+1 && d+1 > d
 23446  	// result: (Leq64U (Const64 <x.Type> [c-d-1]) (Sub64 <x.Type> x (Const64 <x.Type> [d+1])))
 23447  	for {
 23448  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 23449  			if v_0.Op != OpLeq64 {
 23450  				continue
 23451  			}
 23452  			x := v_0.Args[1]
 23453  			v_0_0 := v_0.Args[0]
 23454  			if v_0_0.Op != OpConst64 {
 23455  				continue
 23456  			}
 23457  			c := auxIntToInt64(v_0_0.AuxInt)
 23458  			if v_1.Op != OpLeq64 {
 23459  				continue
 23460  			}
 23461  			_ = v_1.Args[1]
 23462  			if x != v_1.Args[0] {
 23463  				continue
 23464  			}
 23465  			v_1_1 := v_1.Args[1]
 23466  			if v_1_1.Op != OpConst64 {
 23467  				continue
 23468  			}
 23469  			d := auxIntToInt64(v_1_1.AuxInt)
 23470  			if !(c >= d+1 && d+1 > d) {
 23471  				continue
 23472  			}
 23473  			v.reset(OpLeq64U)
 23474  			v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
 23475  			v0.AuxInt = int64ToAuxInt(c - d - 1)
 23476  			v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
 23477  			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
 23478  			v2.AuxInt = int64ToAuxInt(d + 1)
 23479  			v1.AddArg2(x, v2)
 23480  			v.AddArg2(v0, v1)
 23481  			return true
 23482  		}
 23483  		break
 23484  	}
 23485  	// match: (OrB (Less32 (Const32 [c]) x) (Leq32 x (Const32 [d])))
 23486  	// cond: c >= d+1 && d+1 > d
 23487  	// result: (Less32U (Const32 <x.Type> [c-d-1]) (Sub32 <x.Type> x (Const32 <x.Type> [d+1])))
 23488  	for {
 23489  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 23490  			if v_0.Op != OpLess32 {
 23491  				continue
 23492  			}
 23493  			x := v_0.Args[1]
 23494  			v_0_0 := v_0.Args[0]
 23495  			if v_0_0.Op != OpConst32 {
 23496  				continue
 23497  			}
 23498  			c := auxIntToInt32(v_0_0.AuxInt)
 23499  			if v_1.Op != OpLeq32 {
 23500  				continue
 23501  			}
 23502  			_ = v_1.Args[1]
 23503  			if x != v_1.Args[0] {
 23504  				continue
 23505  			}
 23506  			v_1_1 := v_1.Args[1]
 23507  			if v_1_1.Op != OpConst32 {
 23508  				continue
 23509  			}
 23510  			d := auxIntToInt32(v_1_1.AuxInt)
 23511  			if !(c >= d+1 && d+1 > d) {
 23512  				continue
 23513  			}
 23514  			v.reset(OpLess32U)
 23515  			v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
 23516  			v0.AuxInt = int32ToAuxInt(c - d - 1)
 23517  			v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
 23518  			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
 23519  			v2.AuxInt = int32ToAuxInt(d + 1)
 23520  			v1.AddArg2(x, v2)
 23521  			v.AddArg2(v0, v1)
 23522  			return true
 23523  		}
 23524  		break
 23525  	}
 23526  	// match: (OrB (Leq32 (Const32 [c]) x) (Leq32 x (Const32 [d])))
 23527  	// cond: c >= d+1 && d+1 > d
 23528  	// result: (Leq32U (Const32 <x.Type> [c-d-1]) (Sub32 <x.Type> x (Const32 <x.Type> [d+1])))
 23529  	for {
 23530  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 23531  			if v_0.Op != OpLeq32 {
 23532  				continue
 23533  			}
 23534  			x := v_0.Args[1]
 23535  			v_0_0 := v_0.Args[0]
 23536  			if v_0_0.Op != OpConst32 {
 23537  				continue
 23538  			}
 23539  			c := auxIntToInt32(v_0_0.AuxInt)
 23540  			if v_1.Op != OpLeq32 {
 23541  				continue
 23542  			}
 23543  			_ = v_1.Args[1]
 23544  			if x != v_1.Args[0] {
 23545  				continue
 23546  			}
 23547  			v_1_1 := v_1.Args[1]
 23548  			if v_1_1.Op != OpConst32 {
 23549  				continue
 23550  			}
 23551  			d := auxIntToInt32(v_1_1.AuxInt)
 23552  			if !(c >= d+1 && d+1 > d) {
 23553  				continue
 23554  			}
 23555  			v.reset(OpLeq32U)
 23556  			v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
 23557  			v0.AuxInt = int32ToAuxInt(c - d - 1)
 23558  			v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
 23559  			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
 23560  			v2.AuxInt = int32ToAuxInt(d + 1)
 23561  			v1.AddArg2(x, v2)
 23562  			v.AddArg2(v0, v1)
 23563  			return true
 23564  		}
 23565  		break
 23566  	}
 23567  	// match: (OrB (Less16 (Const16 [c]) x) (Leq16 x (Const16 [d])))
 23568  	// cond: c >= d+1 && d+1 > d
 23569  	// result: (Less16U (Const16 <x.Type> [c-d-1]) (Sub16 <x.Type> x (Const16 <x.Type> [d+1])))
 23570  	for {
 23571  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 23572  			if v_0.Op != OpLess16 {
 23573  				continue
 23574  			}
 23575  			x := v_0.Args[1]
 23576  			v_0_0 := v_0.Args[0]
 23577  			if v_0_0.Op != OpConst16 {
 23578  				continue
 23579  			}
 23580  			c := auxIntToInt16(v_0_0.AuxInt)
 23581  			if v_1.Op != OpLeq16 {
 23582  				continue
 23583  			}
 23584  			_ = v_1.Args[1]
 23585  			if x != v_1.Args[0] {
 23586  				continue
 23587  			}
 23588  			v_1_1 := v_1.Args[1]
 23589  			if v_1_1.Op != OpConst16 {
 23590  				continue
 23591  			}
 23592  			d := auxIntToInt16(v_1_1.AuxInt)
 23593  			if !(c >= d+1 && d+1 > d) {
 23594  				continue
 23595  			}
 23596  			v.reset(OpLess16U)
 23597  			v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
 23598  			v0.AuxInt = int16ToAuxInt(c - d - 1)
 23599  			v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
 23600  			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
 23601  			v2.AuxInt = int16ToAuxInt(d + 1)
 23602  			v1.AddArg2(x, v2)
 23603  			v.AddArg2(v0, v1)
 23604  			return true
 23605  		}
 23606  		break
 23607  	}
 23608  	// match: (OrB (Leq16 (Const16 [c]) x) (Leq16 x (Const16 [d])))
 23609  	// cond: c >= d+1 && d+1 > d
 23610  	// result: (Leq16U (Const16 <x.Type> [c-d-1]) (Sub16 <x.Type> x (Const16 <x.Type> [d+1])))
 23611  	for {
 23612  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 23613  			if v_0.Op != OpLeq16 {
 23614  				continue
 23615  			}
 23616  			x := v_0.Args[1]
 23617  			v_0_0 := v_0.Args[0]
 23618  			if v_0_0.Op != OpConst16 {
 23619  				continue
 23620  			}
 23621  			c := auxIntToInt16(v_0_0.AuxInt)
 23622  			if v_1.Op != OpLeq16 {
 23623  				continue
 23624  			}
 23625  			_ = v_1.Args[1]
 23626  			if x != v_1.Args[0] {
 23627  				continue
 23628  			}
 23629  			v_1_1 := v_1.Args[1]
 23630  			if v_1_1.Op != OpConst16 {
 23631  				continue
 23632  			}
 23633  			d := auxIntToInt16(v_1_1.AuxInt)
 23634  			if !(c >= d+1 && d+1 > d) {
 23635  				continue
 23636  			}
 23637  			v.reset(OpLeq16U)
 23638  			v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
 23639  			v0.AuxInt = int16ToAuxInt(c - d - 1)
 23640  			v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
 23641  			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
 23642  			v2.AuxInt = int16ToAuxInt(d + 1)
 23643  			v1.AddArg2(x, v2)
 23644  			v.AddArg2(v0, v1)
 23645  			return true
 23646  		}
 23647  		break
 23648  	}
 23649  	// match: (OrB (Less8 (Const8 [c]) x) (Leq8 x (Const8 [d])))
 23650  	// cond: c >= d+1 && d+1 > d
 23651  	// result: (Less8U (Const8 <x.Type> [c-d-1]) (Sub8 <x.Type> x (Const8 <x.Type> [d+1])))
 23652  	for {
 23653  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 23654  			if v_0.Op != OpLess8 {
 23655  				continue
 23656  			}
 23657  			x := v_0.Args[1]
 23658  			v_0_0 := v_0.Args[0]
 23659  			if v_0_0.Op != OpConst8 {
 23660  				continue
 23661  			}
 23662  			c := auxIntToInt8(v_0_0.AuxInt)
 23663  			if v_1.Op != OpLeq8 {
 23664  				continue
 23665  			}
 23666  			_ = v_1.Args[1]
 23667  			if x != v_1.Args[0] {
 23668  				continue
 23669  			}
 23670  			v_1_1 := v_1.Args[1]
 23671  			if v_1_1.Op != OpConst8 {
 23672  				continue
 23673  			}
 23674  			d := auxIntToInt8(v_1_1.AuxInt)
 23675  			if !(c >= d+1 && d+1 > d) {
 23676  				continue
 23677  			}
 23678  			v.reset(OpLess8U)
 23679  			v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
 23680  			v0.AuxInt = int8ToAuxInt(c - d - 1)
 23681  			v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
 23682  			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
 23683  			v2.AuxInt = int8ToAuxInt(d + 1)
 23684  			v1.AddArg2(x, v2)
 23685  			v.AddArg2(v0, v1)
 23686  			return true
 23687  		}
 23688  		break
 23689  	}
 23690  	// match: (OrB (Leq8 (Const8 [c]) x) (Leq8 x (Const8 [d])))
 23691  	// cond: c >= d+1 && d+1 > d
 23692  	// result: (Leq8U (Const8 <x.Type> [c-d-1]) (Sub8 <x.Type> x (Const8 <x.Type> [d+1])))
 23693  	for {
 23694  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 23695  			if v_0.Op != OpLeq8 {
 23696  				continue
 23697  			}
 23698  			x := v_0.Args[1]
 23699  			v_0_0 := v_0.Args[0]
 23700  			if v_0_0.Op != OpConst8 {
 23701  				continue
 23702  			}
 23703  			c := auxIntToInt8(v_0_0.AuxInt)
 23704  			if v_1.Op != OpLeq8 {
 23705  				continue
 23706  			}
 23707  			_ = v_1.Args[1]
 23708  			if x != v_1.Args[0] {
 23709  				continue
 23710  			}
 23711  			v_1_1 := v_1.Args[1]
 23712  			if v_1_1.Op != OpConst8 {
 23713  				continue
 23714  			}
 23715  			d := auxIntToInt8(v_1_1.AuxInt)
 23716  			if !(c >= d+1 && d+1 > d) {
 23717  				continue
 23718  			}
 23719  			v.reset(OpLeq8U)
 23720  			v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
 23721  			v0.AuxInt = int8ToAuxInt(c - d - 1)
 23722  			v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
 23723  			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
 23724  			v2.AuxInt = int8ToAuxInt(d + 1)
 23725  			v1.AddArg2(x, v2)
 23726  			v.AddArg2(v0, v1)
 23727  			return true
 23728  		}
 23729  		break
 23730  	}
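	// The same range-check merging applies when the original comparisons are
	// already unsigned; here the c >= d (and d+1 overflow) side conditions
	// are evaluated as unsigned values of the appropriate width.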
 23731  	// match: (OrB (Less64U (Const64 [c]) x) (Less64U x (Const64 [d])))
 23732  	// cond: uint64(c) >= uint64(d)
 23733  	// result: (Less64U (Const64 <x.Type> [c-d]) (Sub64 <x.Type> x (Const64 <x.Type> [d])))
 23734  	for {
 23735  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 23736  			if v_0.Op != OpLess64U {
 23737  				continue
 23738  			}
 23739  			x := v_0.Args[1]
 23740  			v_0_0 := v_0.Args[0]
 23741  			if v_0_0.Op != OpConst64 {
 23742  				continue
 23743  			}
 23744  			c := auxIntToInt64(v_0_0.AuxInt)
 23745  			if v_1.Op != OpLess64U {
 23746  				continue
 23747  			}
 23748  			_ = v_1.Args[1]
 23749  			if x != v_1.Args[0] {
 23750  				continue
 23751  			}
 23752  			v_1_1 := v_1.Args[1]
 23753  			if v_1_1.Op != OpConst64 {
 23754  				continue
 23755  			}
 23756  			d := auxIntToInt64(v_1_1.AuxInt)
 23757  			if !(uint64(c) >= uint64(d)) {
 23758  				continue
 23759  			}
 23760  			v.reset(OpLess64U)
 23761  			v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
 23762  			v0.AuxInt = int64ToAuxInt(c - d)
 23763  			v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
 23764  			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
 23765  			v2.AuxInt = int64ToAuxInt(d)
 23766  			v1.AddArg2(x, v2)
 23767  			v.AddArg2(v0, v1)
 23768  			return true
 23769  		}
 23770  		break
 23771  	}
 23772  	// match: (OrB (Leq64U (Const64 [c]) x) (Less64U x (Const64 [d])))
 23773  	// cond: uint64(c) >= uint64(d)
 23774  	// result: (Leq64U (Const64 <x.Type> [c-d]) (Sub64 <x.Type> x (Const64 <x.Type> [d])))
 23775  	for {
 23776  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 23777  			if v_0.Op != OpLeq64U {
 23778  				continue
 23779  			}
 23780  			x := v_0.Args[1]
 23781  			v_0_0 := v_0.Args[0]
 23782  			if v_0_0.Op != OpConst64 {
 23783  				continue
 23784  			}
 23785  			c := auxIntToInt64(v_0_0.AuxInt)
 23786  			if v_1.Op != OpLess64U {
 23787  				continue
 23788  			}
 23789  			_ = v_1.Args[1]
 23790  			if x != v_1.Args[0] {
 23791  				continue
 23792  			}
 23793  			v_1_1 := v_1.Args[1]
 23794  			if v_1_1.Op != OpConst64 {
 23795  				continue
 23796  			}
 23797  			d := auxIntToInt64(v_1_1.AuxInt)
 23798  			if !(uint64(c) >= uint64(d)) {
 23799  				continue
 23800  			}
 23801  			v.reset(OpLeq64U)
 23802  			v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
 23803  			v0.AuxInt = int64ToAuxInt(c - d)
 23804  			v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
 23805  			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
 23806  			v2.AuxInt = int64ToAuxInt(d)
 23807  			v1.AddArg2(x, v2)
 23808  			v.AddArg2(v0, v1)
 23809  			return true
 23810  		}
 23811  		break
 23812  	}
 23813  	// match: (OrB (Less32U (Const32 [c]) x) (Less32U x (Const32 [d])))
 23814  	// cond: uint32(c) >= uint32(d)
 23815  	// result: (Less32U (Const32 <x.Type> [c-d]) (Sub32 <x.Type> x (Const32 <x.Type> [d])))
 23816  	for {
 23817  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 23818  			if v_0.Op != OpLess32U {
 23819  				continue
 23820  			}
 23821  			x := v_0.Args[1]
 23822  			v_0_0 := v_0.Args[0]
 23823  			if v_0_0.Op != OpConst32 {
 23824  				continue
 23825  			}
 23826  			c := auxIntToInt32(v_0_0.AuxInt)
 23827  			if v_1.Op != OpLess32U {
 23828  				continue
 23829  			}
 23830  			_ = v_1.Args[1]
 23831  			if x != v_1.Args[0] {
 23832  				continue
 23833  			}
 23834  			v_1_1 := v_1.Args[1]
 23835  			if v_1_1.Op != OpConst32 {
 23836  				continue
 23837  			}
 23838  			d := auxIntToInt32(v_1_1.AuxInt)
 23839  			if !(uint32(c) >= uint32(d)) {
 23840  				continue
 23841  			}
 23842  			v.reset(OpLess32U)
 23843  			v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
 23844  			v0.AuxInt = int32ToAuxInt(c - d)
 23845  			v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
 23846  			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
 23847  			v2.AuxInt = int32ToAuxInt(d)
 23848  			v1.AddArg2(x, v2)
 23849  			v.AddArg2(v0, v1)
 23850  			return true
 23851  		}
 23852  		break
 23853  	}
 23854  	// match: (OrB (Leq32U (Const32 [c]) x) (Less32U x (Const32 [d])))
 23855  	// cond: uint32(c) >= uint32(d)
 23856  	// result: (Leq32U (Const32 <x.Type> [c-d]) (Sub32 <x.Type> x (Const32 <x.Type> [d])))
 23857  	for {
 23858  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 23859  			if v_0.Op != OpLeq32U {
 23860  				continue
 23861  			}
 23862  			x := v_0.Args[1]
 23863  			v_0_0 := v_0.Args[0]
 23864  			if v_0_0.Op != OpConst32 {
 23865  				continue
 23866  			}
 23867  			c := auxIntToInt32(v_0_0.AuxInt)
 23868  			if v_1.Op != OpLess32U {
 23869  				continue
 23870  			}
 23871  			_ = v_1.Args[1]
 23872  			if x != v_1.Args[0] {
 23873  				continue
 23874  			}
 23875  			v_1_1 := v_1.Args[1]
 23876  			if v_1_1.Op != OpConst32 {
 23877  				continue
 23878  			}
 23879  			d := auxIntToInt32(v_1_1.AuxInt)
 23880  			if !(uint32(c) >= uint32(d)) {
 23881  				continue
 23882  			}
 23883  			v.reset(OpLeq32U)
 23884  			v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
 23885  			v0.AuxInt = int32ToAuxInt(c - d)
 23886  			v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
 23887  			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
 23888  			v2.AuxInt = int32ToAuxInt(d)
 23889  			v1.AddArg2(x, v2)
 23890  			v.AddArg2(v0, v1)
 23891  			return true
 23892  		}
 23893  		break
 23894  	}
 23895  	// match: (OrB (Less16U (Const16 [c]) x) (Less16U x (Const16 [d])))
 23896  	// cond: uint16(c) >= uint16(d)
 23897  	// result: (Less16U (Const16 <x.Type> [c-d]) (Sub16 <x.Type> x (Const16 <x.Type> [d])))
 23898  	for {
 23899  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 23900  			if v_0.Op != OpLess16U {
 23901  				continue
 23902  			}
 23903  			x := v_0.Args[1]
 23904  			v_0_0 := v_0.Args[0]
 23905  			if v_0_0.Op != OpConst16 {
 23906  				continue
 23907  			}
 23908  			c := auxIntToInt16(v_0_0.AuxInt)
 23909  			if v_1.Op != OpLess16U {
 23910  				continue
 23911  			}
 23912  			_ = v_1.Args[1]
 23913  			if x != v_1.Args[0] {
 23914  				continue
 23915  			}
 23916  			v_1_1 := v_1.Args[1]
 23917  			if v_1_1.Op != OpConst16 {
 23918  				continue
 23919  			}
 23920  			d := auxIntToInt16(v_1_1.AuxInt)
 23921  			if !(uint16(c) >= uint16(d)) {
 23922  				continue
 23923  			}
 23924  			v.reset(OpLess16U)
 23925  			v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
 23926  			v0.AuxInt = int16ToAuxInt(c - d)
 23927  			v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
 23928  			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
 23929  			v2.AuxInt = int16ToAuxInt(d)
 23930  			v1.AddArg2(x, v2)
 23931  			v.AddArg2(v0, v1)
 23932  			return true
 23933  		}
 23934  		break
 23935  	}
 23936  	// match: (OrB (Leq16U (Const16 [c]) x) (Less16U x (Const16 [d])))
 23937  	// cond: uint16(c) >= uint16(d)
 23938  	// result: (Leq16U (Const16 <x.Type> [c-d]) (Sub16 <x.Type> x (Const16 <x.Type> [d])))
 23939  	for {
 23940  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 23941  			if v_0.Op != OpLeq16U {
 23942  				continue
 23943  			}
 23944  			x := v_0.Args[1]
 23945  			v_0_0 := v_0.Args[0]
 23946  			if v_0_0.Op != OpConst16 {
 23947  				continue
 23948  			}
 23949  			c := auxIntToInt16(v_0_0.AuxInt)
 23950  			if v_1.Op != OpLess16U {
 23951  				continue
 23952  			}
 23953  			_ = v_1.Args[1]
 23954  			if x != v_1.Args[0] {
 23955  				continue
 23956  			}
 23957  			v_1_1 := v_1.Args[1]
 23958  			if v_1_1.Op != OpConst16 {
 23959  				continue
 23960  			}
 23961  			d := auxIntToInt16(v_1_1.AuxInt)
 23962  			if !(uint16(c) >= uint16(d)) {
 23963  				continue
 23964  			}
 23965  			v.reset(OpLeq16U)
 23966  			v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
 23967  			v0.AuxInt = int16ToAuxInt(c - d)
 23968  			v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
 23969  			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
 23970  			v2.AuxInt = int16ToAuxInt(d)
 23971  			v1.AddArg2(x, v2)
 23972  			v.AddArg2(v0, v1)
 23973  			return true
 23974  		}
 23975  		break
 23976  	}
 23977  	// match: (OrB (Less8U (Const8 [c]) x) (Less8U x (Const8 [d])))
 23978  	// cond: uint8(c) >= uint8(d)
 23979  	// result: (Less8U (Const8 <x.Type> [c-d]) (Sub8 <x.Type> x (Const8 <x.Type> [d])))
 23980  	for {
 23981  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 23982  			if v_0.Op != OpLess8U {
 23983  				continue
 23984  			}
 23985  			x := v_0.Args[1]
 23986  			v_0_0 := v_0.Args[0]
 23987  			if v_0_0.Op != OpConst8 {
 23988  				continue
 23989  			}
 23990  			c := auxIntToInt8(v_0_0.AuxInt)
 23991  			if v_1.Op != OpLess8U {
 23992  				continue
 23993  			}
 23994  			_ = v_1.Args[1]
 23995  			if x != v_1.Args[0] {
 23996  				continue
 23997  			}
 23998  			v_1_1 := v_1.Args[1]
 23999  			if v_1_1.Op != OpConst8 {
 24000  				continue
 24001  			}
 24002  			d := auxIntToInt8(v_1_1.AuxInt)
 24003  			if !(uint8(c) >= uint8(d)) {
 24004  				continue
 24005  			}
 24006  			v.reset(OpLess8U)
 24007  			v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
 24008  			v0.AuxInt = int8ToAuxInt(c - d)
 24009  			v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
 24010  			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
 24011  			v2.AuxInt = int8ToAuxInt(d)
 24012  			v1.AddArg2(x, v2)
 24013  			v.AddArg2(v0, v1)
 24014  			return true
 24015  		}
 24016  		break
 24017  	}
 24018  	// match: (OrB (Leq8U (Const8 [c]) x) (Less8U x (Const8 [d])))
 24019  	// cond: uint8(c) >= uint8(d)
 24020  	// result: (Leq8U (Const8 <x.Type> [c-d]) (Sub8 <x.Type> x (Const8 <x.Type> [d])))
 24021  	for {
 24022  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 24023  			if v_0.Op != OpLeq8U {
 24024  				continue
 24025  			}
 24026  			x := v_0.Args[1]
 24027  			v_0_0 := v_0.Args[0]
 24028  			if v_0_0.Op != OpConst8 {
 24029  				continue
 24030  			}
 24031  			c := auxIntToInt8(v_0_0.AuxInt)
 24032  			if v_1.Op != OpLess8U {
 24033  				continue
 24034  			}
 24035  			_ = v_1.Args[1]
 24036  			if x != v_1.Args[0] {
 24037  				continue
 24038  			}
 24039  			v_1_1 := v_1.Args[1]
 24040  			if v_1_1.Op != OpConst8 {
 24041  				continue
 24042  			}
 24043  			d := auxIntToInt8(v_1_1.AuxInt)
 24044  			if !(uint8(c) >= uint8(d)) {
 24045  				continue
 24046  			}
 24047  			v.reset(OpLeq8U)
 24048  			v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
 24049  			v0.AuxInt = int8ToAuxInt(c - d)
 24050  			v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
 24051  			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
 24052  			v2.AuxInt = int8ToAuxInt(d)
 24053  			v1.AddArg2(x, v2)
 24054  			v.AddArg2(v0, v1)
 24055  			return true
 24056  		}
 24057  		break
 24058  	}
 24059  	// match: (OrB (Less64U (Const64 [c]) x) (Leq64U x (Const64 [d])))
 24060  	// cond: uint64(c) >= uint64(d+1) && uint64(d+1) > uint64(d)
 24061  	// result: (Less64U (Const64 <x.Type> [c-d-1]) (Sub64 <x.Type> x (Const64 <x.Type> [d+1])))
 24062  	for {
 24063  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 24064  			if v_0.Op != OpLess64U {
 24065  				continue
 24066  			}
 24067  			x := v_0.Args[1]
 24068  			v_0_0 := v_0.Args[0]
 24069  			if v_0_0.Op != OpConst64 {
 24070  				continue
 24071  			}
 24072  			c := auxIntToInt64(v_0_0.AuxInt)
 24073  			if v_1.Op != OpLeq64U {
 24074  				continue
 24075  			}
 24076  			_ = v_1.Args[1]
 24077  			if x != v_1.Args[0] {
 24078  				continue
 24079  			}
 24080  			v_1_1 := v_1.Args[1]
 24081  			if v_1_1.Op != OpConst64 {
 24082  				continue
 24083  			}
 24084  			d := auxIntToInt64(v_1_1.AuxInt)
 24085  			if !(uint64(c) >= uint64(d+1) && uint64(d+1) > uint64(d)) {
 24086  				continue
 24087  			}
 24088  			v.reset(OpLess64U)
 24089  			v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
 24090  			v0.AuxInt = int64ToAuxInt(c - d - 1)
 24091  			v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
 24092  			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
 24093  			v2.AuxInt = int64ToAuxInt(d + 1)
 24094  			v1.AddArg2(x, v2)
 24095  			v.AddArg2(v0, v1)
 24096  			return true
 24097  		}
 24098  		break
 24099  	}
 24100  	// match: (OrB (Leq64U (Const64 [c]) x) (Leq64U x (Const64 [d])))
 24101  	// cond: uint64(c) >= uint64(d+1) && uint64(d+1) > uint64(d)
 24102  	// result: (Leq64U (Const64 <x.Type> [c-d-1]) (Sub64 <x.Type> x (Const64 <x.Type> [d+1])))
 24103  	for {
 24104  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 24105  			if v_0.Op != OpLeq64U {
 24106  				continue
 24107  			}
 24108  			x := v_0.Args[1]
 24109  			v_0_0 := v_0.Args[0]
 24110  			if v_0_0.Op != OpConst64 {
 24111  				continue
 24112  			}
 24113  			c := auxIntToInt64(v_0_0.AuxInt)
 24114  			if v_1.Op != OpLeq64U {
 24115  				continue
 24116  			}
 24117  			_ = v_1.Args[1]
 24118  			if x != v_1.Args[0] {
 24119  				continue
 24120  			}
 24121  			v_1_1 := v_1.Args[1]
 24122  			if v_1_1.Op != OpConst64 {
 24123  				continue
 24124  			}
 24125  			d := auxIntToInt64(v_1_1.AuxInt)
 24126  			if !(uint64(c) >= uint64(d+1) && uint64(d+1) > uint64(d)) {
 24127  				continue
 24128  			}
 24129  			v.reset(OpLeq64U)
 24130  			v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
 24131  			v0.AuxInt = int64ToAuxInt(c - d - 1)
 24132  			v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
 24133  			v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
 24134  			v2.AuxInt = int64ToAuxInt(d + 1)
 24135  			v1.AddArg2(x, v2)
 24136  			v.AddArg2(v0, v1)
 24137  			return true
 24138  		}
 24139  		break
 24140  	}
 24141  	// match: (OrB (Less32U (Const32 [c]) x) (Leq32U x (Const32 [d])))
 24142  	// cond: uint32(c) >= uint32(d+1) && uint32(d+1) > uint32(d)
 24143  	// result: (Less32U (Const32 <x.Type> [c-d-1]) (Sub32 <x.Type> x (Const32 <x.Type> [d+1])))
 24144  	for {
 24145  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 24146  			if v_0.Op != OpLess32U {
 24147  				continue
 24148  			}
 24149  			x := v_0.Args[1]
 24150  			v_0_0 := v_0.Args[0]
 24151  			if v_0_0.Op != OpConst32 {
 24152  				continue
 24153  			}
 24154  			c := auxIntToInt32(v_0_0.AuxInt)
 24155  			if v_1.Op != OpLeq32U {
 24156  				continue
 24157  			}
 24158  			_ = v_1.Args[1]
 24159  			if x != v_1.Args[0] {
 24160  				continue
 24161  			}
 24162  			v_1_1 := v_1.Args[1]
 24163  			if v_1_1.Op != OpConst32 {
 24164  				continue
 24165  			}
 24166  			d := auxIntToInt32(v_1_1.AuxInt)
 24167  			if !(uint32(c) >= uint32(d+1) && uint32(d+1) > uint32(d)) {
 24168  				continue
 24169  			}
 24170  			v.reset(OpLess32U)
 24171  			v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
 24172  			v0.AuxInt = int32ToAuxInt(c - d - 1)
 24173  			v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
 24174  			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
 24175  			v2.AuxInt = int32ToAuxInt(d + 1)
 24176  			v1.AddArg2(x, v2)
 24177  			v.AddArg2(v0, v1)
 24178  			return true
 24179  		}
 24180  		break
 24181  	}
 24182  	// match: (OrB (Leq32U (Const32 [c]) x) (Leq32U x (Const32 [d])))
 24183  	// cond: uint32(c) >= uint32(d+1) && uint32(d+1) > uint32(d)
 24184  	// result: (Leq32U (Const32 <x.Type> [c-d-1]) (Sub32 <x.Type> x (Const32 <x.Type> [d+1])))
 24185  	for {
 24186  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 24187  			if v_0.Op != OpLeq32U {
 24188  				continue
 24189  			}
 24190  			x := v_0.Args[1]
 24191  			v_0_0 := v_0.Args[0]
 24192  			if v_0_0.Op != OpConst32 {
 24193  				continue
 24194  			}
 24195  			c := auxIntToInt32(v_0_0.AuxInt)
 24196  			if v_1.Op != OpLeq32U {
 24197  				continue
 24198  			}
 24199  			_ = v_1.Args[1]
 24200  			if x != v_1.Args[0] {
 24201  				continue
 24202  			}
 24203  			v_1_1 := v_1.Args[1]
 24204  			if v_1_1.Op != OpConst32 {
 24205  				continue
 24206  			}
 24207  			d := auxIntToInt32(v_1_1.AuxInt)
 24208  			if !(uint32(c) >= uint32(d+1) && uint32(d+1) > uint32(d)) {
 24209  				continue
 24210  			}
 24211  			v.reset(OpLeq32U)
 24212  			v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
 24213  			v0.AuxInt = int32ToAuxInt(c - d - 1)
 24214  			v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
 24215  			v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
 24216  			v2.AuxInt = int32ToAuxInt(d + 1)
 24217  			v1.AddArg2(x, v2)
 24218  			v.AddArg2(v0, v1)
 24219  			return true
 24220  		}
 24221  		break
 24222  	}
 24223  	// match: (OrB (Less16U (Const16 [c]) x) (Leq16U x (Const16 [d])))
 24224  	// cond: uint16(c) >= uint16(d+1) && uint16(d+1) > uint16(d)
 24225  	// result: (Less16U (Const16 <x.Type> [c-d-1]) (Sub16 <x.Type> x (Const16 <x.Type> [d+1])))
 24226  	for {
 24227  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 24228  			if v_0.Op != OpLess16U {
 24229  				continue
 24230  			}
 24231  			x := v_0.Args[1]
 24232  			v_0_0 := v_0.Args[0]
 24233  			if v_0_0.Op != OpConst16 {
 24234  				continue
 24235  			}
 24236  			c := auxIntToInt16(v_0_0.AuxInt)
 24237  			if v_1.Op != OpLeq16U {
 24238  				continue
 24239  			}
 24240  			_ = v_1.Args[1]
 24241  			if x != v_1.Args[0] {
 24242  				continue
 24243  			}
 24244  			v_1_1 := v_1.Args[1]
 24245  			if v_1_1.Op != OpConst16 {
 24246  				continue
 24247  			}
 24248  			d := auxIntToInt16(v_1_1.AuxInt)
 24249  			if !(uint16(c) >= uint16(d+1) && uint16(d+1) > uint16(d)) {
 24250  				continue
 24251  			}
 24252  			v.reset(OpLess16U)
 24253  			v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
 24254  			v0.AuxInt = int16ToAuxInt(c - d - 1)
 24255  			v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
 24256  			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
 24257  			v2.AuxInt = int16ToAuxInt(d + 1)
 24258  			v1.AddArg2(x, v2)
 24259  			v.AddArg2(v0, v1)
 24260  			return true
 24261  		}
 24262  		break
 24263  	}
 24264  	// match: (OrB (Leq16U (Const16 [c]) x) (Leq16U x (Const16 [d])))
 24265  	// cond: uint16(c) >= uint16(d+1) && uint16(d+1) > uint16(d)
 24266  	// result: (Leq16U (Const16 <x.Type> [c-d-1]) (Sub16 <x.Type> x (Const16 <x.Type> [d+1])))
 24267  	for {
 24268  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 24269  			if v_0.Op != OpLeq16U {
 24270  				continue
 24271  			}
 24272  			x := v_0.Args[1]
 24273  			v_0_0 := v_0.Args[0]
 24274  			if v_0_0.Op != OpConst16 {
 24275  				continue
 24276  			}
 24277  			c := auxIntToInt16(v_0_0.AuxInt)
 24278  			if v_1.Op != OpLeq16U {
 24279  				continue
 24280  			}
 24281  			_ = v_1.Args[1]
 24282  			if x != v_1.Args[0] {
 24283  				continue
 24284  			}
 24285  			v_1_1 := v_1.Args[1]
 24286  			if v_1_1.Op != OpConst16 {
 24287  				continue
 24288  			}
 24289  			d := auxIntToInt16(v_1_1.AuxInt)
 24290  			if !(uint16(c) >= uint16(d+1) && uint16(d+1) > uint16(d)) {
 24291  				continue
 24292  			}
 24293  			v.reset(OpLeq16U)
 24294  			v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
 24295  			v0.AuxInt = int16ToAuxInt(c - d - 1)
 24296  			v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
 24297  			v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
 24298  			v2.AuxInt = int16ToAuxInt(d + 1)
 24299  			v1.AddArg2(x, v2)
 24300  			v.AddArg2(v0, v1)
 24301  			return true
 24302  		}
 24303  		break
 24304  	}
 24305  	// match: (OrB (Less8U (Const8 [c]) x) (Leq8U x (Const8 [d])))
 24306  	// cond: uint8(c) >= uint8(d+1) && uint8(d+1) > uint8(d)
 24307  	// result: (Less8U (Const8 <x.Type> [c-d-1]) (Sub8 <x.Type> x (Const8 <x.Type> [d+1])))
 24308  	for {
 24309  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 24310  			if v_0.Op != OpLess8U {
 24311  				continue
 24312  			}
 24313  			x := v_0.Args[1]
 24314  			v_0_0 := v_0.Args[0]
 24315  			if v_0_0.Op != OpConst8 {
 24316  				continue
 24317  			}
 24318  			c := auxIntToInt8(v_0_0.AuxInt)
 24319  			if v_1.Op != OpLeq8U {
 24320  				continue
 24321  			}
 24322  			_ = v_1.Args[1]
 24323  			if x != v_1.Args[0] {
 24324  				continue
 24325  			}
 24326  			v_1_1 := v_1.Args[1]
 24327  			if v_1_1.Op != OpConst8 {
 24328  				continue
 24329  			}
 24330  			d := auxIntToInt8(v_1_1.AuxInt)
 24331  			if !(uint8(c) >= uint8(d+1) && uint8(d+1) > uint8(d)) {
 24332  				continue
 24333  			}
 24334  			v.reset(OpLess8U)
 24335  			v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
 24336  			v0.AuxInt = int8ToAuxInt(c - d - 1)
 24337  			v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
 24338  			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
 24339  			v2.AuxInt = int8ToAuxInt(d + 1)
 24340  			v1.AddArg2(x, v2)
 24341  			v.AddArg2(v0, v1)
 24342  			return true
 24343  		}
 24344  		break
 24345  	}
 24346  	// match: (OrB (Leq8U (Const8 [c]) x) (Leq8U x (Const8 [d])))
 24347  	// cond: uint8(c) >= uint8(d+1) && uint8(d+1) > uint8(d)
 24348  	// result: (Leq8U (Const8 <x.Type> [c-d-1]) (Sub8 <x.Type> x (Const8 <x.Type> [d+1])))
 24349  	for {
 24350  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 24351  			if v_0.Op != OpLeq8U {
 24352  				continue
 24353  			}
 24354  			x := v_0.Args[1]
 24355  			v_0_0 := v_0.Args[0]
 24356  			if v_0_0.Op != OpConst8 {
 24357  				continue
 24358  			}
 24359  			c := auxIntToInt8(v_0_0.AuxInt)
 24360  			if v_1.Op != OpLeq8U {
 24361  				continue
 24362  			}
 24363  			_ = v_1.Args[1]
 24364  			if x != v_1.Args[0] {
 24365  				continue
 24366  			}
 24367  			v_1_1 := v_1.Args[1]
 24368  			if v_1_1.Op != OpConst8 {
 24369  				continue
 24370  			}
 24371  			d := auxIntToInt8(v_1_1.AuxInt)
 24372  			if !(uint8(c) >= uint8(d+1) && uint8(d+1) > uint8(d)) {
 24373  				continue
 24374  			}
 24375  			v.reset(OpLeq8U)
 24376  			v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
 24377  			v0.AuxInt = int8ToAuxInt(c - d - 1)
 24378  			v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
 24379  			v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
 24380  			v2.AuxInt = int8ToAuxInt(d + 1)
 24381  			v1.AddArg2(x, v2)
 24382  			v.AddArg2(v0, v1)
 24383  			return true
 24384  		}
 24385  		break
 24386  	}
 24387  	return false
 24388  }
 24389  func rewriteValuegeneric_OpPhi(v *Value) bool {
 24390  	b := v.Block
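	// A two-predecessor Phi whose arguments are the same constant is just
	// that constant; the rules below handle each constant width in turn.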
 24391  	// match: (Phi (Const8 [c]) (Const8 [c]))
 24392  	// result: (Const8 [c])
 24393  	for {
 24394  		if len(v.Args) != 2 {
 24395  			break
 24396  		}
 24397  		_ = v.Args[1]
 24398  		v_0 := v.Args[0]
 24399  		if v_0.Op != OpConst8 {
 24400  			break
 24401  		}
 24402  		c := auxIntToInt8(v_0.AuxInt)
 24403  		v_1 := v.Args[1]
 24404  		if v_1.Op != OpConst8 || auxIntToInt8(v_1.AuxInt) != c {
 24405  			break
 24406  		}
 24407  		v.reset(OpConst8)
 24408  		v.AuxInt = int8ToAuxInt(c)
 24409  		return true
 24410  	}
 24411  	// match: (Phi (Const16 [c]) (Const16 [c]))
 24412  	// result: (Const16 [c])
 24413  	for {
 24414  		if len(v.Args) != 2 {
 24415  			break
 24416  		}
 24417  		_ = v.Args[1]
 24418  		v_0 := v.Args[0]
 24419  		if v_0.Op != OpConst16 {
 24420  			break
 24421  		}
 24422  		c := auxIntToInt16(v_0.AuxInt)
 24423  		v_1 := v.Args[1]
 24424  		if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != c {
 24425  			break
 24426  		}
 24427  		v.reset(OpConst16)
 24428  		v.AuxInt = int16ToAuxInt(c)
 24429  		return true
 24430  	}
 24431  	// match: (Phi (Const32 [c]) (Const32 [c]))
 24432  	// result: (Const32 [c])
 24433  	for {
 24434  		if len(v.Args) != 2 {
 24435  			break
 24436  		}
 24437  		_ = v.Args[1]
 24438  		v_0 := v.Args[0]
 24439  		if v_0.Op != OpConst32 {
 24440  			break
 24441  		}
 24442  		c := auxIntToInt32(v_0.AuxInt)
 24443  		v_1 := v.Args[1]
 24444  		if v_1.Op != OpConst32 || auxIntToInt32(v_1.AuxInt) != c {
 24445  			break
 24446  		}
 24447  		v.reset(OpConst32)
 24448  		v.AuxInt = int32ToAuxInt(c)
 24449  		return true
 24450  	}
 24451  	// match: (Phi (Const64 [c]) (Const64 [c]))
 24452  	// result: (Const64 [c])
 24453  	for {
 24454  		if len(v.Args) != 2 {
 24455  			break
 24456  		}
 24457  		_ = v.Args[1]
 24458  		v_0 := v.Args[0]
 24459  		if v_0.Op != OpConst64 {
 24460  			break
 24461  		}
 24462  		c := auxIntToInt64(v_0.AuxInt)
 24463  		v_1 := v.Args[1]
 24464  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c {
 24465  			break
 24466  		}
 24467  		v.reset(OpConst64)
 24468  		v.AuxInt = int64ToAuxInt(c)
 24469  		return true
 24470  	}
 24471  	// match: (Phi <t> nx:(Not x) ny:(Not y))
 24472  	// cond: nx.Uses == 1 && ny.Uses == 1
 24473  	// result: (Not (Phi <t> x y))
 24474  	for {
 24475  		if len(v.Args) != 2 {
 24476  			break
 24477  		}
 24478  		t := v.Type
 24479  		_ = v.Args[1]
 24480  		nx := v.Args[0]
 24481  		if nx.Op != OpNot {
 24482  			break
 24483  		}
 24484  		x := nx.Args[0]
 24485  		ny := v.Args[1]
 24486  		if ny.Op != OpNot {
 24487  			break
 24488  		}
 24489  		y := ny.Args[0]
 24490  		if !(nx.Uses == 1 && ny.Uses == 1) {
 24491  			break
 24492  		}
 24493  		v.reset(OpNot)
 24494  		v0 := b.NewValue0(v.Pos, OpPhi, t)
 24495  		v0.AddArg2(x, y)
 24496  		v.AddArg(v0)
 24497  		return true
 24498  	}
 24499  	return false
 24500  }
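// rewriteValuegeneric_OpPtrIndex lowers (PtrIndex <t> ptr idx) to explicit
// pointer arithmetic: ptr + idx*t.Elem().Size(), using a 32-bit multiply on
// targets with 4-byte pointers (when the element size fits in 32 bits) and a
// 64-bit multiply on targets with 8-byte pointers.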
 24501  func rewriteValuegeneric_OpPtrIndex(v *Value) bool {
 24502  	v_1 := v.Args[1]
 24503  	v_0 := v.Args[0]
 24504  	b := v.Block
 24505  	config := b.Func.Config
 24506  	typ := &b.Func.Config.Types
 24507  	// match: (PtrIndex <t> ptr idx)
 24508  	// cond: config.PtrSize == 4 && is32Bit(t.Elem().Size())
 24509  	// result: (AddPtr ptr (Mul32 <typ.Int> idx (Const32 <typ.Int> [int32(t.Elem().Size())])))
 24510  	for {
 24511  		t := v.Type
 24512  		ptr := v_0
 24513  		idx := v_1
 24514  		if !(config.PtrSize == 4 && is32Bit(t.Elem().Size())) {
 24515  			break
 24516  		}
 24517  		v.reset(OpAddPtr)
 24518  		v0 := b.NewValue0(v.Pos, OpMul32, typ.Int)
 24519  		v1 := b.NewValue0(v.Pos, OpConst32, typ.Int)
 24520  		v1.AuxInt = int32ToAuxInt(int32(t.Elem().Size()))
 24521  		v0.AddArg2(idx, v1)
 24522  		v.AddArg2(ptr, v0)
 24523  		return true
 24524  	}
 24525  	// match: (PtrIndex <t> ptr idx)
 24526  	// cond: config.PtrSize == 8
 24527  	// result: (AddPtr ptr (Mul64 <typ.Int> idx (Const64 <typ.Int> [t.Elem().Size()])))
 24528  	for {
 24529  		t := v.Type
 24530  		ptr := v_0
 24531  		idx := v_1
 24532  		if !(config.PtrSize == 8) {
 24533  			break
 24534  		}
 24535  		v.reset(OpAddPtr)
 24536  		v0 := b.NewValue0(v.Pos, OpMul64, typ.Int)
 24537  		v1 := b.NewValue0(v.Pos, OpConst64, typ.Int)
 24538  		v1.AuxInt = int64ToAuxInt(t.Elem().Size())
 24539  		v0.AddArg2(idx, v1)
 24540  		v.AddArg2(ptr, v0)
 24541  		return true
 24542  	}
 24543  	return false
 24544  }
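// rewriteValuegeneric_OpRotateLeft16 normalizes the rotation count of 16-bit
// rotations: a rotation by a multiple of 16 is a no-op; an And mask that
// preserves the low 4 bits of the count is dropped (also under a Neg); an Add
// or Sub of a multiple of 16 is dropped, with Sub leaving a Neg of the
// variable part; a 64-bit constant count is narrowed to 32 bits on
// 4-byte-pointer targets; and nested rotations whose counts have the same
// size are merged by adding the counts.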
 24545  func rewriteValuegeneric_OpRotateLeft16(v *Value) bool {
 24546  	v_1 := v.Args[1]
 24547  	v_0 := v.Args[0]
 24548  	b := v.Block
 24549  	config := b.Func.Config
 24550  	// match: (RotateLeft16 x (Const16 [c]))
 24551  	// cond: c%16 == 0
 24552  	// result: x
 24553  	for {
 24554  		x := v_0
 24555  		if v_1.Op != OpConst16 {
 24556  			break
 24557  		}
 24558  		c := auxIntToInt16(v_1.AuxInt)
 24559  		if !(c%16 == 0) {
 24560  			break
 24561  		}
 24562  		v.copyOf(x)
 24563  		return true
 24564  	}
 24565  	// match: (RotateLeft16 x (And64 y (Const64 [c])))
 24566  	// cond: c&15 == 15
 24567  	// result: (RotateLeft16 x y)
 24568  	for {
 24569  		x := v_0
 24570  		if v_1.Op != OpAnd64 {
 24571  			break
 24572  		}
 24573  		_ = v_1.Args[1]
 24574  		v_1_0 := v_1.Args[0]
 24575  		v_1_1 := v_1.Args[1]
 24576  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 24577  			y := v_1_0
 24578  			if v_1_1.Op != OpConst64 {
 24579  				continue
 24580  			}
 24581  			c := auxIntToInt64(v_1_1.AuxInt)
 24582  			if !(c&15 == 15) {
 24583  				continue
 24584  			}
 24585  			v.reset(OpRotateLeft16)
 24586  			v.AddArg2(x, y)
 24587  			return true
 24588  		}
 24589  		break
 24590  	}
 24591  	// match: (RotateLeft16 x (And32 y (Const32 [c])))
 24592  	// cond: c&15 == 15
 24593  	// result: (RotateLeft16 x y)
 24594  	for {
 24595  		x := v_0
 24596  		if v_1.Op != OpAnd32 {
 24597  			break
 24598  		}
 24599  		_ = v_1.Args[1]
 24600  		v_1_0 := v_1.Args[0]
 24601  		v_1_1 := v_1.Args[1]
 24602  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 24603  			y := v_1_0
 24604  			if v_1_1.Op != OpConst32 {
 24605  				continue
 24606  			}
 24607  			c := auxIntToInt32(v_1_1.AuxInt)
 24608  			if !(c&15 == 15) {
 24609  				continue
 24610  			}
 24611  			v.reset(OpRotateLeft16)
 24612  			v.AddArg2(x, y)
 24613  			return true
 24614  		}
 24615  		break
 24616  	}
 24617  	// match: (RotateLeft16 x (And16 y (Const16 [c])))
 24618  	// cond: c&15 == 15
 24619  	// result: (RotateLeft16 x y)
 24620  	for {
 24621  		x := v_0
 24622  		if v_1.Op != OpAnd16 {
 24623  			break
 24624  		}
 24625  		_ = v_1.Args[1]
 24626  		v_1_0 := v_1.Args[0]
 24627  		v_1_1 := v_1.Args[1]
 24628  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 24629  			y := v_1_0
 24630  			if v_1_1.Op != OpConst16 {
 24631  				continue
 24632  			}
 24633  			c := auxIntToInt16(v_1_1.AuxInt)
 24634  			if !(c&15 == 15) {
 24635  				continue
 24636  			}
 24637  			v.reset(OpRotateLeft16)
 24638  			v.AddArg2(x, y)
 24639  			return true
 24640  		}
 24641  		break
 24642  	}
 24643  	// match: (RotateLeft16 x (And8 y (Const8 [c])))
 24644  	// cond: c&15 == 15
 24645  	// result: (RotateLeft16 x y)
 24646  	for {
 24647  		x := v_0
 24648  		if v_1.Op != OpAnd8 {
 24649  			break
 24650  		}
 24651  		_ = v_1.Args[1]
 24652  		v_1_0 := v_1.Args[0]
 24653  		v_1_1 := v_1.Args[1]
 24654  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 24655  			y := v_1_0
 24656  			if v_1_1.Op != OpConst8 {
 24657  				continue
 24658  			}
 24659  			c := auxIntToInt8(v_1_1.AuxInt)
 24660  			if !(c&15 == 15) {
 24661  				continue
 24662  			}
 24663  			v.reset(OpRotateLeft16)
 24664  			v.AddArg2(x, y)
 24665  			return true
 24666  		}
 24667  		break
 24668  	}
 24669  	// match: (RotateLeft16 x (Neg64 (And64 y (Const64 [c]))))
 24670  	// cond: c&15 == 15
 24671  	// result: (RotateLeft16 x (Neg64 <y.Type> y))
 24672  	for {
 24673  		x := v_0
 24674  		if v_1.Op != OpNeg64 {
 24675  			break
 24676  		}
 24677  		v_1_0 := v_1.Args[0]
 24678  		if v_1_0.Op != OpAnd64 {
 24679  			break
 24680  		}
 24681  		_ = v_1_0.Args[1]
 24682  		v_1_0_0 := v_1_0.Args[0]
 24683  		v_1_0_1 := v_1_0.Args[1]
 24684  		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
 24685  			y := v_1_0_0
 24686  			if v_1_0_1.Op != OpConst64 {
 24687  				continue
 24688  			}
 24689  			c := auxIntToInt64(v_1_0_1.AuxInt)
 24690  			if !(c&15 == 15) {
 24691  				continue
 24692  			}
 24693  			v.reset(OpRotateLeft16)
 24694  			v0 := b.NewValue0(v.Pos, OpNeg64, y.Type)
 24695  			v0.AddArg(y)
 24696  			v.AddArg2(x, v0)
 24697  			return true
 24698  		}
 24699  		break
 24700  	}
 24701  	// match: (RotateLeft16 x (Neg32 (And32 y (Const32 [c]))))
 24702  	// cond: c&15 == 15
 24703  	// result: (RotateLeft16 x (Neg32 <y.Type> y))
 24704  	for {
 24705  		x := v_0
 24706  		if v_1.Op != OpNeg32 {
 24707  			break
 24708  		}
 24709  		v_1_0 := v_1.Args[0]
 24710  		if v_1_0.Op != OpAnd32 {
 24711  			break
 24712  		}
 24713  		_ = v_1_0.Args[1]
 24714  		v_1_0_0 := v_1_0.Args[0]
 24715  		v_1_0_1 := v_1_0.Args[1]
 24716  		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
 24717  			y := v_1_0_0
 24718  			if v_1_0_1.Op != OpConst32 {
 24719  				continue
 24720  			}
 24721  			c := auxIntToInt32(v_1_0_1.AuxInt)
 24722  			if !(c&15 == 15) {
 24723  				continue
 24724  			}
 24725  			v.reset(OpRotateLeft16)
 24726  			v0 := b.NewValue0(v.Pos, OpNeg32, y.Type)
 24727  			v0.AddArg(y)
 24728  			v.AddArg2(x, v0)
 24729  			return true
 24730  		}
 24731  		break
 24732  	}
 24733  	// match: (RotateLeft16 x (Neg16 (And16 y (Const16 [c]))))
 24734  	// cond: c&15 == 15
 24735  	// result: (RotateLeft16 x (Neg16 <y.Type> y))
 24736  	for {
 24737  		x := v_0
 24738  		if v_1.Op != OpNeg16 {
 24739  			break
 24740  		}
 24741  		v_1_0 := v_1.Args[0]
 24742  		if v_1_0.Op != OpAnd16 {
 24743  			break
 24744  		}
 24745  		_ = v_1_0.Args[1]
 24746  		v_1_0_0 := v_1_0.Args[0]
 24747  		v_1_0_1 := v_1_0.Args[1]
 24748  		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
 24749  			y := v_1_0_0
 24750  			if v_1_0_1.Op != OpConst16 {
 24751  				continue
 24752  			}
 24753  			c := auxIntToInt16(v_1_0_1.AuxInt)
 24754  			if !(c&15 == 15) {
 24755  				continue
 24756  			}
 24757  			v.reset(OpRotateLeft16)
 24758  			v0 := b.NewValue0(v.Pos, OpNeg16, y.Type)
 24759  			v0.AddArg(y)
 24760  			v.AddArg2(x, v0)
 24761  			return true
 24762  		}
 24763  		break
 24764  	}
 24765  	// match: (RotateLeft16 x (Neg8 (And8 y (Const8 [c]))))
 24766  	// cond: c&15 == 15
 24767  	// result: (RotateLeft16 x (Neg8 <y.Type> y))
 24768  	for {
 24769  		x := v_0
 24770  		if v_1.Op != OpNeg8 {
 24771  			break
 24772  		}
 24773  		v_1_0 := v_1.Args[0]
 24774  		if v_1_0.Op != OpAnd8 {
 24775  			break
 24776  		}
 24777  		_ = v_1_0.Args[1]
 24778  		v_1_0_0 := v_1_0.Args[0]
 24779  		v_1_0_1 := v_1_0.Args[1]
 24780  		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
 24781  			y := v_1_0_0
 24782  			if v_1_0_1.Op != OpConst8 {
 24783  				continue
 24784  			}
 24785  			c := auxIntToInt8(v_1_0_1.AuxInt)
 24786  			if !(c&15 == 15) {
 24787  				continue
 24788  			}
 24789  			v.reset(OpRotateLeft16)
 24790  			v0 := b.NewValue0(v.Pos, OpNeg8, y.Type)
 24791  			v0.AddArg(y)
 24792  			v.AddArg2(x, v0)
 24793  			return true
 24794  		}
 24795  		break
 24796  	}
 24797  	// match: (RotateLeft16 x (Add64 y (Const64 [c])))
 24798  	// cond: c&15 == 0
 24799  	// result: (RotateLeft16 x y)
 24800  	for {
 24801  		x := v_0
 24802  		if v_1.Op != OpAdd64 {
 24803  			break
 24804  		}
 24805  		_ = v_1.Args[1]
 24806  		v_1_0 := v_1.Args[0]
 24807  		v_1_1 := v_1.Args[1]
 24808  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 24809  			y := v_1_0
 24810  			if v_1_1.Op != OpConst64 {
 24811  				continue
 24812  			}
 24813  			c := auxIntToInt64(v_1_1.AuxInt)
 24814  			if !(c&15 == 0) {
 24815  				continue
 24816  			}
 24817  			v.reset(OpRotateLeft16)
 24818  			v.AddArg2(x, y)
 24819  			return true
 24820  		}
 24821  		break
 24822  	}
 24823  	// match: (RotateLeft16 x (Add32 y (Const32 [c])))
 24824  	// cond: c&15 == 0
 24825  	// result: (RotateLeft16 x y)
 24826  	for {
 24827  		x := v_0
 24828  		if v_1.Op != OpAdd32 {
 24829  			break
 24830  		}
 24831  		_ = v_1.Args[1]
 24832  		v_1_0 := v_1.Args[0]
 24833  		v_1_1 := v_1.Args[1]
 24834  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 24835  			y := v_1_0
 24836  			if v_1_1.Op != OpConst32 {
 24837  				continue
 24838  			}
 24839  			c := auxIntToInt32(v_1_1.AuxInt)
 24840  			if !(c&15 == 0) {
 24841  				continue
 24842  			}
 24843  			v.reset(OpRotateLeft16)
 24844  			v.AddArg2(x, y)
 24845  			return true
 24846  		}
 24847  		break
 24848  	}
 24849  	// match: (RotateLeft16 x (Add16 y (Const16 [c])))
 24850  	// cond: c&15 == 0
 24851  	// result: (RotateLeft16 x y)
 24852  	for {
 24853  		x := v_0
 24854  		if v_1.Op != OpAdd16 {
 24855  			break
 24856  		}
 24857  		_ = v_1.Args[1]
 24858  		v_1_0 := v_1.Args[0]
 24859  		v_1_1 := v_1.Args[1]
 24860  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 24861  			y := v_1_0
 24862  			if v_1_1.Op != OpConst16 {
 24863  				continue
 24864  			}
 24865  			c := auxIntToInt16(v_1_1.AuxInt)
 24866  			if !(c&15 == 0) {
 24867  				continue
 24868  			}
 24869  			v.reset(OpRotateLeft16)
 24870  			v.AddArg2(x, y)
 24871  			return true
 24872  		}
 24873  		break
 24874  	}
 24875  	// match: (RotateLeft16 x (Add8 y (Const8 [c])))
 24876  	// cond: c&15 == 0
 24877  	// result: (RotateLeft16 x y)
 24878  	for {
 24879  		x := v_0
 24880  		if v_1.Op != OpAdd8 {
 24881  			break
 24882  		}
 24883  		_ = v_1.Args[1]
 24884  		v_1_0 := v_1.Args[0]
 24885  		v_1_1 := v_1.Args[1]
 24886  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 24887  			y := v_1_0
 24888  			if v_1_1.Op != OpConst8 {
 24889  				continue
 24890  			}
 24891  			c := auxIntToInt8(v_1_1.AuxInt)
 24892  			if !(c&15 == 0) {
 24893  				continue
 24894  			}
 24895  			v.reset(OpRotateLeft16)
 24896  			v.AddArg2(x, y)
 24897  			return true
 24898  		}
 24899  		break
 24900  	}
 24901  	// match: (RotateLeft16 x (Sub64 (Const64 [c]) y))
 24902  	// cond: c&15 == 0
 24903  	// result: (RotateLeft16 x (Neg64 <y.Type> y))
 24904  	for {
 24905  		x := v_0
 24906  		if v_1.Op != OpSub64 {
 24907  			break
 24908  		}
 24909  		y := v_1.Args[1]
 24910  		v_1_0 := v_1.Args[0]
 24911  		if v_1_0.Op != OpConst64 {
 24912  			break
 24913  		}
 24914  		c := auxIntToInt64(v_1_0.AuxInt)
 24915  		if !(c&15 == 0) {
 24916  			break
 24917  		}
 24918  		v.reset(OpRotateLeft16)
 24919  		v0 := b.NewValue0(v.Pos, OpNeg64, y.Type)
 24920  		v0.AddArg(y)
 24921  		v.AddArg2(x, v0)
 24922  		return true
 24923  	}
 24924  	// match: (RotateLeft16 x (Sub32 (Const32 [c]) y))
 24925  	// cond: c&15 == 0
 24926  	// result: (RotateLeft16 x (Neg32 <y.Type> y))
 24927  	for {
 24928  		x := v_0
 24929  		if v_1.Op != OpSub32 {
 24930  			break
 24931  		}
 24932  		y := v_1.Args[1]
 24933  		v_1_0 := v_1.Args[0]
 24934  		if v_1_0.Op != OpConst32 {
 24935  			break
 24936  		}
 24937  		c := auxIntToInt32(v_1_0.AuxInt)
 24938  		if !(c&15 == 0) {
 24939  			break
 24940  		}
 24941  		v.reset(OpRotateLeft16)
 24942  		v0 := b.NewValue0(v.Pos, OpNeg32, y.Type)
 24943  		v0.AddArg(y)
 24944  		v.AddArg2(x, v0)
 24945  		return true
 24946  	}
 24947  	// match: (RotateLeft16 x (Sub16 (Const16 [c]) y))
 24948  	// cond: c&15 == 0
 24949  	// result: (RotateLeft16 x (Neg16 <y.Type> y))
 24950  	for {
 24951  		x := v_0
 24952  		if v_1.Op != OpSub16 {
 24953  			break
 24954  		}
 24955  		y := v_1.Args[1]
 24956  		v_1_0 := v_1.Args[0]
 24957  		if v_1_0.Op != OpConst16 {
 24958  			break
 24959  		}
 24960  		c := auxIntToInt16(v_1_0.AuxInt)
 24961  		if !(c&15 == 0) {
 24962  			break
 24963  		}
 24964  		v.reset(OpRotateLeft16)
 24965  		v0 := b.NewValue0(v.Pos, OpNeg16, y.Type)
 24966  		v0.AddArg(y)
 24967  		v.AddArg2(x, v0)
 24968  		return true
 24969  	}
 24970  	// match: (RotateLeft16 x (Sub8 (Const8 [c]) y))
 24971  	// cond: c&15 == 0
 24972  	// result: (RotateLeft16 x (Neg8 <y.Type> y))
 24973  	for {
 24974  		x := v_0
 24975  		if v_1.Op != OpSub8 {
 24976  			break
 24977  		}
 24978  		y := v_1.Args[1]
 24979  		v_1_0 := v_1.Args[0]
 24980  		if v_1_0.Op != OpConst8 {
 24981  			break
 24982  		}
 24983  		c := auxIntToInt8(v_1_0.AuxInt)
 24984  		if !(c&15 == 0) {
 24985  			break
 24986  		}
 24987  		v.reset(OpRotateLeft16)
 24988  		v0 := b.NewValue0(v.Pos, OpNeg8, y.Type)
 24989  		v0.AddArg(y)
 24990  		v.AddArg2(x, v0)
 24991  		return true
 24992  	}
 24993  	// match: (RotateLeft16 x (Const64 <t> [c]))
 24994  	// cond: config.PtrSize == 4
 24995  	// result: (RotateLeft16 x (Const32 <t> [int32(c)]))
 24996  	for {
 24997  		x := v_0
 24998  		if v_1.Op != OpConst64 {
 24999  			break
 25000  		}
 25001  		t := v_1.Type
 25002  		c := auxIntToInt64(v_1.AuxInt)
 25003  		if !(config.PtrSize == 4) {
 25004  			break
 25005  		}
 25006  		v.reset(OpRotateLeft16)
 25007  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 25008  		v0.AuxInt = int32ToAuxInt(int32(c))
 25009  		v.AddArg2(x, v0)
 25010  		return true
 25011  	}
 25012  	// match: (RotateLeft16 (RotateLeft16 x c) d)
 25013  	// cond: c.Type.Size() == 8 && d.Type.Size() == 8
 25014  	// result: (RotateLeft16 x (Add64 <c.Type> c d))
 25015  	for {
 25016  		if v_0.Op != OpRotateLeft16 {
 25017  			break
 25018  		}
 25019  		c := v_0.Args[1]
 25020  		x := v_0.Args[0]
 25021  		d := v_1
 25022  		if !(c.Type.Size() == 8 && d.Type.Size() == 8) {
 25023  			break
 25024  		}
 25025  		v.reset(OpRotateLeft16)
 25026  		v0 := b.NewValue0(v.Pos, OpAdd64, c.Type)
 25027  		v0.AddArg2(c, d)
 25028  		v.AddArg2(x, v0)
 25029  		return true
 25030  	}
 25031  	// match: (RotateLeft16 (RotateLeft16 x c) d)
 25032  	// cond: c.Type.Size() == 4 && d.Type.Size() == 4
 25033  	// result: (RotateLeft16 x (Add32 <c.Type> c d))
 25034  	for {
 25035  		if v_0.Op != OpRotateLeft16 {
 25036  			break
 25037  		}
 25038  		c := v_0.Args[1]
 25039  		x := v_0.Args[0]
 25040  		d := v_1
 25041  		if !(c.Type.Size() == 4 && d.Type.Size() == 4) {
 25042  			break
 25043  		}
 25044  		v.reset(OpRotateLeft16)
 25045  		v0 := b.NewValue0(v.Pos, OpAdd32, c.Type)
 25046  		v0.AddArg2(c, d)
 25047  		v.AddArg2(x, v0)
 25048  		return true
 25049  	}
 25050  	// match: (RotateLeft16 (RotateLeft16 x c) d)
 25051  	// cond: c.Type.Size() == 2 && d.Type.Size() == 2
 25052  	// result: (RotateLeft16 x (Add16 <c.Type> c d))
 25053  	for {
 25054  		if v_0.Op != OpRotateLeft16 {
 25055  			break
 25056  		}
 25057  		c := v_0.Args[1]
 25058  		x := v_0.Args[0]
 25059  		d := v_1
 25060  		if !(c.Type.Size() == 2 && d.Type.Size() == 2) {
 25061  			break
 25062  		}
 25063  		v.reset(OpRotateLeft16)
 25064  		v0 := b.NewValue0(v.Pos, OpAdd16, c.Type)
 25065  		v0.AddArg2(c, d)
 25066  		v.AddArg2(x, v0)
 25067  		return true
 25068  	}
 25069  	// match: (RotateLeft16 (RotateLeft16 x c) d)
 25070  	// cond: c.Type.Size() == 1 && d.Type.Size() == 1
 25071  	// result: (RotateLeft16 x (Add8 <c.Type> c d))
 25072  	for {
 25073  		if v_0.Op != OpRotateLeft16 {
 25074  			break
 25075  		}
 25076  		c := v_0.Args[1]
 25077  		x := v_0.Args[0]
 25078  		d := v_1
 25079  		if !(c.Type.Size() == 1 && d.Type.Size() == 1) {
 25080  			break
 25081  		}
 25082  		v.reset(OpRotateLeft16)
 25083  		v0 := b.NewValue0(v.Pos, OpAdd8, c.Type)
 25084  		v0.AddArg2(c, d)
 25085  		v.AddArg2(x, v0)
 25086  		return true
 25087  	}
 25088  	return false
 25089  }
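// rewriteValuegeneric_OpRotateLeft32 applies the same rotation-count
// normalization as rewriteValuegeneric_OpRotateLeft16, with counts taken
// modulo 32.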
 25090  func rewriteValuegeneric_OpRotateLeft32(v *Value) bool {
 25091  	v_1 := v.Args[1]
 25092  	v_0 := v.Args[0]
 25093  	b := v.Block
 25094  	config := b.Func.Config
 25095  	// match: (RotateLeft32 x (Const32 [c]))
 25096  	// cond: c%32 == 0
 25097  	// result: x
 25098  	for {
 25099  		x := v_0
 25100  		if v_1.Op != OpConst32 {
 25101  			break
 25102  		}
 25103  		c := auxIntToInt32(v_1.AuxInt)
 25104  		if !(c%32 == 0) {
 25105  			break
 25106  		}
 25107  		v.copyOf(x)
 25108  		return true
 25109  	}
 25110  	// match: (RotateLeft32 x (And64 y (Const64 [c])))
 25111  	// cond: c&31 == 31
 25112  	// result: (RotateLeft32 x y)
 25113  	for {
 25114  		x := v_0
 25115  		if v_1.Op != OpAnd64 {
 25116  			break
 25117  		}
 25118  		_ = v_1.Args[1]
 25119  		v_1_0 := v_1.Args[0]
 25120  		v_1_1 := v_1.Args[1]
 25121  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 25122  			y := v_1_0
 25123  			if v_1_1.Op != OpConst64 {
 25124  				continue
 25125  			}
 25126  			c := auxIntToInt64(v_1_1.AuxInt)
 25127  			if !(c&31 == 31) {
 25128  				continue
 25129  			}
 25130  			v.reset(OpRotateLeft32)
 25131  			v.AddArg2(x, y)
 25132  			return true
 25133  		}
 25134  		break
 25135  	}
 25136  	// match: (RotateLeft32 x (And32 y (Const32 [c])))
 25137  	// cond: c&31 == 31
 25138  	// result: (RotateLeft32 x y)
 25139  	for {
 25140  		x := v_0
 25141  		if v_1.Op != OpAnd32 {
 25142  			break
 25143  		}
 25144  		_ = v_1.Args[1]
 25145  		v_1_0 := v_1.Args[0]
 25146  		v_1_1 := v_1.Args[1]
 25147  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 25148  			y := v_1_0
 25149  			if v_1_1.Op != OpConst32 {
 25150  				continue
 25151  			}
 25152  			c := auxIntToInt32(v_1_1.AuxInt)
 25153  			if !(c&31 == 31) {
 25154  				continue
 25155  			}
 25156  			v.reset(OpRotateLeft32)
 25157  			v.AddArg2(x, y)
 25158  			return true
 25159  		}
 25160  		break
 25161  	}
 25162  	// match: (RotateLeft32 x (And16 y (Const16 [c])))
 25163  	// cond: c&31 == 31
 25164  	// result: (RotateLeft32 x y)
 25165  	for {
 25166  		x := v_0
 25167  		if v_1.Op != OpAnd16 {
 25168  			break
 25169  		}
 25170  		_ = v_1.Args[1]
 25171  		v_1_0 := v_1.Args[0]
 25172  		v_1_1 := v_1.Args[1]
 25173  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 25174  			y := v_1_0
 25175  			if v_1_1.Op != OpConst16 {
 25176  				continue
 25177  			}
 25178  			c := auxIntToInt16(v_1_1.AuxInt)
 25179  			if !(c&31 == 31) {
 25180  				continue
 25181  			}
 25182  			v.reset(OpRotateLeft32)
 25183  			v.AddArg2(x, y)
 25184  			return true
 25185  		}
 25186  		break
 25187  	}
 25188  	// match: (RotateLeft32 x (And8 y (Const8 [c])))
 25189  	// cond: c&31 == 31
 25190  	// result: (RotateLeft32 x y)
 25191  	for {
 25192  		x := v_0
 25193  		if v_1.Op != OpAnd8 {
 25194  			break
 25195  		}
 25196  		_ = v_1.Args[1]
 25197  		v_1_0 := v_1.Args[0]
 25198  		v_1_1 := v_1.Args[1]
 25199  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 25200  			y := v_1_0
 25201  			if v_1_1.Op != OpConst8 {
 25202  				continue
 25203  			}
 25204  			c := auxIntToInt8(v_1_1.AuxInt)
 25205  			if !(c&31 == 31) {
 25206  				continue
 25207  			}
 25208  			v.reset(OpRotateLeft32)
 25209  			v.AddArg2(x, y)
 25210  			return true
 25211  		}
 25212  		break
 25213  	}
 25214  	// match: (RotateLeft32 x (Neg64 (And64 y (Const64 [c]))))
 25215  	// cond: c&31 == 31
 25216  	// result: (RotateLeft32 x (Neg64 <y.Type> y))
 25217  	for {
 25218  		x := v_0
 25219  		if v_1.Op != OpNeg64 {
 25220  			break
 25221  		}
 25222  		v_1_0 := v_1.Args[0]
 25223  		if v_1_0.Op != OpAnd64 {
 25224  			break
 25225  		}
 25226  		_ = v_1_0.Args[1]
 25227  		v_1_0_0 := v_1_0.Args[0]
 25228  		v_1_0_1 := v_1_0.Args[1]
 25229  		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
 25230  			y := v_1_0_0
 25231  			if v_1_0_1.Op != OpConst64 {
 25232  				continue
 25233  			}
 25234  			c := auxIntToInt64(v_1_0_1.AuxInt)
 25235  			if !(c&31 == 31) {
 25236  				continue
 25237  			}
 25238  			v.reset(OpRotateLeft32)
 25239  			v0 := b.NewValue0(v.Pos, OpNeg64, y.Type)
 25240  			v0.AddArg(y)
 25241  			v.AddArg2(x, v0)
 25242  			return true
 25243  		}
 25244  		break
 25245  	}
 25246  	// match: (RotateLeft32 x (Neg32 (And32 y (Const32 [c]))))
 25247  	// cond: c&31 == 31
 25248  	// result: (RotateLeft32 x (Neg32 <y.Type> y))
 25249  	for {
 25250  		x := v_0
 25251  		if v_1.Op != OpNeg32 {
 25252  			break
 25253  		}
 25254  		v_1_0 := v_1.Args[0]
 25255  		if v_1_0.Op != OpAnd32 {
 25256  			break
 25257  		}
 25258  		_ = v_1_0.Args[1]
 25259  		v_1_0_0 := v_1_0.Args[0]
 25260  		v_1_0_1 := v_1_0.Args[1]
 25261  		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
 25262  			y := v_1_0_0
 25263  			if v_1_0_1.Op != OpConst32 {
 25264  				continue
 25265  			}
 25266  			c := auxIntToInt32(v_1_0_1.AuxInt)
 25267  			if !(c&31 == 31) {
 25268  				continue
 25269  			}
 25270  			v.reset(OpRotateLeft32)
 25271  			v0 := b.NewValue0(v.Pos, OpNeg32, y.Type)
 25272  			v0.AddArg(y)
 25273  			v.AddArg2(x, v0)
 25274  			return true
 25275  		}
 25276  		break
 25277  	}
 25278  	// match: (RotateLeft32 x (Neg16 (And16 y (Const16 [c]))))
 25279  	// cond: c&31 == 31
 25280  	// result: (RotateLeft32 x (Neg16 <y.Type> y))
 25281  	for {
 25282  		x := v_0
 25283  		if v_1.Op != OpNeg16 {
 25284  			break
 25285  		}
 25286  		v_1_0 := v_1.Args[0]
 25287  		if v_1_0.Op != OpAnd16 {
 25288  			break
 25289  		}
 25290  		_ = v_1_0.Args[1]
 25291  		v_1_0_0 := v_1_0.Args[0]
 25292  		v_1_0_1 := v_1_0.Args[1]
 25293  		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
 25294  			y := v_1_0_0
 25295  			if v_1_0_1.Op != OpConst16 {
 25296  				continue
 25297  			}
 25298  			c := auxIntToInt16(v_1_0_1.AuxInt)
 25299  			if !(c&31 == 31) {
 25300  				continue
 25301  			}
 25302  			v.reset(OpRotateLeft32)
 25303  			v0 := b.NewValue0(v.Pos, OpNeg16, y.Type)
 25304  			v0.AddArg(y)
 25305  			v.AddArg2(x, v0)
 25306  			return true
 25307  		}
 25308  		break
 25309  	}
 25310  	// match: (RotateLeft32 x (Neg8 (And8 y (Const8 [c]))))
 25311  	// cond: c&31 == 31
 25312  	// result: (RotateLeft32 x (Neg8 <y.Type> y))
 25313  	for {
 25314  		x := v_0
 25315  		if v_1.Op != OpNeg8 {
 25316  			break
 25317  		}
 25318  		v_1_0 := v_1.Args[0]
 25319  		if v_1_0.Op != OpAnd8 {
 25320  			break
 25321  		}
 25322  		_ = v_1_0.Args[1]
 25323  		v_1_0_0 := v_1_0.Args[0]
 25324  		v_1_0_1 := v_1_0.Args[1]
 25325  		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
 25326  			y := v_1_0_0
 25327  			if v_1_0_1.Op != OpConst8 {
 25328  				continue
 25329  			}
 25330  			c := auxIntToInt8(v_1_0_1.AuxInt)
 25331  			if !(c&31 == 31) {
 25332  				continue
 25333  			}
 25334  			v.reset(OpRotateLeft32)
 25335  			v0 := b.NewValue0(v.Pos, OpNeg8, y.Type)
 25336  			v0.AddArg(y)
 25337  			v.AddArg2(x, v0)
 25338  			return true
 25339  		}
 25340  		break
 25341  	}
 25342  	// match: (RotateLeft32 x (Add64 y (Const64 [c])))
 25343  	// cond: c&31 == 0
 25344  	// result: (RotateLeft32 x y)
 25345  	for {
 25346  		x := v_0
 25347  		if v_1.Op != OpAdd64 {
 25348  			break
 25349  		}
 25350  		_ = v_1.Args[1]
 25351  		v_1_0 := v_1.Args[0]
 25352  		v_1_1 := v_1.Args[1]
 25353  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 25354  			y := v_1_0
 25355  			if v_1_1.Op != OpConst64 {
 25356  				continue
 25357  			}
 25358  			c := auxIntToInt64(v_1_1.AuxInt)
 25359  			if !(c&31 == 0) {
 25360  				continue
 25361  			}
 25362  			v.reset(OpRotateLeft32)
 25363  			v.AddArg2(x, y)
 25364  			return true
 25365  		}
 25366  		break
 25367  	}
 25368  	// match: (RotateLeft32 x (Add32 y (Const32 [c])))
 25369  	// cond: c&31 == 0
 25370  	// result: (RotateLeft32 x y)
 25371  	for {
 25372  		x := v_0
 25373  		if v_1.Op != OpAdd32 {
 25374  			break
 25375  		}
 25376  		_ = v_1.Args[1]
 25377  		v_1_0 := v_1.Args[0]
 25378  		v_1_1 := v_1.Args[1]
 25379  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 25380  			y := v_1_0
 25381  			if v_1_1.Op != OpConst32 {
 25382  				continue
 25383  			}
 25384  			c := auxIntToInt32(v_1_1.AuxInt)
 25385  			if !(c&31 == 0) {
 25386  				continue
 25387  			}
 25388  			v.reset(OpRotateLeft32)
 25389  			v.AddArg2(x, y)
 25390  			return true
 25391  		}
 25392  		break
 25393  	}
 25394  	// match: (RotateLeft32 x (Add16 y (Const16 [c])))
 25395  	// cond: c&31 == 0
 25396  	// result: (RotateLeft32 x y)
 25397  	for {
 25398  		x := v_0
 25399  		if v_1.Op != OpAdd16 {
 25400  			break
 25401  		}
 25402  		_ = v_1.Args[1]
 25403  		v_1_0 := v_1.Args[0]
 25404  		v_1_1 := v_1.Args[1]
 25405  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 25406  			y := v_1_0
 25407  			if v_1_1.Op != OpConst16 {
 25408  				continue
 25409  			}
 25410  			c := auxIntToInt16(v_1_1.AuxInt)
 25411  			if !(c&31 == 0) {
 25412  				continue
 25413  			}
 25414  			v.reset(OpRotateLeft32)
 25415  			v.AddArg2(x, y)
 25416  			return true
 25417  		}
 25418  		break
 25419  	}
 25420  	// match: (RotateLeft32 x (Add8 y (Const8 [c])))
 25421  	// cond: c&31 == 0
 25422  	// result: (RotateLeft32 x y)
 25423  	for {
 25424  		x := v_0
 25425  		if v_1.Op != OpAdd8 {
 25426  			break
 25427  		}
 25428  		_ = v_1.Args[1]
 25429  		v_1_0 := v_1.Args[0]
 25430  		v_1_1 := v_1.Args[1]
 25431  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 25432  			y := v_1_0
 25433  			if v_1_1.Op != OpConst8 {
 25434  				continue
 25435  			}
 25436  			c := auxIntToInt8(v_1_1.AuxInt)
 25437  			if !(c&31 == 0) {
 25438  				continue
 25439  			}
 25440  			v.reset(OpRotateLeft32)
 25441  			v.AddArg2(x, y)
 25442  			return true
 25443  		}
 25444  		break
 25445  	}
 25446  	// match: (RotateLeft32 x (Sub64 (Const64 [c]) y))
 25447  	// cond: c&31 == 0
 25448  	// result: (RotateLeft32 x (Neg64 <y.Type> y))
 25449  	for {
 25450  		x := v_0
 25451  		if v_1.Op != OpSub64 {
 25452  			break
 25453  		}
 25454  		y := v_1.Args[1]
 25455  		v_1_0 := v_1.Args[0]
 25456  		if v_1_0.Op != OpConst64 {
 25457  			break
 25458  		}
 25459  		c := auxIntToInt64(v_1_0.AuxInt)
 25460  		if !(c&31 == 0) {
 25461  			break
 25462  		}
 25463  		v.reset(OpRotateLeft32)
 25464  		v0 := b.NewValue0(v.Pos, OpNeg64, y.Type)
 25465  		v0.AddArg(y)
 25466  		v.AddArg2(x, v0)
 25467  		return true
 25468  	}
 25469  	// match: (RotateLeft32 x (Sub32 (Const32 [c]) y))
 25470  	// cond: c&31 == 0
 25471  	// result: (RotateLeft32 x (Neg32 <y.Type> y))
 25472  	for {
 25473  		x := v_0
 25474  		if v_1.Op != OpSub32 {
 25475  			break
 25476  		}
 25477  		y := v_1.Args[1]
 25478  		v_1_0 := v_1.Args[0]
 25479  		if v_1_0.Op != OpConst32 {
 25480  			break
 25481  		}
 25482  		c := auxIntToInt32(v_1_0.AuxInt)
 25483  		if !(c&31 == 0) {
 25484  			break
 25485  		}
 25486  		v.reset(OpRotateLeft32)
 25487  		v0 := b.NewValue0(v.Pos, OpNeg32, y.Type)
 25488  		v0.AddArg(y)
 25489  		v.AddArg2(x, v0)
 25490  		return true
 25491  	}
 25492  	// match: (RotateLeft32 x (Sub16 (Const16 [c]) y))
 25493  	// cond: c&31 == 0
 25494  	// result: (RotateLeft32 x (Neg16 <y.Type> y))
 25495  	for {
 25496  		x := v_0
 25497  		if v_1.Op != OpSub16 {
 25498  			break
 25499  		}
 25500  		y := v_1.Args[1]
 25501  		v_1_0 := v_1.Args[0]
 25502  		if v_1_0.Op != OpConst16 {
 25503  			break
 25504  		}
 25505  		c := auxIntToInt16(v_1_0.AuxInt)
 25506  		if !(c&31 == 0) {
 25507  			break
 25508  		}
 25509  		v.reset(OpRotateLeft32)
 25510  		v0 := b.NewValue0(v.Pos, OpNeg16, y.Type)
 25511  		v0.AddArg(y)
 25512  		v.AddArg2(x, v0)
 25513  		return true
 25514  	}
 25515  	// match: (RotateLeft32 x (Sub8 (Const8 [c]) y))
 25516  	// cond: c&31 == 0
 25517  	// result: (RotateLeft32 x (Neg8 <y.Type> y))
 25518  	for {
 25519  		x := v_0
 25520  		if v_1.Op != OpSub8 {
 25521  			break
 25522  		}
 25523  		y := v_1.Args[1]
 25524  		v_1_0 := v_1.Args[0]
 25525  		if v_1_0.Op != OpConst8 {
 25526  			break
 25527  		}
 25528  		c := auxIntToInt8(v_1_0.AuxInt)
 25529  		if !(c&31 == 0) {
 25530  			break
 25531  		}
 25532  		v.reset(OpRotateLeft32)
 25533  		v0 := b.NewValue0(v.Pos, OpNeg8, y.Type)
 25534  		v0.AddArg(y)
 25535  		v.AddArg2(x, v0)
 25536  		return true
 25537  	}
 25538  	// match: (RotateLeft32 x (Const64 <t> [c]))
 25539  	// cond: config.PtrSize == 4
 25540  	// result: (RotateLeft32 x (Const32 <t> [int32(c)]))
 25541  	for {
 25542  		x := v_0
 25543  		if v_1.Op != OpConst64 {
 25544  			break
 25545  		}
 25546  		t := v_1.Type
 25547  		c := auxIntToInt64(v_1.AuxInt)
 25548  		if !(config.PtrSize == 4) {
 25549  			break
 25550  		}
 25551  		v.reset(OpRotateLeft32)
 25552  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 25553  		v0.AuxInt = int32ToAuxInt(int32(c))
 25554  		v.AddArg2(x, v0)
 25555  		return true
 25556  	}
 25557  	// match: (RotateLeft32 (RotateLeft32 x c) d)
 25558  	// cond: c.Type.Size() == 8 && d.Type.Size() == 8
 25559  	// result: (RotateLeft32 x (Add64 <c.Type> c d))
 25560  	for {
 25561  		if v_0.Op != OpRotateLeft32 {
 25562  			break
 25563  		}
 25564  		c := v_0.Args[1]
 25565  		x := v_0.Args[0]
 25566  		d := v_1
 25567  		if !(c.Type.Size() == 8 && d.Type.Size() == 8) {
 25568  			break
 25569  		}
 25570  		v.reset(OpRotateLeft32)
 25571  		v0 := b.NewValue0(v.Pos, OpAdd64, c.Type)
 25572  		v0.AddArg2(c, d)
 25573  		v.AddArg2(x, v0)
 25574  		return true
 25575  	}
 25576  	// match: (RotateLeft32 (RotateLeft32 x c) d)
 25577  	// cond: c.Type.Size() == 4 && d.Type.Size() == 4
 25578  	// result: (RotateLeft32 x (Add32 <c.Type> c d))
 25579  	for {
 25580  		if v_0.Op != OpRotateLeft32 {
 25581  			break
 25582  		}
 25583  		c := v_0.Args[1]
 25584  		x := v_0.Args[0]
 25585  		d := v_1
 25586  		if !(c.Type.Size() == 4 && d.Type.Size() == 4) {
 25587  			break
 25588  		}
 25589  		v.reset(OpRotateLeft32)
 25590  		v0 := b.NewValue0(v.Pos, OpAdd32, c.Type)
 25591  		v0.AddArg2(c, d)
 25592  		v.AddArg2(x, v0)
 25593  		return true
 25594  	}
 25595  	// match: (RotateLeft32 (RotateLeft32 x c) d)
 25596  	// cond: c.Type.Size() == 2 && d.Type.Size() == 2
 25597  	// result: (RotateLeft32 x (Add16 <c.Type> c d))
 25598  	for {
 25599  		if v_0.Op != OpRotateLeft32 {
 25600  			break
 25601  		}
 25602  		c := v_0.Args[1]
 25603  		x := v_0.Args[0]
 25604  		d := v_1
 25605  		if !(c.Type.Size() == 2 && d.Type.Size() == 2) {
 25606  			break
 25607  		}
 25608  		v.reset(OpRotateLeft32)
 25609  		v0 := b.NewValue0(v.Pos, OpAdd16, c.Type)
 25610  		v0.AddArg2(c, d)
 25611  		v.AddArg2(x, v0)
 25612  		return true
 25613  	}
 25614  	// match: (RotateLeft32 (RotateLeft32 x c) d)
 25615  	// cond: c.Type.Size() == 1 && d.Type.Size() == 1
 25616  	// result: (RotateLeft32 x (Add8 <c.Type> c d))
 25617  	for {
 25618  		if v_0.Op != OpRotateLeft32 {
 25619  			break
 25620  		}
 25621  		c := v_0.Args[1]
 25622  		x := v_0.Args[0]
 25623  		d := v_1
 25624  		if !(c.Type.Size() == 1 && d.Type.Size() == 1) {
 25625  			break
 25626  		}
 25627  		v.reset(OpRotateLeft32)
 25628  		v0 := b.NewValue0(v.Pos, OpAdd8, c.Type)
 25629  		v0.AddArg2(c, d)
 25630  		v.AddArg2(x, v0)
 25631  		return true
 25632  	}
 25633  	return false
 25634  }
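// rewriteValuegeneric_OpRotateLeft64 applies the same rotation-count
// normalization, with counts taken modulo 64.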
 25635  func rewriteValuegeneric_OpRotateLeft64(v *Value) bool {
 25636  	v_1 := v.Args[1]
 25637  	v_0 := v.Args[0]
 25638  	b := v.Block
 25639  	config := b.Func.Config
 25640  	// match: (RotateLeft64 x (Const64 [c]))
 25641  	// cond: c%64 == 0
 25642  	// result: x
 25643  	for {
 25644  		x := v_0
 25645  		if v_1.Op != OpConst64 {
 25646  			break
 25647  		}
 25648  		c := auxIntToInt64(v_1.AuxInt)
 25649  		if !(c%64 == 0) {
 25650  			break
 25651  		}
 25652  		v.copyOf(x)
 25653  		return true
 25654  	}
 25655  	// match: (RotateLeft64 x (And64 y (Const64 [c])))
 25656  	// cond: c&63 == 63
 25657  	// result: (RotateLeft64 x y)
 25658  	for {
 25659  		x := v_0
 25660  		if v_1.Op != OpAnd64 {
 25661  			break
 25662  		}
 25663  		_ = v_1.Args[1]
 25664  		v_1_0 := v_1.Args[0]
 25665  		v_1_1 := v_1.Args[1]
 25666  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 25667  			y := v_1_0
 25668  			if v_1_1.Op != OpConst64 {
 25669  				continue
 25670  			}
 25671  			c := auxIntToInt64(v_1_1.AuxInt)
 25672  			if !(c&63 == 63) {
 25673  				continue
 25674  			}
 25675  			v.reset(OpRotateLeft64)
 25676  			v.AddArg2(x, y)
 25677  			return true
 25678  		}
 25679  		break
 25680  	}
 25681  	// match: (RotateLeft64 x (And32 y (Const32 [c])))
 25682  	// cond: c&63 == 63
 25683  	// result: (RotateLeft64 x y)
 25684  	for {
 25685  		x := v_0
 25686  		if v_1.Op != OpAnd32 {
 25687  			break
 25688  		}
 25689  		_ = v_1.Args[1]
 25690  		v_1_0 := v_1.Args[0]
 25691  		v_1_1 := v_1.Args[1]
 25692  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 25693  			y := v_1_0
 25694  			if v_1_1.Op != OpConst32 {
 25695  				continue
 25696  			}
 25697  			c := auxIntToInt32(v_1_1.AuxInt)
 25698  			if !(c&63 == 63) {
 25699  				continue
 25700  			}
 25701  			v.reset(OpRotateLeft64)
 25702  			v.AddArg2(x, y)
 25703  			return true
 25704  		}
 25705  		break
 25706  	}
 25707  	// match: (RotateLeft64 x (And16 y (Const16 [c])))
 25708  	// cond: c&63 == 63
 25709  	// result: (RotateLeft64 x y)
 25710  	for {
 25711  		x := v_0
 25712  		if v_1.Op != OpAnd16 {
 25713  			break
 25714  		}
 25715  		_ = v_1.Args[1]
 25716  		v_1_0 := v_1.Args[0]
 25717  		v_1_1 := v_1.Args[1]
 25718  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 25719  			y := v_1_0
 25720  			if v_1_1.Op != OpConst16 {
 25721  				continue
 25722  			}
 25723  			c := auxIntToInt16(v_1_1.AuxInt)
 25724  			if !(c&63 == 63) {
 25725  				continue
 25726  			}
 25727  			v.reset(OpRotateLeft64)
 25728  			v.AddArg2(x, y)
 25729  			return true
 25730  		}
 25731  		break
 25732  	}
 25733  	// match: (RotateLeft64 x (And8 y (Const8 [c])))
 25734  	// cond: c&63 == 63
 25735  	// result: (RotateLeft64 x y)
 25736  	for {
 25737  		x := v_0
 25738  		if v_1.Op != OpAnd8 {
 25739  			break
 25740  		}
 25741  		_ = v_1.Args[1]
 25742  		v_1_0 := v_1.Args[0]
 25743  		v_1_1 := v_1.Args[1]
 25744  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 25745  			y := v_1_0
 25746  			if v_1_1.Op != OpConst8 {
 25747  				continue
 25748  			}
 25749  			c := auxIntToInt8(v_1_1.AuxInt)
 25750  			if !(c&63 == 63) {
 25751  				continue
 25752  			}
 25753  			v.reset(OpRotateLeft64)
 25754  			v.AddArg2(x, y)
 25755  			return true
 25756  		}
 25757  		break
 25758  	}
 25759  	// match: (RotateLeft64 x (Neg64 (And64 y (Const64 [c]))))
 25760  	// cond: c&63 == 63
 25761  	// result: (RotateLeft64 x (Neg64 <y.Type> y))
 25762  	for {
 25763  		x := v_0
 25764  		if v_1.Op != OpNeg64 {
 25765  			break
 25766  		}
 25767  		v_1_0 := v_1.Args[0]
 25768  		if v_1_0.Op != OpAnd64 {
 25769  			break
 25770  		}
 25771  		_ = v_1_0.Args[1]
 25772  		v_1_0_0 := v_1_0.Args[0]
 25773  		v_1_0_1 := v_1_0.Args[1]
 25774  		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
 25775  			y := v_1_0_0
 25776  			if v_1_0_1.Op != OpConst64 {
 25777  				continue
 25778  			}
 25779  			c := auxIntToInt64(v_1_0_1.AuxInt)
 25780  			if !(c&63 == 63) {
 25781  				continue
 25782  			}
 25783  			v.reset(OpRotateLeft64)
 25784  			v0 := b.NewValue0(v.Pos, OpNeg64, y.Type)
 25785  			v0.AddArg(y)
 25786  			v.AddArg2(x, v0)
 25787  			return true
 25788  		}
 25789  		break
 25790  	}
 25791  	// match: (RotateLeft64 x (Neg32 (And32 y (Const32 [c]))))
 25792  	// cond: c&63 == 63
 25793  	// result: (RotateLeft64 x (Neg32 <y.Type> y))
 25794  	for {
 25795  		x := v_0
 25796  		if v_1.Op != OpNeg32 {
 25797  			break
 25798  		}
 25799  		v_1_0 := v_1.Args[0]
 25800  		if v_1_0.Op != OpAnd32 {
 25801  			break
 25802  		}
 25803  		_ = v_1_0.Args[1]
 25804  		v_1_0_0 := v_1_0.Args[0]
 25805  		v_1_0_1 := v_1_0.Args[1]
 25806  		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
 25807  			y := v_1_0_0
 25808  			if v_1_0_1.Op != OpConst32 {
 25809  				continue
 25810  			}
 25811  			c := auxIntToInt32(v_1_0_1.AuxInt)
 25812  			if !(c&63 == 63) {
 25813  				continue
 25814  			}
 25815  			v.reset(OpRotateLeft64)
 25816  			v0 := b.NewValue0(v.Pos, OpNeg32, y.Type)
 25817  			v0.AddArg(y)
 25818  			v.AddArg2(x, v0)
 25819  			return true
 25820  		}
 25821  		break
 25822  	}
 25823  	// match: (RotateLeft64 x (Neg16 (And16 y (Const16 [c]))))
 25824  	// cond: c&63 == 63
 25825  	// result: (RotateLeft64 x (Neg16 <y.Type> y))
 25826  	for {
 25827  		x := v_0
 25828  		if v_1.Op != OpNeg16 {
 25829  			break
 25830  		}
 25831  		v_1_0 := v_1.Args[0]
 25832  		if v_1_0.Op != OpAnd16 {
 25833  			break
 25834  		}
 25835  		_ = v_1_0.Args[1]
 25836  		v_1_0_0 := v_1_0.Args[0]
 25837  		v_1_0_1 := v_1_0.Args[1]
 25838  		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
 25839  			y := v_1_0_0
 25840  			if v_1_0_1.Op != OpConst16 {
 25841  				continue
 25842  			}
 25843  			c := auxIntToInt16(v_1_0_1.AuxInt)
 25844  			if !(c&63 == 63) {
 25845  				continue
 25846  			}
 25847  			v.reset(OpRotateLeft64)
 25848  			v0 := b.NewValue0(v.Pos, OpNeg16, y.Type)
 25849  			v0.AddArg(y)
 25850  			v.AddArg2(x, v0)
 25851  			return true
 25852  		}
 25853  		break
 25854  	}
 25855  	// match: (RotateLeft64 x (Neg8 (And8 y (Const8 [c]))))
 25856  	// cond: c&63 == 63
 25857  	// result: (RotateLeft64 x (Neg8 <y.Type> y))
 25858  	for {
 25859  		x := v_0
 25860  		if v_1.Op != OpNeg8 {
 25861  			break
 25862  		}
 25863  		v_1_0 := v_1.Args[0]
 25864  		if v_1_0.Op != OpAnd8 {
 25865  			break
 25866  		}
 25867  		_ = v_1_0.Args[1]
 25868  		v_1_0_0 := v_1_0.Args[0]
 25869  		v_1_0_1 := v_1_0.Args[1]
 25870  		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
 25871  			y := v_1_0_0
 25872  			if v_1_0_1.Op != OpConst8 {
 25873  				continue
 25874  			}
 25875  			c := auxIntToInt8(v_1_0_1.AuxInt)
 25876  			if !(c&63 == 63) {
 25877  				continue
 25878  			}
 25879  			v.reset(OpRotateLeft64)
 25880  			v0 := b.NewValue0(v.Pos, OpNeg8, y.Type)
 25881  			v0.AddArg(y)
 25882  			v.AddArg2(x, v0)
 25883  			return true
 25884  		}
 25885  		break
 25886  	}
 25887  	// match: (RotateLeft64 x (Add64 y (Const64 [c])))
 25888  	// cond: c&63 == 0
 25889  	// result: (RotateLeft64 x y)
 25890  	for {
 25891  		x := v_0
 25892  		if v_1.Op != OpAdd64 {
 25893  			break
 25894  		}
 25895  		_ = v_1.Args[1]
 25896  		v_1_0 := v_1.Args[0]
 25897  		v_1_1 := v_1.Args[1]
 25898  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 25899  			y := v_1_0
 25900  			if v_1_1.Op != OpConst64 {
 25901  				continue
 25902  			}
 25903  			c := auxIntToInt64(v_1_1.AuxInt)
 25904  			if !(c&63 == 0) {
 25905  				continue
 25906  			}
 25907  			v.reset(OpRotateLeft64)
 25908  			v.AddArg2(x, y)
 25909  			return true
 25910  		}
 25911  		break
 25912  	}
 25913  	// match: (RotateLeft64 x (Add32 y (Const32 [c])))
 25914  	// cond: c&63 == 0
 25915  	// result: (RotateLeft64 x y)
 25916  	for {
 25917  		x := v_0
 25918  		if v_1.Op != OpAdd32 {
 25919  			break
 25920  		}
 25921  		_ = v_1.Args[1]
 25922  		v_1_0 := v_1.Args[0]
 25923  		v_1_1 := v_1.Args[1]
 25924  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 25925  			y := v_1_0
 25926  			if v_1_1.Op != OpConst32 {
 25927  				continue
 25928  			}
 25929  			c := auxIntToInt32(v_1_1.AuxInt)
 25930  			if !(c&63 == 0) {
 25931  				continue
 25932  			}
 25933  			v.reset(OpRotateLeft64)
 25934  			v.AddArg2(x, y)
 25935  			return true
 25936  		}
 25937  		break
 25938  	}
 25939  	// match: (RotateLeft64 x (Add16 y (Const16 [c])))
 25940  	// cond: c&63 == 0
 25941  	// result: (RotateLeft64 x y)
 25942  	for {
 25943  		x := v_0
 25944  		if v_1.Op != OpAdd16 {
 25945  			break
 25946  		}
 25947  		_ = v_1.Args[1]
 25948  		v_1_0 := v_1.Args[0]
 25949  		v_1_1 := v_1.Args[1]
 25950  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 25951  			y := v_1_0
 25952  			if v_1_1.Op != OpConst16 {
 25953  				continue
 25954  			}
 25955  			c := auxIntToInt16(v_1_1.AuxInt)
 25956  			if !(c&63 == 0) {
 25957  				continue
 25958  			}
 25959  			v.reset(OpRotateLeft64)
 25960  			v.AddArg2(x, y)
 25961  			return true
 25962  		}
 25963  		break
 25964  	}
 25965  	// match: (RotateLeft64 x (Add8 y (Const8 [c])))
 25966  	// cond: c&63 == 0
 25967  	// result: (RotateLeft64 x y)
 25968  	for {
 25969  		x := v_0
 25970  		if v_1.Op != OpAdd8 {
 25971  			break
 25972  		}
 25973  		_ = v_1.Args[1]
 25974  		v_1_0 := v_1.Args[0]
 25975  		v_1_1 := v_1.Args[1]
 25976  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 25977  			y := v_1_0
 25978  			if v_1_1.Op != OpConst8 {
 25979  				continue
 25980  			}
 25981  			c := auxIntToInt8(v_1_1.AuxInt)
 25982  			if !(c&63 == 0) {
 25983  				continue
 25984  			}
 25985  			v.reset(OpRotateLeft64)
 25986  			v.AddArg2(x, y)
 25987  			return true
 25988  		}
 25989  		break
 25990  	}
 25991  	// match: (RotateLeft64 x (Sub64 (Const64 [c]) y))
 25992  	// cond: c&63 == 0
 25993  	// result: (RotateLeft64 x (Neg64 <y.Type> y))
 25994  	for {
 25995  		x := v_0
 25996  		if v_1.Op != OpSub64 {
 25997  			break
 25998  		}
 25999  		y := v_1.Args[1]
 26000  		v_1_0 := v_1.Args[0]
 26001  		if v_1_0.Op != OpConst64 {
 26002  			break
 26003  		}
 26004  		c := auxIntToInt64(v_1_0.AuxInt)
 26005  		if !(c&63 == 0) {
 26006  			break
 26007  		}
 26008  		v.reset(OpRotateLeft64)
 26009  		v0 := b.NewValue0(v.Pos, OpNeg64, y.Type)
 26010  		v0.AddArg(y)
 26011  		v.AddArg2(x, v0)
 26012  		return true
 26013  	}
 26014  	// match: (RotateLeft64 x (Sub32 (Const32 [c]) y))
 26015  	// cond: c&63 == 0
 26016  	// result: (RotateLeft64 x (Neg32 <y.Type> y))
 26017  	for {
 26018  		x := v_0
 26019  		if v_1.Op != OpSub32 {
 26020  			break
 26021  		}
 26022  		y := v_1.Args[1]
 26023  		v_1_0 := v_1.Args[0]
 26024  		if v_1_0.Op != OpConst32 {
 26025  			break
 26026  		}
 26027  		c := auxIntToInt32(v_1_0.AuxInt)
 26028  		if !(c&63 == 0) {
 26029  			break
 26030  		}
 26031  		v.reset(OpRotateLeft64)
 26032  		v0 := b.NewValue0(v.Pos, OpNeg32, y.Type)
 26033  		v0.AddArg(y)
 26034  		v.AddArg2(x, v0)
 26035  		return true
 26036  	}
 26037  	// match: (RotateLeft64 x (Sub16 (Const16 [c]) y))
 26038  	// cond: c&63 == 0
 26039  	// result: (RotateLeft64 x (Neg16 <y.Type> y))
 26040  	for {
 26041  		x := v_0
 26042  		if v_1.Op != OpSub16 {
 26043  			break
 26044  		}
 26045  		y := v_1.Args[1]
 26046  		v_1_0 := v_1.Args[0]
 26047  		if v_1_0.Op != OpConst16 {
 26048  			break
 26049  		}
 26050  		c := auxIntToInt16(v_1_0.AuxInt)
 26051  		if !(c&63 == 0) {
 26052  			break
 26053  		}
 26054  		v.reset(OpRotateLeft64)
 26055  		v0 := b.NewValue0(v.Pos, OpNeg16, y.Type)
 26056  		v0.AddArg(y)
 26057  		v.AddArg2(x, v0)
 26058  		return true
 26059  	}
 26060  	// match: (RotateLeft64 x (Sub8 (Const8 [c]) y))
 26061  	// cond: c&63 == 0
 26062  	// result: (RotateLeft64 x (Neg8 <y.Type> y))
 26063  	for {
 26064  		x := v_0
 26065  		if v_1.Op != OpSub8 {
 26066  			break
 26067  		}
 26068  		y := v_1.Args[1]
 26069  		v_1_0 := v_1.Args[0]
 26070  		if v_1_0.Op != OpConst8 {
 26071  			break
 26072  		}
 26073  		c := auxIntToInt8(v_1_0.AuxInt)
 26074  		if !(c&63 == 0) {
 26075  			break
 26076  		}
 26077  		v.reset(OpRotateLeft64)
 26078  		v0 := b.NewValue0(v.Pos, OpNeg8, y.Type)
 26079  		v0.AddArg(y)
 26080  		v.AddArg2(x, v0)
 26081  		return true
 26082  	}
 26083  	// match: (RotateLeft64 x (Const64 <t> [c]))
 26084  	// cond: config.PtrSize == 4
 26085  	// result: (RotateLeft64 x (Const32 <t> [int32(c)]))
 26086  	for {
 26087  		x := v_0
 26088  		if v_1.Op != OpConst64 {
 26089  			break
 26090  		}
 26091  		t := v_1.Type
 26092  		c := auxIntToInt64(v_1.AuxInt)
 26093  		if !(config.PtrSize == 4) {
 26094  			break
 26095  		}
 26096  		v.reset(OpRotateLeft64)
 26097  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 26098  		v0.AuxInt = int32ToAuxInt(int32(c))
 26099  		v.AddArg2(x, v0)
 26100  		return true
 26101  	}
 26102  	// match: (RotateLeft64 (RotateLeft64 x c) d)
 26103  	// cond: c.Type.Size() == 8 && d.Type.Size() == 8
 26104  	// result: (RotateLeft64 x (Add64 <c.Type> c d))
 26105  	for {
 26106  		if v_0.Op != OpRotateLeft64 {
 26107  			break
 26108  		}
 26109  		c := v_0.Args[1]
 26110  		x := v_0.Args[0]
 26111  		d := v_1
 26112  		if !(c.Type.Size() == 8 && d.Type.Size() == 8) {
 26113  			break
 26114  		}
 26115  		v.reset(OpRotateLeft64)
 26116  		v0 := b.NewValue0(v.Pos, OpAdd64, c.Type)
 26117  		v0.AddArg2(c, d)
 26118  		v.AddArg2(x, v0)
 26119  		return true
 26120  	}
 26121  	// match: (RotateLeft64 (RotateLeft64 x c) d)
 26122  	// cond: c.Type.Size() == 4 && d.Type.Size() == 4
 26123  	// result: (RotateLeft64 x (Add32 <c.Type> c d))
 26124  	for {
 26125  		if v_0.Op != OpRotateLeft64 {
 26126  			break
 26127  		}
 26128  		c := v_0.Args[1]
 26129  		x := v_0.Args[0]
 26130  		d := v_1
 26131  		if !(c.Type.Size() == 4 && d.Type.Size() == 4) {
 26132  			break
 26133  		}
 26134  		v.reset(OpRotateLeft64)
 26135  		v0 := b.NewValue0(v.Pos, OpAdd32, c.Type)
 26136  		v0.AddArg2(c, d)
 26137  		v.AddArg2(x, v0)
 26138  		return true
 26139  	}
 26140  	// match: (RotateLeft64 (RotateLeft64 x c) d)
 26141  	// cond: c.Type.Size() == 2 && d.Type.Size() == 2
 26142  	// result: (RotateLeft64 x (Add16 <c.Type> c d))
 26143  	for {
 26144  		if v_0.Op != OpRotateLeft64 {
 26145  			break
 26146  		}
 26147  		c := v_0.Args[1]
 26148  		x := v_0.Args[0]
 26149  		d := v_1
 26150  		if !(c.Type.Size() == 2 && d.Type.Size() == 2) {
 26151  			break
 26152  		}
 26153  		v.reset(OpRotateLeft64)
 26154  		v0 := b.NewValue0(v.Pos, OpAdd16, c.Type)
 26155  		v0.AddArg2(c, d)
 26156  		v.AddArg2(x, v0)
 26157  		return true
 26158  	}
 26159  	// match: (RotateLeft64 (RotateLeft64 x c) d)
 26160  	// cond: c.Type.Size() == 1 && d.Type.Size() == 1
 26161  	// result: (RotateLeft64 x (Add8 <c.Type> c d))
 26162  	for {
 26163  		if v_0.Op != OpRotateLeft64 {
 26164  			break
 26165  		}
 26166  		c := v_0.Args[1]
 26167  		x := v_0.Args[0]
 26168  		d := v_1
 26169  		if !(c.Type.Size() == 1 && d.Type.Size() == 1) {
 26170  			break
 26171  		}
 26172  		v.reset(OpRotateLeft64)
 26173  		v0 := b.NewValue0(v.Pos, OpAdd8, c.Type)
 26174  		v0.AddArg2(c, d)
 26175  		v.AddArg2(x, v0)
 26176  		return true
 26177  	}
 26178  	return false
 26179  }
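// rewriteValuegeneric_OpRotateLeft8 applies the same rotation-count
// normalization, with counts taken modulo 8.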
 26180  func rewriteValuegeneric_OpRotateLeft8(v *Value) bool {
 26181  	v_1 := v.Args[1]
 26182  	v_0 := v.Args[0]
 26183  	b := v.Block
 26184  	config := b.Func.Config
 26185  	// match: (RotateLeft8 x (Const8 [c]))
 26186  	// cond: c%8 == 0
 26187  	// result: x
 26188  	for {
 26189  		x := v_0
 26190  		if v_1.Op != OpConst8 {
 26191  			break
 26192  		}
 26193  		c := auxIntToInt8(v_1.AuxInt)
 26194  		if !(c%8 == 0) {
 26195  			break
 26196  		}
 26197  		v.copyOf(x)
 26198  		return true
 26199  	}
 26200  	// match: (RotateLeft8 x (And64 y (Const64 [c])))
 26201  	// cond: c&7 == 7
 26202  	// result: (RotateLeft8 x y)
 26203  	for {
 26204  		x := v_0
 26205  		if v_1.Op != OpAnd64 {
 26206  			break
 26207  		}
 26208  		_ = v_1.Args[1]
 26209  		v_1_0 := v_1.Args[0]
 26210  		v_1_1 := v_1.Args[1]
 26211  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 26212  			y := v_1_0
 26213  			if v_1_1.Op != OpConst64 {
 26214  				continue
 26215  			}
 26216  			c := auxIntToInt64(v_1_1.AuxInt)
 26217  			if !(c&7 == 7) {
 26218  				continue
 26219  			}
 26220  			v.reset(OpRotateLeft8)
 26221  			v.AddArg2(x, y)
 26222  			return true
 26223  		}
 26224  		break
 26225  	}
 26226  	// match: (RotateLeft8 x (And32 y (Const32 [c])))
 26227  	// cond: c&7 == 7
 26228  	// result: (RotateLeft8 x y)
 26229  	for {
 26230  		x := v_0
 26231  		if v_1.Op != OpAnd32 {
 26232  			break
 26233  		}
 26234  		_ = v_1.Args[1]
 26235  		v_1_0 := v_1.Args[0]
 26236  		v_1_1 := v_1.Args[1]
 26237  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 26238  			y := v_1_0
 26239  			if v_1_1.Op != OpConst32 {
 26240  				continue
 26241  			}
 26242  			c := auxIntToInt32(v_1_1.AuxInt)
 26243  			if !(c&7 == 7) {
 26244  				continue
 26245  			}
 26246  			v.reset(OpRotateLeft8)
 26247  			v.AddArg2(x, y)
 26248  			return true
 26249  		}
 26250  		break
 26251  	}
 26252  	// match: (RotateLeft8 x (And16 y (Const16 [c])))
 26253  	// cond: c&7 == 7
 26254  	// result: (RotateLeft8 x y)
 26255  	for {
 26256  		x := v_0
 26257  		if v_1.Op != OpAnd16 {
 26258  			break
 26259  		}
 26260  		_ = v_1.Args[1]
 26261  		v_1_0 := v_1.Args[0]
 26262  		v_1_1 := v_1.Args[1]
 26263  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 26264  			y := v_1_0
 26265  			if v_1_1.Op != OpConst16 {
 26266  				continue
 26267  			}
 26268  			c := auxIntToInt16(v_1_1.AuxInt)
 26269  			if !(c&7 == 7) {
 26270  				continue
 26271  			}
 26272  			v.reset(OpRotateLeft8)
 26273  			v.AddArg2(x, y)
 26274  			return true
 26275  		}
 26276  		break
 26277  	}
 26278  	// match: (RotateLeft8 x (And8 y (Const8 [c])))
 26279  	// cond: c&7 == 7
 26280  	// result: (RotateLeft8 x y)
 26281  	for {
 26282  		x := v_0
 26283  		if v_1.Op != OpAnd8 {
 26284  			break
 26285  		}
 26286  		_ = v_1.Args[1]
 26287  		v_1_0 := v_1.Args[0]
 26288  		v_1_1 := v_1.Args[1]
 26289  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 26290  			y := v_1_0
 26291  			if v_1_1.Op != OpConst8 {
 26292  				continue
 26293  			}
 26294  			c := auxIntToInt8(v_1_1.AuxInt)
 26295  			if !(c&7 == 7) {
 26296  				continue
 26297  			}
 26298  			v.reset(OpRotateLeft8)
 26299  			v.AddArg2(x, y)
 26300  			return true
 26301  		}
 26302  		break
 26303  	}
 26304  	// match: (RotateLeft8 x (Neg64 (And64 y (Const64 [c]))))
 26305  	// cond: c&7 == 7
 26306  	// result: (RotateLeft8 x (Neg64 <y.Type> y))
 26307  	for {
 26308  		x := v_0
 26309  		if v_1.Op != OpNeg64 {
 26310  			break
 26311  		}
 26312  		v_1_0 := v_1.Args[0]
 26313  		if v_1_0.Op != OpAnd64 {
 26314  			break
 26315  		}
 26316  		_ = v_1_0.Args[1]
 26317  		v_1_0_0 := v_1_0.Args[0]
 26318  		v_1_0_1 := v_1_0.Args[1]
 26319  		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
 26320  			y := v_1_0_0
 26321  			if v_1_0_1.Op != OpConst64 {
 26322  				continue
 26323  			}
 26324  			c := auxIntToInt64(v_1_0_1.AuxInt)
 26325  			if !(c&7 == 7) {
 26326  				continue
 26327  			}
 26328  			v.reset(OpRotateLeft8)
 26329  			v0 := b.NewValue0(v.Pos, OpNeg64, y.Type)
 26330  			v0.AddArg(y)
 26331  			v.AddArg2(x, v0)
 26332  			return true
 26333  		}
 26334  		break
 26335  	}
 26336  	// match: (RotateLeft8 x (Neg32 (And32 y (Const32 [c]))))
 26337  	// cond: c&7 == 7
 26338  	// result: (RotateLeft8 x (Neg32 <y.Type> y))
 26339  	for {
 26340  		x := v_0
 26341  		if v_1.Op != OpNeg32 {
 26342  			break
 26343  		}
 26344  		v_1_0 := v_1.Args[0]
 26345  		if v_1_0.Op != OpAnd32 {
 26346  			break
 26347  		}
 26348  		_ = v_1_0.Args[1]
 26349  		v_1_0_0 := v_1_0.Args[0]
 26350  		v_1_0_1 := v_1_0.Args[1]
 26351  		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
 26352  			y := v_1_0_0
 26353  			if v_1_0_1.Op != OpConst32 {
 26354  				continue
 26355  			}
 26356  			c := auxIntToInt32(v_1_0_1.AuxInt)
 26357  			if !(c&7 == 7) {
 26358  				continue
 26359  			}
 26360  			v.reset(OpRotateLeft8)
 26361  			v0 := b.NewValue0(v.Pos, OpNeg32, y.Type)
 26362  			v0.AddArg(y)
 26363  			v.AddArg2(x, v0)
 26364  			return true
 26365  		}
 26366  		break
 26367  	}
 26368  	// match: (RotateLeft8 x (Neg16 (And16 y (Const16 [c]))))
 26369  	// cond: c&7 == 7
 26370  	// result: (RotateLeft8 x (Neg16 <y.Type> y))
 26371  	for {
 26372  		x := v_0
 26373  		if v_1.Op != OpNeg16 {
 26374  			break
 26375  		}
 26376  		v_1_0 := v_1.Args[0]
 26377  		if v_1_0.Op != OpAnd16 {
 26378  			break
 26379  		}
 26380  		_ = v_1_0.Args[1]
 26381  		v_1_0_0 := v_1_0.Args[0]
 26382  		v_1_0_1 := v_1_0.Args[1]
 26383  		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
 26384  			y := v_1_0_0
 26385  			if v_1_0_1.Op != OpConst16 {
 26386  				continue
 26387  			}
 26388  			c := auxIntToInt16(v_1_0_1.AuxInt)
 26389  			if !(c&7 == 7) {
 26390  				continue
 26391  			}
 26392  			v.reset(OpRotateLeft8)
 26393  			v0 := b.NewValue0(v.Pos, OpNeg16, y.Type)
 26394  			v0.AddArg(y)
 26395  			v.AddArg2(x, v0)
 26396  			return true
 26397  		}
 26398  		break
 26399  	}
 26400  	// match: (RotateLeft8 x (Neg8 (And8 y (Const8 [c]))))
 26401  	// cond: c&7 == 7
 26402  	// result: (RotateLeft8 x (Neg8 <y.Type> y))
 26403  	for {
 26404  		x := v_0
 26405  		if v_1.Op != OpNeg8 {
 26406  			break
 26407  		}
 26408  		v_1_0 := v_1.Args[0]
 26409  		if v_1_0.Op != OpAnd8 {
 26410  			break
 26411  		}
 26412  		_ = v_1_0.Args[1]
 26413  		v_1_0_0 := v_1_0.Args[0]
 26414  		v_1_0_1 := v_1_0.Args[1]
 26415  		for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
 26416  			y := v_1_0_0
 26417  			if v_1_0_1.Op != OpConst8 {
 26418  				continue
 26419  			}
 26420  			c := auxIntToInt8(v_1_0_1.AuxInt)
 26421  			if !(c&7 == 7) {
 26422  				continue
 26423  			}
 26424  			v.reset(OpRotateLeft8)
 26425  			v0 := b.NewValue0(v.Pos, OpNeg8, y.Type)
 26426  			v0.AddArg(y)
 26427  			v.AddArg2(x, v0)
 26428  			return true
 26429  		}
 26430  		break
 26431  	}
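	// The Neg variants above are the rotate-right form of the same thing:
	// rotating left by -y is rotating right by y, and again only the low 3
	// bits of the count matter, so the mask is dropped while the negation is
	// kept, e.g.
	//
	//	bits.RotateLeft8(x, -int(y&7)) == bits.RotateLeft8(x, -int(y))   // any non-negative count y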
 26432  	// match: (RotateLeft8 x (Add64 y (Const64 [c])))
 26433  	// cond: c&7 == 0
 26434  	// result: (RotateLeft8 x y)
 26435  	for {
 26436  		x := v_0
 26437  		if v_1.Op != OpAdd64 {
 26438  			break
 26439  		}
 26440  		_ = v_1.Args[1]
 26441  		v_1_0 := v_1.Args[0]
 26442  		v_1_1 := v_1.Args[1]
 26443  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 26444  			y := v_1_0
 26445  			if v_1_1.Op != OpConst64 {
 26446  				continue
 26447  			}
 26448  			c := auxIntToInt64(v_1_1.AuxInt)
 26449  			if !(c&7 == 0) {
 26450  				continue
 26451  			}
 26452  			v.reset(OpRotateLeft8)
 26453  			v.AddArg2(x, y)
 26454  			return true
 26455  		}
 26456  		break
 26457  	}
 26458  	// match: (RotateLeft8 x (Add32 y (Const32 [c])))
 26459  	// cond: c&7 == 0
 26460  	// result: (RotateLeft8 x y)
 26461  	for {
 26462  		x := v_0
 26463  		if v_1.Op != OpAdd32 {
 26464  			break
 26465  		}
 26466  		_ = v_1.Args[1]
 26467  		v_1_0 := v_1.Args[0]
 26468  		v_1_1 := v_1.Args[1]
 26469  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 26470  			y := v_1_0
 26471  			if v_1_1.Op != OpConst32 {
 26472  				continue
 26473  			}
 26474  			c := auxIntToInt32(v_1_1.AuxInt)
 26475  			if !(c&7 == 0) {
 26476  				continue
 26477  			}
 26478  			v.reset(OpRotateLeft8)
 26479  			v.AddArg2(x, y)
 26480  			return true
 26481  		}
 26482  		break
 26483  	}
 26484  	// match: (RotateLeft8 x (Add16 y (Const16 [c])))
 26485  	// cond: c&7 == 0
 26486  	// result: (RotateLeft8 x y)
 26487  	for {
 26488  		x := v_0
 26489  		if v_1.Op != OpAdd16 {
 26490  			break
 26491  		}
 26492  		_ = v_1.Args[1]
 26493  		v_1_0 := v_1.Args[0]
 26494  		v_1_1 := v_1.Args[1]
 26495  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 26496  			y := v_1_0
 26497  			if v_1_1.Op != OpConst16 {
 26498  				continue
 26499  			}
 26500  			c := auxIntToInt16(v_1_1.AuxInt)
 26501  			if !(c&7 == 0) {
 26502  				continue
 26503  			}
 26504  			v.reset(OpRotateLeft8)
 26505  			v.AddArg2(x, y)
 26506  			return true
 26507  		}
 26508  		break
 26509  	}
 26510  	// match: (RotateLeft8 x (Add8 y (Const8 [c])))
 26511  	// cond: c&7 == 0
 26512  	// result: (RotateLeft8 x y)
 26513  	for {
 26514  		x := v_0
 26515  		if v_1.Op != OpAdd8 {
 26516  			break
 26517  		}
 26518  		_ = v_1.Args[1]
 26519  		v_1_0 := v_1.Args[0]
 26520  		v_1_1 := v_1.Args[1]
 26521  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 26522  			y := v_1_0
 26523  			if v_1_1.Op != OpConst8 {
 26524  				continue
 26525  			}
 26526  			c := auxIntToInt8(v_1_1.AuxInt)
 26527  			if !(c&7 == 0) {
 26528  				continue
 26529  			}
 26530  			v.reset(OpRotateLeft8)
 26531  			v.AddArg2(x, y)
 26532  			return true
 26533  		}
 26534  		break
 26535  	}
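	// The Add rules above use the same fact in the other direction: adding a
	// multiple of 8 to the count leaves an 8-bit rotation unchanged, so the
	// Const term with c&7 == 0 is simply discarded, e.g.
	//
	//	bits.RotateLeft8(x, k+8) == bits.RotateLeft8(x, k)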
 26536  	// match: (RotateLeft8 x (Sub64 (Const64 [c]) y))
 26537  	// cond: c&7 == 0
 26538  	// result: (RotateLeft8 x (Neg64 <y.Type> y))
 26539  	for {
 26540  		x := v_0
 26541  		if v_1.Op != OpSub64 {
 26542  			break
 26543  		}
 26544  		y := v_1.Args[1]
 26545  		v_1_0 := v_1.Args[0]
 26546  		if v_1_0.Op != OpConst64 {
 26547  			break
 26548  		}
 26549  		c := auxIntToInt64(v_1_0.AuxInt)
 26550  		if !(c&7 == 0) {
 26551  			break
 26552  		}
 26553  		v.reset(OpRotateLeft8)
 26554  		v0 := b.NewValue0(v.Pos, OpNeg64, y.Type)
 26555  		v0.AddArg(y)
 26556  		v.AddArg2(x, v0)
 26557  		return true
 26558  	}
 26559  	// match: (RotateLeft8 x (Sub32 (Const32 [c]) y))
 26560  	// cond: c&7 == 0
 26561  	// result: (RotateLeft8 x (Neg32 <y.Type> y))
 26562  	for {
 26563  		x := v_0
 26564  		if v_1.Op != OpSub32 {
 26565  			break
 26566  		}
 26567  		y := v_1.Args[1]
 26568  		v_1_0 := v_1.Args[0]
 26569  		if v_1_0.Op != OpConst32 {
 26570  			break
 26571  		}
 26572  		c := auxIntToInt32(v_1_0.AuxInt)
 26573  		if !(c&7 == 0) {
 26574  			break
 26575  		}
 26576  		v.reset(OpRotateLeft8)
 26577  		v0 := b.NewValue0(v.Pos, OpNeg32, y.Type)
 26578  		v0.AddArg(y)
 26579  		v.AddArg2(x, v0)
 26580  		return true
 26581  	}
 26582  	// match: (RotateLeft8 x (Sub16 (Const16 [c]) y))
 26583  	// cond: c&7 == 0
 26584  	// result: (RotateLeft8 x (Neg16 <y.Type> y))
 26585  	for {
 26586  		x := v_0
 26587  		if v_1.Op != OpSub16 {
 26588  			break
 26589  		}
 26590  		y := v_1.Args[1]
 26591  		v_1_0 := v_1.Args[0]
 26592  		if v_1_0.Op != OpConst16 {
 26593  			break
 26594  		}
 26595  		c := auxIntToInt16(v_1_0.AuxInt)
 26596  		if !(c&7 == 0) {
 26597  			break
 26598  		}
 26599  		v.reset(OpRotateLeft8)
 26600  		v0 := b.NewValue0(v.Pos, OpNeg16, y.Type)
 26601  		v0.AddArg(y)
 26602  		v.AddArg2(x, v0)
 26603  		return true
 26604  	}
 26605  	// match: (RotateLeft8 x (Sub8 (Const8 [c]) y))
 26606  	// cond: c&7 == 0
 26607  	// result: (RotateLeft8 x (Neg8 <y.Type> y))
 26608  	for {
 26609  		x := v_0
 26610  		if v_1.Op != OpSub8 {
 26611  			break
 26612  		}
 26613  		y := v_1.Args[1]
 26614  		v_1_0 := v_1.Args[0]
 26615  		if v_1_0.Op != OpConst8 {
 26616  			break
 26617  		}
 26618  		c := auxIntToInt8(v_1_0.AuxInt)
 26619  		if !(c&7 == 0) {
 26620  			break
 26621  		}
 26622  		v.reset(OpRotateLeft8)
 26623  		v0 := b.NewValue0(v.Pos, OpNeg8, y.Type)
 26624  		v0.AddArg(y)
 26625  		v.AddArg2(x, v0)
 26626  		return true
 26627  	}
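	// For the Sub rules above, c - y with c&7 == 0 is congruent to -y modulo
	// 8, so the subtraction is replaced by a plain negation of the count
	// (rotate left by -k is rotate right by k), e.g.
	//
	//	bits.RotateLeft8(x, 8-k) == bits.RotateLeft8(x, -k)   // 0 <= k <= 8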
 26628  	// match: (RotateLeft8 x (Const64 <t> [c]))
 26629  	// cond: config.PtrSize == 4
 26630  	// result: (RotateLeft8 x (Const32 <t> [int32(c)]))
 26631  	for {
 26632  		x := v_0
 26633  		if v_1.Op != OpConst64 {
 26634  			break
 26635  		}
 26636  		t := v_1.Type
 26637  		c := auxIntToInt64(v_1.AuxInt)
 26638  		if !(config.PtrSize == 4) {
 26639  			break
 26640  		}
 26641  		v.reset(OpRotateLeft8)
 26642  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 26643  		v0.AuxInt = int32ToAuxInt(int32(c))
 26644  		v.AddArg2(x, v0)
 26645  		return true
 26646  	}
 26647  	// match: (RotateLeft8 (RotateLeft8 x c) d)
 26648  	// cond: c.Type.Size() == 8 && d.Type.Size() == 8
 26649  	// result: (RotateLeft8 x (Add64 <c.Type> c d))
 26650  	for {
 26651  		if v_0.Op != OpRotateLeft8 {
 26652  			break
 26653  		}
 26654  		c := v_0.Args[1]
 26655  		x := v_0.Args[0]
 26656  		d := v_1
 26657  		if !(c.Type.Size() == 8 && d.Type.Size() == 8) {
 26658  			break
 26659  		}
 26660  		v.reset(OpRotateLeft8)
 26661  		v0 := b.NewValue0(v.Pos, OpAdd64, c.Type)
 26662  		v0.AddArg2(c, d)
 26663  		v.AddArg2(x, v0)
 26664  		return true
 26665  	}
 26666  	// match: (RotateLeft8 (RotateLeft8 x c) d)
 26667  	// cond: c.Type.Size() == 4 && d.Type.Size() == 4
 26668  	// result: (RotateLeft8 x (Add32 <c.Type> c d))
 26669  	for {
 26670  		if v_0.Op != OpRotateLeft8 {
 26671  			break
 26672  		}
 26673  		c := v_0.Args[1]
 26674  		x := v_0.Args[0]
 26675  		d := v_1
 26676  		if !(c.Type.Size() == 4 && d.Type.Size() == 4) {
 26677  			break
 26678  		}
 26679  		v.reset(OpRotateLeft8)
 26680  		v0 := b.NewValue0(v.Pos, OpAdd32, c.Type)
 26681  		v0.AddArg2(c, d)
 26682  		v.AddArg2(x, v0)
 26683  		return true
 26684  	}
 26685  	// match: (RotateLeft8 (RotateLeft8 x c) d)
 26686  	// cond: c.Type.Size() == 2 && d.Type.Size() == 2
 26687  	// result: (RotateLeft8 x (Add16 <c.Type> c d))
 26688  	for {
 26689  		if v_0.Op != OpRotateLeft8 {
 26690  			break
 26691  		}
 26692  		c := v_0.Args[1]
 26693  		x := v_0.Args[0]
 26694  		d := v_1
 26695  		if !(c.Type.Size() == 2 && d.Type.Size() == 2) {
 26696  			break
 26697  		}
 26698  		v.reset(OpRotateLeft8)
 26699  		v0 := b.NewValue0(v.Pos, OpAdd16, c.Type)
 26700  		v0.AddArg2(c, d)
 26701  		v.AddArg2(x, v0)
 26702  		return true
 26703  	}
 26704  	// match: (RotateLeft8 (RotateLeft8 x c) d)
 26705  	// cond: c.Type.Size() == 1 && d.Type.Size() == 1
 26706  	// result: (RotateLeft8 x (Add8 <c.Type> c d))
 26707  	for {
 26708  		if v_0.Op != OpRotateLeft8 {
 26709  			break
 26710  		}
 26711  		c := v_0.Args[1]
 26712  		x := v_0.Args[0]
 26713  		d := v_1
 26714  		if !(c.Type.Size() == 1 && d.Type.Size() == 1) {
 26715  			break
 26716  		}
 26717  		v.reset(OpRotateLeft8)
 26718  		v0 := b.NewValue0(v.Pos, OpAdd8, c.Type)
 26719  		v0.AddArg2(c, d)
 26720  		v.AddArg2(x, v0)
 26721  		return true
 26722  	}
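	// The nested-rotate rules above merge two 8-bit rotations into one whose
	// count is the sum, choosing Add64/Add32/Add16/Add8 to match the width of
	// the existing count type:
	//
	//	RotateLeft8(RotateLeft8(x, c), d) == RotateLeft8(x, c+d)
	//
	// The addition may wrap; only c+d modulo 8 is observed.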
 26723  	return false
 26724  }
 26725  func rewriteValuegeneric_OpRound32F(v *Value) bool {
 26726  	v_0 := v.Args[0]
 26727  	// match: (Round32F x:(Const32F))
 26728  	// result: x
 26729  	for {
 26730  		x := v_0
 26731  		if x.Op != OpConst32F {
 26732  			break
 26733  		}
 26734  		v.copyOf(x)
 26735  		return true
 26736  	}
 26737  	return false
 26738  }
 26739  func rewriteValuegeneric_OpRound64F(v *Value) bool {
 26740  	v_0 := v.Args[0]
 26741  	// match: (Round64F x:(Const64F))
 26742  	// result: x
 26743  	for {
 26744  		x := v_0
 26745  		if x.Op != OpConst64F {
 26746  			break
 26747  		}
 26748  		v.copyOf(x)
 26749  		return true
 26750  	}
 26751  	return false
 26752  }
 26753  func rewriteValuegeneric_OpRoundToEven(v *Value) bool {
 26754  	v_0 := v.Args[0]
 26755  	// match: (RoundToEven (Const64F [c]))
 26756  	// result: (Const64F [math.RoundToEven(c)])
 26757  	for {
 26758  		if v_0.Op != OpConst64F {
 26759  			break
 26760  		}
 26761  		c := auxIntToFloat64(v_0.AuxInt)
 26762  		v.reset(OpConst64F)
 26763  		v.AuxInt = float64ToAuxInt(math.RoundToEven(c))
 26764  		return true
 26765  	}
 26766  	return false
 26767  }
 26768  func rewriteValuegeneric_OpRsh16Ux16(v *Value) bool {
 26769  	v_1 := v.Args[1]
 26770  	v_0 := v.Args[0]
 26771  	b := v.Block
 26772  	// match: (Rsh16Ux16 <t> x (Const16 [c]))
 26773  	// result: (Rsh16Ux64 x (Const64 <t> [int64(uint16(c))]))
 26774  	for {
 26775  		t := v.Type
 26776  		x := v_0
 26777  		if v_1.Op != OpConst16 {
 26778  			break
 26779  		}
 26780  		c := auxIntToInt16(v_1.AuxInt)
 26781  		v.reset(OpRsh16Ux64)
 26782  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 26783  		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
 26784  		v.AddArg2(x, v0)
 26785  		return true
 26786  	}
 26787  	// match: (Rsh16Ux16 (Const16 [0]) _)
 26788  	// result: (Const16 [0])
 26789  	for {
 26790  		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
 26791  			break
 26792  		}
 26793  		v.reset(OpConst16)
 26794  		v.AuxInt = int16ToAuxInt(0)
 26795  		return true
 26796  	}
 26797  	return false
 26798  }
 26799  func rewriteValuegeneric_OpRsh16Ux32(v *Value) bool {
 26800  	v_1 := v.Args[1]
 26801  	v_0 := v.Args[0]
 26802  	b := v.Block
 26803  	// match: (Rsh16Ux32 <t> x (Const32 [c]))
 26804  	// result: (Rsh16Ux64 x (Const64 <t> [int64(uint32(c))]))
 26805  	for {
 26806  		t := v.Type
 26807  		x := v_0
 26808  		if v_1.Op != OpConst32 {
 26809  			break
 26810  		}
 26811  		c := auxIntToInt32(v_1.AuxInt)
 26812  		v.reset(OpRsh16Ux64)
 26813  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 26814  		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
 26815  		v.AddArg2(x, v0)
 26816  		return true
 26817  	}
 26818  	// match: (Rsh16Ux32 (Const16 [0]) _)
 26819  	// result: (Const16 [0])
 26820  	for {
 26821  		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
 26822  			break
 26823  		}
 26824  		v.reset(OpConst16)
 26825  		v.AuxInt = int16ToAuxInt(0)
 26826  		return true
 26827  	}
 26828  	return false
 26829  }
 26830  func rewriteValuegeneric_OpRsh16Ux64(v *Value) bool {
 26831  	v_1 := v.Args[1]
 26832  	v_0 := v.Args[0]
 26833  	b := v.Block
 26834  	typ := &b.Func.Config.Types
 26835  	// match: (Rsh16Ux64 (Const16 [c]) (Const64 [d]))
 26836  	// result: (Const16 [int16(uint16(c) >> uint64(d))])
 26837  	for {
 26838  		if v_0.Op != OpConst16 {
 26839  			break
 26840  		}
 26841  		c := auxIntToInt16(v_0.AuxInt)
 26842  		if v_1.Op != OpConst64 {
 26843  			break
 26844  		}
 26845  		d := auxIntToInt64(v_1.AuxInt)
 26846  		v.reset(OpConst16)
 26847  		v.AuxInt = int16ToAuxInt(int16(uint16(c) >> uint64(d)))
 26848  		return true
 26849  	}
 26850  	// match: (Rsh16Ux64 x (Const64 [0]))
 26851  	// result: x
 26852  	for {
 26853  		x := v_0
 26854  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
 26855  			break
 26856  		}
 26857  		v.copyOf(x)
 26858  		return true
 26859  	}
 26860  	// match: (Rsh16Ux64 (Const16 [0]) _)
 26861  	// result: (Const16 [0])
 26862  	for {
 26863  		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
 26864  			break
 26865  		}
 26866  		v.reset(OpConst16)
 26867  		v.AuxInt = int16ToAuxInt(0)
 26868  		return true
 26869  	}
 26870  	// match: (Rsh16Ux64 _ (Const64 [c]))
 26871  	// cond: uint64(c) >= 16
 26872  	// result: (Const16 [0])
 26873  	for {
 26874  		if v_1.Op != OpConst64 {
 26875  			break
 26876  		}
 26877  		c := auxIntToInt64(v_1.AuxInt)
 26878  		if !(uint64(c) >= 16) {
 26879  			break
 26880  		}
 26881  		v.reset(OpConst16)
 26882  		v.AuxInt = int16ToAuxInt(0)
 26883  		return true
 26884  	}
 26885  	// match: (Rsh16Ux64 <t> (Rsh16Ux64 x (Const64 [c])) (Const64 [d]))
 26886  	// cond: !uaddOvf(c,d)
 26887  	// result: (Rsh16Ux64 x (Const64 <t> [c+d]))
 26888  	for {
 26889  		t := v.Type
 26890  		if v_0.Op != OpRsh16Ux64 {
 26891  			break
 26892  		}
 26893  		_ = v_0.Args[1]
 26894  		x := v_0.Args[0]
 26895  		v_0_1 := v_0.Args[1]
 26896  		if v_0_1.Op != OpConst64 {
 26897  			break
 26898  		}
 26899  		c := auxIntToInt64(v_0_1.AuxInt)
 26900  		if v_1.Op != OpConst64 {
 26901  			break
 26902  		}
 26903  		d := auxIntToInt64(v_1.AuxInt)
 26904  		if !(!uaddOvf(c, d)) {
 26905  			break
 26906  		}
 26907  		v.reset(OpRsh16Ux64)
 26908  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 26909  		v0.AuxInt = int64ToAuxInt(c + d)
 26910  		v.AddArg2(x, v0)
 26911  		return true
 26912  	}
 26913  	// match: (Rsh16Ux64 (Rsh16x64 x _) (Const64 <t> [15]))
 26914  	// result: (Rsh16Ux64 x (Const64 <t> [15]))
 26915  	for {
 26916  		if v_0.Op != OpRsh16x64 {
 26917  			break
 26918  		}
 26919  		x := v_0.Args[0]
 26920  		if v_1.Op != OpConst64 {
 26921  			break
 26922  		}
 26923  		t := v_1.Type
 26924  		if auxIntToInt64(v_1.AuxInt) != 15 {
 26925  			break
 26926  		}
 26927  		v.reset(OpRsh16Ux64)
 26928  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 26929  		v0.AuxInt = int64ToAuxInt(15)
 26930  		v.AddArg2(x, v0)
 26931  		return true
 26932  	}
 26933  	// match: (Rsh16Ux64 i:(Lsh16x64 x (Const64 [c])) (Const64 [c]))
 26934  	// cond: c >= 0 && c < 16 && i.Uses == 1
 26935  	// result: (And16 x (Const16 <v.Type> [int16(^uint16(0)>>c)]))
 26936  	for {
 26937  		i := v_0
 26938  		if i.Op != OpLsh16x64 {
 26939  			break
 26940  		}
 26941  		_ = i.Args[1]
 26942  		x := i.Args[0]
 26943  		i_1 := i.Args[1]
 26944  		if i_1.Op != OpConst64 {
 26945  			break
 26946  		}
 26947  		c := auxIntToInt64(i_1.AuxInt)
 26948  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 16 && i.Uses == 1) {
 26949  			break
 26950  		}
 26951  		v.reset(OpAnd16)
 26952  		v0 := b.NewValue0(v.Pos, OpConst16, v.Type)
 26953  		v0.AuxInt = int16ToAuxInt(int16(^uint16(0) >> c))
 26954  		v.AddArg2(x, v0)
 26955  		return true
 26956  	}
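	// The rule above is the usual "left shift then logical right shift by the
	// same constant clears the top c bits" identity, for x of type uint16:
	//
	//	(x << c) >> c == x & (0xffff >> c)   // 0 <= c < 16
	//
	// It fires only when the Lsh result has no other uses (i.Uses == 1), so
	// the shift pair is replaced outright rather than duplicated alongside a
	// new And.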
 26957  	// match: (Rsh16Ux64 (Lsh16x64 (Rsh16Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
 26958  	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
 26959  	// result: (Rsh16Ux64 x (Const64 <typ.UInt64> [c1-c2+c3]))
 26960  	for {
 26961  		if v_0.Op != OpLsh16x64 {
 26962  			break
 26963  		}
 26964  		_ = v_0.Args[1]
 26965  		v_0_0 := v_0.Args[0]
 26966  		if v_0_0.Op != OpRsh16Ux64 {
 26967  			break
 26968  		}
 26969  		_ = v_0_0.Args[1]
 26970  		x := v_0_0.Args[0]
 26971  		v_0_0_1 := v_0_0.Args[1]
 26972  		if v_0_0_1.Op != OpConst64 {
 26973  			break
 26974  		}
 26975  		c1 := auxIntToInt64(v_0_0_1.AuxInt)
 26976  		v_0_1 := v_0.Args[1]
 26977  		if v_0_1.Op != OpConst64 {
 26978  			break
 26979  		}
 26980  		c2 := auxIntToInt64(v_0_1.AuxInt)
 26981  		if v_1.Op != OpConst64 {
 26982  			break
 26983  		}
 26984  		c3 := auxIntToInt64(v_1.AuxInt)
 26985  		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
 26986  			break
 26987  		}
 26988  		v.reset(OpRsh16Ux64)
 26989  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
 26990  		v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
 26991  		v.AddArg2(x, v0)
 26992  		return true
 26993  	}
 26994  	// match: (Rsh16Ux64 (Lsh16x64 x (Const64 [8])) (Const64 [8]))
 26995  	// result: (ZeroExt8to16 (Trunc16to8 <typ.UInt8> x))
 26996  	for {
 26997  		if v_0.Op != OpLsh16x64 {
 26998  			break
 26999  		}
 27000  		_ = v_0.Args[1]
 27001  		x := v_0.Args[0]
 27002  		v_0_1 := v_0.Args[1]
 27003  		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 8 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 8 {
 27004  			break
 27005  		}
 27006  		v.reset(OpZeroExt8to16)
 27007  		v0 := b.NewValue0(v.Pos, OpTrunc16to8, typ.UInt8)
 27008  		v0.AddArg(x)
 27009  		v.AddArg(v0)
 27010  		return true
 27011  	}
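	// The c == 8 special case above is a zero extension of the low byte: for
	// x of type uint16,
	//
	//	(x << 8) >> 8 == uint16(uint8(x))
	//
	// which is exactly ZeroExt8to16(Trunc16to8(x)).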
 27012  	return false
 27013  }
 27014  func rewriteValuegeneric_OpRsh16Ux8(v *Value) bool {
 27015  	v_1 := v.Args[1]
 27016  	v_0 := v.Args[0]
 27017  	b := v.Block
 27018  	// match: (Rsh16Ux8 <t> x (Const8 [c]))
 27019  	// result: (Rsh16Ux64 x (Const64 <t> [int64(uint8(c))]))
 27020  	for {
 27021  		t := v.Type
 27022  		x := v_0
 27023  		if v_1.Op != OpConst8 {
 27024  			break
 27025  		}
 27026  		c := auxIntToInt8(v_1.AuxInt)
 27027  		v.reset(OpRsh16Ux64)
 27028  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 27029  		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
 27030  		v.AddArg2(x, v0)
 27031  		return true
 27032  	}
 27033  	// match: (Rsh16Ux8 (Const16 [0]) _)
 27034  	// result: (Const16 [0])
 27035  	for {
 27036  		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
 27037  			break
 27038  		}
 27039  		v.reset(OpConst16)
 27040  		v.AuxInt = int16ToAuxInt(0)
 27041  		return true
 27042  	}
 27043  	return false
 27044  }
 27045  func rewriteValuegeneric_OpRsh16x16(v *Value) bool {
 27046  	v_1 := v.Args[1]
 27047  	v_0 := v.Args[0]
 27048  	b := v.Block
 27049  	// match: (Rsh16x16 <t> x (Const16 [c]))
 27050  	// result: (Rsh16x64 x (Const64 <t> [int64(uint16(c))]))
 27051  	for {
 27052  		t := v.Type
 27053  		x := v_0
 27054  		if v_1.Op != OpConst16 {
 27055  			break
 27056  		}
 27057  		c := auxIntToInt16(v_1.AuxInt)
 27058  		v.reset(OpRsh16x64)
 27059  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 27060  		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
 27061  		v.AddArg2(x, v0)
 27062  		return true
 27063  	}
 27064  	// match: (Rsh16x16 (Const16 [0]) _)
 27065  	// result: (Const16 [0])
 27066  	for {
 27067  		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
 27068  			break
 27069  		}
 27070  		v.reset(OpConst16)
 27071  		v.AuxInt = int16ToAuxInt(0)
 27072  		return true
 27073  	}
 27074  	return false
 27075  }
 27076  func rewriteValuegeneric_OpRsh16x32(v *Value) bool {
 27077  	v_1 := v.Args[1]
 27078  	v_0 := v.Args[0]
 27079  	b := v.Block
 27080  	// match: (Rsh16x32 <t> x (Const32 [c]))
 27081  	// result: (Rsh16x64 x (Const64 <t> [int64(uint32(c))]))
 27082  	for {
 27083  		t := v.Type
 27084  		x := v_0
 27085  		if v_1.Op != OpConst32 {
 27086  			break
 27087  		}
 27088  		c := auxIntToInt32(v_1.AuxInt)
 27089  		v.reset(OpRsh16x64)
 27090  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 27091  		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
 27092  		v.AddArg2(x, v0)
 27093  		return true
 27094  	}
 27095  	// match: (Rsh16x32 (Const16 [0]) _)
 27096  	// result: (Const16 [0])
 27097  	for {
 27098  		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
 27099  			break
 27100  		}
 27101  		v.reset(OpConst16)
 27102  		v.AuxInt = int16ToAuxInt(0)
 27103  		return true
 27104  	}
 27105  	return false
 27106  }
 27107  func rewriteValuegeneric_OpRsh16x64(v *Value) bool {
 27108  	v_1 := v.Args[1]
 27109  	v_0 := v.Args[0]
 27110  	b := v.Block
 27111  	typ := &b.Func.Config.Types
 27112  	// match: (Rsh16x64 (Const16 [c]) (Const64 [d]))
 27113  	// result: (Const16 [c >> uint64(d)])
 27114  	for {
 27115  		if v_0.Op != OpConst16 {
 27116  			break
 27117  		}
 27118  		c := auxIntToInt16(v_0.AuxInt)
 27119  		if v_1.Op != OpConst64 {
 27120  			break
 27121  		}
 27122  		d := auxIntToInt64(v_1.AuxInt)
 27123  		v.reset(OpConst16)
 27124  		v.AuxInt = int16ToAuxInt(c >> uint64(d))
 27125  		return true
 27126  	}
 27127  	// match: (Rsh16x64 x (Const64 [0]))
 27128  	// result: x
 27129  	for {
 27130  		x := v_0
 27131  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
 27132  			break
 27133  		}
 27134  		v.copyOf(x)
 27135  		return true
 27136  	}
 27137  	// match: (Rsh16x64 (Const16 [0]) _)
 27138  	// result: (Const16 [0])
 27139  	for {
 27140  		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
 27141  			break
 27142  		}
 27143  		v.reset(OpConst16)
 27144  		v.AuxInt = int16ToAuxInt(0)
 27145  		return true
 27146  	}
 27147  	// match: (Rsh16x64 <t> (Rsh16x64 x (Const64 [c])) (Const64 [d]))
 27148  	// cond: !uaddOvf(c,d)
 27149  	// result: (Rsh16x64 x (Const64 <t> [c+d]))
 27150  	for {
 27151  		t := v.Type
 27152  		if v_0.Op != OpRsh16x64 {
 27153  			break
 27154  		}
 27155  		_ = v_0.Args[1]
 27156  		x := v_0.Args[0]
 27157  		v_0_1 := v_0.Args[1]
 27158  		if v_0_1.Op != OpConst64 {
 27159  			break
 27160  		}
 27161  		c := auxIntToInt64(v_0_1.AuxInt)
 27162  		if v_1.Op != OpConst64 {
 27163  			break
 27164  		}
 27165  		d := auxIntToInt64(v_1.AuxInt)
 27166  		if !(!uaddOvf(c, d)) {
 27167  			break
 27168  		}
 27169  		v.reset(OpRsh16x64)
 27170  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 27171  		v0.AuxInt = int64ToAuxInt(c + d)
 27172  		v.AddArg2(x, v0)
 27173  		return true
 27174  	}
 27175  	// match: (Rsh16x64 (Lsh16x64 x (Const64 [8])) (Const64 [8]))
 27176  	// result: (SignExt8to16 (Trunc16to8 <typ.Int8> x))
 27177  	for {
 27178  		if v_0.Op != OpLsh16x64 {
 27179  			break
 27180  		}
 27181  		_ = v_0.Args[1]
 27182  		x := v_0.Args[0]
 27183  		v_0_1 := v_0.Args[1]
 27184  		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 8 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 8 {
 27185  			break
 27186  		}
 27187  		v.reset(OpSignExt8to16)
 27188  		v0 := b.NewValue0(v.Pos, OpTrunc16to8, typ.Int8)
 27189  		v0.AddArg(x)
 27190  		v.AddArg(v0)
 27191  		return true
 27192  	}
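	// Signed counterpart of the corresponding Rsh16Ux64 rule: the arithmetic
	// shift back down sign-extends from bit 7, so for x of type int16
	//
	//	(x << 8) >> 8 == int16(int8(x))
	//
	// which is exactly SignExt8to16(Trunc16to8(x)).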
 27193  	return false
 27194  }
 27195  func rewriteValuegeneric_OpRsh16x8(v *Value) bool {
 27196  	v_1 := v.Args[1]
 27197  	v_0 := v.Args[0]
 27198  	b := v.Block
 27199  	// match: (Rsh16x8 <t> x (Const8 [c]))
 27200  	// result: (Rsh16x64 x (Const64 <t> [int64(uint8(c))]))
 27201  	for {
 27202  		t := v.Type
 27203  		x := v_0
 27204  		if v_1.Op != OpConst8 {
 27205  			break
 27206  		}
 27207  		c := auxIntToInt8(v_1.AuxInt)
 27208  		v.reset(OpRsh16x64)
 27209  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 27210  		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
 27211  		v.AddArg2(x, v0)
 27212  		return true
 27213  	}
 27214  	// match: (Rsh16x8 (Const16 [0]) _)
 27215  	// result: (Const16 [0])
 27216  	for {
 27217  		if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
 27218  			break
 27219  		}
 27220  		v.reset(OpConst16)
 27221  		v.AuxInt = int16ToAuxInt(0)
 27222  		return true
 27223  	}
 27224  	return false
 27225  }
 27226  func rewriteValuegeneric_OpRsh32Ux16(v *Value) bool {
 27227  	v_1 := v.Args[1]
 27228  	v_0 := v.Args[0]
 27229  	b := v.Block
 27230  	// match: (Rsh32Ux16 <t> x (Const16 [c]))
 27231  	// result: (Rsh32Ux64 x (Const64 <t> [int64(uint16(c))]))
 27232  	for {
 27233  		t := v.Type
 27234  		x := v_0
 27235  		if v_1.Op != OpConst16 {
 27236  			break
 27237  		}
 27238  		c := auxIntToInt16(v_1.AuxInt)
 27239  		v.reset(OpRsh32Ux64)
 27240  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 27241  		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
 27242  		v.AddArg2(x, v0)
 27243  		return true
 27244  	}
 27245  	// match: (Rsh32Ux16 (Const32 [0]) _)
 27246  	// result: (Const32 [0])
 27247  	for {
 27248  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 27249  			break
 27250  		}
 27251  		v.reset(OpConst32)
 27252  		v.AuxInt = int32ToAuxInt(0)
 27253  		return true
 27254  	}
 27255  	return false
 27256  }
 27257  func rewriteValuegeneric_OpRsh32Ux32(v *Value) bool {
 27258  	v_1 := v.Args[1]
 27259  	v_0 := v.Args[0]
 27260  	b := v.Block
 27261  	// match: (Rsh32Ux32 <t> x (Const32 [c]))
 27262  	// result: (Rsh32Ux64 x (Const64 <t> [int64(uint32(c))]))
 27263  	for {
 27264  		t := v.Type
 27265  		x := v_0
 27266  		if v_1.Op != OpConst32 {
 27267  			break
 27268  		}
 27269  		c := auxIntToInt32(v_1.AuxInt)
 27270  		v.reset(OpRsh32Ux64)
 27271  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 27272  		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
 27273  		v.AddArg2(x, v0)
 27274  		return true
 27275  	}
 27276  	// match: (Rsh32Ux32 (Const32 [0]) _)
 27277  	// result: (Const32 [0])
 27278  	for {
 27279  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 27280  			break
 27281  		}
 27282  		v.reset(OpConst32)
 27283  		v.AuxInt = int32ToAuxInt(0)
 27284  		return true
 27285  	}
 27286  	return false
 27287  }
 27288  func rewriteValuegeneric_OpRsh32Ux64(v *Value) bool {
 27289  	v_1 := v.Args[1]
 27290  	v_0 := v.Args[0]
 27291  	b := v.Block
 27292  	typ := &b.Func.Config.Types
 27293  	// match: (Rsh32Ux64 (Const32 [c]) (Const64 [d]))
 27294  	// result: (Const32 [int32(uint32(c) >> uint64(d))])
 27295  	for {
 27296  		if v_0.Op != OpConst32 {
 27297  			break
 27298  		}
 27299  		c := auxIntToInt32(v_0.AuxInt)
 27300  		if v_1.Op != OpConst64 {
 27301  			break
 27302  		}
 27303  		d := auxIntToInt64(v_1.AuxInt)
 27304  		v.reset(OpConst32)
 27305  		v.AuxInt = int32ToAuxInt(int32(uint32(c) >> uint64(d)))
 27306  		return true
 27307  	}
 27308  	// match: (Rsh32Ux64 x (Const64 [0]))
 27309  	// result: x
 27310  	for {
 27311  		x := v_0
 27312  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
 27313  			break
 27314  		}
 27315  		v.copyOf(x)
 27316  		return true
 27317  	}
 27318  	// match: (Rsh32Ux64 (Const32 [0]) _)
 27319  	// result: (Const32 [0])
 27320  	for {
 27321  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 27322  			break
 27323  		}
 27324  		v.reset(OpConst32)
 27325  		v.AuxInt = int32ToAuxInt(0)
 27326  		return true
 27327  	}
 27328  	// match: (Rsh32Ux64 _ (Const64 [c]))
 27329  	// cond: uint64(c) >= 32
 27330  	// result: (Const32 [0])
 27331  	for {
 27332  		if v_1.Op != OpConst64 {
 27333  			break
 27334  		}
 27335  		c := auxIntToInt64(v_1.AuxInt)
 27336  		if !(uint64(c) >= 32) {
 27337  			break
 27338  		}
 27339  		v.reset(OpConst32)
 27340  		v.AuxInt = int32ToAuxInt(0)
 27341  		return true
 27342  	}
 27343  	// match: (Rsh32Ux64 <t> (Rsh32Ux64 x (Const64 [c])) (Const64 [d]))
 27344  	// cond: !uaddOvf(c,d)
 27345  	// result: (Rsh32Ux64 x (Const64 <t> [c+d]))
 27346  	for {
 27347  		t := v.Type
 27348  		if v_0.Op != OpRsh32Ux64 {
 27349  			break
 27350  		}
 27351  		_ = v_0.Args[1]
 27352  		x := v_0.Args[0]
 27353  		v_0_1 := v_0.Args[1]
 27354  		if v_0_1.Op != OpConst64 {
 27355  			break
 27356  		}
 27357  		c := auxIntToInt64(v_0_1.AuxInt)
 27358  		if v_1.Op != OpConst64 {
 27359  			break
 27360  		}
 27361  		d := auxIntToInt64(v_1.AuxInt)
 27362  		if !(!uaddOvf(c, d)) {
 27363  			break
 27364  		}
 27365  		v.reset(OpRsh32Ux64)
 27366  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 27367  		v0.AuxInt = int64ToAuxInt(c + d)
 27368  		v.AddArg2(x, v0)
 27369  		return true
 27370  	}
 27371  	// match: (Rsh32Ux64 (Rsh32x64 x _) (Const64 <t> [31]))
 27372  	// result: (Rsh32Ux64 x (Const64 <t> [31]))
 27373  	for {
 27374  		if v_0.Op != OpRsh32x64 {
 27375  			break
 27376  		}
 27377  		x := v_0.Args[0]
 27378  		if v_1.Op != OpConst64 {
 27379  			break
 27380  		}
 27381  		t := v_1.Type
 27382  		if auxIntToInt64(v_1.AuxInt) != 31 {
 27383  			break
 27384  		}
 27385  		v.reset(OpRsh32Ux64)
 27386  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 27387  		v0.AuxInt = int64ToAuxInt(31)
 27388  		v.AddArg2(x, v0)
 27389  		return true
 27390  	}
 27391  	// match: (Rsh32Ux64 i:(Lsh32x64 x (Const64 [c])) (Const64 [c]))
 27392  	// cond: c >= 0 && c < 32 && i.Uses == 1
 27393  	// result: (And32 x (Const32 <v.Type> [int32(^uint32(0)>>c)]))
 27394  	for {
 27395  		i := v_0
 27396  		if i.Op != OpLsh32x64 {
 27397  			break
 27398  		}
 27399  		_ = i.Args[1]
 27400  		x := i.Args[0]
 27401  		i_1 := i.Args[1]
 27402  		if i_1.Op != OpConst64 {
 27403  			break
 27404  		}
 27405  		c := auxIntToInt64(i_1.AuxInt)
 27406  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 32 && i.Uses == 1) {
 27407  			break
 27408  		}
 27409  		v.reset(OpAnd32)
 27410  		v0 := b.NewValue0(v.Pos, OpConst32, v.Type)
 27411  		v0.AuxInt = int32ToAuxInt(int32(^uint32(0) >> c))
 27412  		v.AddArg2(x, v0)
 27413  		return true
 27414  	}
 27415  	// match: (Rsh32Ux64 (Lsh32x64 (Rsh32Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
 27416  	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
 27417  	// result: (Rsh32Ux64 x (Const64 <typ.UInt64> [c1-c2+c3]))
 27418  	for {
 27419  		if v_0.Op != OpLsh32x64 {
 27420  			break
 27421  		}
 27422  		_ = v_0.Args[1]
 27423  		v_0_0 := v_0.Args[0]
 27424  		if v_0_0.Op != OpRsh32Ux64 {
 27425  			break
 27426  		}
 27427  		_ = v_0_0.Args[1]
 27428  		x := v_0_0.Args[0]
 27429  		v_0_0_1 := v_0_0.Args[1]
 27430  		if v_0_0_1.Op != OpConst64 {
 27431  			break
 27432  		}
 27433  		c1 := auxIntToInt64(v_0_0_1.AuxInt)
 27434  		v_0_1 := v_0.Args[1]
 27435  		if v_0_1.Op != OpConst64 {
 27436  			break
 27437  		}
 27438  		c2 := auxIntToInt64(v_0_1.AuxInt)
 27439  		if v_1.Op != OpConst64 {
 27440  			break
 27441  		}
 27442  		c3 := auxIntToInt64(v_1.AuxInt)
 27443  		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
 27444  			break
 27445  		}
 27446  		v.reset(OpRsh32Ux64)
 27447  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
 27448  		v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
 27449  		v.AddArg2(x, v0)
 27450  		return true
 27451  	}
 27452  	// match: (Rsh32Ux64 (Lsh32x64 x (Const64 [24])) (Const64 [24]))
 27453  	// result: (ZeroExt8to32 (Trunc32to8 <typ.UInt8> x))
 27454  	for {
 27455  		if v_0.Op != OpLsh32x64 {
 27456  			break
 27457  		}
 27458  		_ = v_0.Args[1]
 27459  		x := v_0.Args[0]
 27460  		v_0_1 := v_0.Args[1]
 27461  		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 24 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 24 {
 27462  			break
 27463  		}
 27464  		v.reset(OpZeroExt8to32)
 27465  		v0 := b.NewValue0(v.Pos, OpTrunc32to8, typ.UInt8)
 27466  		v0.AddArg(x)
 27467  		v.AddArg(v0)
 27468  		return true
 27469  	}
 27470  	// match: (Rsh32Ux64 (Lsh32x64 x (Const64 [16])) (Const64 [16]))
 27471  	// result: (ZeroExt16to32 (Trunc32to16 <typ.UInt16> x))
 27472  	for {
 27473  		if v_0.Op != OpLsh32x64 {
 27474  			break
 27475  		}
 27476  		_ = v_0.Args[1]
 27477  		x := v_0.Args[0]
 27478  		v_0_1 := v_0.Args[1]
 27479  		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 16 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 16 {
 27480  			break
 27481  		}
 27482  		v.reset(OpZeroExt16to32)
 27483  		v0 := b.NewValue0(v.Pos, OpTrunc32to16, typ.UInt16)
 27484  		v0.AddArg(x)
 27485  		v.AddArg(v0)
 27486  		return true
 27487  	}
 27488  	return false
 27489  }
 27490  func rewriteValuegeneric_OpRsh32Ux8(v *Value) bool {
 27491  	v_1 := v.Args[1]
 27492  	v_0 := v.Args[0]
 27493  	b := v.Block
 27494  	// match: (Rsh32Ux8 <t> x (Const8 [c]))
 27495  	// result: (Rsh32Ux64 x (Const64 <t> [int64(uint8(c))]))
 27496  	for {
 27497  		t := v.Type
 27498  		x := v_0
 27499  		if v_1.Op != OpConst8 {
 27500  			break
 27501  		}
 27502  		c := auxIntToInt8(v_1.AuxInt)
 27503  		v.reset(OpRsh32Ux64)
 27504  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 27505  		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
 27506  		v.AddArg2(x, v0)
 27507  		return true
 27508  	}
 27509  	// match: (Rsh32Ux8 (Const32 [0]) _)
 27510  	// result: (Const32 [0])
 27511  	for {
 27512  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 27513  			break
 27514  		}
 27515  		v.reset(OpConst32)
 27516  		v.AuxInt = int32ToAuxInt(0)
 27517  		return true
 27518  	}
 27519  	return false
 27520  }
 27521  func rewriteValuegeneric_OpRsh32x16(v *Value) bool {
 27522  	v_1 := v.Args[1]
 27523  	v_0 := v.Args[0]
 27524  	b := v.Block
 27525  	// match: (Rsh32x16 <t> x (Const16 [c]))
 27526  	// result: (Rsh32x64 x (Const64 <t> [int64(uint16(c))]))
 27527  	for {
 27528  		t := v.Type
 27529  		x := v_0
 27530  		if v_1.Op != OpConst16 {
 27531  			break
 27532  		}
 27533  		c := auxIntToInt16(v_1.AuxInt)
 27534  		v.reset(OpRsh32x64)
 27535  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 27536  		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
 27537  		v.AddArg2(x, v0)
 27538  		return true
 27539  	}
 27540  	// match: (Rsh32x16 (Const32 [0]) _)
 27541  	// result: (Const32 [0])
 27542  	for {
 27543  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 27544  			break
 27545  		}
 27546  		v.reset(OpConst32)
 27547  		v.AuxInt = int32ToAuxInt(0)
 27548  		return true
 27549  	}
 27550  	return false
 27551  }
 27552  func rewriteValuegeneric_OpRsh32x32(v *Value) bool {
 27553  	v_1 := v.Args[1]
 27554  	v_0 := v.Args[0]
 27555  	b := v.Block
 27556  	// match: (Rsh32x32 <t> x (Const32 [c]))
 27557  	// result: (Rsh32x64 x (Const64 <t> [int64(uint32(c))]))
 27558  	for {
 27559  		t := v.Type
 27560  		x := v_0
 27561  		if v_1.Op != OpConst32 {
 27562  			break
 27563  		}
 27564  		c := auxIntToInt32(v_1.AuxInt)
 27565  		v.reset(OpRsh32x64)
 27566  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 27567  		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
 27568  		v.AddArg2(x, v0)
 27569  		return true
 27570  	}
 27571  	// match: (Rsh32x32 (Const32 [0]) _)
 27572  	// result: (Const32 [0])
 27573  	for {
 27574  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 27575  			break
 27576  		}
 27577  		v.reset(OpConst32)
 27578  		v.AuxInt = int32ToAuxInt(0)
 27579  		return true
 27580  	}
 27581  	return false
 27582  }
 27583  func rewriteValuegeneric_OpRsh32x64(v *Value) bool {
 27584  	v_1 := v.Args[1]
 27585  	v_0 := v.Args[0]
 27586  	b := v.Block
 27587  	typ := &b.Func.Config.Types
 27588  	// match: (Rsh32x64 (Const32 [c]) (Const64 [d]))
 27589  	// result: (Const32 [c >> uint64(d)])
 27590  	for {
 27591  		if v_0.Op != OpConst32 {
 27592  			break
 27593  		}
 27594  		c := auxIntToInt32(v_0.AuxInt)
 27595  		if v_1.Op != OpConst64 {
 27596  			break
 27597  		}
 27598  		d := auxIntToInt64(v_1.AuxInt)
 27599  		v.reset(OpConst32)
 27600  		v.AuxInt = int32ToAuxInt(c >> uint64(d))
 27601  		return true
 27602  	}
 27603  	// match: (Rsh32x64 x (Const64 [0]))
 27604  	// result: x
 27605  	for {
 27606  		x := v_0
 27607  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
 27608  			break
 27609  		}
 27610  		v.copyOf(x)
 27611  		return true
 27612  	}
 27613  	// match: (Rsh32x64 (Const32 [0]) _)
 27614  	// result: (Const32 [0])
 27615  	for {
 27616  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 27617  			break
 27618  		}
 27619  		v.reset(OpConst32)
 27620  		v.AuxInt = int32ToAuxInt(0)
 27621  		return true
 27622  	}
 27623  	// match: (Rsh32x64 <t> (Rsh32x64 x (Const64 [c])) (Const64 [d]))
 27624  	// cond: !uaddOvf(c,d)
 27625  	// result: (Rsh32x64 x (Const64 <t> [c+d]))
 27626  	for {
 27627  		t := v.Type
 27628  		if v_0.Op != OpRsh32x64 {
 27629  			break
 27630  		}
 27631  		_ = v_0.Args[1]
 27632  		x := v_0.Args[0]
 27633  		v_0_1 := v_0.Args[1]
 27634  		if v_0_1.Op != OpConst64 {
 27635  			break
 27636  		}
 27637  		c := auxIntToInt64(v_0_1.AuxInt)
 27638  		if v_1.Op != OpConst64 {
 27639  			break
 27640  		}
 27641  		d := auxIntToInt64(v_1.AuxInt)
 27642  		if !(!uaddOvf(c, d)) {
 27643  			break
 27644  		}
 27645  		v.reset(OpRsh32x64)
 27646  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 27647  		v0.AuxInt = int64ToAuxInt(c + d)
 27648  		v.AddArg2(x, v0)
 27649  		return true
 27650  	}
 27651  	// match: (Rsh32x64 (Lsh32x64 x (Const64 [24])) (Const64 [24]))
 27652  	// result: (SignExt8to32 (Trunc32to8 <typ.Int8> x))
 27653  	for {
 27654  		if v_0.Op != OpLsh32x64 {
 27655  			break
 27656  		}
 27657  		_ = v_0.Args[1]
 27658  		x := v_0.Args[0]
 27659  		v_0_1 := v_0.Args[1]
 27660  		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 24 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 24 {
 27661  			break
 27662  		}
 27663  		v.reset(OpSignExt8to32)
 27664  		v0 := b.NewValue0(v.Pos, OpTrunc32to8, typ.Int8)
 27665  		v0.AddArg(x)
 27666  		v.AddArg(v0)
 27667  		return true
 27668  	}
 27669  	// match: (Rsh32x64 (Lsh32x64 x (Const64 [16])) (Const64 [16]))
 27670  	// result: (SignExt16to32 (Trunc32to16 <typ.Int16> x))
 27671  	for {
 27672  		if v_0.Op != OpLsh32x64 {
 27673  			break
 27674  		}
 27675  		_ = v_0.Args[1]
 27676  		x := v_0.Args[0]
 27677  		v_0_1 := v_0.Args[1]
 27678  		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 16 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 16 {
 27679  			break
 27680  		}
 27681  		v.reset(OpSignExt16to32)
 27682  		v0 := b.NewValue0(v.Pos, OpTrunc32to16, typ.Int16)
 27683  		v0.AddArg(x)
 27684  		v.AddArg(v0)
 27685  		return true
 27686  	}
 27687  	return false
 27688  }
 27689  func rewriteValuegeneric_OpRsh32x8(v *Value) bool {
 27690  	v_1 := v.Args[1]
 27691  	v_0 := v.Args[0]
 27692  	b := v.Block
 27693  	// match: (Rsh32x8 <t> x (Const8 [c]))
 27694  	// result: (Rsh32x64 x (Const64 <t> [int64(uint8(c))]))
 27695  	for {
 27696  		t := v.Type
 27697  		x := v_0
 27698  		if v_1.Op != OpConst8 {
 27699  			break
 27700  		}
 27701  		c := auxIntToInt8(v_1.AuxInt)
 27702  		v.reset(OpRsh32x64)
 27703  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 27704  		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
 27705  		v.AddArg2(x, v0)
 27706  		return true
 27707  	}
 27708  	// match: (Rsh32x8 (Const32 [0]) _)
 27709  	// result: (Const32 [0])
 27710  	for {
 27711  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 27712  			break
 27713  		}
 27714  		v.reset(OpConst32)
 27715  		v.AuxInt = int32ToAuxInt(0)
 27716  		return true
 27717  	}
 27718  	return false
 27719  }
 27720  func rewriteValuegeneric_OpRsh64Ux16(v *Value) bool {
 27721  	v_1 := v.Args[1]
 27722  	v_0 := v.Args[0]
 27723  	b := v.Block
 27724  	// match: (Rsh64Ux16 <t> x (Const16 [c]))
 27725  	// result: (Rsh64Ux64 x (Const64 <t> [int64(uint16(c))]))
 27726  	for {
 27727  		t := v.Type
 27728  		x := v_0
 27729  		if v_1.Op != OpConst16 {
 27730  			break
 27731  		}
 27732  		c := auxIntToInt16(v_1.AuxInt)
 27733  		v.reset(OpRsh64Ux64)
 27734  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 27735  		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
 27736  		v.AddArg2(x, v0)
 27737  		return true
 27738  	}
 27739  	// match: (Rsh64Ux16 (Const64 [0]) _)
 27740  	// result: (Const64 [0])
 27741  	for {
 27742  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 27743  			break
 27744  		}
 27745  		v.reset(OpConst64)
 27746  		v.AuxInt = int64ToAuxInt(0)
 27747  		return true
 27748  	}
 27749  	return false
 27750  }
 27751  func rewriteValuegeneric_OpRsh64Ux32(v *Value) bool {
 27752  	v_1 := v.Args[1]
 27753  	v_0 := v.Args[0]
 27754  	b := v.Block
 27755  	// match: (Rsh64Ux32 <t> x (Const32 [c]))
 27756  	// result: (Rsh64Ux64 x (Const64 <t> [int64(uint32(c))]))
 27757  	for {
 27758  		t := v.Type
 27759  		x := v_0
 27760  		if v_1.Op != OpConst32 {
 27761  			break
 27762  		}
 27763  		c := auxIntToInt32(v_1.AuxInt)
 27764  		v.reset(OpRsh64Ux64)
 27765  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 27766  		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
 27767  		v.AddArg2(x, v0)
 27768  		return true
 27769  	}
 27770  	// match: (Rsh64Ux32 (Const64 [0]) _)
 27771  	// result: (Const64 [0])
 27772  	for {
 27773  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 27774  			break
 27775  		}
 27776  		v.reset(OpConst64)
 27777  		v.AuxInt = int64ToAuxInt(0)
 27778  		return true
 27779  	}
 27780  	return false
 27781  }
 27782  func rewriteValuegeneric_OpRsh64Ux64(v *Value) bool {
 27783  	v_1 := v.Args[1]
 27784  	v_0 := v.Args[0]
 27785  	b := v.Block
 27786  	typ := &b.Func.Config.Types
 27787  	// match: (Rsh64Ux64 (Const64 [c]) (Const64 [d]))
 27788  	// result: (Const64 [int64(uint64(c) >> uint64(d))])
 27789  	for {
 27790  		if v_0.Op != OpConst64 {
 27791  			break
 27792  		}
 27793  		c := auxIntToInt64(v_0.AuxInt)
 27794  		if v_1.Op != OpConst64 {
 27795  			break
 27796  		}
 27797  		d := auxIntToInt64(v_1.AuxInt)
 27798  		v.reset(OpConst64)
 27799  		v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d)))
 27800  		return true
 27801  	}
 27802  	// match: (Rsh64Ux64 x (Const64 [0]))
 27803  	// result: x
 27804  	for {
 27805  		x := v_0
 27806  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
 27807  			break
 27808  		}
 27809  		v.copyOf(x)
 27810  		return true
 27811  	}
 27812  	// match: (Rsh64Ux64 (Const64 [0]) _)
 27813  	// result: (Const64 [0])
 27814  	for {
 27815  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 27816  			break
 27817  		}
 27818  		v.reset(OpConst64)
 27819  		v.AuxInt = int64ToAuxInt(0)
 27820  		return true
 27821  	}
 27822  	// match: (Rsh64Ux64 _ (Const64 [c]))
 27823  	// cond: uint64(c) >= 64
 27824  	// result: (Const64 [0])
 27825  	for {
 27826  		if v_1.Op != OpConst64 {
 27827  			break
 27828  		}
 27829  		c := auxIntToInt64(v_1.AuxInt)
 27830  		if !(uint64(c) >= 64) {
 27831  			break
 27832  		}
 27833  		v.reset(OpConst64)
 27834  		v.AuxInt = int64ToAuxInt(0)
 27835  		return true
 27836  	}
 27837  	// match: (Rsh64Ux64 <t> (Rsh64Ux64 x (Const64 [c])) (Const64 [d]))
 27838  	// cond: !uaddOvf(c,d)
 27839  	// result: (Rsh64Ux64 x (Const64 <t> [c+d]))
 27840  	for {
 27841  		t := v.Type
 27842  		if v_0.Op != OpRsh64Ux64 {
 27843  			break
 27844  		}
 27845  		_ = v_0.Args[1]
 27846  		x := v_0.Args[0]
 27847  		v_0_1 := v_0.Args[1]
 27848  		if v_0_1.Op != OpConst64 {
 27849  			break
 27850  		}
 27851  		c := auxIntToInt64(v_0_1.AuxInt)
 27852  		if v_1.Op != OpConst64 {
 27853  			break
 27854  		}
 27855  		d := auxIntToInt64(v_1.AuxInt)
 27856  		if !(!uaddOvf(c, d)) {
 27857  			break
 27858  		}
 27859  		v.reset(OpRsh64Ux64)
 27860  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 27861  		v0.AuxInt = int64ToAuxInt(c + d)
 27862  		v.AddArg2(x, v0)
 27863  		return true
 27864  	}
 27865  	// match: (Rsh64Ux64 (Rsh64x64 x _) (Const64 <t> [63]))
 27866  	// result: (Rsh64Ux64 x (Const64 <t> [63]))
 27867  	for {
 27868  		if v_0.Op != OpRsh64x64 {
 27869  			break
 27870  		}
 27871  		x := v_0.Args[0]
 27872  		if v_1.Op != OpConst64 {
 27873  			break
 27874  		}
 27875  		t := v_1.Type
 27876  		if auxIntToInt64(v_1.AuxInt) != 63 {
 27877  			break
 27878  		}
 27879  		v.reset(OpRsh64Ux64)
 27880  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 27881  		v0.AuxInt = int64ToAuxInt(63)
 27882  		v.AddArg2(x, v0)
 27883  		return true
 27884  	}
 27885  	// match: (Rsh64Ux64 i:(Lsh64x64 x (Const64 [c])) (Const64 [c]))
 27886  	// cond: c >= 0 && c < 64 && i.Uses == 1
 27887  	// result: (And64 x (Const64 <v.Type> [int64(^uint64(0)>>c)]))
 27888  	for {
 27889  		i := v_0
 27890  		if i.Op != OpLsh64x64 {
 27891  			break
 27892  		}
 27893  		_ = i.Args[1]
 27894  		x := i.Args[0]
 27895  		i_1 := i.Args[1]
 27896  		if i_1.Op != OpConst64 {
 27897  			break
 27898  		}
 27899  		c := auxIntToInt64(i_1.AuxInt)
 27900  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 64 && i.Uses == 1) {
 27901  			break
 27902  		}
 27903  		v.reset(OpAnd64)
 27904  		v0 := b.NewValue0(v.Pos, OpConst64, v.Type)
 27905  		v0.AuxInt = int64ToAuxInt(int64(^uint64(0) >> c))
 27906  		v.AddArg2(x, v0)
 27907  		return true
 27908  	}
 27909  	// match: (Rsh64Ux64 (Lsh64x64 (Rsh64Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
 27910  	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
 27911  	// result: (Rsh64Ux64 x (Const64 <typ.UInt64> [c1-c2+c3]))
 27912  	for {
 27913  		if v_0.Op != OpLsh64x64 {
 27914  			break
 27915  		}
 27916  		_ = v_0.Args[1]
 27917  		v_0_0 := v_0.Args[0]
 27918  		if v_0_0.Op != OpRsh64Ux64 {
 27919  			break
 27920  		}
 27921  		_ = v_0_0.Args[1]
 27922  		x := v_0_0.Args[0]
 27923  		v_0_0_1 := v_0_0.Args[1]
 27924  		if v_0_0_1.Op != OpConst64 {
 27925  			break
 27926  		}
 27927  		c1 := auxIntToInt64(v_0_0_1.AuxInt)
 27928  		v_0_1 := v_0.Args[1]
 27929  		if v_0_1.Op != OpConst64 {
 27930  			break
 27931  		}
 27932  		c2 := auxIntToInt64(v_0_1.AuxInt)
 27933  		if v_1.Op != OpConst64 {
 27934  			break
 27935  		}
 27936  		c3 := auxIntToInt64(v_1.AuxInt)
 27937  		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
 27938  			break
 27939  		}
 27940  		v.reset(OpRsh64Ux64)
 27941  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
 27942  		v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
 27943  		v.AddArg2(x, v0)
 27944  		return true
 27945  	}
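	// For the three-shift rule above: after x >> c1 the top c1 bits are zero,
	// so the left shift by c2 <= c1 cannot lose any set bits, and the final
	// right shift by c3 >= c2 leaves a net logical shift, for x of type
	// uint64:
	//
	//	((x >> c1) << c2) >> c3 == x >> (c1 - c2 + c3)
	//
	// The !uaddOvf(c1-c2, c3) check rejects counts whose sum would wrap.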
 27946  	// match: (Rsh64Ux64 (Lsh64x64 x (Const64 [56])) (Const64 [56]))
 27947  	// result: (ZeroExt8to64 (Trunc64to8 <typ.UInt8> x))
 27948  	for {
 27949  		if v_0.Op != OpLsh64x64 {
 27950  			break
 27951  		}
 27952  		_ = v_0.Args[1]
 27953  		x := v_0.Args[0]
 27954  		v_0_1 := v_0.Args[1]
 27955  		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 56 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 56 {
 27956  			break
 27957  		}
 27958  		v.reset(OpZeroExt8to64)
 27959  		v0 := b.NewValue0(v.Pos, OpTrunc64to8, typ.UInt8)
 27960  		v0.AddArg(x)
 27961  		v.AddArg(v0)
 27962  		return true
 27963  	}
 27964  	// match: (Rsh64Ux64 (Lsh64x64 x (Const64 [48])) (Const64 [48]))
 27965  	// result: (ZeroExt16to64 (Trunc64to16 <typ.UInt16> x))
 27966  	for {
 27967  		if v_0.Op != OpLsh64x64 {
 27968  			break
 27969  		}
 27970  		_ = v_0.Args[1]
 27971  		x := v_0.Args[0]
 27972  		v_0_1 := v_0.Args[1]
 27973  		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 48 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 48 {
 27974  			break
 27975  		}
 27976  		v.reset(OpZeroExt16to64)
 27977  		v0 := b.NewValue0(v.Pos, OpTrunc64to16, typ.UInt16)
 27978  		v0.AddArg(x)
 27979  		v.AddArg(v0)
 27980  		return true
 27981  	}
 27982  	// match: (Rsh64Ux64 (Lsh64x64 x (Const64 [32])) (Const64 [32]))
 27983  	// result: (ZeroExt32to64 (Trunc64to32 <typ.UInt32> x))
 27984  	for {
 27985  		if v_0.Op != OpLsh64x64 {
 27986  			break
 27987  		}
 27988  		_ = v_0.Args[1]
 27989  		x := v_0.Args[0]
 27990  		v_0_1 := v_0.Args[1]
 27991  		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 32 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 32 {
 27992  			break
 27993  		}
 27994  		v.reset(OpZeroExt32to64)
 27995  		v0 := b.NewValue0(v.Pos, OpTrunc64to32, typ.UInt32)
 27996  		v0.AddArg(x)
 27997  		v.AddArg(v0)
 27998  		return true
 27999  	}
 28000  	return false
 28001  }
 28002  func rewriteValuegeneric_OpRsh64Ux8(v *Value) bool {
 28003  	v_1 := v.Args[1]
 28004  	v_0 := v.Args[0]
 28005  	b := v.Block
 28006  	// match: (Rsh64Ux8 <t> x (Const8 [c]))
 28007  	// result: (Rsh64Ux64 x (Const64 <t> [int64(uint8(c))]))
 28008  	for {
 28009  		t := v.Type
 28010  		x := v_0
 28011  		if v_1.Op != OpConst8 {
 28012  			break
 28013  		}
 28014  		c := auxIntToInt8(v_1.AuxInt)
 28015  		v.reset(OpRsh64Ux64)
 28016  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 28017  		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
 28018  		v.AddArg2(x, v0)
 28019  		return true
 28020  	}
 28021  	// match: (Rsh64Ux8 (Const64 [0]) _)
 28022  	// result: (Const64 [0])
 28023  	for {
 28024  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 28025  			break
 28026  		}
 28027  		v.reset(OpConst64)
 28028  		v.AuxInt = int64ToAuxInt(0)
 28029  		return true
 28030  	}
 28031  	return false
 28032  }
 28033  func rewriteValuegeneric_OpRsh64x16(v *Value) bool {
 28034  	v_1 := v.Args[1]
 28035  	v_0 := v.Args[0]
 28036  	b := v.Block
 28037  	// match: (Rsh64x16 <t> x (Const16 [c]))
 28038  	// result: (Rsh64x64 x (Const64 <t> [int64(uint16(c))]))
 28039  	for {
 28040  		t := v.Type
 28041  		x := v_0
 28042  		if v_1.Op != OpConst16 {
 28043  			break
 28044  		}
 28045  		c := auxIntToInt16(v_1.AuxInt)
 28046  		v.reset(OpRsh64x64)
 28047  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 28048  		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
 28049  		v.AddArg2(x, v0)
 28050  		return true
 28051  	}
 28052  	// match: (Rsh64x16 (Const64 [0]) _)
 28053  	// result: (Const64 [0])
 28054  	for {
 28055  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 28056  			break
 28057  		}
 28058  		v.reset(OpConst64)
 28059  		v.AuxInt = int64ToAuxInt(0)
 28060  		return true
 28061  	}
 28062  	return false
 28063  }
 28064  func rewriteValuegeneric_OpRsh64x32(v *Value) bool {
 28065  	v_1 := v.Args[1]
 28066  	v_0 := v.Args[0]
 28067  	b := v.Block
 28068  	// match: (Rsh64x32 <t> x (Const32 [c]))
 28069  	// result: (Rsh64x64 x (Const64 <t> [int64(uint32(c))]))
 28070  	for {
 28071  		t := v.Type
 28072  		x := v_0
 28073  		if v_1.Op != OpConst32 {
 28074  			break
 28075  		}
 28076  		c := auxIntToInt32(v_1.AuxInt)
 28077  		v.reset(OpRsh64x64)
 28078  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 28079  		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
 28080  		v.AddArg2(x, v0)
 28081  		return true
 28082  	}
 28083  	// match: (Rsh64x32 (Const64 [0]) _)
 28084  	// result: (Const64 [0])
 28085  	for {
 28086  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 28087  			break
 28088  		}
 28089  		v.reset(OpConst64)
 28090  		v.AuxInt = int64ToAuxInt(0)
 28091  		return true
 28092  	}
 28093  	return false
 28094  }
 28095  func rewriteValuegeneric_OpRsh64x64(v *Value) bool {
 28096  	v_1 := v.Args[1]
 28097  	v_0 := v.Args[0]
 28098  	b := v.Block
 28099  	typ := &b.Func.Config.Types
 28100  	// match: (Rsh64x64 (Const64 [c]) (Const64 [d]))
 28101  	// result: (Const64 [c >> uint64(d)])
 28102  	for {
 28103  		if v_0.Op != OpConst64 {
 28104  			break
 28105  		}
 28106  		c := auxIntToInt64(v_0.AuxInt)
 28107  		if v_1.Op != OpConst64 {
 28108  			break
 28109  		}
 28110  		d := auxIntToInt64(v_1.AuxInt)
 28111  		v.reset(OpConst64)
 28112  		v.AuxInt = int64ToAuxInt(c >> uint64(d))
 28113  		return true
 28114  	}
 28115  	// match: (Rsh64x64 x (Const64 [0]))
 28116  	// result: x
 28117  	for {
 28118  		x := v_0
 28119  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
 28120  			break
 28121  		}
 28122  		v.copyOf(x)
 28123  		return true
 28124  	}
 28125  	// match: (Rsh64x64 (Const64 [0]) _)
 28126  	// result: (Const64 [0])
 28127  	for {
 28128  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 28129  			break
 28130  		}
 28131  		v.reset(OpConst64)
 28132  		v.AuxInt = int64ToAuxInt(0)
 28133  		return true
 28134  	}
 28135  	// match: (Rsh64x64 <t> (Rsh64x64 x (Const64 [c])) (Const64 [d]))
 28136  	// cond: !uaddOvf(c,d)
 28137  	// result: (Rsh64x64 x (Const64 <t> [c+d]))
 28138  	for {
 28139  		t := v.Type
 28140  		if v_0.Op != OpRsh64x64 {
 28141  			break
 28142  		}
 28143  		_ = v_0.Args[1]
 28144  		x := v_0.Args[0]
 28145  		v_0_1 := v_0.Args[1]
 28146  		if v_0_1.Op != OpConst64 {
 28147  			break
 28148  		}
 28149  		c := auxIntToInt64(v_0_1.AuxInt)
 28150  		if v_1.Op != OpConst64 {
 28151  			break
 28152  		}
 28153  		d := auxIntToInt64(v_1.AuxInt)
 28154  		if !(!uaddOvf(c, d)) {
 28155  			break
 28156  		}
 28157  		v.reset(OpRsh64x64)
 28158  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 28159  		v0.AuxInt = int64ToAuxInt(c + d)
 28160  		v.AddArg2(x, v0)
 28161  		return true
 28162  	}
 28163  	// match: (Rsh64x64 (Lsh64x64 x (Const64 [56])) (Const64 [56]))
 28164  	// result: (SignExt8to64 (Trunc64to8 <typ.Int8> x))
 28165  	for {
 28166  		if v_0.Op != OpLsh64x64 {
 28167  			break
 28168  		}
 28169  		_ = v_0.Args[1]
 28170  		x := v_0.Args[0]
 28171  		v_0_1 := v_0.Args[1]
 28172  		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 56 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 56 {
 28173  			break
 28174  		}
 28175  		v.reset(OpSignExt8to64)
 28176  		v0 := b.NewValue0(v.Pos, OpTrunc64to8, typ.Int8)
 28177  		v0.AddArg(x)
 28178  		v.AddArg(v0)
 28179  		return true
 28180  	}
 28181  	// match: (Rsh64x64 (Lsh64x64 x (Const64 [48])) (Const64 [48]))
 28182  	// result: (SignExt16to64 (Trunc64to16 <typ.Int16> x))
 28183  	for {
 28184  		if v_0.Op != OpLsh64x64 {
 28185  			break
 28186  		}
 28187  		_ = v_0.Args[1]
 28188  		x := v_0.Args[0]
 28189  		v_0_1 := v_0.Args[1]
 28190  		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 48 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 48 {
 28191  			break
 28192  		}
 28193  		v.reset(OpSignExt16to64)
 28194  		v0 := b.NewValue0(v.Pos, OpTrunc64to16, typ.Int16)
 28195  		v0.AddArg(x)
 28196  		v.AddArg(v0)
 28197  		return true
 28198  	}
 28199  	// match: (Rsh64x64 (Lsh64x64 x (Const64 [32])) (Const64 [32]))
 28200  	// result: (SignExt32to64 (Trunc64to32 <typ.Int32> x))
 28201  	for {
 28202  		if v_0.Op != OpLsh64x64 {
 28203  			break
 28204  		}
 28205  		_ = v_0.Args[1]
 28206  		x := v_0.Args[0]
 28207  		v_0_1 := v_0.Args[1]
 28208  		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 32 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 32 {
 28209  			break
 28210  		}
 28211  		v.reset(OpSignExt32to64)
 28212  		v0 := b.NewValue0(v.Pos, OpTrunc64to32, typ.Int32)
 28213  		v0.AddArg(x)
 28214  		v.AddArg(v0)
 28215  		return true
 28216  	}
 28217  	return false
 28218  }
 28219  func rewriteValuegeneric_OpRsh64x8(v *Value) bool {
 28220  	v_1 := v.Args[1]
 28221  	v_0 := v.Args[0]
 28222  	b := v.Block
 28223  	// match: (Rsh64x8 <t> x (Const8 [c]))
 28224  	// result: (Rsh64x64 x (Const64 <t> [int64(uint8(c))]))
 28225  	for {
 28226  		t := v.Type
 28227  		x := v_0
 28228  		if v_1.Op != OpConst8 {
 28229  			break
 28230  		}
 28231  		c := auxIntToInt8(v_1.AuxInt)
 28232  		v.reset(OpRsh64x64)
 28233  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 28234  		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
 28235  		v.AddArg2(x, v0)
 28236  		return true
 28237  	}
 28238  	// match: (Rsh64x8 (Const64 [0]) _)
 28239  	// result: (Const64 [0])
 28240  	for {
 28241  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 28242  			break
 28243  		}
 28244  		v.reset(OpConst64)
 28245  		v.AuxInt = int64ToAuxInt(0)
 28246  		return true
 28247  	}
 28248  	return false
 28249  }
 28250  func rewriteValuegeneric_OpRsh8Ux16(v *Value) bool {
 28251  	v_1 := v.Args[1]
 28252  	v_0 := v.Args[0]
 28253  	b := v.Block
 28254  	// match: (Rsh8Ux16 <t> x (Const16 [c]))
 28255  	// result: (Rsh8Ux64 x (Const64 <t> [int64(uint16(c))]))
 28256  	for {
 28257  		t := v.Type
 28258  		x := v_0
 28259  		if v_1.Op != OpConst16 {
 28260  			break
 28261  		}
 28262  		c := auxIntToInt16(v_1.AuxInt)
 28263  		v.reset(OpRsh8Ux64)
 28264  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 28265  		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
 28266  		v.AddArg2(x, v0)
 28267  		return true
 28268  	}
 28269  	// match: (Rsh8Ux16 (Const8 [0]) _)
 28270  	// result: (Const8 [0])
 28271  	for {
 28272  		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 28273  			break
 28274  		}
 28275  		v.reset(OpConst8)
 28276  		v.AuxInt = int8ToAuxInt(0)
 28277  		return true
 28278  	}
 28279  	return false
 28280  }
 28281  func rewriteValuegeneric_OpRsh8Ux32(v *Value) bool {
 28282  	v_1 := v.Args[1]
 28283  	v_0 := v.Args[0]
 28284  	b := v.Block
 28285  	// match: (Rsh8Ux32 <t> x (Const32 [c]))
 28286  	// result: (Rsh8Ux64 x (Const64 <t> [int64(uint32(c))]))
 28287  	for {
 28288  		t := v.Type
 28289  		x := v_0
 28290  		if v_1.Op != OpConst32 {
 28291  			break
 28292  		}
 28293  		c := auxIntToInt32(v_1.AuxInt)
 28294  		v.reset(OpRsh8Ux64)
 28295  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 28296  		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
 28297  		v.AddArg2(x, v0)
 28298  		return true
 28299  	}
 28300  	// match: (Rsh8Ux32 (Const8 [0]) _)
 28301  	// result: (Const8 [0])
 28302  	for {
 28303  		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 28304  			break
 28305  		}
 28306  		v.reset(OpConst8)
 28307  		v.AuxInt = int8ToAuxInt(0)
 28308  		return true
 28309  	}
 28310  	return false
 28311  }
 28312  func rewriteValuegeneric_OpRsh8Ux64(v *Value) bool {
 28313  	v_1 := v.Args[1]
 28314  	v_0 := v.Args[0]
 28315  	b := v.Block
 28316  	typ := &b.Func.Config.Types
 28317  	// match: (Rsh8Ux64 (Const8 [c]) (Const64 [d]))
 28318  	// result: (Const8 [int8(uint8(c) >> uint64(d))])
 28319  	for {
 28320  		if v_0.Op != OpConst8 {
 28321  			break
 28322  		}
 28323  		c := auxIntToInt8(v_0.AuxInt)
 28324  		if v_1.Op != OpConst64 {
 28325  			break
 28326  		}
 28327  		d := auxIntToInt64(v_1.AuxInt)
 28328  		v.reset(OpConst8)
 28329  		v.AuxInt = int8ToAuxInt(int8(uint8(c) >> uint64(d)))
 28330  		return true
 28331  	}
 28332  	// match: (Rsh8Ux64 x (Const64 [0]))
 28333  	// result: x
 28334  	for {
 28335  		x := v_0
 28336  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
 28337  			break
 28338  		}
 28339  		v.copyOf(x)
 28340  		return true
 28341  	}
 28342  	// match: (Rsh8Ux64 (Const8 [0]) _)
 28343  	// result: (Const8 [0])
 28344  	for {
 28345  		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 28346  			break
 28347  		}
 28348  		v.reset(OpConst8)
 28349  		v.AuxInt = int8ToAuxInt(0)
 28350  		return true
 28351  	}
 28352  	// match: (Rsh8Ux64 _ (Const64 [c]))
 28353  	// cond: uint64(c) >= 8
 28354  	// result: (Const8 [0])
 28355  	for {
 28356  		if v_1.Op != OpConst64 {
 28357  			break
 28358  		}
 28359  		c := auxIntToInt64(v_1.AuxInt)
 28360  		if !(uint64(c) >= 8) {
 28361  			break
 28362  		}
 28363  		v.reset(OpConst8)
 28364  		v.AuxInt = int8ToAuxInt(0)
 28365  		return true
 28366  	}
 28367  	// match: (Rsh8Ux64 <t> (Rsh8Ux64 x (Const64 [c])) (Const64 [d]))
 28368  	// cond: !uaddOvf(c,d)
 28369  	// result: (Rsh8Ux64 x (Const64 <t> [c+d]))
 28370  	for {
 28371  		t := v.Type
 28372  		if v_0.Op != OpRsh8Ux64 {
 28373  			break
 28374  		}
 28375  		_ = v_0.Args[1]
 28376  		x := v_0.Args[0]
 28377  		v_0_1 := v_0.Args[1]
 28378  		if v_0_1.Op != OpConst64 {
 28379  			break
 28380  		}
 28381  		c := auxIntToInt64(v_0_1.AuxInt)
 28382  		if v_1.Op != OpConst64 {
 28383  			break
 28384  		}
 28385  		d := auxIntToInt64(v_1.AuxInt)
 28386  		if !(!uaddOvf(c, d)) {
 28387  			break
 28388  		}
 28389  		v.reset(OpRsh8Ux64)
 28390  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 28391  		v0.AuxInt = int64ToAuxInt(c + d)
 28392  		v.AddArg2(x, v0)
 28393  		return true
 28394  	}
 28395  	// match: (Rsh8Ux64 (Rsh8x64 x _) (Const64 <t> [7]))
 28396  	// result: (Rsh8Ux64 x (Const64 <t> [7]))
 28397  	for {
 28398  		if v_0.Op != OpRsh8x64 {
 28399  			break
 28400  		}
 28401  		x := v_0.Args[0]
 28402  		if v_1.Op != OpConst64 {
 28403  			break
 28404  		}
 28405  		t := v_1.Type
 28406  		if auxIntToInt64(v_1.AuxInt) != 7 {
 28407  			break
 28408  		}
 28409  		v.reset(OpRsh8Ux64)
 28410  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 28411  		v0.AuxInt = int64ToAuxInt(7)
 28412  		v.AddArg2(x, v0)
 28413  		return true
 28414  	}
 28415  	// match: (Rsh8Ux64 i:(Lsh8x64 x (Const64 [c])) (Const64 [c]))
 28416  	// cond: c >= 0 && c < 8 && i.Uses == 1
 28417  	// result: (And8 x (Const8 <v.Type> [int8(^uint8(0)>>c)]))
 28418  	for {
 28419  		i := v_0
 28420  		if i.Op != OpLsh8x64 {
 28421  			break
 28422  		}
 28423  		_ = i.Args[1]
 28424  		x := i.Args[0]
 28425  		i_1 := i.Args[1]
 28426  		if i_1.Op != OpConst64 {
 28427  			break
 28428  		}
 28429  		c := auxIntToInt64(i_1.AuxInt)
 28430  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != c || !(c >= 0 && c < 8 && i.Uses == 1) {
 28431  			break
 28432  		}
 28433  		v.reset(OpAnd8)
 28434  		v0 := b.NewValue0(v.Pos, OpConst8, v.Type)
 28435  		v0.AuxInt = int8ToAuxInt(int8(^uint8(0) >> c))
 28436  		v.AddArg2(x, v0)
 28437  		return true
 28438  	}
 28439  	// match: (Rsh8Ux64 (Lsh8x64 (Rsh8Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
 28440  	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
 28441  	// result: (Rsh8Ux64 x (Const64 <typ.UInt64> [c1-c2+c3]))
 28442  	for {
 28443  		if v_0.Op != OpLsh8x64 {
 28444  			break
 28445  		}
 28446  		_ = v_0.Args[1]
 28447  		v_0_0 := v_0.Args[0]
 28448  		if v_0_0.Op != OpRsh8Ux64 {
 28449  			break
 28450  		}
 28451  		_ = v_0_0.Args[1]
 28452  		x := v_0_0.Args[0]
 28453  		v_0_0_1 := v_0_0.Args[1]
 28454  		if v_0_0_1.Op != OpConst64 {
 28455  			break
 28456  		}
 28457  		c1 := auxIntToInt64(v_0_0_1.AuxInt)
 28458  		v_0_1 := v_0.Args[1]
 28459  		if v_0_1.Op != OpConst64 {
 28460  			break
 28461  		}
 28462  		c2 := auxIntToInt64(v_0_1.AuxInt)
 28463  		if v_1.Op != OpConst64 {
 28464  			break
 28465  		}
 28466  		c3 := auxIntToInt64(v_1.AuxInt)
 28467  		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
 28468  			break
 28469  		}
 28470  		v.reset(OpRsh8Ux64)
 28471  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
 28472  		v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
 28473  		v.AddArg2(x, v0)
 28474  		return true
 28475  	}
 28476  	return false
 28477  }
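// Note on the Lsh8x64/Rsh8Ux64 pair rule above: when the intermediate shift has no
// other uses, (x << c) >>u c for 0 <= c < 8 equals x & (0xFF >> c), because the left
// shift drops the top c bits and the unsigned right shift moves the survivors back
// down. With c = 3, for instance, the mask is ^uint8(0)>>3 = 0x1F, so the two shifts
// become a single And8 with 0x1F.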
 28478  func rewriteValuegeneric_OpRsh8Ux8(v *Value) bool {
 28479  	v_1 := v.Args[1]
 28480  	v_0 := v.Args[0]
 28481  	b := v.Block
 28482  	// match: (Rsh8Ux8 <t> x (Const8 [c]))
 28483  	// result: (Rsh8Ux64 x (Const64 <t> [int64(uint8(c))]))
 28484  	for {
 28485  		t := v.Type
 28486  		x := v_0
 28487  		if v_1.Op != OpConst8 {
 28488  			break
 28489  		}
 28490  		c := auxIntToInt8(v_1.AuxInt)
 28491  		v.reset(OpRsh8Ux64)
 28492  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 28493  		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
 28494  		v.AddArg2(x, v0)
 28495  		return true
 28496  	}
 28497  	// match: (Rsh8Ux8 (Const8 [0]) _)
 28498  	// result: (Const8 [0])
 28499  	for {
 28500  		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 28501  			break
 28502  		}
 28503  		v.reset(OpConst8)
 28504  		v.AuxInt = int8ToAuxInt(0)
 28505  		return true
 28506  	}
 28507  	return false
 28508  }
 28509  func rewriteValuegeneric_OpRsh8x16(v *Value) bool {
 28510  	v_1 := v.Args[1]
 28511  	v_0 := v.Args[0]
 28512  	b := v.Block
 28513  	// match: (Rsh8x16 <t> x (Const16 [c]))
 28514  	// result: (Rsh8x64 x (Const64 <t> [int64(uint16(c))]))
 28515  	for {
 28516  		t := v.Type
 28517  		x := v_0
 28518  		if v_1.Op != OpConst16 {
 28519  			break
 28520  		}
 28521  		c := auxIntToInt16(v_1.AuxInt)
 28522  		v.reset(OpRsh8x64)
 28523  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 28524  		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
 28525  		v.AddArg2(x, v0)
 28526  		return true
 28527  	}
 28528  	// match: (Rsh8x16 (Const8 [0]) _)
 28529  	// result: (Const8 [0])
 28530  	for {
 28531  		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 28532  			break
 28533  		}
 28534  		v.reset(OpConst8)
 28535  		v.AuxInt = int8ToAuxInt(0)
 28536  		return true
 28537  	}
 28538  	return false
 28539  }
 28540  func rewriteValuegeneric_OpRsh8x32(v *Value) bool {
 28541  	v_1 := v.Args[1]
 28542  	v_0 := v.Args[0]
 28543  	b := v.Block
 28544  	// match: (Rsh8x32 <t> x (Const32 [c]))
 28545  	// result: (Rsh8x64 x (Const64 <t> [int64(uint32(c))]))
 28546  	for {
 28547  		t := v.Type
 28548  		x := v_0
 28549  		if v_1.Op != OpConst32 {
 28550  			break
 28551  		}
 28552  		c := auxIntToInt32(v_1.AuxInt)
 28553  		v.reset(OpRsh8x64)
 28554  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 28555  		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
 28556  		v.AddArg2(x, v0)
 28557  		return true
 28558  	}
 28559  	// match: (Rsh8x32 (Const8 [0]) _)
 28560  	// result: (Const8 [0])
 28561  	for {
 28562  		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 28563  			break
 28564  		}
 28565  		v.reset(OpConst8)
 28566  		v.AuxInt = int8ToAuxInt(0)
 28567  		return true
 28568  	}
 28569  	return false
 28570  }
 28571  func rewriteValuegeneric_OpRsh8x64(v *Value) bool {
 28572  	v_1 := v.Args[1]
 28573  	v_0 := v.Args[0]
 28574  	b := v.Block
 28575  	// match: (Rsh8x64 (Const8 [c]) (Const64 [d]))
 28576  	// result: (Const8 [c >> uint64(d)])
 28577  	for {
 28578  		if v_0.Op != OpConst8 {
 28579  			break
 28580  		}
 28581  		c := auxIntToInt8(v_0.AuxInt)
 28582  		if v_1.Op != OpConst64 {
 28583  			break
 28584  		}
 28585  		d := auxIntToInt64(v_1.AuxInt)
 28586  		v.reset(OpConst8)
 28587  		v.AuxInt = int8ToAuxInt(c >> uint64(d))
 28588  		return true
 28589  	}
 28590  	// match: (Rsh8x64 x (Const64 [0]))
 28591  	// result: x
 28592  	for {
 28593  		x := v_0
 28594  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
 28595  			break
 28596  		}
 28597  		v.copyOf(x)
 28598  		return true
 28599  	}
 28600  	// match: (Rsh8x64 (Const8 [0]) _)
 28601  	// result: (Const8 [0])
 28602  	for {
 28603  		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 28604  			break
 28605  		}
 28606  		v.reset(OpConst8)
 28607  		v.AuxInt = int8ToAuxInt(0)
 28608  		return true
 28609  	}
 28610  	// match: (Rsh8x64 <t> (Rsh8x64 x (Const64 [c])) (Const64 [d]))
 28611  	// cond: !uaddOvf(c,d)
 28612  	// result: (Rsh8x64 x (Const64 <t> [c+d]))
 28613  	for {
 28614  		t := v.Type
 28615  		if v_0.Op != OpRsh8x64 {
 28616  			break
 28617  		}
 28618  		_ = v_0.Args[1]
 28619  		x := v_0.Args[0]
 28620  		v_0_1 := v_0.Args[1]
 28621  		if v_0_1.Op != OpConst64 {
 28622  			break
 28623  		}
 28624  		c := auxIntToInt64(v_0_1.AuxInt)
 28625  		if v_1.Op != OpConst64 {
 28626  			break
 28627  		}
 28628  		d := auxIntToInt64(v_1.AuxInt)
 28629  		if !(!uaddOvf(c, d)) {
 28630  			break
 28631  		}
 28632  		v.reset(OpRsh8x64)
 28633  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 28634  		v0.AuxInt = int64ToAuxInt(c + d)
 28635  		v.AddArg2(x, v0)
 28636  		return true
 28637  	}
 28638  	return false
 28639  }
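// Note: Rsh8x64 is the arithmetic (sign-propagating) right shift, so the constant fold
// above computes c >> uint64(d) on the int8 value itself. For c = -16 (0xF0) and d = 2
// it folds to -4 (0xFC), keeping the sign bit set, whereas the unsigned Rsh8Ux64 fold
// of the same bits yields 0x3C.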
 28640  func rewriteValuegeneric_OpRsh8x8(v *Value) bool {
 28641  	v_1 := v.Args[1]
 28642  	v_0 := v.Args[0]
 28643  	b := v.Block
 28644  	// match: (Rsh8x8 <t> x (Const8 [c]))
 28645  	// result: (Rsh8x64 x (Const64 <t> [int64(uint8(c))]))
 28646  	for {
 28647  		t := v.Type
 28648  		x := v_0
 28649  		if v_1.Op != OpConst8 {
 28650  			break
 28651  		}
 28652  		c := auxIntToInt8(v_1.AuxInt)
 28653  		v.reset(OpRsh8x64)
 28654  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 28655  		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
 28656  		v.AddArg2(x, v0)
 28657  		return true
 28658  	}
 28659  	// match: (Rsh8x8 (Const8 [0]) _)
 28660  	// result: (Const8 [0])
 28661  	for {
 28662  		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 28663  			break
 28664  		}
 28665  		v.reset(OpConst8)
 28666  		v.AuxInt = int8ToAuxInt(0)
 28667  		return true
 28668  	}
 28669  	return false
 28670  }
 28671  func rewriteValuegeneric_OpSelect0(v *Value) bool {
 28672  	v_0 := v.Args[0]
 28673  	// match: (Select0 (Div128u (Const64 [0]) lo y))
 28674  	// result: (Div64u lo y)
 28675  	for {
 28676  		if v_0.Op != OpDiv128u {
 28677  			break
 28678  		}
 28679  		y := v_0.Args[2]
 28680  		v_0_0 := v_0.Args[0]
 28681  		if v_0_0.Op != OpConst64 || auxIntToInt64(v_0_0.AuxInt) != 0 {
 28682  			break
 28683  		}
 28684  		lo := v_0.Args[1]
 28685  		v.reset(OpDiv64u)
 28686  		v.AddArg2(lo, y)
 28687  		return true
 28688  	}
 28689  	// match: (Select0 (Mul32uover (Const32 [1]) x))
 28690  	// result: x
 28691  	for {
 28692  		if v_0.Op != OpMul32uover {
 28693  			break
 28694  		}
 28695  		_ = v_0.Args[1]
 28696  		v_0_0 := v_0.Args[0]
 28697  		v_0_1 := v_0.Args[1]
 28698  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 28699  			if v_0_0.Op != OpConst32 || auxIntToInt32(v_0_0.AuxInt) != 1 {
 28700  				continue
 28701  			}
 28702  			x := v_0_1
 28703  			v.copyOf(x)
 28704  			return true
 28705  		}
 28706  		break
 28707  	}
 28708  	// match: (Select0 (Mul64uover (Const64 [1]) x))
 28709  	// result: x
 28710  	for {
 28711  		if v_0.Op != OpMul64uover {
 28712  			break
 28713  		}
 28714  		_ = v_0.Args[1]
 28715  		v_0_0 := v_0.Args[0]
 28716  		v_0_1 := v_0.Args[1]
 28717  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 28718  			if v_0_0.Op != OpConst64 || auxIntToInt64(v_0_0.AuxInt) != 1 {
 28719  				continue
 28720  			}
 28721  			x := v_0_1
 28722  			v.copyOf(x)
 28723  			return true
 28724  		}
 28725  		break
 28726  	}
 28727  	// match: (Select0 (Mul64uover (Const64 [0]) x))
 28728  	// result: (Const64 [0])
 28729  	for {
 28730  		if v_0.Op != OpMul64uover {
 28731  			break
 28732  		}
 28733  		_ = v_0.Args[1]
 28734  		v_0_0 := v_0.Args[0]
 28735  		v_0_1 := v_0.Args[1]
 28736  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 28737  			if v_0_0.Op != OpConst64 || auxIntToInt64(v_0_0.AuxInt) != 0 {
 28738  				continue
 28739  			}
 28740  			v.reset(OpConst64)
 28741  			v.AuxInt = int64ToAuxInt(0)
 28742  			return true
 28743  		}
 28744  		break
 28745  	}
 28746  	// match: (Select0 (Mul32uover (Const32 [0]) x))
 28747  	// result: (Const32 [0])
 28748  	for {
 28749  		if v_0.Op != OpMul32uover {
 28750  			break
 28751  		}
 28752  		_ = v_0.Args[1]
 28753  		v_0_0 := v_0.Args[0]
 28754  		v_0_1 := v_0.Args[1]
 28755  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 28756  			if v_0_0.Op != OpConst32 || auxIntToInt32(v_0_0.AuxInt) != 0 {
 28757  				continue
 28758  			}
 28759  			v.reset(OpConst32)
 28760  			v.AuxInt = int32ToAuxInt(0)
 28761  			return true
 28762  		}
 28763  		break
 28764  	}
 28765  	return false
 28766  }
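// Note: for the Mul32uover/Mul64uover rules above, Select0 projects out the (possibly
// wrapped) product. Multiplying by the constant 1 returns the other operand unchanged
// and multiplying by 0 always yields 0, so Select0 folds to x or to a zero constant.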
 28767  func rewriteValuegeneric_OpSelect1(v *Value) bool {
 28768  	v_0 := v.Args[0]
 28769  	// match: (Select1 (Div128u (Const64 [0]) lo y))
 28770  	// result: (Mod64u lo y)
 28771  	for {
 28772  		if v_0.Op != OpDiv128u {
 28773  			break
 28774  		}
 28775  		y := v_0.Args[2]
 28776  		v_0_0 := v_0.Args[0]
 28777  		if v_0_0.Op != OpConst64 || auxIntToInt64(v_0_0.AuxInt) != 0 {
 28778  			break
 28779  		}
 28780  		lo := v_0.Args[1]
 28781  		v.reset(OpMod64u)
 28782  		v.AddArg2(lo, y)
 28783  		return true
 28784  	}
 28785  	// match: (Select1 (Mul32uover (Const32 [1]) x))
 28786  	// result: (ConstBool [false])
 28787  	for {
 28788  		if v_0.Op != OpMul32uover {
 28789  			break
 28790  		}
 28791  		_ = v_0.Args[1]
 28792  		v_0_0 := v_0.Args[0]
 28793  		v_0_1 := v_0.Args[1]
 28794  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 28795  			if v_0_0.Op != OpConst32 || auxIntToInt32(v_0_0.AuxInt) != 1 {
 28796  				continue
 28797  			}
 28798  			v.reset(OpConstBool)
 28799  			v.AuxInt = boolToAuxInt(false)
 28800  			return true
 28801  		}
 28802  		break
 28803  	}
 28804  	// match: (Select1 (Mul64uover (Const64 [1]) x))
 28805  	// result: (ConstBool [false])
 28806  	for {
 28807  		if v_0.Op != OpMul64uover {
 28808  			break
 28809  		}
 28810  		_ = v_0.Args[1]
 28811  		v_0_0 := v_0.Args[0]
 28812  		v_0_1 := v_0.Args[1]
 28813  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 28814  			if v_0_0.Op != OpConst64 || auxIntToInt64(v_0_0.AuxInt) != 1 {
 28815  				continue
 28816  			}
 28817  			v.reset(OpConstBool)
 28818  			v.AuxInt = boolToAuxInt(false)
 28819  			return true
 28820  		}
 28821  		break
 28822  	}
 28823  	// match: (Select1 (Mul64uover (Const64 [0]) x))
 28824  	// result: (ConstBool [false])
 28825  	for {
 28826  		if v_0.Op != OpMul64uover {
 28827  			break
 28828  		}
 28829  		_ = v_0.Args[1]
 28830  		v_0_0 := v_0.Args[0]
 28831  		v_0_1 := v_0.Args[1]
 28832  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 28833  			if v_0_0.Op != OpConst64 || auxIntToInt64(v_0_0.AuxInt) != 0 {
 28834  				continue
 28835  			}
 28836  			v.reset(OpConstBool)
 28837  			v.AuxInt = boolToAuxInt(false)
 28838  			return true
 28839  		}
 28840  		break
 28841  	}
 28842  	// match: (Select1 (Mul32uover (Const32 [0]) x))
 28843  	// result: (ConstBool [false])
 28844  	for {
 28845  		if v_0.Op != OpMul32uover {
 28846  			break
 28847  		}
 28848  		_ = v_0.Args[1]
 28849  		v_0_0 := v_0.Args[0]
 28850  		v_0_1 := v_0.Args[1]
 28851  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 28852  			if v_0_0.Op != OpConst32 || auxIntToInt32(v_0_0.AuxInt) != 0 {
 28853  				continue
 28854  			}
 28855  			v.reset(OpConstBool)
 28856  			v.AuxInt = boolToAuxInt(false)
 28857  			return true
 28858  		}
 28859  		break
 28860  	}
 28861  	return false
 28862  }
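// Note: Select1 of Mul32uover/Mul64uover projects out the overflow flag. A multiply by
// 0 or by 1 can never overflow, since the product is 0 or the other operand, so every
// Mul*uover rule above folds the flag to ConstBool [false].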
 28863  func rewriteValuegeneric_OpSelectN(v *Value) bool {
 28864  	v_0 := v.Args[0]
 28865  	b := v.Block
 28866  	config := b.Func.Config
 28867  	typ := &b.Func.Config.Types
 28868  	// match: (SelectN [0] (MakeResult x ___))
 28869  	// result: x
 28870  	for {
 28871  		if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpMakeResult || len(v_0.Args) < 1 {
 28872  			break
 28873  		}
 28874  		x := v_0.Args[0]
 28875  		v.copyOf(x)
 28876  		return true
 28877  	}
 28878  	// match: (SelectN [1] (MakeResult x y ___))
 28879  	// result: y
 28880  	for {
 28881  		if auxIntToInt64(v.AuxInt) != 1 || v_0.Op != OpMakeResult || len(v_0.Args) < 2 {
 28882  			break
 28883  		}
 28884  		y := v_0.Args[1]
 28885  		v.copyOf(y)
 28886  		return true
 28887  	}
 28888  	// match: (SelectN [2] (MakeResult x y z ___))
 28889  	// result: z
 28890  	for {
 28891  		if auxIntToInt64(v.AuxInt) != 2 || v_0.Op != OpMakeResult || len(v_0.Args) < 3 {
 28892  			break
 28893  		}
 28894  		z := v_0.Args[2]
 28895  		v.copyOf(z)
 28896  		return true
 28897  	}
 28898  	// match: (SelectN [0] call:(StaticCall {sym} sptr (Const64 [c]) mem))
 28899  	// cond: isInlinableMemclr(config, int64(c)) && isSameCall(sym, "runtime.memclrNoHeapPointers") && call.Uses == 1 && clobber(call)
 28900  	// result: (Zero {types.Types[types.TUINT8]} [int64(c)] sptr mem)
 28901  	for {
 28902  		if auxIntToInt64(v.AuxInt) != 0 {
 28903  			break
 28904  		}
 28905  		call := v_0
 28906  		if call.Op != OpStaticCall || len(call.Args) != 3 {
 28907  			break
 28908  		}
 28909  		sym := auxToCall(call.Aux)
 28910  		mem := call.Args[2]
 28911  		sptr := call.Args[0]
 28912  		call_1 := call.Args[1]
 28913  		if call_1.Op != OpConst64 {
 28914  			break
 28915  		}
 28916  		c := auxIntToInt64(call_1.AuxInt)
 28917  		if !(isInlinableMemclr(config, int64(c)) && isSameCall(sym, "runtime.memclrNoHeapPointers") && call.Uses == 1 && clobber(call)) {
 28918  			break
 28919  		}
 28920  		v.reset(OpZero)
 28921  		v.AuxInt = int64ToAuxInt(int64(c))
 28922  		v.Aux = typeToAux(types.Types[types.TUINT8])
 28923  		v.AddArg2(sptr, mem)
 28924  		return true
 28925  	}
 28926  	// match: (SelectN [0] call:(StaticCall {sym} sptr (Const32 [c]) mem))
 28927  	// cond: isInlinableMemclr(config, int64(c)) && isSameCall(sym, "runtime.memclrNoHeapPointers") && call.Uses == 1 && clobber(call)
 28928  	// result: (Zero {types.Types[types.TUINT8]} [int64(c)] sptr mem)
 28929  	for {
 28930  		if auxIntToInt64(v.AuxInt) != 0 {
 28931  			break
 28932  		}
 28933  		call := v_0
 28934  		if call.Op != OpStaticCall || len(call.Args) != 3 {
 28935  			break
 28936  		}
 28937  		sym := auxToCall(call.Aux)
 28938  		mem := call.Args[2]
 28939  		sptr := call.Args[0]
 28940  		call_1 := call.Args[1]
 28941  		if call_1.Op != OpConst32 {
 28942  			break
 28943  		}
 28944  		c := auxIntToInt32(call_1.AuxInt)
 28945  		if !(isInlinableMemclr(config, int64(c)) && isSameCall(sym, "runtime.memclrNoHeapPointers") && call.Uses == 1 && clobber(call)) {
 28946  			break
 28947  		}
 28948  		v.reset(OpZero)
 28949  		v.AuxInt = int64ToAuxInt(int64(c))
 28950  		v.Aux = typeToAux(types.Types[types.TUINT8])
 28951  		v.AddArg2(sptr, mem)
 28952  		return true
 28953  	}
 28954  	// match: (SelectN [0] call:(StaticCall {sym} s1:(Store _ (Const64 [sz]) s2:(Store _ src s3:(Store {t} _ dst mem)))))
 28955  	// cond: sz >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3, call)
 28956  	// result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
 28957  	for {
 28958  		if auxIntToInt64(v.AuxInt) != 0 {
 28959  			break
 28960  		}
 28961  		call := v_0
 28962  		if call.Op != OpStaticCall || len(call.Args) != 1 {
 28963  			break
 28964  		}
 28965  		sym := auxToCall(call.Aux)
 28966  		s1 := call.Args[0]
 28967  		if s1.Op != OpStore {
 28968  			break
 28969  		}
 28970  		_ = s1.Args[2]
 28971  		s1_1 := s1.Args[1]
 28972  		if s1_1.Op != OpConst64 {
 28973  			break
 28974  		}
 28975  		sz := auxIntToInt64(s1_1.AuxInt)
 28976  		s2 := s1.Args[2]
 28977  		if s2.Op != OpStore {
 28978  			break
 28979  		}
 28980  		_ = s2.Args[2]
 28981  		src := s2.Args[1]
 28982  		s3 := s2.Args[2]
 28983  		if s3.Op != OpStore {
 28984  			break
 28985  		}
 28986  		mem := s3.Args[2]
 28987  		dst := s3.Args[1]
 28988  		if !(sz >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3, call)) {
 28989  			break
 28990  		}
 28991  		v.reset(OpMove)
 28992  		v.AuxInt = int64ToAuxInt(int64(sz))
 28993  		v.Aux = typeToAux(types.Types[types.TUINT8])
 28994  		v.AddArg3(dst, src, mem)
 28995  		return true
 28996  	}
 28997  	// match: (SelectN [0] call:(StaticCall {sym} s1:(Store _ (Const32 [sz]) s2:(Store _ src s3:(Store {t} _ dst mem)))))
 28998  	// cond: sz >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3, call)
 28999  	// result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
 29000  	for {
 29001  		if auxIntToInt64(v.AuxInt) != 0 {
 29002  			break
 29003  		}
 29004  		call := v_0
 29005  		if call.Op != OpStaticCall || len(call.Args) != 1 {
 29006  			break
 29007  		}
 29008  		sym := auxToCall(call.Aux)
 29009  		s1 := call.Args[0]
 29010  		if s1.Op != OpStore {
 29011  			break
 29012  		}
 29013  		_ = s1.Args[2]
 29014  		s1_1 := s1.Args[1]
 29015  		if s1_1.Op != OpConst32 {
 29016  			break
 29017  		}
 29018  		sz := auxIntToInt32(s1_1.AuxInt)
 29019  		s2 := s1.Args[2]
 29020  		if s2.Op != OpStore {
 29021  			break
 29022  		}
 29023  		_ = s2.Args[2]
 29024  		src := s2.Args[1]
 29025  		s3 := s2.Args[2]
 29026  		if s3.Op != OpStore {
 29027  			break
 29028  		}
 29029  		mem := s3.Args[2]
 29030  		dst := s3.Args[1]
 29031  		if !(sz >= 0 && isSameCall(sym, "runtime.memmove") && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst, src, int64(sz), config) && clobber(s1, s2, s3, call)) {
 29032  			break
 29033  		}
 29034  		v.reset(OpMove)
 29035  		v.AuxInt = int64ToAuxInt(int64(sz))
 29036  		v.Aux = typeToAux(types.Types[types.TUINT8])
 29037  		v.AddArg3(dst, src, mem)
 29038  		return true
 29039  	}
 29040  	// match: (SelectN [0] call:(StaticCall {sym} dst src (Const64 [sz]) mem))
 29041  	// cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
 29042  	// result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
 29043  	for {
 29044  		if auxIntToInt64(v.AuxInt) != 0 {
 29045  			break
 29046  		}
 29047  		call := v_0
 29048  		if call.Op != OpStaticCall || len(call.Args) != 4 {
 29049  			break
 29050  		}
 29051  		sym := auxToCall(call.Aux)
 29052  		mem := call.Args[3]
 29053  		dst := call.Args[0]
 29054  		src := call.Args[1]
 29055  		call_2 := call.Args[2]
 29056  		if call_2.Op != OpConst64 {
 29057  			break
 29058  		}
 29059  		sz := auxIntToInt64(call_2.AuxInt)
 29060  		if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
 29061  			break
 29062  		}
 29063  		v.reset(OpMove)
 29064  		v.AuxInt = int64ToAuxInt(int64(sz))
 29065  		v.Aux = typeToAux(types.Types[types.TUINT8])
 29066  		v.AddArg3(dst, src, mem)
 29067  		return true
 29068  	}
 29069  	// match: (SelectN [0] call:(StaticCall {sym} dst src (Const32 [sz]) mem))
 29070  	// cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
 29071  	// result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
 29072  	for {
 29073  		if auxIntToInt64(v.AuxInt) != 0 {
 29074  			break
 29075  		}
 29076  		call := v_0
 29077  		if call.Op != OpStaticCall || len(call.Args) != 4 {
 29078  			break
 29079  		}
 29080  		sym := auxToCall(call.Aux)
 29081  		mem := call.Args[3]
 29082  		dst := call.Args[0]
 29083  		src := call.Args[1]
 29084  		call_2 := call.Args[2]
 29085  		if call_2.Op != OpConst32 {
 29086  			break
 29087  		}
 29088  		sz := auxIntToInt32(call_2.AuxInt)
 29089  		if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
 29090  			break
 29091  		}
 29092  		v.reset(OpMove)
 29093  		v.AuxInt = int64ToAuxInt(int64(sz))
 29094  		v.Aux = typeToAux(types.Types[types.TUINT8])
 29095  		v.AddArg3(dst, src, mem)
 29096  		return true
 29097  	}
 29098  	// match: (SelectN [0] call:(StaticLECall {sym} dst src (Const64 [sz]) mem))
 29099  	// cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
 29100  	// result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
 29101  	for {
 29102  		if auxIntToInt64(v.AuxInt) != 0 {
 29103  			break
 29104  		}
 29105  		call := v_0
 29106  		if call.Op != OpStaticLECall || len(call.Args) != 4 {
 29107  			break
 29108  		}
 29109  		sym := auxToCall(call.Aux)
 29110  		mem := call.Args[3]
 29111  		dst := call.Args[0]
 29112  		src := call.Args[1]
 29113  		call_2 := call.Args[2]
 29114  		if call_2.Op != OpConst64 {
 29115  			break
 29116  		}
 29117  		sz := auxIntToInt64(call_2.AuxInt)
 29118  		if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
 29119  			break
 29120  		}
 29121  		v.reset(OpMove)
 29122  		v.AuxInt = int64ToAuxInt(int64(sz))
 29123  		v.Aux = typeToAux(types.Types[types.TUINT8])
 29124  		v.AddArg3(dst, src, mem)
 29125  		return true
 29126  	}
 29127  	// match: (SelectN [0] call:(StaticLECall {sym} dst src (Const32 [sz]) mem))
 29128  	// cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
 29129  	// result: (Move {types.Types[types.TUINT8]} [int64(sz)] dst src mem)
 29130  	for {
 29131  		if auxIntToInt64(v.AuxInt) != 0 {
 29132  			break
 29133  		}
 29134  		call := v_0
 29135  		if call.Op != OpStaticLECall || len(call.Args) != 4 {
 29136  			break
 29137  		}
 29138  		sym := auxToCall(call.Aux)
 29139  		mem := call.Args[3]
 29140  		dst := call.Args[0]
 29141  		src := call.Args[1]
 29142  		call_2 := call.Args[2]
 29143  		if call_2.Op != OpConst32 {
 29144  			break
 29145  		}
 29146  		sz := auxIntToInt32(call_2.AuxInt)
 29147  		if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
 29148  			break
 29149  		}
 29150  		v.reset(OpMove)
 29151  		v.AuxInt = int64ToAuxInt(int64(sz))
 29152  		v.Aux = typeToAux(types.Types[types.TUINT8])
 29153  		v.AddArg3(dst, src, mem)
 29154  		return true
 29155  	}
 29156  	// match: (SelectN [0] call:(StaticLECall {sym} a x))
 29157  	// cond: needRaceCleanup(sym, call) && clobber(call)
 29158  	// result: x
 29159  	for {
 29160  		if auxIntToInt64(v.AuxInt) != 0 {
 29161  			break
 29162  		}
 29163  		call := v_0
 29164  		if call.Op != OpStaticLECall || len(call.Args) != 2 {
 29165  			break
 29166  		}
 29167  		sym := auxToCall(call.Aux)
 29168  		x := call.Args[1]
 29169  		if !(needRaceCleanup(sym, call) && clobber(call)) {
 29170  			break
 29171  		}
 29172  		v.copyOf(x)
 29173  		return true
 29174  	}
 29175  	// match: (SelectN [0] call:(StaticLECall {sym} x))
 29176  	// cond: needRaceCleanup(sym, call) && clobber(call)
 29177  	// result: x
 29178  	for {
 29179  		if auxIntToInt64(v.AuxInt) != 0 {
 29180  			break
 29181  		}
 29182  		call := v_0
 29183  		if call.Op != OpStaticLECall || len(call.Args) != 1 {
 29184  			break
 29185  		}
 29186  		sym := auxToCall(call.Aux)
 29187  		x := call.Args[0]
 29188  		if !(needRaceCleanup(sym, call) && clobber(call)) {
 29189  			break
 29190  		}
 29191  		v.copyOf(x)
 29192  		return true
 29193  	}
 29194  	// match: (SelectN [1] (StaticCall {sym} _ newLen:(Const64) _ _ _ _))
 29195  	// cond: v.Type.IsInteger() && isSameCall(sym, "runtime.growslice")
 29196  	// result: newLen
 29197  	for {
 29198  		if auxIntToInt64(v.AuxInt) != 1 || v_0.Op != OpStaticCall || len(v_0.Args) != 6 {
 29199  			break
 29200  		}
 29201  		sym := auxToCall(v_0.Aux)
 29202  		_ = v_0.Args[1]
 29203  		newLen := v_0.Args[1]
 29204  		if newLen.Op != OpConst64 || !(v.Type.IsInteger() && isSameCall(sym, "runtime.growslice")) {
 29205  			break
 29206  		}
 29207  		v.copyOf(newLen)
 29208  		return true
 29209  	}
 29210  	// match: (SelectN [1] (StaticCall {sym} _ newLen:(Const32) _ _ _ _))
 29211  	// cond: v.Type.IsInteger() && isSameCall(sym, "runtime.growslice")
 29212  	// result: newLen
 29213  	for {
 29214  		if auxIntToInt64(v.AuxInt) != 1 || v_0.Op != OpStaticCall || len(v_0.Args) != 6 {
 29215  			break
 29216  		}
 29217  		sym := auxToCall(v_0.Aux)
 29218  		_ = v_0.Args[1]
 29219  		newLen := v_0.Args[1]
 29220  		if newLen.Op != OpConst32 || !(v.Type.IsInteger() && isSameCall(sym, "runtime.growslice")) {
 29221  			break
 29222  		}
 29223  		v.copyOf(newLen)
 29224  		return true
 29225  	}
 29226  	// match: (SelectN [0] (StaticLECall {f} x y (SelectN [1] c:(StaticLECall {g} x y mem))))
 29227  	// cond: isSameCall(f, "runtime.cmpstring") && isSameCall(g, "runtime.cmpstring")
 29228  	// result: @c.Block (SelectN [0] <typ.Int> c)
 29229  	for {
 29230  		if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpStaticLECall || len(v_0.Args) != 3 {
 29231  			break
 29232  		}
 29233  		f := auxToCall(v_0.Aux)
 29234  		_ = v_0.Args[2]
 29235  		x := v_0.Args[0]
 29236  		y := v_0.Args[1]
 29237  		v_0_2 := v_0.Args[2]
 29238  		if v_0_2.Op != OpSelectN || auxIntToInt64(v_0_2.AuxInt) != 1 {
 29239  			break
 29240  		}
 29241  		c := v_0_2.Args[0]
 29242  		if c.Op != OpStaticLECall || len(c.Args) != 3 {
 29243  			break
 29244  		}
 29245  		g := auxToCall(c.Aux)
 29246  		if x != c.Args[0] || y != c.Args[1] || !(isSameCall(f, "runtime.cmpstring") && isSameCall(g, "runtime.cmpstring")) {
 29247  			break
 29248  		}
 29249  		b = c.Block
 29250  		v0 := b.NewValue0(v.Pos, OpSelectN, typ.Int)
 29251  		v.copyOf(v0)
 29252  		v0.AuxInt = int64ToAuxInt(0)
 29253  		v0.AddArg(c)
 29254  		return true
 29255  	}
 29256  	// match: (SelectN [1] c:(StaticLECall {f} _ _ mem))
 29257  	// cond: c.Uses == 1 && isSameCall(f, "runtime.cmpstring") && clobber(c)
 29258  	// result: mem
 29259  	for {
 29260  		if auxIntToInt64(v.AuxInt) != 1 {
 29261  			break
 29262  		}
 29263  		c := v_0
 29264  		if c.Op != OpStaticLECall || len(c.Args) != 3 {
 29265  			break
 29266  		}
 29267  		f := auxToCall(c.Aux)
 29268  		mem := c.Args[2]
 29269  		if !(c.Uses == 1 && isSameCall(f, "runtime.cmpstring") && clobber(c)) {
 29270  			break
 29271  		}
 29272  		v.copyOf(mem)
 29273  		return true
 29274  	}
 29275  	return false
 29276  }
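// Note: the runtime.memclrNoHeapPointers and runtime.memmove rules above fire only when
// the byte count is a constant accepted by isInlinableMemclr/isInlinableMemmove and the
// call (plus, in the store-chain forms, its argument stores) has no other uses, so
// clobber can discard it. The call then collapses into a single Zero or Move of that
// constant size; a constant 16-byte memmove becomes, roughly, (Move [16] dst src mem),
// which later lowering expands into ordinary loads and stores.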
 29277  func rewriteValuegeneric_OpSignExt16to32(v *Value) bool {
 29278  	v_0 := v.Args[0]
 29279  	// match: (SignExt16to32 (Const16 [c]))
 29280  	// result: (Const32 [int32(c)])
 29281  	for {
 29282  		if v_0.Op != OpConst16 {
 29283  			break
 29284  		}
 29285  		c := auxIntToInt16(v_0.AuxInt)
 29286  		v.reset(OpConst32)
 29287  		v.AuxInt = int32ToAuxInt(int32(c))
 29288  		return true
 29289  	}
 29290  	// match: (SignExt16to32 (Trunc32to16 x:(Rsh32x64 _ (Const64 [s]))))
 29291  	// cond: s >= 16
 29292  	// result: x
 29293  	for {
 29294  		if v_0.Op != OpTrunc32to16 {
 29295  			break
 29296  		}
 29297  		x := v_0.Args[0]
 29298  		if x.Op != OpRsh32x64 {
 29299  			break
 29300  		}
 29301  		_ = x.Args[1]
 29302  		x_1 := x.Args[1]
 29303  		if x_1.Op != OpConst64 {
 29304  			break
 29305  		}
 29306  		s := auxIntToInt64(x_1.AuxInt)
 29307  		if !(s >= 16) {
 29308  			break
 29309  		}
 29310  		v.copyOf(x)
 29311  		return true
 29312  	}
 29313  	return false
 29314  }
 29315  func rewriteValuegeneric_OpSignExt16to64(v *Value) bool {
 29316  	v_0 := v.Args[0]
 29317  	// match: (SignExt16to64 (Const16 [c]))
 29318  	// result: (Const64 [int64(c)])
 29319  	for {
 29320  		if v_0.Op != OpConst16 {
 29321  			break
 29322  		}
 29323  		c := auxIntToInt16(v_0.AuxInt)
 29324  		v.reset(OpConst64)
 29325  		v.AuxInt = int64ToAuxInt(int64(c))
 29326  		return true
 29327  	}
 29328  	// match: (SignExt16to64 (Trunc64to16 x:(Rsh64x64 _ (Const64 [s]))))
 29329  	// cond: s >= 48
 29330  	// result: x
 29331  	for {
 29332  		if v_0.Op != OpTrunc64to16 {
 29333  			break
 29334  		}
 29335  		x := v_0.Args[0]
 29336  		if x.Op != OpRsh64x64 {
 29337  			break
 29338  		}
 29339  		_ = x.Args[1]
 29340  		x_1 := x.Args[1]
 29341  		if x_1.Op != OpConst64 {
 29342  			break
 29343  		}
 29344  		s := auxIntToInt64(x_1.AuxInt)
 29345  		if !(s >= 48) {
 29346  			break
 29347  		}
 29348  		v.copyOf(x)
 29349  		return true
 29350  	}
 29351  	return false
 29352  }
 29353  func rewriteValuegeneric_OpSignExt32to64(v *Value) bool {
 29354  	v_0 := v.Args[0]
 29355  	// match: (SignExt32to64 (Const32 [c]))
 29356  	// result: (Const64 [int64(c)])
 29357  	for {
 29358  		if v_0.Op != OpConst32 {
 29359  			break
 29360  		}
 29361  		c := auxIntToInt32(v_0.AuxInt)
 29362  		v.reset(OpConst64)
 29363  		v.AuxInt = int64ToAuxInt(int64(c))
 29364  		return true
 29365  	}
 29366  	// match: (SignExt32to64 (Trunc64to32 x:(Rsh64x64 _ (Const64 [s]))))
 29367  	// cond: s >= 32
 29368  	// result: x
 29369  	for {
 29370  		if v_0.Op != OpTrunc64to32 {
 29371  			break
 29372  		}
 29373  		x := v_0.Args[0]
 29374  		if x.Op != OpRsh64x64 {
 29375  			break
 29376  		}
 29377  		_ = x.Args[1]
 29378  		x_1 := x.Args[1]
 29379  		if x_1.Op != OpConst64 {
 29380  			break
 29381  		}
 29382  		s := auxIntToInt64(x_1.AuxInt)
 29383  		if !(s >= 32) {
 29384  			break
 29385  		}
 29386  		v.copyOf(x)
 29387  		return true
 29388  	}
 29389  	return false
 29390  }
 29391  func rewriteValuegeneric_OpSignExt8to16(v *Value) bool {
 29392  	v_0 := v.Args[0]
 29393  	// match: (SignExt8to16 (Const8 [c]))
 29394  	// result: (Const16 [int16(c)])
 29395  	for {
 29396  		if v_0.Op != OpConst8 {
 29397  			break
 29398  		}
 29399  		c := auxIntToInt8(v_0.AuxInt)
 29400  		v.reset(OpConst16)
 29401  		v.AuxInt = int16ToAuxInt(int16(c))
 29402  		return true
 29403  	}
 29404  	// match: (SignExt8to16 (Trunc16to8 x:(Rsh16x64 _ (Const64 [s]))))
 29405  	// cond: s >= 8
 29406  	// result: x
 29407  	for {
 29408  		if v_0.Op != OpTrunc16to8 {
 29409  			break
 29410  		}
 29411  		x := v_0.Args[0]
 29412  		if x.Op != OpRsh16x64 {
 29413  			break
 29414  		}
 29415  		_ = x.Args[1]
 29416  		x_1 := x.Args[1]
 29417  		if x_1.Op != OpConst64 {
 29418  			break
 29419  		}
 29420  		s := auxIntToInt64(x_1.AuxInt)
 29421  		if !(s >= 8) {
 29422  			break
 29423  		}
 29424  		v.copyOf(x)
 29425  		return true
 29426  	}
 29427  	return false
 29428  }
 29429  func rewriteValuegeneric_OpSignExt8to32(v *Value) bool {
 29430  	v_0 := v.Args[0]
 29431  	// match: (SignExt8to32 (Const8 [c]))
 29432  	// result: (Const32 [int32(c)])
 29433  	for {
 29434  		if v_0.Op != OpConst8 {
 29435  			break
 29436  		}
 29437  		c := auxIntToInt8(v_0.AuxInt)
 29438  		v.reset(OpConst32)
 29439  		v.AuxInt = int32ToAuxInt(int32(c))
 29440  		return true
 29441  	}
 29442  	// match: (SignExt8to32 (Trunc32to8 x:(Rsh32x64 _ (Const64 [s]))))
 29443  	// cond: s >= 24
 29444  	// result: x
 29445  	for {
 29446  		if v_0.Op != OpTrunc32to8 {
 29447  			break
 29448  		}
 29449  		x := v_0.Args[0]
 29450  		if x.Op != OpRsh32x64 {
 29451  			break
 29452  		}
 29453  		_ = x.Args[1]
 29454  		x_1 := x.Args[1]
 29455  		if x_1.Op != OpConst64 {
 29456  			break
 29457  		}
 29458  		s := auxIntToInt64(x_1.AuxInt)
 29459  		if !(s >= 24) {
 29460  			break
 29461  		}
 29462  		v.copyOf(x)
 29463  		return true
 29464  	}
 29465  	return false
 29466  }
 29467  func rewriteValuegeneric_OpSignExt8to64(v *Value) bool {
 29468  	v_0 := v.Args[0]
 29469  	// match: (SignExt8to64 (Const8 [c]))
 29470  	// result: (Const64 [int64(c)])
 29471  	for {
 29472  		if v_0.Op != OpConst8 {
 29473  			break
 29474  		}
 29475  		c := auxIntToInt8(v_0.AuxInt)
 29476  		v.reset(OpConst64)
 29477  		v.AuxInt = int64ToAuxInt(int64(c))
 29478  		return true
 29479  	}
 29480  	// match: (SignExt8to64 (Trunc64to8 x:(Rsh64x64 _ (Const64 [s]))))
 29481  	// cond: s >= 56
 29482  	// result: x
 29483  	for {
 29484  		if v_0.Op != OpTrunc64to8 {
 29485  			break
 29486  		}
 29487  		x := v_0.Args[0]
 29488  		if x.Op != OpRsh64x64 {
 29489  			break
 29490  		}
 29491  		_ = x.Args[1]
 29492  		x_1 := x.Args[1]
 29493  		if x_1.Op != OpConst64 {
 29494  			break
 29495  		}
 29496  		s := auxIntToInt64(x_1.AuxInt)
 29497  		if !(s >= 56) {
 29498  			break
 29499  		}
 29500  		v.copyOf(x)
 29501  		return true
 29502  	}
 29503  	return false
 29504  }
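// Note: the Trunc/Rsh patterns in the SignExt functions above remove a redundant
// truncate-then-extend pair. After an arithmetic right shift by at least (width - n)
// bits, the upper bits of the value are already copies of the sign bit of its low n
// bits, so truncating to n bits and sign-extending back reproduces the same value.
// For example 0x8000_0000_0000_0000 >> 48 = 0xFFFF_FFFF_FFFF_8000, and
// SignExt16to64 (Trunc64to16 ...) of that value is the identical bit pattern.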
 29505  func rewriteValuegeneric_OpSliceCap(v *Value) bool {
 29506  	v_0 := v.Args[0]
 29507  	// match: (SliceCap (SliceMake _ _ (Const64 <t> [c])))
 29508  	// result: (Const64 <t> [c])
 29509  	for {
 29510  		if v_0.Op != OpSliceMake {
 29511  			break
 29512  		}
 29513  		_ = v_0.Args[2]
 29514  		v_0_2 := v_0.Args[2]
 29515  		if v_0_2.Op != OpConst64 {
 29516  			break
 29517  		}
 29518  		t := v_0_2.Type
 29519  		c := auxIntToInt64(v_0_2.AuxInt)
 29520  		v.reset(OpConst64)
 29521  		v.Type = t
 29522  		v.AuxInt = int64ToAuxInt(c)
 29523  		return true
 29524  	}
 29525  	// match: (SliceCap (SliceMake _ _ (Const32 <t> [c])))
 29526  	// result: (Const32 <t> [c])
 29527  	for {
 29528  		if v_0.Op != OpSliceMake {
 29529  			break
 29530  		}
 29531  		_ = v_0.Args[2]
 29532  		v_0_2 := v_0.Args[2]
 29533  		if v_0_2.Op != OpConst32 {
 29534  			break
 29535  		}
 29536  		t := v_0_2.Type
 29537  		c := auxIntToInt32(v_0_2.AuxInt)
 29538  		v.reset(OpConst32)
 29539  		v.Type = t
 29540  		v.AuxInt = int32ToAuxInt(c)
 29541  		return true
 29542  	}
 29543  	// match: (SliceCap (SliceMake _ _ (SliceCap x)))
 29544  	// result: (SliceCap x)
 29545  	for {
 29546  		if v_0.Op != OpSliceMake {
 29547  			break
 29548  		}
 29549  		_ = v_0.Args[2]
 29550  		v_0_2 := v_0.Args[2]
 29551  		if v_0_2.Op != OpSliceCap {
 29552  			break
 29553  		}
 29554  		x := v_0_2.Args[0]
 29555  		v.reset(OpSliceCap)
 29556  		v.AddArg(x)
 29557  		return true
 29558  	}
 29559  	// match: (SliceCap (SliceMake _ _ (SliceLen x)))
 29560  	// result: (SliceLen x)
 29561  	for {
 29562  		if v_0.Op != OpSliceMake {
 29563  			break
 29564  		}
 29565  		_ = v_0.Args[2]
 29566  		v_0_2 := v_0.Args[2]
 29567  		if v_0_2.Op != OpSliceLen {
 29568  			break
 29569  		}
 29570  		x := v_0_2.Args[0]
 29571  		v.reset(OpSliceLen)
 29572  		v.AddArg(x)
 29573  		return true
 29574  	}
 29575  	return false
 29576  }
 29577  func rewriteValuegeneric_OpSliceLen(v *Value) bool {
 29578  	v_0 := v.Args[0]
 29579  	// match: (SliceLen (SliceMake _ (Const64 <t> [c]) _))
 29580  	// result: (Const64 <t> [c])
 29581  	for {
 29582  		if v_0.Op != OpSliceMake {
 29583  			break
 29584  		}
 29585  		_ = v_0.Args[1]
 29586  		v_0_1 := v_0.Args[1]
 29587  		if v_0_1.Op != OpConst64 {
 29588  			break
 29589  		}
 29590  		t := v_0_1.Type
 29591  		c := auxIntToInt64(v_0_1.AuxInt)
 29592  		v.reset(OpConst64)
 29593  		v.Type = t
 29594  		v.AuxInt = int64ToAuxInt(c)
 29595  		return true
 29596  	}
 29597  	// match: (SliceLen (SliceMake _ (Const32 <t> [c]) _))
 29598  	// result: (Const32 <t> [c])
 29599  	for {
 29600  		if v_0.Op != OpSliceMake {
 29601  			break
 29602  		}
 29603  		_ = v_0.Args[1]
 29604  		v_0_1 := v_0.Args[1]
 29605  		if v_0_1.Op != OpConst32 {
 29606  			break
 29607  		}
 29608  		t := v_0_1.Type
 29609  		c := auxIntToInt32(v_0_1.AuxInt)
 29610  		v.reset(OpConst32)
 29611  		v.Type = t
 29612  		v.AuxInt = int32ToAuxInt(c)
 29613  		return true
 29614  	}
 29615  	// match: (SliceLen (SliceMake _ (SliceLen x) _))
 29616  	// result: (SliceLen x)
 29617  	for {
 29618  		if v_0.Op != OpSliceMake {
 29619  			break
 29620  		}
 29621  		_ = v_0.Args[1]
 29622  		v_0_1 := v_0.Args[1]
 29623  		if v_0_1.Op != OpSliceLen {
 29624  			break
 29625  		}
 29626  		x := v_0_1.Args[0]
 29627  		v.reset(OpSliceLen)
 29628  		v.AddArg(x)
 29629  		return true
 29630  	}
 29631  	// match: (SliceLen (SelectN [0] (StaticLECall {sym} _ newLen:(Const64) _ _ _ _)))
 29632  	// cond: isSameCall(sym, "runtime.growslice")
 29633  	// result: newLen
 29634  	for {
 29635  		if v_0.Op != OpSelectN || auxIntToInt64(v_0.AuxInt) != 0 {
 29636  			break
 29637  		}
 29638  		v_0_0 := v_0.Args[0]
 29639  		if v_0_0.Op != OpStaticLECall || len(v_0_0.Args) != 6 {
 29640  			break
 29641  		}
 29642  		sym := auxToCall(v_0_0.Aux)
 29643  		_ = v_0_0.Args[1]
 29644  		newLen := v_0_0.Args[1]
 29645  		if newLen.Op != OpConst64 || !(isSameCall(sym, "runtime.growslice")) {
 29646  			break
 29647  		}
 29648  		v.copyOf(newLen)
 29649  		return true
 29650  	}
 29651  	// match: (SliceLen (SelectN [0] (StaticLECall {sym} _ newLen:(Const32) _ _ _ _)))
 29652  	// cond: isSameCall(sym, "runtime.growslice")
 29653  	// result: newLen
 29654  	for {
 29655  		if v_0.Op != OpSelectN || auxIntToInt64(v_0.AuxInt) != 0 {
 29656  			break
 29657  		}
 29658  		v_0_0 := v_0.Args[0]
 29659  		if v_0_0.Op != OpStaticLECall || len(v_0_0.Args) != 6 {
 29660  			break
 29661  		}
 29662  		sym := auxToCall(v_0_0.Aux)
 29663  		_ = v_0_0.Args[1]
 29664  		newLen := v_0_0.Args[1]
 29665  		if newLen.Op != OpConst32 || !(isSameCall(sym, "runtime.growslice")) {
 29666  			break
 29667  		}
 29668  		v.copyOf(newLen)
 29669  		return true
 29670  	}
 29671  	return false
 29672  }
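// Note: besides projecting the length field straight out of a SliceMake, SliceLen can
// look through the slice returned by runtime.growslice: the newLen argument bound in
// the rules above is also the length of the resulting slice, so when it is a constant
// the SliceLen folds to that constant.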
 29673  func rewriteValuegeneric_OpSlicePtr(v *Value) bool {
 29674  	v_0 := v.Args[0]
 29675  	// match: (SlicePtr (SliceMake (SlicePtr x) _ _))
 29676  	// result: (SlicePtr x)
 29677  	for {
 29678  		if v_0.Op != OpSliceMake {
 29679  			break
 29680  		}
 29681  		v_0_0 := v_0.Args[0]
 29682  		if v_0_0.Op != OpSlicePtr {
 29683  			break
 29684  		}
 29685  		x := v_0_0.Args[0]
 29686  		v.reset(OpSlicePtr)
 29687  		v.AddArg(x)
 29688  		return true
 29689  	}
 29690  	return false
 29691  }
 29692  func rewriteValuegeneric_OpSlicemask(v *Value) bool {
 29693  	v_0 := v.Args[0]
 29694  	// match: (Slicemask (Const32 [x]))
 29695  	// cond: x > 0
 29696  	// result: (Const32 [-1])
 29697  	for {
 29698  		if v_0.Op != OpConst32 {
 29699  			break
 29700  		}
 29701  		x := auxIntToInt32(v_0.AuxInt)
 29702  		if !(x > 0) {
 29703  			break
 29704  		}
 29705  		v.reset(OpConst32)
 29706  		v.AuxInt = int32ToAuxInt(-1)
 29707  		return true
 29708  	}
 29709  	// match: (Slicemask (Const32 [0]))
 29710  	// result: (Const32 [0])
 29711  	for {
 29712  		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 29713  			break
 29714  		}
 29715  		v.reset(OpConst32)
 29716  		v.AuxInt = int32ToAuxInt(0)
 29717  		return true
 29718  	}
 29719  	// match: (Slicemask (Const64 [x]))
 29720  	// cond: x > 0
 29721  	// result: (Const64 [-1])
 29722  	for {
 29723  		if v_0.Op != OpConst64 {
 29724  			break
 29725  		}
 29726  		x := auxIntToInt64(v_0.AuxInt)
 29727  		if !(x > 0) {
 29728  			break
 29729  		}
 29730  		v.reset(OpConst64)
 29731  		v.AuxInt = int64ToAuxInt(-1)
 29732  		return true
 29733  	}
 29734  	// match: (Slicemask (Const64 [0]))
 29735  	// result: (Const64 [0])
 29736  	for {
 29737  		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 29738  			break
 29739  		}
 29740  		v.reset(OpConst64)
 29741  		v.AuxInt = int64ToAuxInt(0)
 29742  		return true
 29743  	}
 29744  	return false
 29745  }
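// Note: Slicemask yields all ones for a non-zero length and zero for a zero length; it
// is used to mask the base-pointer arithmetic of a slice expression so that an empty
// result does not compute an out-of-range pointer. With a constant operand the mask
// folds directly: any positive constant becomes -1 and 0 becomes 0.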
 29746  func rewriteValuegeneric_OpSqrt(v *Value) bool {
 29747  	v_0 := v.Args[0]
 29748  	// match: (Sqrt (Const64F [c]))
 29749  	// cond: !math.IsNaN(math.Sqrt(c))
 29750  	// result: (Const64F [math.Sqrt(c)])
 29751  	for {
 29752  		if v_0.Op != OpConst64F {
 29753  			break
 29754  		}
 29755  		c := auxIntToFloat64(v_0.AuxInt)
 29756  		if !(!math.IsNaN(math.Sqrt(c))) {
 29757  			break
 29758  		}
 29759  		v.reset(OpConst64F)
 29760  		v.AuxInt = float64ToAuxInt(math.Sqrt(c))
 29761  		return true
 29762  	}
 29763  	return false
 29764  }
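// Note: Sqrt of a floating-point constant is folded at compile time only when the
// result is not NaN, i.e. the operand is non-negative and not itself NaN.
// (Sqrt (Const64F [4])) becomes (Const64F [2]); Sqrt of a negative constant is left
// for the runtime to evaluate.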
 29765  func rewriteValuegeneric_OpStaticCall(v *Value) bool {
 29766  	b := v.Block
 29767  	typ := &b.Func.Config.Types
 29768  	// match: (StaticCall {callAux} p q _ mem)
 29769  	// cond: isSameCall(callAux, "runtime.memequal") && isSamePtr(p, q)
 29770  	// result: (MakeResult (ConstBool <typ.Bool> [true]) mem)
 29771  	for {
 29772  		if len(v.Args) != 4 {
 29773  			break
 29774  		}
 29775  		callAux := auxToCall(v.Aux)
 29776  		mem := v.Args[3]
 29777  		p := v.Args[0]
 29778  		q := v.Args[1]
 29779  		if !(isSameCall(callAux, "runtime.memequal") && isSamePtr(p, q)) {
 29780  			break
 29781  		}
 29782  		v.reset(OpMakeResult)
 29783  		v0 := b.NewValue0(v.Pos, OpConstBool, typ.Bool)
 29784  		v0.AuxInt = boolToAuxInt(true)
 29785  		v.AddArg2(v0, mem)
 29786  		return true
 29787  	}
 29788  	return false
 29789  }
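// Note: the rule above recognizes runtime.memequal(p, q, n) where p and q are provably
// the same pointer. Comparing a region of memory with itself always succeeds, so the
// call is replaced by (MakeResult (ConstBool [true]) mem), threading the memory state
// through unchanged.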
 29790  func rewriteValuegeneric_OpStaticLECall(v *Value) bool {
 29791  	b := v.Block
 29792  	config := b.Func.Config
 29793  	typ := &b.Func.Config.Types
 29794  	// match: (StaticLECall {callAux} sptr (Addr {scon} (SB)) (Const64 [1]) mem)
 29795  	// cond: isSameCall(callAux, "runtime.memequal") && symIsRO(scon)
 29796  	// result: (MakeResult (Eq8 (Load <typ.Int8> sptr mem) (Const8 <typ.Int8> [int8(read8(scon,0))])) mem)
 29797  	for {
 29798  		if len(v.Args) != 4 {
 29799  			break
 29800  		}
 29801  		callAux := auxToCall(v.Aux)
 29802  		mem := v.Args[3]
 29803  		sptr := v.Args[0]
 29804  		v_1 := v.Args[1]
 29805  		if v_1.Op != OpAddr {
 29806  			break
 29807  		}
 29808  		scon := auxToSym(v_1.Aux)
 29809  		v_1_0 := v_1.Args[0]
 29810  		if v_1_0.Op != OpSB {
 29811  			break
 29812  		}
 29813  		v_2 := v.Args[2]
 29814  		if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 1 || !(isSameCall(callAux, "runtime.memequal") && symIsRO(scon)) {
 29815  			break
 29816  		}
 29817  		v.reset(OpMakeResult)
 29818  		v0 := b.NewValue0(v.Pos, OpEq8, typ.Bool)
 29819  		v1 := b.NewValue0(v.Pos, OpLoad, typ.Int8)
 29820  		v1.AddArg2(sptr, mem)
 29821  		v2 := b.NewValue0(v.Pos, OpConst8, typ.Int8)
 29822  		v2.AuxInt = int8ToAuxInt(int8(read8(scon, 0)))
 29823  		v0.AddArg2(v1, v2)
 29824  		v.AddArg2(v0, mem)
 29825  		return true
 29826  	}
 29827  	// match: (StaticLECall {callAux} (Addr {scon} (SB)) sptr (Const64 [1]) mem)
 29828  	// cond: isSameCall(callAux, "runtime.memequal") && symIsRO(scon)
 29829  	// result: (MakeResult (Eq8 (Load <typ.Int8> sptr mem) (Const8 <typ.Int8> [int8(read8(scon,0))])) mem)
 29830  	for {
 29831  		if len(v.Args) != 4 {
 29832  			break
 29833  		}
 29834  		callAux := auxToCall(v.Aux)
 29835  		mem := v.Args[3]
 29836  		v_0 := v.Args[0]
 29837  		if v_0.Op != OpAddr {
 29838  			break
 29839  		}
 29840  		scon := auxToSym(v_0.Aux)
 29841  		v_0_0 := v_0.Args[0]
 29842  		if v_0_0.Op != OpSB {
 29843  			break
 29844  		}
 29845  		sptr := v.Args[1]
 29846  		v_2 := v.Args[2]
 29847  		if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 1 || !(isSameCall(callAux, "runtime.memequal") && symIsRO(scon)) {
 29848  			break
 29849  		}
 29850  		v.reset(OpMakeResult)
 29851  		v0 := b.NewValue0(v.Pos, OpEq8, typ.Bool)
 29852  		v1 := b.NewValue0(v.Pos, OpLoad, typ.Int8)
 29853  		v1.AddArg2(sptr, mem)
 29854  		v2 := b.NewValue0(v.Pos, OpConst8, typ.Int8)
 29855  		v2.AuxInt = int8ToAuxInt(int8(read8(scon, 0)))
 29856  		v0.AddArg2(v1, v2)
 29857  		v.AddArg2(v0, mem)
 29858  		return true
 29859  	}
 29860  	// match: (StaticLECall {callAux} sptr (Addr {scon} (SB)) (Const64 [2]) mem)
 29861  	// cond: isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config)
 29862  	// result: (MakeResult (Eq16 (Load <typ.Int16> sptr mem) (Const16 <typ.Int16> [int16(read16(scon,0,config.ctxt.Arch.ByteOrder))])) mem)
 29863  	for {
 29864  		if len(v.Args) != 4 {
 29865  			break
 29866  		}
 29867  		callAux := auxToCall(v.Aux)
 29868  		mem := v.Args[3]
 29869  		sptr := v.Args[0]
 29870  		v_1 := v.Args[1]
 29871  		if v_1.Op != OpAddr {
 29872  			break
 29873  		}
 29874  		scon := auxToSym(v_1.Aux)
 29875  		v_1_0 := v_1.Args[0]
 29876  		if v_1_0.Op != OpSB {
 29877  			break
 29878  		}
 29879  		v_2 := v.Args[2]
 29880  		if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 2 || !(isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config)) {
 29881  			break
 29882  		}
 29883  		v.reset(OpMakeResult)
 29884  		v0 := b.NewValue0(v.Pos, OpEq16, typ.Bool)
 29885  		v1 := b.NewValue0(v.Pos, OpLoad, typ.Int16)
 29886  		v1.AddArg2(sptr, mem)
 29887  		v2 := b.NewValue0(v.Pos, OpConst16, typ.Int16)
 29888  		v2.AuxInt = int16ToAuxInt(int16(read16(scon, 0, config.ctxt.Arch.ByteOrder)))
 29889  		v0.AddArg2(v1, v2)
 29890  		v.AddArg2(v0, mem)
 29891  		return true
 29892  	}
 29893  	// match: (StaticLECall {callAux} (Addr {scon} (SB)) sptr (Const64 [2]) mem)
 29894  	// cond: isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config)
 29895  	// result: (MakeResult (Eq16 (Load <typ.Int16> sptr mem) (Const16 <typ.Int16> [int16(read16(scon,0,config.ctxt.Arch.ByteOrder))])) mem)
 29896  	for {
 29897  		if len(v.Args) != 4 {
 29898  			break
 29899  		}
 29900  		callAux := auxToCall(v.Aux)
 29901  		mem := v.Args[3]
 29902  		v_0 := v.Args[0]
 29903  		if v_0.Op != OpAddr {
 29904  			break
 29905  		}
 29906  		scon := auxToSym(v_0.Aux)
 29907  		v_0_0 := v_0.Args[0]
 29908  		if v_0_0.Op != OpSB {
 29909  			break
 29910  		}
 29911  		sptr := v.Args[1]
 29912  		v_2 := v.Args[2]
 29913  		if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 2 || !(isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config)) {
 29914  			break
 29915  		}
 29916  		v.reset(OpMakeResult)
 29917  		v0 := b.NewValue0(v.Pos, OpEq16, typ.Bool)
 29918  		v1 := b.NewValue0(v.Pos, OpLoad, typ.Int16)
 29919  		v1.AddArg2(sptr, mem)
 29920  		v2 := b.NewValue0(v.Pos, OpConst16, typ.Int16)
 29921  		v2.AuxInt = int16ToAuxInt(int16(read16(scon, 0, config.ctxt.Arch.ByteOrder)))
 29922  		v0.AddArg2(v1, v2)
 29923  		v.AddArg2(v0, mem)
 29924  		return true
 29925  	}
 29926  	// match: (StaticLECall {callAux} sptr (Addr {scon} (SB)) (Const64 [4]) mem)
 29927  	// cond: isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config)
 29928  	// result: (MakeResult (Eq32 (Load <typ.Int32> sptr mem) (Const32 <typ.Int32> [int32(read32(scon,0,config.ctxt.Arch.ByteOrder))])) mem)
 29929  	for {
 29930  		if len(v.Args) != 4 {
 29931  			break
 29932  		}
 29933  		callAux := auxToCall(v.Aux)
 29934  		mem := v.Args[3]
 29935  		sptr := v.Args[0]
 29936  		v_1 := v.Args[1]
 29937  		if v_1.Op != OpAddr {
 29938  			break
 29939  		}
 29940  		scon := auxToSym(v_1.Aux)
 29941  		v_1_0 := v_1.Args[0]
 29942  		if v_1_0.Op != OpSB {
 29943  			break
 29944  		}
 29945  		v_2 := v.Args[2]
 29946  		if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 4 || !(isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config)) {
 29947  			break
 29948  		}
 29949  		v.reset(OpMakeResult)
 29950  		v0 := b.NewValue0(v.Pos, OpEq32, typ.Bool)
 29951  		v1 := b.NewValue0(v.Pos, OpLoad, typ.Int32)
 29952  		v1.AddArg2(sptr, mem)
 29953  		v2 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
 29954  		v2.AuxInt = int32ToAuxInt(int32(read32(scon, 0, config.ctxt.Arch.ByteOrder)))
 29955  		v0.AddArg2(v1, v2)
 29956  		v.AddArg2(v0, mem)
 29957  		return true
 29958  	}
 29959  	// match: (StaticLECall {callAux} (Addr {scon} (SB)) sptr (Const64 [4]) mem)
 29960  	// cond: isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config)
 29961  	// result: (MakeResult (Eq32 (Load <typ.Int32> sptr mem) (Const32 <typ.Int32> [int32(read32(scon,0,config.ctxt.Arch.ByteOrder))])) mem)
 29962  	for {
 29963  		if len(v.Args) != 4 {
 29964  			break
 29965  		}
 29966  		callAux := auxToCall(v.Aux)
 29967  		mem := v.Args[3]
 29968  		v_0 := v.Args[0]
 29969  		if v_0.Op != OpAddr {
 29970  			break
 29971  		}
 29972  		scon := auxToSym(v_0.Aux)
 29973  		v_0_0 := v_0.Args[0]
 29974  		if v_0_0.Op != OpSB {
 29975  			break
 29976  		}
 29977  		sptr := v.Args[1]
 29978  		v_2 := v.Args[2]
 29979  		if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 4 || !(isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config)) {
 29980  			break
 29981  		}
 29982  		v.reset(OpMakeResult)
 29983  		v0 := b.NewValue0(v.Pos, OpEq32, typ.Bool)
 29984  		v1 := b.NewValue0(v.Pos, OpLoad, typ.Int32)
 29985  		v1.AddArg2(sptr, mem)
 29986  		v2 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
 29987  		v2.AuxInt = int32ToAuxInt(int32(read32(scon, 0, config.ctxt.Arch.ByteOrder)))
 29988  		v0.AddArg2(v1, v2)
 29989  		v.AddArg2(v0, mem)
 29990  		return true
 29991  	}
 29992  	// match: (StaticLECall {callAux} sptr (Addr {scon} (SB)) (Const64 [8]) mem)
 29993  	// cond: isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config) && config.PtrSize == 8
 29994  	// result: (MakeResult (Eq64 (Load <typ.Int64> sptr mem) (Const64 <typ.Int64> [int64(read64(scon,0,config.ctxt.Arch.ByteOrder))])) mem)
 29995  	for {
 29996  		if len(v.Args) != 4 {
 29997  			break
 29998  		}
 29999  		callAux := auxToCall(v.Aux)
 30000  		mem := v.Args[3]
 30001  		sptr := v.Args[0]
 30002  		v_1 := v.Args[1]
 30003  		if v_1.Op != OpAddr {
 30004  			break
 30005  		}
 30006  		scon := auxToSym(v_1.Aux)
 30007  		v_1_0 := v_1.Args[0]
 30008  		if v_1_0.Op != OpSB {
 30009  			break
 30010  		}
 30011  		v_2 := v.Args[2]
 30012  		if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 8 || !(isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config) && config.PtrSize == 8) {
 30013  			break
 30014  		}
 30015  		v.reset(OpMakeResult)
 30016  		v0 := b.NewValue0(v.Pos, OpEq64, typ.Bool)
 30017  		v1 := b.NewValue0(v.Pos, OpLoad, typ.Int64)
 30018  		v1.AddArg2(sptr, mem)
 30019  		v2 := b.NewValue0(v.Pos, OpConst64, typ.Int64)
 30020  		v2.AuxInt = int64ToAuxInt(int64(read64(scon, 0, config.ctxt.Arch.ByteOrder)))
 30021  		v0.AddArg2(v1, v2)
 30022  		v.AddArg2(v0, mem)
 30023  		return true
 30024  	}
 30025  	// match: (StaticLECall {callAux} (Addr {scon} (SB)) sptr (Const64 [8]) mem)
 30026  	// cond: isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config) && config.PtrSize == 8
 30027  	// result: (MakeResult (Eq64 (Load <typ.Int64> sptr mem) (Const64 <typ.Int64> [int64(read64(scon,0,config.ctxt.Arch.ByteOrder))])) mem)
 30028  	for {
 30029  		if len(v.Args) != 4 {
 30030  			break
 30031  		}
 30032  		callAux := auxToCall(v.Aux)
 30033  		mem := v.Args[3]
 30034  		v_0 := v.Args[0]
 30035  		if v_0.Op != OpAddr {
 30036  			break
 30037  		}
 30038  		scon := auxToSym(v_0.Aux)
 30039  		v_0_0 := v_0.Args[0]
 30040  		if v_0_0.Op != OpSB {
 30041  			break
 30042  		}
 30043  		sptr := v.Args[1]
 30044  		v_2 := v.Args[2]
 30045  		if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 8 || !(isSameCall(callAux, "runtime.memequal") && symIsRO(scon) && canLoadUnaligned(config) && config.PtrSize == 8) {
 30046  			break
 30047  		}
 30048  		v.reset(OpMakeResult)
 30049  		v0 := b.NewValue0(v.Pos, OpEq64, typ.Bool)
 30050  		v1 := b.NewValue0(v.Pos, OpLoad, typ.Int64)
 30051  		v1.AddArg2(sptr, mem)
 30052  		v2 := b.NewValue0(v.Pos, OpConst64, typ.Int64)
 30053  		v2.AuxInt = int64ToAuxInt(int64(read64(scon, 0, config.ctxt.Arch.ByteOrder)))
 30054  		v0.AddArg2(v1, v2)
 30055  		v.AddArg2(v0, mem)
 30056  		return true
 30057  	}
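	// A hedged sketch of this family of memequal rules: after the compiler's
	// usual length check, a comparison such as s == "abcd" reaches the SSA
	// backend as a call to runtime.memequal in which one operand is the
	// address of a read-only data symbol. When the length is a small
	// constant (4 or 8 bytes here) and the target can load unaligned, the
	// call is replaced by a single load of the variable side compared
	// against the constant bytes read directly from that symbol.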
 30058  	// match: (StaticLECall {callAux} _ _ (Const64 [0]) mem)
 30059  	// cond: isSameCall(callAux, "runtime.memequal")
 30060  	// result: (MakeResult (ConstBool <typ.Bool> [true]) mem)
 30061  	for {
 30062  		if len(v.Args) != 4 {
 30063  			break
 30064  		}
 30065  		callAux := auxToCall(v.Aux)
 30066  		mem := v.Args[3]
 30067  		v_2 := v.Args[2]
 30068  		if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 0 || !(isSameCall(callAux, "runtime.memequal")) {
 30069  			break
 30070  		}
 30071  		v.reset(OpMakeResult)
 30072  		v0 := b.NewValue0(v.Pos, OpConstBool, typ.Bool)
 30073  		v0.AuxInt = boolToAuxInt(true)
 30074  		v.AddArg2(v0, mem)
 30075  		return true
 30076  	}
 30077  	// match: (StaticLECall {callAux} p q _ mem)
 30078  	// cond: isSameCall(callAux, "runtime.memequal") && isSamePtr(p, q)
 30079  	// result: (MakeResult (ConstBool <typ.Bool> [true]) mem)
 30080  	for {
 30081  		if len(v.Args) != 4 {
 30082  			break
 30083  		}
 30084  		callAux := auxToCall(v.Aux)
 30085  		mem := v.Args[3]
 30086  		p := v.Args[0]
 30087  		q := v.Args[1]
 30088  		if !(isSameCall(callAux, "runtime.memequal") && isSamePtr(p, q)) {
 30089  			break
 30090  		}
 30091  		v.reset(OpMakeResult)
 30092  		v0 := b.NewValue0(v.Pos, OpConstBool, typ.Bool)
 30093  		v0.AuxInt = boolToAuxInt(true)
 30094  		v.AddArg2(v0, mem)
 30095  		return true
 30096  	}
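	// The two rules above handle the trivially true cases: comparing zero
	// bytes, or comparing a pointer against itself, needs no call at all and
	// folds to a constant true result.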
 30097  	// match: (StaticLECall {callAux} _ (Const64 [0]) (Const64 [0]) mem)
 30098  	// cond: isSameCall(callAux, "runtime.makeslice")
 30099  	// result: (MakeResult (Addr <v.Type.FieldType(0)> {ir.Syms.Zerobase} (SB)) mem)
 30100  	for {
 30101  		if len(v.Args) != 4 {
 30102  			break
 30103  		}
 30104  		callAux := auxToCall(v.Aux)
 30105  		mem := v.Args[3]
 30106  		v_1 := v.Args[1]
 30107  		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
 30108  			break
 30109  		}
 30110  		v_2 := v.Args[2]
 30111  		if v_2.Op != OpConst64 || auxIntToInt64(v_2.AuxInt) != 0 || !(isSameCall(callAux, "runtime.makeslice")) {
 30112  			break
 30113  		}
 30114  		v.reset(OpMakeResult)
 30115  		v0 := b.NewValue0(v.Pos, OpAddr, v.Type.FieldType(0))
 30116  		v0.Aux = symToAux(ir.Syms.Zerobase)
 30117  		v1 := b.NewValue0(v.Pos, OpSB, typ.Uintptr)
 30118  		v0.AddArg(v1)
 30119  		v.AddArg2(v0, mem)
 30120  		return true
 30121  	}
 30122  	// match: (StaticLECall {callAux} _ (Const32 [0]) (Const32 [0]) mem)
 30123  	// cond: isSameCall(callAux, "runtime.makeslice")
 30124  	// result: (MakeResult (Addr <v.Type.FieldType(0)> {ir.Syms.Zerobase} (SB)) mem)
 30125  	for {
 30126  		if len(v.Args) != 4 {
 30127  			break
 30128  		}
 30129  		callAux := auxToCall(v.Aux)
 30130  		mem := v.Args[3]
 30131  		v_1 := v.Args[1]
 30132  		if v_1.Op != OpConst32 || auxIntToInt32(v_1.AuxInt) != 0 {
 30133  			break
 30134  		}
 30135  		v_2 := v.Args[2]
 30136  		if v_2.Op != OpConst32 || auxIntToInt32(v_2.AuxInt) != 0 || !(isSameCall(callAux, "runtime.makeslice")) {
 30137  			break
 30138  		}
 30139  		v.reset(OpMakeResult)
 30140  		v0 := b.NewValue0(v.Pos, OpAddr, v.Type.FieldType(0))
 30141  		v0.Aux = symToAux(ir.Syms.Zerobase)
 30142  		v1 := b.NewValue0(v.Pos, OpSB, typ.Uintptr)
 30143  		v0.AddArg(v1)
 30144  		v.AddArg2(v0, mem)
 30145  		return true
 30146  	}
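	// The Const64 and Const32 variants above cover 64-bit and 32-bit
	// length/cap arguments. Sketch of the effect: make([]T, 0) allocates
	// nothing, so the runtime.makeslice call is dropped and the slice's data
	// pointer becomes the address of runtime.zerobase (ir.Syms.Zerobase),
	// the shared base address for zero-size allocations.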
 30147  	return false
 30148  }
 30149  func rewriteValuegeneric_OpStore(v *Value) bool {
 30150  	v_2 := v.Args[2]
 30151  	v_1 := v.Args[1]
 30152  	v_0 := v.Args[0]
 30153  	b := v.Block
 30154  	// match: (Store {t1} p1 (Load <t2> p2 mem) mem)
 30155  	// cond: isSamePtr(p1, p2) && t2.Size() == t1.Size()
 30156  	// result: mem
 30157  	for {
 30158  		t1 := auxToType(v.Aux)
 30159  		p1 := v_0
 30160  		if v_1.Op != OpLoad {
 30161  			break
 30162  		}
 30163  		t2 := v_1.Type
 30164  		mem := v_1.Args[1]
 30165  		p2 := v_1.Args[0]
 30166  		if mem != v_2 || !(isSamePtr(p1, p2) && t2.Size() == t1.Size()) {
 30167  			break
 30168  		}
 30169  		v.copyOf(mem)
 30170  		return true
 30171  	}
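	// Minimal example of the rule above: in *p = *p the value being stored
	// is exactly what that memory already holds, so the Store collapses to
	// its incoming memory state. The three rules that follow extend this to
	// tolerate up to three intervening stores to provably disjoint
	// locations between the Load and the Store.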
 30172  	// match: (Store {t1} p1 (Load <t2> p2 oldmem) mem:(Store {t3} p3 _ oldmem))
 30173  	// cond: isSamePtr(p1, p2) && t2.Size() == t1.Size() && disjoint(p1, t1.Size(), p3, t3.Size())
 30174  	// result: mem
 30175  	for {
 30176  		t1 := auxToType(v.Aux)
 30177  		p1 := v_0
 30178  		if v_1.Op != OpLoad {
 30179  			break
 30180  		}
 30181  		t2 := v_1.Type
 30182  		oldmem := v_1.Args[1]
 30183  		p2 := v_1.Args[0]
 30184  		mem := v_2
 30185  		if mem.Op != OpStore {
 30186  			break
 30187  		}
 30188  		t3 := auxToType(mem.Aux)
 30189  		_ = mem.Args[2]
 30190  		p3 := mem.Args[0]
 30191  		if oldmem != mem.Args[2] || !(isSamePtr(p1, p2) && t2.Size() == t1.Size() && disjoint(p1, t1.Size(), p3, t3.Size())) {
 30192  			break
 30193  		}
 30194  		v.copyOf(mem)
 30195  		return true
 30196  	}
 30197  	// match: (Store {t1} p1 (Load <t2> p2 oldmem) mem:(Store {t3} p3 _ (Store {t4} p4 _ oldmem)))
 30198  	// cond: isSamePtr(p1, p2) && t2.Size() == t1.Size() && disjoint(p1, t1.Size(), p3, t3.Size()) && disjoint(p1, t1.Size(), p4, t4.Size())
 30199  	// result: mem
 30200  	for {
 30201  		t1 := auxToType(v.Aux)
 30202  		p1 := v_0
 30203  		if v_1.Op != OpLoad {
 30204  			break
 30205  		}
 30206  		t2 := v_1.Type
 30207  		oldmem := v_1.Args[1]
 30208  		p2 := v_1.Args[0]
 30209  		mem := v_2
 30210  		if mem.Op != OpStore {
 30211  			break
 30212  		}
 30213  		t3 := auxToType(mem.Aux)
 30214  		_ = mem.Args[2]
 30215  		p3 := mem.Args[0]
 30216  		mem_2 := mem.Args[2]
 30217  		if mem_2.Op != OpStore {
 30218  			break
 30219  		}
 30220  		t4 := auxToType(mem_2.Aux)
 30221  		_ = mem_2.Args[2]
 30222  		p4 := mem_2.Args[0]
 30223  		if oldmem != mem_2.Args[2] || !(isSamePtr(p1, p2) && t2.Size() == t1.Size() && disjoint(p1, t1.Size(), p3, t3.Size()) && disjoint(p1, t1.Size(), p4, t4.Size())) {
 30224  			break
 30225  		}
 30226  		v.copyOf(mem)
 30227  		return true
 30228  	}
 30229  	// match: (Store {t1} p1 (Load <t2> p2 oldmem) mem:(Store {t3} p3 _ (Store {t4} p4 _ (Store {t5} p5 _ oldmem))))
 30230  	// cond: isSamePtr(p1, p2) && t2.Size() == t1.Size() && disjoint(p1, t1.Size(), p3, t3.Size()) && disjoint(p1, t1.Size(), p4, t4.Size()) && disjoint(p1, t1.Size(), p5, t5.Size())
 30231  	// result: mem
 30232  	for {
 30233  		t1 := auxToType(v.Aux)
 30234  		p1 := v_0
 30235  		if v_1.Op != OpLoad {
 30236  			break
 30237  		}
 30238  		t2 := v_1.Type
 30239  		oldmem := v_1.Args[1]
 30240  		p2 := v_1.Args[0]
 30241  		mem := v_2
 30242  		if mem.Op != OpStore {
 30243  			break
 30244  		}
 30245  		t3 := auxToType(mem.Aux)
 30246  		_ = mem.Args[2]
 30247  		p3 := mem.Args[0]
 30248  		mem_2 := mem.Args[2]
 30249  		if mem_2.Op != OpStore {
 30250  			break
 30251  		}
 30252  		t4 := auxToType(mem_2.Aux)
 30253  		_ = mem_2.Args[2]
 30254  		p4 := mem_2.Args[0]
 30255  		mem_2_2 := mem_2.Args[2]
 30256  		if mem_2_2.Op != OpStore {
 30257  			break
 30258  		}
 30259  		t5 := auxToType(mem_2_2.Aux)
 30260  		_ = mem_2_2.Args[2]
 30261  		p5 := mem_2_2.Args[0]
 30262  		if oldmem != mem_2_2.Args[2] || !(isSamePtr(p1, p2) && t2.Size() == t1.Size() && disjoint(p1, t1.Size(), p3, t3.Size()) && disjoint(p1, t1.Size(), p4, t4.Size()) && disjoint(p1, t1.Size(), p5, t5.Size())) {
 30263  			break
 30264  		}
 30265  		v.copyOf(mem)
 30266  		return true
 30267  	}
 30268  	// match: (Store {t} (OffPtr [o] p1) x mem:(Zero [n] p2 _))
 30269  	// cond: isConstZero(x) && o >= 0 && t.Size() + o <= n && isSamePtr(p1, p2)
 30270  	// result: mem
 30271  	for {
 30272  		t := auxToType(v.Aux)
 30273  		if v_0.Op != OpOffPtr {
 30274  			break
 30275  		}
 30276  		o := auxIntToInt64(v_0.AuxInt)
 30277  		p1 := v_0.Args[0]
 30278  		x := v_1
 30279  		mem := v_2
 30280  		if mem.Op != OpZero {
 30281  			break
 30282  		}
 30283  		n := auxIntToInt64(mem.AuxInt)
 30284  		p2 := mem.Args[0]
 30285  		if !(isConstZero(x) && o >= 0 && t.Size()+o <= n && isSamePtr(p1, p2)) {
 30286  			break
 30287  		}
 30288  		v.copyOf(mem)
 30289  		return true
 30290  	}
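	// Sketch of the rule above: storing a constant zero at offset o into a
	// region that the matched Zero op has already cleared (o >= 0 and
	// t.Size()+o <= n) changes nothing, so the Store is dropped. The next
	// three rules allow up to three intervening stores to disjoint
	// locations between the Zero and this Store.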
 30291  	// match: (Store {t1} op:(OffPtr [o1] p1) x mem:(Store {t2} p2 _ (Zero [n] p3 _)))
 30292  	// cond: isConstZero(x) && o1 >= 0 && t1.Size() + o1 <= n && isSamePtr(p1, p3) && disjoint(op, t1.Size(), p2, t2.Size())
 30293  	// result: mem
 30294  	for {
 30295  		t1 := auxToType(v.Aux)
 30296  		op := v_0
 30297  		if op.Op != OpOffPtr {
 30298  			break
 30299  		}
 30300  		o1 := auxIntToInt64(op.AuxInt)
 30301  		p1 := op.Args[0]
 30302  		x := v_1
 30303  		mem := v_2
 30304  		if mem.Op != OpStore {
 30305  			break
 30306  		}
 30307  		t2 := auxToType(mem.Aux)
 30308  		_ = mem.Args[2]
 30309  		p2 := mem.Args[0]
 30310  		mem_2 := mem.Args[2]
 30311  		if mem_2.Op != OpZero {
 30312  			break
 30313  		}
 30314  		n := auxIntToInt64(mem_2.AuxInt)
 30315  		p3 := mem_2.Args[0]
 30316  		if !(isConstZero(x) && o1 >= 0 && t1.Size()+o1 <= n && isSamePtr(p1, p3) && disjoint(op, t1.Size(), p2, t2.Size())) {
 30317  			break
 30318  		}
 30319  		v.copyOf(mem)
 30320  		return true
 30321  	}
 30322  	// match: (Store {t1} op:(OffPtr [o1] p1) x mem:(Store {t2} p2 _ (Store {t3} p3 _ (Zero [n] p4 _))))
 30323  	// cond: isConstZero(x) && o1 >= 0 && t1.Size() + o1 <= n && isSamePtr(p1, p4) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size())
 30324  	// result: mem
 30325  	for {
 30326  		t1 := auxToType(v.Aux)
 30327  		op := v_0
 30328  		if op.Op != OpOffPtr {
 30329  			break
 30330  		}
 30331  		o1 := auxIntToInt64(op.AuxInt)
 30332  		p1 := op.Args[0]
 30333  		x := v_1
 30334  		mem := v_2
 30335  		if mem.Op != OpStore {
 30336  			break
 30337  		}
 30338  		t2 := auxToType(mem.Aux)
 30339  		_ = mem.Args[2]
 30340  		p2 := mem.Args[0]
 30341  		mem_2 := mem.Args[2]
 30342  		if mem_2.Op != OpStore {
 30343  			break
 30344  		}
 30345  		t3 := auxToType(mem_2.Aux)
 30346  		_ = mem_2.Args[2]
 30347  		p3 := mem_2.Args[0]
 30348  		mem_2_2 := mem_2.Args[2]
 30349  		if mem_2_2.Op != OpZero {
 30350  			break
 30351  		}
 30352  		n := auxIntToInt64(mem_2_2.AuxInt)
 30353  		p4 := mem_2_2.Args[0]
 30354  		if !(isConstZero(x) && o1 >= 0 && t1.Size()+o1 <= n && isSamePtr(p1, p4) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size())) {
 30355  			break
 30356  		}
 30357  		v.copyOf(mem)
 30358  		return true
 30359  	}
 30360  	// match: (Store {t1} op:(OffPtr [o1] p1) x mem:(Store {t2} p2 _ (Store {t3} p3 _ (Store {t4} p4 _ (Zero [n] p5 _)))))
 30361  	// cond: isConstZero(x) && o1 >= 0 && t1.Size() + o1 <= n && isSamePtr(p1, p5) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size()) && disjoint(op, t1.Size(), p4, t4.Size())
 30362  	// result: mem
 30363  	for {
 30364  		t1 := auxToType(v.Aux)
 30365  		op := v_0
 30366  		if op.Op != OpOffPtr {
 30367  			break
 30368  		}
 30369  		o1 := auxIntToInt64(op.AuxInt)
 30370  		p1 := op.Args[0]
 30371  		x := v_1
 30372  		mem := v_2
 30373  		if mem.Op != OpStore {
 30374  			break
 30375  		}
 30376  		t2 := auxToType(mem.Aux)
 30377  		_ = mem.Args[2]
 30378  		p2 := mem.Args[0]
 30379  		mem_2 := mem.Args[2]
 30380  		if mem_2.Op != OpStore {
 30381  			break
 30382  		}
 30383  		t3 := auxToType(mem_2.Aux)
 30384  		_ = mem_2.Args[2]
 30385  		p3 := mem_2.Args[0]
 30386  		mem_2_2 := mem_2.Args[2]
 30387  		if mem_2_2.Op != OpStore {
 30388  			break
 30389  		}
 30390  		t4 := auxToType(mem_2_2.Aux)
 30391  		_ = mem_2_2.Args[2]
 30392  		p4 := mem_2_2.Args[0]
 30393  		mem_2_2_2 := mem_2_2.Args[2]
 30394  		if mem_2_2_2.Op != OpZero {
 30395  			break
 30396  		}
 30397  		n := auxIntToInt64(mem_2_2_2.AuxInt)
 30398  		p5 := mem_2_2_2.Args[0]
 30399  		if !(isConstZero(x) && o1 >= 0 && t1.Size()+o1 <= n && isSamePtr(p1, p5) && disjoint(op, t1.Size(), p2, t2.Size()) && disjoint(op, t1.Size(), p3, t3.Size()) && disjoint(op, t1.Size(), p4, t4.Size())) {
 30400  			break
 30401  		}
 30402  		v.copyOf(mem)
 30403  		return true
 30404  	}
 30405  	// match: (Store _ (StructMake ___) _)
 30406  	// result: rewriteStructStore(v)
 30407  	for {
 30408  		if v_1.Op != OpStructMake {
 30409  			break
 30410  		}
 30411  		v.copyOf(rewriteStructStore(v))
 30412  		return true
 30413  	}
 30414  	// match: (Store {t} dst (Load src mem) mem)
 30415  	// cond: !CanSSA(t)
 30416  	// result: (Move {t} [t.Size()] dst src mem)
 30417  	for {
 30418  		t := auxToType(v.Aux)
 30419  		dst := v_0
 30420  		if v_1.Op != OpLoad {
 30421  			break
 30422  		}
 30423  		mem := v_1.Args[1]
 30424  		src := v_1.Args[0]
 30425  		if mem != v_2 || !(!CanSSA(t)) {
 30426  			break
 30427  		}
 30428  		v.reset(OpMove)
 30429  		v.AuxInt = int64ToAuxInt(t.Size())
 30430  		v.Aux = typeToAux(t)
 30431  		v.AddArg3(dst, src, mem)
 30432  		return true
 30433  	}
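	// For a type that cannot be kept in SSA form (e.g. a large struct), an
	// assignment like *dst = *src shows up here as a Store of a Load and is
	// lowered to a single Move of t.Size() bytes instead of materializing
	// the value.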
 30434  	// match: (Store {t} dst (Load src mem) (VarDef {x} mem))
 30435  	// cond: !CanSSA(t)
 30436  	// result: (Move {t} [t.Size()] dst src (VarDef {x} mem))
 30437  	for {
 30438  		t := auxToType(v.Aux)
 30439  		dst := v_0
 30440  		if v_1.Op != OpLoad {
 30441  			break
 30442  		}
 30443  		mem := v_1.Args[1]
 30444  		src := v_1.Args[0]
 30445  		if v_2.Op != OpVarDef {
 30446  			break
 30447  		}
 30448  		x := auxToSym(v_2.Aux)
 30449  		if mem != v_2.Args[0] || !(!CanSSA(t)) {
 30450  			break
 30451  		}
 30452  		v.reset(OpMove)
 30453  		v.AuxInt = int64ToAuxInt(t.Size())
 30454  		v.Aux = typeToAux(t)
 30455  		v0 := b.NewValue0(v.Pos, OpVarDef, types.TypeMem)
 30456  		v0.Aux = symToAux(x)
 30457  		v0.AddArg(mem)
 30458  		v.AddArg3(dst, src, v0)
 30459  		return true
 30460  	}
 30461  	// match: (Store _ (ArrayMake0) mem)
 30462  	// result: mem
 30463  	for {
 30464  		if v_1.Op != OpArrayMake0 {
 30465  			break
 30466  		}
 30467  		mem := v_2
 30468  		v.copyOf(mem)
 30469  		return true
 30470  	}
 30471  	// match: (Store dst (ArrayMake1 e) mem)
 30472  	// result: (Store {e.Type} dst e mem)
 30473  	for {
 30474  		dst := v_0
 30475  		if v_1.Op != OpArrayMake1 {
 30476  			break
 30477  		}
 30478  		e := v_1.Args[0]
 30479  		mem := v_2
 30480  		v.reset(OpStore)
 30481  		v.Aux = typeToAux(e.Type)
 30482  		v.AddArg3(dst, e, mem)
 30483  		return true
 30484  	}
 30485  	// match: (Store (SelectN [0] call:(StaticLECall _ _)) x mem:(SelectN [1] call))
 30486  	// cond: isConstZero(x) && isSameCall(call.Aux, "runtime.newobject")
 30487  	// result: mem
 30488  	for {
 30489  		if v_0.Op != OpSelectN || auxIntToInt64(v_0.AuxInt) != 0 {
 30490  			break
 30491  		}
 30492  		call := v_0.Args[0]
 30493  		if call.Op != OpStaticLECall || len(call.Args) != 2 {
 30494  			break
 30495  		}
 30496  		x := v_1
 30497  		mem := v_2
 30498  		if mem.Op != OpSelectN || auxIntToInt64(mem.AuxInt) != 1 || call != mem.Args[0] || !(isConstZero(x) && isSameCall(call.Aux, "runtime.newobject")) {
 30499  			break
 30500  		}
 30501  		v.copyOf(mem)
 30502  		return true
 30503  	}
 30504  	// match: (Store (OffPtr (SelectN [0] call:(StaticLECall _ _))) x mem:(SelectN [1] call))
 30505  	// cond: isConstZero(x) && isSameCall(call.Aux, "runtime.newobject")
 30506  	// result: mem
 30507  	for {
 30508  		if v_0.Op != OpOffPtr {
 30509  			break
 30510  		}
 30511  		v_0_0 := v_0.Args[0]
 30512  		if v_0_0.Op != OpSelectN || auxIntToInt64(v_0_0.AuxInt) != 0 {
 30513  			break
 30514  		}
 30515  		call := v_0_0.Args[0]
 30516  		if call.Op != OpStaticLECall || len(call.Args) != 2 {
 30517  			break
 30518  		}
 30519  		x := v_1
 30520  		mem := v_2
 30521  		if mem.Op != OpSelectN || auxIntToInt64(mem.AuxInt) != 1 || call != mem.Args[0] || !(isConstZero(x) && isSameCall(call.Aux, "runtime.newobject")) {
 30522  			break
 30523  		}
 30524  		v.copyOf(mem)
 30525  		return true
 30526  	}
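	// The two rules above drop stores of zero into memory freshly returned
	// by runtime.newobject (directly or through an OffPtr): the runtime
	// hands back zeroed memory, so re-zeroing a field of a new object is
	// redundant.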
 30527  	// match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [0] p2) d2 m3:(Move [n] p3 _ mem)))
 30528  	// cond: m2.Uses == 1 && m3.Uses == 1 && o1 == t2.Size() && n == t2.Size() + t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && clobber(m2, m3)
 30529  	// result: (Store {t1} op1 d1 (Store {t2} op2 d2 mem))
 30530  	for {
 30531  		t1 := auxToType(v.Aux)
 30532  		op1 := v_0
 30533  		if op1.Op != OpOffPtr {
 30534  			break
 30535  		}
 30536  		o1 := auxIntToInt64(op1.AuxInt)
 30537  		p1 := op1.Args[0]
 30538  		d1 := v_1
 30539  		m2 := v_2
 30540  		if m2.Op != OpStore {
 30541  			break
 30542  		}
 30543  		t2 := auxToType(m2.Aux)
 30544  		_ = m2.Args[2]
 30545  		op2 := m2.Args[0]
 30546  		if op2.Op != OpOffPtr || auxIntToInt64(op2.AuxInt) != 0 {
 30547  			break
 30548  		}
 30549  		p2 := op2.Args[0]
 30550  		d2 := m2.Args[1]
 30551  		m3 := m2.Args[2]
 30552  		if m3.Op != OpMove {
 30553  			break
 30554  		}
 30555  		n := auxIntToInt64(m3.AuxInt)
 30556  		mem := m3.Args[2]
 30557  		p3 := m3.Args[0]
 30558  		if !(m2.Uses == 1 && m3.Uses == 1 && o1 == t2.Size() && n == t2.Size()+t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && clobber(m2, m3)) {
 30559  			break
 30560  		}
 30561  		v.reset(OpStore)
 30562  		v.Aux = typeToAux(t1)
 30563  		v0 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 30564  		v0.Aux = typeToAux(t2)
 30565  		v0.AddArg3(op2, d2, mem)
 30566  		v.AddArg3(op1, d1, v0)
 30567  		return true
 30568  	}
 30569  	// match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [o2] p2) d2 m3:(Store {t3} op3:(OffPtr [0] p3) d3 m4:(Move [n] p4 _ mem))))
 30570  	// cond: m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && o2 == t3.Size() && o1-o2 == t2.Size() && n == t3.Size() + t2.Size() + t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && clobber(m2, m3, m4)
 30571  	// result: (Store {t1} op1 d1 (Store {t2} op2 d2 (Store {t3} op3 d3 mem)))
 30572  	for {
 30573  		t1 := auxToType(v.Aux)
 30574  		op1 := v_0
 30575  		if op1.Op != OpOffPtr {
 30576  			break
 30577  		}
 30578  		o1 := auxIntToInt64(op1.AuxInt)
 30579  		p1 := op1.Args[0]
 30580  		d1 := v_1
 30581  		m2 := v_2
 30582  		if m2.Op != OpStore {
 30583  			break
 30584  		}
 30585  		t2 := auxToType(m2.Aux)
 30586  		_ = m2.Args[2]
 30587  		op2 := m2.Args[0]
 30588  		if op2.Op != OpOffPtr {
 30589  			break
 30590  		}
 30591  		o2 := auxIntToInt64(op2.AuxInt)
 30592  		p2 := op2.Args[0]
 30593  		d2 := m2.Args[1]
 30594  		m3 := m2.Args[2]
 30595  		if m3.Op != OpStore {
 30596  			break
 30597  		}
 30598  		t3 := auxToType(m3.Aux)
 30599  		_ = m3.Args[2]
 30600  		op3 := m3.Args[0]
 30601  		if op3.Op != OpOffPtr || auxIntToInt64(op3.AuxInt) != 0 {
 30602  			break
 30603  		}
 30604  		p3 := op3.Args[0]
 30605  		d3 := m3.Args[1]
 30606  		m4 := m3.Args[2]
 30607  		if m4.Op != OpMove {
 30608  			break
 30609  		}
 30610  		n := auxIntToInt64(m4.AuxInt)
 30611  		mem := m4.Args[2]
 30612  		p4 := m4.Args[0]
 30613  		if !(m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && o2 == t3.Size() && o1-o2 == t2.Size() && n == t3.Size()+t2.Size()+t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && clobber(m2, m3, m4)) {
 30614  			break
 30615  		}
 30616  		v.reset(OpStore)
 30617  		v.Aux = typeToAux(t1)
 30618  		v0 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 30619  		v0.Aux = typeToAux(t2)
 30620  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 30621  		v1.Aux = typeToAux(t3)
 30622  		v1.AddArg3(op3, d3, mem)
 30623  		v0.AddArg3(op2, d2, v1)
 30624  		v.AddArg3(op1, d1, v0)
 30625  		return true
 30626  	}
 30627  	// match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [o2] p2) d2 m3:(Store {t3} op3:(OffPtr [o3] p3) d3 m4:(Store {t4} op4:(OffPtr [0] p4) d4 m5:(Move [n] p5 _ mem)))))
 30628  	// cond: m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && m5.Uses == 1 && o3 == t4.Size() && o2-o3 == t3.Size() && o1-o2 == t2.Size() && n == t4.Size() + t3.Size() + t2.Size() + t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && clobber(m2, m3, m4, m5)
 30629  	// result: (Store {t1} op1 d1 (Store {t2} op2 d2 (Store {t3} op3 d3 (Store {t4} op4 d4 mem))))
 30630  	for {
 30631  		t1 := auxToType(v.Aux)
 30632  		op1 := v_0
 30633  		if op1.Op != OpOffPtr {
 30634  			break
 30635  		}
 30636  		o1 := auxIntToInt64(op1.AuxInt)
 30637  		p1 := op1.Args[0]
 30638  		d1 := v_1
 30639  		m2 := v_2
 30640  		if m2.Op != OpStore {
 30641  			break
 30642  		}
 30643  		t2 := auxToType(m2.Aux)
 30644  		_ = m2.Args[2]
 30645  		op2 := m2.Args[0]
 30646  		if op2.Op != OpOffPtr {
 30647  			break
 30648  		}
 30649  		o2 := auxIntToInt64(op2.AuxInt)
 30650  		p2 := op2.Args[0]
 30651  		d2 := m2.Args[1]
 30652  		m3 := m2.Args[2]
 30653  		if m3.Op != OpStore {
 30654  			break
 30655  		}
 30656  		t3 := auxToType(m3.Aux)
 30657  		_ = m3.Args[2]
 30658  		op3 := m3.Args[0]
 30659  		if op3.Op != OpOffPtr {
 30660  			break
 30661  		}
 30662  		o3 := auxIntToInt64(op3.AuxInt)
 30663  		p3 := op3.Args[0]
 30664  		d3 := m3.Args[1]
 30665  		m4 := m3.Args[2]
 30666  		if m4.Op != OpStore {
 30667  			break
 30668  		}
 30669  		t4 := auxToType(m4.Aux)
 30670  		_ = m4.Args[2]
 30671  		op4 := m4.Args[0]
 30672  		if op4.Op != OpOffPtr || auxIntToInt64(op4.AuxInt) != 0 {
 30673  			break
 30674  		}
 30675  		p4 := op4.Args[0]
 30676  		d4 := m4.Args[1]
 30677  		m5 := m4.Args[2]
 30678  		if m5.Op != OpMove {
 30679  			break
 30680  		}
 30681  		n := auxIntToInt64(m5.AuxInt)
 30682  		mem := m5.Args[2]
 30683  		p5 := m5.Args[0]
 30684  		if !(m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && m5.Uses == 1 && o3 == t4.Size() && o2-o3 == t3.Size() && o1-o2 == t2.Size() && n == t4.Size()+t3.Size()+t2.Size()+t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && clobber(m2, m3, m4, m5)) {
 30685  			break
 30686  		}
 30687  		v.reset(OpStore)
 30688  		v.Aux = typeToAux(t1)
 30689  		v0 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 30690  		v0.Aux = typeToAux(t2)
 30691  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 30692  		v1.Aux = typeToAux(t3)
 30693  		v2 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 30694  		v2.Aux = typeToAux(t4)
 30695  		v2.AddArg3(op4, d4, mem)
 30696  		v1.AddArg3(op3, d3, v2)
 30697  		v0.AddArg3(op2, d2, v1)
 30698  		v.AddArg3(op1, d1, v0)
 30699  		return true
 30700  	}
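	// In the three rules above, a chain of two to four OffPtr stores exactly
	// tiles the n bytes written by the matched Move (the offsets must step
	// down by the store sizes and sum to n), so the Move is dead: the stores
	// are rebuilt directly on top of the Move's incoming memory. The rules
	// below apply the same idea when the region was produced by a Zero op
	// instead of a Move.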
 30701  	// match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [0] p2) d2 m3:(Zero [n] p3 mem)))
 30702  	// cond: m2.Uses == 1 && m3.Uses == 1 && o1 == t2.Size() && n == t2.Size() + t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && clobber(m2, m3)
 30703  	// result: (Store {t1} op1 d1 (Store {t2} op2 d2 mem))
 30704  	for {
 30705  		t1 := auxToType(v.Aux)
 30706  		op1 := v_0
 30707  		if op1.Op != OpOffPtr {
 30708  			break
 30709  		}
 30710  		o1 := auxIntToInt64(op1.AuxInt)
 30711  		p1 := op1.Args[0]
 30712  		d1 := v_1
 30713  		m2 := v_2
 30714  		if m2.Op != OpStore {
 30715  			break
 30716  		}
 30717  		t2 := auxToType(m2.Aux)
 30718  		_ = m2.Args[2]
 30719  		op2 := m2.Args[0]
 30720  		if op2.Op != OpOffPtr || auxIntToInt64(op2.AuxInt) != 0 {
 30721  			break
 30722  		}
 30723  		p2 := op2.Args[0]
 30724  		d2 := m2.Args[1]
 30725  		m3 := m2.Args[2]
 30726  		if m3.Op != OpZero {
 30727  			break
 30728  		}
 30729  		n := auxIntToInt64(m3.AuxInt)
 30730  		mem := m3.Args[1]
 30731  		p3 := m3.Args[0]
 30732  		if !(m2.Uses == 1 && m3.Uses == 1 && o1 == t2.Size() && n == t2.Size()+t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && clobber(m2, m3)) {
 30733  			break
 30734  		}
 30735  		v.reset(OpStore)
 30736  		v.Aux = typeToAux(t1)
 30737  		v0 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 30738  		v0.Aux = typeToAux(t2)
 30739  		v0.AddArg3(op2, d2, mem)
 30740  		v.AddArg3(op1, d1, v0)
 30741  		return true
 30742  	}
 30743  	// match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [o2] p2) d2 m3:(Store {t3} op3:(OffPtr [0] p3) d3 m4:(Zero [n] p4 mem))))
 30744  	// cond: m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && o2 == t3.Size() && o1-o2 == t2.Size() && n == t3.Size() + t2.Size() + t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && clobber(m2, m3, m4)
 30745  	// result: (Store {t1} op1 d1 (Store {t2} op2 d2 (Store {t3} op3 d3 mem)))
 30746  	for {
 30747  		t1 := auxToType(v.Aux)
 30748  		op1 := v_0
 30749  		if op1.Op != OpOffPtr {
 30750  			break
 30751  		}
 30752  		o1 := auxIntToInt64(op1.AuxInt)
 30753  		p1 := op1.Args[0]
 30754  		d1 := v_1
 30755  		m2 := v_2
 30756  		if m2.Op != OpStore {
 30757  			break
 30758  		}
 30759  		t2 := auxToType(m2.Aux)
 30760  		_ = m2.Args[2]
 30761  		op2 := m2.Args[0]
 30762  		if op2.Op != OpOffPtr {
 30763  			break
 30764  		}
 30765  		o2 := auxIntToInt64(op2.AuxInt)
 30766  		p2 := op2.Args[0]
 30767  		d2 := m2.Args[1]
 30768  		m3 := m2.Args[2]
 30769  		if m3.Op != OpStore {
 30770  			break
 30771  		}
 30772  		t3 := auxToType(m3.Aux)
 30773  		_ = m3.Args[2]
 30774  		op3 := m3.Args[0]
 30775  		if op3.Op != OpOffPtr || auxIntToInt64(op3.AuxInt) != 0 {
 30776  			break
 30777  		}
 30778  		p3 := op3.Args[0]
 30779  		d3 := m3.Args[1]
 30780  		m4 := m3.Args[2]
 30781  		if m4.Op != OpZero {
 30782  			break
 30783  		}
 30784  		n := auxIntToInt64(m4.AuxInt)
 30785  		mem := m4.Args[1]
 30786  		p4 := m4.Args[0]
 30787  		if !(m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && o2 == t3.Size() && o1-o2 == t2.Size() && n == t3.Size()+t2.Size()+t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && clobber(m2, m3, m4)) {
 30788  			break
 30789  		}
 30790  		v.reset(OpStore)
 30791  		v.Aux = typeToAux(t1)
 30792  		v0 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 30793  		v0.Aux = typeToAux(t2)
 30794  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 30795  		v1.Aux = typeToAux(t3)
 30796  		v1.AddArg3(op3, d3, mem)
 30797  		v0.AddArg3(op2, d2, v1)
 30798  		v.AddArg3(op1, d1, v0)
 30799  		return true
 30800  	}
 30801  	// match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [o2] p2) d2 m3:(Store {t3} op3:(OffPtr [o3] p3) d3 m4:(Store {t4} op4:(OffPtr [0] p4) d4 m5:(Zero [n] p5 mem)))))
 30802  	// cond: m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && m5.Uses == 1 && o3 == t4.Size() && o2-o3 == t3.Size() && o1-o2 == t2.Size() && n == t4.Size() + t3.Size() + t2.Size() + t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && clobber(m2, m3, m4, m5)
 30803  	// result: (Store {t1} op1 d1 (Store {t2} op2 d2 (Store {t3} op3 d3 (Store {t4} op4 d4 mem))))
 30804  	for {
 30805  		t1 := auxToType(v.Aux)
 30806  		op1 := v_0
 30807  		if op1.Op != OpOffPtr {
 30808  			break
 30809  		}
 30810  		o1 := auxIntToInt64(op1.AuxInt)
 30811  		p1 := op1.Args[0]
 30812  		d1 := v_1
 30813  		m2 := v_2
 30814  		if m2.Op != OpStore {
 30815  			break
 30816  		}
 30817  		t2 := auxToType(m2.Aux)
 30818  		_ = m2.Args[2]
 30819  		op2 := m2.Args[0]
 30820  		if op2.Op != OpOffPtr {
 30821  			break
 30822  		}
 30823  		o2 := auxIntToInt64(op2.AuxInt)
 30824  		p2 := op2.Args[0]
 30825  		d2 := m2.Args[1]
 30826  		m3 := m2.Args[2]
 30827  		if m3.Op != OpStore {
 30828  			break
 30829  		}
 30830  		t3 := auxToType(m3.Aux)
 30831  		_ = m3.Args[2]
 30832  		op3 := m3.Args[0]
 30833  		if op3.Op != OpOffPtr {
 30834  			break
 30835  		}
 30836  		o3 := auxIntToInt64(op3.AuxInt)
 30837  		p3 := op3.Args[0]
 30838  		d3 := m3.Args[1]
 30839  		m4 := m3.Args[2]
 30840  		if m4.Op != OpStore {
 30841  			break
 30842  		}
 30843  		t4 := auxToType(m4.Aux)
 30844  		_ = m4.Args[2]
 30845  		op4 := m4.Args[0]
 30846  		if op4.Op != OpOffPtr || auxIntToInt64(op4.AuxInt) != 0 {
 30847  			break
 30848  		}
 30849  		p4 := op4.Args[0]
 30850  		d4 := m4.Args[1]
 30851  		m5 := m4.Args[2]
 30852  		if m5.Op != OpZero {
 30853  			break
 30854  		}
 30855  		n := auxIntToInt64(m5.AuxInt)
 30856  		mem := m5.Args[1]
 30857  		p5 := m5.Args[0]
 30858  		if !(m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && m5.Uses == 1 && o3 == t4.Size() && o2-o3 == t3.Size() && o1-o2 == t2.Size() && n == t4.Size()+t3.Size()+t2.Size()+t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && clobber(m2, m3, m4, m5)) {
 30859  			break
 30860  		}
 30861  		v.reset(OpStore)
 30862  		v.Aux = typeToAux(t1)
 30863  		v0 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 30864  		v0.Aux = typeToAux(t2)
 30865  		v1 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 30866  		v1.Aux = typeToAux(t3)
 30867  		v2 := b.NewValue0(v.Pos, OpStore, types.TypeMem)
 30868  		v2.Aux = typeToAux(t4)
 30869  		v2.AddArg3(op4, d4, mem)
 30870  		v1.AddArg3(op3, d3, v2)
 30871  		v0.AddArg3(op2, d2, v1)
 30872  		v.AddArg3(op1, d1, v0)
 30873  		return true
 30874  	}
 30875  	return false
 30876  }
 30877  func rewriteValuegeneric_OpStringLen(v *Value) bool {
 30878  	v_0 := v.Args[0]
 30879  	// match: (StringLen (StringMake _ (Const64 <t> [c])))
 30880  	// result: (Const64 <t> [c])
 30881  	for {
 30882  		if v_0.Op != OpStringMake {
 30883  			break
 30884  		}
 30885  		_ = v_0.Args[1]
 30886  		v_0_1 := v_0.Args[1]
 30887  		if v_0_1.Op != OpConst64 {
 30888  			break
 30889  		}
 30890  		t := v_0_1.Type
 30891  		c := auxIntToInt64(v_0_1.AuxInt)
 30892  		v.reset(OpConst64)
 30893  		v.Type = t
 30894  		v.AuxInt = int64ToAuxInt(c)
 30895  		return true
 30896  	}
 30897  	return false
 30898  }
 30899  func rewriteValuegeneric_OpStringPtr(v *Value) bool {
 30900  	v_0 := v.Args[0]
 30901  	// match: (StringPtr (StringMake (Addr <t> {s} base) _))
 30902  	// result: (Addr <t> {s} base)
 30903  	for {
 30904  		if v_0.Op != OpStringMake {
 30905  			break
 30906  		}
 30907  		v_0_0 := v_0.Args[0]
 30908  		if v_0_0.Op != OpAddr {
 30909  			break
 30910  		}
 30911  		t := v_0_0.Type
 30912  		s := auxToSym(v_0_0.Aux)
 30913  		base := v_0_0.Args[0]
 30914  		v.reset(OpAddr)
 30915  		v.Type = t
 30916  		v.Aux = symToAux(s)
 30917  		v.AddArg(base)
 30918  		return true
 30919  	}
 30920  	return false
 30921  }
 30922  func rewriteValuegeneric_OpStructSelect(v *Value) bool {
 30923  	v_0 := v.Args[0]
 30924  	b := v.Block
 30925  	// match: (StructSelect [i] x:(StructMake ___))
 30926  	// result: x.Args[i]
 30927  	for {
 30928  		i := auxIntToInt64(v.AuxInt)
 30929  		x := v_0
 30930  		if x.Op != OpStructMake {
 30931  			break
 30932  		}
 30933  		v.copyOf(x.Args[i])
 30934  		return true
 30935  	}
 30936  	// match: (StructSelect [i] x:(Load <t> ptr mem))
 30937  	// cond: !CanSSA(t)
 30938  	// result: @x.Block (Load <v.Type> (OffPtr <v.Type.PtrTo()> [t.FieldOff(int(i))] ptr) mem)
 30939  	for {
 30940  		i := auxIntToInt64(v.AuxInt)
 30941  		x := v_0
 30942  		if x.Op != OpLoad {
 30943  			break
 30944  		}
 30945  		t := x.Type
 30946  		mem := x.Args[1]
 30947  		ptr := x.Args[0]
 30948  		if !(!CanSSA(t)) {
 30949  			break
 30950  		}
 30951  		b = x.Block
 30952  		v0 := b.NewValue0(v.Pos, OpLoad, v.Type)
 30953  		v.copyOf(v0)
 30954  		v1 := b.NewValue0(v.Pos, OpOffPtr, v.Type.PtrTo())
 30955  		v1.AuxInt = int64ToAuxInt(t.FieldOff(int(i)))
 30956  		v1.AddArg(ptr)
 30957  		v0.AddArg2(v1, mem)
 30958  		return true
 30959  	}
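	// The rule above narrows a field access on a struct that cannot be SSA'd:
	// rather than loading the whole struct and then selecting field i, it
	// emits a load of just that field through an OffPtr at t.FieldOff(i),
	// placed in the block of the original Load (the @x.Block form).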
 30960  	// match: (StructSelect [0] (IData x))
 30961  	// result: (IData x)
 30962  	for {
 30963  		if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpIData {
 30964  			break
 30965  		}
 30966  		x := v_0.Args[0]
 30967  		v.reset(OpIData)
 30968  		v.AddArg(x)
 30969  		return true
 30970  	}
 30971  	return false
 30972  }
 30973  func rewriteValuegeneric_OpSub16(v *Value) bool {
 30974  	v_1 := v.Args[1]
 30975  	v_0 := v.Args[0]
 30976  	b := v.Block
 30977  	// match: (Sub16 (Const16 [c]) (Const16 [d]))
 30978  	// result: (Const16 [c-d])
 30979  	for {
 30980  		if v_0.Op != OpConst16 {
 30981  			break
 30982  		}
 30983  		c := auxIntToInt16(v_0.AuxInt)
 30984  		if v_1.Op != OpConst16 {
 30985  			break
 30986  		}
 30987  		d := auxIntToInt16(v_1.AuxInt)
 30988  		v.reset(OpConst16)
 30989  		v.AuxInt = int16ToAuxInt(c - d)
 30990  		return true
 30991  	}
 30992  	// match: (Sub16 x (Const16 <t> [c]))
 30993  	// cond: x.Op != OpConst16
 30994  	// result: (Add16 (Const16 <t> [-c]) x)
 30995  	for {
 30996  		x := v_0
 30997  		if v_1.Op != OpConst16 {
 30998  			break
 30999  		}
 31000  		t := v_1.Type
 31001  		c := auxIntToInt16(v_1.AuxInt)
 31002  		if !(x.Op != OpConst16) {
 31003  			break
 31004  		}
 31005  		v.reset(OpAdd16)
 31006  		v0 := b.NewValue0(v.Pos, OpConst16, t)
 31007  		v0.AuxInt = int16ToAuxInt(-c)
 31008  		v.AddArg2(v0, x)
 31009  		return true
 31010  	}
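	// Subtracting a constant is rewritten as adding its negation, presumably
	// so that later rules need only recognize the Add-with-constant form;
	// the x.Op != OpConst16 guard leaves fully constant operands to the
	// constant-folding rule above.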
 31011  	// match: (Sub16 <t> (Mul16 x y) (Mul16 x z))
 31012  	// result: (Mul16 x (Sub16 <t> y z))
 31013  	for {
 31014  		t := v.Type
 31015  		if v_0.Op != OpMul16 {
 31016  			break
 31017  		}
 31018  		_ = v_0.Args[1]
 31019  		v_0_0 := v_0.Args[0]
 31020  		v_0_1 := v_0.Args[1]
 31021  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 31022  			x := v_0_0
 31023  			y := v_0_1
 31024  			if v_1.Op != OpMul16 {
 31025  				continue
 31026  			}
 31027  			_ = v_1.Args[1]
 31028  			v_1_0 := v_1.Args[0]
 31029  			v_1_1 := v_1.Args[1]
 31030  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 31031  				if x != v_1_0 {
 31032  					continue
 31033  				}
 31034  				z := v_1_1
 31035  				v.reset(OpMul16)
 31036  				v0 := b.NewValue0(v.Pos, OpSub16, t)
 31037  				v0.AddArg2(y, z)
 31038  				v.AddArg2(x, v0)
 31039  				return true
 31040  			}
 31041  		}
 31042  		break
 31043  	}
 31044  	// match: (Sub16 x x)
 31045  	// result: (Const16 [0])
 31046  	for {
 31047  		x := v_0
 31048  		if x != v_1 {
 31049  			break
 31050  		}
 31051  		v.reset(OpConst16)
 31052  		v.AuxInt = int16ToAuxInt(0)
 31053  		return true
 31054  	}
 31055  	// match: (Sub16 (Neg16 x) (Com16 x))
 31056  	// result: (Const16 [1])
 31057  	for {
 31058  		if v_0.Op != OpNeg16 {
 31059  			break
 31060  		}
 31061  		x := v_0.Args[0]
 31062  		if v_1.Op != OpCom16 || x != v_1.Args[0] {
 31063  			break
 31064  		}
 31065  		v.reset(OpConst16)
 31066  		v.AuxInt = int16ToAuxInt(1)
 31067  		return true
 31068  	}
 31069  	// match: (Sub16 (Com16 x) (Neg16 x))
 31070  	// result: (Const16 [-1])
 31071  	for {
 31072  		if v_0.Op != OpCom16 {
 31073  			break
 31074  		}
 31075  		x := v_0.Args[0]
 31076  		if v_1.Op != OpNeg16 || x != v_1.Args[0] {
 31077  			break
 31078  		}
 31079  		v.reset(OpConst16)
 31080  		v.AuxInt = int16ToAuxInt(-1)
 31081  		return true
 31082  	}
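	// The two rules above rely on the two's-complement identity ^x == -x-1:
	// (-x) - (^x) == (-x) - (-x-1) == 1, and symmetrically
	// (^x) - (-x) == -1.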
 31083  	// match: (Sub16 (Add16 t x) (Add16 t y))
 31084  	// result: (Sub16 x y)
 31085  	for {
 31086  		if v_0.Op != OpAdd16 {
 31087  			break
 31088  		}
 31089  		_ = v_0.Args[1]
 31090  		v_0_0 := v_0.Args[0]
 31091  		v_0_1 := v_0.Args[1]
 31092  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 31093  			t := v_0_0
 31094  			x := v_0_1
 31095  			if v_1.Op != OpAdd16 {
 31096  				continue
 31097  			}
 31098  			_ = v_1.Args[1]
 31099  			v_1_0 := v_1.Args[0]
 31100  			v_1_1 := v_1.Args[1]
 31101  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 31102  				if t != v_1_0 {
 31103  					continue
 31104  				}
 31105  				y := v_1_1
 31106  				v.reset(OpSub16)
 31107  				v.AddArg2(x, y)
 31108  				return true
 31109  			}
 31110  		}
 31111  		break
 31112  	}
 31113  	// match: (Sub16 (Add16 x y) x)
 31114  	// result: y
 31115  	for {
 31116  		if v_0.Op != OpAdd16 {
 31117  			break
 31118  		}
 31119  		_ = v_0.Args[1]
 31120  		v_0_0 := v_0.Args[0]
 31121  		v_0_1 := v_0.Args[1]
 31122  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 31123  			x := v_0_0
 31124  			y := v_0_1
 31125  			if x != v_1 {
 31126  				continue
 31127  			}
 31128  			v.copyOf(y)
 31129  			return true
 31130  		}
 31131  		break
 31132  	}
 31133  	// match: (Sub16 (Add16 x y) y)
 31134  	// result: x
 31135  	for {
 31136  		if v_0.Op != OpAdd16 {
 31137  			break
 31138  		}
 31139  		_ = v_0.Args[1]
 31140  		v_0_0 := v_0.Args[0]
 31141  		v_0_1 := v_0.Args[1]
 31142  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 31143  			x := v_0_0
 31144  			y := v_0_1
 31145  			if y != v_1 {
 31146  				continue
 31147  			}
 31148  			v.copyOf(x)
 31149  			return true
 31150  		}
 31151  		break
 31152  	}
 31153  	// match: (Sub16 (Sub16 x y) x)
 31154  	// result: (Neg16 y)
 31155  	for {
 31156  		if v_0.Op != OpSub16 {
 31157  			break
 31158  		}
 31159  		y := v_0.Args[1]
 31160  		x := v_0.Args[0]
 31161  		if x != v_1 {
 31162  			break
 31163  		}
 31164  		v.reset(OpNeg16)
 31165  		v.AddArg(y)
 31166  		return true
 31167  	}
 31168  	// match: (Sub16 x (Add16 x y))
 31169  	// result: (Neg16 y)
 31170  	for {
 31171  		x := v_0
 31172  		if v_1.Op != OpAdd16 {
 31173  			break
 31174  		}
 31175  		_ = v_1.Args[1]
 31176  		v_1_0 := v_1.Args[0]
 31177  		v_1_1 := v_1.Args[1]
 31178  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 31179  			if x != v_1_0 {
 31180  				continue
 31181  			}
 31182  			y := v_1_1
 31183  			v.reset(OpNeg16)
 31184  			v.AddArg(y)
 31185  			return true
 31186  		}
 31187  		break
 31188  	}
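	// The next several rules reassociate mixed constant/non-constant
	// expressions so the constant moves outward where it can later be folded
	// with other constants: for example x - (c - z) becomes (x + z) - c, and
	// x - (z + c) becomes (x - z) - c.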
 31189  	// match: (Sub16 x (Sub16 i:(Const16 <t>) z))
 31190  	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
 31191  	// result: (Sub16 (Add16 <t> x z) i)
 31192  	for {
 31193  		x := v_0
 31194  		if v_1.Op != OpSub16 {
 31195  			break
 31196  		}
 31197  		z := v_1.Args[1]
 31198  		i := v_1.Args[0]
 31199  		if i.Op != OpConst16 {
 31200  			break
 31201  		}
 31202  		t := i.Type
 31203  		if !(z.Op != OpConst16 && x.Op != OpConst16) {
 31204  			break
 31205  		}
 31206  		v.reset(OpSub16)
 31207  		v0 := b.NewValue0(v.Pos, OpAdd16, t)
 31208  		v0.AddArg2(x, z)
 31209  		v.AddArg2(v0, i)
 31210  		return true
 31211  	}
 31212  	// match: (Sub16 x (Add16 z i:(Const16 <t>)))
 31213  	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
 31214  	// result: (Sub16 (Sub16 <t> x z) i)
 31215  	for {
 31216  		x := v_0
 31217  		if v_1.Op != OpAdd16 {
 31218  			break
 31219  		}
 31220  		_ = v_1.Args[1]
 31221  		v_1_0 := v_1.Args[0]
 31222  		v_1_1 := v_1.Args[1]
 31223  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 31224  			z := v_1_0
 31225  			i := v_1_1
 31226  			if i.Op != OpConst16 {
 31227  				continue
 31228  			}
 31229  			t := i.Type
 31230  			if !(z.Op != OpConst16 && x.Op != OpConst16) {
 31231  				continue
 31232  			}
 31233  			v.reset(OpSub16)
 31234  			v0 := b.NewValue0(v.Pos, OpSub16, t)
 31235  			v0.AddArg2(x, z)
 31236  			v.AddArg2(v0, i)
 31237  			return true
 31238  		}
 31239  		break
 31240  	}
 31241  	// match: (Sub16 (Sub16 i:(Const16 <t>) z) x)
 31242  	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
 31243  	// result: (Sub16 i (Add16 <t> z x))
 31244  	for {
 31245  		if v_0.Op != OpSub16 {
 31246  			break
 31247  		}
 31248  		z := v_0.Args[1]
 31249  		i := v_0.Args[0]
 31250  		if i.Op != OpConst16 {
 31251  			break
 31252  		}
 31253  		t := i.Type
 31254  		x := v_1
 31255  		if !(z.Op != OpConst16 && x.Op != OpConst16) {
 31256  			break
 31257  		}
 31258  		v.reset(OpSub16)
 31259  		v0 := b.NewValue0(v.Pos, OpAdd16, t)
 31260  		v0.AddArg2(z, x)
 31261  		v.AddArg2(i, v0)
 31262  		return true
 31263  	}
 31264  	// match: (Sub16 (Add16 z i:(Const16 <t>)) x)
 31265  	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
 31266  	// result: (Add16 i (Sub16 <t> z x))
 31267  	for {
 31268  		if v_0.Op != OpAdd16 {
 31269  			break
 31270  		}
 31271  		_ = v_0.Args[1]
 31272  		v_0_0 := v_0.Args[0]
 31273  		v_0_1 := v_0.Args[1]
 31274  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 31275  			z := v_0_0
 31276  			i := v_0_1
 31277  			if i.Op != OpConst16 {
 31278  				continue
 31279  			}
 31280  			t := i.Type
 31281  			x := v_1
 31282  			if !(z.Op != OpConst16 && x.Op != OpConst16) {
 31283  				continue
 31284  			}
 31285  			v.reset(OpAdd16)
 31286  			v0 := b.NewValue0(v.Pos, OpSub16, t)
 31287  			v0.AddArg2(z, x)
 31288  			v.AddArg2(i, v0)
 31289  			return true
 31290  		}
 31291  		break
 31292  	}
 31293  	// match: (Sub16 (Const16 <t> [c]) (Sub16 (Const16 <t> [d]) x))
 31294  	// result: (Add16 (Const16 <t> [c-d]) x)
 31295  	for {
 31296  		if v_0.Op != OpConst16 {
 31297  			break
 31298  		}
 31299  		t := v_0.Type
 31300  		c := auxIntToInt16(v_0.AuxInt)
 31301  		if v_1.Op != OpSub16 {
 31302  			break
 31303  		}
 31304  		x := v_1.Args[1]
 31305  		v_1_0 := v_1.Args[0]
 31306  		if v_1_0.Op != OpConst16 || v_1_0.Type != t {
 31307  			break
 31308  		}
 31309  		d := auxIntToInt16(v_1_0.AuxInt)
 31310  		v.reset(OpAdd16)
 31311  		v0 := b.NewValue0(v.Pos, OpConst16, t)
 31312  		v0.AuxInt = int16ToAuxInt(c - d)
 31313  		v.AddArg2(v0, x)
 31314  		return true
 31315  	}
 31316  	// match: (Sub16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x))
 31317  	// result: (Sub16 (Const16 <t> [c-d]) x)
 31318  	for {
 31319  		if v_0.Op != OpConst16 {
 31320  			break
 31321  		}
 31322  		t := v_0.Type
 31323  		c := auxIntToInt16(v_0.AuxInt)
 31324  		if v_1.Op != OpAdd16 {
 31325  			break
 31326  		}
 31327  		_ = v_1.Args[1]
 31328  		v_1_0 := v_1.Args[0]
 31329  		v_1_1 := v_1.Args[1]
 31330  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 31331  			if v_1_0.Op != OpConst16 || v_1_0.Type != t {
 31332  				continue
 31333  			}
 31334  			d := auxIntToInt16(v_1_0.AuxInt)
 31335  			x := v_1_1
 31336  			v.reset(OpSub16)
 31337  			v0 := b.NewValue0(v.Pos, OpConst16, t)
 31338  			v0.AuxInt = int16ToAuxInt(c - d)
 31339  			v.AddArg2(v0, x)
 31340  			return true
 31341  		}
 31342  		break
 31343  	}
 31344  	return false
 31345  }
 31346  func rewriteValuegeneric_OpSub32(v *Value) bool {
 31347  	v_1 := v.Args[1]
 31348  	v_0 := v.Args[0]
 31349  	b := v.Block
 31350  	// match: (Sub32 (Const32 [c]) (Const32 [d]))
 31351  	// result: (Const32 [c-d])
 31352  	for {
 31353  		if v_0.Op != OpConst32 {
 31354  			break
 31355  		}
 31356  		c := auxIntToInt32(v_0.AuxInt)
 31357  		if v_1.Op != OpConst32 {
 31358  			break
 31359  		}
 31360  		d := auxIntToInt32(v_1.AuxInt)
 31361  		v.reset(OpConst32)
 31362  		v.AuxInt = int32ToAuxInt(c - d)
 31363  		return true
 31364  	}
 31365  	// match: (Sub32 x (Const32 <t> [c]))
 31366  	// cond: x.Op != OpConst32
 31367  	// result: (Add32 (Const32 <t> [-c]) x)
 31368  	for {
 31369  		x := v_0
 31370  		if v_1.Op != OpConst32 {
 31371  			break
 31372  		}
 31373  		t := v_1.Type
 31374  		c := auxIntToInt32(v_1.AuxInt)
 31375  		if !(x.Op != OpConst32) {
 31376  			break
 31377  		}
 31378  		v.reset(OpAdd32)
 31379  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 31380  		v0.AuxInt = int32ToAuxInt(-c)
 31381  		v.AddArg2(v0, x)
 31382  		return true
 31383  	}
 31384  	// match: (Sub32 <t> (Mul32 x y) (Mul32 x z))
 31385  	// result: (Mul32 x (Sub32 <t> y z))
 31386  	for {
 31387  		t := v.Type
 31388  		if v_0.Op != OpMul32 {
 31389  			break
 31390  		}
 31391  		_ = v_0.Args[1]
 31392  		v_0_0 := v_0.Args[0]
 31393  		v_0_1 := v_0.Args[1]
 31394  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 31395  			x := v_0_0
 31396  			y := v_0_1
 31397  			if v_1.Op != OpMul32 {
 31398  				continue
 31399  			}
 31400  			_ = v_1.Args[1]
 31401  			v_1_0 := v_1.Args[0]
 31402  			v_1_1 := v_1.Args[1]
 31403  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 31404  				if x != v_1_0 {
 31405  					continue
 31406  				}
 31407  				z := v_1_1
 31408  				v.reset(OpMul32)
 31409  				v0 := b.NewValue0(v.Pos, OpSub32, t)
 31410  				v0.AddArg2(y, z)
 31411  				v.AddArg2(x, v0)
 31412  				return true
 31413  			}
 31414  		}
 31415  		break
 31416  	}
 31417  	// match: (Sub32 x x)
 31418  	// result: (Const32 [0])
 31419  	for {
 31420  		x := v_0
 31421  		if x != v_1 {
 31422  			break
 31423  		}
 31424  		v.reset(OpConst32)
 31425  		v.AuxInt = int32ToAuxInt(0)
 31426  		return true
 31427  	}
 31428  	// match: (Sub32 (Neg32 x) (Com32 x))
 31429  	// result: (Const32 [1])
 31430  	for {
 31431  		if v_0.Op != OpNeg32 {
 31432  			break
 31433  		}
 31434  		x := v_0.Args[0]
 31435  		if v_1.Op != OpCom32 || x != v_1.Args[0] {
 31436  			break
 31437  		}
 31438  		v.reset(OpConst32)
 31439  		v.AuxInt = int32ToAuxInt(1)
 31440  		return true
 31441  	}
 31442  	// match: (Sub32 (Com32 x) (Neg32 x))
 31443  	// result: (Const32 [-1])
 31444  	for {
 31445  		if v_0.Op != OpCom32 {
 31446  			break
 31447  		}
 31448  		x := v_0.Args[0]
 31449  		if v_1.Op != OpNeg32 || x != v_1.Args[0] {
 31450  			break
 31451  		}
 31452  		v.reset(OpConst32)
 31453  		v.AuxInt = int32ToAuxInt(-1)
 31454  		return true
 31455  	}
 31456  	// match: (Sub32 (Add32 t x) (Add32 t y))
 31457  	// result: (Sub32 x y)
 31458  	for {
 31459  		if v_0.Op != OpAdd32 {
 31460  			break
 31461  		}
 31462  		_ = v_0.Args[1]
 31463  		v_0_0 := v_0.Args[0]
 31464  		v_0_1 := v_0.Args[1]
 31465  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 31466  			t := v_0_0
 31467  			x := v_0_1
 31468  			if v_1.Op != OpAdd32 {
 31469  				continue
 31470  			}
 31471  			_ = v_1.Args[1]
 31472  			v_1_0 := v_1.Args[0]
 31473  			v_1_1 := v_1.Args[1]
 31474  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 31475  				if t != v_1_0 {
 31476  					continue
 31477  				}
 31478  				y := v_1_1
 31479  				v.reset(OpSub32)
 31480  				v.AddArg2(x, y)
 31481  				return true
 31482  			}
 31483  		}
 31484  		break
 31485  	}
 31486  	// match: (Sub32 (Add32 x y) x)
 31487  	// result: y
 31488  	for {
 31489  		if v_0.Op != OpAdd32 {
 31490  			break
 31491  		}
 31492  		_ = v_0.Args[1]
 31493  		v_0_0 := v_0.Args[0]
 31494  		v_0_1 := v_0.Args[1]
 31495  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 31496  			x := v_0_0
 31497  			y := v_0_1
 31498  			if x != v_1 {
 31499  				continue
 31500  			}
 31501  			v.copyOf(y)
 31502  			return true
 31503  		}
 31504  		break
 31505  	}
 31506  	// match: (Sub32 (Add32 x y) y)
 31507  	// result: x
 31508  	for {
 31509  		if v_0.Op != OpAdd32 {
 31510  			break
 31511  		}
 31512  		_ = v_0.Args[1]
 31513  		v_0_0 := v_0.Args[0]
 31514  		v_0_1 := v_0.Args[1]
 31515  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 31516  			x := v_0_0
 31517  			y := v_0_1
 31518  			if y != v_1 {
 31519  				continue
 31520  			}
 31521  			v.copyOf(x)
 31522  			return true
 31523  		}
 31524  		break
 31525  	}
 31526  	// match: (Sub32 (Sub32 x y) x)
 31527  	// result: (Neg32 y)
 31528  	for {
 31529  		if v_0.Op != OpSub32 {
 31530  			break
 31531  		}
 31532  		y := v_0.Args[1]
 31533  		x := v_0.Args[0]
 31534  		if x != v_1 {
 31535  			break
 31536  		}
 31537  		v.reset(OpNeg32)
 31538  		v.AddArg(y)
 31539  		return true
 31540  	}
 31541  	// match: (Sub32 x (Add32 x y))
 31542  	// result: (Neg32 y)
 31543  	for {
 31544  		x := v_0
 31545  		if v_1.Op != OpAdd32 {
 31546  			break
 31547  		}
 31548  		_ = v_1.Args[1]
 31549  		v_1_0 := v_1.Args[0]
 31550  		v_1_1 := v_1.Args[1]
 31551  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 31552  			if x != v_1_0 {
 31553  				continue
 31554  			}
 31555  			y := v_1_1
 31556  			v.reset(OpNeg32)
 31557  			v.AddArg(y)
 31558  			return true
 31559  		}
 31560  		break
 31561  	}
 31562  	// match: (Sub32 x (Sub32 i:(Const32 <t>) z))
 31563  	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
 31564  	// result: (Sub32 (Add32 <t> x z) i)
 31565  	for {
 31566  		x := v_0
 31567  		if v_1.Op != OpSub32 {
 31568  			break
 31569  		}
 31570  		z := v_1.Args[1]
 31571  		i := v_1.Args[0]
 31572  		if i.Op != OpConst32 {
 31573  			break
 31574  		}
 31575  		t := i.Type
 31576  		if !(z.Op != OpConst32 && x.Op != OpConst32) {
 31577  			break
 31578  		}
 31579  		v.reset(OpSub32)
 31580  		v0 := b.NewValue0(v.Pos, OpAdd32, t)
 31581  		v0.AddArg2(x, z)
 31582  		v.AddArg2(v0, i)
 31583  		return true
 31584  	}
 31585  	// match: (Sub32 x (Add32 z i:(Const32 <t>)))
 31586  	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
 31587  	// result: (Sub32 (Sub32 <t> x z) i)
 31588  	for {
 31589  		x := v_0
 31590  		if v_1.Op != OpAdd32 {
 31591  			break
 31592  		}
 31593  		_ = v_1.Args[1]
 31594  		v_1_0 := v_1.Args[0]
 31595  		v_1_1 := v_1.Args[1]
 31596  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 31597  			z := v_1_0
 31598  			i := v_1_1
 31599  			if i.Op != OpConst32 {
 31600  				continue
 31601  			}
 31602  			t := i.Type
 31603  			if !(z.Op != OpConst32 && x.Op != OpConst32) {
 31604  				continue
 31605  			}
 31606  			v.reset(OpSub32)
 31607  			v0 := b.NewValue0(v.Pos, OpSub32, t)
 31608  			v0.AddArg2(x, z)
 31609  			v.AddArg2(v0, i)
 31610  			return true
 31611  		}
 31612  		break
 31613  	}
 31614  	// match: (Sub32 (Sub32 i:(Const32 <t>) z) x)
 31615  	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
 31616  	// result: (Sub32 i (Add32 <t> z x))
 31617  	for {
 31618  		if v_0.Op != OpSub32 {
 31619  			break
 31620  		}
 31621  		z := v_0.Args[1]
 31622  		i := v_0.Args[0]
 31623  		if i.Op != OpConst32 {
 31624  			break
 31625  		}
 31626  		t := i.Type
 31627  		x := v_1
 31628  		if !(z.Op != OpConst32 && x.Op != OpConst32) {
 31629  			break
 31630  		}
 31631  		v.reset(OpSub32)
 31632  		v0 := b.NewValue0(v.Pos, OpAdd32, t)
 31633  		v0.AddArg2(z, x)
 31634  		v.AddArg2(i, v0)
 31635  		return true
 31636  	}
 31637  	// match: (Sub32 (Add32 z i:(Const32 <t>)) x)
 31638  	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
 31639  	// result: (Add32 i (Sub32 <t> z x))
 31640  	for {
 31641  		if v_0.Op != OpAdd32 {
 31642  			break
 31643  		}
 31644  		_ = v_0.Args[1]
 31645  		v_0_0 := v_0.Args[0]
 31646  		v_0_1 := v_0.Args[1]
 31647  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 31648  			z := v_0_0
 31649  			i := v_0_1
 31650  			if i.Op != OpConst32 {
 31651  				continue
 31652  			}
 31653  			t := i.Type
 31654  			x := v_1
 31655  			if !(z.Op != OpConst32 && x.Op != OpConst32) {
 31656  				continue
 31657  			}
 31658  			v.reset(OpAdd32)
 31659  			v0 := b.NewValue0(v.Pos, OpSub32, t)
 31660  			v0.AddArg2(z, x)
 31661  			v.AddArg2(i, v0)
 31662  			return true
 31663  		}
 31664  		break
 31665  	}
 31666  	// match: (Sub32 (Const32 <t> [c]) (Sub32 (Const32 <t> [d]) x))
 31667  	// result: (Add32 (Const32 <t> [c-d]) x)
 31668  	for {
 31669  		if v_0.Op != OpConst32 {
 31670  			break
 31671  		}
 31672  		t := v_0.Type
 31673  		c := auxIntToInt32(v_0.AuxInt)
 31674  		if v_1.Op != OpSub32 {
 31675  			break
 31676  		}
 31677  		x := v_1.Args[1]
 31678  		v_1_0 := v_1.Args[0]
 31679  		if v_1_0.Op != OpConst32 || v_1_0.Type != t {
 31680  			break
 31681  		}
 31682  		d := auxIntToInt32(v_1_0.AuxInt)
 31683  		v.reset(OpAdd32)
 31684  		v0 := b.NewValue0(v.Pos, OpConst32, t)
 31685  		v0.AuxInt = int32ToAuxInt(c - d)
 31686  		v.AddArg2(v0, x)
 31687  		return true
 31688  	}
 31689  	// match: (Sub32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
 31690  	// result: (Sub32 (Const32 <t> [c-d]) x)
 31691  	for {
 31692  		if v_0.Op != OpConst32 {
 31693  			break
 31694  		}
 31695  		t := v_0.Type
 31696  		c := auxIntToInt32(v_0.AuxInt)
 31697  		if v_1.Op != OpAdd32 {
 31698  			break
 31699  		}
 31700  		_ = v_1.Args[1]
 31701  		v_1_0 := v_1.Args[0]
 31702  		v_1_1 := v_1.Args[1]
 31703  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 31704  			if v_1_0.Op != OpConst32 || v_1_0.Type != t {
 31705  				continue
 31706  			}
 31707  			d := auxIntToInt32(v_1_0.AuxInt)
 31708  			x := v_1_1
 31709  			v.reset(OpSub32)
 31710  			v0 := b.NewValue0(v.Pos, OpConst32, t)
 31711  			v0.AuxInt = int32ToAuxInt(c - d)
 31712  			v.AddArg2(v0, x)
 31713  			return true
 31714  		}
 31715  		break
 31716  	}
 31717  	return false
 31718  }
 31719  func rewriteValuegeneric_OpSub32F(v *Value) bool {
 31720  	v_1 := v.Args[1]
 31721  	v_0 := v.Args[0]
 31722  	// match: (Sub32F (Const32F [c]) (Const32F [d]))
 31723  	// cond: c-d == c-d
 31724  	// result: (Const32F [c-d])
 31725  	for {
 31726  		if v_0.Op != OpConst32F {
 31727  			break
 31728  		}
 31729  		c := auxIntToFloat32(v_0.AuxInt)
 31730  		if v_1.Op != OpConst32F {
 31731  			break
 31732  		}
 31733  		d := auxIntToFloat32(v_1.AuxInt)
 31734  		if !(c-d == c-d) {
 31735  			break
 31736  		}
 31737  		v.reset(OpConst32F)
 31738  		v.AuxInt = float32ToAuxInt(c - d)
 31739  		return true
 31740  	}
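	// The c-d == c-d condition above rejects NaN results (NaN compares
	// unequal to itself), so a subtraction whose folded result would be NaN
	// is left for the runtime instead of being constant-folded.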
 31741  	return false
 31742  }
 31743  func rewriteValuegeneric_OpSub64(v *Value) bool {
 31744  	v_1 := v.Args[1]
 31745  	v_0 := v.Args[0]
 31746  	b := v.Block
 31747  	// match: (Sub64 (Const64 [c]) (Const64 [d]))
 31748  	// result: (Const64 [c-d])
 31749  	for {
 31750  		if v_0.Op != OpConst64 {
 31751  			break
 31752  		}
 31753  		c := auxIntToInt64(v_0.AuxInt)
 31754  		if v_1.Op != OpConst64 {
 31755  			break
 31756  		}
 31757  		d := auxIntToInt64(v_1.AuxInt)
 31758  		v.reset(OpConst64)
 31759  		v.AuxInt = int64ToAuxInt(c - d)
 31760  		return true
 31761  	}
 31762  	// match: (Sub64 x (Const64 <t> [c]))
 31763  	// cond: x.Op != OpConst64
 31764  	// result: (Add64 (Const64 <t> [-c]) x)
 31765  	for {
 31766  		x := v_0
 31767  		if v_1.Op != OpConst64 {
 31768  			break
 31769  		}
 31770  		t := v_1.Type
 31771  		c := auxIntToInt64(v_1.AuxInt)
 31772  		if !(x.Op != OpConst64) {
 31773  			break
 31774  		}
 31775  		v.reset(OpAdd64)
 31776  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 31777  		v0.AuxInt = int64ToAuxInt(-c)
 31778  		v.AddArg2(v0, x)
 31779  		return true
 31780  	}
 31781  	// match: (Sub64 <t> (Mul64 x y) (Mul64 x z))
 31782  	// result: (Mul64 x (Sub64 <t> y z))
 31783  	for {
 31784  		t := v.Type
 31785  		if v_0.Op != OpMul64 {
 31786  			break
 31787  		}
 31788  		_ = v_0.Args[1]
 31789  		v_0_0 := v_0.Args[0]
 31790  		v_0_1 := v_0.Args[1]
 31791  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 31792  			x := v_0_0
 31793  			y := v_0_1
 31794  			if v_1.Op != OpMul64 {
 31795  				continue
 31796  			}
 31797  			_ = v_1.Args[1]
 31798  			v_1_0 := v_1.Args[0]
 31799  			v_1_1 := v_1.Args[1]
 31800  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 31801  				if x != v_1_0 {
 31802  					continue
 31803  				}
 31804  				z := v_1_1
 31805  				v.reset(OpMul64)
 31806  				v0 := b.NewValue0(v.Pos, OpSub64, t)
 31807  				v0.AddArg2(y, z)
 31808  				v.AddArg2(x, v0)
 31809  				return true
 31810  			}
 31811  		}
 31812  		break
 31813  	}
 31814  	// match: (Sub64 x x)
 31815  	// result: (Const64 [0])
 31816  	for {
 31817  		x := v_0
 31818  		if x != v_1 {
 31819  			break
 31820  		}
 31821  		v.reset(OpConst64)
 31822  		v.AuxInt = int64ToAuxInt(0)
 31823  		return true
 31824  	}
 31825  	// match: (Sub64 (Neg64 x) (Com64 x))
 31826  	// result: (Const64 [1])
 31827  	for {
 31828  		if v_0.Op != OpNeg64 {
 31829  			break
 31830  		}
 31831  		x := v_0.Args[0]
 31832  		if v_1.Op != OpCom64 || x != v_1.Args[0] {
 31833  			break
 31834  		}
 31835  		v.reset(OpConst64)
 31836  		v.AuxInt = int64ToAuxInt(1)
 31837  		return true
 31838  	}
 31839  	// match: (Sub64 (Com64 x) (Neg64 x))
 31840  	// result: (Const64 [-1])
 31841  	for {
 31842  		if v_0.Op != OpCom64 {
 31843  			break
 31844  		}
 31845  		x := v_0.Args[0]
 31846  		if v_1.Op != OpNeg64 || x != v_1.Args[0] {
 31847  			break
 31848  		}
 31849  		v.reset(OpConst64)
 31850  		v.AuxInt = int64ToAuxInt(-1)
 31851  		return true
 31852  	}
 31853  	// match: (Sub64 (Add64 t x) (Add64 t y))
 31854  	// result: (Sub64 x y)
 31855  	for {
 31856  		if v_0.Op != OpAdd64 {
 31857  			break
 31858  		}
 31859  		_ = v_0.Args[1]
 31860  		v_0_0 := v_0.Args[0]
 31861  		v_0_1 := v_0.Args[1]
 31862  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 31863  			t := v_0_0
 31864  			x := v_0_1
 31865  			if v_1.Op != OpAdd64 {
 31866  				continue
 31867  			}
 31868  			_ = v_1.Args[1]
 31869  			v_1_0 := v_1.Args[0]
 31870  			v_1_1 := v_1.Args[1]
 31871  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 31872  				if t != v_1_0 {
 31873  					continue
 31874  				}
 31875  				y := v_1_1
 31876  				v.reset(OpSub64)
 31877  				v.AddArg2(x, y)
 31878  				return true
 31879  			}
 31880  		}
 31881  		break
 31882  	}
 31883  	// match: (Sub64 (Add64 x y) x)
 31884  	// result: y
 31885  	for {
 31886  		if v_0.Op != OpAdd64 {
 31887  			break
 31888  		}
 31889  		_ = v_0.Args[1]
 31890  		v_0_0 := v_0.Args[0]
 31891  		v_0_1 := v_0.Args[1]
 31892  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 31893  			x := v_0_0
 31894  			y := v_0_1
 31895  			if x != v_1 {
 31896  				continue
 31897  			}
 31898  			v.copyOf(y)
 31899  			return true
 31900  		}
 31901  		break
 31902  	}
 31903  	// match: (Sub64 (Add64 x y) y)
 31904  	// result: x
 31905  	for {
 31906  		if v_0.Op != OpAdd64 {
 31907  			break
 31908  		}
 31909  		_ = v_0.Args[1]
 31910  		v_0_0 := v_0.Args[0]
 31911  		v_0_1 := v_0.Args[1]
 31912  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 31913  			x := v_0_0
 31914  			y := v_0_1
 31915  			if y != v_1 {
 31916  				continue
 31917  			}
 31918  			v.copyOf(x)
 31919  			return true
 31920  		}
 31921  		break
 31922  	}
 31923  	// match: (Sub64 (Sub64 x y) x)
 31924  	// result: (Neg64 y)
 31925  	for {
 31926  		if v_0.Op != OpSub64 {
 31927  			break
 31928  		}
 31929  		y := v_0.Args[1]
 31930  		x := v_0.Args[0]
 31931  		if x != v_1 {
 31932  			break
 31933  		}
 31934  		v.reset(OpNeg64)
 31935  		v.AddArg(y)
 31936  		return true
 31937  	}
 31938  	// match: (Sub64 x (Add64 x y))
 31939  	// result: (Neg64 y)
 31940  	for {
 31941  		x := v_0
 31942  		if v_1.Op != OpAdd64 {
 31943  			break
 31944  		}
 31945  		_ = v_1.Args[1]
 31946  		v_1_0 := v_1.Args[0]
 31947  		v_1_1 := v_1.Args[1]
 31948  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 31949  			if x != v_1_0 {
 31950  				continue
 31951  			}
 31952  			y := v_1_1
 31953  			v.reset(OpNeg64)
 31954  			v.AddArg(y)
 31955  			return true
 31956  		}
 31957  		break
 31958  	}
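	// The following rules reassociate Sub64/Add64 expressions so that a
	// Const64 operand moves to the outermost position, where the
	// constant-folding rules in this function can combine it with
	// neighbouring constants.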
 31959  	// match: (Sub64 x (Sub64 i:(Const64 <t>) z))
 31960  	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
 31961  	// result: (Sub64 (Add64 <t> x z) i)
 31962  	for {
 31963  		x := v_0
 31964  		if v_1.Op != OpSub64 {
 31965  			break
 31966  		}
 31967  		z := v_1.Args[1]
 31968  		i := v_1.Args[0]
 31969  		if i.Op != OpConst64 {
 31970  			break
 31971  		}
 31972  		t := i.Type
 31973  		if !(z.Op != OpConst64 && x.Op != OpConst64) {
 31974  			break
 31975  		}
 31976  		v.reset(OpSub64)
 31977  		v0 := b.NewValue0(v.Pos, OpAdd64, t)
 31978  		v0.AddArg2(x, z)
 31979  		v.AddArg2(v0, i)
 31980  		return true
 31981  	}
 31982  	// match: (Sub64 x (Add64 z i:(Const64 <t>)))
 31983  	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
 31984  	// result: (Sub64 (Sub64 <t> x z) i)
 31985  	for {
 31986  		x := v_0
 31987  		if v_1.Op != OpAdd64 {
 31988  			break
 31989  		}
 31990  		_ = v_1.Args[1]
 31991  		v_1_0 := v_1.Args[0]
 31992  		v_1_1 := v_1.Args[1]
 31993  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 31994  			z := v_1_0
 31995  			i := v_1_1
 31996  			if i.Op != OpConst64 {
 31997  				continue
 31998  			}
 31999  			t := i.Type
 32000  			if !(z.Op != OpConst64 && x.Op != OpConst64) {
 32001  				continue
 32002  			}
 32003  			v.reset(OpSub64)
 32004  			v0 := b.NewValue0(v.Pos, OpSub64, t)
 32005  			v0.AddArg2(x, z)
 32006  			v.AddArg2(v0, i)
 32007  			return true
 32008  		}
 32009  		break
 32010  	}
 32011  	// match: (Sub64 (Sub64 i:(Const64 <t>) z) x)
 32012  	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
 32013  	// result: (Sub64 i (Add64 <t> z x))
 32014  	for {
 32015  		if v_0.Op != OpSub64 {
 32016  			break
 32017  		}
 32018  		z := v_0.Args[1]
 32019  		i := v_0.Args[0]
 32020  		if i.Op != OpConst64 {
 32021  			break
 32022  		}
 32023  		t := i.Type
 32024  		x := v_1
 32025  		if !(z.Op != OpConst64 && x.Op != OpConst64) {
 32026  			break
 32027  		}
 32028  		v.reset(OpSub64)
 32029  		v0 := b.NewValue0(v.Pos, OpAdd64, t)
 32030  		v0.AddArg2(z, x)
 32031  		v.AddArg2(i, v0)
 32032  		return true
 32033  	}
 32034  	// match: (Sub64 (Add64 z i:(Const64 <t>)) x)
 32035  	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
 32036  	// result: (Add64 i (Sub64 <t> z x))
 32037  	for {
 32038  		if v_0.Op != OpAdd64 {
 32039  			break
 32040  		}
 32041  		_ = v_0.Args[1]
 32042  		v_0_0 := v_0.Args[0]
 32043  		v_0_1 := v_0.Args[1]
 32044  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 32045  			z := v_0_0
 32046  			i := v_0_1
 32047  			if i.Op != OpConst64 {
 32048  				continue
 32049  			}
 32050  			t := i.Type
 32051  			x := v_1
 32052  			if !(z.Op != OpConst64 && x.Op != OpConst64) {
 32053  				continue
 32054  			}
 32055  			v.reset(OpAdd64)
 32056  			v0 := b.NewValue0(v.Pos, OpSub64, t)
 32057  			v0.AddArg2(z, x)
 32058  			v.AddArg2(i, v0)
 32059  			return true
 32060  		}
 32061  		break
 32062  	}
 32063  	// match: (Sub64 (Const64 <t> [c]) (Sub64 (Const64 <t> [d]) x))
 32064  	// result: (Add64 (Const64 <t> [c-d]) x)
 32065  	for {
 32066  		if v_0.Op != OpConst64 {
 32067  			break
 32068  		}
 32069  		t := v_0.Type
 32070  		c := auxIntToInt64(v_0.AuxInt)
 32071  		if v_1.Op != OpSub64 {
 32072  			break
 32073  		}
 32074  		x := v_1.Args[1]
 32075  		v_1_0 := v_1.Args[0]
 32076  		if v_1_0.Op != OpConst64 || v_1_0.Type != t {
 32077  			break
 32078  		}
 32079  		d := auxIntToInt64(v_1_0.AuxInt)
 32080  		v.reset(OpAdd64)
 32081  		v0 := b.NewValue0(v.Pos, OpConst64, t)
 32082  		v0.AuxInt = int64ToAuxInt(c - d)
 32083  		v.AddArg2(v0, x)
 32084  		return true
 32085  	}
 32086  	// match: (Sub64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
 32087  	// result: (Sub64 (Const64 <t> [c-d]) x)
 32088  	for {
 32089  		if v_0.Op != OpConst64 {
 32090  			break
 32091  		}
 32092  		t := v_0.Type
 32093  		c := auxIntToInt64(v_0.AuxInt)
 32094  		if v_1.Op != OpAdd64 {
 32095  			break
 32096  		}
 32097  		_ = v_1.Args[1]
 32098  		v_1_0 := v_1.Args[0]
 32099  		v_1_1 := v_1.Args[1]
 32100  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 32101  			if v_1_0.Op != OpConst64 || v_1_0.Type != t {
 32102  				continue
 32103  			}
 32104  			d := auxIntToInt64(v_1_0.AuxInt)
 32105  			x := v_1_1
 32106  			v.reset(OpSub64)
 32107  			v0 := b.NewValue0(v.Pos, OpConst64, t)
 32108  			v0.AuxInt = int64ToAuxInt(c - d)
 32109  			v.AddArg2(v0, x)
 32110  			return true
 32111  		}
 32112  		break
 32113  	}
 32114  	return false
 32115  }
 32116  func rewriteValuegeneric_OpSub64F(v *Value) bool {
 32117  	v_1 := v.Args[1]
 32118  	v_0 := v.Args[0]
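	// The c-d == c-d condition below is false only when c-d is NaN, so the
	// subtraction is constant-folded only when the result is not NaN.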
 32119  	// match: (Sub64F (Const64F [c]) (Const64F [d]))
 32120  	// cond: c-d == c-d
 32121  	// result: (Const64F [c-d])
 32122  	for {
 32123  		if v_0.Op != OpConst64F {
 32124  			break
 32125  		}
 32126  		c := auxIntToFloat64(v_0.AuxInt)
 32127  		if v_1.Op != OpConst64F {
 32128  			break
 32129  		}
 32130  		d := auxIntToFloat64(v_1.AuxInt)
 32131  		if !(c-d == c-d) {
 32132  			break
 32133  		}
 32134  		v.reset(OpConst64F)
 32135  		v.AuxInt = float64ToAuxInt(c - d)
 32136  		return true
 32137  	}
 32138  	return false
 32139  }
 32140  func rewriteValuegeneric_OpSub8(v *Value) bool {
 32141  	v_1 := v.Args[1]
 32142  	v_0 := v.Args[0]
 32143  	b := v.Block
 32144  	// match: (Sub8 (Const8 [c]) (Const8 [d]))
 32145  	// result: (Const8 [c-d])
 32146  	for {
 32147  		if v_0.Op != OpConst8 {
 32148  			break
 32149  		}
 32150  		c := auxIntToInt8(v_0.AuxInt)
 32151  		if v_1.Op != OpConst8 {
 32152  			break
 32153  		}
 32154  		d := auxIntToInt8(v_1.AuxInt)
 32155  		v.reset(OpConst8)
 32156  		v.AuxInt = int8ToAuxInt(c - d)
 32157  		return true
 32158  	}
 32159  	// match: (Sub8 x (Const8 <t> [c]))
 32160  	// cond: x.Op != OpConst8
 32161  	// result: (Add8 (Const8 <t> [-c]) x)
 32162  	for {
 32163  		x := v_0
 32164  		if v_1.Op != OpConst8 {
 32165  			break
 32166  		}
 32167  		t := v_1.Type
 32168  		c := auxIntToInt8(v_1.AuxInt)
 32169  		if !(x.Op != OpConst8) {
 32170  			break
 32171  		}
 32172  		v.reset(OpAdd8)
 32173  		v0 := b.NewValue0(v.Pos, OpConst8, t)
 32174  		v0.AuxInt = int8ToAuxInt(-c)
 32175  		v.AddArg2(v0, x)
 32176  		return true
 32177  	}
 32178  	// match: (Sub8 <t> (Mul8 x y) (Mul8 x z))
 32179  	// result: (Mul8 x (Sub8 <t> y z))
 32180  	for {
 32181  		t := v.Type
 32182  		if v_0.Op != OpMul8 {
 32183  			break
 32184  		}
 32185  		_ = v_0.Args[1]
 32186  		v_0_0 := v_0.Args[0]
 32187  		v_0_1 := v_0.Args[1]
 32188  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 32189  			x := v_0_0
 32190  			y := v_0_1
 32191  			if v_1.Op != OpMul8 {
 32192  				continue
 32193  			}
 32194  			_ = v_1.Args[1]
 32195  			v_1_0 := v_1.Args[0]
 32196  			v_1_1 := v_1.Args[1]
 32197  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 32198  				if x != v_1_0 {
 32199  					continue
 32200  				}
 32201  				z := v_1_1
 32202  				v.reset(OpMul8)
 32203  				v0 := b.NewValue0(v.Pos, OpSub8, t)
 32204  				v0.AddArg2(y, z)
 32205  				v.AddArg2(x, v0)
 32206  				return true
 32207  			}
 32208  		}
 32209  		break
 32210  	}
 32211  	// match: (Sub8 x x)
 32212  	// result: (Const8 [0])
 32213  	for {
 32214  		x := v_0
 32215  		if x != v_1 {
 32216  			break
 32217  		}
 32218  		v.reset(OpConst8)
 32219  		v.AuxInt = int8ToAuxInt(0)
 32220  		return true
 32221  	}
 32222  	// match: (Sub8 (Neg8 x) (Com8 x))
 32223  	// result: (Const8 [1])
 32224  	for {
 32225  		if v_0.Op != OpNeg8 {
 32226  			break
 32227  		}
 32228  		x := v_0.Args[0]
 32229  		if v_1.Op != OpCom8 || x != v_1.Args[0] {
 32230  			break
 32231  		}
 32232  		v.reset(OpConst8)
 32233  		v.AuxInt = int8ToAuxInt(1)
 32234  		return true
 32235  	}
 32236  	// match: (Sub8 (Com8 x) (Neg8 x))
 32237  	// result: (Const8 [-1])
 32238  	for {
 32239  		if v_0.Op != OpCom8 {
 32240  			break
 32241  		}
 32242  		x := v_0.Args[0]
 32243  		if v_1.Op != OpNeg8 || x != v_1.Args[0] {
 32244  			break
 32245  		}
 32246  		v.reset(OpConst8)
 32247  		v.AuxInt = int8ToAuxInt(-1)
 32248  		return true
 32249  	}
 32250  	// match: (Sub8 (Add8 t x) (Add8 t y))
 32251  	// result: (Sub8 x y)
 32252  	for {
 32253  		if v_0.Op != OpAdd8 {
 32254  			break
 32255  		}
 32256  		_ = v_0.Args[1]
 32257  		v_0_0 := v_0.Args[0]
 32258  		v_0_1 := v_0.Args[1]
 32259  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 32260  			t := v_0_0
 32261  			x := v_0_1
 32262  			if v_1.Op != OpAdd8 {
 32263  				continue
 32264  			}
 32265  			_ = v_1.Args[1]
 32266  			v_1_0 := v_1.Args[0]
 32267  			v_1_1 := v_1.Args[1]
 32268  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 32269  				if t != v_1_0 {
 32270  					continue
 32271  				}
 32272  				y := v_1_1
 32273  				v.reset(OpSub8)
 32274  				v.AddArg2(x, y)
 32275  				return true
 32276  			}
 32277  		}
 32278  		break
 32279  	}
 32280  	// match: (Sub8 (Add8 x y) x)
 32281  	// result: y
 32282  	for {
 32283  		if v_0.Op != OpAdd8 {
 32284  			break
 32285  		}
 32286  		_ = v_0.Args[1]
 32287  		v_0_0 := v_0.Args[0]
 32288  		v_0_1 := v_0.Args[1]
 32289  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 32290  			x := v_0_0
 32291  			y := v_0_1
 32292  			if x != v_1 {
 32293  				continue
 32294  			}
 32295  			v.copyOf(y)
 32296  			return true
 32297  		}
 32298  		break
 32299  	}
 32300  	// match: (Sub8 (Add8 x y) y)
 32301  	// result: x
 32302  	for {
 32303  		if v_0.Op != OpAdd8 {
 32304  			break
 32305  		}
 32306  		_ = v_0.Args[1]
 32307  		v_0_0 := v_0.Args[0]
 32308  		v_0_1 := v_0.Args[1]
 32309  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 32310  			x := v_0_0
 32311  			y := v_0_1
 32312  			if y != v_1 {
 32313  				continue
 32314  			}
 32315  			v.copyOf(x)
 32316  			return true
 32317  		}
 32318  		break
 32319  	}
 32320  	// match: (Sub8 (Sub8 x y) x)
 32321  	// result: (Neg8 y)
 32322  	for {
 32323  		if v_0.Op != OpSub8 {
 32324  			break
 32325  		}
 32326  		y := v_0.Args[1]
 32327  		x := v_0.Args[0]
 32328  		if x != v_1 {
 32329  			break
 32330  		}
 32331  		v.reset(OpNeg8)
 32332  		v.AddArg(y)
 32333  		return true
 32334  	}
 32335  	// match: (Sub8 x (Add8 x y))
 32336  	// result: (Neg8 y)
 32337  	for {
 32338  		x := v_0
 32339  		if v_1.Op != OpAdd8 {
 32340  			break
 32341  		}
 32342  		_ = v_1.Args[1]
 32343  		v_1_0 := v_1.Args[0]
 32344  		v_1_1 := v_1.Args[1]
 32345  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 32346  			if x != v_1_0 {
 32347  				continue
 32348  			}
 32349  			y := v_1_1
 32350  			v.reset(OpNeg8)
 32351  			v.AddArg(y)
 32352  			return true
 32353  		}
 32354  		break
 32355  	}
 32356  	// match: (Sub8 x (Sub8 i:(Const8 <t>) z))
 32357  	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
 32358  	// result: (Sub8 (Add8 <t> x z) i)
 32359  	for {
 32360  		x := v_0
 32361  		if v_1.Op != OpSub8 {
 32362  			break
 32363  		}
 32364  		z := v_1.Args[1]
 32365  		i := v_1.Args[0]
 32366  		if i.Op != OpConst8 {
 32367  			break
 32368  		}
 32369  		t := i.Type
 32370  		if !(z.Op != OpConst8 && x.Op != OpConst8) {
 32371  			break
 32372  		}
 32373  		v.reset(OpSub8)
 32374  		v0 := b.NewValue0(v.Pos, OpAdd8, t)
 32375  		v0.AddArg2(x, z)
 32376  		v.AddArg2(v0, i)
 32377  		return true
 32378  	}
 32379  	// match: (Sub8 x (Add8 z i:(Const8 <t>)))
 32380  	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
 32381  	// result: (Sub8 (Sub8 <t> x z) i)
 32382  	for {
 32383  		x := v_0
 32384  		if v_1.Op != OpAdd8 {
 32385  			break
 32386  		}
 32387  		_ = v_1.Args[1]
 32388  		v_1_0 := v_1.Args[0]
 32389  		v_1_1 := v_1.Args[1]
 32390  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 32391  			z := v_1_0
 32392  			i := v_1_1
 32393  			if i.Op != OpConst8 {
 32394  				continue
 32395  			}
 32396  			t := i.Type
 32397  			if !(z.Op != OpConst8 && x.Op != OpConst8) {
 32398  				continue
 32399  			}
 32400  			v.reset(OpSub8)
 32401  			v0 := b.NewValue0(v.Pos, OpSub8, t)
 32402  			v0.AddArg2(x, z)
 32403  			v.AddArg2(v0, i)
 32404  			return true
 32405  		}
 32406  		break
 32407  	}
 32408  	// match: (Sub8 (Sub8 i:(Const8 <t>) z) x)
 32409  	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
 32410  	// result: (Sub8 i (Add8 <t> z x))
 32411  	for {
 32412  		if v_0.Op != OpSub8 {
 32413  			break
 32414  		}
 32415  		z := v_0.Args[1]
 32416  		i := v_0.Args[0]
 32417  		if i.Op != OpConst8 {
 32418  			break
 32419  		}
 32420  		t := i.Type
 32421  		x := v_1
 32422  		if !(z.Op != OpConst8 && x.Op != OpConst8) {
 32423  			break
 32424  		}
 32425  		v.reset(OpSub8)
 32426  		v0 := b.NewValue0(v.Pos, OpAdd8, t)
 32427  		v0.AddArg2(z, x)
 32428  		v.AddArg2(i, v0)
 32429  		return true
 32430  	}
 32431  	// match: (Sub8 (Add8 z i:(Const8 <t>)) x)
 32432  	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
 32433  	// result: (Add8 i (Sub8 <t> z x))
 32434  	for {
 32435  		if v_0.Op != OpAdd8 {
 32436  			break
 32437  		}
 32438  		_ = v_0.Args[1]
 32439  		v_0_0 := v_0.Args[0]
 32440  		v_0_1 := v_0.Args[1]
 32441  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 32442  			z := v_0_0
 32443  			i := v_0_1
 32444  			if i.Op != OpConst8 {
 32445  				continue
 32446  			}
 32447  			t := i.Type
 32448  			x := v_1
 32449  			if !(z.Op != OpConst8 && x.Op != OpConst8) {
 32450  				continue
 32451  			}
 32452  			v.reset(OpAdd8)
 32453  			v0 := b.NewValue0(v.Pos, OpSub8, t)
 32454  			v0.AddArg2(z, x)
 32455  			v.AddArg2(i, v0)
 32456  			return true
 32457  		}
 32458  		break
 32459  	}
 32460  	// match: (Sub8 (Const8 <t> [c]) (Sub8 (Const8 <t> [d]) x))
 32461  	// result: (Add8 (Const8 <t> [c-d]) x)
 32462  	for {
 32463  		if v_0.Op != OpConst8 {
 32464  			break
 32465  		}
 32466  		t := v_0.Type
 32467  		c := auxIntToInt8(v_0.AuxInt)
 32468  		if v_1.Op != OpSub8 {
 32469  			break
 32470  		}
 32471  		x := v_1.Args[1]
 32472  		v_1_0 := v_1.Args[0]
 32473  		if v_1_0.Op != OpConst8 || v_1_0.Type != t {
 32474  			break
 32475  		}
 32476  		d := auxIntToInt8(v_1_0.AuxInt)
 32477  		v.reset(OpAdd8)
 32478  		v0 := b.NewValue0(v.Pos, OpConst8, t)
 32479  		v0.AuxInt = int8ToAuxInt(c - d)
 32480  		v.AddArg2(v0, x)
 32481  		return true
 32482  	}
 32483  	// match: (Sub8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x))
 32484  	// result: (Sub8 (Const8 <t> [c-d]) x)
 32485  	for {
 32486  		if v_0.Op != OpConst8 {
 32487  			break
 32488  		}
 32489  		t := v_0.Type
 32490  		c := auxIntToInt8(v_0.AuxInt)
 32491  		if v_1.Op != OpAdd8 {
 32492  			break
 32493  		}
 32494  		_ = v_1.Args[1]
 32495  		v_1_0 := v_1.Args[0]
 32496  		v_1_1 := v_1.Args[1]
 32497  		for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
 32498  			if v_1_0.Op != OpConst8 || v_1_0.Type != t {
 32499  				continue
 32500  			}
 32501  			d := auxIntToInt8(v_1_0.AuxInt)
 32502  			x := v_1_1
 32503  			v.reset(OpSub8)
 32504  			v0 := b.NewValue0(v.Pos, OpConst8, t)
 32505  			v0.AuxInt = int8ToAuxInt(c - d)
 32506  			v.AddArg2(v0, x)
 32507  			return true
 32508  		}
 32509  		break
 32510  	}
 32511  	return false
 32512  }
 32513  func rewriteValuegeneric_OpTrunc(v *Value) bool {
 32514  	v_0 := v.Args[0]
 32515  	// match: (Trunc (Const64F [c]))
 32516  	// result: (Const64F [math.Trunc(c)])
 32517  	for {
 32518  		if v_0.Op != OpConst64F {
 32519  			break
 32520  		}
 32521  		c := auxIntToFloat64(v_0.AuxInt)
 32522  		v.reset(OpConst64F)
 32523  		v.AuxInt = float64ToAuxInt(math.Trunc(c))
 32524  		return true
 32525  	}
 32526  	return false
 32527  }
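// The TruncNtoM rules below simplify truncations: a truncation of a zero or
// sign extension is replaced by the narrower extension (or removed entirely
// when the widths match), and an And with a constant mask that keeps all of
// the low M bits is dropped, since it cannot change the truncated result.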
 32528  func rewriteValuegeneric_OpTrunc16to8(v *Value) bool {
 32529  	v_0 := v.Args[0]
 32530  	// match: (Trunc16to8 (Const16 [c]))
 32531  	// result: (Const8 [int8(c)])
 32532  	for {
 32533  		if v_0.Op != OpConst16 {
 32534  			break
 32535  		}
 32536  		c := auxIntToInt16(v_0.AuxInt)
 32537  		v.reset(OpConst8)
 32538  		v.AuxInt = int8ToAuxInt(int8(c))
 32539  		return true
 32540  	}
 32541  	// match: (Trunc16to8 (ZeroExt8to16 x))
 32542  	// result: x
 32543  	for {
 32544  		if v_0.Op != OpZeroExt8to16 {
 32545  			break
 32546  		}
 32547  		x := v_0.Args[0]
 32548  		v.copyOf(x)
 32549  		return true
 32550  	}
 32551  	// match: (Trunc16to8 (SignExt8to16 x))
 32552  	// result: x
 32553  	for {
 32554  		if v_0.Op != OpSignExt8to16 {
 32555  			break
 32556  		}
 32557  		x := v_0.Args[0]
 32558  		v.copyOf(x)
 32559  		return true
 32560  	}
 32561  	// match: (Trunc16to8 (And16 (Const16 [y]) x))
 32562  	// cond: y&0xFF == 0xFF
 32563  	// result: (Trunc16to8 x)
 32564  	for {
 32565  		if v_0.Op != OpAnd16 {
 32566  			break
 32567  		}
 32568  		_ = v_0.Args[1]
 32569  		v_0_0 := v_0.Args[0]
 32570  		v_0_1 := v_0.Args[1]
 32571  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 32572  			if v_0_0.Op != OpConst16 {
 32573  				continue
 32574  			}
 32575  			y := auxIntToInt16(v_0_0.AuxInt)
 32576  			x := v_0_1
 32577  			if !(y&0xFF == 0xFF) {
 32578  				continue
 32579  			}
 32580  			v.reset(OpTrunc16to8)
 32581  			v.AddArg(x)
 32582  			return true
 32583  		}
 32584  		break
 32585  	}
 32586  	return false
 32587  }
 32588  func rewriteValuegeneric_OpTrunc32to16(v *Value) bool {
 32589  	v_0 := v.Args[0]
 32590  	// match: (Trunc32to16 (Const32 [c]))
 32591  	// result: (Const16 [int16(c)])
 32592  	for {
 32593  		if v_0.Op != OpConst32 {
 32594  			break
 32595  		}
 32596  		c := auxIntToInt32(v_0.AuxInt)
 32597  		v.reset(OpConst16)
 32598  		v.AuxInt = int16ToAuxInt(int16(c))
 32599  		return true
 32600  	}
 32601  	// match: (Trunc32to16 (ZeroExt8to32 x))
 32602  	// result: (ZeroExt8to16 x)
 32603  	for {
 32604  		if v_0.Op != OpZeroExt8to32 {
 32605  			break
 32606  		}
 32607  		x := v_0.Args[0]
 32608  		v.reset(OpZeroExt8to16)
 32609  		v.AddArg(x)
 32610  		return true
 32611  	}
 32612  	// match: (Trunc32to16 (ZeroExt16to32 x))
 32613  	// result: x
 32614  	for {
 32615  		if v_0.Op != OpZeroExt16to32 {
 32616  			break
 32617  		}
 32618  		x := v_0.Args[0]
 32619  		v.copyOf(x)
 32620  		return true
 32621  	}
 32622  	// match: (Trunc32to16 (SignExt8to32 x))
 32623  	// result: (SignExt8to16 x)
 32624  	for {
 32625  		if v_0.Op != OpSignExt8to32 {
 32626  			break
 32627  		}
 32628  		x := v_0.Args[0]
 32629  		v.reset(OpSignExt8to16)
 32630  		v.AddArg(x)
 32631  		return true
 32632  	}
 32633  	// match: (Trunc32to16 (SignExt16to32 x))
 32634  	// result: x
 32635  	for {
 32636  		if v_0.Op != OpSignExt16to32 {
 32637  			break
 32638  		}
 32639  		x := v_0.Args[0]
 32640  		v.copyOf(x)
 32641  		return true
 32642  	}
 32643  	// match: (Trunc32to16 (And32 (Const32 [y]) x))
 32644  	// cond: y&0xFFFF == 0xFFFF
 32645  	// result: (Trunc32to16 x)
 32646  	for {
 32647  		if v_0.Op != OpAnd32 {
 32648  			break
 32649  		}
 32650  		_ = v_0.Args[1]
 32651  		v_0_0 := v_0.Args[0]
 32652  		v_0_1 := v_0.Args[1]
 32653  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 32654  			if v_0_0.Op != OpConst32 {
 32655  				continue
 32656  			}
 32657  			y := auxIntToInt32(v_0_0.AuxInt)
 32658  			x := v_0_1
 32659  			if !(y&0xFFFF == 0xFFFF) {
 32660  				continue
 32661  			}
 32662  			v.reset(OpTrunc32to16)
 32663  			v.AddArg(x)
 32664  			return true
 32665  		}
 32666  		break
 32667  	}
 32668  	return false
 32669  }
 32670  func rewriteValuegeneric_OpTrunc32to8(v *Value) bool {
 32671  	v_0 := v.Args[0]
 32672  	// match: (Trunc32to8 (Const32 [c]))
 32673  	// result: (Const8 [int8(c)])
 32674  	for {
 32675  		if v_0.Op != OpConst32 {
 32676  			break
 32677  		}
 32678  		c := auxIntToInt32(v_0.AuxInt)
 32679  		v.reset(OpConst8)
 32680  		v.AuxInt = int8ToAuxInt(int8(c))
 32681  		return true
 32682  	}
 32683  	// match: (Trunc32to8 (ZeroExt8to32 x))
 32684  	// result: x
 32685  	for {
 32686  		if v_0.Op != OpZeroExt8to32 {
 32687  			break
 32688  		}
 32689  		x := v_0.Args[0]
 32690  		v.copyOf(x)
 32691  		return true
 32692  	}
 32693  	// match: (Trunc32to8 (SignExt8to32 x))
 32694  	// result: x
 32695  	for {
 32696  		if v_0.Op != OpSignExt8to32 {
 32697  			break
 32698  		}
 32699  		x := v_0.Args[0]
 32700  		v.copyOf(x)
 32701  		return true
 32702  	}
 32703  	// match: (Trunc32to8 (And32 (Const32 [y]) x))
 32704  	// cond: y&0xFF == 0xFF
 32705  	// result: (Trunc32to8 x)
 32706  	for {
 32707  		if v_0.Op != OpAnd32 {
 32708  			break
 32709  		}
 32710  		_ = v_0.Args[1]
 32711  		v_0_0 := v_0.Args[0]
 32712  		v_0_1 := v_0.Args[1]
 32713  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 32714  			if v_0_0.Op != OpConst32 {
 32715  				continue
 32716  			}
 32717  			y := auxIntToInt32(v_0_0.AuxInt)
 32718  			x := v_0_1
 32719  			if !(y&0xFF == 0xFF) {
 32720  				continue
 32721  			}
 32722  			v.reset(OpTrunc32to8)
 32723  			v.AddArg(x)
 32724  			return true
 32725  		}
 32726  		break
 32727  	}
 32728  	return false
 32729  }
 32730  func rewriteValuegeneric_OpTrunc64to16(v *Value) bool {
 32731  	v_0 := v.Args[0]
 32732  	// match: (Trunc64to16 (Const64 [c]))
 32733  	// result: (Const16 [int16(c)])
 32734  	for {
 32735  		if v_0.Op != OpConst64 {
 32736  			break
 32737  		}
 32738  		c := auxIntToInt64(v_0.AuxInt)
 32739  		v.reset(OpConst16)
 32740  		v.AuxInt = int16ToAuxInt(int16(c))
 32741  		return true
 32742  	}
 32743  	// match: (Trunc64to16 (ZeroExt8to64 x))
 32744  	// result: (ZeroExt8to16 x)
 32745  	for {
 32746  		if v_0.Op != OpZeroExt8to64 {
 32747  			break
 32748  		}
 32749  		x := v_0.Args[0]
 32750  		v.reset(OpZeroExt8to16)
 32751  		v.AddArg(x)
 32752  		return true
 32753  	}
 32754  	// match: (Trunc64to16 (ZeroExt16to64 x))
 32755  	// result: x
 32756  	for {
 32757  		if v_0.Op != OpZeroExt16to64 {
 32758  			break
 32759  		}
 32760  		x := v_0.Args[0]
 32761  		v.copyOf(x)
 32762  		return true
 32763  	}
 32764  	// match: (Trunc64to16 (SignExt8to64 x))
 32765  	// result: (SignExt8to16 x)
 32766  	for {
 32767  		if v_0.Op != OpSignExt8to64 {
 32768  			break
 32769  		}
 32770  		x := v_0.Args[0]
 32771  		v.reset(OpSignExt8to16)
 32772  		v.AddArg(x)
 32773  		return true
 32774  	}
 32775  	// match: (Trunc64to16 (SignExt16to64 x))
 32776  	// result: x
 32777  	for {
 32778  		if v_0.Op != OpSignExt16to64 {
 32779  			break
 32780  		}
 32781  		x := v_0.Args[0]
 32782  		v.copyOf(x)
 32783  		return true
 32784  	}
 32785  	// match: (Trunc64to16 (And64 (Const64 [y]) x))
 32786  	// cond: y&0xFFFF == 0xFFFF
 32787  	// result: (Trunc64to16 x)
 32788  	for {
 32789  		if v_0.Op != OpAnd64 {
 32790  			break
 32791  		}
 32792  		_ = v_0.Args[1]
 32793  		v_0_0 := v_0.Args[0]
 32794  		v_0_1 := v_0.Args[1]
 32795  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 32796  			if v_0_0.Op != OpConst64 {
 32797  				continue
 32798  			}
 32799  			y := auxIntToInt64(v_0_0.AuxInt)
 32800  			x := v_0_1
 32801  			if !(y&0xFFFF == 0xFFFF) {
 32802  				continue
 32803  			}
 32804  			v.reset(OpTrunc64to16)
 32805  			v.AddArg(x)
 32806  			return true
 32807  		}
 32808  		break
 32809  	}
 32810  	return false
 32811  }
 32812  func rewriteValuegeneric_OpTrunc64to32(v *Value) bool {
 32813  	v_0 := v.Args[0]
 32814  	// match: (Trunc64to32 (Const64 [c]))
 32815  	// result: (Const32 [int32(c)])
 32816  	for {
 32817  		if v_0.Op != OpConst64 {
 32818  			break
 32819  		}
 32820  		c := auxIntToInt64(v_0.AuxInt)
 32821  		v.reset(OpConst32)
 32822  		v.AuxInt = int32ToAuxInt(int32(c))
 32823  		return true
 32824  	}
 32825  	// match: (Trunc64to32 (ZeroExt8to64 x))
 32826  	// result: (ZeroExt8to32 x)
 32827  	for {
 32828  		if v_0.Op != OpZeroExt8to64 {
 32829  			break
 32830  		}
 32831  		x := v_0.Args[0]
 32832  		v.reset(OpZeroExt8to32)
 32833  		v.AddArg(x)
 32834  		return true
 32835  	}
 32836  	// match: (Trunc64to32 (ZeroExt16to64 x))
 32837  	// result: (ZeroExt16to32 x)
 32838  	for {
 32839  		if v_0.Op != OpZeroExt16to64 {
 32840  			break
 32841  		}
 32842  		x := v_0.Args[0]
 32843  		v.reset(OpZeroExt16to32)
 32844  		v.AddArg(x)
 32845  		return true
 32846  	}
 32847  	// match: (Trunc64to32 (ZeroExt32to64 x))
 32848  	// result: x
 32849  	for {
 32850  		if v_0.Op != OpZeroExt32to64 {
 32851  			break
 32852  		}
 32853  		x := v_0.Args[0]
 32854  		v.copyOf(x)
 32855  		return true
 32856  	}
 32857  	// match: (Trunc64to32 (SignExt8to64 x))
 32858  	// result: (SignExt8to32 x)
 32859  	for {
 32860  		if v_0.Op != OpSignExt8to64 {
 32861  			break
 32862  		}
 32863  		x := v_0.Args[0]
 32864  		v.reset(OpSignExt8to32)
 32865  		v.AddArg(x)
 32866  		return true
 32867  	}
 32868  	// match: (Trunc64to32 (SignExt16to64 x))
 32869  	// result: (SignExt16to32 x)
 32870  	for {
 32871  		if v_0.Op != OpSignExt16to64 {
 32872  			break
 32873  		}
 32874  		x := v_0.Args[0]
 32875  		v.reset(OpSignExt16to32)
 32876  		v.AddArg(x)
 32877  		return true
 32878  	}
 32879  	// match: (Trunc64to32 (SignExt32to64 x))
 32880  	// result: x
 32881  	for {
 32882  		if v_0.Op != OpSignExt32to64 {
 32883  			break
 32884  		}
 32885  		x := v_0.Args[0]
 32886  		v.copyOf(x)
 32887  		return true
 32888  	}
 32889  	// match: (Trunc64to32 (And64 (Const64 [y]) x))
 32890  	// cond: y&0xFFFFFFFF == 0xFFFFFFFF
 32891  	// result: (Trunc64to32 x)
 32892  	for {
 32893  		if v_0.Op != OpAnd64 {
 32894  			break
 32895  		}
 32896  		_ = v_0.Args[1]
 32897  		v_0_0 := v_0.Args[0]
 32898  		v_0_1 := v_0.Args[1]
 32899  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 32900  			if v_0_0.Op != OpConst64 {
 32901  				continue
 32902  			}
 32903  			y := auxIntToInt64(v_0_0.AuxInt)
 32904  			x := v_0_1
 32905  			if !(y&0xFFFFFFFF == 0xFFFFFFFF) {
 32906  				continue
 32907  			}
 32908  			v.reset(OpTrunc64to32)
 32909  			v.AddArg(x)
 32910  			return true
 32911  		}
 32912  		break
 32913  	}
 32914  	return false
 32915  }
 32916  func rewriteValuegeneric_OpTrunc64to8(v *Value) bool {
 32917  	v_0 := v.Args[0]
 32918  	// match: (Trunc64to8 (Const64 [c]))
 32919  	// result: (Const8 [int8(c)])
 32920  	for {
 32921  		if v_0.Op != OpConst64 {
 32922  			break
 32923  		}
 32924  		c := auxIntToInt64(v_0.AuxInt)
 32925  		v.reset(OpConst8)
 32926  		v.AuxInt = int8ToAuxInt(int8(c))
 32927  		return true
 32928  	}
 32929  	// match: (Trunc64to8 (ZeroExt8to64 x))
 32930  	// result: x
 32931  	for {
 32932  		if v_0.Op != OpZeroExt8to64 {
 32933  			break
 32934  		}
 32935  		x := v_0.Args[0]
 32936  		v.copyOf(x)
 32937  		return true
 32938  	}
 32939  	// match: (Trunc64to8 (SignExt8to64 x))
 32940  	// result: x
 32941  	for {
 32942  		if v_0.Op != OpSignExt8to64 {
 32943  			break
 32944  		}
 32945  		x := v_0.Args[0]
 32946  		v.copyOf(x)
 32947  		return true
 32948  	}
 32949  	// match: (Trunc64to8 (And64 (Const64 [y]) x))
 32950  	// cond: y&0xFF == 0xFF
 32951  	// result: (Trunc64to8 x)
 32952  	for {
 32953  		if v_0.Op != OpAnd64 {
 32954  			break
 32955  		}
 32956  		_ = v_0.Args[1]
 32957  		v_0_0 := v_0.Args[0]
 32958  		v_0_1 := v_0.Args[1]
 32959  		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
 32960  			if v_0_0.Op != OpConst64 {
 32961  				continue
 32962  			}
 32963  			y := auxIntToInt64(v_0_0.AuxInt)
 32964  			x := v_0_1
 32965  			if !(y&0xFF == 0xFF) {
 32966  				continue
 32967  			}
 32968  			v.reset(OpTrunc64to8)
 32969  			v.AddArg(x)
 32970  			return true
 32971  		}
 32972  		break
 32973  	}
 32974  	return false
 32975  }
 32976  func rewriteValuegeneric_OpXor16(v *Value) bool {
 32977  	v_1 := v.Args[1]
 32978  	v_0 := v.Args[0]
 32979  	b := v.Block
 32980  	config := b.Func.Config
 32981  	// match: (Xor16 (Const16 [c]) (Const16 [d]))
 32982  	// result: (Const16 [c^d])
 32983  	for {
 32984  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 32985  			if v_0.Op != OpConst16 {
 32986  				continue
 32987  			}
 32988  			c := auxIntToInt16(v_0.AuxInt)
 32989  			if v_1.Op != OpConst16 {
 32990  				continue
 32991  			}
 32992  			d := auxIntToInt16(v_1.AuxInt)
 32993  			v.reset(OpConst16)
 32994  			v.AuxInt = int16ToAuxInt(c ^ d)
 32995  			return true
 32996  		}
 32997  		break
 32998  	}
 32999  	// match: (Xor16 x x)
 33000  	// result: (Const16 [0])
 33001  	for {
 33002  		x := v_0
 33003  		if x != v_1 {
 33004  			break
 33005  		}
 33006  		v.reset(OpConst16)
 33007  		v.AuxInt = int16ToAuxInt(0)
 33008  		return true
 33009  	}
 33010  	// match: (Xor16 (Const16 [0]) x)
 33011  	// result: x
 33012  	for {
 33013  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33014  			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
 33015  				continue
 33016  			}
 33017  			x := v_1
 33018  			v.copyOf(x)
 33019  			return true
 33020  		}
 33021  		break
 33022  	}
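	// Com16 is bitwise complement, so x ^ ^x has every bit set, i.e. Const16 [-1].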
 33023  	// match: (Xor16 (Com16 x) x)
 33024  	// result: (Const16 [-1])
 33025  	for {
 33026  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33027  			if v_0.Op != OpCom16 {
 33028  				continue
 33029  			}
 33030  			x := v_0.Args[0]
 33031  			if x != v_1 {
 33032  				continue
 33033  			}
 33034  			v.reset(OpConst16)
 33035  			v.AuxInt = int16ToAuxInt(-1)
 33036  			return true
 33037  		}
 33038  		break
 33039  	}
 33040  	// match: (Xor16 (Const16 [-1]) x)
 33041  	// result: (Com16 x)
 33042  	for {
 33043  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33044  			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != -1 {
 33045  				continue
 33046  			}
 33047  			x := v_1
 33048  			v.reset(OpCom16)
 33049  			v.AddArg(x)
 33050  			return true
 33051  		}
 33052  		break
 33053  	}
 33054  	// match: (Xor16 x (Xor16 x y))
 33055  	// result: y
 33056  	for {
 33057  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33058  			x := v_0
 33059  			if v_1.Op != OpXor16 {
 33060  				continue
 33061  			}
 33062  			_ = v_1.Args[1]
 33063  			v_1_0 := v_1.Args[0]
 33064  			v_1_1 := v_1.Args[1]
 33065  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 33066  				if x != v_1_0 {
 33067  					continue
 33068  				}
 33069  				y := v_1_1
 33070  				v.copyOf(y)
 33071  				return true
 33072  			}
 33073  		}
 33074  		break
 33075  	}
 33076  	// match: (Xor16 (Xor16 i:(Const16 <t>) z) x)
 33077  	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
 33078  	// result: (Xor16 i (Xor16 <t> z x))
 33079  	for {
 33080  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33081  			if v_0.Op != OpXor16 {
 33082  				continue
 33083  			}
 33084  			_ = v_0.Args[1]
 33085  			v_0_0 := v_0.Args[0]
 33086  			v_0_1 := v_0.Args[1]
 33087  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 33088  				i := v_0_0
 33089  				if i.Op != OpConst16 {
 33090  					continue
 33091  				}
 33092  				t := i.Type
 33093  				z := v_0_1
 33094  				x := v_1
 33095  				if !(z.Op != OpConst16 && x.Op != OpConst16) {
 33096  					continue
 33097  				}
 33098  				v.reset(OpXor16)
 33099  				v0 := b.NewValue0(v.Pos, OpXor16, t)
 33100  				v0.AddArg2(z, x)
 33101  				v.AddArg2(i, v0)
 33102  				return true
 33103  			}
 33104  		}
 33105  		break
 33106  	}
 33107  	// match: (Xor16 (Const16 <t> [c]) (Xor16 (Const16 <t> [d]) x))
 33108  	// result: (Xor16 (Const16 <t> [c^d]) x)
 33109  	for {
 33110  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33111  			if v_0.Op != OpConst16 {
 33112  				continue
 33113  			}
 33114  			t := v_0.Type
 33115  			c := auxIntToInt16(v_0.AuxInt)
 33116  			if v_1.Op != OpXor16 {
 33117  				continue
 33118  			}
 33119  			_ = v_1.Args[1]
 33120  			v_1_0 := v_1.Args[0]
 33121  			v_1_1 := v_1.Args[1]
 33122  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 33123  				if v_1_0.Op != OpConst16 || v_1_0.Type != t {
 33124  					continue
 33125  				}
 33126  				d := auxIntToInt16(v_1_0.AuxInt)
 33127  				x := v_1_1
 33128  				v.reset(OpXor16)
 33129  				v0 := b.NewValue0(v.Pos, OpConst16, t)
 33130  				v0.AuxInt = int16ToAuxInt(c ^ d)
 33131  				v.AddArg2(v0, x)
 33132  				return true
 33133  			}
 33134  		}
 33135  		break
 33136  	}
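	// The remaining Xor16 rules recognize left/right shift pairs that
	// together form a rotation, e.g. (x << c) ^ (x >> (16-c)), and rewrite
	// them to RotateLeft16 when canRotate reports the target supports 16-bit
	// rotates; the variable-shift forms also require shiftIsBounded on at
	// least one of the shifts.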
 33137  	// match: (Xor16 (Lsh16x64 x z:(Const64 <t> [c])) (Rsh16Ux64 x (Const64 [d])))
 33138  	// cond: c < 16 && d == 16-c && canRotate(config, 16)
 33139  	// result: (RotateLeft16 x z)
 33140  	for {
 33141  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33142  			if v_0.Op != OpLsh16x64 {
 33143  				continue
 33144  			}
 33145  			_ = v_0.Args[1]
 33146  			x := v_0.Args[0]
 33147  			z := v_0.Args[1]
 33148  			if z.Op != OpConst64 {
 33149  				continue
 33150  			}
 33151  			c := auxIntToInt64(z.AuxInt)
 33152  			if v_1.Op != OpRsh16Ux64 {
 33153  				continue
 33154  			}
 33155  			_ = v_1.Args[1]
 33156  			if x != v_1.Args[0] {
 33157  				continue
 33158  			}
 33159  			v_1_1 := v_1.Args[1]
 33160  			if v_1_1.Op != OpConst64 {
 33161  				continue
 33162  			}
 33163  			d := auxIntToInt64(v_1_1.AuxInt)
 33164  			if !(c < 16 && d == 16-c && canRotate(config, 16)) {
 33165  				continue
 33166  			}
 33167  			v.reset(OpRotateLeft16)
 33168  			v.AddArg2(x, z)
 33169  			return true
 33170  		}
 33171  		break
 33172  	}
 33173  	// match: (Xor16 left:(Lsh16x64 x y) right:(Rsh16Ux64 x (Sub64 (Const64 [16]) y)))
 33174  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
 33175  	// result: (RotateLeft16 x y)
 33176  	for {
 33177  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33178  			left := v_0
 33179  			if left.Op != OpLsh16x64 {
 33180  				continue
 33181  			}
 33182  			y := left.Args[1]
 33183  			x := left.Args[0]
 33184  			right := v_1
 33185  			if right.Op != OpRsh16Ux64 {
 33186  				continue
 33187  			}
 33188  			_ = right.Args[1]
 33189  			if x != right.Args[0] {
 33190  				continue
 33191  			}
 33192  			right_1 := right.Args[1]
 33193  			if right_1.Op != OpSub64 {
 33194  				continue
 33195  			}
 33196  			_ = right_1.Args[1]
 33197  			right_1_0 := right_1.Args[0]
 33198  			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
 33199  				continue
 33200  			}
 33201  			v.reset(OpRotateLeft16)
 33202  			v.AddArg2(x, y)
 33203  			return true
 33204  		}
 33205  		break
 33206  	}
 33207  	// match: (Xor16 left:(Lsh16x32 x y) right:(Rsh16Ux32 x (Sub32 (Const32 [16]) y)))
 33208  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
 33209  	// result: (RotateLeft16 x y)
 33210  	for {
 33211  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33212  			left := v_0
 33213  			if left.Op != OpLsh16x32 {
 33214  				continue
 33215  			}
 33216  			y := left.Args[1]
 33217  			x := left.Args[0]
 33218  			right := v_1
 33219  			if right.Op != OpRsh16Ux32 {
 33220  				continue
 33221  			}
 33222  			_ = right.Args[1]
 33223  			if x != right.Args[0] {
 33224  				continue
 33225  			}
 33226  			right_1 := right.Args[1]
 33227  			if right_1.Op != OpSub32 {
 33228  				continue
 33229  			}
 33230  			_ = right_1.Args[1]
 33231  			right_1_0 := right_1.Args[0]
 33232  			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
 33233  				continue
 33234  			}
 33235  			v.reset(OpRotateLeft16)
 33236  			v.AddArg2(x, y)
 33237  			return true
 33238  		}
 33239  		break
 33240  	}
 33241  	// match: (Xor16 left:(Lsh16x16 x y) right:(Rsh16Ux16 x (Sub16 (Const16 [16]) y)))
 33242  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
 33243  	// result: (RotateLeft16 x y)
 33244  	for {
 33245  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33246  			left := v_0
 33247  			if left.Op != OpLsh16x16 {
 33248  				continue
 33249  			}
 33250  			y := left.Args[1]
 33251  			x := left.Args[0]
 33252  			right := v_1
 33253  			if right.Op != OpRsh16Ux16 {
 33254  				continue
 33255  			}
 33256  			_ = right.Args[1]
 33257  			if x != right.Args[0] {
 33258  				continue
 33259  			}
 33260  			right_1 := right.Args[1]
 33261  			if right_1.Op != OpSub16 {
 33262  				continue
 33263  			}
 33264  			_ = right_1.Args[1]
 33265  			right_1_0 := right_1.Args[0]
 33266  			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
 33267  				continue
 33268  			}
 33269  			v.reset(OpRotateLeft16)
 33270  			v.AddArg2(x, y)
 33271  			return true
 33272  		}
 33273  		break
 33274  	}
 33275  	// match: (Xor16 left:(Lsh16x8 x y) right:(Rsh16Ux8 x (Sub8 (Const8 [16]) y)))
 33276  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
 33277  	// result: (RotateLeft16 x y)
 33278  	for {
 33279  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33280  			left := v_0
 33281  			if left.Op != OpLsh16x8 {
 33282  				continue
 33283  			}
 33284  			y := left.Args[1]
 33285  			x := left.Args[0]
 33286  			right := v_1
 33287  			if right.Op != OpRsh16Ux8 {
 33288  				continue
 33289  			}
 33290  			_ = right.Args[1]
 33291  			if x != right.Args[0] {
 33292  				continue
 33293  			}
 33294  			right_1 := right.Args[1]
 33295  			if right_1.Op != OpSub8 {
 33296  				continue
 33297  			}
 33298  			_ = right_1.Args[1]
 33299  			right_1_0 := right_1.Args[0]
 33300  			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
 33301  				continue
 33302  			}
 33303  			v.reset(OpRotateLeft16)
 33304  			v.AddArg2(x, y)
 33305  			return true
 33306  		}
 33307  		break
 33308  	}
 33309  	// match: (Xor16 right:(Rsh16Ux64 x y) left:(Lsh16x64 x z:(Sub64 (Const64 [16]) y)))
 33310  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
 33311  	// result: (RotateLeft16 x z)
 33312  	for {
 33313  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33314  			right := v_0
 33315  			if right.Op != OpRsh16Ux64 {
 33316  				continue
 33317  			}
 33318  			y := right.Args[1]
 33319  			x := right.Args[0]
 33320  			left := v_1
 33321  			if left.Op != OpLsh16x64 {
 33322  				continue
 33323  			}
 33324  			_ = left.Args[1]
 33325  			if x != left.Args[0] {
 33326  				continue
 33327  			}
 33328  			z := left.Args[1]
 33329  			if z.Op != OpSub64 {
 33330  				continue
 33331  			}
 33332  			_ = z.Args[1]
 33333  			z_0 := z.Args[0]
 33334  			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
 33335  				continue
 33336  			}
 33337  			v.reset(OpRotateLeft16)
 33338  			v.AddArg2(x, z)
 33339  			return true
 33340  		}
 33341  		break
 33342  	}
 33343  	// match: (Xor16 right:(Rsh16Ux32 x y) left:(Lsh16x32 x z:(Sub32 (Const32 [16]) y)))
 33344  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
 33345  	// result: (RotateLeft16 x z)
 33346  	for {
 33347  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33348  			right := v_0
 33349  			if right.Op != OpRsh16Ux32 {
 33350  				continue
 33351  			}
 33352  			y := right.Args[1]
 33353  			x := right.Args[0]
 33354  			left := v_1
 33355  			if left.Op != OpLsh16x32 {
 33356  				continue
 33357  			}
 33358  			_ = left.Args[1]
 33359  			if x != left.Args[0] {
 33360  				continue
 33361  			}
 33362  			z := left.Args[1]
 33363  			if z.Op != OpSub32 {
 33364  				continue
 33365  			}
 33366  			_ = z.Args[1]
 33367  			z_0 := z.Args[0]
 33368  			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
 33369  				continue
 33370  			}
 33371  			v.reset(OpRotateLeft16)
 33372  			v.AddArg2(x, z)
 33373  			return true
 33374  		}
 33375  		break
 33376  	}
 33377  	// match: (Xor16 right:(Rsh16Ux16 x y) left:(Lsh16x16 x z:(Sub16 (Const16 [16]) y)))
 33378  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
 33379  	// result: (RotateLeft16 x z)
 33380  	for {
 33381  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33382  			right := v_0
 33383  			if right.Op != OpRsh16Ux16 {
 33384  				continue
 33385  			}
 33386  			y := right.Args[1]
 33387  			x := right.Args[0]
 33388  			left := v_1
 33389  			if left.Op != OpLsh16x16 {
 33390  				continue
 33391  			}
 33392  			_ = left.Args[1]
 33393  			if x != left.Args[0] {
 33394  				continue
 33395  			}
 33396  			z := left.Args[1]
 33397  			if z.Op != OpSub16 {
 33398  				continue
 33399  			}
 33400  			_ = z.Args[1]
 33401  			z_0 := z.Args[0]
 33402  			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
 33403  				continue
 33404  			}
 33405  			v.reset(OpRotateLeft16)
 33406  			v.AddArg2(x, z)
 33407  			return true
 33408  		}
 33409  		break
 33410  	}
 33411  	// match: (Xor16 right:(Rsh16Ux8 x y) left:(Lsh16x8 x z:(Sub8 (Const8 [16]) y)))
 33412  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
 33413  	// result: (RotateLeft16 x z)
 33414  	for {
 33415  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33416  			right := v_0
 33417  			if right.Op != OpRsh16Ux8 {
 33418  				continue
 33419  			}
 33420  			y := right.Args[1]
 33421  			x := right.Args[0]
 33422  			left := v_1
 33423  			if left.Op != OpLsh16x8 {
 33424  				continue
 33425  			}
 33426  			_ = left.Args[1]
 33427  			if x != left.Args[0] {
 33428  				continue
 33429  			}
 33430  			z := left.Args[1]
 33431  			if z.Op != OpSub8 {
 33432  				continue
 33433  			}
 33434  			_ = z.Args[1]
 33435  			z_0 := z.Args[0]
 33436  			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
 33437  				continue
 33438  			}
 33439  			v.reset(OpRotateLeft16)
 33440  			v.AddArg2(x, z)
 33441  			return true
 33442  		}
 33443  		break
 33444  	}
 33445  	return false
 33446  }
 33447  func rewriteValuegeneric_OpXor32(v *Value) bool {
 33448  	v_1 := v.Args[1]
 33449  	v_0 := v.Args[0]
 33450  	b := v.Block
 33451  	config := b.Func.Config
 33452  	// match: (Xor32 (Const32 [c]) (Const32 [d]))
 33453  	// result: (Const32 [c^d])
 33454  	for {
 33455  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33456  			if v_0.Op != OpConst32 {
 33457  				continue
 33458  			}
 33459  			c := auxIntToInt32(v_0.AuxInt)
 33460  			if v_1.Op != OpConst32 {
 33461  				continue
 33462  			}
 33463  			d := auxIntToInt32(v_1.AuxInt)
 33464  			v.reset(OpConst32)
 33465  			v.AuxInt = int32ToAuxInt(c ^ d)
 33466  			return true
 33467  		}
 33468  		break
 33469  	}
 33470  	// match: (Xor32 x x)
 33471  	// result: (Const32 [0])
 33472  	for {
 33473  		x := v_0
 33474  		if x != v_1 {
 33475  			break
 33476  		}
 33477  		v.reset(OpConst32)
 33478  		v.AuxInt = int32ToAuxInt(0)
 33479  		return true
 33480  	}
 33481  	// match: (Xor32 (Const32 [0]) x)
 33482  	// result: x
 33483  	for {
 33484  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33485  			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 33486  				continue
 33487  			}
 33488  			x := v_1
 33489  			v.copyOf(x)
 33490  			return true
 33491  		}
 33492  		break
 33493  	}
 33494  	// match: (Xor32 (Com32 x) x)
 33495  	// result: (Const32 [-1])
 33496  	for {
 33497  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33498  			if v_0.Op != OpCom32 {
 33499  				continue
 33500  			}
 33501  			x := v_0.Args[0]
 33502  			if x != v_1 {
 33503  				continue
 33504  			}
 33505  			v.reset(OpConst32)
 33506  			v.AuxInt = int32ToAuxInt(-1)
 33507  			return true
 33508  		}
 33509  		break
 33510  	}
 33511  	// match: (Xor32 (Const32 [-1]) x)
 33512  	// result: (Com32 x)
 33513  	for {
 33514  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33515  			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != -1 {
 33516  				continue
 33517  			}
 33518  			x := v_1
 33519  			v.reset(OpCom32)
 33520  			v.AddArg(x)
 33521  			return true
 33522  		}
 33523  		break
 33524  	}
 33525  	// match: (Xor32 x (Xor32 x y))
 33526  	// result: y
 33527  	for {
 33528  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33529  			x := v_0
 33530  			if v_1.Op != OpXor32 {
 33531  				continue
 33532  			}
 33533  			_ = v_1.Args[1]
 33534  			v_1_0 := v_1.Args[0]
 33535  			v_1_1 := v_1.Args[1]
 33536  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 33537  				if x != v_1_0 {
 33538  					continue
 33539  				}
 33540  				y := v_1_1
 33541  				v.copyOf(y)
 33542  				return true
 33543  			}
 33544  		}
 33545  		break
 33546  	}
 33547  	// match: (Xor32 (Xor32 i:(Const32 <t>) z) x)
 33548  	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
 33549  	// result: (Xor32 i (Xor32 <t> z x))
 33550  	for {
 33551  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33552  			if v_0.Op != OpXor32 {
 33553  				continue
 33554  			}
 33555  			_ = v_0.Args[1]
 33556  			v_0_0 := v_0.Args[0]
 33557  			v_0_1 := v_0.Args[1]
 33558  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 33559  				i := v_0_0
 33560  				if i.Op != OpConst32 {
 33561  					continue
 33562  				}
 33563  				t := i.Type
 33564  				z := v_0_1
 33565  				x := v_1
 33566  				if !(z.Op != OpConst32 && x.Op != OpConst32) {
 33567  					continue
 33568  				}
 33569  				v.reset(OpXor32)
 33570  				v0 := b.NewValue0(v.Pos, OpXor32, t)
 33571  				v0.AddArg2(z, x)
 33572  				v.AddArg2(i, v0)
 33573  				return true
 33574  			}
 33575  		}
 33576  		break
 33577  	}
 33578  	// match: (Xor32 (Const32 <t> [c]) (Xor32 (Const32 <t> [d]) x))
 33579  	// result: (Xor32 (Const32 <t> [c^d]) x)
 33580  	for {
 33581  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33582  			if v_0.Op != OpConst32 {
 33583  				continue
 33584  			}
 33585  			t := v_0.Type
 33586  			c := auxIntToInt32(v_0.AuxInt)
 33587  			if v_1.Op != OpXor32 {
 33588  				continue
 33589  			}
 33590  			_ = v_1.Args[1]
 33591  			v_1_0 := v_1.Args[0]
 33592  			v_1_1 := v_1.Args[1]
 33593  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 33594  				if v_1_0.Op != OpConst32 || v_1_0.Type != t {
 33595  					continue
 33596  				}
 33597  				d := auxIntToInt32(v_1_0.AuxInt)
 33598  				x := v_1_1
 33599  				v.reset(OpXor32)
 33600  				v0 := b.NewValue0(v.Pos, OpConst32, t)
 33601  				v0.AuxInt = int32ToAuxInt(c ^ d)
 33602  				v.AddArg2(v0, x)
 33603  				return true
 33604  			}
 33605  		}
 33606  		break
 33607  	}
 33608  	// match: (Xor32 (Lsh32x64 x z:(Const64 <t> [c])) (Rsh32Ux64 x (Const64 [d])))
 33609  	// cond: c < 32 && d == 32-c && canRotate(config, 32)
 33610  	// result: (RotateLeft32 x z)
 33611  	for {
 33612  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33613  			if v_0.Op != OpLsh32x64 {
 33614  				continue
 33615  			}
 33616  			_ = v_0.Args[1]
 33617  			x := v_0.Args[0]
 33618  			z := v_0.Args[1]
 33619  			if z.Op != OpConst64 {
 33620  				continue
 33621  			}
 33622  			c := auxIntToInt64(z.AuxInt)
 33623  			if v_1.Op != OpRsh32Ux64 {
 33624  				continue
 33625  			}
 33626  			_ = v_1.Args[1]
 33627  			if x != v_1.Args[0] {
 33628  				continue
 33629  			}
 33630  			v_1_1 := v_1.Args[1]
 33631  			if v_1_1.Op != OpConst64 {
 33632  				continue
 33633  			}
 33634  			d := auxIntToInt64(v_1_1.AuxInt)
 33635  			if !(c < 32 && d == 32-c && canRotate(config, 32)) {
 33636  				continue
 33637  			}
 33638  			v.reset(OpRotateLeft32)
 33639  			v.AddArg2(x, z)
 33640  			return true
 33641  		}
 33642  		break
 33643  	}
 33644  	// match: (Xor32 left:(Lsh32x64 x y) right:(Rsh32Ux64 x (Sub64 (Const64 [32]) y)))
 33645  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
 33646  	// result: (RotateLeft32 x y)
 33647  	for {
 33648  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33649  			left := v_0
 33650  			if left.Op != OpLsh32x64 {
 33651  				continue
 33652  			}
 33653  			y := left.Args[1]
 33654  			x := left.Args[0]
 33655  			right := v_1
 33656  			if right.Op != OpRsh32Ux64 {
 33657  				continue
 33658  			}
 33659  			_ = right.Args[1]
 33660  			if x != right.Args[0] {
 33661  				continue
 33662  			}
 33663  			right_1 := right.Args[1]
 33664  			if right_1.Op != OpSub64 {
 33665  				continue
 33666  			}
 33667  			_ = right_1.Args[1]
 33668  			right_1_0 := right_1.Args[0]
 33669  			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
 33670  				continue
 33671  			}
 33672  			v.reset(OpRotateLeft32)
 33673  			v.AddArg2(x, y)
 33674  			return true
 33675  		}
 33676  		break
 33677  	}
 33678  	// match: (Xor32 left:(Lsh32x32 x y) right:(Rsh32Ux32 x (Sub32 (Const32 [32]) y)))
 33679  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
 33680  	// result: (RotateLeft32 x y)
 33681  	for {
 33682  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33683  			left := v_0
 33684  			if left.Op != OpLsh32x32 {
 33685  				continue
 33686  			}
 33687  			y := left.Args[1]
 33688  			x := left.Args[0]
 33689  			right := v_1
 33690  			if right.Op != OpRsh32Ux32 {
 33691  				continue
 33692  			}
 33693  			_ = right.Args[1]
 33694  			if x != right.Args[0] {
 33695  				continue
 33696  			}
 33697  			right_1 := right.Args[1]
 33698  			if right_1.Op != OpSub32 {
 33699  				continue
 33700  			}
 33701  			_ = right_1.Args[1]
 33702  			right_1_0 := right_1.Args[0]
 33703  			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
 33704  				continue
 33705  			}
 33706  			v.reset(OpRotateLeft32)
 33707  			v.AddArg2(x, y)
 33708  			return true
 33709  		}
 33710  		break
 33711  	}
 33712  	// match: (Xor32 left:(Lsh32x16 x y) right:(Rsh32Ux16 x (Sub16 (Const16 [32]) y)))
 33713  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
 33714  	// result: (RotateLeft32 x y)
 33715  	for {
 33716  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33717  			left := v_0
 33718  			if left.Op != OpLsh32x16 {
 33719  				continue
 33720  			}
 33721  			y := left.Args[1]
 33722  			x := left.Args[0]
 33723  			right := v_1
 33724  			if right.Op != OpRsh32Ux16 {
 33725  				continue
 33726  			}
 33727  			_ = right.Args[1]
 33728  			if x != right.Args[0] {
 33729  				continue
 33730  			}
 33731  			right_1 := right.Args[1]
 33732  			if right_1.Op != OpSub16 {
 33733  				continue
 33734  			}
 33735  			_ = right_1.Args[1]
 33736  			right_1_0 := right_1.Args[0]
 33737  			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
 33738  				continue
 33739  			}
 33740  			v.reset(OpRotateLeft32)
 33741  			v.AddArg2(x, y)
 33742  			return true
 33743  		}
 33744  		break
 33745  	}
 33746  	// match: (Xor32 left:(Lsh32x8 x y) right:(Rsh32Ux8 x (Sub8 (Const8 [32]) y)))
 33747  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
 33748  	// result: (RotateLeft32 x y)
 33749  	for {
 33750  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33751  			left := v_0
 33752  			if left.Op != OpLsh32x8 {
 33753  				continue
 33754  			}
 33755  			y := left.Args[1]
 33756  			x := left.Args[0]
 33757  			right := v_1
 33758  			if right.Op != OpRsh32Ux8 {
 33759  				continue
 33760  			}
 33761  			_ = right.Args[1]
 33762  			if x != right.Args[0] {
 33763  				continue
 33764  			}
 33765  			right_1 := right.Args[1]
 33766  			if right_1.Op != OpSub8 {
 33767  				continue
 33768  			}
 33769  			_ = right_1.Args[1]
 33770  			right_1_0 := right_1.Args[0]
 33771  			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
 33772  				continue
 33773  			}
 33774  			v.reset(OpRotateLeft32)
 33775  			v.AddArg2(x, y)
 33776  			return true
 33777  		}
 33778  		break
 33779  	}
 33780  	// match: (Xor32 right:(Rsh32Ux64 x y) left:(Lsh32x64 x z:(Sub64 (Const64 [32]) y)))
 33781  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
 33782  	// result: (RotateLeft32 x z)
 33783  	for {
 33784  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33785  			right := v_0
 33786  			if right.Op != OpRsh32Ux64 {
 33787  				continue
 33788  			}
 33789  			y := right.Args[1]
 33790  			x := right.Args[0]
 33791  			left := v_1
 33792  			if left.Op != OpLsh32x64 {
 33793  				continue
 33794  			}
 33795  			_ = left.Args[1]
 33796  			if x != left.Args[0] {
 33797  				continue
 33798  			}
 33799  			z := left.Args[1]
 33800  			if z.Op != OpSub64 {
 33801  				continue
 33802  			}
 33803  			_ = z.Args[1]
 33804  			z_0 := z.Args[0]
 33805  			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
 33806  				continue
 33807  			}
 33808  			v.reset(OpRotateLeft32)
 33809  			v.AddArg2(x, z)
 33810  			return true
 33811  		}
 33812  		break
 33813  	}
 33814  	// match: (Xor32 right:(Rsh32Ux32 x y) left:(Lsh32x32 x z:(Sub32 (Const32 [32]) y)))
 33815  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
 33816  	// result: (RotateLeft32 x z)
 33817  	for {
 33818  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33819  			right := v_0
 33820  			if right.Op != OpRsh32Ux32 {
 33821  				continue
 33822  			}
 33823  			y := right.Args[1]
 33824  			x := right.Args[0]
 33825  			left := v_1
 33826  			if left.Op != OpLsh32x32 {
 33827  				continue
 33828  			}
 33829  			_ = left.Args[1]
 33830  			if x != left.Args[0] {
 33831  				continue
 33832  			}
 33833  			z := left.Args[1]
 33834  			if z.Op != OpSub32 {
 33835  				continue
 33836  			}
 33837  			_ = z.Args[1]
 33838  			z_0 := z.Args[0]
 33839  			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
 33840  				continue
 33841  			}
 33842  			v.reset(OpRotateLeft32)
 33843  			v.AddArg2(x, z)
 33844  			return true
 33845  		}
 33846  		break
 33847  	}
 33848  	// match: (Xor32 right:(Rsh32Ux16 x y) left:(Lsh32x16 x z:(Sub16 (Const16 [32]) y)))
 33849  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
 33850  	// result: (RotateLeft32 x z)
 33851  	for {
 33852  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33853  			right := v_0
 33854  			if right.Op != OpRsh32Ux16 {
 33855  				continue
 33856  			}
 33857  			y := right.Args[1]
 33858  			x := right.Args[0]
 33859  			left := v_1
 33860  			if left.Op != OpLsh32x16 {
 33861  				continue
 33862  			}
 33863  			_ = left.Args[1]
 33864  			if x != left.Args[0] {
 33865  				continue
 33866  			}
 33867  			z := left.Args[1]
 33868  			if z.Op != OpSub16 {
 33869  				continue
 33870  			}
 33871  			_ = z.Args[1]
 33872  			z_0 := z.Args[0]
 33873  			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
 33874  				continue
 33875  			}
 33876  			v.reset(OpRotateLeft32)
 33877  			v.AddArg2(x, z)
 33878  			return true
 33879  		}
 33880  		break
 33881  	}
 33882  	// match: (Xor32 right:(Rsh32Ux8 x y) left:(Lsh32x8 x z:(Sub8 (Const8 [32]) y)))
 33883  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
 33884  	// result: (RotateLeft32 x z)
 33885  	for {
 33886  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33887  			right := v_0
 33888  			if right.Op != OpRsh32Ux8 {
 33889  				continue
 33890  			}
 33891  			y := right.Args[1]
 33892  			x := right.Args[0]
 33893  			left := v_1
 33894  			if left.Op != OpLsh32x8 {
 33895  				continue
 33896  			}
 33897  			_ = left.Args[1]
 33898  			if x != left.Args[0] {
 33899  				continue
 33900  			}
 33901  			z := left.Args[1]
 33902  			if z.Op != OpSub8 {
 33903  				continue
 33904  			}
 33905  			_ = z.Args[1]
 33906  			z_0 := z.Args[0]
 33907  			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
 33908  				continue
 33909  			}
 33910  			v.reset(OpRotateLeft32)
 33911  			v.AddArg2(x, z)
 33912  			return true
 33913  		}
 33914  		break
 33915  	}
 33916  	return false
 33917  }
 33918  func rewriteValuegeneric_OpXor64(v *Value) bool {
 33919  	v_1 := v.Args[1]
 33920  	v_0 := v.Args[0]
 33921  	b := v.Block
 33922  	config := b.Func.Config
 33923  	// match: (Xor64 (Const64 [c]) (Const64 [d]))
 33924  	// result: (Const64 [c^d])
 33925  	for {
 33926  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33927  			if v_0.Op != OpConst64 {
 33928  				continue
 33929  			}
 33930  			c := auxIntToInt64(v_0.AuxInt)
 33931  			if v_1.Op != OpConst64 {
 33932  				continue
 33933  			}
 33934  			d := auxIntToInt64(v_1.AuxInt)
 33935  			v.reset(OpConst64)
 33936  			v.AuxInt = int64ToAuxInt(c ^ d)
 33937  			return true
 33938  		}
 33939  		break
 33940  	}
 33941  	// match: (Xor64 x x)
 33942  	// result: (Const64 [0])
 33943  	for {
 33944  		x := v_0
 33945  		if x != v_1 {
 33946  			break
 33947  		}
 33948  		v.reset(OpConst64)
 33949  		v.AuxInt = int64ToAuxInt(0)
 33950  		return true
 33951  	}
 33952  	// match: (Xor64 (Const64 [0]) x)
 33953  	// result: x
 33954  	for {
 33955  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33956  			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 33957  				continue
 33958  			}
 33959  			x := v_1
 33960  			v.copyOf(x)
 33961  			return true
 33962  		}
 33963  		break
 33964  	}
 33965  	// match: (Xor64 (Com64 x) x)
 33966  	// result: (Const64 [-1])
 33967  	for {
 33968  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33969  			if v_0.Op != OpCom64 {
 33970  				continue
 33971  			}
 33972  			x := v_0.Args[0]
 33973  			if x != v_1 {
 33974  				continue
 33975  			}
 33976  			v.reset(OpConst64)
 33977  			v.AuxInt = int64ToAuxInt(-1)
 33978  			return true
 33979  		}
 33980  		break
 33981  	}
 33982  	// match: (Xor64 (Const64 [-1]) x)
 33983  	// result: (Com64 x)
 33984  	for {
 33985  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 33986  			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != -1 {
 33987  				continue
 33988  			}
 33989  			x := v_1
 33990  			v.reset(OpCom64)
 33991  			v.AddArg(x)
 33992  			return true
 33993  		}
 33994  		break
 33995  	}
 33996  	// match: (Xor64 x (Xor64 x y))
 33997  	// result: y
 33998  	for {
 33999  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 34000  			x := v_0
 34001  			if v_1.Op != OpXor64 {
 34002  				continue
 34003  			}
 34004  			_ = v_1.Args[1]
 34005  			v_1_0 := v_1.Args[0]
 34006  			v_1_1 := v_1.Args[1]
 34007  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 34008  				if x != v_1_0 {
 34009  					continue
 34010  				}
 34011  				y := v_1_1
 34012  				v.copyOf(y)
 34013  				return true
 34014  			}
 34015  		}
 34016  		break
 34017  	}
 34018  	// match: (Xor64 (Xor64 i:(Const64 <t>) z) x)
 34019  	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
 34020  	// result: (Xor64 i (Xor64 <t> z x))
 34021  	for {
 34022  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 34023  			if v_0.Op != OpXor64 {
 34024  				continue
 34025  			}
 34026  			_ = v_0.Args[1]
 34027  			v_0_0 := v_0.Args[0]
 34028  			v_0_1 := v_0.Args[1]
 34029  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 34030  				i := v_0_0
 34031  				if i.Op != OpConst64 {
 34032  					continue
 34033  				}
 34034  				t := i.Type
 34035  				z := v_0_1
 34036  				x := v_1
 34037  				if !(z.Op != OpConst64 && x.Op != OpConst64) {
 34038  					continue
 34039  				}
 34040  				v.reset(OpXor64)
 34041  				v0 := b.NewValue0(v.Pos, OpXor64, t)
 34042  				v0.AddArg2(z, x)
 34043  				v.AddArg2(i, v0)
 34044  				return true
 34045  			}
 34046  		}
 34047  		break
 34048  	}
 34049  	// match: (Xor64 (Const64 <t> [c]) (Xor64 (Const64 <t> [d]) x))
 34050  	// result: (Xor64 (Const64 <t> [c^d]) x)
 34051  	for {
 34052  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 34053  			if v_0.Op != OpConst64 {
 34054  				continue
 34055  			}
 34056  			t := v_0.Type
 34057  			c := auxIntToInt64(v_0.AuxInt)
 34058  			if v_1.Op != OpXor64 {
 34059  				continue
 34060  			}
 34061  			_ = v_1.Args[1]
 34062  			v_1_0 := v_1.Args[0]
 34063  			v_1_1 := v_1.Args[1]
 34064  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 34065  				if v_1_0.Op != OpConst64 || v_1_0.Type != t {
 34066  					continue
 34067  				}
 34068  				d := auxIntToInt64(v_1_0.AuxInt)
 34069  				x := v_1_1
 34070  				v.reset(OpXor64)
 34071  				v0 := b.NewValue0(v.Pos, OpConst64, t)
 34072  				v0.AuxInt = int64ToAuxInt(c ^ d)
 34073  				v.AddArg2(v0, x)
 34074  				return true
 34075  			}
 34076  		}
 34077  		break
 34078  	}
 34079  	// match: (Xor64 (Lsh64x64 x z:(Const64 <t> [c])) (Rsh64Ux64 x (Const64 [d])))
 34080  	// cond: c < 64 && d == 64-c && canRotate(config, 64)
 34081  	// result: (RotateLeft64 x z)
 34082  	for {
 34083  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 34084  			if v_0.Op != OpLsh64x64 {
 34085  				continue
 34086  			}
 34087  			_ = v_0.Args[1]
 34088  			x := v_0.Args[0]
 34089  			z := v_0.Args[1]
 34090  			if z.Op != OpConst64 {
 34091  				continue
 34092  			}
 34093  			c := auxIntToInt64(z.AuxInt)
 34094  			if v_1.Op != OpRsh64Ux64 {
 34095  				continue
 34096  			}
 34097  			_ = v_1.Args[1]
 34098  			if x != v_1.Args[0] {
 34099  				continue
 34100  			}
 34101  			v_1_1 := v_1.Args[1]
 34102  			if v_1_1.Op != OpConst64 {
 34103  				continue
 34104  			}
 34105  			d := auxIntToInt64(v_1_1.AuxInt)
 34106  			if !(c < 64 && d == 64-c && canRotate(config, 64)) {
 34107  				continue
 34108  			}
 34109  			v.reset(OpRotateLeft64)
 34110  			v.AddArg2(x, z)
 34111  			return true
 34112  		}
 34113  		break
 34114  	}
 34115  	// match: (Xor64 left:(Lsh64x64 x y) right:(Rsh64Ux64 x (Sub64 (Const64 [64]) y)))
 34116  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
 34117  	// result: (RotateLeft64 x y)
 34118  	for {
 34119  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 34120  			left := v_0
 34121  			if left.Op != OpLsh64x64 {
 34122  				continue
 34123  			}
 34124  			y := left.Args[1]
 34125  			x := left.Args[0]
 34126  			right := v_1
 34127  			if right.Op != OpRsh64Ux64 {
 34128  				continue
 34129  			}
 34130  			_ = right.Args[1]
 34131  			if x != right.Args[0] {
 34132  				continue
 34133  			}
 34134  			right_1 := right.Args[1]
 34135  			if right_1.Op != OpSub64 {
 34136  				continue
 34137  			}
 34138  			_ = right_1.Args[1]
 34139  			right_1_0 := right_1.Args[0]
 34140  			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
 34141  				continue
 34142  			}
 34143  			v.reset(OpRotateLeft64)
 34144  			v.AddArg2(x, y)
 34145  			return true
 34146  		}
 34147  		break
 34148  	}
 34149  	// match: (Xor64 left:(Lsh64x32 x y) right:(Rsh64Ux32 x (Sub32 (Const32 [64]) y)))
 34150  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
 34151  	// result: (RotateLeft64 x y)
 34152  	for {
 34153  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 34154  			left := v_0
 34155  			if left.Op != OpLsh64x32 {
 34156  				continue
 34157  			}
 34158  			y := left.Args[1]
 34159  			x := left.Args[0]
 34160  			right := v_1
 34161  			if right.Op != OpRsh64Ux32 {
 34162  				continue
 34163  			}
 34164  			_ = right.Args[1]
 34165  			if x != right.Args[0] {
 34166  				continue
 34167  			}
 34168  			right_1 := right.Args[1]
 34169  			if right_1.Op != OpSub32 {
 34170  				continue
 34171  			}
 34172  			_ = right_1.Args[1]
 34173  			right_1_0 := right_1.Args[0]
 34174  			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
 34175  				continue
 34176  			}
 34177  			v.reset(OpRotateLeft64)
 34178  			v.AddArg2(x, y)
 34179  			return true
 34180  		}
 34181  		break
 34182  	}
 34183  	// match: (Xor64 left:(Lsh64x16 x y) right:(Rsh64Ux16 x (Sub16 (Const16 [64]) y)))
 34184  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
 34185  	// result: (RotateLeft64 x y)
 34186  	for {
 34187  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 34188  			left := v_0
 34189  			if left.Op != OpLsh64x16 {
 34190  				continue
 34191  			}
 34192  			y := left.Args[1]
 34193  			x := left.Args[0]
 34194  			right := v_1
 34195  			if right.Op != OpRsh64Ux16 {
 34196  				continue
 34197  			}
 34198  			_ = right.Args[1]
 34199  			if x != right.Args[0] {
 34200  				continue
 34201  			}
 34202  			right_1 := right.Args[1]
 34203  			if right_1.Op != OpSub16 {
 34204  				continue
 34205  			}
 34206  			_ = right_1.Args[1]
 34207  			right_1_0 := right_1.Args[0]
 34208  			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
 34209  				continue
 34210  			}
 34211  			v.reset(OpRotateLeft64)
 34212  			v.AddArg2(x, y)
 34213  			return true
 34214  		}
 34215  		break
 34216  	}
 34217  	// match: (Xor64 left:(Lsh64x8 x y) right:(Rsh64Ux8 x (Sub8 (Const8 [64]) y)))
 34218  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
 34219  	// result: (RotateLeft64 x y)
 34220  	for {
 34221  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 34222  			left := v_0
 34223  			if left.Op != OpLsh64x8 {
 34224  				continue
 34225  			}
 34226  			y := left.Args[1]
 34227  			x := left.Args[0]
 34228  			right := v_1
 34229  			if right.Op != OpRsh64Ux8 {
 34230  				continue
 34231  			}
 34232  			_ = right.Args[1]
 34233  			if x != right.Args[0] {
 34234  				continue
 34235  			}
 34236  			right_1 := right.Args[1]
 34237  			if right_1.Op != OpSub8 {
 34238  				continue
 34239  			}
 34240  			_ = right_1.Args[1]
 34241  			right_1_0 := right_1.Args[0]
 34242  			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
 34243  				continue
 34244  			}
 34245  			v.reset(OpRotateLeft64)
 34246  			v.AddArg2(x, y)
 34247  			return true
 34248  		}
 34249  		break
 34250  	}
 34251  	// match: (Xor64 right:(Rsh64Ux64 x y) left:(Lsh64x64 x z:(Sub64 (Const64 [64]) y)))
 34252  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
 34253  	// result: (RotateLeft64 x z)
 34254  	for {
 34255  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 34256  			right := v_0
 34257  			if right.Op != OpRsh64Ux64 {
 34258  				continue
 34259  			}
 34260  			y := right.Args[1]
 34261  			x := right.Args[0]
 34262  			left := v_1
 34263  			if left.Op != OpLsh64x64 {
 34264  				continue
 34265  			}
 34266  			_ = left.Args[1]
 34267  			if x != left.Args[0] {
 34268  				continue
 34269  			}
 34270  			z := left.Args[1]
 34271  			if z.Op != OpSub64 {
 34272  				continue
 34273  			}
 34274  			_ = z.Args[1]
 34275  			z_0 := z.Args[0]
 34276  			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
 34277  				continue
 34278  			}
 34279  			v.reset(OpRotateLeft64)
 34280  			v.AddArg2(x, z)
 34281  			return true
 34282  		}
 34283  		break
 34284  	}
 34285  	// match: (Xor64 right:(Rsh64Ux32 x y) left:(Lsh64x32 x z:(Sub32 (Const32 [64]) y)))
 34286  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
 34287  	// result: (RotateLeft64 x z)
 34288  	for {
 34289  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 34290  			right := v_0
 34291  			if right.Op != OpRsh64Ux32 {
 34292  				continue
 34293  			}
 34294  			y := right.Args[1]
 34295  			x := right.Args[0]
 34296  			left := v_1
 34297  			if left.Op != OpLsh64x32 {
 34298  				continue
 34299  			}
 34300  			_ = left.Args[1]
 34301  			if x != left.Args[0] {
 34302  				continue
 34303  			}
 34304  			z := left.Args[1]
 34305  			if z.Op != OpSub32 {
 34306  				continue
 34307  			}
 34308  			_ = z.Args[1]
 34309  			z_0 := z.Args[0]
 34310  			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
 34311  				continue
 34312  			}
 34313  			v.reset(OpRotateLeft64)
 34314  			v.AddArg2(x, z)
 34315  			return true
 34316  		}
 34317  		break
 34318  	}
 34319  	// match: (Xor64 right:(Rsh64Ux16 x y) left:(Lsh64x16 x z:(Sub16 (Const16 [64]) y)))
 34320  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
 34321  	// result: (RotateLeft64 x z)
 34322  	for {
 34323  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 34324  			right := v_0
 34325  			if right.Op != OpRsh64Ux16 {
 34326  				continue
 34327  			}
 34328  			y := right.Args[1]
 34329  			x := right.Args[0]
 34330  			left := v_1
 34331  			if left.Op != OpLsh64x16 {
 34332  				continue
 34333  			}
 34334  			_ = left.Args[1]
 34335  			if x != left.Args[0] {
 34336  				continue
 34337  			}
 34338  			z := left.Args[1]
 34339  			if z.Op != OpSub16 {
 34340  				continue
 34341  			}
 34342  			_ = z.Args[1]
 34343  			z_0 := z.Args[0]
 34344  			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
 34345  				continue
 34346  			}
 34347  			v.reset(OpRotateLeft64)
 34348  			v.AddArg2(x, z)
 34349  			return true
 34350  		}
 34351  		break
 34352  	}
 34353  	// match: (Xor64 right:(Rsh64Ux8 x y) left:(Lsh64x8 x z:(Sub8 (Const8 [64]) y)))
 34354  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
 34355  	// result: (RotateLeft64 x z)
 34356  	for {
 34357  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 34358  			right := v_0
 34359  			if right.Op != OpRsh64Ux8 {
 34360  				continue
 34361  			}
 34362  			y := right.Args[1]
 34363  			x := right.Args[0]
 34364  			left := v_1
 34365  			if left.Op != OpLsh64x8 {
 34366  				continue
 34367  			}
 34368  			_ = left.Args[1]
 34369  			if x != left.Args[0] {
 34370  				continue
 34371  			}
 34372  			z := left.Args[1]
 34373  			if z.Op != OpSub8 {
 34374  				continue
 34375  			}
 34376  			_ = z.Args[1]
 34377  			z_0 := z.Args[0]
 34378  			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
 34379  				continue
 34380  			}
 34381  			v.reset(OpRotateLeft64)
 34382  			v.AddArg2(x, z)
 34383  			return true
 34384  		}
 34385  		break
 34386  	}
 34387  	return false
 34388  }
 34389  func rewriteValuegeneric_OpXor8(v *Value) bool {
 34390  	v_1 := v.Args[1]
 34391  	v_0 := v.Args[0]
 34392  	b := v.Block
 34393  	config := b.Func.Config
 34394  	// match: (Xor8 (Const8 [c]) (Const8 [d]))
 34395  	// result: (Const8 [c^d])
 34396  	for {
 34397  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 34398  			if v_0.Op != OpConst8 {
 34399  				continue
 34400  			}
 34401  			c := auxIntToInt8(v_0.AuxInt)
 34402  			if v_1.Op != OpConst8 {
 34403  				continue
 34404  			}
 34405  			d := auxIntToInt8(v_1.AuxInt)
 34406  			v.reset(OpConst8)
 34407  			v.AuxInt = int8ToAuxInt(c ^ d)
 34408  			return true
 34409  		}
 34410  		break
 34411  	}
 34412  	// match: (Xor8 x x)
 34413  	// result: (Const8 [0])
 34414  	for {
 34415  		x := v_0
 34416  		if x != v_1 {
 34417  			break
 34418  		}
 34419  		v.reset(OpConst8)
 34420  		v.AuxInt = int8ToAuxInt(0)
 34421  		return true
 34422  	}
 34423  	// match: (Xor8 (Const8 [0]) x)
 34424  	// result: x
 34425  	for {
 34426  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 34427  			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 34428  				continue
 34429  			}
 34430  			x := v_1
 34431  			v.copyOf(x)
 34432  			return true
 34433  		}
 34434  		break
 34435  	}
 34436  	// match: (Xor8 (Com8 x) x)
 34437  	// result: (Const8 [-1])
 34438  	for {
 34439  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 34440  			if v_0.Op != OpCom8 {
 34441  				continue
 34442  			}
 34443  			x := v_0.Args[0]
 34444  			if x != v_1 {
 34445  				continue
 34446  			}
 34447  			v.reset(OpConst8)
 34448  			v.AuxInt = int8ToAuxInt(-1)
 34449  			return true
 34450  		}
 34451  		break
 34452  	}
 34453  	// match: (Xor8 (Const8 [-1]) x)
 34454  	// result: (Com8 x)
 34455  	for {
 34456  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 34457  			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != -1 {
 34458  				continue
 34459  			}
 34460  			x := v_1
 34461  			v.reset(OpCom8)
 34462  			v.AddArg(x)
 34463  			return true
 34464  		}
 34465  		break
 34466  	}
 34467  	// match: (Xor8 x (Xor8 x y))
 34468  	// result: y
 34469  	for {
 34470  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 34471  			x := v_0
 34472  			if v_1.Op != OpXor8 {
 34473  				continue
 34474  			}
 34475  			_ = v_1.Args[1]
 34476  			v_1_0 := v_1.Args[0]
 34477  			v_1_1 := v_1.Args[1]
 34478  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 34479  				if x != v_1_0 {
 34480  					continue
 34481  				}
 34482  				y := v_1_1
 34483  				v.copyOf(y)
 34484  				return true
 34485  			}
 34486  		}
 34487  		break
 34488  	}
 34489  	// match: (Xor8 (Xor8 i:(Const8 <t>) z) x)
 34490  	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
 34491  	// result: (Xor8 i (Xor8 <t> z x))
 34492  	for {
 34493  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 34494  			if v_0.Op != OpXor8 {
 34495  				continue
 34496  			}
 34497  			_ = v_0.Args[1]
 34498  			v_0_0 := v_0.Args[0]
 34499  			v_0_1 := v_0.Args[1]
 34500  			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
 34501  				i := v_0_0
 34502  				if i.Op != OpConst8 {
 34503  					continue
 34504  				}
 34505  				t := i.Type
 34506  				z := v_0_1
 34507  				x := v_1
 34508  				if !(z.Op != OpConst8 && x.Op != OpConst8) {
 34509  					continue
 34510  				}
 34511  				v.reset(OpXor8)
 34512  				v0 := b.NewValue0(v.Pos, OpXor8, t)
 34513  				v0.AddArg2(z, x)
 34514  				v.AddArg2(i, v0)
 34515  				return true
 34516  			}
 34517  		}
 34518  		break
 34519  	}
 34520  	// match: (Xor8 (Const8 <t> [c]) (Xor8 (Const8 <t> [d]) x))
 34521  	// result: (Xor8 (Const8 <t> [c^d]) x)
 34522  	for {
 34523  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 34524  			if v_0.Op != OpConst8 {
 34525  				continue
 34526  			}
 34527  			t := v_0.Type
 34528  			c := auxIntToInt8(v_0.AuxInt)
 34529  			if v_1.Op != OpXor8 {
 34530  				continue
 34531  			}
 34532  			_ = v_1.Args[1]
 34533  			v_1_0 := v_1.Args[0]
 34534  			v_1_1 := v_1.Args[1]
 34535  			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
 34536  				if v_1_0.Op != OpConst8 || v_1_0.Type != t {
 34537  					continue
 34538  				}
 34539  				d := auxIntToInt8(v_1_0.AuxInt)
 34540  				x := v_1_1
 34541  				v.reset(OpXor8)
 34542  				v0 := b.NewValue0(v.Pos, OpConst8, t)
 34543  				v0.AuxInt = int8ToAuxInt(c ^ d)
 34544  				v.AddArg2(v0, x)
 34545  				return true
 34546  			}
 34547  		}
 34548  		break
 34549  	}
 34550  	// match: (Xor8 (Lsh8x64 x z:(Const64 <t> [c])) (Rsh8Ux64 x (Const64 [d])))
 34551  	// cond: c < 8 && d == 8-c && canRotate(config, 8)
 34552  	// result: (RotateLeft8 x z)
 34553  	for {
 34554  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 34555  			if v_0.Op != OpLsh8x64 {
 34556  				continue
 34557  			}
 34558  			_ = v_0.Args[1]
 34559  			x := v_0.Args[0]
 34560  			z := v_0.Args[1]
 34561  			if z.Op != OpConst64 {
 34562  				continue
 34563  			}
 34564  			c := auxIntToInt64(z.AuxInt)
 34565  			if v_1.Op != OpRsh8Ux64 {
 34566  				continue
 34567  			}
 34568  			_ = v_1.Args[1]
 34569  			if x != v_1.Args[0] {
 34570  				continue
 34571  			}
 34572  			v_1_1 := v_1.Args[1]
 34573  			if v_1_1.Op != OpConst64 {
 34574  				continue
 34575  			}
 34576  			d := auxIntToInt64(v_1_1.AuxInt)
 34577  			if !(c < 8 && d == 8-c && canRotate(config, 8)) {
 34578  				continue
 34579  			}
 34580  			v.reset(OpRotateLeft8)
 34581  			v.AddArg2(x, z)
 34582  			return true
 34583  		}
 34584  		break
 34585  	}
 34586  	// match: (Xor8 left:(Lsh8x64 x y) right:(Rsh8Ux64 x (Sub64 (Const64 [8]) y)))
 34587  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
 34588  	// result: (RotateLeft8 x y)
 34589  	for {
 34590  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 34591  			left := v_0
 34592  			if left.Op != OpLsh8x64 {
 34593  				continue
 34594  			}
 34595  			y := left.Args[1]
 34596  			x := left.Args[0]
 34597  			right := v_1
 34598  			if right.Op != OpRsh8Ux64 {
 34599  				continue
 34600  			}
 34601  			_ = right.Args[1]
 34602  			if x != right.Args[0] {
 34603  				continue
 34604  			}
 34605  			right_1 := right.Args[1]
 34606  			if right_1.Op != OpSub64 {
 34607  				continue
 34608  			}
 34609  			_ = right_1.Args[1]
 34610  			right_1_0 := right_1.Args[0]
 34611  			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
 34612  				continue
 34613  			}
 34614  			v.reset(OpRotateLeft8)
 34615  			v.AddArg2(x, y)
 34616  			return true
 34617  		}
 34618  		break
 34619  	}
 34620  	// match: (Xor8 left:(Lsh8x32 x y) right:(Rsh8Ux32 x (Sub32 (Const32 [8]) y)))
 34621  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
 34622  	// result: (RotateLeft8 x y)
 34623  	for {
 34624  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 34625  			left := v_0
 34626  			if left.Op != OpLsh8x32 {
 34627  				continue
 34628  			}
 34629  			y := left.Args[1]
 34630  			x := left.Args[0]
 34631  			right := v_1
 34632  			if right.Op != OpRsh8Ux32 {
 34633  				continue
 34634  			}
 34635  			_ = right.Args[1]
 34636  			if x != right.Args[0] {
 34637  				continue
 34638  			}
 34639  			right_1 := right.Args[1]
 34640  			if right_1.Op != OpSub32 {
 34641  				continue
 34642  			}
 34643  			_ = right_1.Args[1]
 34644  			right_1_0 := right_1.Args[0]
 34645  			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
 34646  				continue
 34647  			}
 34648  			v.reset(OpRotateLeft8)
 34649  			v.AddArg2(x, y)
 34650  			return true
 34651  		}
 34652  		break
 34653  	}
 34654  	// match: (Xor8 left:(Lsh8x16 x y) right:(Rsh8Ux16 x (Sub16 (Const16 [8]) y)))
 34655  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
 34656  	// result: (RotateLeft8 x y)
 34657  	for {
 34658  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 34659  			left := v_0
 34660  			if left.Op != OpLsh8x16 {
 34661  				continue
 34662  			}
 34663  			y := left.Args[1]
 34664  			x := left.Args[0]
 34665  			right := v_1
 34666  			if right.Op != OpRsh8Ux16 {
 34667  				continue
 34668  			}
 34669  			_ = right.Args[1]
 34670  			if x != right.Args[0] {
 34671  				continue
 34672  			}
 34673  			right_1 := right.Args[1]
 34674  			if right_1.Op != OpSub16 {
 34675  				continue
 34676  			}
 34677  			_ = right_1.Args[1]
 34678  			right_1_0 := right_1.Args[0]
 34679  			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
 34680  				continue
 34681  			}
 34682  			v.reset(OpRotateLeft8)
 34683  			v.AddArg2(x, y)
 34684  			return true
 34685  		}
 34686  		break
 34687  	}
 34688  	// match: (Xor8 left:(Lsh8x8 x y) right:(Rsh8Ux8 x (Sub8 (Const8 [8]) y)))
 34689  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
 34690  	// result: (RotateLeft8 x y)
 34691  	for {
 34692  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 34693  			left := v_0
 34694  			if left.Op != OpLsh8x8 {
 34695  				continue
 34696  			}
 34697  			y := left.Args[1]
 34698  			x := left.Args[0]
 34699  			right := v_1
 34700  			if right.Op != OpRsh8Ux8 {
 34701  				continue
 34702  			}
 34703  			_ = right.Args[1]
 34704  			if x != right.Args[0] {
 34705  				continue
 34706  			}
 34707  			right_1 := right.Args[1]
 34708  			if right_1.Op != OpSub8 {
 34709  				continue
 34710  			}
 34711  			_ = right_1.Args[1]
 34712  			right_1_0 := right_1.Args[0]
 34713  			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
 34714  				continue
 34715  			}
 34716  			v.reset(OpRotateLeft8)
 34717  			v.AddArg2(x, y)
 34718  			return true
 34719  		}
 34720  		break
 34721  	}
 34722  	// match: (Xor8 right:(Rsh8Ux64 x y) left:(Lsh8x64 x z:(Sub64 (Const64 [8]) y)))
 34723  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
 34724  	// result: (RotateLeft8 x z)
 34725  	for {
 34726  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 34727  			right := v_0
 34728  			if right.Op != OpRsh8Ux64 {
 34729  				continue
 34730  			}
 34731  			y := right.Args[1]
 34732  			x := right.Args[0]
 34733  			left := v_1
 34734  			if left.Op != OpLsh8x64 {
 34735  				continue
 34736  			}
 34737  			_ = left.Args[1]
 34738  			if x != left.Args[0] {
 34739  				continue
 34740  			}
 34741  			z := left.Args[1]
 34742  			if z.Op != OpSub64 {
 34743  				continue
 34744  			}
 34745  			_ = z.Args[1]
 34746  			z_0 := z.Args[0]
 34747  			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
 34748  				continue
 34749  			}
 34750  			v.reset(OpRotateLeft8)
 34751  			v.AddArg2(x, z)
 34752  			return true
 34753  		}
 34754  		break
 34755  	}
 34756  	// match: (Xor8 right:(Rsh8Ux32 x y) left:(Lsh8x32 x z:(Sub32 (Const32 [8]) y)))
 34757  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
 34758  	// result: (RotateLeft8 x z)
 34759  	for {
 34760  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 34761  			right := v_0
 34762  			if right.Op != OpRsh8Ux32 {
 34763  				continue
 34764  			}
 34765  			y := right.Args[1]
 34766  			x := right.Args[0]
 34767  			left := v_1
 34768  			if left.Op != OpLsh8x32 {
 34769  				continue
 34770  			}
 34771  			_ = left.Args[1]
 34772  			if x != left.Args[0] {
 34773  				continue
 34774  			}
 34775  			z := left.Args[1]
 34776  			if z.Op != OpSub32 {
 34777  				continue
 34778  			}
 34779  			_ = z.Args[1]
 34780  			z_0 := z.Args[0]
 34781  			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
 34782  				continue
 34783  			}
 34784  			v.reset(OpRotateLeft8)
 34785  			v.AddArg2(x, z)
 34786  			return true
 34787  		}
 34788  		break
 34789  	}
 34790  	// match: (Xor8 right:(Rsh8Ux16 x y) left:(Lsh8x16 x z:(Sub16 (Const16 [8]) y)))
 34791  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
 34792  	// result: (RotateLeft8 x z)
 34793  	for {
 34794  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 34795  			right := v_0
 34796  			if right.Op != OpRsh8Ux16 {
 34797  				continue
 34798  			}
 34799  			y := right.Args[1]
 34800  			x := right.Args[0]
 34801  			left := v_1
 34802  			if left.Op != OpLsh8x16 {
 34803  				continue
 34804  			}
 34805  			_ = left.Args[1]
 34806  			if x != left.Args[0] {
 34807  				continue
 34808  			}
 34809  			z := left.Args[1]
 34810  			if z.Op != OpSub16 {
 34811  				continue
 34812  			}
 34813  			_ = z.Args[1]
 34814  			z_0 := z.Args[0]
 34815  			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
 34816  				continue
 34817  			}
 34818  			v.reset(OpRotateLeft8)
 34819  			v.AddArg2(x, z)
 34820  			return true
 34821  		}
 34822  		break
 34823  	}
 34824  	// match: (Xor8 right:(Rsh8Ux8 x y) left:(Lsh8x8 x z:(Sub8 (Const8 [8]) y)))
 34825  	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)
 34826  	// result: (RotateLeft8 x z)
 34827  	for {
 34828  		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
 34829  			right := v_0
 34830  			if right.Op != OpRsh8Ux8 {
 34831  				continue
 34832  			}
 34833  			y := right.Args[1]
 34834  			x := right.Args[0]
 34835  			left := v_1
 34836  			if left.Op != OpLsh8x8 {
 34837  				continue
 34838  			}
 34839  			_ = left.Args[1]
 34840  			if x != left.Args[0] {
 34841  				continue
 34842  			}
 34843  			z := left.Args[1]
 34844  			if z.Op != OpSub8 {
 34845  				continue
 34846  			}
 34847  			_ = z.Args[1]
 34848  			z_0 := z.Args[0]
 34849  			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) {
 34850  				continue
 34851  			}
 34852  			v.reset(OpRotateLeft8)
 34853  			v.AddArg2(x, z)
 34854  			return true
 34855  		}
 34856  		break
 34857  	}
 34858  	return false
 34859  }
 34860  func rewriteValuegeneric_OpZero(v *Value) bool {
 34861  	v_1 := v.Args[1]
 34862  	v_0 := v.Args[0]
 34863  	b := v.Block
 34864  	// match: (Zero (SelectN [0] call:(StaticLECall _ _)) mem:(SelectN [1] call))
 34865  	// cond: isSameCall(call.Aux, "runtime.newobject")
 34866  	// result: mem
 34867  	for {
 34868  		if v_0.Op != OpSelectN || auxIntToInt64(v_0.AuxInt) != 0 {
 34869  			break
 34870  		}
 34871  		call := v_0.Args[0]
 34872  		if call.Op != OpStaticLECall || len(call.Args) != 2 {
 34873  			break
 34874  		}
 34875  		mem := v_1
 34876  		if mem.Op != OpSelectN || auxIntToInt64(mem.AuxInt) != 1 || call != mem.Args[0] || !(isSameCall(call.Aux, "runtime.newobject")) {
 34877  			break
 34878  		}
 34879  		v.copyOf(mem)
 34880  		return true
 34881  	}
 34882  	// match: (Zero {t1} [n] p1 store:(Store {t2} (OffPtr [o2] p2) _ mem))
 34883  	// cond: isSamePtr(p1, p2) && store.Uses == 1 && n >= o2 + t2.Size() && clobber(store)
 34884  	// result: (Zero {t1} [n] p1 mem)
 34885  	for {
 34886  		n := auxIntToInt64(v.AuxInt)
 34887  		t1 := auxToType(v.Aux)
 34888  		p1 := v_0
 34889  		store := v_1
 34890  		if store.Op != OpStore {
 34891  			break
 34892  		}
 34893  		t2 := auxToType(store.Aux)
 34894  		mem := store.Args[2]
 34895  		store_0 := store.Args[0]
 34896  		if store_0.Op != OpOffPtr {
 34897  			break
 34898  		}
 34899  		o2 := auxIntToInt64(store_0.AuxInt)
 34900  		p2 := store_0.Args[0]
 34901  		if !(isSamePtr(p1, p2) && store.Uses == 1 && n >= o2+t2.Size() && clobber(store)) {
 34902  			break
 34903  		}
 34904  		v.reset(OpZero)
 34905  		v.AuxInt = int64ToAuxInt(n)
 34906  		v.Aux = typeToAux(t1)
 34907  		v.AddArg2(p1, mem)
 34908  		return true
 34909  	}
 34910  	// match: (Zero {t} [n] dst1 move:(Move {t} [n] dst2 _ mem))
 34911  	// cond: move.Uses == 1 && isSamePtr(dst1, dst2) && clobber(move)
 34912  	// result: (Zero {t} [n] dst1 mem)
 34913  	for {
 34914  		n := auxIntToInt64(v.AuxInt)
 34915  		t := auxToType(v.Aux)
 34916  		dst1 := v_0
 34917  		move := v_1
 34918  		if move.Op != OpMove || auxIntToInt64(move.AuxInt) != n || auxToType(move.Aux) != t {
 34919  			break
 34920  		}
 34921  		mem := move.Args[2]
 34922  		dst2 := move.Args[0]
 34923  		if !(move.Uses == 1 && isSamePtr(dst1, dst2) && clobber(move)) {
 34924  			break
 34925  		}
 34926  		v.reset(OpZero)
 34927  		v.AuxInt = int64ToAuxInt(n)
 34928  		v.Aux = typeToAux(t)
 34929  		v.AddArg2(dst1, mem)
 34930  		return true
 34931  	}
 34932  	// match: (Zero {t} [n] dst1 vardef:(VarDef {x} move:(Move {t} [n] dst2 _ mem)))
 34933  	// cond: move.Uses == 1 && vardef.Uses == 1 && isSamePtr(dst1, dst2) && clobber(move, vardef)
 34934  	// result: (Zero {t} [n] dst1 (VarDef {x} mem))
 34935  	for {
 34936  		n := auxIntToInt64(v.AuxInt)
 34937  		t := auxToType(v.Aux)
 34938  		dst1 := v_0
 34939  		vardef := v_1
 34940  		if vardef.Op != OpVarDef {
 34941  			break
 34942  		}
 34943  		x := auxToSym(vardef.Aux)
 34944  		move := vardef.Args[0]
 34945  		if move.Op != OpMove || auxIntToInt64(move.AuxInt) != n || auxToType(move.Aux) != t {
 34946  			break
 34947  		}
 34948  		mem := move.Args[2]
 34949  		dst2 := move.Args[0]
 34950  		if !(move.Uses == 1 && vardef.Uses == 1 && isSamePtr(dst1, dst2) && clobber(move, vardef)) {
 34951  			break
 34952  		}
 34953  		v.reset(OpZero)
 34954  		v.AuxInt = int64ToAuxInt(n)
 34955  		v.Aux = typeToAux(t)
 34956  		v0 := b.NewValue0(v.Pos, OpVarDef, types.TypeMem)
 34957  		v0.Aux = symToAux(x)
 34958  		v0.AddArg(mem)
 34959  		v.AddArg2(dst1, v0)
 34960  		return true
 34961  	}
 34962  	// match: (Zero {t} [s] dst1 zero:(Zero {t} [s] dst2 _))
 34963  	// cond: isSamePtr(dst1, dst2)
 34964  	// result: zero
 34965  	for {
 34966  		s := auxIntToInt64(v.AuxInt)
 34967  		t := auxToType(v.Aux)
 34968  		dst1 := v_0
 34969  		zero := v_1
 34970  		if zero.Op != OpZero || auxIntToInt64(zero.AuxInt) != s || auxToType(zero.Aux) != t {
 34971  			break
 34972  		}
 34973  		dst2 := zero.Args[0]
 34974  		if !(isSamePtr(dst1, dst2)) {
 34975  			break
 34976  		}
 34977  		v.copyOf(zero)
 34978  		return true
 34979  	}
 34980  	// match: (Zero {t} [s] dst1 vardef:(VarDef (Zero {t} [s] dst2 _)))
 34981  	// cond: isSamePtr(dst1, dst2)
 34982  	// result: vardef
 34983  	for {
 34984  		s := auxIntToInt64(v.AuxInt)
 34985  		t := auxToType(v.Aux)
 34986  		dst1 := v_0
 34987  		vardef := v_1
 34988  		if vardef.Op != OpVarDef {
 34989  			break
 34990  		}
 34991  		vardef_0 := vardef.Args[0]
 34992  		if vardef_0.Op != OpZero || auxIntToInt64(vardef_0.AuxInt) != s || auxToType(vardef_0.Aux) != t {
 34993  			break
 34994  		}
 34995  		dst2 := vardef_0.Args[0]
 34996  		if !(isSamePtr(dst1, dst2)) {
 34997  			break
 34998  		}
 34999  		v.copyOf(vardef)
 35000  		return true
 35001  	}
 35002  	return false
 35003  }
 35004  func rewriteValuegeneric_OpZeroExt16to32(v *Value) bool {
 35005  	v_0 := v.Args[0]
 35006  	// match: (ZeroExt16to32 (Const16 [c]))
 35007  	// result: (Const32 [int32(uint16(c))])
 35008  	for {
 35009  		if v_0.Op != OpConst16 {
 35010  			break
 35011  		}
 35012  		c := auxIntToInt16(v_0.AuxInt)
 35013  		v.reset(OpConst32)
 35014  		v.AuxInt = int32ToAuxInt(int32(uint16(c)))
 35015  		return true
 35016  	}
 35017  	// match: (ZeroExt16to32 (Trunc32to16 x:(Rsh32Ux64 _ (Const64 [s]))))
 35018  	// cond: s >= 16
 35019  	// result: x
 35020  	for {
 35021  		if v_0.Op != OpTrunc32to16 {
 35022  			break
 35023  		}
 35024  		x := v_0.Args[0]
 35025  		if x.Op != OpRsh32Ux64 {
 35026  			break
 35027  		}
 35028  		_ = x.Args[1]
 35029  		x_1 := x.Args[1]
 35030  		if x_1.Op != OpConst64 {
 35031  			break
 35032  		}
 35033  		s := auxIntToInt64(x_1.AuxInt)
 35034  		if !(s >= 16) {
 35035  			break
 35036  		}
 35037  		v.copyOf(x)
 35038  		return true
 35039  	}
 35040  	return false
 35041  }
 35042  func rewriteValuegeneric_OpZeroExt16to64(v *Value) bool {
 35043  	v_0 := v.Args[0]
 35044  	// match: (ZeroExt16to64 (Const16 [c]))
 35045  	// result: (Const64 [int64(uint16(c))])
 35046  	for {
 35047  		if v_0.Op != OpConst16 {
 35048  			break
 35049  		}
 35050  		c := auxIntToInt16(v_0.AuxInt)
 35051  		v.reset(OpConst64)
 35052  		v.AuxInt = int64ToAuxInt(int64(uint16(c)))
 35053  		return true
 35054  	}
 35055  	// match: (ZeroExt16to64 (Trunc64to16 x:(Rsh64Ux64 _ (Const64 [s]))))
 35056  	// cond: s >= 48
 35057  	// result: x
 35058  	for {
 35059  		if v_0.Op != OpTrunc64to16 {
 35060  			break
 35061  		}
 35062  		x := v_0.Args[0]
 35063  		if x.Op != OpRsh64Ux64 {
 35064  			break
 35065  		}
 35066  		_ = x.Args[1]
 35067  		x_1 := x.Args[1]
 35068  		if x_1.Op != OpConst64 {
 35069  			break
 35070  		}
 35071  		s := auxIntToInt64(x_1.AuxInt)
 35072  		if !(s >= 48) {
 35073  			break
 35074  		}
 35075  		v.copyOf(x)
 35076  		return true
 35077  	}
 35078  	return false
 35079  }
 35080  func rewriteValuegeneric_OpZeroExt32to64(v *Value) bool {
 35081  	v_0 := v.Args[0]
 35082  	// match: (ZeroExt32to64 (Const32 [c]))
 35083  	// result: (Const64 [int64(uint32(c))])
 35084  	for {
 35085  		if v_0.Op != OpConst32 {
 35086  			break
 35087  		}
 35088  		c := auxIntToInt32(v_0.AuxInt)
 35089  		v.reset(OpConst64)
 35090  		v.AuxInt = int64ToAuxInt(int64(uint32(c)))
 35091  		return true
 35092  	}
 35093  	// match: (ZeroExt32to64 (Trunc64to32 x:(Rsh64Ux64 _ (Const64 [s]))))
 35094  	// cond: s >= 32
 35095  	// result: x
 35096  	for {
 35097  		if v_0.Op != OpTrunc64to32 {
 35098  			break
 35099  		}
 35100  		x := v_0.Args[0]
 35101  		if x.Op != OpRsh64Ux64 {
 35102  			break
 35103  		}
 35104  		_ = x.Args[1]
 35105  		x_1 := x.Args[1]
 35106  		if x_1.Op != OpConst64 {
 35107  			break
 35108  		}
 35109  		s := auxIntToInt64(x_1.AuxInt)
 35110  		if !(s >= 32) {
 35111  			break
 35112  		}
 35113  		v.copyOf(x)
 35114  		return true
 35115  	}
 35116  	return false
 35117  }
 35118  func rewriteValuegeneric_OpZeroExt8to16(v *Value) bool {
 35119  	v_0 := v.Args[0]
 35120  	// match: (ZeroExt8to16 (Const8 [c]))
 35121  	// result: (Const16 [int16( uint8(c))])
 35122  	for {
 35123  		if v_0.Op != OpConst8 {
 35124  			break
 35125  		}
 35126  		c := auxIntToInt8(v_0.AuxInt)
 35127  		v.reset(OpConst16)
 35128  		v.AuxInt = int16ToAuxInt(int16(uint8(c)))
 35129  		return true
 35130  	}
 35131  	// match: (ZeroExt8to16 (Trunc16to8 x:(Rsh16Ux64 _ (Const64 [s]))))
 35132  	// cond: s >= 8
 35133  	// result: x
 35134  	for {
 35135  		if v_0.Op != OpTrunc16to8 {
 35136  			break
 35137  		}
 35138  		x := v_0.Args[0]
 35139  		if x.Op != OpRsh16Ux64 {
 35140  			break
 35141  		}
 35142  		_ = x.Args[1]
 35143  		x_1 := x.Args[1]
 35144  		if x_1.Op != OpConst64 {
 35145  			break
 35146  		}
 35147  		s := auxIntToInt64(x_1.AuxInt)
 35148  		if !(s >= 8) {
 35149  			break
 35150  		}
 35151  		v.copyOf(x)
 35152  		return true
 35153  	}
 35154  	return false
 35155  }
 35156  func rewriteValuegeneric_OpZeroExt8to32(v *Value) bool {
 35157  	v_0 := v.Args[0]
 35158  	// match: (ZeroExt8to32 (Const8 [c]))
 35159  	// result: (Const32 [int32( uint8(c))])
 35160  	for {
 35161  		if v_0.Op != OpConst8 {
 35162  			break
 35163  		}
 35164  		c := auxIntToInt8(v_0.AuxInt)
 35165  		v.reset(OpConst32)
 35166  		v.AuxInt = int32ToAuxInt(int32(uint8(c)))
 35167  		return true
 35168  	}
 35169  	// match: (ZeroExt8to32 (Trunc32to8 x:(Rsh32Ux64 _ (Const64 [s]))))
 35170  	// cond: s >= 24
 35171  	// result: x
 35172  	for {
 35173  		if v_0.Op != OpTrunc32to8 {
 35174  			break
 35175  		}
 35176  		x := v_0.Args[0]
 35177  		if x.Op != OpRsh32Ux64 {
 35178  			break
 35179  		}
 35180  		_ = x.Args[1]
 35181  		x_1 := x.Args[1]
 35182  		if x_1.Op != OpConst64 {
 35183  			break
 35184  		}
 35185  		s := auxIntToInt64(x_1.AuxInt)
 35186  		if !(s >= 24) {
 35187  			break
 35188  		}
 35189  		v.copyOf(x)
 35190  		return true
 35191  	}
 35192  	return false
 35193  }
 35194  func rewriteValuegeneric_OpZeroExt8to64(v *Value) bool {
 35195  	v_0 := v.Args[0]
 35196  	// match: (ZeroExt8to64 (Const8 [c]))
 35197  	// result: (Const64 [int64( uint8(c))])
 35198  	for {
 35199  		if v_0.Op != OpConst8 {
 35200  			break
 35201  		}
 35202  		c := auxIntToInt8(v_0.AuxInt)
 35203  		v.reset(OpConst64)
 35204  		v.AuxInt = int64ToAuxInt(int64(uint8(c)))
 35205  		return true
 35206  	}
 35207  	// match: (ZeroExt8to64 (Trunc64to8 x:(Rsh64Ux64 _ (Const64 [s]))))
 35208  	// cond: s >= 56
 35209  	// result: x
 35210  	for {
 35211  		if v_0.Op != OpTrunc64to8 {
 35212  			break
 35213  		}
 35214  		x := v_0.Args[0]
 35215  		if x.Op != OpRsh64Ux64 {
 35216  			break
 35217  		}
 35218  		_ = x.Args[1]
 35219  		x_1 := x.Args[1]
 35220  		if x_1.Op != OpConst64 {
 35221  			break
 35222  		}
 35223  		s := auxIntToInt64(x_1.AuxInt)
 35224  		if !(s >= 56) {
 35225  			break
 35226  		}
 35227  		v.copyOf(x)
 35228  		return true
 35229  	}
 35230  	return false
 35231  }
 35232  func rewriteBlockgeneric(b *Block) bool {
 35233  	switch b.Kind {
 35234  	case BlockIf:
 35235  		// match: (If (Not cond) yes no)
 35236  		// result: (If cond no yes)
 35237  		for b.Controls[0].Op == OpNot {
 35238  			v_0 := b.Controls[0]
 35239  			cond := v_0.Args[0]
 35240  			b.resetWithControl(BlockIf, cond)
 35241  			b.swapSuccessors()
 35242  			return true
 35243  		}
 35244  		// match: (If (ConstBool [c]) yes no)
 35245  		// cond: c
 35246  		// result: (First yes no)
 35247  		for b.Controls[0].Op == OpConstBool {
 35248  			v_0 := b.Controls[0]
 35249  			c := auxIntToBool(v_0.AuxInt)
 35250  			if !(c) {
 35251  				break
 35252  			}
 35253  			b.Reset(BlockFirst)
 35254  			return true
 35255  		}
 35256  		// match: (If (ConstBool [c]) yes no)
 35257  		// cond: !c
 35258  		// result: (First no yes)
 35259  		for b.Controls[0].Op == OpConstBool {
 35260  			v_0 := b.Controls[0]
 35261  			c := auxIntToBool(v_0.AuxInt)
 35262  			if !(!c) {
 35263  				break
 35264  			}
 35265  			b.Reset(BlockFirst)
 35266  			b.swapSuccessors()
 35267  			return true
 35268  		}
 35269  	}
 35270  	return false
 35271  }
 35272