Source file src/cmd/compile/internal/ssa/rewriteRISCV64.go

     1  // Code generated from _gen/RISCV64.rules using 'go generate'; DO NOT EDIT.
     2  
     3  package ssa
     4  
     5  import "internal/buildcfg"
     6  import "math"
     7  import "cmd/compile/internal/types"
     8  
// rewriteValueRISCV64 applies the RISCV64 lowering rules to v and reports
// whether v was rewritten. Generic ops whose lowering is a single
// unconditional opcode substitution are rewritten in place by retargeting
// v.Op (arguments are left as-is); ops that need argument rewriting, new
// values, or have match conditions delegate to a per-op rewrite function.
// This file is generated from _gen/RISCV64.rules — change the rules and
// regenerate rather than editing the cases by hand.
func rewriteValueRISCV64(v *Value) bool {
	switch v.Op {
	case OpAbs:
		v.Op = OpRISCV64FABSD
		return true
	case OpAdd16:
		v.Op = OpRISCV64ADD
		return true
	case OpAdd32:
		v.Op = OpRISCV64ADD
		return true
	case OpAdd32F:
		v.Op = OpRISCV64FADDS
		return true
	case OpAdd64:
		v.Op = OpRISCV64ADD
		return true
	case OpAdd64F:
		v.Op = OpRISCV64FADDD
		return true
	case OpAdd8:
		v.Op = OpRISCV64ADD
		return true
	case OpAddPtr:
		v.Op = OpRISCV64ADD
		return true
	case OpAddr:
		return rewriteValueRISCV64_OpAddr(v)
	case OpAnd16:
		v.Op = OpRISCV64AND
		return true
	case OpAnd32:
		v.Op = OpRISCV64AND
		return true
	case OpAnd64:
		v.Op = OpRISCV64AND
		return true
	case OpAnd8:
		v.Op = OpRISCV64AND
		return true
	case OpAndB:
		v.Op = OpRISCV64AND
		return true
	case OpAtomicAdd32:
		v.Op = OpRISCV64LoweredAtomicAdd32
		return true
	case OpAtomicAdd64:
		v.Op = OpRISCV64LoweredAtomicAdd64
		return true
	case OpAtomicAnd32:
		v.Op = OpRISCV64LoweredAtomicAnd32
		return true
	case OpAtomicAnd8:
		return rewriteValueRISCV64_OpAtomicAnd8(v)
	case OpAtomicCompareAndSwap32:
		return rewriteValueRISCV64_OpAtomicCompareAndSwap32(v)
	case OpAtomicCompareAndSwap64:
		v.Op = OpRISCV64LoweredAtomicCas64
		return true
	case OpAtomicExchange32:
		v.Op = OpRISCV64LoweredAtomicExchange32
		return true
	case OpAtomicExchange64:
		v.Op = OpRISCV64LoweredAtomicExchange64
		return true
	case OpAtomicLoad32:
		v.Op = OpRISCV64LoweredAtomicLoad32
		return true
	case OpAtomicLoad64:
		v.Op = OpRISCV64LoweredAtomicLoad64
		return true
	case OpAtomicLoad8:
		v.Op = OpRISCV64LoweredAtomicLoad8
		return true
	case OpAtomicLoadPtr:
		// Pointers are 64-bit on riscv64, so pointer loads share the 64-bit lowering.
		v.Op = OpRISCV64LoweredAtomicLoad64
		return true
	case OpAtomicOr32:
		v.Op = OpRISCV64LoweredAtomicOr32
		return true
	case OpAtomicOr8:
		return rewriteValueRISCV64_OpAtomicOr8(v)
	case OpAtomicStore32:
		v.Op = OpRISCV64LoweredAtomicStore32
		return true
	case OpAtomicStore64:
		v.Op = OpRISCV64LoweredAtomicStore64
		return true
	case OpAtomicStore8:
		v.Op = OpRISCV64LoweredAtomicStore8
		return true
	case OpAtomicStorePtrNoWB:
		v.Op = OpRISCV64LoweredAtomicStore64
		return true
	case OpAvg64u:
		return rewriteValueRISCV64_OpAvg64u(v)
	case OpClosureCall:
		v.Op = OpRISCV64CALLclosure
		return true
	case OpCom16:
		v.Op = OpRISCV64NOT
		return true
	case OpCom32:
		v.Op = OpRISCV64NOT
		return true
	case OpCom64:
		v.Op = OpRISCV64NOT
		return true
	case OpCom8:
		v.Op = OpRISCV64NOT
		return true
	case OpConst16:
		return rewriteValueRISCV64_OpConst16(v)
	case OpConst32:
		return rewriteValueRISCV64_OpConst32(v)
	case OpConst32F:
		return rewriteValueRISCV64_OpConst32F(v)
	case OpConst64:
		return rewriteValueRISCV64_OpConst64(v)
	case OpConst64F:
		return rewriteValueRISCV64_OpConst64F(v)
	case OpConst8:
		return rewriteValueRISCV64_OpConst8(v)
	case OpConstBool:
		return rewriteValueRISCV64_OpConstBool(v)
	case OpConstNil:
		return rewriteValueRISCV64_OpConstNil(v)
	case OpCopysign:
		v.Op = OpRISCV64FSGNJD
		return true
	case OpCvt32Fto32:
		v.Op = OpRISCV64FCVTWS
		return true
	case OpCvt32Fto64:
		v.Op = OpRISCV64FCVTLS
		return true
	case OpCvt32Fto64F:
		v.Op = OpRISCV64FCVTDS
		return true
	case OpCvt32to32F:
		v.Op = OpRISCV64FCVTSW
		return true
	case OpCvt32to64F:
		v.Op = OpRISCV64FCVTDW
		return true
	case OpCvt64Fto32:
		v.Op = OpRISCV64FCVTWD
		return true
	case OpCvt64Fto32F:
		v.Op = OpRISCV64FCVTSD
		return true
	case OpCvt64Fto64:
		v.Op = OpRISCV64FCVTLD
		return true
	case OpCvt64to32F:
		v.Op = OpRISCV64FCVTSL
		return true
	case OpCvt64to64F:
		v.Op = OpRISCV64FCVTDL
		return true
	case OpCvtBoolToUint8:
		v.Op = OpCopy
		return true
	case OpDiv16:
		return rewriteValueRISCV64_OpDiv16(v)
	case OpDiv16u:
		return rewriteValueRISCV64_OpDiv16u(v)
	case OpDiv32:
		return rewriteValueRISCV64_OpDiv32(v)
	case OpDiv32F:
		v.Op = OpRISCV64FDIVS
		return true
	case OpDiv32u:
		v.Op = OpRISCV64DIVUW
		return true
	case OpDiv64:
		return rewriteValueRISCV64_OpDiv64(v)
	case OpDiv64F:
		v.Op = OpRISCV64FDIVD
		return true
	case OpDiv64u:
		v.Op = OpRISCV64DIVU
		return true
	case OpDiv8:
		return rewriteValueRISCV64_OpDiv8(v)
	case OpDiv8u:
		return rewriteValueRISCV64_OpDiv8u(v)
	case OpEq16:
		return rewriteValueRISCV64_OpEq16(v)
	case OpEq32:
		return rewriteValueRISCV64_OpEq32(v)
	case OpEq32F:
		v.Op = OpRISCV64FEQS
		return true
	case OpEq64:
		return rewriteValueRISCV64_OpEq64(v)
	case OpEq64F:
		v.Op = OpRISCV64FEQD
		return true
	case OpEq8:
		return rewriteValueRISCV64_OpEq8(v)
	case OpEqB:
		return rewriteValueRISCV64_OpEqB(v)
	case OpEqPtr:
		return rewriteValueRISCV64_OpEqPtr(v)
	case OpFMA:
		v.Op = OpRISCV64FMADDD
		return true
	case OpGetCallerPC:
		v.Op = OpRISCV64LoweredGetCallerPC
		return true
	case OpGetCallerSP:
		v.Op = OpRISCV64LoweredGetCallerSP
		return true
	case OpGetClosurePtr:
		v.Op = OpRISCV64LoweredGetClosurePtr
		return true
	case OpHmul32:
		return rewriteValueRISCV64_OpHmul32(v)
	case OpHmul32u:
		return rewriteValueRISCV64_OpHmul32u(v)
	case OpHmul64:
		v.Op = OpRISCV64MULH
		return true
	case OpHmul64u:
		v.Op = OpRISCV64MULHU
		return true
	case OpInterCall:
		v.Op = OpRISCV64CALLinter
		return true
	case OpIsInBounds:
		// Retargets to another generic op; it is lowered on a later pass over v.
		v.Op = OpLess64U
		return true
	case OpIsNonNil:
		v.Op = OpRISCV64SNEZ
		return true
	case OpIsSliceInBounds:
		v.Op = OpLeq64U
		return true
	case OpLeq16:
		return rewriteValueRISCV64_OpLeq16(v)
	case OpLeq16U:
		return rewriteValueRISCV64_OpLeq16U(v)
	case OpLeq32:
		return rewriteValueRISCV64_OpLeq32(v)
	case OpLeq32F:
		v.Op = OpRISCV64FLES
		return true
	case OpLeq32U:
		return rewriteValueRISCV64_OpLeq32U(v)
	case OpLeq64:
		return rewriteValueRISCV64_OpLeq64(v)
	case OpLeq64F:
		v.Op = OpRISCV64FLED
		return true
	case OpLeq64U:
		return rewriteValueRISCV64_OpLeq64U(v)
	case OpLeq8:
		return rewriteValueRISCV64_OpLeq8(v)
	case OpLeq8U:
		return rewriteValueRISCV64_OpLeq8U(v)
	case OpLess16:
		return rewriteValueRISCV64_OpLess16(v)
	case OpLess16U:
		return rewriteValueRISCV64_OpLess16U(v)
	case OpLess32:
		return rewriteValueRISCV64_OpLess32(v)
	case OpLess32F:
		v.Op = OpRISCV64FLTS
		return true
	case OpLess32U:
		return rewriteValueRISCV64_OpLess32U(v)
	case OpLess64:
		v.Op = OpRISCV64SLT
		return true
	case OpLess64F:
		v.Op = OpRISCV64FLTD
		return true
	case OpLess64U:
		v.Op = OpRISCV64SLTU
		return true
	case OpLess8:
		return rewriteValueRISCV64_OpLess8(v)
	case OpLess8U:
		return rewriteValueRISCV64_OpLess8U(v)
	case OpLoad:
		return rewriteValueRISCV64_OpLoad(v)
	case OpLocalAddr:
		return rewriteValueRISCV64_OpLocalAddr(v)
	case OpLsh16x16:
		return rewriteValueRISCV64_OpLsh16x16(v)
	case OpLsh16x32:
		return rewriteValueRISCV64_OpLsh16x32(v)
	case OpLsh16x64:
		return rewriteValueRISCV64_OpLsh16x64(v)
	case OpLsh16x8:
		return rewriteValueRISCV64_OpLsh16x8(v)
	case OpLsh32x16:
		return rewriteValueRISCV64_OpLsh32x16(v)
	case OpLsh32x32:
		return rewriteValueRISCV64_OpLsh32x32(v)
	case OpLsh32x64:
		return rewriteValueRISCV64_OpLsh32x64(v)
	case OpLsh32x8:
		return rewriteValueRISCV64_OpLsh32x8(v)
	case OpLsh64x16:
		return rewriteValueRISCV64_OpLsh64x16(v)
	case OpLsh64x32:
		return rewriteValueRISCV64_OpLsh64x32(v)
	case OpLsh64x64:
		return rewriteValueRISCV64_OpLsh64x64(v)
	case OpLsh64x8:
		return rewriteValueRISCV64_OpLsh64x8(v)
	case OpLsh8x16:
		return rewriteValueRISCV64_OpLsh8x16(v)
	case OpLsh8x32:
		return rewriteValueRISCV64_OpLsh8x32(v)
	case OpLsh8x64:
		return rewriteValueRISCV64_OpLsh8x64(v)
	case OpLsh8x8:
		return rewriteValueRISCV64_OpLsh8x8(v)
	case OpMax32F:
		v.Op = OpRISCV64LoweredFMAXS
		return true
	case OpMax64:
		return rewriteValueRISCV64_OpMax64(v)
	case OpMax64F:
		v.Op = OpRISCV64LoweredFMAXD
		return true
	case OpMax64u:
		return rewriteValueRISCV64_OpMax64u(v)
	case OpMin32F:
		v.Op = OpRISCV64LoweredFMINS
		return true
	case OpMin64:
		return rewriteValueRISCV64_OpMin64(v)
	case OpMin64F:
		v.Op = OpRISCV64LoweredFMIND
		return true
	case OpMin64u:
		return rewriteValueRISCV64_OpMin64u(v)
	case OpMod16:
		return rewriteValueRISCV64_OpMod16(v)
	case OpMod16u:
		return rewriteValueRISCV64_OpMod16u(v)
	case OpMod32:
		return rewriteValueRISCV64_OpMod32(v)
	case OpMod32u:
		v.Op = OpRISCV64REMUW
		return true
	case OpMod64:
		return rewriteValueRISCV64_OpMod64(v)
	case OpMod64u:
		v.Op = OpRISCV64REMU
		return true
	case OpMod8:
		return rewriteValueRISCV64_OpMod8(v)
	case OpMod8u:
		return rewriteValueRISCV64_OpMod8u(v)
	case OpMove:
		return rewriteValueRISCV64_OpMove(v)
	case OpMul16:
		return rewriteValueRISCV64_OpMul16(v)
	case OpMul32:
		v.Op = OpRISCV64MULW
		return true
	case OpMul32F:
		v.Op = OpRISCV64FMULS
		return true
	case OpMul64:
		v.Op = OpRISCV64MUL
		return true
	case OpMul64F:
		v.Op = OpRISCV64FMULD
		return true
	case OpMul64uhilo:
		v.Op = OpRISCV64LoweredMuluhilo
		return true
	case OpMul64uover:
		v.Op = OpRISCV64LoweredMuluover
		return true
	case OpMul8:
		return rewriteValueRISCV64_OpMul8(v)
	case OpNeg16:
		v.Op = OpRISCV64NEG
		return true
	case OpNeg32:
		v.Op = OpRISCV64NEG
		return true
	case OpNeg32F:
		v.Op = OpRISCV64FNEGS
		return true
	case OpNeg64:
		v.Op = OpRISCV64NEG
		return true
	case OpNeg64F:
		v.Op = OpRISCV64FNEGD
		return true
	case OpNeg8:
		v.Op = OpRISCV64NEG
		return true
	case OpNeq16:
		return rewriteValueRISCV64_OpNeq16(v)
	case OpNeq32:
		return rewriteValueRISCV64_OpNeq32(v)
	case OpNeq32F:
		v.Op = OpRISCV64FNES
		return true
	case OpNeq64:
		return rewriteValueRISCV64_OpNeq64(v)
	case OpNeq64F:
		v.Op = OpRISCV64FNED
		return true
	case OpNeq8:
		return rewriteValueRISCV64_OpNeq8(v)
	case OpNeqB:
		return rewriteValueRISCV64_OpNeqB(v)
	case OpNeqPtr:
		return rewriteValueRISCV64_OpNeqPtr(v)
	case OpNilCheck:
		v.Op = OpRISCV64LoweredNilCheck
		return true
	case OpNot:
		v.Op = OpRISCV64SEQZ
		return true
	case OpOffPtr:
		return rewriteValueRISCV64_OpOffPtr(v)
	case OpOr16:
		v.Op = OpRISCV64OR
		return true
	case OpOr32:
		v.Op = OpRISCV64OR
		return true
	case OpOr64:
		v.Op = OpRISCV64OR
		return true
	case OpOr8:
		v.Op = OpRISCV64OR
		return true
	case OpOrB:
		v.Op = OpRISCV64OR
		return true
	case OpPanicBounds:
		return rewriteValueRISCV64_OpPanicBounds(v)
	case OpPubBarrier:
		v.Op = OpRISCV64LoweredPubBarrier
		return true
	// Machine-specific ops below are re-examined for peephole-style rewrites
	// (constant folding, load/store merging, etc.) in their per-op functions.
	case OpRISCV64ADD:
		return rewriteValueRISCV64_OpRISCV64ADD(v)
	case OpRISCV64ADDI:
		return rewriteValueRISCV64_OpRISCV64ADDI(v)
	case OpRISCV64AND:
		return rewriteValueRISCV64_OpRISCV64AND(v)
	case OpRISCV64ANDI:
		return rewriteValueRISCV64_OpRISCV64ANDI(v)
	case OpRISCV64FADDD:
		return rewriteValueRISCV64_OpRISCV64FADDD(v)
	case OpRISCV64FADDS:
		return rewriteValueRISCV64_OpRISCV64FADDS(v)
	case OpRISCV64FMADDD:
		return rewriteValueRISCV64_OpRISCV64FMADDD(v)
	case OpRISCV64FMADDS:
		return rewriteValueRISCV64_OpRISCV64FMADDS(v)
	case OpRISCV64FMSUBD:
		return rewriteValueRISCV64_OpRISCV64FMSUBD(v)
	case OpRISCV64FMSUBS:
		return rewriteValueRISCV64_OpRISCV64FMSUBS(v)
	case OpRISCV64FNMADDD:
		return rewriteValueRISCV64_OpRISCV64FNMADDD(v)
	case OpRISCV64FNMADDS:
		return rewriteValueRISCV64_OpRISCV64FNMADDS(v)
	case OpRISCV64FNMSUBD:
		return rewriteValueRISCV64_OpRISCV64FNMSUBD(v)
	case OpRISCV64FNMSUBS:
		return rewriteValueRISCV64_OpRISCV64FNMSUBS(v)
	case OpRISCV64FSUBD:
		return rewriteValueRISCV64_OpRISCV64FSUBD(v)
	case OpRISCV64FSUBS:
		return rewriteValueRISCV64_OpRISCV64FSUBS(v)
	case OpRISCV64MOVBUload:
		return rewriteValueRISCV64_OpRISCV64MOVBUload(v)
	case OpRISCV64MOVBUreg:
		return rewriteValueRISCV64_OpRISCV64MOVBUreg(v)
	case OpRISCV64MOVBload:
		return rewriteValueRISCV64_OpRISCV64MOVBload(v)
	case OpRISCV64MOVBreg:
		return rewriteValueRISCV64_OpRISCV64MOVBreg(v)
	case OpRISCV64MOVBstore:
		return rewriteValueRISCV64_OpRISCV64MOVBstore(v)
	case OpRISCV64MOVBstorezero:
		return rewriteValueRISCV64_OpRISCV64MOVBstorezero(v)
	case OpRISCV64MOVDload:
		return rewriteValueRISCV64_OpRISCV64MOVDload(v)
	case OpRISCV64MOVDnop:
		return rewriteValueRISCV64_OpRISCV64MOVDnop(v)
	case OpRISCV64MOVDreg:
		return rewriteValueRISCV64_OpRISCV64MOVDreg(v)
	case OpRISCV64MOVDstore:
		return rewriteValueRISCV64_OpRISCV64MOVDstore(v)
	case OpRISCV64MOVDstorezero:
		return rewriteValueRISCV64_OpRISCV64MOVDstorezero(v)
	case OpRISCV64MOVHUload:
		return rewriteValueRISCV64_OpRISCV64MOVHUload(v)
	case OpRISCV64MOVHUreg:
		return rewriteValueRISCV64_OpRISCV64MOVHUreg(v)
	case OpRISCV64MOVHload:
		return rewriteValueRISCV64_OpRISCV64MOVHload(v)
	case OpRISCV64MOVHreg:
		return rewriteValueRISCV64_OpRISCV64MOVHreg(v)
	case OpRISCV64MOVHstore:
		return rewriteValueRISCV64_OpRISCV64MOVHstore(v)
	case OpRISCV64MOVHstorezero:
		return rewriteValueRISCV64_OpRISCV64MOVHstorezero(v)
	case OpRISCV64MOVWUload:
		return rewriteValueRISCV64_OpRISCV64MOVWUload(v)
	case OpRISCV64MOVWUreg:
		return rewriteValueRISCV64_OpRISCV64MOVWUreg(v)
	case OpRISCV64MOVWload:
		return rewriteValueRISCV64_OpRISCV64MOVWload(v)
	case OpRISCV64MOVWreg:
		return rewriteValueRISCV64_OpRISCV64MOVWreg(v)
	case OpRISCV64MOVWstore:
		return rewriteValueRISCV64_OpRISCV64MOVWstore(v)
	case OpRISCV64MOVWstorezero:
		return rewriteValueRISCV64_OpRISCV64MOVWstorezero(v)
	case OpRISCV64NEG:
		return rewriteValueRISCV64_OpRISCV64NEG(v)
	case OpRISCV64NEGW:
		return rewriteValueRISCV64_OpRISCV64NEGW(v)
	case OpRISCV64OR:
		return rewriteValueRISCV64_OpRISCV64OR(v)
	case OpRISCV64ORI:
		return rewriteValueRISCV64_OpRISCV64ORI(v)
	case OpRISCV64ROL:
		return rewriteValueRISCV64_OpRISCV64ROL(v)
	case OpRISCV64ROLW:
		return rewriteValueRISCV64_OpRISCV64ROLW(v)
	case OpRISCV64ROR:
		return rewriteValueRISCV64_OpRISCV64ROR(v)
	case OpRISCV64RORW:
		return rewriteValueRISCV64_OpRISCV64RORW(v)
	case OpRISCV64SEQZ:
		return rewriteValueRISCV64_OpRISCV64SEQZ(v)
	case OpRISCV64SLL:
		return rewriteValueRISCV64_OpRISCV64SLL(v)
	case OpRISCV64SLLI:
		return rewriteValueRISCV64_OpRISCV64SLLI(v)
	case OpRISCV64SLLW:
		return rewriteValueRISCV64_OpRISCV64SLLW(v)
	case OpRISCV64SLT:
		return rewriteValueRISCV64_OpRISCV64SLT(v)
	case OpRISCV64SLTI:
		return rewriteValueRISCV64_OpRISCV64SLTI(v)
	case OpRISCV64SLTIU:
		return rewriteValueRISCV64_OpRISCV64SLTIU(v)
	case OpRISCV64SLTU:
		return rewriteValueRISCV64_OpRISCV64SLTU(v)
	case OpRISCV64SNEZ:
		return rewriteValueRISCV64_OpRISCV64SNEZ(v)
	case OpRISCV64SRA:
		return rewriteValueRISCV64_OpRISCV64SRA(v)
	case OpRISCV64SRAI:
		return rewriteValueRISCV64_OpRISCV64SRAI(v)
	case OpRISCV64SRAW:
		return rewriteValueRISCV64_OpRISCV64SRAW(v)
	case OpRISCV64SRL:
		return rewriteValueRISCV64_OpRISCV64SRL(v)
	case OpRISCV64SRLI:
		return rewriteValueRISCV64_OpRISCV64SRLI(v)
	case OpRISCV64SRLW:
		return rewriteValueRISCV64_OpRISCV64SRLW(v)
	case OpRISCV64SUB:
		return rewriteValueRISCV64_OpRISCV64SUB(v)
	case OpRISCV64SUBW:
		return rewriteValueRISCV64_OpRISCV64SUBW(v)
	case OpRISCV64XOR:
		return rewriteValueRISCV64_OpRISCV64XOR(v)
	case OpRotateLeft16:
		return rewriteValueRISCV64_OpRotateLeft16(v)
	case OpRotateLeft32:
		v.Op = OpRISCV64ROLW
		return true
	case OpRotateLeft64:
		v.Op = OpRISCV64ROL
		return true
	case OpRotateLeft8:
		return rewriteValueRISCV64_OpRotateLeft8(v)
	case OpRound32F:
		v.Op = OpRISCV64LoweredRound32F
		return true
	case OpRound64F:
		v.Op = OpRISCV64LoweredRound64F
		return true
	case OpRsh16Ux16:
		return rewriteValueRISCV64_OpRsh16Ux16(v)
	case OpRsh16Ux32:
		return rewriteValueRISCV64_OpRsh16Ux32(v)
	case OpRsh16Ux64:
		return rewriteValueRISCV64_OpRsh16Ux64(v)
	case OpRsh16Ux8:
		return rewriteValueRISCV64_OpRsh16Ux8(v)
	case OpRsh16x16:
		return rewriteValueRISCV64_OpRsh16x16(v)
	case OpRsh16x32:
		return rewriteValueRISCV64_OpRsh16x32(v)
	case OpRsh16x64:
		return rewriteValueRISCV64_OpRsh16x64(v)
	case OpRsh16x8:
		return rewriteValueRISCV64_OpRsh16x8(v)
	case OpRsh32Ux16:
		return rewriteValueRISCV64_OpRsh32Ux16(v)
	case OpRsh32Ux32:
		return rewriteValueRISCV64_OpRsh32Ux32(v)
	case OpRsh32Ux64:
		return rewriteValueRISCV64_OpRsh32Ux64(v)
	case OpRsh32Ux8:
		return rewriteValueRISCV64_OpRsh32Ux8(v)
	case OpRsh32x16:
		return rewriteValueRISCV64_OpRsh32x16(v)
	case OpRsh32x32:
		return rewriteValueRISCV64_OpRsh32x32(v)
	case OpRsh32x64:
		return rewriteValueRISCV64_OpRsh32x64(v)
	case OpRsh32x8:
		return rewriteValueRISCV64_OpRsh32x8(v)
	case OpRsh64Ux16:
		return rewriteValueRISCV64_OpRsh64Ux16(v)
	case OpRsh64Ux32:
		return rewriteValueRISCV64_OpRsh64Ux32(v)
	case OpRsh64Ux64:
		return rewriteValueRISCV64_OpRsh64Ux64(v)
	case OpRsh64Ux8:
		return rewriteValueRISCV64_OpRsh64Ux8(v)
	case OpRsh64x16:
		return rewriteValueRISCV64_OpRsh64x16(v)
	case OpRsh64x32:
		return rewriteValueRISCV64_OpRsh64x32(v)
	case OpRsh64x64:
		return rewriteValueRISCV64_OpRsh64x64(v)
	case OpRsh64x8:
		return rewriteValueRISCV64_OpRsh64x8(v)
	case OpRsh8Ux16:
		return rewriteValueRISCV64_OpRsh8Ux16(v)
	case OpRsh8Ux32:
		return rewriteValueRISCV64_OpRsh8Ux32(v)
	case OpRsh8Ux64:
		return rewriteValueRISCV64_OpRsh8Ux64(v)
	case OpRsh8Ux8:
		return rewriteValueRISCV64_OpRsh8Ux8(v)
	case OpRsh8x16:
		return rewriteValueRISCV64_OpRsh8x16(v)
	case OpRsh8x32:
		return rewriteValueRISCV64_OpRsh8x32(v)
	case OpRsh8x64:
		return rewriteValueRISCV64_OpRsh8x64(v)
	case OpRsh8x8:
		return rewriteValueRISCV64_OpRsh8x8(v)
	case OpSelect0:
		return rewriteValueRISCV64_OpSelect0(v)
	case OpSelect1:
		return rewriteValueRISCV64_OpSelect1(v)
	case OpSignExt16to32:
		v.Op = OpRISCV64MOVHreg
		return true
	case OpSignExt16to64:
		v.Op = OpRISCV64MOVHreg
		return true
	case OpSignExt32to64:
		v.Op = OpRISCV64MOVWreg
		return true
	case OpSignExt8to16:
		v.Op = OpRISCV64MOVBreg
		return true
	case OpSignExt8to32:
		v.Op = OpRISCV64MOVBreg
		return true
	case OpSignExt8to64:
		v.Op = OpRISCV64MOVBreg
		return true
	case OpSlicemask:
		return rewriteValueRISCV64_OpSlicemask(v)
	case OpSqrt:
		v.Op = OpRISCV64FSQRTD
		return true
	case OpSqrt32:
		v.Op = OpRISCV64FSQRTS
		return true
	case OpStaticCall:
		v.Op = OpRISCV64CALLstatic
		return true
	case OpStore:
		return rewriteValueRISCV64_OpStore(v)
	case OpSub16:
		v.Op = OpRISCV64SUB
		return true
	case OpSub32:
		v.Op = OpRISCV64SUB
		return true
	case OpSub32F:
		v.Op = OpRISCV64FSUBS
		return true
	case OpSub64:
		v.Op = OpRISCV64SUB
		return true
	case OpSub64F:
		v.Op = OpRISCV64FSUBD
		return true
	case OpSub8:
		v.Op = OpRISCV64SUB
		return true
	case OpSubPtr:
		v.Op = OpRISCV64SUB
		return true
	case OpTailCall:
		v.Op = OpRISCV64CALLtail
		return true
	case OpTrunc16to8:
		// Truncations are no-ops at the machine level: narrower values live in
		// full-width registers and are re-extended where needed.
		v.Op = OpCopy
		return true
	case OpTrunc32to16:
		v.Op = OpCopy
		return true
	case OpTrunc32to8:
		v.Op = OpCopy
		return true
	case OpTrunc64to16:
		v.Op = OpCopy
		return true
	case OpTrunc64to32:
		v.Op = OpCopy
		return true
	case OpTrunc64to8:
		v.Op = OpCopy
		return true
	case OpWB:
		v.Op = OpRISCV64LoweredWB
		return true
	case OpXor16:
		v.Op = OpRISCV64XOR
		return true
	case OpXor32:
		v.Op = OpRISCV64XOR
		return true
	case OpXor64:
		v.Op = OpRISCV64XOR
		return true
	case OpXor8:
		v.Op = OpRISCV64XOR
		return true
	case OpZero:
		return rewriteValueRISCV64_OpZero(v)
	case OpZeroExt16to32:
		v.Op = OpRISCV64MOVHUreg
		return true
	case OpZeroExt16to64:
		v.Op = OpRISCV64MOVHUreg
		return true
	case OpZeroExt32to64:
		v.Op = OpRISCV64MOVWUreg
		return true
	case OpZeroExt8to16:
		v.Op = OpRISCV64MOVBUreg
		return true
	case OpZeroExt8to32:
		v.Op = OpRISCV64MOVBUreg
		return true
	case OpZeroExt8to64:
		v.Op = OpRISCV64MOVBUreg
		return true
	}
	// No rule matched; v is left unchanged.
	return false
}
   781  func rewriteValueRISCV64_OpAddr(v *Value) bool {
   782  	v_0 := v.Args[0]
   783  	// match: (Addr {sym} base)
   784  	// result: (MOVaddr {sym} [0] base)
   785  	for {
   786  		sym := auxToSym(v.Aux)
   787  		base := v_0
   788  		v.reset(OpRISCV64MOVaddr)
   789  		v.AuxInt = int32ToAuxInt(0)
   790  		v.Aux = symToAux(sym)
   791  		v.AddArg(base)
   792  		return true
   793  	}
   794  }
   795  func rewriteValueRISCV64_OpAtomicAnd8(v *Value) bool {
   796  	v_2 := v.Args[2]
   797  	v_1 := v.Args[1]
   798  	v_0 := v.Args[0]
   799  	b := v.Block
   800  	typ := &b.Func.Config.Types
   801  	// match: (AtomicAnd8 ptr val mem)
   802  	// result: (LoweredAtomicAnd32 (ANDI <typ.Uintptr> [^3] ptr) (NOT <typ.UInt32> (SLL <typ.UInt32> (XORI <typ.UInt32> [0xff] (ZeroExt8to32 val)) (SLLI <typ.UInt64> [3] (ANDI <typ.UInt64> [3] ptr)))) mem)
   803  	for {
   804  		ptr := v_0
   805  		val := v_1
   806  		mem := v_2
   807  		v.reset(OpRISCV64LoweredAtomicAnd32)
   808  		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Uintptr)
   809  		v0.AuxInt = int64ToAuxInt(^3)
   810  		v0.AddArg(ptr)
   811  		v1 := b.NewValue0(v.Pos, OpRISCV64NOT, typ.UInt32)
   812  		v2 := b.NewValue0(v.Pos, OpRISCV64SLL, typ.UInt32)
   813  		v3 := b.NewValue0(v.Pos, OpRISCV64XORI, typ.UInt32)
   814  		v3.AuxInt = int64ToAuxInt(0xff)
   815  		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   816  		v4.AddArg(val)
   817  		v3.AddArg(v4)
   818  		v5 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
   819  		v5.AuxInt = int64ToAuxInt(3)
   820  		v6 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.UInt64)
   821  		v6.AuxInt = int64ToAuxInt(3)
   822  		v6.AddArg(ptr)
   823  		v5.AddArg(v6)
   824  		v2.AddArg2(v3, v5)
   825  		v1.AddArg(v2)
   826  		v.AddArg3(v0, v1, mem)
   827  		return true
   828  	}
   829  }
   830  func rewriteValueRISCV64_OpAtomicCompareAndSwap32(v *Value) bool {
   831  	v_3 := v.Args[3]
   832  	v_2 := v.Args[2]
   833  	v_1 := v.Args[1]
   834  	v_0 := v.Args[0]
   835  	b := v.Block
   836  	typ := &b.Func.Config.Types
   837  	// match: (AtomicCompareAndSwap32 ptr old new mem)
   838  	// result: (LoweredAtomicCas32 ptr (SignExt32to64 old) new mem)
   839  	for {
   840  		ptr := v_0
   841  		old := v_1
   842  		new := v_2
   843  		mem := v_3
   844  		v.reset(OpRISCV64LoweredAtomicCas32)
   845  		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
   846  		v0.AddArg(old)
   847  		v.AddArg4(ptr, v0, new, mem)
   848  		return true
   849  	}
   850  }
   851  func rewriteValueRISCV64_OpAtomicOr8(v *Value) bool {
   852  	v_2 := v.Args[2]
   853  	v_1 := v.Args[1]
   854  	v_0 := v.Args[0]
   855  	b := v.Block
   856  	typ := &b.Func.Config.Types
   857  	// match: (AtomicOr8 ptr val mem)
   858  	// result: (LoweredAtomicOr32 (ANDI <typ.Uintptr> [^3] ptr) (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLI <typ.UInt64> [3] (ANDI <typ.UInt64> [3] ptr))) mem)
   859  	for {
   860  		ptr := v_0
   861  		val := v_1
   862  		mem := v_2
   863  		v.reset(OpRISCV64LoweredAtomicOr32)
   864  		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Uintptr)
   865  		v0.AuxInt = int64ToAuxInt(^3)
   866  		v0.AddArg(ptr)
   867  		v1 := b.NewValue0(v.Pos, OpRISCV64SLL, typ.UInt32)
   868  		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   869  		v2.AddArg(val)
   870  		v3 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
   871  		v3.AuxInt = int64ToAuxInt(3)
   872  		v4 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.UInt64)
   873  		v4.AuxInt = int64ToAuxInt(3)
   874  		v4.AddArg(ptr)
   875  		v3.AddArg(v4)
   876  		v1.AddArg2(v2, v3)
   877  		v.AddArg3(v0, v1, mem)
   878  		return true
   879  	}
   880  }
   881  func rewriteValueRISCV64_OpAvg64u(v *Value) bool {
   882  	v_1 := v.Args[1]
   883  	v_0 := v.Args[0]
   884  	b := v.Block
   885  	// match: (Avg64u <t> x y)
   886  	// result: (ADD (ADD <t> (SRLI <t> [1] x) (SRLI <t> [1] y)) (ANDI <t> [1] (AND <t> x y)))
   887  	for {
   888  		t := v.Type
   889  		x := v_0
   890  		y := v_1
   891  		v.reset(OpRISCV64ADD)
   892  		v0 := b.NewValue0(v.Pos, OpRISCV64ADD, t)
   893  		v1 := b.NewValue0(v.Pos, OpRISCV64SRLI, t)
   894  		v1.AuxInt = int64ToAuxInt(1)
   895  		v1.AddArg(x)
   896  		v2 := b.NewValue0(v.Pos, OpRISCV64SRLI, t)
   897  		v2.AuxInt = int64ToAuxInt(1)
   898  		v2.AddArg(y)
   899  		v0.AddArg2(v1, v2)
   900  		v3 := b.NewValue0(v.Pos, OpRISCV64ANDI, t)
   901  		v3.AuxInt = int64ToAuxInt(1)
   902  		v4 := b.NewValue0(v.Pos, OpRISCV64AND, t)
   903  		v4.AddArg2(x, y)
   904  		v3.AddArg(v4)
   905  		v.AddArg2(v0, v3)
   906  		return true
   907  	}
   908  }
// rewriteValueRISCV64_OpConst16 lowers Const16 to MOVDconst, widening the
// 16-bit aux constant to the 64-bit immediate used by the register-width op.
func rewriteValueRISCV64_OpConst16(v *Value) bool {
	// match: (Const16 [val])
	// result: (MOVDconst [int64(val)])
	for {
		val := auxIntToInt16(v.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueRISCV64_OpConst32 lowers Const32 to MOVDconst, widening the
// 32-bit aux constant to a 64-bit immediate.
func rewriteValueRISCV64_OpConst32(v *Value) bool {
	// match: (Const32 [val])
	// result: (MOVDconst [int64(val)])
	for {
		val := auxIntToInt32(v.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueRISCV64_OpConst32F lowers a float32 constant by materializing
// its bit pattern (math.Float32bits) in an integer register and moving it to
// a float register with FMVSX.
func rewriteValueRISCV64_OpConst32F(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Const32F [val])
	// result: (FMVSX (MOVDconst [int64(math.Float32bits(val))]))
	for {
		val := auxIntToFloat32(v.AuxInt)
		v.reset(OpRISCV64FMVSX)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(int64(math.Float32bits(val)))
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpConst64 lowers Const64 directly to MOVDconst.
func rewriteValueRISCV64_OpConst64(v *Value) bool {
	// match: (Const64 [val])
	// result: (MOVDconst [int64(val)])
	for {
		val := auxIntToInt64(v.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueRISCV64_OpConst64F lowers a float64 constant by materializing
// its bit pattern (math.Float64bits) in an integer register and moving it to
// a float register with FMVDX.
func rewriteValueRISCV64_OpConst64F(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Const64F [val])
	// result: (FMVDX (MOVDconst [int64(math.Float64bits(val))]))
	for {
		val := auxIntToFloat64(v.AuxInt)
		v.reset(OpRISCV64FMVDX)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(int64(math.Float64bits(val)))
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpConst8 lowers Const8 to MOVDconst, widening the
// 8-bit aux constant to a 64-bit immediate.
func rewriteValueRISCV64_OpConst8(v *Value) bool {
	// match: (Const8 [val])
	// result: (MOVDconst [int64(val)])
	for {
		val := auxIntToInt8(v.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}
// rewriteValueRISCV64_OpConstBool lowers ConstBool to MOVDconst, encoding
// the bool as 0 or 1 via b2i.
func rewriteValueRISCV64_OpConstBool(v *Value) bool {
	// match: (ConstBool [val])
	// result: (MOVDconst [int64(b2i(val))])
	for {
		val := auxIntToBool(v.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(b2i(val)))
		return true
	}
}
// rewriteValueRISCV64_OpConstNil lowers the nil-pointer constant to a zero
// MOVDconst.
func rewriteValueRISCV64_OpConstNil(v *Value) bool {
	// match: (ConstNil)
	// result: (MOVDconst [0])
	for {
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
}
// rewriteValueRISCV64_OpDiv16 lowers signed 16-bit division to the 32-bit
// DIVW with both operands sign-extended. The rule only fires when the bool
// aux is false; otherwise no rewrite applies and false is returned.
func rewriteValueRISCV64_OpDiv16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16 x y [false])
	// result: (DIVW (SignExt16to32 x) (SignExt16to32 y))
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVW)
		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpDiv16u lowers unsigned 16-bit division to DIVUW
// with both operands zero-extended to 32 bits. Always rewrites.
func rewriteValueRISCV64_OpDiv16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16u x y)
	// result: (DIVUW (ZeroExt16to32 x) (ZeroExt16to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVUW)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpDiv32 lowers signed 32-bit division directly to
// DIVW, but only when the bool aux is false.
func rewriteValueRISCV64_OpDiv32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Div32 x y [false])
	// result: (DIVW x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpDiv64 lowers signed 64-bit division directly to
// DIV, but only when the bool aux is false.
func rewriteValueRISCV64_OpDiv64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Div64 x y [false])
	// result: (DIV x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIV)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpDiv8 lowers signed 8-bit division to DIVW with both
// operands sign-extended to 32 bits. Always rewrites.
func rewriteValueRISCV64_OpDiv8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8 x y)
	// result: (DIVW (SignExt8to32 x) (SignExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVW)
		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpDiv8u lowers unsigned 8-bit division to DIVUW with
// both operands zero-extended to 32 bits. Always rewrites.
func rewriteValueRISCV64_OpDiv8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8u x y)
	// result: (DIVUW (ZeroExt8to32 x) (ZeroExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVUW)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpEq16 lowers Eq16 as SEQZ(SUB ...): the operands are
// zero-extended to 64 bits, subtracted, and equality holds iff the
// difference is zero.
func rewriteValueRISCV64_OpEq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq16 x y)
	// result: (SEQZ (SUB <x.Type> (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpEq32 lowers Eq32 as SEQZ(SUB ...), choosing sign- or
// zero-extension based on the signedness of x's type. Eq32 is commutative,
// so the inner _i0 loop tries both argument orders before giving up.
func rewriteValueRISCV64_OpEq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq32 x y)
	// cond: x.Type.IsSigned()
	// result: (SEQZ (SUB <x.Type> (SignExt32to64 x) (SignExt32to64 y)))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			y := v_1
			if !(x.Type.IsSigned()) {
				continue
			}
			v.reset(OpRISCV64SEQZ)
			v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
			v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
			v1.AddArg(x)
			v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
			v2.AddArg(y)
			v0.AddArg2(v1, v2)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (Eq32 x y)
	// cond: !x.Type.IsSigned()
	// result: (SEQZ (SUB <x.Type> (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			y := v_1
			if !(!x.Type.IsSigned()) {
				continue
			}
			v.reset(OpRISCV64SEQZ)
			v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
			v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
			v1.AddArg(x)
			v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
			v2.AddArg(y)
			v0.AddArg2(v1, v2)
			v.AddArg(v0)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpEq64 lowers Eq64 as SEQZ(SUB x y); no extension is
// needed at full register width.
func rewriteValueRISCV64_OpEq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Eq64 x y)
	// result: (SEQZ (SUB <x.Type> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpEq8 lowers Eq8 as SEQZ(SUB ...) with both operands
// zero-extended to 64 bits.
func rewriteValueRISCV64_OpEq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq8 x y)
	// result: (SEQZ (SUB <x.Type> (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpEqB lowers boolean equality as SEQZ(SUB x y) at
// type Bool; no extension is needed for bool operands.
func rewriteValueRISCV64_OpEqB(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (EqB x y)
	// result: (SEQZ (SUB <typ.Bool> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpEqPtr lowers pointer equality as SEQZ(SUB x y) at
// type Uintptr.
func rewriteValueRISCV64_OpEqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (EqPtr x y)
	// result: (SEQZ (SUB <typ.Uintptr> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Uintptr)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpHmul32 lowers the signed 32-bit high-multiply: the
// operands are sign-extended to 64 bits, multiplied, and the upper 32 bits
// of the product are extracted with an arithmetic right shift by 32.
func rewriteValueRISCV64_OpHmul32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul32 x y)
	// result: (SRAI [32] (MUL (SignExt32to64 x) (SignExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpRISCV64MUL, typ.Int64)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpHmul32u lowers the unsigned 32-bit high-multiply:
// zero-extend, multiply, then take the upper 32 bits via a logical right
// shift by 32.
func rewriteValueRISCV64_OpHmul32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul32u x y)
	// result: (SRLI [32] (MUL (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SRLI)
		v.AuxInt = int64ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpRISCV64MUL, typ.Int64)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq16 lowers x <= y as !(y < x), deferring to the
// generic Less16 lowering for the comparison itself.
func rewriteValueRISCV64_OpLeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16 x y)
	// result: (Not (Less16 y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess16, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq16U lowers unsigned x <= y as !(y < x) via
// Less16U.
func rewriteValueRISCV64_OpLeq16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16U x y)
	// result: (Not (Less16U y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess16U, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq32 lowers x <= y as !(y < x) via Less32.
func rewriteValueRISCV64_OpLeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32 x y)
	// result: (Not (Less32 y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess32, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq32U lowers unsigned x <= y as !(y < x) via
// Less32U.
func rewriteValueRISCV64_OpLeq32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32U x y)
	// result: (Not (Less32U y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess32U, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq64 lowers x <= y as !(y < x) via Less64.
func rewriteValueRISCV64_OpLeq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq64 x y)
	// result: (Not (Less64 y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess64, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq64U lowers unsigned x <= y as !(y < x) via
// Less64U.
func rewriteValueRISCV64_OpLeq64U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq64U x y)
	// result: (Not (Less64U y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess64U, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq8 lowers x <= y as !(y < x) via Less8.
func rewriteValueRISCV64_OpLeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8 x y)
	// result: (Not (Less8 y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess8, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq8U lowers unsigned x <= y as !(y < x) via
// Less8U.
func rewriteValueRISCV64_OpLeq8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8U x y)
	// result: (Not (Less8U y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess8U, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLess16 lowers signed 16-bit < to SLT with both
// operands sign-extended to the full 64-bit register width.
func rewriteValueRISCV64_OpLess16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16 x y)
	// result: (SLT (SignExt16to64 x) (SignExt16to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLT)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLess16U lowers unsigned 16-bit < to SLTU with both
// operands zero-extended to 64 bits.
func rewriteValueRISCV64_OpLess16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16U x y)
	// result: (SLTU (ZeroExt16to64 x) (ZeroExt16to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLess32 lowers signed 32-bit < to SLT with both
// operands sign-extended to 64 bits.
func rewriteValueRISCV64_OpLess32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32 x y)
	// result: (SLT (SignExt32to64 x) (SignExt32to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLT)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLess32U lowers unsigned 32-bit < to SLTU with both
// operands zero-extended to 64 bits.
func rewriteValueRISCV64_OpLess32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32U x y)
	// result: (SLTU (ZeroExt32to64 x) (ZeroExt32to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLess8 lowers signed 8-bit < to SLT with both
// operands sign-extended to 64 bits.
func rewriteValueRISCV64_OpLess8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8 x y)
	// result: (SLT (SignExt8to64 x) (SignExt8to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLT)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLess8U lowers unsigned 8-bit < to SLTU with both
// operands zero-extended to 64 bits.
func rewriteValueRISCV64_OpLess8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8U x y)
	// result: (SLTU (ZeroExt8to64 x) (ZeroExt8to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLoad selects the machine load instruction for a
// generic Load based on the loaded type: width (8/16/32/64 bits), integer
// signedness (sign- vs zero-extending variants), pointer, or float. Each
// match arm below handles exactly one such class; if none applies, the
// value is left unrewritten and false is returned.
func rewriteValueRISCV64_OpLoad(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Load <t> ptr mem)
	// cond: t.IsBoolean()
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(t.IsBoolean()) {
			break
		}
		v.reset(OpRISCV64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: ( is8BitInt(t) && t.IsSigned())
	// result: (MOVBload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVBload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: ( is8BitInt(t) && !t.IsSigned())
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && t.IsSigned())
	// result: (MOVHload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVHload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && !t.IsSigned())
	// result: (MOVHUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVHUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && t.IsSigned())
	// result: (MOVWload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVWload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && !t.IsSigned())
	// result: (MOVWUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVWUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is64BitInt(t) || isPtr(t))
	// result: (MOVDload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitInt(t) || isPtr(t)) {
			break
		}
		v.reset(OpRISCV64MOVDload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is32BitFloat(t)
	// result: (FMOVWload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitFloat(t)) {
			break
		}
		v.reset(OpRISCV64FMOVWload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is64BitFloat(t)
	// result: (FMOVDload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitFloat(t)) {
			break
		}
		v.reset(OpRISCV64FMOVDload)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLocalAddr lowers LocalAddr to MOVaddr. When the
// addressed slot's element type contains pointers, the address is anchored
// to the memory state via SPanchored (keeping the slot alive for the GC
// relative to mem); otherwise the memory argument is dropped entirely.
func rewriteValueRISCV64_OpLocalAddr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (LocalAddr <t> {sym} base mem)
	// cond: t.Elem().HasPointers()
	// result: (MOVaddr {sym} (SPanchored base mem))
	for {
		t := v.Type
		sym := auxToSym(v.Aux)
		base := v_0
		mem := v_1
		if !(t.Elem().HasPointers()) {
			break
		}
		v.reset(OpRISCV64MOVaddr)
		v.Aux = symToAux(sym)
		v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
		v0.AddArg2(base, mem)
		v.AddArg(v0)
		return true
	}
	// match: (LocalAddr <t> {sym} base _)
	// cond: !t.Elem().HasPointers()
	// result: (MOVaddr {sym} base)
	for {
		t := v.Type
		sym := auxToSym(v.Aux)
		base := v_0
		if !(!t.Elem().HasPointers()) {
			break
		}
		v.reset(OpRISCV64MOVaddr)
		v.Aux = symToAux(sym)
		v.AddArg(base)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh16x16 lowers Lsh16x16. When the shift amount is
// not provably bounded, the SLL result is masked with
// Neg(SLTIU [64] shift): SLTIU yields 1 when the (zero-extended) shift is
// < 64, so the negation is all-ones (keep result) or zero (shift >= 64
// produces 0, matching Go's shift semantics). A provably bounded shift
// lowers to a bare SLL.
func rewriteValueRISCV64_OpLsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg16, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh16x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh16x32 lowers Lsh16x32; same masking scheme as
// Lsh16x16 but the shift amount is zero-extended from 32 bits.
func rewriteValueRISCV64_OpLsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg16, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh16x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh16x64 lowers Lsh16x64; the 64-bit shift amount
// needs no extension before the SLTIU bound check.
func rewriteValueRISCV64_OpLsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh16x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg16, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh16x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh16x8 lowers Lsh16x8; the shift amount is
// zero-extended from 8 bits before the bound check.
func rewriteValueRISCV64_OpLsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg16, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh16x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh32x16 lowers Lsh32x16; same masking scheme with
// a Neg32 mask and the shift amount zero-extended from 16 bits.
func rewriteValueRISCV64_OpLsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh32x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh32x32 lowers Lsh32x32; shift amount
// zero-extended from 32 bits.
func rewriteValueRISCV64_OpLsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh32x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh32x64 lowers Lsh32x64; the 64-bit shift amount
// feeds SLTIU directly with no extension.
func rewriteValueRISCV64_OpLsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh32x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh32x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh32x8 lowers Lsh32x8; shift amount zero-extended
// from 8 bits before the bound check.
func rewriteValueRISCV64_OpLsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh32x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh64x16 lowers the generic Lsh64x16 op.
// The 16-bit shift amount is zero-extended to 64 bits before the
// out-of-range check (SLTIU [64]); a bounded shift becomes a plain SLL.
func rewriteValueRISCV64_OpLsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh64x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh64x32 lowers the generic Lsh64x32 op.
// The 32-bit shift amount is zero-extended to 64 bits before the
// out-of-range check (SLTIU [64]); a bounded shift becomes a plain SLL.
func rewriteValueRISCV64_OpLsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh64x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh64x64 lowers the generic Lsh64x64 op.
// The 64-bit shift amount needs no extension; an unbounded shift masks
// the SLL result via (Neg64 (SLTIU [64] y)), a bounded one is a plain SLL.
func rewriteValueRISCV64_OpLsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh64x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh64x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh64x8 lowers the generic Lsh64x8 op.
// The 8-bit shift amount is zero-extended to 64 bits before the
// out-of-range check (SLTIU [64]); a bounded shift becomes a plain SLL.
func rewriteValueRISCV64_OpLsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh64x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh8x16 lowers the generic Lsh8x16 op.
// The 16-bit shift amount is zero-extended to 64 bits before the
// out-of-range check (SLTIU [64]); a bounded shift becomes a plain SLL.
func rewriteValueRISCV64_OpLsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg8, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh8x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh8x32 lowers the generic Lsh8x32 op.
// The 32-bit shift amount is zero-extended to 64 bits before the
// out-of-range check (SLTIU [64]); a bounded shift becomes a plain SLL.
func rewriteValueRISCV64_OpLsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg8, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh8x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh8x64 lowers the generic Lsh8x64 op.
// The 64-bit shift amount needs no extension; an unbounded shift masks
// the SLL result via (Neg8 (SLTIU [64] y)), a bounded one is a plain SLL.
func rewriteValueRISCV64_OpLsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh8x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg8, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh8x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh8x8 lowers the generic Lsh8x8 op.
// The 8-bit shift amount is zero-extended to 64 bits before the
// out-of-range check (SLTIU [64]); a bounded shift becomes a plain SLL.
func rewriteValueRISCV64_OpLsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg8, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh8x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMax64 lowers Max64 to the MAX instruction when the
// target profile (GORISCV64 >= 22, i.e. rva22u64 with Zbb) provides it;
// otherwise the op is left for generic lowering.
func rewriteValueRISCV64_OpMax64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Max64 x y)
	// cond: buildcfg.GORISCV64 >= 22
	// result: (MAX x y)
	for {
		x := v_0
		y := v_1
		if !(buildcfg.GORISCV64 >= 22) {
			break
		}
		v.reset(OpRISCV64MAX)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMax64u lowers Max64u to the unsigned MAXU
// instruction when the target profile (GORISCV64 >= 22) provides it;
// otherwise the op is left for generic lowering.
func rewriteValueRISCV64_OpMax64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Max64u x y)
	// cond: buildcfg.GORISCV64 >= 22
	// result: (MAXU x y)
	for {
		x := v_0
		y := v_1
		if !(buildcfg.GORISCV64 >= 22) {
			break
		}
		v.reset(OpRISCV64MAXU)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMin64 lowers Min64 to the MIN instruction when the
// target profile (GORISCV64 >= 22) provides it; otherwise the op is left
// for generic lowering.
func rewriteValueRISCV64_OpMin64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Min64 x y)
	// cond: buildcfg.GORISCV64 >= 22
	// result: (MIN x y)
	for {
		x := v_0
		y := v_1
		if !(buildcfg.GORISCV64 >= 22) {
			break
		}
		v.reset(OpRISCV64MIN)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMin64u lowers Min64u to the unsigned MINU
// instruction when the target profile (GORISCV64 >= 22) provides it;
// otherwise the op is left for generic lowering.
func rewriteValueRISCV64_OpMin64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Min64u x y)
	// cond: buildcfg.GORISCV64 >= 22
	// result: (MINU x y)
	for {
		x := v_0
		y := v_1
		if !(buildcfg.GORISCV64 >= 22) {
			break
		}
		v.reset(OpRISCV64MINU)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMod16 lowers signed 16-bit remainder to REMW on
// sign-extended 32-bit operands. Only matches when the AuxInt flag is
// false (the flag marks whether divide-by-zero/overflow checks may be
// elided — NOTE(review): semantics of the aux bool come from the generic
// Mod16 op definition, not visible here).
func rewriteValueRISCV64_OpMod16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16 x y [false])
	// result: (REMW (SignExt16to32 x) (SignExt16to32 y))
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMW)
		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMod16u lowers unsigned 16-bit remainder to REMUW
// on zero-extended 32-bit operands. The rewrite is unconditional, so the
// function always returns true (no trailing return is generated).
func rewriteValueRISCV64_OpMod16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16u x y)
	// result: (REMUW (ZeroExt16to32 x) (ZeroExt16to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMUW)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpMod32 lowers signed 32-bit remainder directly to
// REMW when the AuxInt flag is false (see Mod16 for the flag's meaning).
func rewriteValueRISCV64_OpMod32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Mod32 x y [false])
	// result: (REMW x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMod64 lowers signed 64-bit remainder directly to
// REM when the AuxInt flag is false (see Mod16 for the flag's meaning).
func rewriteValueRISCV64_OpMod64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Mod64 x y [false])
	// result: (REM x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64REM)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMod8 lowers signed 8-bit remainder to REMW on
// sign-extended 32-bit operands. The rewrite is unconditional, so the
// function always returns true.
func rewriteValueRISCV64_OpMod8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8 x y)
	// result: (REMW (SignExt8to32 x) (SignExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMW)
		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpMod8u lowers unsigned 8-bit remainder to REMUW on
// zero-extended 32-bit operands. The rewrite is unconditional, so the
// function always returns true.
func rewriteValueRISCV64_OpMod8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8u x y)
	// result: (REMUW (ZeroExt8to32 x) (ZeroExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMUW)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpMove lowers the generic Move (memmove of a fixed
// size) op. The rules are ordered most-specific first: exact small sizes
// with sufficient alignment become unrolled MOVB/MOVH/MOVW/MOVDstore
// chains; 8-aligned sizes up to 8*128 bytes use Duff's device (DUFFCOPY);
// anything else falls back to the LoweredMove runtime loop.
func rewriteValueRISCV64_OpMove(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (Move [0] _ _ mem)
	// result: mem
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.copyOf(mem)
		return true
	}
	// match: (Move [1] dst src mem)
	// result: (MOVBstore dst (MOVBload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 1 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [2] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore dst (MOVHload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [2] dst src mem)
	// result: (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(1)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v0.AuxInt = int32ToAuxInt(1)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [4] {t} dst src mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore dst (MOVWload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [4] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v0.AuxInt = int32ToAuxInt(2)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [4] dst src mem)
	// result: (MOVBstore [3] dst (MOVBload [3] src mem) (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(3)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v0.AuxInt = int32ToAuxInt(3)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v2.AuxInt = int32ToAuxInt(2)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(1)
		v4 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v4.AuxInt = int32ToAuxInt(1)
		v4.AddArg2(src, mem)
		v5 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v6 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v6.AddArg2(src, mem)
		v5.AddArg3(dst, v6, mem)
		v3.AddArg3(dst, v4, v5)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [8] {t} dst src mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVDstore dst (MOVDload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [8] {t} dst src mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
		v0.AuxInt = int32ToAuxInt(4)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [8] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(6)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v0.AuxInt = int32ToAuxInt(6)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(4)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v2.AuxInt = int32ToAuxInt(4)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(2)
		v4 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v4.AuxInt = int32ToAuxInt(2)
		v4.AddArg2(src, mem)
		v5 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v6 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v6.AddArg2(src, mem)
		v5.AddArg3(dst, v6, mem)
		v3.AddArg3(dst, v4, v5)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [3] dst src mem)
	// result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 3 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v0.AuxInt = int32ToAuxInt(2)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(1)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v2.AuxInt = int32ToAuxInt(1)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v4 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v4.AddArg2(src, mem)
		v3.AddArg3(dst, v4, mem)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [6] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 6 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v0.AuxInt = int32ToAuxInt(4)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v2.AuxInt = int32ToAuxInt(2)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v4 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v4.AddArg2(src, mem)
		v3.AddArg3(dst, v4, mem)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [12] {t} dst src mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 12 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
		v0.AuxInt = int32ToAuxInt(8)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(4)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
		v2.AuxInt = int32ToAuxInt(4)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
		v4 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
		v4.AddArg2(src, mem)
		v3.AddArg3(dst, v4, mem)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [16] {t} dst src mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 16 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
		v0.AuxInt = int32ToAuxInt(8)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [24] {t} dst src mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVDstore [16] dst (MOVDload [16] src mem) (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 24 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AuxInt = int32ToAuxInt(16)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
		v0.AuxInt = int32ToAuxInt(16)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(8)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
		v2.AuxInt = int32ToAuxInt(8)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v4 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
		v4.AddArg2(src, mem)
		v3.AddArg3(dst, v4, mem)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [32] {t} dst src mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVDstore [24] dst (MOVDload [24] src mem) (MOVDstore [16] dst (MOVDload [16] src mem) (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 32 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AuxInt = int32ToAuxInt(24)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
		v0.AuxInt = int32ToAuxInt(24)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(16)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
		v2.AuxInt = int32ToAuxInt(16)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(8)
		v4 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
		v4.AuxInt = int32ToAuxInt(8)
		v4.AddArg2(src, mem)
		v5 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v6 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
		v6.AddArg2(src, mem)
		v5.AddArg3(dst, v6, mem)
		v3.AddArg3(dst, v4, v5)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [s] {t} dst src mem)
	// cond: s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice && logLargeCopy(v, s)
	// result: (DUFFCOPY [16 * (128 - s/8)] dst src mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice && logLargeCopy(v, s)) {
			break
		}
		v.reset(OpRISCV64DUFFCOPY)
		v.AuxInt = int64ToAuxInt(16 * (128 - s/8))
		v.AddArg3(dst, src, mem)
		return true
	}
	// match: (Move [s] {t} dst src mem)
	// cond: (s <= 16 || logLargeCopy(v, s))
	// result: (LoweredMove [t.Alignment()] dst src (ADDI <src.Type> [s-moveSize(t.Alignment(), config)] src) mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(s <= 16 || logLargeCopy(v, s)) {
			break
		}
		v.reset(OpRISCV64LoweredMove)
		v.AuxInt = int64ToAuxInt(t.Alignment())
		v0 := b.NewValue0(v.Pos, OpRISCV64ADDI, src.Type)
		v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
		v0.AddArg(src)
		v.AddArg4(dst, src, v0, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMul16 lowers Mul16 to MULW applied to the
// 32-bit sign extensions of both operands. Always rewrites.
func rewriteValueRISCV64_OpMul16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mul16 x y)
	// result: (MULW (SignExt16to32 x) (SignExt16to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64MULW)
		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpMul8 lowers Mul8 to MULW applied to the
// 32-bit sign extensions of both operands. Always rewrites.
func rewriteValueRISCV64_OpMul8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mul8 x y)
	// result: (MULW (SignExt8to32 x) (SignExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64MULW)
		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpNeq16 lowers Neq16 to the negation of Eq16.
// Always rewrites.
func rewriteValueRISCV64_OpNeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq16 x y)
	// result: (Not (Eq16 x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpEq16, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpNeq32 lowers Neq32 to the negation of Eq32.
// Always rewrites.
func rewriteValueRISCV64_OpNeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq32 x y)
	// result: (Not (Eq32 x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpEq32, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpNeq64 lowers Neq64 to the negation of Eq64.
// Always rewrites.
func rewriteValueRISCV64_OpNeq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq64 x y)
	// result: (Not (Eq64 x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpEq64, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpNeq8 lowers Neq8 to the negation of Eq8.
// Always rewrites.
func rewriteValueRISCV64_OpNeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq8 x y)
	// result: (Not (Eq8 x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpEq8, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpNeqB lowers boolean inequality to
// SNEZ (set-if-not-zero) of the operands' difference. Always rewrites.
func rewriteValueRISCV64_OpNeqB(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (NeqB x y)
	// result: (SNEZ (SUB <typ.Bool> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SNEZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpNeqPtr lowers NeqPtr to the negation of EqPtr.
// Always rewrites.
func rewriteValueRISCV64_OpNeqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (NeqPtr x y)
	// result: (Not (EqPtr x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpEqPtr, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpOffPtr lowers OffPtr by preference:
// SP-relative offsets become MOVaddr, other 32-bit offsets become ADDI,
// and offsets that don't fit in 32 bits fall back to an ADD of a
// materialized MOVDconst. Always rewrites via one of the three cases.
func rewriteValueRISCV64_OpOffPtr(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (OffPtr [off] ptr:(SP))
	// cond: is32Bit(off)
	// result: (MOVaddr [int32(off)] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		if ptr.Op != OpSP || !(is32Bit(off)) {
			break
		}
		v.reset(OpRISCV64MOVaddr)
		v.AuxInt = int32ToAuxInt(int32(off))
		v.AddArg(ptr)
		return true
	}
	// match: (OffPtr [off] ptr)
	// cond: is32Bit(off)
	// result: (ADDI [off] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		if !(is32Bit(off)) {
			break
		}
		v.reset(OpRISCV64ADDI)
		v.AuxInt = int64ToAuxInt(off)
		v.AddArg(ptr)
		return true
	}
	// match: (OffPtr [off] ptr)
	// result: (ADD (MOVDconst [off]) ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		v.reset(OpRISCV64ADD)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(off)
		v.AddArg2(v0, ptr)
		return true
	}
}
// rewriteValueRISCV64_OpPanicBounds lowers PanicBounds to one of the
// LoweredPanicBoundsA/B/C ops, selected by boundsABI(kind), which
// chooses the register-argument convention for the panic call.
func rewriteValueRISCV64_OpPanicBounds(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 0
	// result: (LoweredPanicBoundsA [kind] x y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		y := v_1
		mem := v_2
		if !(boundsABI(kind) == 0) {
			break
		}
		v.reset(OpRISCV64LoweredPanicBoundsA)
		v.AuxInt = int64ToAuxInt(kind)
		v.AddArg3(x, y, mem)
		return true
	}
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 1
	// result: (LoweredPanicBoundsB [kind] x y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		y := v_1
		mem := v_2
		if !(boundsABI(kind) == 1) {
			break
		}
		v.reset(OpRISCV64LoweredPanicBoundsB)
		v.AuxInt = int64ToAuxInt(kind)
		v.AddArg3(x, y, mem)
		return true
	}
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 2
	// result: (LoweredPanicBoundsC [kind] x y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		y := v_1
		mem := v_2
		if !(boundsABI(kind) == 2) {
			break
		}
		v.reset(OpRISCV64LoweredPanicBoundsC)
		v.AuxInt = int64ToAuxInt(kind)
		v.AddArg3(x, y, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ADD simplifies ADD: an add of a small
// (32-bit) non-pointer constant becomes ADDI, and an add of a value
// shifted left by 1, 2 or 3 becomes SH1ADD/SH2ADD/SH3ADD when the
// target GORISCV64 profile is >= 22 (Zba shift-add instructions).
// Each match loop tries both operand orders (commutative matching).
func rewriteValueRISCV64_OpRISCV64ADD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ADD (MOVDconst <t> [val]) x)
	// cond: is32Bit(val) && !t.IsPtr()
	// result: (ADDI [val] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64MOVDconst {
				continue
			}
			t := v_0.Type
			val := auxIntToInt64(v_0.AuxInt)
			x := v_1
			if !(is32Bit(val) && !t.IsPtr()) {
				continue
			}
			v.reset(OpRISCV64ADDI)
			v.AuxInt = int64ToAuxInt(val)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (ADD (SLLI [1] x) y)
	// cond: buildcfg.GORISCV64 >= 22
	// result: (SH1ADD x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64SLLI || auxIntToInt64(v_0.AuxInt) != 1 {
				continue
			}
			x := v_0.Args[0]
			y := v_1
			if !(buildcfg.GORISCV64 >= 22) {
				continue
			}
			v.reset(OpRISCV64SH1ADD)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (ADD (SLLI [2] x) y)
	// cond: buildcfg.GORISCV64 >= 22
	// result: (SH2ADD x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64SLLI || auxIntToInt64(v_0.AuxInt) != 2 {
				continue
			}
			x := v_0.Args[0]
			y := v_1
			if !(buildcfg.GORISCV64 >= 22) {
				continue
			}
			v.reset(OpRISCV64SH2ADD)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (ADD (SLLI [3] x) y)
	// cond: buildcfg.GORISCV64 >= 22
	// result: (SH3ADD x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64SLLI || auxIntToInt64(v_0.AuxInt) != 3 {
				continue
			}
			x := v_0.Args[0]
			y := v_1
			if !(buildcfg.GORISCV64 >= 22) {
				continue
			}
			v.reset(OpRISCV64SH3ADD)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ADDI simplifies ADDI: it folds the
// immediate into a MOVaddr offset, removes a zero add, and folds
// constant chains (ADDI of MOVDconst, ADDI of ADDI) when the combined
// immediate still fits in 32 bits.
func rewriteValueRISCV64_OpRISCV64ADDI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ADDI [c] (MOVaddr [d] {s} x))
	// cond: is32Bit(c+int64(d))
	// result: (MOVaddr [int32(c)+d] {s} x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		d := auxIntToInt32(v_0.AuxInt)
		s := auxToSym(v_0.Aux)
		x := v_0.Args[0]
		if !(is32Bit(c + int64(d))) {
			break
		}
		v.reset(OpRISCV64MOVaddr)
		v.AuxInt = int32ToAuxInt(int32(c) + d)
		v.Aux = symToAux(s)
		v.AddArg(x)
		return true
	}
	// match: (ADDI [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ADDI [x] (MOVDconst [y]))
	// cond: is32Bit(x + y)
	// result: (MOVDconst [x + y])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		if !(is32Bit(x + y)) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(x + y)
		return true
	}
	// match: (ADDI [x] (ADDI [y] z))
	// cond: is32Bit(x + y)
	// result: (ADDI [x + y] z)
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		z := v_0.Args[0]
		if !(is32Bit(x + y)) {
			break
		}
		v.reset(OpRISCV64ADDI)
		v.AuxInt = int64ToAuxInt(x + y)
		v.AddArg(z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64AND converts an AND with a 32-bit
// constant operand (in either position) into ANDI with that immediate.
func rewriteValueRISCV64_OpRISCV64AND(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (AND (MOVDconst [val]) x)
	// cond: is32Bit(val)
	// result: (ANDI [val] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64MOVDconst {
				continue
			}
			val := auxIntToInt64(v_0.AuxInt)
			x := v_1
			if !(is32Bit(val)) {
				continue
			}
			v.reset(OpRISCV64ANDI)
			v.AuxInt = int64ToAuxInt(val)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ANDI simplifies ANDI: masking with 0
// yields constant 0, masking with -1 is the identity, and ANDI of a
// constant or of another ANDI folds the masks together.
func rewriteValueRISCV64_OpRISCV64ANDI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ANDI [0] x)
	// result: (MOVDconst [0])
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (ANDI [-1] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ANDI [x] (MOVDconst [y]))
	// result: (MOVDconst [x & y])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(x & y)
		return true
	}
	// match: (ANDI [x] (ANDI [y] z))
	// result: (ANDI [x & y] z)
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		z := v_0.Args[0]
		v.reset(OpRISCV64ANDI)
		v.AuxInt = int64ToAuxInt(x & y)
		v.AddArg(z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FADDD fuses a double-precision add of a
// multiply into FMADDD (x*y + a) when the function permits FMA fusion
// (useFMA). Matching tries both operand orders.
func rewriteValueRISCV64_OpRISCV64FADDD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FADDD a (FMULD x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FMADDD x y a)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			if v_1.Op != OpRISCV64FMULD {
				continue
			}
			y := v_1.Args[1]
			x := v_1.Args[0]
			if !(a.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpRISCV64FMADDD)
			v.AddArg3(x, y, a)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FADDS fuses a single-precision add of a
// multiply into FMADDS (x*y + a) when the function permits FMA fusion
// (useFMA). Matching tries both operand orders.
func rewriteValueRISCV64_OpRISCV64FADDS(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FADDS a (FMULS x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FMADDS x y a)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			if v_1.Op != OpRISCV64FMULS {
				continue
			}
			y := v_1.Args[1]
			x := v_1.Args[0]
			if !(a.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpRISCV64FMADDS)
			v.AddArg3(x, y, a)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FMADDD absorbs single-use FNEGD operands
// into the fused op: a negated multiplicand turns FMADDD into FNMSUBD,
// and a negated addend turns it into FMSUBD. The Uses == 1 condition
// ensures the negation is not needed elsewhere.
func rewriteValueRISCV64_OpRISCV64FMADDD(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMADDD neg:(FNEGD x) y z)
	// cond: neg.Uses == 1
	// result: (FNMSUBD x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGD {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FNMSUBD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FMADDD x y neg:(FNEGD z))
	// cond: neg.Uses == 1
	// result: (FMSUBD x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGD {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FMSUBD)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FMADDS absorbs single-use FNEGS operands
// into the fused op: a negated multiplicand turns FMADDS into FNMSUBS,
// and a negated addend turns it into FMSUBS. The Uses == 1 condition
// ensures the negation is not needed elsewhere.
func rewriteValueRISCV64_OpRISCV64FMADDS(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMADDS neg:(FNEGS x) y z)
	// cond: neg.Uses == 1
	// result: (FNMSUBS x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGS {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FNMSUBS)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FMADDS x y neg:(FNEGS z))
	// cond: neg.Uses == 1
	// result: (FMSUBS x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGS {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FMSUBS)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FMSUBD absorbs single-use FNEGD operands
// into the fused op: a negated multiplicand turns FMSUBD into FNMADDD,
// and a negated subtrahend turns it back into FMADDD.
func rewriteValueRISCV64_OpRISCV64FMSUBD(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMSUBD neg:(FNEGD x) y z)
	// cond: neg.Uses == 1
	// result: (FNMADDD x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGD {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FNMADDD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FMSUBD x y neg:(FNEGD z))
	// cond: neg.Uses == 1
	// result: (FMADDD x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGD {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FMADDD)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FMSUBS absorbs single-use FNEGS operands
// into the fused op: a negated multiplicand turns FMSUBS into FNMADDS,
// and a negated subtrahend turns it back into FMADDS.
func rewriteValueRISCV64_OpRISCV64FMSUBS(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMSUBS neg:(FNEGS x) y z)
	// cond: neg.Uses == 1
	// result: (FNMADDS x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGS {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FNMADDS)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FMSUBS x y neg:(FNEGS z))
	// cond: neg.Uses == 1
	// result: (FMADDS x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGS {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FMADDS)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FNMADDD absorbs single-use FNEGD
// operands into the fused op: a negated multiplicand turns FNMADDD into
// FMSUBD, and a negated addend turns it into FNMSUBD.
func rewriteValueRISCV64_OpRISCV64FNMADDD(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FNMADDD neg:(FNEGD x) y z)
	// cond: neg.Uses == 1
	// result: (FMSUBD x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGD {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FMSUBD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FNMADDD x y neg:(FNEGD z))
	// cond: neg.Uses == 1
	// result: (FNMSUBD x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGD {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FNMSUBD)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FNMADDS absorbs single-use FNEGS
// operands into the fused op: a negated multiplicand turns FNMADDS into
// FMSUBS, and a negated addend turns it into FNMSUBS.
func rewriteValueRISCV64_OpRISCV64FNMADDS(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FNMADDS neg:(FNEGS x) y z)
	// cond: neg.Uses == 1
	// result: (FMSUBS x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGS {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FMSUBS)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FNMADDS x y neg:(FNEGS z))
	// cond: neg.Uses == 1
	// result: (FNMSUBS x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGS {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FNMSUBS)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FNMSUBD absorbs single-use FNEGD
// operands into the fused op: a negated multiplicand turns FNMSUBD into
// FMADDD, and a negated subtrahend turns it into FNMADDD.
func rewriteValueRISCV64_OpRISCV64FNMSUBD(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FNMSUBD neg:(FNEGD x) y z)
	// cond: neg.Uses == 1
	// result: (FMADDD x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGD {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FMADDD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FNMSUBD x y neg:(FNEGD z))
	// cond: neg.Uses == 1
	// result: (FNMADDD x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGD {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FNMADDD)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FNMSUBS absorbs single-use FNEGS
// operands into the fused op: a negated multiplicand turns FNMSUBS into
// FMADDS, and a negated subtrahend turns it into FNMADDS.
func rewriteValueRISCV64_OpRISCV64FNMSUBS(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FNMSUBS neg:(FNEGS x) y z)
	// cond: neg.Uses == 1
	// result: (FMADDS x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGS {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FMADDS)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FNMSUBS x y neg:(FNEGS z))
	// cond: neg.Uses == 1
	// result: (FNMADDS x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGS {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FNMADDS)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FSUBD fuses a double-precision subtract
// involving a multiply, subject to useFMA: a - x*y becomes FNMSUBD and
// x*y - a becomes FMSUBD. Subtraction is not commutative, so each
// operand order is a separate (non-looped) match.
func rewriteValueRISCV64_OpRISCV64FSUBD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FSUBD a (FMULD x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FNMSUBD x y a)
	for {
		a := v_0
		if v_1.Op != OpRISCV64FMULD {
			break
		}
		y := v_1.Args[1]
		x := v_1.Args[0]
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpRISCV64FNMSUBD)
		v.AddArg3(x, y, a)
		return true
	}
	// match: (FSUBD (FMULD x y) a)
	// cond: a.Block.Func.useFMA(v)
	// result: (FMSUBD x y a)
	for {
		if v_0.Op != OpRISCV64FMULD {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		a := v_1
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpRISCV64FMSUBD)
		v.AddArg3(x, y, a)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FSUBS fuses a single-precision subtract
// involving a multiply, subject to useFMA: a - x*y becomes FNMSUBS and
// x*y - a becomes FMSUBS. Subtraction is not commutative, so each
// operand order is a separate (non-looped) match.
func rewriteValueRISCV64_OpRISCV64FSUBS(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FSUBS a (FMULS x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FNMSUBS x y a)
	for {
		a := v_0
		if v_1.Op != OpRISCV64FMULS {
			break
		}
		y := v_1.Args[1]
		x := v_1.Args[0]
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpRISCV64FNMSUBS)
		v.AddArg3(x, y, a)
		return true
	}
	// match: (FSUBS (FMULS x y) a)
	// cond: a.Block.Func.useFMA(v)
	// result: (FMSUBS x y a)
	for {
		if v_0.Op != OpRISCV64FMULS {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		a := v_1
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpRISCV64FMSUBS)
		v.AddArg3(x, y, a)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVBUload folds addressing arithmetic
// into the unsigned byte load: a MOVaddr base merges its offset and
// symbol into the load, and an ADDI base folds its immediate into the
// load offset, in both cases only when the combined offset fits in
// 32 bits.
func rewriteValueRISCV64_OpRISCV64MOVBUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
	// result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpRISCV64MOVBUload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVBUload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVBUload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVBUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
  4056  func rewriteValueRISCV64_OpRISCV64MOVBUreg(v *Value) bool {
  4057  	v_0 := v.Args[0]
  4058  	b := v.Block
  4059  	// match: (MOVBUreg x:(FLES _ _))
  4060  	// result: x
  4061  	for {
  4062  		x := v_0
  4063  		if x.Op != OpRISCV64FLES {
  4064  			break
  4065  		}
  4066  		v.copyOf(x)
  4067  		return true
  4068  	}
  4069  	// match: (MOVBUreg x:(FLTS _ _))
  4070  	// result: x
  4071  	for {
  4072  		x := v_0
  4073  		if x.Op != OpRISCV64FLTS {
  4074  			break
  4075  		}
  4076  		v.copyOf(x)
  4077  		return true
  4078  	}
  4079  	// match: (MOVBUreg x:(FEQS _ _))
  4080  	// result: x
  4081  	for {
  4082  		x := v_0
  4083  		if x.Op != OpRISCV64FEQS {
  4084  			break
  4085  		}
  4086  		v.copyOf(x)
  4087  		return true
  4088  	}
  4089  	// match: (MOVBUreg x:(FNES _ _))
  4090  	// result: x
  4091  	for {
  4092  		x := v_0
  4093  		if x.Op != OpRISCV64FNES {
  4094  			break
  4095  		}
  4096  		v.copyOf(x)
  4097  		return true
  4098  	}
  4099  	// match: (MOVBUreg x:(FLED _ _))
  4100  	// result: x
  4101  	for {
  4102  		x := v_0
  4103  		if x.Op != OpRISCV64FLED {
  4104  			break
  4105  		}
  4106  		v.copyOf(x)
  4107  		return true
  4108  	}
  4109  	// match: (MOVBUreg x:(FLTD _ _))
  4110  	// result: x
  4111  	for {
  4112  		x := v_0
  4113  		if x.Op != OpRISCV64FLTD {
  4114  			break
  4115  		}
  4116  		v.copyOf(x)
  4117  		return true
  4118  	}
  4119  	// match: (MOVBUreg x:(FEQD _ _))
  4120  	// result: x
  4121  	for {
  4122  		x := v_0
  4123  		if x.Op != OpRISCV64FEQD {
  4124  			break
  4125  		}
  4126  		v.copyOf(x)
  4127  		return true
  4128  	}
  4129  	// match: (MOVBUreg x:(FNED _ _))
  4130  	// result: x
  4131  	for {
  4132  		x := v_0
  4133  		if x.Op != OpRISCV64FNED {
  4134  			break
  4135  		}
  4136  		v.copyOf(x)
  4137  		return true
  4138  	}
  4139  	// match: (MOVBUreg x:(SEQZ _))
  4140  	// result: x
  4141  	for {
  4142  		x := v_0
  4143  		if x.Op != OpRISCV64SEQZ {
  4144  			break
  4145  		}
  4146  		v.copyOf(x)
  4147  		return true
  4148  	}
  4149  	// match: (MOVBUreg x:(SNEZ _))
  4150  	// result: x
  4151  	for {
  4152  		x := v_0
  4153  		if x.Op != OpRISCV64SNEZ {
  4154  			break
  4155  		}
  4156  		v.copyOf(x)
  4157  		return true
  4158  	}
  4159  	// match: (MOVBUreg x:(SLT _ _))
  4160  	// result: x
  4161  	for {
  4162  		x := v_0
  4163  		if x.Op != OpRISCV64SLT {
  4164  			break
  4165  		}
  4166  		v.copyOf(x)
  4167  		return true
  4168  	}
  4169  	// match: (MOVBUreg x:(SLTU _ _))
  4170  	// result: x
  4171  	for {
  4172  		x := v_0
  4173  		if x.Op != OpRISCV64SLTU {
  4174  			break
  4175  		}
  4176  		v.copyOf(x)
  4177  		return true
  4178  	}
  4179  	// match: (MOVBUreg x:(ANDI [c] y))
  4180  	// cond: c >= 0 && int64(uint8(c)) == c
  4181  	// result: x
  4182  	for {
  4183  		x := v_0
  4184  		if x.Op != OpRISCV64ANDI {
  4185  			break
  4186  		}
  4187  		c := auxIntToInt64(x.AuxInt)
  4188  		if !(c >= 0 && int64(uint8(c)) == c) {
  4189  			break
  4190  		}
  4191  		v.copyOf(x)
  4192  		return true
  4193  	}
  4194  	// match: (MOVBUreg (ANDI [c] x))
  4195  	// cond: c < 0
  4196  	// result: (ANDI [int64(uint8(c))] x)
  4197  	for {
  4198  		if v_0.Op != OpRISCV64ANDI {
  4199  			break
  4200  		}
  4201  		c := auxIntToInt64(v_0.AuxInt)
  4202  		x := v_0.Args[0]
  4203  		if !(c < 0) {
  4204  			break
  4205  		}
  4206  		v.reset(OpRISCV64ANDI)
  4207  		v.AuxInt = int64ToAuxInt(int64(uint8(c)))
  4208  		v.AddArg(x)
  4209  		return true
  4210  	}
  4211  	// match: (MOVBUreg (MOVDconst [c]))
  4212  	// result: (MOVDconst [int64(uint8(c))])
  4213  	for {
  4214  		if v_0.Op != OpRISCV64MOVDconst {
  4215  			break
  4216  		}
  4217  		c := auxIntToInt64(v_0.AuxInt)
  4218  		v.reset(OpRISCV64MOVDconst)
  4219  		v.AuxInt = int64ToAuxInt(int64(uint8(c)))
  4220  		return true
  4221  	}
  4222  	// match: (MOVBUreg x:(MOVBUload _ _))
  4223  	// result: (MOVDreg x)
  4224  	for {
  4225  		x := v_0
  4226  		if x.Op != OpRISCV64MOVBUload {
  4227  			break
  4228  		}
  4229  		v.reset(OpRISCV64MOVDreg)
  4230  		v.AddArg(x)
  4231  		return true
  4232  	}
  4233  	// match: (MOVBUreg x:(Select0 (LoweredAtomicLoad8 _ _)))
  4234  	// result: (MOVDreg x)
  4235  	for {
  4236  		x := v_0
  4237  		if x.Op != OpSelect0 {
  4238  			break
  4239  		}
  4240  		x_0 := x.Args[0]
  4241  		if x_0.Op != OpRISCV64LoweredAtomicLoad8 {
  4242  			break
  4243  		}
  4244  		v.reset(OpRISCV64MOVDreg)
  4245  		v.AddArg(x)
  4246  		return true
  4247  	}
  4248  	// match: (MOVBUreg x:(Select0 (LoweredAtomicCas32 _ _ _ _)))
  4249  	// result: (MOVDreg x)
  4250  	for {
  4251  		x := v_0
  4252  		if x.Op != OpSelect0 {
  4253  			break
  4254  		}
  4255  		x_0 := x.Args[0]
  4256  		if x_0.Op != OpRISCV64LoweredAtomicCas32 {
  4257  			break
  4258  		}
  4259  		v.reset(OpRISCV64MOVDreg)
  4260  		v.AddArg(x)
  4261  		return true
  4262  	}
  4263  	// match: (MOVBUreg x:(Select0 (LoweredAtomicCas64 _ _ _ _)))
  4264  	// result: (MOVDreg x)
  4265  	for {
  4266  		x := v_0
  4267  		if x.Op != OpSelect0 {
  4268  			break
  4269  		}
  4270  		x_0 := x.Args[0]
  4271  		if x_0.Op != OpRISCV64LoweredAtomicCas64 {
  4272  			break
  4273  		}
  4274  		v.reset(OpRISCV64MOVDreg)
  4275  		v.AddArg(x)
  4276  		return true
  4277  	}
  4278  	// match: (MOVBUreg x:(MOVBUreg _))
  4279  	// result: (MOVDreg x)
  4280  	for {
  4281  		x := v_0
  4282  		if x.Op != OpRISCV64MOVBUreg {
  4283  			break
  4284  		}
  4285  		v.reset(OpRISCV64MOVDreg)
  4286  		v.AddArg(x)
  4287  		return true
  4288  	}
  4289  	// match: (MOVBUreg <t> x:(MOVBload [off] {sym} ptr mem))
  4290  	// cond: x.Uses == 1 && clobber(x)
  4291  	// result: @x.Block (MOVBUload <t> [off] {sym} ptr mem)
  4292  	for {
  4293  		t := v.Type
  4294  		x := v_0
  4295  		if x.Op != OpRISCV64MOVBload {
  4296  			break
  4297  		}
  4298  		off := auxIntToInt32(x.AuxInt)
  4299  		sym := auxToSym(x.Aux)
  4300  		mem := x.Args[1]
  4301  		ptr := x.Args[0]
  4302  		if !(x.Uses == 1 && clobber(x)) {
  4303  			break
  4304  		}
  4305  		b = x.Block
  4306  		v0 := b.NewValue0(x.Pos, OpRISCV64MOVBUload, t)
  4307  		v.copyOf(v0)
  4308  		v0.AuxInt = int32ToAuxInt(off)
  4309  		v0.Aux = symToAux(sym)
  4310  		v0.AddArg2(ptr, mem)
  4311  		return true
  4312  	}
  4313  	return false
  4314  }
  4315  func rewriteValueRISCV64_OpRISCV64MOVBload(v *Value) bool {
  4316  	v_1 := v.Args[1]
  4317  	v_0 := v.Args[0]
  4318  	// match: (MOVBload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  4319  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
  4320  	// result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} base mem)
  4321  	for {
  4322  		off1 := auxIntToInt32(v.AuxInt)
  4323  		sym1 := auxToSym(v.Aux)
  4324  		if v_0.Op != OpRISCV64MOVaddr {
  4325  			break
  4326  		}
  4327  		off2 := auxIntToInt32(v_0.AuxInt)
  4328  		sym2 := auxToSym(v_0.Aux)
  4329  		base := v_0.Args[0]
  4330  		mem := v_1
  4331  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
  4332  			break
  4333  		}
  4334  		v.reset(OpRISCV64MOVBload)
  4335  		v.AuxInt = int32ToAuxInt(off1 + off2)
  4336  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4337  		v.AddArg2(base, mem)
  4338  		return true
  4339  	}
  4340  	// match: (MOVBload [off1] {sym} (ADDI [off2] base) mem)
  4341  	// cond: is32Bit(int64(off1)+off2)
  4342  	// result: (MOVBload [off1+int32(off2)] {sym} base mem)
  4343  	for {
  4344  		off1 := auxIntToInt32(v.AuxInt)
  4345  		sym := auxToSym(v.Aux)
  4346  		if v_0.Op != OpRISCV64ADDI {
  4347  			break
  4348  		}
  4349  		off2 := auxIntToInt64(v_0.AuxInt)
  4350  		base := v_0.Args[0]
  4351  		mem := v_1
  4352  		if !(is32Bit(int64(off1) + off2)) {
  4353  			break
  4354  		}
  4355  		v.reset(OpRISCV64MOVBload)
  4356  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4357  		v.Aux = symToAux(sym)
  4358  		v.AddArg2(base, mem)
  4359  		return true
  4360  	}
  4361  	return false
  4362  }
// rewriteValueRISCV64_OpRISCV64MOVBreg rewrites MOVBreg (sign-extend byte)
// values: it drops the extension when the operand is already sign-extended
// (small non-negative ANDI masks, prior byte loads or extensions), folds
// constants, and converts a single-use unsigned byte load feeding the
// extension into a signed byte load. Reports whether a rewrite was made.
func rewriteValueRISCV64_OpRISCV64MOVBreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (MOVBreg x:(ANDI [c] y))
	// cond: c >= 0 && int64(int8(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(int8(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBreg (MOVDconst [c]))
	// result: (MOVDconst [int64(int8(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(int8(c)))
		return true
	}
	// match: (MOVBreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg <t> x:(MOVBUload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVBload <t> [off] {sym} ptr mem)
	// NOTE: clobber(x) has side effects on x (marks it for removal), so this
	// rule commits once x is known to have a single use.
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		// The replacement load is built in the original load's block so it
		// stays next to its memory argument.
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVBload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVBstore rewrites MOVBstore (store byte)
// values: it folds MOVaddr/ADDI address arithmetic into the aux offset and
// symbol, turns a stored constant zero into MOVBstorezero, and drops
// sign/zero extensions applied to the stored value (only the low byte is
// written, so the extension is irrelevant). Reports whether a rewrite was
// performed.
func rewriteValueRISCV64_OpRISCV64MOVBstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
	// result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVBstore [off1] {sym} (ADDI [off2] base) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVBstore [off1+int32(off2)] {sym} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVDconst [0]) mem)
	// result: (MOVBstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpRISCV64MOVBstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// The remaining rules strip a sign/zero extension of the stored value:
	// a byte store writes only the low 8 bits, so extending first is wasted
	// work and the pre-extension value can be stored directly.
	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVBreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVBUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
  4609  func rewriteValueRISCV64_OpRISCV64MOVBstorezero(v *Value) bool {
  4610  	v_1 := v.Args[1]
  4611  	v_0 := v.Args[0]
  4612  	// match: (MOVBstorezero [off1] {sym1} (MOVaddr [off2] {sym2} ptr) mem)
  4613  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
  4614  	// result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4615  	for {
  4616  		off1 := auxIntToInt32(v.AuxInt)
  4617  		sym1 := auxToSym(v.Aux)
  4618  		if v_0.Op != OpRISCV64MOVaddr {
  4619  			break
  4620  		}
  4621  		off2 := auxIntToInt32(v_0.AuxInt)
  4622  		sym2 := auxToSym(v_0.Aux)
  4623  		ptr := v_0.Args[0]
  4624  		mem := v_1
  4625  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
  4626  			break
  4627  		}
  4628  		v.reset(OpRISCV64MOVBstorezero)
  4629  		v.AuxInt = int32ToAuxInt(off1 + off2)
  4630  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4631  		v.AddArg2(ptr, mem)
  4632  		return true
  4633  	}
  4634  	// match: (MOVBstorezero [off1] {sym} (ADDI [off2] ptr) mem)
  4635  	// cond: is32Bit(int64(off1)+off2)
  4636  	// result: (MOVBstorezero [off1+int32(off2)] {sym} ptr mem)
  4637  	for {
  4638  		off1 := auxIntToInt32(v.AuxInt)
  4639  		sym := auxToSym(v.Aux)
  4640  		if v_0.Op != OpRISCV64ADDI {
  4641  			break
  4642  		}
  4643  		off2 := auxIntToInt64(v_0.AuxInt)
  4644  		ptr := v_0.Args[0]
  4645  		mem := v_1
  4646  		if !(is32Bit(int64(off1) + off2)) {
  4647  			break
  4648  		}
  4649  		v.reset(OpRISCV64MOVBstorezero)
  4650  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4651  		v.Aux = symToAux(sym)
  4652  		v.AddArg2(ptr, mem)
  4653  		return true
  4654  	}
  4655  	return false
  4656  }
  4657  func rewriteValueRISCV64_OpRISCV64MOVDload(v *Value) bool {
  4658  	v_1 := v.Args[1]
  4659  	v_0 := v.Args[0]
  4660  	// match: (MOVDload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  4661  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
  4662  	// result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} base mem)
  4663  	for {
  4664  		off1 := auxIntToInt32(v.AuxInt)
  4665  		sym1 := auxToSym(v.Aux)
  4666  		if v_0.Op != OpRISCV64MOVaddr {
  4667  			break
  4668  		}
  4669  		off2 := auxIntToInt32(v_0.AuxInt)
  4670  		sym2 := auxToSym(v_0.Aux)
  4671  		base := v_0.Args[0]
  4672  		mem := v_1
  4673  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
  4674  			break
  4675  		}
  4676  		v.reset(OpRISCV64MOVDload)
  4677  		v.AuxInt = int32ToAuxInt(off1 + off2)
  4678  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4679  		v.AddArg2(base, mem)
  4680  		return true
  4681  	}
  4682  	// match: (MOVDload [off1] {sym} (ADDI [off2] base) mem)
  4683  	// cond: is32Bit(int64(off1)+off2)
  4684  	// result: (MOVDload [off1+int32(off2)] {sym} base mem)
  4685  	for {
  4686  		off1 := auxIntToInt32(v.AuxInt)
  4687  		sym := auxToSym(v.Aux)
  4688  		if v_0.Op != OpRISCV64ADDI {
  4689  			break
  4690  		}
  4691  		off2 := auxIntToInt64(v_0.AuxInt)
  4692  		base := v_0.Args[0]
  4693  		mem := v_1
  4694  		if !(is32Bit(int64(off1) + off2)) {
  4695  			break
  4696  		}
  4697  		v.reset(OpRISCV64MOVDload)
  4698  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4699  		v.Aux = symToAux(sym)
  4700  		v.AddArg2(base, mem)
  4701  		return true
  4702  	}
  4703  	return false
  4704  }
  4705  func rewriteValueRISCV64_OpRISCV64MOVDnop(v *Value) bool {
  4706  	v_0 := v.Args[0]
  4707  	// match: (MOVDnop (MOVDconst [c]))
  4708  	// result: (MOVDconst [c])
  4709  	for {
  4710  		if v_0.Op != OpRISCV64MOVDconst {
  4711  			break
  4712  		}
  4713  		c := auxIntToInt64(v_0.AuxInt)
  4714  		v.reset(OpRISCV64MOVDconst)
  4715  		v.AuxInt = int64ToAuxInt(c)
  4716  		return true
  4717  	}
  4718  	return false
  4719  }
  4720  func rewriteValueRISCV64_OpRISCV64MOVDreg(v *Value) bool {
  4721  	v_0 := v.Args[0]
  4722  	// match: (MOVDreg x)
  4723  	// cond: x.Uses == 1
  4724  	// result: (MOVDnop x)
  4725  	for {
  4726  		x := v_0
  4727  		if !(x.Uses == 1) {
  4728  			break
  4729  		}
  4730  		v.reset(OpRISCV64MOVDnop)
  4731  		v.AddArg(x)
  4732  		return true
  4733  	}
  4734  	return false
  4735  }
// rewriteValueRISCV64_OpRISCV64MOVDstore rewrites MOVDstore (store
// doubleword) values: it folds MOVaddr/ADDI address arithmetic into the aux
// offset and symbol, and turns a stored constant zero into MOVDstorezero.
// Reports whether a rewrite was performed.
func rewriteValueRISCV64_OpRISCV64MOVDstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVDstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
	// result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVDstore [off1] {sym} (ADDI [off2] base) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVDstore [off1+int32(off2)] {sym} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVDstore [off] {sym} ptr (MOVDconst [0]) mem)
	// result: (MOVDstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpRISCV64MOVDstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
  4803  func rewriteValueRISCV64_OpRISCV64MOVDstorezero(v *Value) bool {
  4804  	v_1 := v.Args[1]
  4805  	v_0 := v.Args[0]
  4806  	// match: (MOVDstorezero [off1] {sym1} (MOVaddr [off2] {sym2} ptr) mem)
  4807  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
  4808  	// result: (MOVDstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4809  	for {
  4810  		off1 := auxIntToInt32(v.AuxInt)
  4811  		sym1 := auxToSym(v.Aux)
  4812  		if v_0.Op != OpRISCV64MOVaddr {
  4813  			break
  4814  		}
  4815  		off2 := auxIntToInt32(v_0.AuxInt)
  4816  		sym2 := auxToSym(v_0.Aux)
  4817  		ptr := v_0.Args[0]
  4818  		mem := v_1
  4819  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
  4820  			break
  4821  		}
  4822  		v.reset(OpRISCV64MOVDstorezero)
  4823  		v.AuxInt = int32ToAuxInt(off1 + off2)
  4824  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4825  		v.AddArg2(ptr, mem)
  4826  		return true
  4827  	}
  4828  	// match: (MOVDstorezero [off1] {sym} (ADDI [off2] ptr) mem)
  4829  	// cond: is32Bit(int64(off1)+off2)
  4830  	// result: (MOVDstorezero [off1+int32(off2)] {sym} ptr mem)
  4831  	for {
  4832  		off1 := auxIntToInt32(v.AuxInt)
  4833  		sym := auxToSym(v.Aux)
  4834  		if v_0.Op != OpRISCV64ADDI {
  4835  			break
  4836  		}
  4837  		off2 := auxIntToInt64(v_0.AuxInt)
  4838  		ptr := v_0.Args[0]
  4839  		mem := v_1
  4840  		if !(is32Bit(int64(off1) + off2)) {
  4841  			break
  4842  		}
  4843  		v.reset(OpRISCV64MOVDstorezero)
  4844  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4845  		v.Aux = symToAux(sym)
  4846  		v.AddArg2(ptr, mem)
  4847  		return true
  4848  	}
  4849  	return false
  4850  }
  4851  func rewriteValueRISCV64_OpRISCV64MOVHUload(v *Value) bool {
  4852  	v_1 := v.Args[1]
  4853  	v_0 := v.Args[0]
  4854  	// match: (MOVHUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  4855  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
  4856  	// result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
  4857  	for {
  4858  		off1 := auxIntToInt32(v.AuxInt)
  4859  		sym1 := auxToSym(v.Aux)
  4860  		if v_0.Op != OpRISCV64MOVaddr {
  4861  			break
  4862  		}
  4863  		off2 := auxIntToInt32(v_0.AuxInt)
  4864  		sym2 := auxToSym(v_0.Aux)
  4865  		base := v_0.Args[0]
  4866  		mem := v_1
  4867  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
  4868  			break
  4869  		}
  4870  		v.reset(OpRISCV64MOVHUload)
  4871  		v.AuxInt = int32ToAuxInt(off1 + off2)
  4872  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4873  		v.AddArg2(base, mem)
  4874  		return true
  4875  	}
  4876  	// match: (MOVHUload [off1] {sym} (ADDI [off2] base) mem)
  4877  	// cond: is32Bit(int64(off1)+off2)
  4878  	// result: (MOVHUload [off1+int32(off2)] {sym} base mem)
  4879  	for {
  4880  		off1 := auxIntToInt32(v.AuxInt)
  4881  		sym := auxToSym(v.Aux)
  4882  		if v_0.Op != OpRISCV64ADDI {
  4883  			break
  4884  		}
  4885  		off2 := auxIntToInt64(v_0.AuxInt)
  4886  		base := v_0.Args[0]
  4887  		mem := v_1
  4888  		if !(is32Bit(int64(off1) + off2)) {
  4889  			break
  4890  		}
  4891  		v.reset(OpRISCV64MOVHUload)
  4892  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4893  		v.Aux = symToAux(sym)
  4894  		v.AddArg2(base, mem)
  4895  		return true
  4896  	}
  4897  	return false
  4898  }
// rewriteValueRISCV64_OpRISCV64MOVHUreg rewrites MOVHUreg (zero-extend
// halfword) values: it drops the extension when the operand is already
// zero-extended to 16 bits (small ANDI masks, prior unsigned loads or
// extensions), narrows negative ANDI masks, folds constants, and converts a
// single-use signed halfword load feeding the extension into an unsigned
// load. Reports whether a rewrite was performed.
func rewriteValueRISCV64_OpRISCV64MOVHUreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (MOVHUreg x:(ANDI [c] y))
	// cond: c >= 0 && int64(uint16(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(uint16(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVHUreg (ANDI [c] x))
	// cond: c < 0
	// result: (ANDI [int64(uint16(c))] x)
	for {
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(c < 0) {
			break
		}
		v.reset(OpRISCV64ANDI)
		v.AuxInt = int64ToAuxInt(int64(uint16(c)))
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (MOVDconst [c]))
	// result: (MOVDconst [int64(uint16(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint16(c)))
		return true
	}
	// match: (MOVHUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg <t> x:(MOVHload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVHUload <t> [off] {sym} ptr mem)
	// NOTE: clobber(x) has side effects on x (marks it for removal), so this
	// rule commits once x is known to have a single use.
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVHload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		// The replacement load is built in the original load's block so it
		// stays next to its memory argument.
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVHUload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
  5015  func rewriteValueRISCV64_OpRISCV64MOVHload(v *Value) bool {
  5016  	v_1 := v.Args[1]
  5017  	v_0 := v.Args[0]
  5018  	// match: (MOVHload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  5019  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
  5020  	// result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} base mem)
  5021  	for {
  5022  		off1 := auxIntToInt32(v.AuxInt)
  5023  		sym1 := auxToSym(v.Aux)
  5024  		if v_0.Op != OpRISCV64MOVaddr {
  5025  			break
  5026  		}
  5027  		off2 := auxIntToInt32(v_0.AuxInt)
  5028  		sym2 := auxToSym(v_0.Aux)
  5029  		base := v_0.Args[0]
  5030  		mem := v_1
  5031  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
  5032  			break
  5033  		}
  5034  		v.reset(OpRISCV64MOVHload)
  5035  		v.AuxInt = int32ToAuxInt(off1 + off2)
  5036  		v.Aux = symToAux(mergeSym(sym1, sym2))
  5037  		v.AddArg2(base, mem)
  5038  		return true
  5039  	}
  5040  	// match: (MOVHload [off1] {sym} (ADDI [off2] base) mem)
  5041  	// cond: is32Bit(int64(off1)+off2)
  5042  	// result: (MOVHload [off1+int32(off2)] {sym} base mem)
  5043  	for {
  5044  		off1 := auxIntToInt32(v.AuxInt)
  5045  		sym := auxToSym(v.Aux)
  5046  		if v_0.Op != OpRISCV64ADDI {
  5047  			break
  5048  		}
  5049  		off2 := auxIntToInt64(v_0.AuxInt)
  5050  		base := v_0.Args[0]
  5051  		mem := v_1
  5052  		if !(is32Bit(int64(off1) + off2)) {
  5053  			break
  5054  		}
  5055  		v.reset(OpRISCV64MOVHload)
  5056  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  5057  		v.Aux = symToAux(sym)
  5058  		v.AddArg2(base, mem)
  5059  		return true
  5060  	}
  5061  	return false
  5062  }
// rewriteValueRISCV64_OpRISCV64MOVHreg rewrites MOVHreg (sign-extend
// halfword) values: it drops the extension when the operand is already
// correctly sign-extended (small non-negative ANDI masks, prior narrower
// loads or extensions), folds constants, and converts a single-use unsigned
// halfword load feeding the extension into a signed load. Reports whether a
// rewrite was performed.
func rewriteValueRISCV64_OpRISCV64MOVHreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (MOVHreg x:(ANDI [c] y))
	// cond: c >= 0 && int64(int16(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(int16(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVHreg (MOVDconst [c]))
	// result: (MOVDconst [int64(int16(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(int16(c)))
		return true
	}
	// match: (MOVHreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg <t> x:(MOVHUload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVHload <t> [off] {sym} ptr mem)
	// NOTE: clobber(x) has side effects on x (marks it for removal), so this
	// rule commits once x is known to have a single use.
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVHUload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		// The replacement load is built in the original load's block so it
		// stays next to its memory argument.
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVHload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVHstore applies the generated rewrite rules
// for MOVHstore (16-bit store). It folds MOVaddr/ADDI address arithmetic into
// the store's offset, rewrites a store of constant zero into MOVHstorezero,
// and drops sign/zero-extension ops on the stored value (a halfword store
// only writes the low 16 bits, so the extension is redundant).
// Rules are tried in order; it reports whether one fired and mutated v.
func rewriteValueRISCV64_OpRISCV64MOVHstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
	// result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		// Offset sum must stay in 32 bits and the symbols must be mergeable.
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVHstore [off1] {sym} (ADDI [off2] base) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVHstore [off1+int32(off2)] {sym} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVDconst [0]) mem)
	// result: (MOVHstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpRISCV64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVHstorezero applies the generated rewrite
// rules for MOVHstorezero (16-bit store of zero): it folds MOVaddr and ADDI
// address arithmetic into the store's offset/symbol.
// Reports whether a rule fired and mutated v.
func rewriteValueRISCV64_OpRISCV64MOVHstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVHstorezero [off1] {sym1} (MOVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpRISCV64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHstorezero [off1] {sym} (ADDI [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVHstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVHstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWUload applies the generated rewrite rules
// for MOVWUload (32-bit zero-extending load): it folds MOVaddr and ADDI
// address arithmetic into the load's offset/symbol.
// Reports whether a rule fired and mutated v.
func rewriteValueRISCV64_OpRISCV64MOVWUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
	// result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpRISCV64MOVWUload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVWUload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVWUload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVWUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWUreg applies the generated rewrite rules
// for MOVWUreg (zero-extend low 32 bits to 64). It removes the extension when
// the operand is provably already zero-extended (small ANDI masks,
// zero-extending loads or narrower zero extensions), folds constants, and
// converts a zero-extend of a signed MOVWload into a MOVWUload.
// Rules are tried in order; reports whether one fired and mutated v.
func rewriteValueRISCV64_OpRISCV64MOVWUreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (MOVWUreg x:(ANDI [c] y))
	// cond: c >= 0 && int64(uint32(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		// Mask is non-negative and fits in 32 bits, so bits 32..63 are
		// already zero and the extension is a no-op.
		if !(c >= 0 && int64(uint32(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVWUreg (ANDI [c] x))
	// cond: c < 0
	// result: (AND (MOVDconst [int64(uint32(c))]) x)
	for {
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(c < 0) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
		v.AddArg2(v0, x)
		return true
	}
	// match: (MOVWUreg (MOVDconst [c]))
	// result: (MOVDconst [int64(uint32(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint32(c)))
		return true
	}
	// match: (MOVWUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVWUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVWUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg <t> x:(MOVWload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVWUload <t> [off] {sym} ptr mem)
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVWload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		// clobber(x) is evaluated as part of the condition and marks the
		// old load dead; safe only because it has a single use.
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		// The replacement load is created in x's original block (@x.Block),
		// not in v's block.
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVWUload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWload applies the generated rewrite rules
// for MOVWload (32-bit sign-extending load): it folds MOVaddr and ADDI
// address arithmetic into the load's offset/symbol.
// Reports whether a rule fired and mutated v.
func rewriteValueRISCV64_OpRISCV64MOVWload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
	// result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpRISCV64MOVWload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVWload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVWload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVWload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWreg applies the generated rewrite rules
// for MOVWreg (sign-extend low 32 bits to 64). It folds constants, replaces
// the extension with MOVDreg when the operand is already correctly
// sign-extended (narrow loads, 32-bit W-form arithmetic ops, or narrower
// extensions), and converts a sign-extend of a MOVWUload into a MOVWload.
// Rules are tried in order; reports whether one fired and mutated v.
func rewriteValueRISCV64_OpRISCV64MOVWreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (MOVWreg x:(ANDI [c] y))
	// cond: c >= 0 && int64(int32(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		// Mask is non-negative and fits in int32, so bit 31 of the result
		// is zero and sign extension is a no-op.
		if !(c >= 0 && int64(int32(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVWreg (MOVDconst [c]))
	// result: (MOVDconst [int64(int32(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(int32(c)))
		return true
	}
	// match: (MOVWreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVWload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(ADDIW _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64ADDIW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(SUBW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64SUBW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(NEGW _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64NEGW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MULW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MULW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(DIVW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64DIVW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(DIVUW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64DIVUW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(REMW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64REMW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(REMUW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64REMUW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(ROLW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64ROLW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(RORW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64RORW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(RORIW _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64RORIW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVWreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg <t> x:(MOVWUload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVWload <t> [off] {sym} ptr mem)
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVWUload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		// clobber(x) is evaluated as part of the condition and marks the
		// old load dead; safe only because it has a single use.
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		// The replacement load is created in x's original block (@x.Block),
		// not in v's block.
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVWload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWstore applies the generated rewrite rules
// for MOVWstore (32-bit store). It folds MOVaddr/ADDI address arithmetic into
// the store's offset, rewrites a store of constant zero into MOVWstorezero,
// and drops sign/zero-extension ops on the stored value (a word store only
// writes the low 32 bits, so the extension is redundant).
// Rules are tried in order; reports whether one fired and mutated v.
func rewriteValueRISCV64_OpRISCV64MOVWstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
	// result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVWstore [off1] {sym} (ADDI [off2] base) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVWstore [off1+int32(off2)] {sym} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVDconst [0]) mem)
	// result: (MOVWstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpRISCV64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWstorezero applies the generated rewrite
// rules for MOVWstorezero (32-bit store of zero): it folds MOVaddr and ADDI
// address arithmetic into the store's offset/symbol.
// Reports whether a rule fired and mutated v.
func rewriteValueRISCV64_OpRISCV64MOVWstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWstorezero [off1] {sym1} (MOVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpRISCV64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWstorezero [off1] {sym} (ADDI [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVWstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64NEG applies the generated rewrite rules for
// NEG (64-bit negation): -(x-y) becomes y-x (also through a single-use ADDI),
// double negation cancels, and negation of a constant is folded.
// Rules are tried in order; reports whether one fired and mutated v.
func rewriteValueRISCV64_OpRISCV64NEG(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (NEG (SUB x y))
	// result: (SUB y x)
	for {
		if v_0.Op != OpRISCV64SUB {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpRISCV64SUB)
		v.AddArg2(y, x)
		return true
	}
	// match: (NEG <t> s:(ADDI [val] (SUB x y)))
	// cond: s.Uses == 1 && is32Bit(-val)
	// result: (ADDI [-val] (SUB <t> y x))
	for {
		t := v.Type
		s := v_0
		if s.Op != OpRISCV64ADDI {
			break
		}
		val := auxIntToInt64(s.AuxInt)
		s_0 := s.Args[0]
		if s_0.Op != OpRISCV64SUB {
			break
		}
		y := s_0.Args[1]
		x := s_0.Args[0]
		// Single use so the ADDI can be rewritten in place; -val must
		// still fit the 32-bit immediate range.
		if !(s.Uses == 1 && is32Bit(-val)) {
			break
		}
		v.reset(OpRISCV64ADDI)
		v.AuxInt = int64ToAuxInt(-val)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, t)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
	// match: (NEG (NEG x))
	// result: x
	for {
		if v_0.Op != OpRISCV64NEG {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (NEG (MOVDconst [x]))
	// result: (MOVDconst [-x])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		x := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(-x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64NEGW applies the generated rewrite rule for
// NEGW (32-bit negation with sign extension): it folds negation of a
// constant, truncating through int32 to model the W-form semantics.
// Reports whether the rule fired and mutated v.
func rewriteValueRISCV64_OpRISCV64NEGW(v *Value) bool {
	v_0 := v.Args[0]
	// match: (NEGW (MOVDconst [x]))
	// result: (MOVDconst [int64(int32(-x))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		x := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(int32(-x)))
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64OR applies the generated rewrite rule for OR:
// an OR with a constant that fits in 32 bits becomes an ORI immediate.
// OR is commutative, so the inner loop tries both argument orders.
// Reports whether the rule fired and mutated v.
func rewriteValueRISCV64_OpRISCV64OR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (OR (MOVDconst [val]) x)
	// cond: is32Bit(val)
	// result: (ORI [val] x)
	for {
		// Commutative match: second iteration swaps v_0 and v_1.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64MOVDconst {
				continue
			}
			val := auxIntToInt64(v_0.AuxInt)
			x := v_1
			if !(is32Bit(val)) {
				continue
			}
			v.reset(OpRISCV64ORI)
			v.AuxInt = int64ToAuxInt(val)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ORI applies the generated rewrite rules for
// ORI (OR with immediate): identity (x|0 = x), saturation (x|-1 = -1),
// constant folding, and merging of nested ORI immediates.
// Rules are tried in order; reports whether one fired and mutated v.
func rewriteValueRISCV64_OpRISCV64ORI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ORI [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ORI [-1] x)
	// result: (MOVDconst [-1])
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	// match: (ORI [x] (MOVDconst [y]))
	// result: (MOVDconst [x | y])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(x | y)
		return true
	}
	// match: (ORI [x] (ORI [y] z))
	// result: (ORI [x | y] z)
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ORI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		z := v_0.Args[0]
		v.reset(OpRISCV64ORI)
		v.AuxInt = int64ToAuxInt(x | y)
		v.AddArg(z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ROL applies the generated rewrite rules for
// ROL (64-bit rotate left): a rotate-left by a constant becomes a
// rotate-right immediate by the complementary amount (only the low 6 bits of
// the shift count matter), and rotate-left by a negated amount becomes ROR.
// Reports whether a rule fired and mutated v.
func rewriteValueRISCV64_OpRISCV64ROL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ROL x (MOVDconst [val]))
	// result: (RORI [int64(int8(-val)&63)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64RORI)
		// int8 truncation is harmless: the &63 keeps only the low 6 bits.
		v.AuxInt = int64ToAuxInt(int64(int8(-val) & 63))
		v.AddArg(x)
		return true
	}
	// match: (ROL x (NEG y))
	// result: (ROR x y)
	for {
		x := v_0
		if v_1.Op != OpRISCV64NEG {
			break
		}
		y := v_1.Args[0]
		v.reset(OpRISCV64ROR)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ROLW applies the generated rewrite rules for
// ROLW (32-bit rotate left): a rotate-left by a constant becomes a
// rotate-right immediate by the complementary amount (only the low 5 bits of
// the shift count matter), and rotate-left by a negated amount becomes RORW.
// Reports whether a rule fired and mutated v.
func rewriteValueRISCV64_OpRISCV64ROLW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ROLW x (MOVDconst [val]))
	// result: (RORIW [int64(int8(-val)&31)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64RORIW)
		// int8 truncation is harmless: the &31 keeps only the low 5 bits.
		v.AuxInt = int64ToAuxInt(int64(int8(-val) & 31))
		v.AddArg(x)
		return true
	}
	// match: (ROLW x (NEG y))
	// result: (RORW x y)
	for {
		x := v_0
		if v_1.Op != OpRISCV64NEG {
			break
		}
		y := v_1.Args[0]
		v.reset(OpRISCV64RORW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ROR applies the generated rewrite rule for
// ROR (64-bit rotate right): a rotate by a constant becomes a RORI immediate,
// masking the count to its low 6 bits.
// Reports whether the rule fired and mutated v.
func rewriteValueRISCV64_OpRISCV64ROR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ROR x (MOVDconst [val]))
	// result: (RORI [int64(val&63)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64RORI)
		v.AuxInt = int64ToAuxInt(int64(val & 63))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64RORW applies the generated rewrite rule for
// RORW (32-bit rotate right): a rotate by a constant becomes a RORIW
// immediate, masking the count to its low 5 bits.
// Reports whether the rule fired and mutated v.
func rewriteValueRISCV64_OpRISCV64RORW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (RORW x (MOVDconst [val]))
	// result: (RORIW [int64(val&31)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64RORIW)
		v.AuxInt = int64ToAuxInt(int64(val & 31))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SEQZ applies the generated rewrite rules for
// SEQZ (set-if-equal-zero): negation under the test is dropped (x == 0 iff
// -x == 0), and stacked SEQZ/SNEZ tests collapse since their results are
// already 0 or 1.
// Rules are tried in order; reports whether one fired and mutated v.
func rewriteValueRISCV64_OpRISCV64SEQZ(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SEQZ (NEG x))
	// result: (SEQZ x)
	for {
		if v_0.Op != OpRISCV64NEG {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64SEQZ)
		v.AddArg(x)
		return true
	}
	// match: (SEQZ (SEQZ x))
	// result: (SNEZ x)
	for {
		if v_0.Op != OpRISCV64SEQZ {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64SNEZ)
		v.AddArg(x)
		return true
	}
	// match: (SEQZ (SNEZ x))
	// result: (SEQZ x)
	for {
		if v_0.Op != OpRISCV64SNEZ {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64SEQZ)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLL applies the generated rewrite rule for
// SLL (64-bit shift left logical): a shift by a constant becomes an SLLI
// immediate, masking the count to its low 6 bits.
// Reports whether the rule fired and mutated v.
func rewriteValueRISCV64_OpRISCV64SLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SLL x (MOVDconst [val]))
	// result: (SLLI [int64(val&63)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64SLLI)
		v.AuxInt = int64ToAuxInt(int64(val & 63))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLLI constant-folds an immediate left shift of
// a constant, but only when the shifted result still fits in 32 bits (so the
// folded constant remains cheap to materialize).
// Generated from _gen/RISCV64.rules.
func rewriteValueRISCV64_OpRISCV64SLLI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SLLI [x] (MOVDconst [y]))
	// cond: is32Bit(y << uint32(x))
	// result: (MOVDconst [y << uint32(x)])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		if !(is32Bit(y << uint32(x))) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(y << uint32(x))
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLLW lowers a 32-bit left shift by a constant
// to the immediate form SLLIW, masking the shift amount to 5 bits.
// Generated from _gen/RISCV64.rules.
func rewriteValueRISCV64_OpRISCV64SLLW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SLLW x (MOVDconst [val]))
	// result: (SLLIW [int64(val&31)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64SLLIW)
		v.AuxInt = int64ToAuxInt(int64(val & 31))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLT simplifies signed set-less-than: a constant
// operand that fits the 12-bit signed immediate becomes SLTI, and x < x is
// always false.
// Generated from _gen/RISCV64.rules.
func rewriteValueRISCV64_OpRISCV64SLT(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SLT x (MOVDconst [val]))
	// cond: val >= -2048 && val <= 2047
	// result: (SLTI [val] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		if !(val >= -2048 && val <= 2047) {
			break
		}
		v.reset(OpRISCV64SLTI)
		v.AuxInt = int64ToAuxInt(val)
		v.AddArg(x)
		return true
	}
	// match: (SLT x x)
	// result: (MOVDconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLTI folds signed set-less-than-immediate:
// a constant operand folds completely; an ANDI/ORI operand with a known
// non-negative bound can decide the comparison without evaluating it.
// Generated from _gen/RISCV64.rules.
func rewriteValueRISCV64_OpRISCV64SLTI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SLTI [x] (MOVDconst [y]))
	// result: (MOVDconst [b2i(int64(y) < int64(x))])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(b2i(int64(y) < int64(x)))
		return true
	}
	// match: (SLTI [x] (ANDI [y] _))
	// cond: y >= 0 && int64(y) < int64(x)
	// result: (MOVDconst [1])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		if !(y >= 0 && int64(y) < int64(x)) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SLTI [x] (ORI [y] _))
	// cond: y >= 0 && int64(y) >= int64(x)
	// result: (MOVDconst [0])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ORI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		if !(y >= 0 && int64(y) >= int64(x)) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLTIU is the unsigned counterpart of the SLTI
// folds: constant operands fold completely, and bounded ANDI/ORI operands can
// decide the unsigned comparison statically.
// Generated from _gen/RISCV64.rules.
func rewriteValueRISCV64_OpRISCV64SLTIU(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SLTIU [x] (MOVDconst [y]))
	// result: (MOVDconst [b2i(uint64(y) < uint64(x))])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(b2i(uint64(y) < uint64(x)))
		return true
	}
	// match: (SLTIU [x] (ANDI [y] _))
	// cond: y >= 0 && uint64(y) < uint64(x)
	// result: (MOVDconst [1])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		if !(y >= 0 && uint64(y) < uint64(x)) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SLTIU [x] (ORI [y] _))
	// cond: y >= 0 && uint64(y) >= uint64(x)
	// result: (MOVDconst [0])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ORI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		if !(y >= 0 && uint64(y) >= uint64(x)) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLTU simplifies unsigned set-less-than: a
// constant operand that fits the 12-bit signed immediate becomes SLTIU, and
// x < x is always false.
// Generated from _gen/RISCV64.rules.
func rewriteValueRISCV64_OpRISCV64SLTU(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SLTU x (MOVDconst [val]))
	// cond: val >= -2048 && val <= 2047
	// result: (SLTIU [val] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		if !(val >= -2048 && val <= 2047) {
			break
		}
		v.reset(OpRISCV64SLTIU)
		v.AuxInt = int64ToAuxInt(val)
		v.AddArg(x)
		return true
	}
	// match: (SLTU x x)
	// result: (MOVDconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SNEZ simplifies set-if-not-zero: negation does
// not change zero-ness, and stacked zero tests collapse to one.
// Generated from _gen/RISCV64.rules.
func rewriteValueRISCV64_OpRISCV64SNEZ(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SNEZ (NEG x))
	// result: (SNEZ x)
	for {
		if v_0.Op != OpRISCV64NEG {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64SNEZ)
		v.AddArg(x)
		return true
	}
	// match: (SNEZ (SEQZ x))
	// result: (SEQZ x)
	for {
		if v_0.Op != OpRISCV64SEQZ {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64SEQZ)
		v.AddArg(x)
		return true
	}
	// match: (SNEZ (SNEZ x))
	// result: (SNEZ x)
	for {
		if v_0.Op != OpRISCV64SNEZ {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64SNEZ)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRA lowers a 64-bit arithmetic right shift by
// a constant to the immediate form SRAI, masking the amount to 6 bits.
// Generated from _gen/RISCV64.rules.
func rewriteValueRISCV64_OpRISCV64SRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRA x (MOVDconst [val]))
	// result: (SRAI [int64(val&63)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(int64(val & 63))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRAI optimizes immediate arithmetic right
// shifts: shifts of sign-extended sub-word values either use the 32-bit SRAIW
// form, reduce to extracting the sign bit (shift amount >= operand width), or
// fold entirely when the operand is a constant.
// Generated from _gen/RISCV64.rules.
func rewriteValueRISCV64_OpRISCV64SRAI(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (SRAI <t> [x] (MOVWreg y))
	// cond: x >= 0 && x <= 31
	// result: (SRAIW <t> [int64(x)] y)
	for {
		t := v.Type
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVWreg {
			break
		}
		y := v_0.Args[0]
		if !(x >= 0 && x <= 31) {
			break
		}
		v.reset(OpRISCV64SRAIW)
		v.Type = t
		v.AuxInt = int64ToAuxInt(int64(x))
		v.AddArg(y)
		return true
	}
	// match: (SRAI <t> [x] (MOVBreg y))
	// cond: x >= 8
	// result: (SRAI [63] (SLLI <t> [56] y))
	for {
		t := v.Type
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVBreg {
			break
		}
		y := v_0.Args[0]
		if !(x >= 8) {
			break
		}
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(63)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
		v0.AuxInt = int64ToAuxInt(56)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (SRAI <t> [x] (MOVHreg y))
	// cond: x >= 16
	// result: (SRAI [63] (SLLI <t> [48] y))
	for {
		t := v.Type
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVHreg {
			break
		}
		y := v_0.Args[0]
		if !(x >= 16) {
			break
		}
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(63)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
		v0.AuxInt = int64ToAuxInt(48)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (SRAI <t> [x] (MOVWreg y))
	// cond: x >= 32
	// result: (SRAIW [31] y)
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVWreg {
			break
		}
		y := v_0.Args[0]
		if !(x >= 32) {
			break
		}
		v.reset(OpRISCV64SRAIW)
		v.AuxInt = int64ToAuxInt(31)
		v.AddArg(y)
		return true
	}
	// match: (SRAI [x] (MOVDconst [y]))
	// result: (MOVDconst [int64(y) >> uint32(x)])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(y) >> uint32(x))
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRAW lowers a 32-bit arithmetic right shift by
// a constant to the immediate form SRAIW, masking the amount to 5 bits.
// Generated from _gen/RISCV64.rules.
func rewriteValueRISCV64_OpRISCV64SRAW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRAW x (MOVDconst [val]))
	// result: (SRAIW [int64(val&31)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64SRAIW)
		v.AuxInt = int64ToAuxInt(int64(val & 31))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRL lowers a 64-bit logical right shift by a
// constant to the immediate form SRLI, masking the amount to 6 bits.
// Generated from _gen/RISCV64.rules.
func rewriteValueRISCV64_OpRISCV64SRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRL x (MOVDconst [val]))
	// result: (SRLI [int64(val&63)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64SRLI)
		v.AuxInt = int64ToAuxInt(int64(val & 63))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRLI optimizes immediate logical right shifts:
// shifts of zero-extended sub-word values either use the 32-bit SRLIW form or
// become the constant zero when the shift discards all data bits, and shifts
// of constants fold entirely.
// Generated from _gen/RISCV64.rules.
func rewriteValueRISCV64_OpRISCV64SRLI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SRLI <t> [x] (MOVWUreg y))
	// cond: x >= 0 && x <= 31
	// result: (SRLIW <t> [int64(x)] y)
	for {
		t := v.Type
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVWUreg {
			break
		}
		y := v_0.Args[0]
		if !(x >= 0 && x <= 31) {
			break
		}
		v.reset(OpRISCV64SRLIW)
		v.Type = t
		v.AuxInt = int64ToAuxInt(int64(x))
		v.AddArg(y)
		return true
	}
	// match: (SRLI <t> [x] (MOVBUreg y))
	// cond: x >= 8
	// result: (MOVDconst <t> [0])
	for {
		t := v.Type
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVBUreg {
			break
		}
		if !(x >= 8) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.Type = t
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLI <t> [x] (MOVHUreg y))
	// cond: x >= 16
	// result: (MOVDconst <t> [0])
	for {
		t := v.Type
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVHUreg {
			break
		}
		if !(x >= 16) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.Type = t
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLI <t> [x] (MOVWUreg y))
	// cond: x >= 32
	// result: (MOVDconst <t> [0])
	for {
		t := v.Type
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVWUreg {
			break
		}
		if !(x >= 32) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.Type = t
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLI [x] (MOVDconst [y]))
	// result: (MOVDconst [int64(uint64(y) >> uint32(x))])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(y) >> uint32(x)))
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRLW lowers a 32-bit logical right shift by a
// constant to the immediate form SRLIW, masking the amount to 5 bits.
// Generated from _gen/RISCV64.rules.
func rewriteValueRISCV64_OpRISCV64SRLW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRLW x (MOVDconst [val]))
	// result: (SRLIW [int64(val&31)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64SRLIW)
		v.AuxInt = int64ToAuxInt(int64(val & 31))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SUB simplifies 64-bit subtraction: a constant
// operand becomes add-immediate of the negated constant (RISC-V has no
// subtract-immediate), and subtracting from or by zero reduces to NEG or the
// operand itself.
// Generated from _gen/RISCV64.rules.
func rewriteValueRISCV64_OpRISCV64SUB(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (SUB x (MOVDconst [val]))
	// cond: is32Bit(-val)
	// result: (ADDI [-val] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		if !(is32Bit(-val)) {
			break
		}
		v.reset(OpRISCV64ADDI)
		v.AuxInt = int64ToAuxInt(-val)
		v.AddArg(x)
		return true
	}
	// match: (SUB <t> (MOVDconst [val]) y)
	// cond: is32Bit(-val)
	// result: (NEG (ADDI <t> [-val] y))
	for {
		t := v.Type
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_0.AuxInt)
		y := v_1
		if !(is32Bit(-val)) {
			break
		}
		v.reset(OpRISCV64NEG)
		v0 := b.NewValue0(v.Pos, OpRISCV64ADDI, t)
		v0.AuxInt = int64ToAuxInt(-val)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (SUB x (MOVDconst [0]))
	// result: x
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (SUB (MOVDconst [0]) x)
	// result: (NEG x)
	for {
		if v_0.Op != OpRISCV64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpRISCV64NEG)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SUBW simplifies 32-bit subtraction with a zero
// operand: x-0 becomes ADDIW 0 (which still sign-extends the low word), and
// 0-x becomes NEGW.
// Generated from _gen/RISCV64.rules.
func rewriteValueRISCV64_OpRISCV64SUBW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SUBW x (MOVDconst [0]))
	// result: (ADDIW [0] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		v.reset(OpRISCV64ADDIW)
		v.AuxInt = int64ToAuxInt(0)
		v.AddArg(x)
		return true
	}
	// match: (SUBW (MOVDconst [0]) x)
	// result: (NEGW x)
	for {
		if v_0.Op != OpRISCV64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpRISCV64NEGW)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64XOR lowers XOR with a 32-bit constant operand
// to the immediate form XORI; the inner loop tries both argument orders since
// XOR is commutative.
// Generated from _gen/RISCV64.rules.
func rewriteValueRISCV64_OpRISCV64XOR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (XOR (MOVDconst [val]) x)
	// cond: is32Bit(val)
	// result: (XORI [val] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64MOVDconst {
				continue
			}
			val := auxIntToInt64(v_0.AuxInt)
			x := v_1
			if !(is32Bit(val)) {
				continue
			}
			v.reset(OpRISCV64XORI)
			v.AuxInt = int64ToAuxInt(val)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRotateLeft16 lowers a 16-bit rotate-left, which has
// no native instruction, as (x << (y&15)) | (zext16(x) >> (-y&15)).
// Generated from _gen/RISCV64.rules.
func rewriteValueRISCV64_OpRotateLeft16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft16 <t> x y)
	// result: (OR (SLL <t> x (ANDI [15] <y.Type> y)) (SRL <t> (ZeroExt16to64 x) (ANDI [15] <y.Type> (NEG <y.Type> y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64OR)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v1 := b.NewValue0(v.Pos, OpRISCV64ANDI, y.Type)
		v1.AuxInt = int64ToAuxInt(15)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(x)
		v4 := b.NewValue0(v.Pos, OpRISCV64ANDI, y.Type)
		v4.AuxInt = int64ToAuxInt(15)
		v5 := b.NewValue0(v.Pos, OpRISCV64NEG, y.Type)
		v5.AddArg(y)
		v4.AddArg(v5)
		v2.AddArg2(v3, v4)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueRISCV64_OpRotateLeft8 lowers an 8-bit rotate-left, which has no
// native instruction, as (x << (y&7)) | (zext8(x) >> (-y&7)).
// Generated from _gen/RISCV64.rules.
func rewriteValueRISCV64_OpRotateLeft8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft8 <t> x y)
	// result: (OR (SLL <t> x (ANDI [7] <y.Type> y)) (SRL <t> (ZeroExt8to64 x) (ANDI [7] <y.Type> (NEG <y.Type> y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64OR)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v1 := b.NewValue0(v.Pos, OpRISCV64ANDI, y.Type)
		v1.AuxInt = int64ToAuxInt(7)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(x)
		v4 := b.NewValue0(v.Pos, OpRISCV64ANDI, y.Type)
		v4.AuxInt = int64ToAuxInt(7)
		v5 := b.NewValue0(v.Pos, OpRISCV64NEG, y.Type)
		v5.AddArg(y)
		v4.AddArg(v5)
		v2.AddArg2(v3, v4)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueRISCV64_OpRsh16Ux16 lowers an unsigned 16-bit right shift with a
// 16-bit shift count. Unbounded shifts mask the result to zero when the count
// is >= 64 (Go semantics); bounded shifts emit a bare SRL.
// Generated from _gen/RISCV64.rules.
func rewriteValueRISCV64_OpRsh16Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg16, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh16Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16Ux32 lowers an unsigned 16-bit right shift with a
// 32-bit shift count. Unbounded shifts mask the result to zero when the count
// is >= 64 (Go semantics); bounded shifts emit a bare SRL.
// Generated from _gen/RISCV64.rules.
func rewriteValueRISCV64_OpRsh16Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg16, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh16Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16Ux64 lowers an unsigned 16-bit right shift with a
// 64-bit shift count. Unbounded shifts mask the result to zero when the count
// is >= 64 (Go semantics); bounded shifts emit a bare SRL.
// Generated from _gen/RISCV64.rules.
func rewriteValueRISCV64_OpRsh16Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg16, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh16Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16Ux8 lowers an unsigned 16-bit right shift with an
// 8-bit shift count. Unbounded shifts mask the result to zero when the count
// is >= 64 (Go semantics); bounded shifts emit a bare SRL.
// Generated from _gen/RISCV64.rules.
func rewriteValueRISCV64_OpRsh16Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg16, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh16Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16x16 lowers a signed 16-bit right shift with a
// 16-bit shift count. For unbounded shifts the count is forced to all-ones
// when it is >= 64, so the shift saturates to the sign bit (Go semantics);
// bounded shifts emit a bare SRA.
// Generated from _gen/RISCV64.rules.
func rewriteValueRISCV64_OpRsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh16x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16x32 lowers a signed 16-bit right shift with a
// 32-bit shift count. For unbounded shifts the count is forced to all-ones
// when it is >= 64, so the shift saturates to the sign bit (Go semantics);
// bounded shifts emit a bare SRA.
// Generated from _gen/RISCV64.rules.
func rewriteValueRISCV64_OpRsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh16x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16x64 lowers a signed 16-bit right shift with a
// 64-bit shift count. For unbounded shifts the count is forced to all-ones
// when it is >= 64, so the shift saturates to the sign bit (Go semantics);
// bounded shifts emit a bare SRA.
// Generated from _gen/RISCV64.rules.
func rewriteValueRISCV64_OpRsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh16x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16x8 lowers a signed 16-bit right shift with an
// 8-bit shift count. For unbounded shifts the count is forced to all-ones
// when it is >= 64, so the shift saturates to the sign bit (Go semantics);
// bounded shifts emit a bare SRA.
// Generated from _gen/RISCV64.rules.
func rewriteValueRISCV64_OpRsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh16x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32Ux16 lowers an unsigned 32-bit right shift with a
// 16-bit shift count using the word-width SRLW. Unbounded shifts mask the
// result to zero when the count is >= 32 (Go semantics); bounded shifts emit
// a bare SRLW.
// Generated from _gen/RISCV64.rules.
func rewriteValueRISCV64_OpRsh32Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRLW <t> x y) (Neg32 <t> (SLTIU <t> [32] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRLW, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh32Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRLW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32Ux32 lowers the generic Rsh32Ux32 op: a 32-bit
// unsigned right shift whose shift amount is a 32-bit value. Generated from
// _gen/RISCV64.rules; do not edit by hand.
func rewriteValueRISCV64_OpRsh32Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRLW <t> x y) (Neg32 <t> (SLTIU <t> [32] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRLW, t)
		v0.AddArg2(x, y)
		// Neg32(SLTIU [32] y') is all-ones when y < 32 and zero otherwise,
		// masking the SRLW result to 0 for out-of-range shift amounts.
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh32Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRLW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32Ux64 lowers the generic Rsh32Ux64 op: a 32-bit
// unsigned right shift whose shift amount is a 64-bit value (used directly,
// no extension needed). Generated from _gen/RISCV64.rules; do not edit by hand.
func rewriteValueRISCV64_OpRsh32Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh32Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRLW <t> x y) (Neg32 <t> (SLTIU <t> [32] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRLW, t)
		v0.AddArg2(x, y)
		// Neg32(SLTIU [32] y) is all-ones when y < 32 and zero otherwise,
		// masking the SRLW result to 0 for out-of-range shift amounts.
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(32)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh32Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRLW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32Ux8 lowers the generic Rsh32Ux8 op: a 32-bit
// unsigned right shift whose shift amount is an 8-bit value. Generated from
// _gen/RISCV64.rules; do not edit by hand.
func rewriteValueRISCV64_OpRsh32Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRLW <t> x y) (Neg32 <t> (SLTIU <t> [32] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRLW, t)
		v0.AddArg2(x, y)
		// Neg32(SLTIU [32] (ZeroExt8to64 y)) is all-ones when y < 32 and zero
		// otherwise, masking the SRLW result to 0 for out-of-range shifts.
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh32Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRLW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32x16 lowers the generic Rsh32x16 op: a 32-bit
// signed (arithmetic) right shift whose shift amount is a 16-bit value.
// Generated from _gen/RISCV64.rules; do not edit by hand.
func rewriteValueRISCV64_OpRsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAW <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [32] (ZeroExt16to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.Type = t
		// When y >= 32 the SLTIU is 0, the ADDI yields -1, and the OR
		// saturates the shift amount to all ones, so SRAW produces only
		// copies of the sign bit — matching Go's signed-shift semantics.
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh32x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32x32 lowers the generic Rsh32x32 op: a 32-bit
// signed (arithmetic) right shift whose shift amount is a 32-bit value.
// Generated from _gen/RISCV64.rules; do not edit by hand.
func rewriteValueRISCV64_OpRsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAW <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [32] (ZeroExt32to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.Type = t
		// OR-ing y with -1 (when y >= 32) saturates the shift amount to all
		// ones, so SRAW produces only copies of the sign bit.
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh32x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32x64 lowers the generic Rsh32x64 op: a 32-bit
// signed (arithmetic) right shift whose shift amount is a 64-bit value (used
// directly, no extension needed). Generated from _gen/RISCV64.rules; do not
// edit by hand.
func rewriteValueRISCV64_OpRsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh32x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAW <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [32] y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.Type = t
		// OR-ing y with -1 (when y >= 32) saturates the shift amount to all
		// ones, so SRAW produces only copies of the sign bit.
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(32)
		v2.AddArg(y)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh32x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32x8 lowers the generic Rsh32x8 op: a 32-bit
// signed (arithmetic) right shift whose shift amount is an 8-bit value.
// Generated from _gen/RISCV64.rules; do not edit by hand.
func rewriteValueRISCV64_OpRsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAW <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [32] (ZeroExt8to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.Type = t
		// OR-ing y with -1 (when y >= 32) saturates the shift amount to all
		// ones, so SRAW produces only copies of the sign bit.
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh32x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64Ux16 lowers the generic Rsh64Ux16 op: a 64-bit
// unsigned right shift whose shift amount is a 16-bit value. Generated from
// _gen/RISCV64.rules; do not edit by hand.
func rewriteValueRISCV64_OpRsh64Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v0.AddArg2(x, y)
		// Neg64(SLTIU [64] (ZeroExt16to64 y)) is all-ones when y < 64 and
		// zero otherwise, masking the SRL result to 0 for oversized shifts.
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh64Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64Ux32 lowers the generic Rsh64Ux32 op: a 64-bit
// unsigned right shift whose shift amount is a 32-bit value. Generated from
// _gen/RISCV64.rules; do not edit by hand.
func rewriteValueRISCV64_OpRsh64Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v0.AddArg2(x, y)
		// Neg64(SLTIU [64] (ZeroExt32to64 y)) is all-ones when y < 64 and
		// zero otherwise, masking the SRL result to 0 for oversized shifts.
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh64Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64Ux64 lowers the generic Rsh64Ux64 op: a 64-bit
// unsigned right shift whose shift amount is a 64-bit value (used directly,
// no extension needed). Generated from _gen/RISCV64.rules; do not edit by hand.
func rewriteValueRISCV64_OpRsh64Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh64Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v0.AddArg2(x, y)
		// Neg64(SLTIU [64] y) is all-ones when y < 64 and zero otherwise,
		// masking the SRL result to 0 for oversized shifts.
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh64Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64Ux8 lowers the generic Rsh64Ux8 op: a 64-bit
// unsigned right shift whose shift amount is an 8-bit value. Generated from
// _gen/RISCV64.rules; do not edit by hand.
func rewriteValueRISCV64_OpRsh64Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v0.AddArg2(x, y)
		// Neg64(SLTIU [64] (ZeroExt8to64 y)) is all-ones when y < 64 and
		// zero otherwise, masking the SRL result to 0 for oversized shifts.
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh64Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64x16 lowers the generic Rsh64x16 op: a 64-bit
// signed (arithmetic) right shift whose shift amount is a 16-bit value.
// Generated from _gen/RISCV64.rules; do not edit by hand.
func rewriteValueRISCV64_OpRsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		// When y >= 64 the SLTIU is 0, the ADDI yields -1, and the OR
		// saturates the shift amount to all ones, so SRA produces only
		// copies of the sign bit — matching Go's signed-shift semantics.
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh64x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64x32 lowers the generic Rsh64x32 op: a 64-bit
// signed (arithmetic) right shift whose shift amount is a 32-bit value.
// Generated from _gen/RISCV64.rules; do not edit by hand.
func rewriteValueRISCV64_OpRsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		// OR-ing y with -1 (when y >= 64) saturates the shift amount to all
		// ones, so SRA produces only copies of the sign bit.
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh64x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64x64 lowers the generic Rsh64x64 op: a 64-bit
// signed (arithmetic) right shift whose shift amount is a 64-bit value (used
// directly, no extension needed). Generated from _gen/RISCV64.rules; do not
// edit by hand.
func rewriteValueRISCV64_OpRsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh64x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		// OR-ing y with -1 (when y >= 64) saturates the shift amount to all
		// ones, so SRA produces only copies of the sign bit.
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh64x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64x8 lowers the generic Rsh64x8 op: a 64-bit
// signed (arithmetic) right shift whose shift amount is an 8-bit value.
// Generated from _gen/RISCV64.rules; do not edit by hand.
func rewriteValueRISCV64_OpRsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		// OR-ing y with -1 (when y >= 64) saturates the shift amount to all
		// ones, so SRA produces only copies of the sign bit.
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh64x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8Ux16 lowers the generic Rsh8Ux16 op: an 8-bit
// unsigned right shift whose shift amount is a 16-bit value. The 8-bit
// operand is zero-extended to 64 bits so a full-width SRL can be used.
// Generated from _gen/RISCV64.rules; do not edit by hand.
func rewriteValueRISCV64_OpRsh8Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		// Neg8(SLTIU [64] (ZeroExt16to64 y)) is all-ones when y < 64 and
		// zero otherwise, masking the SRL result to 0 for oversized shifts.
		v2 := b.NewValue0(v.Pos, OpNeg8, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh8Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8Ux32 lowers the generic Rsh8Ux32 op: an 8-bit
// unsigned right shift whose shift amount is a 32-bit value. The 8-bit
// operand is zero-extended to 64 bits so a full-width SRL can be used.
// Generated from _gen/RISCV64.rules; do not edit by hand.
func rewriteValueRISCV64_OpRsh8Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		// Neg8(SLTIU [64] (ZeroExt32to64 y)) is all-ones when y < 64 and
		// zero otherwise, masking the SRL result to 0 for oversized shifts.
		v2 := b.NewValue0(v.Pos, OpNeg8, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh8Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8Ux64 lowers the generic Rsh8Ux64 op: an 8-bit
// unsigned right shift whose shift amount is a 64-bit value (used directly).
// The 8-bit operand is zero-extended to 64 bits so a full-width SRL can be
// used. Generated from _gen/RISCV64.rules; do not edit by hand.
func rewriteValueRISCV64_OpRsh8Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		// Neg8(SLTIU [64] y) is all-ones when y < 64 and zero otherwise,
		// masking the SRL result to 0 for oversized shifts.
		v2 := b.NewValue0(v.Pos, OpNeg8, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh8Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8Ux8 lowers the generic Rsh8Ux8 op: an 8-bit
// unsigned right shift whose shift amount is an 8-bit value. The 8-bit
// operand is zero-extended to 64 bits so a full-width SRL can be used.
// Generated from _gen/RISCV64.rules; do not edit by hand.
func rewriteValueRISCV64_OpRsh8Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		// Neg8(SLTIU [64] (ZeroExt8to64 y)) is all-ones when y < 64 and
		// zero otherwise, masking the SRL result to 0 for oversized shifts.
		v2 := b.NewValue0(v.Pos, OpNeg8, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh8Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8x16 lowers the generic Rsh8x16 op: an 8-bit
// signed (arithmetic) right shift whose shift amount is a 16-bit value. The
// 8-bit operand is sign-extended to 64 bits so a full-width SRA can be used.
// Generated from _gen/RISCV64.rules; do not edit by hand.
func rewriteValueRISCV64_OpRsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		// When y >= 64 the SLTIU is 0, the ADDI yields -1, and the OR
		// saturates the shift amount to all ones, so SRA produces only
		// copies of the sign bit — matching Go's signed-shift semantics.
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh8x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8x32 lowers the generic Rsh8x32 op: an 8-bit
// signed (arithmetic) right shift whose shift amount is a 32-bit value. The
// 8-bit operand is sign-extended to 64 bits so a full-width SRA can be used.
// Generated from _gen/RISCV64.rules; do not edit by hand.
func rewriteValueRISCV64_OpRsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		// OR-ing y with -1 (when y >= 64) saturates the shift amount to all
		// ones, so SRA produces only copies of the sign bit.
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh8x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8x64 lowers the generic Rsh8x64 op: an 8-bit
// signed (arithmetic) right shift whose shift amount is a 64-bit value (used
// directly). The 8-bit operand is sign-extended to 64 bits so a full-width
// SRA can be used. Generated from _gen/RISCV64.rules; do not edit by hand.
func rewriteValueRISCV64_OpRsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		// OR-ing y with -1 (when y >= 64) saturates the shift amount to all
		// ones, so SRA produces only copies of the sign bit.
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh8x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8x8 lowers the generic Rsh8x8 op: an 8-bit signed
// (arithmetic) right shift whose shift amount is an 8-bit value. The 8-bit
// operand is sign-extended to 64 bits so a full-width SRA can be used.
// Generated from _gen/RISCV64.rules; do not edit by hand.
func rewriteValueRISCV64_OpRsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		// OR-ing y with -1 (when y >= 64) saturates the shift amount to all
		// ones, so SRA produces only copies of the sign bit.
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh8x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpSelect0 lowers Select0 (the first result of a
// multi-result op): the sum of Add64carry, the difference of Sub64borrow,
// and — when the tuple has a single use — the high word of LoweredMuluhilo.
// Generated from _gen/RISCV64.rules; do not edit by hand.
func rewriteValueRISCV64_OpSelect0(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Select0 (Add64carry x y c))
	// result: (ADD (ADD <typ.UInt64> x y) c)
	for {
		if v_0.Op != OpAdd64carry {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpRISCV64ADD)
		v0 := b.NewValue0(v.Pos, OpRISCV64ADD, typ.UInt64)
		v0.AddArg2(x, y)
		v.AddArg2(v0, c)
		return true
	}
	// match: (Select0 (Sub64borrow x y c))
	// result: (SUB (SUB <typ.UInt64> x y) c)
	for {
		if v_0.Op != OpSub64borrow {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpRISCV64SUB)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.UInt64)
		v0.AddArg2(x, y)
		v.AddArg2(v0, c)
		return true
	}
	// match: (Select0 m:(LoweredMuluhilo x y))
	// cond: m.Uses == 1
	// result: (MULHU x y)
	for {
		m := v_0
		if m.Op != OpRISCV64LoweredMuluhilo {
			break
		}
		y := m.Args[1]
		x := m.Args[0]
		// Only safe when this Select0 is the tuple's sole use; otherwise the
		// LoweredMuluhilo must be kept for the other result.
		if !(m.Uses == 1) {
			break
		}
		v.reset(OpRISCV64MULHU)
		v.AddArg2(x, y)
		return true
	}
	return false
}
  8504  func rewriteValueRISCV64_OpSelect1(v *Value) bool {
  8505  	v_0 := v.Args[0]
  8506  	b := v.Block
  8507  	typ := &b.Func.Config.Types
  8508  	// match: (Select1 (Add64carry x y c))
  8509  	// result: (OR (SLTU <typ.UInt64> s:(ADD <typ.UInt64> x y) x) (SLTU <typ.UInt64> (ADD <typ.UInt64> s c) s))
  8510  	for {
  8511  		if v_0.Op != OpAdd64carry {
  8512  			break
  8513  		}
  8514  		c := v_0.Args[2]
  8515  		x := v_0.Args[0]
  8516  		y := v_0.Args[1]
  8517  		v.reset(OpRISCV64OR)
  8518  		v0 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
  8519  		s := b.NewValue0(v.Pos, OpRISCV64ADD, typ.UInt64)
  8520  		s.AddArg2(x, y)
  8521  		v0.AddArg2(s, x)
  8522  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
  8523  		v3 := b.NewValue0(v.Pos, OpRISCV64ADD, typ.UInt64)
  8524  		v3.AddArg2(s, c)
  8525  		v2.AddArg2(v3, s)
  8526  		v.AddArg2(v0, v2)
  8527  		return true
  8528  	}
  8529  	// match: (Select1 (Sub64borrow x y c))
  8530  	// result: (OR (SLTU <typ.UInt64> x s:(SUB <typ.UInt64> x y)) (SLTU <typ.UInt64> s (SUB <typ.UInt64> s c)))
  8531  	for {
  8532  		if v_0.Op != OpSub64borrow {
  8533  			break
  8534  		}
  8535  		c := v_0.Args[2]
  8536  		x := v_0.Args[0]
  8537  		y := v_0.Args[1]
  8538  		v.reset(OpRISCV64OR)
  8539  		v0 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
  8540  		s := b.NewValue0(v.Pos, OpRISCV64SUB, typ.UInt64)
  8541  		s.AddArg2(x, y)
  8542  		v0.AddArg2(x, s)
  8543  		v2 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
  8544  		v3 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.UInt64)
  8545  		v3.AddArg2(s, c)
  8546  		v2.AddArg2(s, v3)
  8547  		v.AddArg2(v0, v2)
  8548  		return true
  8549  	}
  8550  	// match: (Select1 m:(LoweredMuluhilo x y))
  8551  	// cond: m.Uses == 1
  8552  	// result: (MUL x y)
  8553  	for {
  8554  		m := v_0
  8555  		if m.Op != OpRISCV64LoweredMuluhilo {
  8556  			break
  8557  		}
  8558  		y := m.Args[1]
  8559  		x := m.Args[0]
  8560  		if !(m.Uses == 1) {
  8561  			break
  8562  		}
  8563  		v.reset(OpRISCV64MUL)
  8564  		v.AddArg2(x, y)
  8565  		return true
  8566  	}
  8567  	return false
  8568  }
  8569  func rewriteValueRISCV64_OpSlicemask(v *Value) bool {
  8570  	v_0 := v.Args[0]
  8571  	b := v.Block
  8572  	// match: (Slicemask <t> x)
  8573  	// result: (SRAI [63] (NEG <t> x))
  8574  	for {
  8575  		t := v.Type
  8576  		x := v_0
  8577  		v.reset(OpRISCV64SRAI)
  8578  		v.AuxInt = int64ToAuxInt(63)
  8579  		v0 := b.NewValue0(v.Pos, OpRISCV64NEG, t)
  8580  		v0.AddArg(x)
  8581  		v.AddArg(v0)
  8582  		return true
  8583  	}
  8584  }
  8585  func rewriteValueRISCV64_OpStore(v *Value) bool {
  8586  	v_2 := v.Args[2]
  8587  	v_1 := v.Args[1]
  8588  	v_0 := v.Args[0]
  8589  	// match: (Store {t} ptr val mem)
  8590  	// cond: t.Size() == 1
  8591  	// result: (MOVBstore ptr val mem)
  8592  	for {
  8593  		t := auxToType(v.Aux)
  8594  		ptr := v_0
  8595  		val := v_1
  8596  		mem := v_2
  8597  		if !(t.Size() == 1) {
  8598  			break
  8599  		}
  8600  		v.reset(OpRISCV64MOVBstore)
  8601  		v.AddArg3(ptr, val, mem)
  8602  		return true
  8603  	}
  8604  	// match: (Store {t} ptr val mem)
  8605  	// cond: t.Size() == 2
  8606  	// result: (MOVHstore ptr val mem)
  8607  	for {
  8608  		t := auxToType(v.Aux)
  8609  		ptr := v_0
  8610  		val := v_1
  8611  		mem := v_2
  8612  		if !(t.Size() == 2) {
  8613  			break
  8614  		}
  8615  		v.reset(OpRISCV64MOVHstore)
  8616  		v.AddArg3(ptr, val, mem)
  8617  		return true
  8618  	}
  8619  	// match: (Store {t} ptr val mem)
  8620  	// cond: t.Size() == 4 && !t.IsFloat()
  8621  	// result: (MOVWstore ptr val mem)
  8622  	for {
  8623  		t := auxToType(v.Aux)
  8624  		ptr := v_0
  8625  		val := v_1
  8626  		mem := v_2
  8627  		if !(t.Size() == 4 && !t.IsFloat()) {
  8628  			break
  8629  		}
  8630  		v.reset(OpRISCV64MOVWstore)
  8631  		v.AddArg3(ptr, val, mem)
  8632  		return true
  8633  	}
  8634  	// match: (Store {t} ptr val mem)
  8635  	// cond: t.Size() == 8 && !t.IsFloat()
  8636  	// result: (MOVDstore ptr val mem)
  8637  	for {
  8638  		t := auxToType(v.Aux)
  8639  		ptr := v_0
  8640  		val := v_1
  8641  		mem := v_2
  8642  		if !(t.Size() == 8 && !t.IsFloat()) {
  8643  			break
  8644  		}
  8645  		v.reset(OpRISCV64MOVDstore)
  8646  		v.AddArg3(ptr, val, mem)
  8647  		return true
  8648  	}
  8649  	// match: (Store {t} ptr val mem)
  8650  	// cond: t.Size() == 4 && t.IsFloat()
  8651  	// result: (FMOVWstore ptr val mem)
  8652  	for {
  8653  		t := auxToType(v.Aux)
  8654  		ptr := v_0
  8655  		val := v_1
  8656  		mem := v_2
  8657  		if !(t.Size() == 4 && t.IsFloat()) {
  8658  			break
  8659  		}
  8660  		v.reset(OpRISCV64FMOVWstore)
  8661  		v.AddArg3(ptr, val, mem)
  8662  		return true
  8663  	}
  8664  	// match: (Store {t} ptr val mem)
  8665  	// cond: t.Size() == 8 && t.IsFloat()
  8666  	// result: (FMOVDstore ptr val mem)
  8667  	for {
  8668  		t := auxToType(v.Aux)
  8669  		ptr := v_0
  8670  		val := v_1
  8671  		mem := v_2
  8672  		if !(t.Size() == 8 && t.IsFloat()) {
  8673  			break
  8674  		}
  8675  		v.reset(OpRISCV64FMOVDstore)
  8676  		v.AddArg3(ptr, val, mem)
  8677  		return true
  8678  	}
  8679  	return false
  8680  }
// rewriteValueRISCV64_OpZero lowers the generic Zero [size] {type} ptr mem
// op. Small fixed sizes expand into explicit zero-store sequences whose
// width is chosen from the type's alignment; textual rule order is the
// priority order, so wider aligned stores are tried before narrower
// fallbacks. Medium 8-byte-aligned sizes use Duff's device, and
// everything else falls back to the LoweredZero loop. The final rule
// always matches, so this function always rewrites and returns true.
func rewriteValueRISCV64_OpZero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (Zero [0] _ mem)
	// result: mem
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		mem := v_1
		v.copyOf(mem)
		return true
	}
	// match: (Zero [1] ptr mem)
	// result: (MOVBstore ptr (MOVDconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 1 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpRISCV64MOVBstore)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [2] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore ptr (MOVDconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [2] ptr mem)
	// result: (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(1)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [4] {t} ptr mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore ptr (MOVDconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [4] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [4] ptr mem)
	// result: (MOVBstore [3] ptr (MOVDconst [0]) (MOVBstore [2] ptr (MOVDconst [0]) (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(3)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(1)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v3.AddArg3(ptr, v0, mem)
		v2.AddArg3(ptr, v0, v3)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVDstore ptr (MOVDconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [6] ptr (MOVDconst [0]) (MOVHstore [4] ptr (MOVDconst [0]) (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(6)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(4)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(2)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v3.AddArg3(ptr, v0, mem)
		v2.AddArg3(ptr, v0, v3)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [3] ptr mem)
	// result: (MOVBstore [2] ptr (MOVDconst [0]) (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 3 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(1)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v2.AddArg3(ptr, v0, mem)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [6] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [4] ptr (MOVDconst [0]) (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 6 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v2.AddArg3(ptr, v0, mem)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [12] {t} ptr mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [8] ptr (MOVDconst [0]) (MOVWstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 12 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(4)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
		v2.AddArg3(ptr, v0, mem)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [16] {t} ptr mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 16 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [24] {t} ptr mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVDstore [16] ptr (MOVDconst [0]) (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 24 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AuxInt = int32ToAuxInt(16)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(8)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v2.AddArg3(ptr, v0, mem)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [32] {t} ptr mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVDstore [24] ptr (MOVDconst [0]) (MOVDstore [16] ptr (MOVDconst [0]) (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 32 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AuxInt = int32ToAuxInt(24)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(16)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(8)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v3.AddArg3(ptr, v0, mem)
		v2.AddArg3(ptr, v0, v3)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [s] {t} ptr mem)
	// cond: s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice
	// result: (DUFFZERO [8 * (128 - s/8)] ptr mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice) {
			break
		}
		v.reset(OpRISCV64DUFFZERO)
		// AuxInt is the entry offset into the Duff's device routine:
		// 128 unrolled 8-byte steps, so skip the steps not needed for s.
		v.AuxInt = int64ToAuxInt(8 * (128 - s/8))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Zero [s] {t} ptr mem)
	// result: (LoweredZero [t.Alignment()] ptr (ADD <ptr.Type> ptr (MOVDconst [s-moveSize(t.Alignment(), config)])) mem)
	// Generic fallback (always matches): a zeroing loop whose second
	// argument is the address of the last element to be cleared.
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		v.reset(OpRISCV64LoweredZero)
		v.AuxInt = int64ToAuxInt(t.Alignment())
		v0 := b.NewValue0(v.Pos, OpRISCV64ADD, ptr.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
		v0.AddArg2(ptr, v1)
		v.AddArg3(ptr, v0, mem)
		return true
	}
}
// rewriteBlockRISCV64 rewrites block control flow for RISCV64:
// two-operand branches against constant zero become the fused
// zero-branch forms (BEQZ/BNEZ/BLEZ/BGEZ/BGTZ/BLTZ), zero-branches
// whose control is a comparison pseudo-op (SEQZ/SNEZ/SLT/SLTU/...)
// are folded into direct two-operand conditional branches, and generic
// If blocks are lowered to BNEZ on the boolean's zero-extension.
// Reports whether any rewrite was applied.
func rewriteBlockRISCV64(b *Block) bool {
	typ := &b.Func.Config.Types
	switch b.Kind {
	case BlockRISCV64BEQ:
		// match: (BEQ (MOVDconst [0]) cond yes no)
		// result: (BEQZ cond yes no)
		for b.Controls[0].Op == OpRISCV64MOVDconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			cond := b.Controls[1]
			b.resetWithControl(BlockRISCV64BEQZ, cond)
			return true
		}
		// match: (BEQ cond (MOVDconst [0]) yes no)
		// result: (BEQZ cond yes no)
		for b.Controls[1].Op == OpRISCV64MOVDconst {
			cond := b.Controls[0]
			v_1 := b.Controls[1]
			if auxIntToInt64(v_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockRISCV64BEQZ, cond)
			return true
		}
	case BlockRISCV64BEQZ:
		// match: (BEQZ (SEQZ x) yes no)
		// result: (BNEZ x yes no)
		for b.Controls[0].Op == OpRISCV64SEQZ {
			v_0 := b.Controls[0]
			x := v_0.Args[0]
			b.resetWithControl(BlockRISCV64BNEZ, x)
			return true
		}
		// match: (BEQZ (SNEZ x) yes no)
		// result: (BEQZ x yes no)
		for b.Controls[0].Op == OpRISCV64SNEZ {
			v_0 := b.Controls[0]
			x := v_0.Args[0]
			b.resetWithControl(BlockRISCV64BEQZ, x)
			return true
		}
		// match: (BEQZ (NEG x) yes no)
		// result: (BEQZ x yes no)
		for b.Controls[0].Op == OpRISCV64NEG {
			v_0 := b.Controls[0]
			x := v_0.Args[0]
			b.resetWithControl(BlockRISCV64BEQZ, x)
			return true
		}
		// match: (BEQZ (FNES <t> x y) yes no)
		// result: (BNEZ (FEQS <t> x y) yes no)
		for b.Controls[0].Op == OpRISCV64FNES {
			v_0 := b.Controls[0]
			t := v_0.Type
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			// The _i0 loop tries both argument orders (FNES is commutative).
			for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
				x := v_0_0
				y := v_0_1
				v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQS, t)
				v0.AddArg2(x, y)
				b.resetWithControl(BlockRISCV64BNEZ, v0)
				return true
			}
		}
		// match: (BEQZ (FNED <t> x y) yes no)
		// result: (BNEZ (FEQD <t> x y) yes no)
		for b.Controls[0].Op == OpRISCV64FNED {
			v_0 := b.Controls[0]
			t := v_0.Type
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
				x := v_0_0
				y := v_0_1
				v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQD, t)
				v0.AddArg2(x, y)
				b.resetWithControl(BlockRISCV64BNEZ, v0)
				return true
			}
		}
		// match: (BEQZ (SUB x y) yes no)
		// result: (BEQ x y yes no)
		for b.Controls[0].Op == OpRISCV64SUB {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockRISCV64BEQ, x, y)
			return true
		}
		// match: (BEQZ (SLT x y) yes no)
		// result: (BGE x y yes no)
		for b.Controls[0].Op == OpRISCV64SLT {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockRISCV64BGE, x, y)
			return true
		}
		// match: (BEQZ (SLTU x y) yes no)
		// result: (BGEU x y yes no)
		for b.Controls[0].Op == OpRISCV64SLTU {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockRISCV64BGEU, x, y)
			return true
		}
		// match: (BEQZ (SLTI [x] y) yes no)
		// result: (BGE y (MOVDconst [x]) yes no)
		for b.Controls[0].Op == OpRISCV64SLTI {
			v_0 := b.Controls[0]
			x := auxIntToInt64(v_0.AuxInt)
			y := v_0.Args[0]
			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
			v0.AuxInt = int64ToAuxInt(x)
			b.resetWithControl2(BlockRISCV64BGE, y, v0)
			return true
		}
		// match: (BEQZ (SLTIU [x] y) yes no)
		// result: (BGEU y (MOVDconst [x]) yes no)
		for b.Controls[0].Op == OpRISCV64SLTIU {
			v_0 := b.Controls[0]
			x := auxIntToInt64(v_0.AuxInt)
			y := v_0.Args[0]
			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
			v0.AuxInt = int64ToAuxInt(x)
			b.resetWithControl2(BlockRISCV64BGEU, y, v0)
			return true
		}
	case BlockRISCV64BGE:
		// match: (BGE (MOVDconst [0]) cond yes no)
		// result: (BLEZ cond yes no)
		for b.Controls[0].Op == OpRISCV64MOVDconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			cond := b.Controls[1]
			b.resetWithControl(BlockRISCV64BLEZ, cond)
			return true
		}
		// match: (BGE cond (MOVDconst [0]) yes no)
		// result: (BGEZ cond yes no)
		for b.Controls[1].Op == OpRISCV64MOVDconst {
			cond := b.Controls[0]
			v_1 := b.Controls[1]
			if auxIntToInt64(v_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockRISCV64BGEZ, cond)
			return true
		}
	case BlockRISCV64BLT:
		// match: (BLT (MOVDconst [0]) cond yes no)
		// result: (BGTZ cond yes no)
		for b.Controls[0].Op == OpRISCV64MOVDconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			cond := b.Controls[1]
			b.resetWithControl(BlockRISCV64BGTZ, cond)
			return true
		}
		// match: (BLT cond (MOVDconst [0]) yes no)
		// result: (BLTZ cond yes no)
		for b.Controls[1].Op == OpRISCV64MOVDconst {
			cond := b.Controls[0]
			v_1 := b.Controls[1]
			if auxIntToInt64(v_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockRISCV64BLTZ, cond)
			return true
		}
	case BlockRISCV64BNE:
		// match: (BNE (MOVDconst [0]) cond yes no)
		// result: (BNEZ cond yes no)
		for b.Controls[0].Op == OpRISCV64MOVDconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			cond := b.Controls[1]
			b.resetWithControl(BlockRISCV64BNEZ, cond)
			return true
		}
		// match: (BNE cond (MOVDconst [0]) yes no)
		// result: (BNEZ cond yes no)
		for b.Controls[1].Op == OpRISCV64MOVDconst {
			cond := b.Controls[0]
			v_1 := b.Controls[1]
			if auxIntToInt64(v_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockRISCV64BNEZ, cond)
			return true
		}
	case BlockRISCV64BNEZ:
		// match: (BNEZ (SEQZ x) yes no)
		// result: (BEQZ x yes no)
		for b.Controls[0].Op == OpRISCV64SEQZ {
			v_0 := b.Controls[0]
			x := v_0.Args[0]
			b.resetWithControl(BlockRISCV64BEQZ, x)
			return true
		}
		// match: (BNEZ (SNEZ x) yes no)
		// result: (BNEZ x yes no)
		for b.Controls[0].Op == OpRISCV64SNEZ {
			v_0 := b.Controls[0]
			x := v_0.Args[0]
			b.resetWithControl(BlockRISCV64BNEZ, x)
			return true
		}
		// match: (BNEZ (NEG x) yes no)
		// result: (BNEZ x yes no)
		for b.Controls[0].Op == OpRISCV64NEG {
			v_0 := b.Controls[0]
			x := v_0.Args[0]
			b.resetWithControl(BlockRISCV64BNEZ, x)
			return true
		}
		// match: (BNEZ (FNES <t> x y) yes no)
		// result: (BEQZ (FEQS <t> x y) yes no)
		for b.Controls[0].Op == OpRISCV64FNES {
			v_0 := b.Controls[0]
			t := v_0.Type
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			// The _i0 loop tries both argument orders (FNES is commutative).
			for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
				x := v_0_0
				y := v_0_1
				v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQS, t)
				v0.AddArg2(x, y)
				b.resetWithControl(BlockRISCV64BEQZ, v0)
				return true
			}
		}
		// match: (BNEZ (FNED <t> x y) yes no)
		// result: (BEQZ (FEQD <t> x y) yes no)
		for b.Controls[0].Op == OpRISCV64FNED {
			v_0 := b.Controls[0]
			t := v_0.Type
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
				x := v_0_0
				y := v_0_1
				v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQD, t)
				v0.AddArg2(x, y)
				b.resetWithControl(BlockRISCV64BEQZ, v0)
				return true
			}
		}
		// match: (BNEZ (SUB x y) yes no)
		// result: (BNE x y yes no)
		for b.Controls[0].Op == OpRISCV64SUB {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockRISCV64BNE, x, y)
			return true
		}
		// match: (BNEZ (SLT x y) yes no)
		// result: (BLT x y yes no)
		for b.Controls[0].Op == OpRISCV64SLT {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockRISCV64BLT, x, y)
			return true
		}
		// match: (BNEZ (SLTU x y) yes no)
		// result: (BLTU x y yes no)
		for b.Controls[0].Op == OpRISCV64SLTU {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockRISCV64BLTU, x, y)
			return true
		}
		// match: (BNEZ (SLTI [x] y) yes no)
		// result: (BLT y (MOVDconst [x]) yes no)
		for b.Controls[0].Op == OpRISCV64SLTI {
			v_0 := b.Controls[0]
			x := auxIntToInt64(v_0.AuxInt)
			y := v_0.Args[0]
			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
			v0.AuxInt = int64ToAuxInt(x)
			b.resetWithControl2(BlockRISCV64BLT, y, v0)
			return true
		}
		// match: (BNEZ (SLTIU [x] y) yes no)
		// result: (BLTU y (MOVDconst [x]) yes no)
		for b.Controls[0].Op == OpRISCV64SLTIU {
			v_0 := b.Controls[0]
			x := auxIntToInt64(v_0.AuxInt)
			y := v_0.Args[0]
			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
			v0.AuxInt = int64ToAuxInt(x)
			b.resetWithControl2(BlockRISCV64BLTU, y, v0)
			return true
		}
	case BlockIf:
		// match: (If cond yes no)
		// result: (BNEZ (MOVBUreg <typ.UInt64> cond) yes no)
		for {
			cond := b.Controls[0]
			v0 := b.NewValue0(cond.Pos, OpRISCV64MOVBUreg, typ.UInt64)
			v0.AddArg(cond)
			b.resetWithControl(BlockRISCV64BNEZ, v0)
			return true
		}
	}
	return false
}
  9382  

View as plain text