Source file src/cmd/compile/internal/ssa/rewriteLOONG64.go

     1  // Code generated from _gen/LOONG64.rules using 'go generate'; DO NOT EDIT.
     2  
     3  package ssa
     4  
     5  import "cmd/compile/internal/types"
     6  
// rewriteValueLOONG64 applies the LOONG64 lowering rules to v and reports
// whether v was rewritten. Rules that are a pure opcode substitution are
// performed inline by reassigning v.Op (the arguments are left in place);
// rules that must inspect arguments or aux fields are delegated to a
// per-opcode rewriteValueLOONG64_Op* helper.
//
// NOTE: this file is generated from _gen/LOONG64.rules; change the rules and
// re-run 'go generate' rather than editing this function by hand.
func rewriteValueLOONG64(v *Value) bool {
	switch v.Op {
	case OpAbs:
		v.Op = OpLOONG64ABSD
		return true
	case OpAdd16:
		v.Op = OpLOONG64ADDV
		return true
	case OpAdd32:
		v.Op = OpLOONG64ADDV
		return true
	case OpAdd32F:
		v.Op = OpLOONG64ADDF
		return true
	case OpAdd64:
		v.Op = OpLOONG64ADDV
		return true
	case OpAdd64F:
		v.Op = OpLOONG64ADDD
		return true
	case OpAdd8:
		v.Op = OpLOONG64ADDV
		return true
	case OpAddPtr:
		v.Op = OpLOONG64ADDV
		return true
	case OpAddr:
		return rewriteValueLOONG64_OpAddr(v)
	case OpAnd16:
		v.Op = OpLOONG64AND
		return true
	case OpAnd32:
		v.Op = OpLOONG64AND
		return true
	case OpAnd64:
		v.Op = OpLOONG64AND
		return true
	case OpAnd8:
		v.Op = OpLOONG64AND
		return true
	case OpAndB:
		v.Op = OpLOONG64AND
		return true
	case OpAtomicAdd32:
		v.Op = OpLOONG64LoweredAtomicAdd32
		return true
	case OpAtomicAdd64:
		v.Op = OpLOONG64LoweredAtomicAdd64
		return true
	case OpAtomicAnd32:
		v.Op = OpLOONG64LoweredAtomicAnd32
		return true
	case OpAtomicAnd32value:
		v.Op = OpLOONG64LoweredAtomicAnd32value
		return true
	case OpAtomicAnd64value:
		v.Op = OpLOONG64LoweredAtomicAnd64value
		return true
	case OpAtomicAnd8:
		return rewriteValueLOONG64_OpAtomicAnd8(v)
	case OpAtomicCompareAndSwap32:
		return rewriteValueLOONG64_OpAtomicCompareAndSwap32(v)
	case OpAtomicCompareAndSwap32Variant:
		return rewriteValueLOONG64_OpAtomicCompareAndSwap32Variant(v)
	case OpAtomicCompareAndSwap64:
		v.Op = OpLOONG64LoweredAtomicCas64
		return true
	case OpAtomicCompareAndSwap64Variant:
		v.Op = OpLOONG64LoweredAtomicCas64Variant
		return true
	case OpAtomicExchange32:
		v.Op = OpLOONG64LoweredAtomicExchange32
		return true
	case OpAtomicExchange64:
		v.Op = OpLOONG64LoweredAtomicExchange64
		return true
	case OpAtomicExchange8Variant:
		v.Op = OpLOONG64LoweredAtomicExchange8Variant
		return true
	case OpAtomicLoad32:
		v.Op = OpLOONG64LoweredAtomicLoad32
		return true
	case OpAtomicLoad64:
		v.Op = OpLOONG64LoweredAtomicLoad64
		return true
	case OpAtomicLoad8:
		v.Op = OpLOONG64LoweredAtomicLoad8
		return true
	case OpAtomicLoadPtr:
		v.Op = OpLOONG64LoweredAtomicLoad64
		return true
	case OpAtomicOr32:
		v.Op = OpLOONG64LoweredAtomicOr32
		return true
	case OpAtomicOr32value:
		v.Op = OpLOONG64LoweredAtomicOr32value
		return true
	case OpAtomicOr64value:
		v.Op = OpLOONG64LoweredAtomicOr64value
		return true
	case OpAtomicOr8:
		return rewriteValueLOONG64_OpAtomicOr8(v)
	case OpAtomicStore32:
		v.Op = OpLOONG64LoweredAtomicStore32
		return true
	case OpAtomicStore32Variant:
		v.Op = OpLOONG64LoweredAtomicStore32Variant
		return true
	case OpAtomicStore64:
		v.Op = OpLOONG64LoweredAtomicStore64
		return true
	case OpAtomicStore64Variant:
		v.Op = OpLOONG64LoweredAtomicStore64Variant
		return true
	case OpAtomicStore8:
		v.Op = OpLOONG64LoweredAtomicStore8
		return true
	case OpAtomicStore8Variant:
		v.Op = OpLOONG64LoweredAtomicStore8Variant
		return true
	case OpAtomicStorePtrNoWB:
		v.Op = OpLOONG64LoweredAtomicStore64
		return true
	case OpAvg64u:
		return rewriteValueLOONG64_OpAvg64u(v)
	case OpBitLen16:
		return rewriteValueLOONG64_OpBitLen16(v)
	case OpBitLen32:
		return rewriteValueLOONG64_OpBitLen32(v)
	case OpBitLen64:
		return rewriteValueLOONG64_OpBitLen64(v)
	case OpBitLen8:
		return rewriteValueLOONG64_OpBitLen8(v)
	case OpBitRev16:
		return rewriteValueLOONG64_OpBitRev16(v)
	case OpBitRev32:
		v.Op = OpLOONG64BITREVW
		return true
	case OpBitRev64:
		v.Op = OpLOONG64BITREVV
		return true
	case OpBitRev8:
		v.Op = OpLOONG64BITREV4B
		return true
	case OpBswap16:
		v.Op = OpLOONG64REVB2H
		return true
	case OpBswap32:
		v.Op = OpLOONG64REVB2W
		return true
	case OpBswap64:
		v.Op = OpLOONG64REVBV
		return true
	case OpCeil:
		v.Op = OpLOONG64FRINTPD
		return true
	case OpClosureCall:
		v.Op = OpLOONG64CALLclosure
		return true
	case OpCom16:
		return rewriteValueLOONG64_OpCom16(v)
	case OpCom32:
		return rewriteValueLOONG64_OpCom32(v)
	case OpCom64:
		return rewriteValueLOONG64_OpCom64(v)
	case OpCom8:
		return rewriteValueLOONG64_OpCom8(v)
	case OpCondSelect:
		return rewriteValueLOONG64_OpCondSelect(v)
	case OpConst16:
		return rewriteValueLOONG64_OpConst16(v)
	case OpConst32:
		return rewriteValueLOONG64_OpConst32(v)
	case OpConst32F:
		return rewriteValueLOONG64_OpConst32F(v)
	case OpConst64:
		return rewriteValueLOONG64_OpConst64(v)
	case OpConst64F:
		return rewriteValueLOONG64_OpConst64F(v)
	case OpConst8:
		return rewriteValueLOONG64_OpConst8(v)
	case OpConstBool:
		return rewriteValueLOONG64_OpConstBool(v)
	case OpConstNil:
		return rewriteValueLOONG64_OpConstNil(v)
	case OpCopysign:
		v.Op = OpLOONG64FCOPYSGD
		return true
	case OpCtz16:
		return rewriteValueLOONG64_OpCtz16(v)
	case OpCtz16NonZero:
		v.Op = OpCtz64
		return true
	case OpCtz32:
		v.Op = OpLOONG64CTZW
		return true
	case OpCtz32NonZero:
		v.Op = OpCtz64
		return true
	case OpCtz64:
		v.Op = OpLOONG64CTZV
		return true
	case OpCtz64NonZero:
		v.Op = OpCtz64
		return true
	case OpCtz8:
		return rewriteValueLOONG64_OpCtz8(v)
	case OpCtz8NonZero:
		v.Op = OpCtz64
		return true
	case OpCvt32Fto32:
		v.Op = OpLOONG64TRUNCFW
		return true
	case OpCvt32Fto64:
		v.Op = OpLOONG64TRUNCFV
		return true
	case OpCvt32Fto64F:
		v.Op = OpLOONG64MOVFD
		return true
	case OpCvt32to32F:
		v.Op = OpLOONG64MOVWF
		return true
	case OpCvt32to64F:
		v.Op = OpLOONG64MOVWD
		return true
	case OpCvt64Fto32:
		v.Op = OpLOONG64TRUNCDW
		return true
	case OpCvt64Fto32F:
		v.Op = OpLOONG64MOVDF
		return true
	case OpCvt64Fto64:
		v.Op = OpLOONG64TRUNCDV
		return true
	case OpCvt64to32F:
		v.Op = OpLOONG64MOVVF
		return true
	case OpCvt64to64F:
		v.Op = OpLOONG64MOVVD
		return true
	case OpCvtBoolToUint8:
		v.Op = OpCopy
		return true
	case OpDiv16:
		return rewriteValueLOONG64_OpDiv16(v)
	case OpDiv16u:
		return rewriteValueLOONG64_OpDiv16u(v)
	case OpDiv32:
		return rewriteValueLOONG64_OpDiv32(v)
	case OpDiv32F:
		v.Op = OpLOONG64DIVF
		return true
	case OpDiv32u:
		return rewriteValueLOONG64_OpDiv32u(v)
	case OpDiv64:
		return rewriteValueLOONG64_OpDiv64(v)
	case OpDiv64F:
		v.Op = OpLOONG64DIVD
		return true
	case OpDiv64u:
		v.Op = OpLOONG64DIVVU
		return true
	case OpDiv8:
		return rewriteValueLOONG64_OpDiv8(v)
	case OpDiv8u:
		return rewriteValueLOONG64_OpDiv8u(v)
	case OpEq16:
		return rewriteValueLOONG64_OpEq16(v)
	case OpEq32:
		return rewriteValueLOONG64_OpEq32(v)
	case OpEq32F:
		return rewriteValueLOONG64_OpEq32F(v)
	case OpEq64:
		return rewriteValueLOONG64_OpEq64(v)
	case OpEq64F:
		return rewriteValueLOONG64_OpEq64F(v)
	case OpEq8:
		return rewriteValueLOONG64_OpEq8(v)
	case OpEqB:
		return rewriteValueLOONG64_OpEqB(v)
	case OpEqPtr:
		return rewriteValueLOONG64_OpEqPtr(v)
	case OpFMA:
		v.Op = OpLOONG64FMADDD
		return true
	case OpFloor:
		v.Op = OpLOONG64FRINTMD
		return true
	case OpGetCallerPC:
		v.Op = OpLOONG64LoweredGetCallerPC
		return true
	case OpGetCallerSP:
		v.Op = OpLOONG64LoweredGetCallerSP
		return true
	case OpGetClosurePtr:
		v.Op = OpLOONG64LoweredGetClosurePtr
		return true
	case OpHmul32:
		v.Op = OpLOONG64MULH
		return true
	case OpHmul32u:
		v.Op = OpLOONG64MULHU
		return true
	case OpHmul64:
		v.Op = OpLOONG64MULHV
		return true
	case OpHmul64u:
		v.Op = OpLOONG64MULHVU
		return true
	case OpInterCall:
		v.Op = OpLOONG64CALLinter
		return true
	case OpIsInBounds:
		return rewriteValueLOONG64_OpIsInBounds(v)
	case OpIsNonNil:
		return rewriteValueLOONG64_OpIsNonNil(v)
	case OpIsSliceInBounds:
		return rewriteValueLOONG64_OpIsSliceInBounds(v)
	case OpLOONG64ADDD:
		return rewriteValueLOONG64_OpLOONG64ADDD(v)
	case OpLOONG64ADDF:
		return rewriteValueLOONG64_OpLOONG64ADDF(v)
	case OpLOONG64ADDV:
		return rewriteValueLOONG64_OpLOONG64ADDV(v)
	case OpLOONG64ADDVconst:
		return rewriteValueLOONG64_OpLOONG64ADDVconst(v)
	case OpLOONG64ADDshiftLLV:
		return rewriteValueLOONG64_OpLOONG64ADDshiftLLV(v)
	case OpLOONG64AND:
		return rewriteValueLOONG64_OpLOONG64AND(v)
	case OpLOONG64ANDconst:
		return rewriteValueLOONG64_OpLOONG64ANDconst(v)
	case OpLOONG64DIVV:
		return rewriteValueLOONG64_OpLOONG64DIVV(v)
	case OpLOONG64DIVVU:
		return rewriteValueLOONG64_OpLOONG64DIVVU(v)
	case OpLOONG64LoweredPanicBoundsCR:
		return rewriteValueLOONG64_OpLOONG64LoweredPanicBoundsCR(v)
	case OpLOONG64LoweredPanicBoundsRC:
		return rewriteValueLOONG64_OpLOONG64LoweredPanicBoundsRC(v)
	case OpLOONG64LoweredPanicBoundsRR:
		return rewriteValueLOONG64_OpLOONG64LoweredPanicBoundsRR(v)
	case OpLOONG64MASKEQZ:
		return rewriteValueLOONG64_OpLOONG64MASKEQZ(v)
	case OpLOONG64MASKNEZ:
		return rewriteValueLOONG64_OpLOONG64MASKNEZ(v)
	case OpLOONG64MOVBUload:
		return rewriteValueLOONG64_OpLOONG64MOVBUload(v)
	case OpLOONG64MOVBUloadidx:
		return rewriteValueLOONG64_OpLOONG64MOVBUloadidx(v)
	case OpLOONG64MOVBUreg:
		return rewriteValueLOONG64_OpLOONG64MOVBUreg(v)
	case OpLOONG64MOVBload:
		return rewriteValueLOONG64_OpLOONG64MOVBload(v)
	case OpLOONG64MOVBloadidx:
		return rewriteValueLOONG64_OpLOONG64MOVBloadidx(v)
	case OpLOONG64MOVBreg:
		return rewriteValueLOONG64_OpLOONG64MOVBreg(v)
	case OpLOONG64MOVBstore:
		return rewriteValueLOONG64_OpLOONG64MOVBstore(v)
	case OpLOONG64MOVBstoreidx:
		return rewriteValueLOONG64_OpLOONG64MOVBstoreidx(v)
	case OpLOONG64MOVDF:
		return rewriteValueLOONG64_OpLOONG64MOVDF(v)
	case OpLOONG64MOVDload:
		return rewriteValueLOONG64_OpLOONG64MOVDload(v)
	case OpLOONG64MOVDloadidx:
		return rewriteValueLOONG64_OpLOONG64MOVDloadidx(v)
	case OpLOONG64MOVDstore:
		return rewriteValueLOONG64_OpLOONG64MOVDstore(v)
	case OpLOONG64MOVDstoreidx:
		return rewriteValueLOONG64_OpLOONG64MOVDstoreidx(v)
	case OpLOONG64MOVFload:
		return rewriteValueLOONG64_OpLOONG64MOVFload(v)
	case OpLOONG64MOVFloadidx:
		return rewriteValueLOONG64_OpLOONG64MOVFloadidx(v)
	case OpLOONG64MOVFstore:
		return rewriteValueLOONG64_OpLOONG64MOVFstore(v)
	case OpLOONG64MOVFstoreidx:
		return rewriteValueLOONG64_OpLOONG64MOVFstoreidx(v)
	case OpLOONG64MOVHUload:
		return rewriteValueLOONG64_OpLOONG64MOVHUload(v)
	case OpLOONG64MOVHUloadidx:
		return rewriteValueLOONG64_OpLOONG64MOVHUloadidx(v)
	case OpLOONG64MOVHUreg:
		return rewriteValueLOONG64_OpLOONG64MOVHUreg(v)
	case OpLOONG64MOVHload:
		return rewriteValueLOONG64_OpLOONG64MOVHload(v)
	case OpLOONG64MOVHloadidx:
		return rewriteValueLOONG64_OpLOONG64MOVHloadidx(v)
	case OpLOONG64MOVHreg:
		return rewriteValueLOONG64_OpLOONG64MOVHreg(v)
	case OpLOONG64MOVHstore:
		return rewriteValueLOONG64_OpLOONG64MOVHstore(v)
	case OpLOONG64MOVHstoreidx:
		return rewriteValueLOONG64_OpLOONG64MOVHstoreidx(v)
	case OpLOONG64MOVVload:
		return rewriteValueLOONG64_OpLOONG64MOVVload(v)
	case OpLOONG64MOVVloadidx:
		return rewriteValueLOONG64_OpLOONG64MOVVloadidx(v)
	case OpLOONG64MOVVnop:
		return rewriteValueLOONG64_OpLOONG64MOVVnop(v)
	case OpLOONG64MOVVreg:
		return rewriteValueLOONG64_OpLOONG64MOVVreg(v)
	case OpLOONG64MOVVstore:
		return rewriteValueLOONG64_OpLOONG64MOVVstore(v)
	case OpLOONG64MOVVstoreidx:
		return rewriteValueLOONG64_OpLOONG64MOVVstoreidx(v)
	case OpLOONG64MOVWUload:
		return rewriteValueLOONG64_OpLOONG64MOVWUload(v)
	case OpLOONG64MOVWUloadidx:
		return rewriteValueLOONG64_OpLOONG64MOVWUloadidx(v)
	case OpLOONG64MOVWUreg:
		return rewriteValueLOONG64_OpLOONG64MOVWUreg(v)
	case OpLOONG64MOVWload:
		return rewriteValueLOONG64_OpLOONG64MOVWload(v)
	case OpLOONG64MOVWloadidx:
		return rewriteValueLOONG64_OpLOONG64MOVWloadidx(v)
	case OpLOONG64MOVWreg:
		return rewriteValueLOONG64_OpLOONG64MOVWreg(v)
	case OpLOONG64MOVWstore:
		return rewriteValueLOONG64_OpLOONG64MOVWstore(v)
	case OpLOONG64MOVWstoreidx:
		return rewriteValueLOONG64_OpLOONG64MOVWstoreidx(v)
	case OpLOONG64MULV:
		return rewriteValueLOONG64_OpLOONG64MULV(v)
	case OpLOONG64NEGV:
		return rewriteValueLOONG64_OpLOONG64NEGV(v)
	case OpLOONG64NOR:
		return rewriteValueLOONG64_OpLOONG64NOR(v)
	case OpLOONG64NORconst:
		return rewriteValueLOONG64_OpLOONG64NORconst(v)
	case OpLOONG64OR:
		return rewriteValueLOONG64_OpLOONG64OR(v)
	case OpLOONG64ORN:
		return rewriteValueLOONG64_OpLOONG64ORN(v)
	case OpLOONG64ORconst:
		return rewriteValueLOONG64_OpLOONG64ORconst(v)
	case OpLOONG64REMV:
		return rewriteValueLOONG64_OpLOONG64REMV(v)
	case OpLOONG64REMVU:
		return rewriteValueLOONG64_OpLOONG64REMVU(v)
	case OpLOONG64ROTR:
		return rewriteValueLOONG64_OpLOONG64ROTR(v)
	case OpLOONG64ROTRV:
		return rewriteValueLOONG64_OpLOONG64ROTRV(v)
	case OpLOONG64SGT:
		return rewriteValueLOONG64_OpLOONG64SGT(v)
	case OpLOONG64SGTU:
		return rewriteValueLOONG64_OpLOONG64SGTU(v)
	case OpLOONG64SGTUconst:
		return rewriteValueLOONG64_OpLOONG64SGTUconst(v)
	case OpLOONG64SGTconst:
		return rewriteValueLOONG64_OpLOONG64SGTconst(v)
	case OpLOONG64SLL:
		return rewriteValueLOONG64_OpLOONG64SLL(v)
	case OpLOONG64SLLV:
		return rewriteValueLOONG64_OpLOONG64SLLV(v)
	case OpLOONG64SLLVconst:
		return rewriteValueLOONG64_OpLOONG64SLLVconst(v)
	case OpLOONG64SLLconst:
		return rewriteValueLOONG64_OpLOONG64SLLconst(v)
	case OpLOONG64SRA:
		return rewriteValueLOONG64_OpLOONG64SRA(v)
	case OpLOONG64SRAV:
		return rewriteValueLOONG64_OpLOONG64SRAV(v)
	case OpLOONG64SRAVconst:
		return rewriteValueLOONG64_OpLOONG64SRAVconst(v)
	case OpLOONG64SRL:
		return rewriteValueLOONG64_OpLOONG64SRL(v)
	case OpLOONG64SRLV:
		return rewriteValueLOONG64_OpLOONG64SRLV(v)
	case OpLOONG64SRLVconst:
		return rewriteValueLOONG64_OpLOONG64SRLVconst(v)
	case OpLOONG64SUBD:
		return rewriteValueLOONG64_OpLOONG64SUBD(v)
	case OpLOONG64SUBF:
		return rewriteValueLOONG64_OpLOONG64SUBF(v)
	case OpLOONG64SUBV:
		return rewriteValueLOONG64_OpLOONG64SUBV(v)
	case OpLOONG64SUBVconst:
		return rewriteValueLOONG64_OpLOONG64SUBVconst(v)
	case OpLOONG64XOR:
		return rewriteValueLOONG64_OpLOONG64XOR(v)
	case OpLOONG64XORconst:
		return rewriteValueLOONG64_OpLOONG64XORconst(v)
	case OpLeq16:
		return rewriteValueLOONG64_OpLeq16(v)
	case OpLeq16U:
		return rewriteValueLOONG64_OpLeq16U(v)
	case OpLeq32:
		return rewriteValueLOONG64_OpLeq32(v)
	case OpLeq32F:
		return rewriteValueLOONG64_OpLeq32F(v)
	case OpLeq32U:
		return rewriteValueLOONG64_OpLeq32U(v)
	case OpLeq64:
		return rewriteValueLOONG64_OpLeq64(v)
	case OpLeq64F:
		return rewriteValueLOONG64_OpLeq64F(v)
	case OpLeq64U:
		return rewriteValueLOONG64_OpLeq64U(v)
	case OpLeq8:
		return rewriteValueLOONG64_OpLeq8(v)
	case OpLeq8U:
		return rewriteValueLOONG64_OpLeq8U(v)
	case OpLess16:
		return rewriteValueLOONG64_OpLess16(v)
	case OpLess16U:
		return rewriteValueLOONG64_OpLess16U(v)
	case OpLess32:
		return rewriteValueLOONG64_OpLess32(v)
	case OpLess32F:
		return rewriteValueLOONG64_OpLess32F(v)
	case OpLess32U:
		return rewriteValueLOONG64_OpLess32U(v)
	case OpLess64:
		return rewriteValueLOONG64_OpLess64(v)
	case OpLess64F:
		return rewriteValueLOONG64_OpLess64F(v)
	case OpLess64U:
		return rewriteValueLOONG64_OpLess64U(v)
	case OpLess8:
		return rewriteValueLOONG64_OpLess8(v)
	case OpLess8U:
		return rewriteValueLOONG64_OpLess8U(v)
	case OpLoad:
		return rewriteValueLOONG64_OpLoad(v)
	case OpLocalAddr:
		return rewriteValueLOONG64_OpLocalAddr(v)
	case OpLsh16x16:
		return rewriteValueLOONG64_OpLsh16x16(v)
	case OpLsh16x32:
		return rewriteValueLOONG64_OpLsh16x32(v)
	case OpLsh16x64:
		return rewriteValueLOONG64_OpLsh16x64(v)
	case OpLsh16x8:
		return rewriteValueLOONG64_OpLsh16x8(v)
	case OpLsh32x16:
		return rewriteValueLOONG64_OpLsh32x16(v)
	case OpLsh32x32:
		return rewriteValueLOONG64_OpLsh32x32(v)
	case OpLsh32x64:
		return rewriteValueLOONG64_OpLsh32x64(v)
	case OpLsh32x8:
		return rewriteValueLOONG64_OpLsh32x8(v)
	case OpLsh64x16:
		return rewriteValueLOONG64_OpLsh64x16(v)
	case OpLsh64x32:
		return rewriteValueLOONG64_OpLsh64x32(v)
	case OpLsh64x64:
		return rewriteValueLOONG64_OpLsh64x64(v)
	case OpLsh64x8:
		return rewriteValueLOONG64_OpLsh64x8(v)
	case OpLsh8x16:
		return rewriteValueLOONG64_OpLsh8x16(v)
	case OpLsh8x32:
		return rewriteValueLOONG64_OpLsh8x32(v)
	case OpLsh8x64:
		return rewriteValueLOONG64_OpLsh8x64(v)
	case OpLsh8x8:
		return rewriteValueLOONG64_OpLsh8x8(v)
	case OpMax32F:
		v.Op = OpLOONG64FMAXF
		return true
	case OpMax64F:
		v.Op = OpLOONG64FMAXD
		return true
	case OpMin32F:
		v.Op = OpLOONG64FMINF
		return true
	case OpMin64F:
		v.Op = OpLOONG64FMIND
		return true
	case OpMod16:
		return rewriteValueLOONG64_OpMod16(v)
	case OpMod16u:
		return rewriteValueLOONG64_OpMod16u(v)
	case OpMod32:
		return rewriteValueLOONG64_OpMod32(v)
	case OpMod32u:
		return rewriteValueLOONG64_OpMod32u(v)
	case OpMod64:
		return rewriteValueLOONG64_OpMod64(v)
	case OpMod64u:
		v.Op = OpLOONG64REMVU
		return true
	case OpMod8:
		return rewriteValueLOONG64_OpMod8(v)
	case OpMod8u:
		return rewriteValueLOONG64_OpMod8u(v)
	case OpMove:
		return rewriteValueLOONG64_OpMove(v)
	case OpMul16:
		v.Op = OpLOONG64MULV
		return true
	case OpMul32:
		v.Op = OpLOONG64MULV
		return true
	case OpMul32F:
		v.Op = OpLOONG64MULF
		return true
	case OpMul64:
		v.Op = OpLOONG64MULV
		return true
	case OpMul64F:
		v.Op = OpLOONG64MULD
		return true
	case OpMul8:
		v.Op = OpLOONG64MULV
		return true
	case OpNeg16:
		v.Op = OpLOONG64NEGV
		return true
	case OpNeg32:
		v.Op = OpLOONG64NEGV
		return true
	case OpNeg32F:
		v.Op = OpLOONG64NEGF
		return true
	case OpNeg64:
		v.Op = OpLOONG64NEGV
		return true
	case OpNeg64F:
		v.Op = OpLOONG64NEGD
		return true
	case OpNeg8:
		v.Op = OpLOONG64NEGV
		return true
	case OpNeq16:
		return rewriteValueLOONG64_OpNeq16(v)
	case OpNeq32:
		return rewriteValueLOONG64_OpNeq32(v)
	case OpNeq32F:
		return rewriteValueLOONG64_OpNeq32F(v)
	case OpNeq64:
		return rewriteValueLOONG64_OpNeq64(v)
	case OpNeq64F:
		return rewriteValueLOONG64_OpNeq64F(v)
	case OpNeq8:
		return rewriteValueLOONG64_OpNeq8(v)
	case OpNeqB:
		v.Op = OpLOONG64XOR
		return true
	case OpNeqPtr:
		return rewriteValueLOONG64_OpNeqPtr(v)
	case OpNilCheck:
		v.Op = OpLOONG64LoweredNilCheck
		return true
	case OpNot:
		return rewriteValueLOONG64_OpNot(v)
	case OpOffPtr:
		return rewriteValueLOONG64_OpOffPtr(v)
	case OpOr16:
		v.Op = OpLOONG64OR
		return true
	case OpOr32:
		v.Op = OpLOONG64OR
		return true
	case OpOr64:
		v.Op = OpLOONG64OR
		return true
	case OpOr8:
		v.Op = OpLOONG64OR
		return true
	case OpOrB:
		v.Op = OpLOONG64OR
		return true
	case OpPanicBounds:
		v.Op = OpLOONG64LoweredPanicBoundsRR
		return true
	case OpPopCount16:
		return rewriteValueLOONG64_OpPopCount16(v)
	case OpPopCount32:
		return rewriteValueLOONG64_OpPopCount32(v)
	case OpPopCount64:
		return rewriteValueLOONG64_OpPopCount64(v)
	case OpPrefetchCache:
		return rewriteValueLOONG64_OpPrefetchCache(v)
	case OpPrefetchCacheStreamed:
		return rewriteValueLOONG64_OpPrefetchCacheStreamed(v)
	case OpPubBarrier:
		v.Op = OpLOONG64LoweredPubBarrier
		return true
	case OpRotateLeft16:
		return rewriteValueLOONG64_OpRotateLeft16(v)
	case OpRotateLeft32:
		return rewriteValueLOONG64_OpRotateLeft32(v)
	case OpRotateLeft64:
		return rewriteValueLOONG64_OpRotateLeft64(v)
	case OpRotateLeft8:
		return rewriteValueLOONG64_OpRotateLeft8(v)
	case OpRound32F:
		v.Op = OpLOONG64LoweredRound32F
		return true
	case OpRound64F:
		v.Op = OpLOONG64LoweredRound64F
		return true
	case OpRoundToEven:
		v.Op = OpLOONG64FRINTND
		return true
	case OpRsh16Ux16:
		return rewriteValueLOONG64_OpRsh16Ux16(v)
	case OpRsh16Ux32:
		return rewriteValueLOONG64_OpRsh16Ux32(v)
	case OpRsh16Ux64:
		return rewriteValueLOONG64_OpRsh16Ux64(v)
	case OpRsh16Ux8:
		return rewriteValueLOONG64_OpRsh16Ux8(v)
	case OpRsh16x16:
		return rewriteValueLOONG64_OpRsh16x16(v)
	case OpRsh16x32:
		return rewriteValueLOONG64_OpRsh16x32(v)
	case OpRsh16x64:
		return rewriteValueLOONG64_OpRsh16x64(v)
	case OpRsh16x8:
		return rewriteValueLOONG64_OpRsh16x8(v)
	case OpRsh32Ux16:
		return rewriteValueLOONG64_OpRsh32Ux16(v)
	case OpRsh32Ux32:
		return rewriteValueLOONG64_OpRsh32Ux32(v)
	case OpRsh32Ux64:
		return rewriteValueLOONG64_OpRsh32Ux64(v)
	case OpRsh32Ux8:
		return rewriteValueLOONG64_OpRsh32Ux8(v)
	case OpRsh32x16:
		return rewriteValueLOONG64_OpRsh32x16(v)
	case OpRsh32x32:
		return rewriteValueLOONG64_OpRsh32x32(v)
	case OpRsh32x64:
		return rewriteValueLOONG64_OpRsh32x64(v)
	case OpRsh32x8:
		return rewriteValueLOONG64_OpRsh32x8(v)
	case OpRsh64Ux16:
		return rewriteValueLOONG64_OpRsh64Ux16(v)
	case OpRsh64Ux32:
		return rewriteValueLOONG64_OpRsh64Ux32(v)
	case OpRsh64Ux64:
		return rewriteValueLOONG64_OpRsh64Ux64(v)
	case OpRsh64Ux8:
		return rewriteValueLOONG64_OpRsh64Ux8(v)
	case OpRsh64x16:
		return rewriteValueLOONG64_OpRsh64x16(v)
	case OpRsh64x32:
		return rewriteValueLOONG64_OpRsh64x32(v)
	case OpRsh64x64:
		return rewriteValueLOONG64_OpRsh64x64(v)
	case OpRsh64x8:
		return rewriteValueLOONG64_OpRsh64x8(v)
	case OpRsh8Ux16:
		return rewriteValueLOONG64_OpRsh8Ux16(v)
	case OpRsh8Ux32:
		return rewriteValueLOONG64_OpRsh8Ux32(v)
	case OpRsh8Ux64:
		return rewriteValueLOONG64_OpRsh8Ux64(v)
	case OpRsh8Ux8:
		return rewriteValueLOONG64_OpRsh8Ux8(v)
	case OpRsh8x16:
		return rewriteValueLOONG64_OpRsh8x16(v)
	case OpRsh8x32:
		return rewriteValueLOONG64_OpRsh8x32(v)
	case OpRsh8x64:
		return rewriteValueLOONG64_OpRsh8x64(v)
	case OpRsh8x8:
		return rewriteValueLOONG64_OpRsh8x8(v)
	case OpSelect0:
		return rewriteValueLOONG64_OpSelect0(v)
	case OpSelect1:
		return rewriteValueLOONG64_OpSelect1(v)
	case OpSelectN:
		return rewriteValueLOONG64_OpSelectN(v)
	case OpSignExt16to32:
		v.Op = OpLOONG64MOVHreg
		return true
	case OpSignExt16to64:
		v.Op = OpLOONG64MOVHreg
		return true
	case OpSignExt32to64:
		v.Op = OpLOONG64MOVWreg
		return true
	case OpSignExt8to16:
		v.Op = OpLOONG64MOVBreg
		return true
	case OpSignExt8to32:
		v.Op = OpLOONG64MOVBreg
		return true
	case OpSignExt8to64:
		v.Op = OpLOONG64MOVBreg
		return true
	case OpSlicemask:
		return rewriteValueLOONG64_OpSlicemask(v)
	case OpSqrt:
		v.Op = OpLOONG64SQRTD
		return true
	case OpSqrt32:
		v.Op = OpLOONG64SQRTF
		return true
	case OpStaticCall:
		v.Op = OpLOONG64CALLstatic
		return true
	case OpStore:
		return rewriteValueLOONG64_OpStore(v)
	case OpSub16:
		v.Op = OpLOONG64SUBV
		return true
	case OpSub32:
		v.Op = OpLOONG64SUBV
		return true
	case OpSub32F:
		v.Op = OpLOONG64SUBF
		return true
	case OpSub64:
		v.Op = OpLOONG64SUBV
		return true
	case OpSub64F:
		v.Op = OpLOONG64SUBD
		return true
	case OpSub8:
		v.Op = OpLOONG64SUBV
		return true
	case OpSubPtr:
		v.Op = OpLOONG64SUBV
		return true
	case OpTailCall:
		v.Op = OpLOONG64CALLtail
		return true
	case OpTailCallInter:
		v.Op = OpLOONG64CALLtailinter
		return true
	case OpTrunc:
		v.Op = OpLOONG64FRINTZD
		return true
	case OpTrunc16to8:
		v.Op = OpCopy
		return true
	case OpTrunc32to16:
		v.Op = OpCopy
		return true
	case OpTrunc32to8:
		v.Op = OpCopy
		return true
	case OpTrunc64to16:
		v.Op = OpCopy
		return true
	case OpTrunc64to32:
		v.Op = OpCopy
		return true
	case OpTrunc64to8:
		v.Op = OpCopy
		return true
	case OpWB:
		v.Op = OpLOONG64LoweredWB
		return true
	case OpXor16:
		v.Op = OpLOONG64XOR
		return true
	case OpXor32:
		v.Op = OpLOONG64XOR
		return true
	case OpXor64:
		v.Op = OpLOONG64XOR
		return true
	case OpXor8:
		v.Op = OpLOONG64XOR
		return true
	case OpZero:
		return rewriteValueLOONG64_OpZero(v)
	case OpZeroExt16to32:
		v.Op = OpLOONG64MOVHUreg
		return true
	case OpZeroExt16to64:
		v.Op = OpLOONG64MOVHUreg
		return true
	case OpZeroExt32to64:
		v.Op = OpLOONG64MOVWUreg
		return true
	case OpZeroExt8to16:
		v.Op = OpLOONG64MOVBUreg
		return true
	case OpZeroExt8to32:
		v.Op = OpLOONG64MOVBUreg
		return true
	case OpZeroExt8to64:
		v.Op = OpLOONG64MOVBUreg
		return true
	}
	// No rule matched this opcode; leave v unchanged.
	return false
}
   895  func rewriteValueLOONG64_OpAddr(v *Value) bool {
   896  	v_0 := v.Args[0]
   897  	// match: (Addr {sym} base)
   898  	// result: (MOVVaddr {sym} base)
   899  	for {
   900  		sym := auxToSym(v.Aux)
   901  		base := v_0
   902  		v.reset(OpLOONG64MOVVaddr)
   903  		v.Aux = symToAux(sym)
   904  		v.AddArg(base)
   905  		return true
   906  	}
   907  }
// rewriteValueLOONG64_OpAtomicAnd8 lowers a byte-wide atomic AND onto the
// 32-bit atomic AND primitive, since the value graph here operates on the
// aligned 32-bit word containing the byte:
//   - the address is rounded down to a 4-byte boundary (ptr & ^3), and
//   - the operand is a 32-bit mask that carries the byte's value in its lane
//     and all-ones in the other three lanes, so those bytes are unchanged.
//
// The mask is built as ^((val ^ 0xff) << (8 * (ptr & 3))):
// (XORconst [0xff] (ZeroExt8to32 val)) is the complemented byte, SLLV shifts
// it into the byte's bit position within the word ((ptr&3)<<3 bits), and
// NORconst [0] complements the whole word. NOTE(review): this lane placement
// assumes little-endian byte order — consistent with the rest of this
// generated file, but derived from the rules, not visible here.
func rewriteValueLOONG64_OpAtomicAnd8(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicAnd8 ptr val mem)
	// result: (LoweredAtomicAnd32 (AND <typ.Uintptr> (MOVVconst [^3]) ptr) (NORconst [0] <typ.UInt32> (SLLV <typ.UInt32> (XORconst <typ.UInt32> [0xff] (ZeroExt8to32 val)) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] ptr)))) mem)
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		v.reset(OpLOONG64LoweredAtomicAnd32)
		// v0: word-aligned address, ptr & ^3.
		v0 := b.NewValue0(v.Pos, OpLOONG64AND, typ.Uintptr)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(^3)
		v0.AddArg2(v1, ptr)
		// v2: the 32-bit mask, ^(( ^val & 0xff) << shift).
		v2 := b.NewValue0(v.Pos, OpLOONG64NORconst, typ.UInt32)
		v2.AuxInt = int64ToAuxInt(0)
		v3 := b.NewValue0(v.Pos, OpLOONG64SLLV, typ.UInt32)
		// v4: complement of the zero-extended byte value.
		v4 := b.NewValue0(v.Pos, OpLOONG64XORconst, typ.UInt32)
		v4.AuxInt = int64ToAuxInt(0xff)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v5.AddArg(val)
		v4.AddArg(v5)
		// v6: bit shift for the byte's lane, (ptr & 3) * 8.
		v6 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.UInt64)
		v6.AuxInt = int64ToAuxInt(3)
		v7 := b.NewValue0(v.Pos, OpLOONG64ANDconst, typ.UInt64)
		v7.AuxInt = int64ToAuxInt(3)
		v7.AddArg(ptr)
		v6.AddArg(v7)
		v3.AddArg2(v4, v6)
		v2.AddArg(v3)
		v.AddArg3(v0, v2, mem)
		return true
	}
}
// rewriteValueLOONG64_OpAtomicCompareAndSwap32 lowers a 32-bit CAS to
// LoweredAtomicCas32, sign-extending the expected old value to 64 bits first
// (presumably so it matches the sign-extended in-register form of 32-bit
// loads on this target — confirm against the LL.W lowering).
func rewriteValueLOONG64_OpAtomicCompareAndSwap32(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicCompareAndSwap32 ptr old new mem)
	// result: (LoweredAtomicCas32 ptr (SignExt32to64 old) new mem)
	for {
		ptr := v_0
		old := v_1
		new := v_2
		mem := v_3
		v.reset(OpLOONG64LoweredAtomicCas32)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(old)
		v.AddArg4(ptr, v0, new, mem)
		return true
	}
}
// rewriteValueLOONG64_OpAtomicCompareAndSwap32Variant is identical to the
// non-Variant lowering above except it targets LoweredAtomicCas32Variant;
// the expected old value is likewise sign-extended to 64 bits.
func rewriteValueLOONG64_OpAtomicCompareAndSwap32Variant(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicCompareAndSwap32Variant ptr old new mem)
	// result: (LoweredAtomicCas32Variant ptr (SignExt32to64 old) new mem)
	for {
		ptr := v_0
		old := v_1
		new := v_2
		mem := v_3
		v.reset(OpLOONG64LoweredAtomicCas32Variant)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(old)
		v.AddArg4(ptr, v0, new, mem)
		return true
	}
}
// rewriteValueLOONG64_OpAtomicOr8 lowers a byte-wide atomic OR to a
// word-wide LoweredAtomicOr32 on the aligned word containing the byte.
// Unlike AtomicAnd8, no complement step is needed: the other byte lanes of
// the shifted operand are zero, and OR-with-zero leaves them unchanged.
func rewriteValueLOONG64_OpAtomicOr8(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicOr8 ptr val mem)
	// result: (LoweredAtomicOr32 (AND <typ.Uintptr> (MOVVconst [^3]) ptr) (SLLV <typ.UInt32> (ZeroExt8to32 val) (SLLVconst <typ.UInt64> [3] (ANDconst <typ.UInt64> [3] ptr))) mem)
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		v.reset(OpLOONG64LoweredAtomicOr32)
		v0 := b.NewValue0(v.Pos, OpLOONG64AND, typ.Uintptr)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(^3)
		v0.AddArg2(v1, ptr)
		v2 := b.NewValue0(v.Pos, OpLOONG64SLLV, typ.UInt32)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v3.AddArg(val)
		v4 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(3)
		v5 := b.NewValue0(v.Pos, OpLOONG64ANDconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(3)
		v5.AddArg(ptr)
		v4.AddArg(v5)
		v2.AddArg2(v3, v4)
		v.AddArg3(v0, v2, mem)
		return true
	}
}
// rewriteValueLOONG64_OpAvg64u lowers the unsigned average as
// y + (x-y)>>1, which avoids the carry-out a direct (x+y)>>1 would lose.
// NOTE(review): this form is only equal to (x+y)/2 when x >= y — the
// generic rules that create Avg64u are assumed to guarantee that; confirm
// against _gen/genericOps.go.
func rewriteValueLOONG64_OpAvg64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Avg64u <t> x y)
	// result: (ADDV (SRLVconst <t> (SUBV <t> x y) [1]) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64ADDV)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLVconst, t)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SUBV, t)
		v1.AddArg2(x, y)
		v0.AddArg(v1)
		v.AddArg2(v0, y)
		return true
	}
}
// rewriteValueLOONG64_OpBitLen16 lowers BitLen16 by zero-extending the
// operand to 64 bits and deferring to the BitLen64 rule below.
func rewriteValueLOONG64_OpBitLen16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (BitLen16 x)
	// result: (BitLen64 (ZeroExt16to64 x))
	for {
		x := v_0
		v.reset(OpBitLen64)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueLOONG64_OpBitLen32 computes 32 - CLZW(x): the SUBVconst
// yields CLZW(x) - 32, and the outer NEGV flips the sign.
func rewriteValueLOONG64_OpBitLen32(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (BitLen32 <t> x)
	// result: (NEGV <t> (SUBVconst <t> [32] (CLZW <t> x)))
	for {
		t := v.Type
		x := v_0
		v.reset(OpLOONG64NEGV)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpLOONG64SUBVconst, t)
		v0.AuxInt = int64ToAuxInt(32)
		v1 := b.NewValue0(v.Pos, OpLOONG64CLZW, t)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueLOONG64_OpBitLen64 computes 64 - CLZV(x), using the same
// negate-of-difference shape as the 32-bit rule above.
func rewriteValueLOONG64_OpBitLen64(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (BitLen64 <t> x)
	// result: (NEGV <t> (SUBVconst <t> [64] (CLZV <t> x)))
	for {
		t := v.Type
		x := v_0
		v.reset(OpLOONG64NEGV)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpLOONG64SUBVconst, t)
		v0.AuxInt = int64ToAuxInt(64)
		v1 := b.NewValue0(v.Pos, OpLOONG64CLZV, t)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueLOONG64_OpBitLen8 lowers BitLen8 by zero-extending the
// operand to 64 bits and deferring to the BitLen64 rule.
func rewriteValueLOONG64_OpBitLen8(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (BitLen8 x)
	// result: (BitLen64 (ZeroExt8to64 x))
	for {
		x := v_0
		v.reset(OpBitLen64)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpBitRev16 reverses the low 16 bits in two steps:
// BITREV4B reverses the bits within each byte, then REVB2H swaps the two
// bytes of the halfword, giving a full 16-bit bit reversal.
func rewriteValueLOONG64_OpBitRev16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (BitRev16 <t> x)
	// result: (REVB2H (BITREV4B <t> x))
	for {
		t := v.Type
		x := v_0
		v.reset(OpLOONG64REVB2H)
		v0 := b.NewValue0(v.Pos, OpLOONG64BITREV4B, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpCom16 lowers bitwise complement as NOR with zero
// (LOONG64 has no dedicated NOT instruction; NOR(0, x) == ^x).
func rewriteValueLOONG64_OpCom16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Com16 x)
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v_0
		v.reset(OpLOONG64NOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
}

// rewriteValueLOONG64_OpCom32: same NOR-with-zero lowering as Com16.
func rewriteValueLOONG64_OpCom32(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Com32 x)
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v_0
		v.reset(OpLOONG64NOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
}

// rewriteValueLOONG64_OpCom64: same NOR-with-zero lowering as Com16.
func rewriteValueLOONG64_OpCom64(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Com64 x)
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v_0
		v.reset(OpLOONG64NOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
}

// rewriteValueLOONG64_OpCom8: same NOR-with-zero lowering as Com16.
func rewriteValueLOONG64_OpCom8(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Com8 x)
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v_0
		v.reset(OpLOONG64NOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueLOONG64_OpCondSelect lowers a branchless select using the
// LoongArch conditional-mask instructions: MASKEQZ yields x when cond is
// nonzero (else 0), MASKNEZ yields y when cond is zero (else 0); exactly
// one operand of the OR is nonzero, so the OR picks the selected value.
func rewriteValueLOONG64_OpCondSelect(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (CondSelect <t> x y cond)
	// result: (OR (MASKEQZ <t> x cond) (MASKNEZ <t> y cond))
	for {
		t := v.Type
		x := v_0
		y := v_1
		cond := v_2
		v.reset(OpLOONG64OR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MASKEQZ, t)
		v0.AddArg2(x, cond)
		v1 := b.NewValue0(v.Pos, OpLOONG64MASKNEZ, t)
		v1.AddArg2(y, cond)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpConst16 materializes a 16-bit constant as a
// 64-bit MOVVconst (all integer constants live in full-width registers).
func rewriteValueLOONG64_OpConst16(v *Value) bool {
	// match: (Const16 [val])
	// result: (MOVVconst [int64(val)])
	for {
		val := auxIntToInt16(v.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}

// rewriteValueLOONG64_OpConst32 materializes a 32-bit constant as MOVVconst.
func rewriteValueLOONG64_OpConst32(v *Value) bool {
	// match: (Const32 [val])
	// result: (MOVVconst [int64(val)])
	for {
		val := auxIntToInt32(v.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}

// rewriteValueLOONG64_OpConst32F materializes a float32 constant as
// MOVFconst; the aux is widened to float64 (the AuxInt container format).
func rewriteValueLOONG64_OpConst32F(v *Value) bool {
	// match: (Const32F [val])
	// result: (MOVFconst [float64(val)])
	for {
		val := auxIntToFloat32(v.AuxInt)
		v.reset(OpLOONG64MOVFconst)
		v.AuxInt = float64ToAuxInt(float64(val))
		return true
	}
}

// rewriteValueLOONG64_OpConst64 materializes a 64-bit constant as MOVVconst.
func rewriteValueLOONG64_OpConst64(v *Value) bool {
	// match: (Const64 [val])
	// result: (MOVVconst [int64(val)])
	for {
		val := auxIntToInt64(v.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}

// rewriteValueLOONG64_OpConst64F materializes a float64 constant as MOVDconst.
func rewriteValueLOONG64_OpConst64F(v *Value) bool {
	// match: (Const64F [val])
	// result: (MOVDconst [float64(val)])
	for {
		val := auxIntToFloat64(v.AuxInt)
		v.reset(OpLOONG64MOVDconst)
		v.AuxInt = float64ToAuxInt(float64(val))
		return true
	}
}

// rewriteValueLOONG64_OpConst8 materializes an 8-bit constant as MOVVconst.
func rewriteValueLOONG64_OpConst8(v *Value) bool {
	// match: (Const8 [val])
	// result: (MOVVconst [int64(val)])
	for {
		val := auxIntToInt8(v.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(val))
		return true
	}
}

// rewriteValueLOONG64_OpConstBool materializes a boolean constant as 0 or 1
// via b2i.
func rewriteValueLOONG64_OpConstBool(v *Value) bool {
	// match: (ConstBool [t])
	// result: (MOVVconst [int64(b2i(t))])
	for {
		t := auxIntToBool(v.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(b2i(t)))
		return true
	}
}

// rewriteValueLOONG64_OpConstNil materializes the nil pointer as constant 0.
func rewriteValueLOONG64_OpConstNil(v *Value) bool {
	// match: (ConstNil)
	// result: (MOVVconst [0])
	for {
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
}
// rewriteValueLOONG64_OpCtz16 lowers a 16-bit count-trailing-zeros by
// ORing in a guard bit at position 16 before the 64-bit CTZV, so a zero
// input yields 16 rather than whatever CTZV returns for zero.
func rewriteValueLOONG64_OpCtz16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Ctz16 x)
	// result: (CTZV (OR <typ.UInt64> x (MOVVconst [1<<16])))
	for {
		x := v_0
		v.reset(OpLOONG64CTZV)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(1 << 16)
		v0.AddArg2(x, v1)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueLOONG64_OpCtz8: same guard-bit trick as Ctz16, with the
// guard at position 8 so a zero byte yields 8.
func rewriteValueLOONG64_OpCtz8(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Ctz8 x)
	// result: (CTZV (OR <typ.UInt64> x (MOVVconst [1<<8])))
	for {
		x := v_0
		v.reset(OpLOONG64CTZV)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(1 << 8)
		v0.AddArg2(x, v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpDiv16 lowers 16-bit signed division by widening
// both operands to 64 bits with sign extension and using the 64-bit DIVV.
func rewriteValueLOONG64_OpDiv16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16 x y)
	// result: (DIVV (SignExt16to64 x) (SignExt16to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64DIVV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}

// rewriteValueLOONG64_OpDiv16u: unsigned variant — zero-extend both
// operands and use DIVVU.
func rewriteValueLOONG64_OpDiv16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16u x y)
	// result: (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64DIVVU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}

// rewriteValueLOONG64_OpDiv32: sign-extend both 32-bit operands to 64 bits,
// then DIVV.
func rewriteValueLOONG64_OpDiv32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div32 x y)
	// result: (DIVV (SignExt32to64 x) (SignExt32to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64DIVV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}

// rewriteValueLOONG64_OpDiv32u: zero-extend both 32-bit operands, then DIVVU.
func rewriteValueLOONG64_OpDiv32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div32u x y)
	// result: (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64DIVVU)
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}

// rewriteValueLOONG64_OpDiv64 maps 64-bit signed division directly onto
// DIVV — no widening needed.
func rewriteValueLOONG64_OpDiv64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Div64 x y)
	// result: (DIVV x y)
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64DIVV)
		v.AddArg2(x, y)
		return true
	}
}

// rewriteValueLOONG64_OpDiv8: sign-extend both 8-bit operands, then DIVV.
func rewriteValueLOONG64_OpDiv8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8 x y)
	// result: (DIVV (SignExt8to64 x) (SignExt8to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64DIVV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}

// rewriteValueLOONG64_OpDiv8u: zero-extend both 8-bit operands, then DIVVU.
func rewriteValueLOONG64_OpDiv8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8u x y)
	// result: (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64DIVVU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpEq16 lowers 16-bit equality as (x^y) <u 1: the
// operands are zero-extended, XORed (zero iff equal), and compared against
// constant 1 with unsigned set-on-greater-than (SGTU 1 z == z < 1 == z == 0).
func rewriteValueLOONG64_OpEq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq16 x y)
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}

// rewriteValueLOONG64_OpEq32: same XOR-is-zero equality as Eq16, with
// 32-to-64 zero extension.
func rewriteValueLOONG64_OpEq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq32 x y)
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}

// rewriteValueLOONG64_OpEq32F lowers float32 equality via the FP compare
// instruction CMPEQF, reading the result out of the FP flag register.
func rewriteValueLOONG64_OpEq32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Eq32F x y)
	// result: (FPFlagTrue (CMPEQF x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpLOONG64CMPEQF, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueLOONG64_OpEq64: XOR-is-zero equality at full width — no
// extension needed.
func rewriteValueLOONG64_OpEq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq64 x y)
	// result: (SGTU (MOVVconst [1]) (XOR x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}

// rewriteValueLOONG64_OpEq64F lowers float64 equality via CMPEQD + the FP
// flag register, mirroring Eq32F.
func rewriteValueLOONG64_OpEq64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Eq64F x y)
	// result: (FPFlagTrue (CMPEQD x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpLOONG64CMPEQD, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueLOONG64_OpEq8: XOR-is-zero equality with 8-to-64 zero
// extension.
func rewriteValueLOONG64_OpEq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq8 x y)
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}

// rewriteValueLOONG64_OpEqB lowers boolean equality: booleans are 0/1, so
// x == y is 1 ^ (x ^ y).
func rewriteValueLOONG64_OpEqB(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (EqB x y)
	// result: (XOR (MOVVconst [1]) (XOR <typ.Bool> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.Bool)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}

// rewriteValueLOONG64_OpEqPtr: pointer equality uses the same full-width
// XOR-is-zero lowering as Eq64.
func rewriteValueLOONG64_OpEqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (EqPtr x y)
	// result: (SGTU (MOVVconst [1]) (XOR x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpIsInBounds lowers idx < len (unsigned) directly to
// SGTU len idx.
func rewriteValueLOONG64_OpIsInBounds(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (IsInBounds idx len)
	// result: (SGTU len idx)
	for {
		idx := v_0
		len := v_1
		v.reset(OpLOONG64SGTU)
		v.AddArg2(len, idx)
		return true
	}
}

// rewriteValueLOONG64_OpIsNonNil lowers a nil check to ptr >u 0.
func rewriteValueLOONG64_OpIsNonNil(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (IsNonNil ptr)
	// result: (SGTU ptr (MOVVconst [0]))
	for {
		ptr := v_0
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg2(ptr, v0)
		return true
	}
}

// rewriteValueLOONG64_OpIsSliceInBounds lowers idx <= len (unsigned) as
// the negation of idx > len: 1 XOR (SGTU idx len).
func rewriteValueLOONG64_OpIsSliceInBounds(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (IsSliceInBounds idx len)
	// result: (XOR (MOVVconst [1]) (SGTU idx len))
	for {
		idx := v_0
		len := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v1.AddArg2(idx, len)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLOONG64ADDD fuses double-precision add with a
// multiply into FMA forms: MULD+ADDD -> FMADDD and ADDD of a negated
// multiply -> FNMSUBD. Both rules are gated on useFMA, which decides
// whether fusing (and the resulting single rounding) is permitted; the
// commutative-argument loops try both operand orders.
func rewriteValueLOONG64_OpLOONG64ADDD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ADDD (MULD x y) z)
	// cond: z.Block.Func.useFMA(v)
	// result: (FMADDD x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64MULD {
				continue
			}
			y := v_0.Args[1]
			x := v_0.Args[0]
			z := v_1
			if !(z.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpLOONG64FMADDD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (ADDD z (NEGD (MULD x y)))
	// cond: z.Block.Func.useFMA(v)
	// result: (FNMSUBD x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			z := v_0
			if v_1.Op != OpLOONG64NEGD {
				continue
			}
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpLOONG64MULD {
				continue
			}
			y := v_1_0.Args[1]
			x := v_1_0.Args[0]
			if !(z.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpLOONG64FNMSUBD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	return false
}

// rewriteValueLOONG64_OpLOONG64ADDF is the single-precision mirror of the
// ADDD rules above: MULF+ADDF -> FMADDF, ADDF of NEGF(MULF) -> FNMSUBF,
// both gated on useFMA.
func rewriteValueLOONG64_OpLOONG64ADDF(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ADDF (MULF x y) z)
	// cond: z.Block.Func.useFMA(v)
	// result: (FMADDF x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64MULF {
				continue
			}
			y := v_0.Args[1]
			x := v_0.Args[0]
			z := v_1
			if !(z.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpLOONG64FMADDF)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (ADDF z (NEGF (MULF x y)))
	// cond: z.Block.Func.useFMA(v)
	// result: (FNMSUBF x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			z := v_0
			if v_1.Op != OpLOONG64NEGF {
				continue
			}
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpLOONG64MULF {
				continue
			}
			y := v_1_0.Args[1]
			x := v_1_0.Args[0]
			if !(z.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpLOONG64FNMSUBF)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64ADDV applies peephole rules to ADDV, tried
// in order:
//   - four byte-swap patterns (shift-high + shift-low combinations) are
//     recognized and replaced with the REVB2H/REVB4H byte-reverse ops;
//   - ADDV with a small constant folds into ADDVconst (skipped for pointer
//     typed constants);
//   - ADDV of a single-use small left shift (1..4) becomes ADDshiftLLV;
//   - ADDV x (NEGV y) becomes SUBV x y.
// Each rule's inner _i0/_i1/_i2 loops retry with the commutative operands
// swapped.
func rewriteValueLOONG64_OpLOONG64ADDV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (ADDV <typ.UInt16> (SRLVconst [8] <typ.UInt16> x) (SLLVconst [8] <typ.UInt16> x))
	// result: (REVB2H x)
	for {
		if v.Type != typ.UInt16 {
			break
		}
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64SRLVconst || v_0.Type != typ.UInt16 || auxIntToInt64(v_0.AuxInt) != 8 {
				continue
			}
			x := v_0.Args[0]
			if v_1.Op != OpLOONG64SLLVconst || v_1.Type != typ.UInt16 || auxIntToInt64(v_1.AuxInt) != 8 || x != v_1.Args[0] {
				continue
			}
			v.reset(OpLOONG64REVB2H)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (ADDV (SRLconst [8] (ANDconst [c1] x)) (SLLconst [8] (ANDconst [c2] x)))
	// cond: uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff
	// result: (REVB2H x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64SRLconst || auxIntToInt64(v_0.AuxInt) != 8 {
				continue
			}
			v_0_0 := v_0.Args[0]
			if v_0_0.Op != OpLOONG64ANDconst {
				continue
			}
			c1 := auxIntToInt64(v_0_0.AuxInt)
			x := v_0_0.Args[0]
			if v_1.Op != OpLOONG64SLLconst || auxIntToInt64(v_1.AuxInt) != 8 {
				continue
			}
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpLOONG64ANDconst {
				continue
			}
			c2 := auxIntToInt64(v_1_0.AuxInt)
			if x != v_1_0.Args[0] || !(uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff) {
				continue
			}
			v.reset(OpLOONG64REVB2H)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (ADDV (SRLVconst [8] (AND (MOVVconst [c1]) x)) (SLLVconst [8] (AND (MOVVconst [c2]) x)))
	// cond: uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff
	// result: (REVB4H x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64SRLVconst || auxIntToInt64(v_0.AuxInt) != 8 {
				continue
			}
			v_0_0 := v_0.Args[0]
			if v_0_0.Op != OpLOONG64AND {
				continue
			}
			_ = v_0_0.Args[1]
			v_0_0_0 := v_0_0.Args[0]
			v_0_0_1 := v_0_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0_0, v_0_0_1 = _i1+1, v_0_0_1, v_0_0_0 {
				if v_0_0_0.Op != OpLOONG64MOVVconst {
					continue
				}
				c1 := auxIntToInt64(v_0_0_0.AuxInt)
				x := v_0_0_1
				if v_1.Op != OpLOONG64SLLVconst || auxIntToInt64(v_1.AuxInt) != 8 {
					continue
				}
				v_1_0 := v_1.Args[0]
				if v_1_0.Op != OpLOONG64AND {
					continue
				}
				_ = v_1_0.Args[1]
				v_1_0_0 := v_1_0.Args[0]
				v_1_0_1 := v_1_0.Args[1]
				for _i2 := 0; _i2 <= 1; _i2, v_1_0_0, v_1_0_1 = _i2+1, v_1_0_1, v_1_0_0 {
					if v_1_0_0.Op != OpLOONG64MOVVconst {
						continue
					}
					c2 := auxIntToInt64(v_1_0_0.AuxInt)
					if x != v_1_0_1 || !(uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff) {
						continue
					}
					v.reset(OpLOONG64REVB4H)
					v.AddArg(x)
					return true
				}
			}
		}
		break
	}
	// match: (ADDV (SRLVconst [8] (AND (MOVVconst [c1]) x)) (SLLVconst [8] (ANDconst [c2] x)))
	// cond: uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff
	// result: (REVB4H (ANDconst <x.Type> [0xffffffff] x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64SRLVconst || auxIntToInt64(v_0.AuxInt) != 8 {
				continue
			}
			v_0_0 := v_0.Args[0]
			if v_0_0.Op != OpLOONG64AND {
				continue
			}
			_ = v_0_0.Args[1]
			v_0_0_0 := v_0_0.Args[0]
			v_0_0_1 := v_0_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0_0, v_0_0_1 = _i1+1, v_0_0_1, v_0_0_0 {
				if v_0_0_0.Op != OpLOONG64MOVVconst {
					continue
				}
				c1 := auxIntToInt64(v_0_0_0.AuxInt)
				x := v_0_0_1
				if v_1.Op != OpLOONG64SLLVconst || auxIntToInt64(v_1.AuxInt) != 8 {
					continue
				}
				v_1_0 := v_1.Args[0]
				if v_1_0.Op != OpLOONG64ANDconst {
					continue
				}
				c2 := auxIntToInt64(v_1_0.AuxInt)
				if x != v_1_0.Args[0] || !(uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff) {
					continue
				}
				v.reset(OpLOONG64REVB4H)
				v0 := b.NewValue0(v.Pos, OpLOONG64ANDconst, x.Type)
				v0.AuxInt = int64ToAuxInt(0xffffffff)
				v0.AddArg(x)
				v.AddArg(v0)
				return true
			}
		}
		break
	}
	// match: (ADDV x (MOVVconst <t> [c]))
	// cond: is32Bit(c) && !t.IsPtr()
	// result: (ADDVconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64MOVVconst {
				continue
			}
			t := v_1.Type
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c) && !t.IsPtr()) {
				continue
			}
			v.reset(OpLOONG64ADDVconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (ADDV x0 x1:(SLLVconst [c] y))
	// cond: x1.Uses == 1 && c > 0 && c <= 4
	// result: (ADDshiftLLV x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpLOONG64SLLVconst {
				continue
			}
			c := auxIntToInt64(x1.AuxInt)
			y := x1.Args[0]
			if !(x1.Uses == 1 && c > 0 && c <= 4) {
				continue
			}
			v.reset(OpLOONG64ADDshiftLLV)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (ADDV x (NEGV y))
	// result: (SUBV x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64NEGV {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpLOONG64SUBV)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64ADDVconst applies the generated rewrite rules
// for (ADDVconst ...) values: offset folding into MOVVaddr, identity removal
// of [0], constant folding, merging of chained ADDVconst/SUBVconst, and
// conversion to ADDV16const when the low 16 bits are zero.
// Rules are tried in source order; the first match mutates v in place and
// returns true. Returns false if no rule applies.
func rewriteValueLOONG64_OpLOONG64ADDVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ADDVconst [off1] (MOVVaddr [off2] {sym} ptr))
	// cond: is32Bit(off1+int64(off2))
	// result: (MOVVaddr [int32(off1)+int32(off2)] {sym} ptr)
	for {
		off1 := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		if !(is32Bit(off1 + int64(off2))) {
			break
		}
		v.reset(OpLOONG64MOVVaddr)
		v.AuxInt = int32ToAuxInt(int32(off1) + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg(ptr)
		return true
	}
	// match: (ADDVconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ADDVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [c+d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(c + d)
		return true
	}
	// match: (ADDVconst [c] (ADDVconst [d] x))
	// cond: is32Bit(c+d)
	// result: (ADDVconst [c+d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(c + d)) {
			break
		}
		v.reset(OpLOONG64ADDVconst)
		v.AuxInt = int64ToAuxInt(c + d)
		v.AddArg(x)
		return true
	}
	// match: (ADDVconst [c] (SUBVconst [d] x))
	// cond: is32Bit(c-d)
	// result: (ADDVconst [c-d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64SUBVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(c - d)) {
			break
		}
		v.reset(OpLOONG64ADDVconst)
		v.AuxInt = int64ToAuxInt(c - d)
		v.AddArg(x)
		return true
	}
	// match: (ADDVconst [c] x)
	// cond: is32Bit(c) && c&0xffff == 0 && c != 0
	// result: (ADDV16const [c] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		x := v_0
		if !(is32Bit(c) && c&0xffff == 0 && c != 0) {
			break
		}
		v.reset(OpLOONG64ADDV16const)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64ADDshiftLLV applies the generated rewrite rule
// for (ADDshiftLLV ...) values: when the shifted operand is a constant and
// the shifted result still fits in 12 bits, fold it back into an ADDVconst.
// Returns true if the rule rewrote v, false otherwise.
func rewriteValueLOONG64_OpLOONG64ADDshiftLLV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ADDshiftLLV x (MOVVconst [c]) [d])
	// cond: is12Bit(c<<d)
	// result: (ADDVconst x [c<<d])
	for {
		d := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(is12Bit(c << d)) {
			break
		}
		v.reset(OpLOONG64ADDVconst)
		v.AuxInt = int64ToAuxInt(c << d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64AND applies the generated rewrite rules for
// (AND ...) values: fold a constant operand into ANDconst, simplify x&x to x,
// and combine AND with a bitwise-NOT operand into ANDN.
// Rules are tried in source order; the first match mutates v and returns true.
func rewriteValueLOONG64_OpLOONG64AND(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (AND x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ANDconst [c] x)
	for {
		// AND is commutative: the loop tries both argument orders by
		// swapping v_0 and v_1 on the second iteration.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c)) {
				continue
			}
			v.reset(OpLOONG64ANDconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (AND x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (AND x (NORconst [0] y))
	// result: (ANDN x y)
	for {
		// (NORconst [0] y) is ^y, so AND with it becomes and-not (ANDN).
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64NORconst || auxIntToInt64(v_1.AuxInt) != 0 {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpLOONG64ANDN)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64ANDconst applies the generated rewrite rules
// for (ANDconst ...) values: [0] annihilates to constant zero, [-1] is the
// identity, and constants fold through MOVVconst and nested ANDconst.
// Rules are tried in source order; the first match mutates v and returns true.
func rewriteValueLOONG64_OpLOONG64ANDconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ANDconst [0] _)
	// result: (MOVVconst [0])
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (ANDconst [-1] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ANDconst [c] (MOVVconst [d]))
	// result: (MOVVconst [c&d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(c & d)
		return true
	}
	// match: (ANDconst [c] (ANDconst [d] x))
	// result: (ANDconst [c&d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64ANDconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		v.reset(OpLOONG64ANDconst)
		v.AuxInt = int64ToAuxInt(c & d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64DIVV applies the generated rewrite rule for
// (DIVV ...) values: fold signed division of two constants at compile time.
// The d != 0 guard keeps the compiler from evaluating a division by zero;
// that case is left for runtime. Returns true if v was rewritten.
func rewriteValueLOONG64_OpLOONG64DIVV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (DIVV (MOVVconst [c]) (MOVVconst [d]))
	// cond: d != 0
	// result: (MOVVconst [c/d])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(c / d)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64DIVVU applies the generated rewrite rules for
// (DIVVU ...) values: division by 1 is the identity, division by a power of
// two becomes a logical right shift, and division of two constants folds
// (unsigned semantics, via the uint64 conversions).
// Rules are tried in source order; the first match mutates v and returns true.
func rewriteValueLOONG64_OpLOONG64DIVVU(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (DIVVU x (MOVVconst [1]))
	// result: x
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (DIVVU x (MOVVconst [c]))
	// cond: isPowerOfTwo(c)
	// result: (SRLVconst [log64(c)] x)
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpLOONG64SRLVconst)
		v.AuxInt = int64ToAuxInt(log64(c))
		v.AddArg(x)
		return true
	}
	// match: (DIVVU (MOVVconst [c]) (MOVVconst [d]))
	// cond: d != 0
	// result: (MOVVconst [int64(uint64(c)/uint64(d))])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) / uint64(d)))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64LoweredPanicBoundsCR applies the generated
// rewrite rule for (LoweredPanicBoundsCR ...): when the remaining register
// operand is itself a constant, lower to the fully-constant CC form, packing
// both bound values into a PanicBoundsCC aux. Returns true if v was rewritten.
func rewriteValueLOONG64_OpLOONG64LoweredPanicBoundsCR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredPanicBoundsCR [kind] {p} (MOVVconst [c]) mem)
	// result: (LoweredPanicBoundsCC [kind] {PanicBoundsCC{Cx:p.C, Cy:c}} mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		p := auxToPanicBoundsC(v.Aux)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		mem := v_1
		v.reset(OpLOONG64LoweredPanicBoundsCC)
		v.AuxInt = int64ToAuxInt(kind)
		v.Aux = panicBoundsCCToAux(PanicBoundsCC{Cx: p.C, Cy: c})
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64LoweredPanicBoundsRC applies the generated
// rewrite rule for (LoweredPanicBoundsRC ...): the mirror of the CR case —
// when the register operand is a constant, lower to the fully-constant CC
// form with Cx taken from the operand and Cy from the existing aux.
// Returns true if v was rewritten.
func rewriteValueLOONG64_OpLOONG64LoweredPanicBoundsRC(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredPanicBoundsRC [kind] {p} (MOVVconst [c]) mem)
	// result: (LoweredPanicBoundsCC [kind] {PanicBoundsCC{Cx:c, Cy:p.C}} mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		p := auxToPanicBoundsC(v.Aux)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		mem := v_1
		v.reset(OpLOONG64LoweredPanicBoundsCC)
		v.AuxInt = int64ToAuxInt(kind)
		v.Aux = panicBoundsCCToAux(PanicBoundsCC{Cx: c, Cy: p.C})
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64LoweredPanicBoundsRR applies the generated
// rewrite rules for (LoweredPanicBoundsRR ...): when either operand is a
// constant, step down to the corresponding single-register form (RC when the
// second operand is constant, CR when the first is), moving the constant
// into a PanicBoundsC aux. Rules are tried in order; returns true on rewrite.
func rewriteValueLOONG64_OpLOONG64LoweredPanicBoundsRR(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (LoweredPanicBoundsRR [kind] x (MOVVconst [c]) mem)
	// result: (LoweredPanicBoundsRC [kind] x {PanicBoundsC{C:c}} mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		v.reset(OpLOONG64LoweredPanicBoundsRC)
		v.AuxInt = int64ToAuxInt(kind)
		v.Aux = panicBoundsCToAux(PanicBoundsC{C: c})
		v.AddArg2(x, mem)
		return true
	}
	// match: (LoweredPanicBoundsRR [kind] (MOVVconst [c]) y mem)
	// result: (LoweredPanicBoundsCR [kind] {PanicBoundsC{C:c}} y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		y := v_1
		mem := v_2
		v.reset(OpLOONG64LoweredPanicBoundsCR)
		v.AuxInt = int64ToAuxInt(kind)
		v.Aux = panicBoundsCToAux(PanicBoundsC{C: c})
		v.AddArg2(y, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MASKEQZ applies the generated rewrite rules
// for (MASKEQZ ...) values: a zero first operand or a zero condition yields
// constant zero, and a known-nonzero constant condition selects the first
// operand unchanged.
// Rules are tried in source order; the first match mutates v and returns true.
func rewriteValueLOONG64_OpLOONG64MASKEQZ(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MASKEQZ (MOVVconst [0]) cond)
	// result: (MOVVconst [0])
	for {
		if v_0.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (MASKEQZ x (MOVVconst [c]))
	// cond: c == 0
	// result: (MOVVconst [0])
	for {
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(c == 0) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (MASKEQZ x (MOVVconst [c]))
	// cond: c != 0
	// result: x
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(c != 0) {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MASKNEZ applies the generated rewrite rule for
// (MASKNEZ ...) values: masking a constant zero yields constant zero
// regardless of the condition. Returns true if v was rewritten.
func rewriteValueLOONG64_OpLOONG64MASKNEZ(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MASKNEZ (MOVVconst [0]) cond)
	// result: (MOVVconst [0])
	for {
		if v_0.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVBUload applies the generated rewrite rules
// for (MOVBUload ...) values: store-to-load forwarding, folding of constant
// offsets and symbol addresses into the load's AuxInt/Aux, conversion to the
// register-indexed form, and constant-folding of loads from read-only symbol
// data. Rules are tried in source order; the first match mutates v and
// returns true.
func rewriteValueLOONG64_OpLOONG64MOVBUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVBUload [off] {sym} ptr (MOVBstore [off] {sym} ptr x _))
	// result: (MOVBUreg x)
	for {
		// Store-to-load forwarding: a load from the exact address just
		// stored to can reuse the stored value (zero-extended).
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVBstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpLOONG64MOVBUreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBUload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVBUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVBUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBUload [off] {sym} (ADDV ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVBUloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVBUloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVBUload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVBUloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDshiftLLV {
			break
		}
		shift := auxIntToInt64(v_0.AuxInt)
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVBUloadidx)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
		v0.AuxInt = int64ToAuxInt(shift)
		v0.AddArg(idx)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (MOVBUload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(read8(sym, int64(off)))])
	for {
		// Loads from read-only data can be resolved at compile time.
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(read8(sym, int64(off))))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVBUloadidx applies the generated rewrite
// rules for (MOVBUloadidx ...) values: when either the index or the base is
// a 32-bit constant, fold it into the offset of a plain MOVBUload.
// Rules are tried in source order; the first match mutates v and returns true.
func rewriteValueLOONG64_OpLOONG64MOVBUloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBUloadidx ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVBUload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVBUload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBUloadidx (MOVVconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVBUload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVBUload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVBUreg applies the generated rewrite rules
// for (MOVBUreg ...) (zero-extend byte) values: combine with a right shift
// into a bit-field extract, drop the extension when the operand is already
// known to fit in a byte (comparison results, byte loads, prior extensions,
// small constants/masks), and fold through constants and ANDconst.
// Rules are tried in source order; the first match mutates v and returns true.
func rewriteValueLOONG64_OpLOONG64MOVBUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVBUreg (SRLVconst [rc] x))
	// cond: rc < 8
	// result: (BSTRPICKV [rc + (7+rc)<<6] x)
	for {
		// Shift-then-zero-extend-byte extracts bits [rc+7:rc]; AuxInt
		// packs both bounds (low bits = lsb, upper bits = msb —
		// presumably per the BSTRPICKV op definition; confirm there).
		if v_0.Op != OpLOONG64SRLVconst {
			break
		}
		rc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(rc < 8) {
			break
		}
		v.reset(OpLOONG64BSTRPICKV)
		v.AuxInt = int64ToAuxInt(rc + (7+rc)<<6)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(SGT _ _))
	// result: x
	for {
		// Comparison results are 0 or 1, so zero-extension is a no-op.
		x := v_0
		if x.Op != OpLOONG64SGT {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(SGTU _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpLOONG64SGTU {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(XOR (MOVVconst [1]) (SGT _ _)))
	// result: x
	for {
		// The negation of a 0/1 comparison result is still 0/1.
		x := v_0
		if x.Op != OpLOONG64XOR {
			break
		}
		_ = x.Args[1]
		x_0 := x.Args[0]
		x_1 := x.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, x_0, x_1 = _i0+1, x_1, x_0 {
			if x_0.Op != OpLOONG64MOVVconst || auxIntToInt64(x_0.AuxInt) != 1 || x_1.Op != OpLOONG64SGT {
				continue
			}
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (MOVBUreg x:(XOR (MOVVconst [1]) (SGTU _ _)))
	// result: x
	for {
		x := v_0
		if x.Op != OpLOONG64XOR {
			break
		}
		_ = x.Args[1]
		x_0 := x.Args[0]
		x_1 := x.Args[1]
		for _i0 := 0; _i0 <= 1; _i0, x_0, x_1 = _i0+1, x_1, x_0 {
			if x_0.Op != OpLOONG64MOVVconst || auxIntToInt64(x_0.AuxInt) != 1 || x_1.Op != OpLOONG64SGTU {
				continue
			}
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (MOVBUreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		// An unsigned byte load already zero-extends; a register move
		// suffices.
		x := v_0
		if x.Op != OpLOONG64MOVBUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUloadidx _ _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUloadidx {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (SLLVconst [lc] x))
	// cond: lc >= 8
	// result: (MOVVconst [0])
	for {
		// Shifting left by >= 8 clears the low byte entirely.
		if v_0.Op != OpLOONG64SLLVconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		if !(lc >= 8) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (MOVBUreg (MOVVconst [c]))
	// result: (MOVVconst [int64(uint8(c))])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint8(c)))
		return true
	}
	// match: (MOVBUreg (ANDconst [c] x))
	// result: (ANDconst [c&0xff] x)
	for {
		if v_0.Op != OpLOONG64ANDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		v.reset(OpLOONG64ANDconst)
		v.AuxInt = int64ToAuxInt(c & 0xff)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(SRLconst [c] y))
	// cond: c >= 24
	// result: x
	for {
		// A 32-bit logical shift right by >= 24 leaves at most 8
		// significant bits, so the extension is redundant.
		x := v_0
		if x.Op != OpLOONG64SRLconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 24) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(ANDconst [c] y))
	// cond: c >= 0 && int64(uint8(c)) == c
	// result: x
	for {
		// Masking with a constant that fits in a byte already bounds
		// the result to [0, 255].
		x := v_0
		if x.Op != OpLOONG64ANDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(uint8(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVBload applies the generated rewrite rules
// for (MOVBload ...) (sign-extending byte load) values: store-to-load
// forwarding, folding of constant offsets and symbol addresses into the
// load's AuxInt/Aux, conversion to the register-indexed form, and
// constant-folding of loads from read-only symbol data.
// Rules are tried in source order; the first match mutates v and returns true.
func rewriteValueLOONG64_OpLOONG64MOVBload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVBload [off] {sym} ptr (MOVBstore [off] {sym} ptr x _))
	// result: (MOVBreg x)
	for {
		// Store-to-load forwarding: reuse the just-stored value,
		// sign-extended.
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVBstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpLOONG64MOVBreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVBload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVBload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBload [off] {sym} (ADDV ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVBloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVBloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVBload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVBloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDshiftLLV {
			break
		}
		shift := auxIntToInt64(v_0.AuxInt)
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVBloadidx)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
		v0.AuxInt = int64ToAuxInt(shift)
		v0.AddArg(idx)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (MOVBload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(int8(read8(sym, int64(off))))])
	for {
		// Loads from read-only data resolve at compile time; int8
		// conversion supplies the sign extension.
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(int8(read8(sym, int64(off)))))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVBloadidx applies the generated rewrite
// rules for (MOVBloadidx ...) values: when either the index or the base is
// a 32-bit constant, fold it into the offset of a plain MOVBload.
// Rules are tried in source order; the first match mutates v and returns true.
func rewriteValueLOONG64_OpLOONG64MOVBloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBloadidx ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVBload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVBload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVBloadidx (MOVVconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVBload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVBload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVBreg applies the generated rewrite rules
// for (MOVBreg ...) (sign-extend byte) values: drop the extension when the
// operand is already a sign-extending byte load or a prior MOVBreg, fold
// constants, and skip the extension when a mask already bounds the value to
// a non-negative byte range.
// Rules are tried in source order; the first match mutates v and returns true.
func rewriteValueLOONG64_OpLOONG64MOVBreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVBreg x:(MOVBload _ _))
	// result: (MOVVreg x)
	for {
		// A signed byte load already sign-extends; a register move
		// suffices.
		x := v_0
		if x.Op != OpLOONG64MOVBload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBloadidx _ _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBloadidx {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg (MOVVconst [c]))
	// result: (MOVVconst [int64(int8(c))])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(int8(c)))
		return true
	}
	// match: (MOVBreg x:(ANDconst [c] y))
	// cond: c >= 0 && int64(int8(c)) == c
	// result: x
	for {
		// A mask constant within [0, 127] bounds the result to the
		// non-negative int8 range, so sign extension is a no-op.
		x := v_0
		if x.Op != OpLOONG64ANDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(int8(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVBstore applies the generated rewrite rules
// for (MOVBstore ...) values: folding constant offsets and symbol addresses
// into the store's aux fields, dropping redundant sign/zero extensions of the
// stored value (a byte store only reads the low 8 bits), and lowering
// register+register addressing to the indexed form MOVBstoreidx.
// It returns true if v was rewritten in place.
func rewriteValueLOONG64_OpLOONG64MOVBstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVBstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		// ADDVconst carries an int64 aux, unlike the store's int32 offset.
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVBstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVBstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// The next six rules strip MOV{B,BU,H,HU,W,WU}reg extensions from the
	// stored value: storing a single byte makes any widening of the source
	// register irrelevant.
	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVBreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVBUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} (ADDV ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVBstoreidx ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVBstoreidx)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVBstore [off] {sym} (ADDshiftLLV [shift] ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVBstoreidx ptr (SLLVconst <typ.Int64> [shift] idx) val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDshiftLLV {
			break
		}
		shift := auxIntToInt64(v_0.AuxInt)
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVBstoreidx)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
		v0.AuxInt = int64ToAuxInt(shift)
		v0.AddArg(idx)
		v.AddArg4(ptr, v0, val, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVBstoreidx applies the generated rewrite
// rules for (MOVBstoreidx ...) values: when either address operand is a
// small (32-bit) constant, fold it back into the offset of a plain
// MOVBstore, treating the remaining operand as the base pointer.
// It returns true if v was rewritten in place.
func rewriteValueLOONG64_OpLOONG64MOVBstoreidx(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBstoreidx ptr (MOVVconst [c]) val mem)
	// cond: is32Bit(c)
	// result: (MOVBstore [int32(c)] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVBstoreidx (MOVVconst [c]) idx val mem)
	// cond: is32Bit(c)
	// result: (MOVBstore [int32(c)] idx val mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(idx, val, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVDF applies the generated rewrite rules for
// (MOVDF ...) (double-to-float conversion) values: when the double operand
// is ABSD or SQRTD of a value that was itself widened from single precision
// (MOVFD), perform the operation directly in single precision (ABSF/SQRTF),
// eliminating both conversions. It returns true if v was rewritten in place.
func rewriteValueLOONG64_OpLOONG64MOVDF(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVDF (ABSD (MOVFD x)))
	// result: (ABSF x)
	for {
		if v_0.Op != OpLOONG64ABSD {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpLOONG64MOVFD {
			break
		}
		x := v_0_0.Args[0]
		v.reset(OpLOONG64ABSF)
		v.AddArg(x)
		return true
	}
	// match: (MOVDF (SQRTD (MOVFD x)))
	// result: (SQRTF x)
	for {
		if v_0.Op != OpLOONG64SQRTD {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpLOONG64MOVFD {
			break
		}
		x := v_0_0.Args[0]
		v.reset(OpLOONG64SQRTF)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVDload applies the generated rewrite rules
// for (MOVDload ...) (float64 load) values: forwarding a just-stored GP
// value through a GP->FP register move instead of a memory round trip,
// folding constant offsets and symbol addresses into the load's aux fields,
// and lowering register+register addressing to the indexed form MOVDloadidx.
// It returns true if v was rewritten in place.
func rewriteValueLOONG64_OpLOONG64MOVDload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVDload [off] {sym} ptr (MOVVstore [off] {sym} ptr val _))
	// result: (MOVVgpfp val)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		val := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpLOONG64MOVVgpfp)
		v.AddArg(val)
		return true
	}
	// match: (MOVDload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVDload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		// ADDVconst carries an int64 aux, unlike the load's int32 offset.
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVDload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVDload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVDload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVDload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVDload [off] {sym} (ADDV ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVDloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVDloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVDload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVDloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDshiftLLV {
			break
		}
		shift := auxIntToInt64(v_0.AuxInt)
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVDloadidx)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
		v0.AuxInt = int64ToAuxInt(shift)
		v0.AddArg(idx)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVDloadidx applies the generated rewrite
// rules for (MOVDloadidx ...) values: when either address operand is a
// small (32-bit) constant, fold it back into the offset of a plain
// MOVDload, treating the remaining operand as the base pointer.
// It returns true if v was rewritten in place.
func rewriteValueLOONG64_OpLOONG64MOVDloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVDloadidx ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVDload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVDload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVDloadidx (MOVVconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVDload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVDload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVDstore applies the generated rewrite rules
// for (MOVDstore ...) (float64 store) values: storing a GP->FP moved value
// directly from the GP register (MOVVstore), folding constant offsets and
// symbol addresses into the store's aux fields, and lowering
// register+register addressing to the indexed form MOVDstoreidx.
// It returns true if v was rewritten in place.
func rewriteValueLOONG64_OpLOONG64MOVDstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVDstore [off] {sym} ptr (MOVVgpfp val) mem)
	// result: (MOVVstore [off] {sym} ptr val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVgpfp {
			break
		}
		val := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVDstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVDstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		// ADDVconst carries an int64 aux, unlike the store's int32 offset.
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVDstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVDstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVDstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVDstore [off] {sym} (ADDV ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVDstoreidx ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVDstoreidx)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVDstore [off] {sym} (ADDshiftLLV [shift] ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVDstoreidx ptr (SLLVconst <typ.Int64> [shift] idx) val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDshiftLLV {
			break
		}
		shift := auxIntToInt64(v_0.AuxInt)
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVDstoreidx)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
		v0.AuxInt = int64ToAuxInt(shift)
		v0.AddArg(idx)
		v.AddArg4(ptr, v0, val, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVDstoreidx applies the generated rewrite
// rules for (MOVDstoreidx ...) values: when either address operand is a
// small (32-bit) constant, fold it back into the offset of a plain
// MOVDstore, treating the remaining operand as the base pointer.
// It returns true if v was rewritten in place.
func rewriteValueLOONG64_OpLOONG64MOVDstoreidx(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVDstoreidx ptr (MOVVconst [c]) val mem)
	// cond: is32Bit(c)
	// result: (MOVDstore [int32(c)] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVDstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVDstoreidx (MOVVconst [c]) idx val mem)
	// cond: is32Bit(c)
	// result: (MOVDstore [int32(c)] idx val mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVDstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(idx, val, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVFload applies the generated rewrite rules
// for (MOVFload ...) (float32 load) values: forwarding a just-stored GP
// value through a GP->FP register move instead of a memory round trip,
// folding constant offsets and symbol addresses into the load's aux fields,
// and lowering register+register addressing to the indexed form MOVFloadidx.
// It returns true if v was rewritten in place.
func rewriteValueLOONG64_OpLOONG64MOVFload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVFload [off] {sym} ptr (MOVWstore [off] {sym} ptr val _))
	// result: (MOVWgpfp val)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		val := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpLOONG64MOVWgpfp)
		v.AddArg(val)
		return true
	}
	// match: (MOVFload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVFload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		// ADDVconst carries an int64 aux, unlike the load's int32 offset.
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVFload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVFload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVFload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVFload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVFload [off] {sym} (ADDV ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVFloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVFloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVFload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVFloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDshiftLLV {
			break
		}
		shift := auxIntToInt64(v_0.AuxInt)
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVFloadidx)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
		v0.AuxInt = int64ToAuxInt(shift)
		v0.AddArg(idx)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVFloadidx applies the generated rewrite
// rules for (MOVFloadidx ...) values: when either address operand is a
// small (32-bit) constant, fold it back into the offset of a plain
// MOVFload, treating the remaining operand as the base pointer.
// It returns true if v was rewritten in place.
func rewriteValueLOONG64_OpLOONG64MOVFloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVFloadidx ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVFload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVFload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVFloadidx (MOVVconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVFload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVFload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVFstore applies the generated rewrite rules
// for (MOVFstore ...) (float32 store) values: storing a GP->FP moved value
// directly from the GP register (MOVWstore), folding constant offsets and
// symbol addresses into the store's aux fields, and lowering
// register+register addressing to the indexed form MOVFstoreidx.
// It returns true if v was rewritten in place.
func rewriteValueLOONG64_OpLOONG64MOVFstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVFstore [off] {sym} ptr (MOVWgpfp val) mem)
	// result: (MOVWstore [off] {sym} ptr val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWgpfp {
			break
		}
		val := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVFstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVFstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		// ADDVconst carries an int64 aux, unlike the store's int32 offset.
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVFstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVFstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVFstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVFstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVFstore [off] {sym} (ADDV ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVFstoreidx ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVFstoreidx)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVFstore [off] {sym} (ADDshiftLLV [shift] ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVFstoreidx ptr (SLLVconst <typ.Int64> [shift] idx) val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDshiftLLV {
			break
		}
		shift := auxIntToInt64(v_0.AuxInt)
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVFstoreidx)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
		v0.AuxInt = int64ToAuxInt(shift)
		v0.AddArg(idx)
		v.AddArg4(ptr, v0, val, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVFstoreidx applies the generated rewrite
// rules for (MOVFstoreidx ...) values: when either address operand is a
// small (32-bit) constant, fold it back into the offset of a plain
// MOVFstore, treating the remaining operand as the base pointer.
// It returns true if v was rewritten in place.
func rewriteValueLOONG64_OpLOONG64MOVFstoreidx(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVFstoreidx ptr (MOVVconst [c]) val mem)
	// cond: is32Bit(c)
	// result: (MOVFstore [int32(c)] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVFstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVFstoreidx (MOVVconst [c]) idx val mem)
	// cond: is32Bit(c)
	// result: (MOVFstore [int32(c)] idx val mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVFstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(idx, val, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVHUload applies the generated rewrite rules
// for (MOVHUload ...) (zero-extending halfword load) values: forwarding a
// just-stored value via a register zero-extension instead of a memory round
// trip, folding constant offsets and symbol addresses into the load's aux
// fields, lowering register+register addressing to the indexed form
// MOVHUloadidx, and constant-folding loads from read-only symbol data.
// It returns true if v was rewritten in place.
func rewriteValueLOONG64_OpLOONG64MOVHUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVHUload [off] {sym} ptr (MOVHstore [off] {sym} ptr x _))
	// result: (MOVHUreg x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVHstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpLOONG64MOVHUreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHUload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		// ADDVconst carries an int64 aux, unlike the load's int32 offset.
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVHUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVHUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHUload [off] {sym} (ADDV ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHUloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVHUloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHUload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHUloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDshiftLLV {
			break
		}
		shift := auxIntToInt64(v_0.AuxInt)
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVHUloadidx)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
		v0.AuxInt = int64ToAuxInt(shift)
		v0.AddArg(idx)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (MOVHUload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))
		return true
	}
	return false
}
  3983  func rewriteValueLOONG64_OpLOONG64MOVHUloadidx(v *Value) bool {
  3984  	v_2 := v.Args[2]
  3985  	v_1 := v.Args[1]
  3986  	v_0 := v.Args[0]
  3987  	// match: (MOVHUloadidx ptr (MOVVconst [c]) mem)
  3988  	// cond: is32Bit(c)
  3989  	// result: (MOVHUload [int32(c)] ptr mem)
  3990  	for {
  3991  		ptr := v_0
  3992  		if v_1.Op != OpLOONG64MOVVconst {
  3993  			break
  3994  		}
  3995  		c := auxIntToInt64(v_1.AuxInt)
  3996  		mem := v_2
  3997  		if !(is32Bit(c)) {
  3998  			break
  3999  		}
  4000  		v.reset(OpLOONG64MOVHUload)
  4001  		v.AuxInt = int32ToAuxInt(int32(c))
  4002  		v.AddArg2(ptr, mem)
  4003  		return true
  4004  	}
  4005  	// match: (MOVHUloadidx (MOVVconst [c]) ptr mem)
  4006  	// cond: is32Bit(c)
  4007  	// result: (MOVHUload [int32(c)] ptr mem)
  4008  	for {
  4009  		if v_0.Op != OpLOONG64MOVVconst {
  4010  			break
  4011  		}
  4012  		c := auxIntToInt64(v_0.AuxInt)
  4013  		ptr := v_1
  4014  		mem := v_2
  4015  		if !(is32Bit(c)) {
  4016  			break
  4017  		}
  4018  		v.reset(OpLOONG64MOVHUload)
  4019  		v.AuxInt = int32ToAuxInt(int32(c))
  4020  		v.AddArg2(ptr, mem)
  4021  		return true
  4022  	}
  4023  	return false
  4024  }
// rewriteValueLOONG64_OpLOONG64MOVHUreg rewrites values with op
// LOONG64MOVHUreg (zero-extension of the low 16 bits).
// Each `for { ... }` below is a single rule attempt that runs at most
// once (every path ends in break or return); rules are tried in order
// and the first match wins. Returns true if v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVHUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVHUreg (SRLVconst [rc] x))
	// cond: rc < 16
	// result: (BSTRPICKV [rc + (15+rc)<<6] x)
	for {
		if v_0.Op != OpLOONG64SRLVconst {
			break
		}
		rc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(rc < 16) {
			break
		}
		v.reset(OpLOONG64BSTRPICKV)
		// AuxInt packs the bit-field bounds: low bits = start, bits<<6 = end.
		v.AuxInt = int64ToAuxInt(rc + (15+rc)<<6)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUloadidx _ _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUloadidx {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUloadidx _ _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHUloadidx {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHUreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (SLLVconst [lc] x))
	// cond: lc >= 16
	// result: (MOVVconst [0])
	for {
		if v_0.Op != OpLOONG64SLLVconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		if !(lc >= 16) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (MOVHUreg (MOVVconst [c]))
	// result: (MOVVconst [int64(uint16(c))])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint16(c)))
		return true
	}
	// match: (MOVHUreg x:(SRLconst [c] y))
	// cond: c >= 16
	// result: x
	for {
		x := v_0
		if x.Op != OpLOONG64SRLconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 16) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVHUreg x:(ANDconst [c] y))
	// cond: c >= 0 && int64(uint16(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpLOONG64ANDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(uint16(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVHload rewrites values with op
// LOONG64MOVHload (sign-extending halfword load): store-to-load
// forwarding, offset folding, sym merging, conversion to indexed form,
// and constant folding of loads from read-only symbols.
// Each `for { ... }` runs at most once; rules are tried in order and
// the first match wins. Returns true if v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVHload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVHload [off] {sym} ptr (MOVHstore [off] {sym} ptr x _))
	// result: (MOVHreg x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVHstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpLOONG64MOVHreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVHload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVHload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVHload [off] {sym} (ADDV ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVHloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVHload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDshiftLLV {
			break
		}
		shift := auxIntToInt64(v_0.AuxInt)
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVHloadidx)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
		v0.AuxInt = int64ToAuxInt(shift)
		v0.AddArg(idx)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (MOVHload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(int16(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		// int16 conversion reproduces the load's sign extension at compile time.
		v.AuxInt = int64ToAuxInt(int64(int16(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))))
		return true
	}
	return false
}
  4291  func rewriteValueLOONG64_OpLOONG64MOVHloadidx(v *Value) bool {
  4292  	v_2 := v.Args[2]
  4293  	v_1 := v.Args[1]
  4294  	v_0 := v.Args[0]
  4295  	// match: (MOVHloadidx ptr (MOVVconst [c]) mem)
  4296  	// cond: is32Bit(c)
  4297  	// result: (MOVHload [int32(c)] ptr mem)
  4298  	for {
  4299  		ptr := v_0
  4300  		if v_1.Op != OpLOONG64MOVVconst {
  4301  			break
  4302  		}
  4303  		c := auxIntToInt64(v_1.AuxInt)
  4304  		mem := v_2
  4305  		if !(is32Bit(c)) {
  4306  			break
  4307  		}
  4308  		v.reset(OpLOONG64MOVHload)
  4309  		v.AuxInt = int32ToAuxInt(int32(c))
  4310  		v.AddArg2(ptr, mem)
  4311  		return true
  4312  	}
  4313  	// match: (MOVHloadidx (MOVVconst [c]) ptr mem)
  4314  	// cond: is32Bit(c)
  4315  	// result: (MOVHload [int32(c)] ptr mem)
  4316  	for {
  4317  		if v_0.Op != OpLOONG64MOVVconst {
  4318  			break
  4319  		}
  4320  		c := auxIntToInt64(v_0.AuxInt)
  4321  		ptr := v_1
  4322  		mem := v_2
  4323  		if !(is32Bit(c)) {
  4324  			break
  4325  		}
  4326  		v.reset(OpLOONG64MOVHload)
  4327  		v.AuxInt = int32ToAuxInt(int32(c))
  4328  		v.AddArg2(ptr, mem)
  4329  		return true
  4330  	}
  4331  	return false
  4332  }
// rewriteValueLOONG64_OpLOONG64MOVHreg rewrites values with op
// LOONG64MOVHreg (sign-extension of the low 16 bits): redundant
// extensions after narrower loads/extensions become MOVVreg, and
// constants are folded.
// Each `for { ... }` runs at most once; rules are tried in order and
// the first match wins. Returns true if v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVHreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVHreg x:(MOVBload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBloadidx _ _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBloadidx {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUloadidx _ _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUloadidx {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHloadidx _ _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHloadidx {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg (MOVVconst [c]))
	// result: (MOVVconst [int64(int16(c))])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(int16(c)))
		return true
	}
	// match: (MOVHreg x:(ANDconst [c] y))
	// cond: c >= 0 && int64(int16(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpLOONG64ANDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(int16(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVHstore rewrites values with op
// LOONG64MOVHstore (halfword store): offset folding, sym merging,
// dropping redundant extensions of the stored value (a halfword store
// only writes the low 16 bits), and conversion to indexed form.
// Each `for { ... }` runs at most once; rules are tried in order and
// the first match wins. Returns true if v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVHstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVHstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVHstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVHstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} (ADDV ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHstoreidx ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVHstoreidx)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVHstore [off] {sym} (ADDshiftLLV [shift] ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHstoreidx ptr (SLLVconst <typ.Int64> [shift] idx) val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDshiftLLV {
			break
		}
		shift := auxIntToInt64(v_0.AuxInt)
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVHstoreidx)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
		v0.AuxInt = int64ToAuxInt(shift)
		v0.AddArg(idx)
		v.AddArg4(ptr, v0, val, mem)
		return true
	}
	return false
}
  4628  func rewriteValueLOONG64_OpLOONG64MOVHstoreidx(v *Value) bool {
  4629  	v_3 := v.Args[3]
  4630  	v_2 := v.Args[2]
  4631  	v_1 := v.Args[1]
  4632  	v_0 := v.Args[0]
  4633  	// match: (MOVHstoreidx ptr (MOVVconst [c]) val mem)
  4634  	// cond: is32Bit(c)
  4635  	// result: (MOVHstore [int32(c)] ptr val mem)
  4636  	for {
  4637  		ptr := v_0
  4638  		if v_1.Op != OpLOONG64MOVVconst {
  4639  			break
  4640  		}
  4641  		c := auxIntToInt64(v_1.AuxInt)
  4642  		val := v_2
  4643  		mem := v_3
  4644  		if !(is32Bit(c)) {
  4645  			break
  4646  		}
  4647  		v.reset(OpLOONG64MOVHstore)
  4648  		v.AuxInt = int32ToAuxInt(int32(c))
  4649  		v.AddArg3(ptr, val, mem)
  4650  		return true
  4651  	}
  4652  	// match: (MOVHstoreidx (MOVVconst [c]) idx val mem)
  4653  	// cond: is32Bit(c)
  4654  	// result: (MOVHstore [int32(c)] idx val mem)
  4655  	for {
  4656  		if v_0.Op != OpLOONG64MOVVconst {
  4657  			break
  4658  		}
  4659  		c := auxIntToInt64(v_0.AuxInt)
  4660  		idx := v_1
  4661  		val := v_2
  4662  		mem := v_3
  4663  		if !(is32Bit(c)) {
  4664  			break
  4665  		}
  4666  		v.reset(OpLOONG64MOVHstore)
  4667  		v.AuxInt = int32ToAuxInt(int32(c))
  4668  		v.AddArg3(idx, val, mem)
  4669  		return true
  4670  	}
  4671  	return false
  4672  }
// rewriteValueLOONG64_OpLOONG64MOVVload rewrites values with op
// LOONG64MOVVload (64-bit load): store-to-load forwarding (including
// the FP-to-GP move case), offset folding, sym merging, conversion to
// indexed form, and constant folding of loads from read-only symbols.
// Each `for { ... }` runs at most once; rules are tried in order and
// the first match wins. Returns true if v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVVload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVVload [off] {sym} ptr (MOVDstore [off] {sym} ptr val _))
	// result: (MOVVfpgp val)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVDstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		val := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpLOONG64MOVVfpgp)
		v.AddArg(val)
		return true
	}
	// match: (MOVVload [off] {sym} ptr (MOVVstore [off] {sym} ptr x _))
	// result: (MOVVreg x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVVload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVVload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVVload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVVload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVVload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVVload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVVload [off] {sym} (ADDV ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVVloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVVloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVVload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVVloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDshiftLLV {
			break
		}
		shift := auxIntToInt64(v_0.AuxInt)
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVVloadidx)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
		v0.AuxInt = int64ToAuxInt(shift)
		v0.AddArg(idx)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (MOVVload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(read64(sym, int64(off), config.ctxt.Arch.ByteOrder))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(read64(sym, int64(off), config.ctxt.Arch.ByteOrder)))
		return true
	}
	return false
}
  4813  func rewriteValueLOONG64_OpLOONG64MOVVloadidx(v *Value) bool {
  4814  	v_2 := v.Args[2]
  4815  	v_1 := v.Args[1]
  4816  	v_0 := v.Args[0]
  4817  	// match: (MOVVloadidx ptr (MOVVconst [c]) mem)
  4818  	// cond: is32Bit(c)
  4819  	// result: (MOVVload [int32(c)] ptr mem)
  4820  	for {
  4821  		ptr := v_0
  4822  		if v_1.Op != OpLOONG64MOVVconst {
  4823  			break
  4824  		}
  4825  		c := auxIntToInt64(v_1.AuxInt)
  4826  		mem := v_2
  4827  		if !(is32Bit(c)) {
  4828  			break
  4829  		}
  4830  		v.reset(OpLOONG64MOVVload)
  4831  		v.AuxInt = int32ToAuxInt(int32(c))
  4832  		v.AddArg2(ptr, mem)
  4833  		return true
  4834  	}
  4835  	// match: (MOVVloadidx (MOVVconst [c]) ptr mem)
  4836  	// cond: is32Bit(c)
  4837  	// result: (MOVVload [int32(c)] ptr mem)
  4838  	for {
  4839  		if v_0.Op != OpLOONG64MOVVconst {
  4840  			break
  4841  		}
  4842  		c := auxIntToInt64(v_0.AuxInt)
  4843  		ptr := v_1
  4844  		mem := v_2
  4845  		if !(is32Bit(c)) {
  4846  			break
  4847  		}
  4848  		v.reset(OpLOONG64MOVVload)
  4849  		v.AuxInt = int32ToAuxInt(int32(c))
  4850  		v.AddArg2(ptr, mem)
  4851  		return true
  4852  	}
  4853  	return false
  4854  }
  4855  func rewriteValueLOONG64_OpLOONG64MOVVnop(v *Value) bool {
  4856  	v_0 := v.Args[0]
  4857  	// match: (MOVVnop (MOVVconst [c]))
  4858  	// result: (MOVVconst [c])
  4859  	for {
  4860  		if v_0.Op != OpLOONG64MOVVconst {
  4861  			break
  4862  		}
  4863  		c := auxIntToInt64(v_0.AuxInt)
  4864  		v.reset(OpLOONG64MOVVconst)
  4865  		v.AuxInt = int64ToAuxInt(c)
  4866  		return true
  4867  	}
  4868  	return false
  4869  }
  4870  func rewriteValueLOONG64_OpLOONG64MOVVreg(v *Value) bool {
  4871  	v_0 := v.Args[0]
  4872  	// match: (MOVVreg x)
  4873  	// cond: x.Uses == 1
  4874  	// result: (MOVVnop x)
  4875  	for {
  4876  		x := v_0
  4877  		if !(x.Uses == 1) {
  4878  			break
  4879  		}
  4880  		v.reset(OpLOONG64MOVVnop)
  4881  		v.AddArg(x)
  4882  		return true
  4883  	}
  4884  	// match: (MOVVreg (MOVVconst [c]))
  4885  	// result: (MOVVconst [c])
  4886  	for {
  4887  		if v_0.Op != OpLOONG64MOVVconst {
  4888  			break
  4889  		}
  4890  		c := auxIntToInt64(v_0.AuxInt)
  4891  		v.reset(OpLOONG64MOVVconst)
  4892  		v.AuxInt = int64ToAuxInt(c)
  4893  		return true
  4894  	}
  4895  	return false
  4896  }
// rewriteValueLOONG64_OpLOONG64MOVVstore rewrites values with op
// LOONG64MOVVstore (64-bit store): eliminating an FP-to-GP move by
// storing from the FP register directly, offset folding, sym merging,
// and conversion to indexed form.
// Each `for { ... }` runs at most once; rules are tried in order and
// the first match wins. Returns true if v was rewritten.
func rewriteValueLOONG64_OpLOONG64MOVVstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVVstore [off] {sym} ptr (MOVVfpgp val) mem)
	// result: (MOVDstore [off] {sym} ptr val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVfpgp {
			break
		}
		val := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVDstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVVstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVVstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVVstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVVstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVVstore [off] {sym} (ADDV ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVVstoreidx ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVVstoreidx)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVVstore [off] {sym} (ADDshiftLLV [shift] ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVVstoreidx ptr (SLLVconst <typ.Int64> [shift] idx) val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDshiftLLV {
			break
		}
		shift := auxIntToInt64(v_0.AuxInt)
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVVstoreidx)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
		v0.AuxInt = int64ToAuxInt(shift)
		v0.AddArg(idx)
		v.AddArg4(ptr, v0, val, mem)
		return true
	}
	return false
}
  5012  func rewriteValueLOONG64_OpLOONG64MOVVstoreidx(v *Value) bool {
  5013  	v_3 := v.Args[3]
  5014  	v_2 := v.Args[2]
  5015  	v_1 := v.Args[1]
  5016  	v_0 := v.Args[0]
  5017  	// match: (MOVVstoreidx ptr (MOVVconst [c]) val mem)
  5018  	// cond: is32Bit(c)
  5019  	// result: (MOVVstore [int32(c)] ptr val mem)
  5020  	for {
  5021  		ptr := v_0
  5022  		if v_1.Op != OpLOONG64MOVVconst {
  5023  			break
  5024  		}
  5025  		c := auxIntToInt64(v_1.AuxInt)
  5026  		val := v_2
  5027  		mem := v_3
  5028  		if !(is32Bit(c)) {
  5029  			break
  5030  		}
  5031  		v.reset(OpLOONG64MOVVstore)
  5032  		v.AuxInt = int32ToAuxInt(int32(c))
  5033  		v.AddArg3(ptr, val, mem)
  5034  		return true
  5035  	}
  5036  	// match: (MOVVstoreidx (MOVVconst [c]) idx val mem)
  5037  	// cond: is32Bit(c)
  5038  	// result: (MOVVstore [int32(c)] idx val mem)
  5039  	for {
  5040  		if v_0.Op != OpLOONG64MOVVconst {
  5041  			break
  5042  		}
  5043  		c := auxIntToInt64(v_0.AuxInt)
  5044  		idx := v_1
  5045  		val := v_2
  5046  		mem := v_3
  5047  		if !(is32Bit(c)) {
  5048  			break
  5049  		}
  5050  		v.reset(OpLOONG64MOVVstore)
  5051  		v.AuxInt = int32ToAuxInt(int32(c))
  5052  		v.AddArg3(idx, val, mem)
  5053  		return true
  5054  	}
  5055  	return false
  5056  }
// rewriteValueLOONG64_OpLOONG64MOVWUload rewrites MOVWUload (zero-extending
// 32-bit load) values: store-to-load forwarding (including the FP/GP move
// case), offset folding through ADDVconst and MOVVaddr, conversion to the
// indexed form for ADDV/ADDshiftLLV addresses, and constant folding of loads
// from read-only symbols. Returns true if a rewrite was applied.
// NOTE(review): generated from _gen/LOONG64.rules — change the rules file,
// not this function.
func rewriteValueLOONG64_OpLOONG64MOVWUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVWUload [off] {sym} ptr (MOVFstore [off] {sym} ptr val _))
	// result: (ZeroExt32to64 (MOVWfpgp <typ.Float32> val))
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVFstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		val := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpZeroExt32to64)
		v0 := b.NewValue0(v_1.Pos, OpLOONG64MOVWfpgp, typ.Float32)
		v0.AddArg(val)
		v.AddArg(v0)
		return true
	}
	// match: (MOVWUload [off] {sym} ptr (MOVWstore [off] {sym} ptr x _))
	// result: (MOVWUreg x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpLOONG64MOVWUreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWUload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVWUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVWUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWUload [off] {sym} (ADDV ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVWUloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVWUloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVWUload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVWUloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDshiftLLV {
			break
		}
		shift := auxIntToInt64(v_0.AuxInt)
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVWUloadidx)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
		v0.AuxInt = int64ToAuxInt(shift)
		v0.AddArg(idx)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (MOVWUload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder)))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVWUloadidx rewrites MOVWUloadidx (indexed
// zero-extending 32-bit load) values: when either address operand is a
// MOVVconst that fits in 32 bits, the indexed load is folded into a plain
// MOVWUload with an immediate offset. Returns true if a rewrite was applied.
// NOTE(review): generated from _gen/LOONG64.rules — change the rules file,
// not this function.
func rewriteValueLOONG64_OpLOONG64MOVWUloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWUloadidx ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVWUload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVWUload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWUloadidx (MOVVconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVWUload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVWUload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVWUreg rewrites MOVWUreg (zero-extend low
// 32 bits) values: a right-shift feeding the extend becomes a single
// BSTRPICKV bit-field extract; extends of values already known to be
// zero-extended (unsigned loads and narrower unsigned extends) collapse to
// MOVVreg; constants are folded; and extends made redundant by SRLconst or a
// sufficiently small ANDconst mask are dropped. Returns true if a rewrite
// was applied.
// NOTE(review): generated from _gen/LOONG64.rules — change the rules file,
// not this function.
func rewriteValueLOONG64_OpLOONG64MOVWUreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVWUreg (SRLVconst [rc] x))
	// cond: rc < 32
	// result: (BSTRPICKV [rc + (31+rc)<<6] x)
	for {
		if v_0.Op != OpLOONG64SRLVconst {
			break
		}
		rc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(rc < 32) {
			break
		}
		v.reset(OpLOONG64BSTRPICKV)
		// AuxInt packs lsb (rc) and msb (31+rc) of the extracted field.
		v.AuxInt = int64ToAuxInt(rc + (31+rc)<<6)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVWUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUloadidx _ _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUloadidx {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUloadidx _ _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHUloadidx {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUloadidx _ _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVWUloadidx {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHUreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVWUreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg (SLLVconst [lc] x))
	// cond: lc >= 32
	// result: (MOVVconst [0])
	for {
		if v_0.Op != OpLOONG64SLLVconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		if !(lc >= 32) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (MOVWUreg (MOVVconst [c]))
	// result: (MOVVconst [int64(uint32(c))])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint32(c)))
		return true
	}
	// match: (MOVWUreg x:(SRLconst [c] y))
	// result: x
	for {
		x := v_0
		if x.Op != OpLOONG64SRLconst {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVWUreg x:(ANDconst [c] y))
	// cond: c >= 0 && int64(uint32(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpLOONG64ANDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(uint32(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVWload rewrites MOVWload (sign-extending
// 32-bit load) values: store-to-load forwarding, offset folding through
// ADDVconst and MOVVaddr, conversion to the indexed form for
// ADDV/ADDshiftLLV addresses, and constant folding of loads from read-only
// symbols. Returns true if a rewrite was applied.
// NOTE(review): generated from _gen/LOONG64.rules — change the rules file,
// not this function.
func rewriteValueLOONG64_OpLOONG64MOVWload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVWload [off] {sym} ptr (MOVWstore [off] {sym} ptr x _))
	// result: (MOVWreg x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym {
			break
		}
		x := v_1.Args[1]
		if ptr != v_1.Args[0] {
			break
		}
		v.reset(OpLOONG64MOVWreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWload [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVWload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVWload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWload [off] {sym} (ADDV ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVWloadidx ptr idx mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVWloadidx)
		v.AddArg3(ptr, idx, mem)
		return true
	}
	// match: (MOVWload [off] {sym} (ADDshiftLLV [shift] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVWloadidx ptr (SLLVconst <typ.Int64> [shift] idx) mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDshiftLLV {
			break
		}
		shift := auxIntToInt64(v_0.AuxInt)
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		mem := v_1
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVWloadidx)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
		v0.AuxInt = int64ToAuxInt(shift)
		v0.AddArg(idx)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (MOVWload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVVconst [int64(int32(read32(sym, int64(off), config.ctxt.Arch.ByteOrder)))])
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(int32(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVWloadidx rewrites MOVWloadidx (indexed
// sign-extending 32-bit load) values: when either address operand is a
// MOVVconst that fits in 32 bits, the indexed load is folded into a plain
// MOVWload with an immediate offset. Returns true if a rewrite was applied.
// NOTE(review): generated from _gen/LOONG64.rules — change the rules file,
// not this function.
func rewriteValueLOONG64_OpLOONG64MOVWloadidx(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWloadidx ptr (MOVVconst [c]) mem)
	// cond: is32Bit(c)
	// result: (MOVWload [int32(c)] ptr mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVWload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWloadidx (MOVVconst [c]) ptr mem)
	// cond: is32Bit(c)
	// result: (MOVWload [int32(c)] ptr mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		ptr := v_1
		mem := v_2
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVWload)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVWreg rewrites MOVWreg (sign-extend low 32
// bits) values: extends of values already correctly extended (narrower
// loads, indexed loads, and narrower register extends) collapse to MOVVreg;
// constants are folded with sign extension; and extends made redundant by a
// small non-negative ANDconst mask are dropped. Returns true if a rewrite
// was applied.
// NOTE(review): generated from _gen/LOONG64.rules — change the rules file,
// not this function.
func rewriteValueLOONG64_OpLOONG64MOVWreg(v *Value) bool {
	v_0 := v.Args[0]
	// match: (MOVWreg x:(MOVBload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHUload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWload _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVWload {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBloadidx _ _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBloadidx {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUloadidx _ _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUloadidx {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHloadidx _ _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHloadidx {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUloadidx _ _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHUloadidx {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWloadidx _ _ _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVWloadidx {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVBUreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVHreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWreg _))
	// result: (MOVVreg x)
	for {
		x := v_0
		if x.Op != OpLOONG64MOVWreg {
			break
		}
		v.reset(OpLOONG64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg (MOVVconst [c]))
	// result: (MOVVconst [int64(int32(c))])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(int32(c)))
		return true
	}
	// match: (MOVWreg x:(ANDconst [c] y))
	// cond: c >= 0 && int64(int32(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpLOONG64ANDconst {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(int32(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVWstore rewrites MOVWstore (32-bit store)
// values: a store of an FP-to-GP move becomes a float store; offsets are
// folded through ADDVconst and MOVVaddr; redundant MOVWreg/MOVWUreg
// extensions of the stored value are elided (the store only writes the low
// 32 bits); and ADDV/ADDshiftLLV addresses are converted to the indexed
// store form. Returns true if a rewrite was applied.
// NOTE(review): generated from _gen/LOONG64.rules — change the rules file,
// not this function.
func rewriteValueLOONG64_OpLOONG64MOVWstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (MOVWstore [off] {sym} ptr (MOVWfpgp val) mem)
	// result: (MOVFstore [off] {sym} ptr val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWfpgp {
			break
		}
		val := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVFstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWstore [off1+int32(off2)] {sym} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)
	// result: (MOVWstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64MOVVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2)) && (ptr.Op != OpSB || !config.ctxt.Flag_dynlink)) {
			break
		}
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpLOONG64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} (ADDV ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVWstoreidx ptr idx val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVWstoreidx)
		v.AddArg4(ptr, idx, val, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} (ADDshiftLLV [shift] ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVWstoreidx ptr (SLLVconst <typ.Int64> [shift] idx) val mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpLOONG64ADDshiftLLV {
			break
		}
		shift := auxIntToInt64(v_0.AuxInt)
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpLOONG64MOVWstoreidx)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, typ.Int64)
		v0.AuxInt = int64ToAuxInt(shift)
		v0.AddArg(idx)
		v.AddArg4(ptr, v0, val, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MOVWstoreidx rewrites MOVWstoreidx (indexed
// 32-bit store) values: when either address operand is a MOVVconst that
// fits in 32 bits, the indexed store is folded into a plain MOVWstore with
// an immediate offset. Returns true if a rewrite was applied.
// NOTE(review): generated from _gen/LOONG64.rules — change the rules file,
// not this function.
func rewriteValueLOONG64_OpLOONG64MOVWstoreidx(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWstoreidx ptr (MOVVconst [c]) val mem)
	// cond: is32Bit(c)
	// result: (MOVWstore [int32(c)] ptr val mem)
	for {
		ptr := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (MOVWstoreidx (MOVVconst [c]) idx val mem)
	// cond: is32Bit(c)
	// result: (MOVWstore [int32(c)] idx val mem)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		idx := v_1
		val := v_2
		mem := v_3
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg3(idx, val, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64MULV rewrites MULV (64-bit multiply) values:
// multiplies of single-use 32-bit extends become the narrower MULWVWU/MULWVW
// forms; multiplication by 0 or 1 is simplified; multiplication by other
// constants is strength-reduced when profitable; and constant*constant is
// folded. The inner _i0 loops try both operand orders since MULV is
// commutative. Returns true if a rewrite was applied.
// NOTE(review): generated from _gen/LOONG64.rules — change the rules file,
// not this function.
func rewriteValueLOONG64_OpLOONG64MULV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (MULV r:(MOVWUreg x) s:(MOVWUreg y))
	// cond: r.Uses == 1 && s.Uses == 1
	// result: (MULWVWU x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			r := v_0
			if r.Op != OpLOONG64MOVWUreg {
				continue
			}
			x := r.Args[0]
			s := v_1
			if s.Op != OpLOONG64MOVWUreg {
				continue
			}
			y := s.Args[0]
			if !(r.Uses == 1 && s.Uses == 1) {
				continue
			}
			v.reset(OpLOONG64MULWVWU)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (MULV r:(MOVWreg x) s:(MOVWreg y))
	// cond: r.Uses == 1 && s.Uses == 1
	// result: (MULWVW x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			r := v_0
			if r.Op != OpLOONG64MOVWreg {
				continue
			}
			x := r.Args[0]
			s := v_1
			if s.Op != OpLOONG64MOVWreg {
				continue
			}
			y := s.Args[0]
			if !(r.Uses == 1 && s.Uses == 1) {
				continue
			}
			v.reset(OpLOONG64MULWVW)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (MULV _ (MOVVconst [0]))
	// result: (MOVVconst [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
				continue
			}
			v.reset(OpLOONG64MOVVconst)
			v.AuxInt = int64ToAuxInt(0)
			return true
		}
		break
	}
	// match: (MULV x (MOVVconst [1]))
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 1 {
				continue
			}
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (MULV x (MOVVconst [c]))
	// cond: canMulStrengthReduce(config, c)
	// result: {mulStrengthReduce(v, x, c)}
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(canMulStrengthReduce(config, c)) {
				continue
			}
			v.copyOf(mulStrengthReduce(v, x, c))
			return true
		}
		break
	}
	// match: (MULV (MOVVconst [c]) (MOVVconst [d]))
	// result: (MOVVconst [c*d])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpLOONG64MOVVconst {
				continue
			}
			d := auxIntToInt64(v_1.AuxInt)
			v.reset(OpLOONG64MOVVconst)
			v.AuxInt = int64ToAuxInt(c * d)
			return true
		}
		break
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64NEGV applies the generated rewrite rules for
// NEGV: it turns the negation of a subtraction into the reversed subtraction,
// cancels double negation, pushes negation through a single-use ADDVconst
// (flipping the constant's sign when -c still fits in 12 bits), and
// constant-folds the negation of a constant. It reports whether v was
// rewritten.
func rewriteValueLOONG64_OpLOONG64NEGV(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (NEGV (SUBV x y))
	// result: (SUBV y x)
	for {
		if v_0.Op != OpLOONG64SUBV {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpLOONG64SUBV)
		v.AddArg2(y, x)
		return true
	}
	// match: (NEGV <t> s:(ADDVconst [c] (SUBV x y)))
	// cond: s.Uses == 1 && is12Bit(-c)
	// result: (ADDVconst [-c] (SUBV <t> y x))
	for {
		t := v.Type
		s := v_0
		if s.Op != OpLOONG64ADDVconst {
			break
		}
		c := auxIntToInt64(s.AuxInt)
		s_0 := s.Args[0]
		if s_0.Op != OpLOONG64SUBV {
			break
		}
		y := s_0.Args[1]
		x := s_0.Args[0]
		if !(s.Uses == 1 && is12Bit(-c)) {
			break
		}
		v.reset(OpLOONG64ADDVconst)
		v.AuxInt = int64ToAuxInt(-c)
		v0 := b.NewValue0(v.Pos, OpLOONG64SUBV, t)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
	// match: (NEGV (NEGV x))
	// result: x
	for {
		if v_0.Op != OpLOONG64NEGV {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (NEGV <t> s:(ADDVconst [c] (NEGV x)))
	// cond: s.Uses == 1 && is12Bit(-c)
	// result: (ADDVconst [-c] x)
	for {
		s := v_0
		if s.Op != OpLOONG64ADDVconst {
			break
		}
		c := auxIntToInt64(s.AuxInt)
		s_0 := s.Args[0]
		if s_0.Op != OpLOONG64NEGV {
			break
		}
		x := s_0.Args[0]
		if !(s.Uses == 1 && is12Bit(-c)) {
			break
		}
		v.reset(OpLOONG64ADDVconst)
		v.AuxInt = int64ToAuxInt(-c)
		v.AddArg(x)
		return true
	}
	// match: (NEGV (MOVVconst [c]))
	// result: (MOVVconst [-c])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(-c)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64NOR applies the generated rewrite rule for
// NOR: when one operand is a constant that fits in 32 bits, it folds the
// operation into a NORconst. It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64NOR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (NOR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (NORconst [c] x)
	for {
		// NOR is commutative; the inner loop tries both argument orders.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c)) {
				continue
			}
			v.reset(OpLOONG64NORconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64NORconst applies the generated rewrite rule
// for NORconst: it constant-folds NOR of two constants into ^(c|d). It
// reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64NORconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (NORconst [c] (MOVVconst [d]))
	// result: (MOVVconst [^(c|d)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(^(c | d))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64OR applies the generated rewrite rules for OR:
// it recognizes shift-and-mask byte-swap idioms and replaces them with
// REVB2H/REVB4H, folds a 32-bit-fitting constant operand into ORconst,
// simplifies (OR x x) to x, and fuses OR with a bitwise-complement operand
// into ORN. It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64OR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (OR <typ.UInt16> (SRLVconst [8] <typ.UInt16> x) (SLLVconst [8] <typ.UInt16> x))
	// result: (REVB2H x)
	for {
		if v.Type != typ.UInt16 {
			break
		}
		// OR is commutative; each _iN loop tries both argument orders.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64SRLVconst || v_0.Type != typ.UInt16 || auxIntToInt64(v_0.AuxInt) != 8 {
				continue
			}
			x := v_0.Args[0]
			if v_1.Op != OpLOONG64SLLVconst || v_1.Type != typ.UInt16 || auxIntToInt64(v_1.AuxInt) != 8 || x != v_1.Args[0] {
				continue
			}
			v.reset(OpLOONG64REVB2H)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (OR (SRLconst [8] (ANDconst [c1] x)) (SLLconst [8] (ANDconst [c2] x)))
	// cond: uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff
	// result: (REVB2H x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64SRLconst || auxIntToInt64(v_0.AuxInt) != 8 {
				continue
			}
			v_0_0 := v_0.Args[0]
			if v_0_0.Op != OpLOONG64ANDconst {
				continue
			}
			c1 := auxIntToInt64(v_0_0.AuxInt)
			x := v_0_0.Args[0]
			if v_1.Op != OpLOONG64SLLconst || auxIntToInt64(v_1.AuxInt) != 8 {
				continue
			}
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpLOONG64ANDconst {
				continue
			}
			c2 := auxIntToInt64(v_1_0.AuxInt)
			if x != v_1_0.Args[0] || !(uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff) {
				continue
			}
			v.reset(OpLOONG64REVB2H)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (OR (SRLVconst [8] (AND (MOVVconst [c1]) x)) (SLLVconst [8] (AND (MOVVconst [c2]) x)))
	// cond: uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff
	// result: (REVB4H x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64SRLVconst || auxIntToInt64(v_0.AuxInt) != 8 {
				continue
			}
			v_0_0 := v_0.Args[0]
			if v_0_0.Op != OpLOONG64AND {
				continue
			}
			_ = v_0_0.Args[1]
			v_0_0_0 := v_0_0.Args[0]
			v_0_0_1 := v_0_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0_0, v_0_0_1 = _i1+1, v_0_0_1, v_0_0_0 {
				if v_0_0_0.Op != OpLOONG64MOVVconst {
					continue
				}
				c1 := auxIntToInt64(v_0_0_0.AuxInt)
				x := v_0_0_1
				if v_1.Op != OpLOONG64SLLVconst || auxIntToInt64(v_1.AuxInt) != 8 {
					continue
				}
				v_1_0 := v_1.Args[0]
				if v_1_0.Op != OpLOONG64AND {
					continue
				}
				_ = v_1_0.Args[1]
				v_1_0_0 := v_1_0.Args[0]
				v_1_0_1 := v_1_0.Args[1]
				for _i2 := 0; _i2 <= 1; _i2, v_1_0_0, v_1_0_1 = _i2+1, v_1_0_1, v_1_0_0 {
					if v_1_0_0.Op != OpLOONG64MOVVconst {
						continue
					}
					c2 := auxIntToInt64(v_1_0_0.AuxInt)
					if x != v_1_0_1 || !(uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff) {
						continue
					}
					v.reset(OpLOONG64REVB4H)
					v.AddArg(x)
					return true
				}
			}
		}
		break
	}
	// match: (OR (SRLVconst [8] (AND (MOVVconst [c1]) x)) (SLLVconst [8] (ANDconst [c2] x)))
	// cond: uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff
	// result: (REVB4H (ANDconst <x.Type> [0xffffffff] x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64SRLVconst || auxIntToInt64(v_0.AuxInt) != 8 {
				continue
			}
			v_0_0 := v_0.Args[0]
			if v_0_0.Op != OpLOONG64AND {
				continue
			}
			_ = v_0_0.Args[1]
			v_0_0_0 := v_0_0.Args[0]
			v_0_0_1 := v_0_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0_0, v_0_0_1 = _i1+1, v_0_0_1, v_0_0_0 {
				if v_0_0_0.Op != OpLOONG64MOVVconst {
					continue
				}
				c1 := auxIntToInt64(v_0_0_0.AuxInt)
				x := v_0_0_1
				if v_1.Op != OpLOONG64SLLVconst || auxIntToInt64(v_1.AuxInt) != 8 {
					continue
				}
				v_1_0 := v_1.Args[0]
				if v_1_0.Op != OpLOONG64ANDconst {
					continue
				}
				c2 := auxIntToInt64(v_1_0.AuxInt)
				if x != v_1_0.Args[0] || !(uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff) {
					continue
				}
				v.reset(OpLOONG64REVB4H)
				v0 := b.NewValue0(v.Pos, OpLOONG64ANDconst, x.Type)
				v0.AuxInt = int64ToAuxInt(0xffffffff)
				v0.AddArg(x)
				v.AddArg(v0)
				return true
			}
		}
		break
	}
	// match: (OR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ORconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c)) {
				continue
			}
			v.reset(OpLOONG64ORconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (OR x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (OR x (NORconst [0] y))
	// result: (ORN x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64NORconst || auxIntToInt64(v_1.AuxInt) != 0 {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpLOONG64ORN)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64ORN applies the generated rewrite rule for
// ORN: ORN with -1 as the second operand (whose complement is all zeros)
// reduces to the first operand. It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64ORN(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ORN x (MOVVconst [-1]))
	// result: x
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != -1 {
			break
		}
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64ORconst applies the generated rewrite rules
// for ORconst: identity with 0, absorption with -1, constant folding against
// a constant operand, and merging of nested ORconst values when the combined
// constant still fits in 32 bits. It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64ORconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ORconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ORconst [-1] _)
	// result: (MOVVconst [-1])
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	// match: (ORconst [c] (MOVVconst [d]))
	// result: (MOVVconst [c|d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(c | d)
		return true
	}
	// match: (ORconst [c] (ORconst [d] x))
	// cond: is32Bit(c|d)
	// result: (ORconst [c|d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64ORconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(c | d)) {
			break
		}
		v.reset(OpLOONG64ORconst)
		v.AuxInt = int64ToAuxInt(c | d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64REMV applies the generated rewrite rule for
// REMV (signed remainder): it constant-folds the remainder of two constants,
// guarded by d != 0 to avoid dividing by zero at compile time. It reports
// whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64REMV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (REMV (MOVVconst [c]) (MOVVconst [d]))
	// cond: d != 0
	// result: (MOVVconst [c%d])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(c % d)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64REMVU applies the generated rewrite rules for
// REMVU (unsigned remainder): remainder by 1 is 0, remainder by a power of
// two becomes a mask (ANDconst [c-1]), and the remainder of two constants is
// folded using unsigned arithmetic (guarded by d != 0). It reports whether v
// was rewritten.
func rewriteValueLOONG64_OpLOONG64REMVU(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (REMVU _ (MOVVconst [1]))
	// result: (MOVVconst [0])
	for {
		if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 1 {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (REMVU x (MOVVconst [c]))
	// cond: isPowerOfTwo(c)
	// result: (ANDconst [c-1] x)
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpLOONG64ANDconst)
		v.AuxInt = int64ToAuxInt(c - 1)
		v.AddArg(x)
		return true
	}
	// match: (REMVU (MOVVconst [c]) (MOVVconst [d]))
	// cond: d != 0
	// result: (MOVVconst [int64(uint64(c)%uint64(d))])
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(c) % uint64(d)))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64ROTR applies the generated rewrite rule for
// ROTR (32-bit rotate right): a constant rotate amount is reduced modulo 32
// (c&31) and folded into ROTRconst. It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64ROTR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ROTR x (MOVVconst [c]))
	// result: (ROTRconst x [c&31])
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpLOONG64ROTRconst)
		v.AuxInt = int64ToAuxInt(c & 31)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64ROTRV applies the generated rewrite rule for
// ROTRV (64-bit rotate right): a constant rotate amount is reduced modulo 64
// (c&63) and folded into ROTRVconst. It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64ROTRV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ROTRV x (MOVVconst [c]))
	// result: (ROTRVconst x [c&63])
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpLOONG64ROTRVconst)
		v.AuxInt = int64ToAuxInt(c & 63)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SGT applies the generated rewrite rules for
// SGT (signed set-greater-than): it rewrites a comparison against a negated
// SUBVconst into an equivalent comparison with a folded constant, moves a
// 32-bit-fitting constant first operand into SGTconst, and simplifies
// (SGT x x) to the constant 0. It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64SGT(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (SGT (MOVVconst [c]) (NEGV (SUBVconst [d] x)))
	// cond: is32Bit(d-c)
	// result: (SGT x (MOVVconst [d-c]))
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpLOONG64NEGV {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpLOONG64SUBVconst {
			break
		}
		d := auxIntToInt64(v_1_0.AuxInt)
		x := v_1_0.Args[0]
		if !(is32Bit(d - c)) {
			break
		}
		v.reset(OpLOONG64SGT)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(d - c)
		v.AddArg2(x, v0)
		return true
	}
	// match: (SGT (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (SGTconst [c] x)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64SGTconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SGT x x)
	// result: (MOVVconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SGTU applies the generated rewrite rules for
// SGTU (unsigned set-greater-than): it moves a 32-bit-fitting constant first
// operand into SGTUconst and simplifies (SGTU x x) to the constant 0. It
// reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64SGTU(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SGTU (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (SGTUconst [c] x)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64SGTUconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SGTU x x)
	// result: (MOVVconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SGTUconst applies the generated rewrite rules
// for SGTUconst (unsigned c > operand): it constant-folds comparisons
// against a constant operand, and resolves to 1 when the operand's value is
// provably bounded below c by a zero-extension (MOVBUreg/MOVHUreg), a mask
// (ANDconst), or an unsigned right shift (SRLVconst). It reports whether v
// was rewritten.
func rewriteValueLOONG64_OpLOONG64SGTUconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SGTUconst [c] (MOVVconst [d]))
	// cond: uint64(c)>uint64(d)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(uint64(c) > uint64(d)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTUconst [c] (MOVVconst [d]))
	// cond: uint64(c)<=uint64(d)
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(uint64(c) <= uint64(d)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTUconst [c] (MOVBUreg _))
	// cond: 0xff < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVBUreg || !(0xff < uint64(c)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTUconst [c] (MOVHUreg _))
	// cond: 0xffff < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVHUreg || !(0xffff < uint64(c)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTUconst [c] (ANDconst [m] _))
	// cond: uint64(m) < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64ANDconst {
			break
		}
		m := auxIntToInt64(v_0.AuxInt)
		if !(uint64(m) < uint64(c)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTUconst [c] (SRLVconst _ [d]))
	// cond: 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64SRLVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SGTconst applies the generated rewrite rules
// for SGTconst (signed c > operand): it constant-folds comparisons against a
// constant operand, and resolves to 0 or 1 when the operand's signed range is
// pinned by a sign/zero extension (MOVBreg/MOVBUreg/MOVHreg/MOVHUreg/
// MOVWUreg), a non-negative mask (ANDconst), or an unsigned right shift
// (SRLVconst). It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64SGTconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SGTconst [c] (MOVVconst [d]))
	// cond: c>d
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(c > d) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVVconst [d]))
	// cond: c<=d
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(c <= d) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (MOVBreg _))
	// cond: 0x7f < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVBreg || !(0x7f < c) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVBreg _))
	// cond: c <= -0x80
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVBreg || !(c <= -0x80) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (MOVBUreg _))
	// cond: 0xff < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVBUreg || !(0xff < c) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVBUreg _))
	// cond: c < 0
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVBUreg || !(c < 0) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (MOVHreg _))
	// cond: 0x7fff < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVHreg || !(0x7fff < c) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVHreg _))
	// cond: c <= -0x8000
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVHreg || !(c <= -0x8000) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (MOVHUreg _))
	// cond: 0xffff < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVHUreg || !(0xffff < c) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (MOVHUreg _))
	// cond: c < 0
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVHUreg || !(c < 0) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (MOVWUreg _))
	// cond: c < 0
	// result: (MOVVconst [0])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVWUreg || !(c < 0) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SGTconst [c] (ANDconst [m] _))
	// cond: 0 <= m && m < c
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64ANDconst {
			break
		}
		m := auxIntToInt64(v_0.AuxInt)
		if !(0 <= m && m < c) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SGTconst [c] (SRLVconst _ [d]))
	// cond: 0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64SRLVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		if !(0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SLL applies the generated rewrite rules for
// SLL (32-bit shift left): a constant shift of 32 or more yields 0, a
// constant shift in [0,31] folds into SLLconst, and a shift amount already
// masked with ANDconst [31] drops the redundant mask. It reports whether v
// was rewritten.
func rewriteValueLOONG64_OpLOONG64SLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SLL _ (MOVVconst [c]))
	// cond: uint64(c)>=32
	// result: (MOVVconst [0])
	for {
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 32) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SLL x (MOVVconst [c]))
	// cond: uint64(c) >=0 && uint64(c) <=31
	// result: (SLLconst x [c])
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 0 && uint64(c) <= 31) {
			break
		}
		v.reset(OpLOONG64SLLconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SLL x (ANDconst [31] y))
	// result: (SLL x y)
	for {
		x := v_0
		if v_1.Op != OpLOONG64ANDconst || auxIntToInt64(v_1.AuxInt) != 31 {
			break
		}
		y := v_1.Args[0]
		v.reset(OpLOONG64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SLLV applies the generated rewrite rules for
// SLLV (64-bit shift left): a constant shift of 64 or more yields 0, any
// other constant shift folds into SLLVconst, and a shift amount already
// masked with ANDconst [63] drops the redundant mask. It reports whether v
// was rewritten.
func rewriteValueLOONG64_OpLOONG64SLLV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SLLV _ (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (MOVVconst [0])
	for {
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SLLV x (MOVVconst [c]))
	// result: (SLLVconst x [c])
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpLOONG64SLLVconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SLLV x (ANDconst [63] y))
	// result: (SLLV x y)
	for {
		x := v_0
		if v_1.Op != OpLOONG64ANDconst || auxIntToInt64(v_1.AuxInt) != 63 {
			break
		}
		y := v_1.Args[0]
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SLLVconst applies the generated rewrite rules
// for SLLVconst: shifting (ADDV x x) — i.e. 2*x — left by c becomes a single
// shift by c+1 while that still fits the value's width (else the result is
// 0), and a constant operand is folded with d<<uint64(c). It reports whether
// v was rewritten.
func rewriteValueLOONG64_OpLOONG64SLLVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SLLVconst <t> [c] (ADDV x x))
	// cond: c < t.Size() * 8 - 1
	// result: (SLLVconst [c+1] x)
	for {
		t := v.Type
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		x := v_0.Args[1]
		if x != v_0.Args[0] || !(c < t.Size()*8-1) {
			break
		}
		v.reset(OpLOONG64SLLVconst)
		v.AuxInt = int64ToAuxInt(c + 1)
		v.AddArg(x)
		return true
	}
	// match: (SLLVconst <t> [c] (ADDV x x))
	// cond: c >= t.Size() * 8 - 1
	// result: (MOVVconst [0])
	for {
		t := v.Type
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		x := v_0.Args[1]
		if x != v_0.Args[0] || !(c >= t.Size()*8-1) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SLLVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [d<<uint64(c)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(d << uint64(c))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SLLconst applies the generated rewrite rules
// for SLLconst: shifting (ADDV x x) — i.e. 2*x — left by c becomes a single
// shift by c+1 while that still fits the value's width; otherwise the result
// is the constant 0. It reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64SLLconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SLLconst <t> [c] (ADDV x x))
	// cond: c < t.Size() * 8 - 1
	// result: (SLLconst [c+1] x)
	for {
		t := v.Type
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		x := v_0.Args[1]
		if x != v_0.Args[0] || !(c < t.Size()*8-1) {
			break
		}
		v.reset(OpLOONG64SLLconst)
		v.AuxInt = int64ToAuxInt(c + 1)
		v.AddArg(x)
		return true
	}
	// match: (SLLconst <t> [c] (ADDV x x))
	// cond: c >= t.Size() * 8 - 1
	// result: (MOVVconst [0])
	for {
		t := v.Type
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64ADDV {
			break
		}
		x := v_0.Args[1]
		if x != v_0.Args[0] || !(c >= t.Size()*8-1) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SRA rewrites SRA (32-bit arithmetic shift
// right) values: a constant shift amount becomes SRAconst (clamped to 31,
// since an arithmetic shift by >= width just replicates the sign bit), and a
// redundant (ANDconst [31] y) mask on the shift amount is dropped.
// Reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64SRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRA x (MOVVconst [c]))
	// cond: uint64(c)>=32
	// result: (SRAconst x [31])
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 32) {
			break
		}
		v.reset(OpLOONG64SRAconst)
		v.AuxInt = int64ToAuxInt(31)
		v.AddArg(x)
		return true
	}
	// match: (SRA x (MOVVconst [c]))
	// cond: uint64(c) >=0 && uint64(c) <=31
	// result: (SRAconst x [c])
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 0 && uint64(c) <= 31) {
			break
		}
		v.reset(OpLOONG64SRAconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SRA x (ANDconst [31] y))
	// result: (SRA x y)
	for {
		x := v_0
		if v_1.Op != OpLOONG64ANDconst || auxIntToInt64(v_1.AuxInt) != 31 {
			break
		}
		y := v_1.Args[0]
		v.reset(OpLOONG64SRA)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SRAV rewrites SRAV (64-bit arithmetic shift
// right) values: a constant shift amount becomes SRAVconst (clamped to 63),
// and a redundant (ANDconst [63] y) mask on the shift amount is dropped.
// Reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64SRAV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRAV x (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (SRAVconst x [63])
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpLOONG64SRAVconst)
		v.AuxInt = int64ToAuxInt(63)
		v.AddArg(x)
		return true
	}
	// match: (SRAV x (MOVVconst [c]))
	// result: (SRAVconst x [c])
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpLOONG64SRAVconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SRAV x (ANDconst [63] y))
	// result: (SRAV x y)
	for {
		x := v_0
		if v_1.Op != OpLOONG64ANDconst || auxIntToInt64(v_1.AuxInt) != 63 {
			break
		}
		y := v_1.Args[0]
		v.reset(OpLOONG64SRAV)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SRAVconst rewrites SRAVconst (64-bit
// arithmetic shift right by constant) values applied to sign-extended
// operands: small shifts of a 32-bit sign extension narrow to SRAconst,
// shifts that consume the whole narrow value reduce to a sign-bit broadcast
// (shift left to bit 63, then arithmetic shift right by 63), and shifts of
// constants are evaluated. Reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64SRAVconst(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (SRAVconst [rc] (MOVWreg y))
	// cond: rc >= 0 && rc <= 31
	// result: (SRAconst [int64(rc)] y)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVWreg {
			break
		}
		y := v_0.Args[0]
		if !(rc >= 0 && rc <= 31) {
			break
		}
		v.reset(OpLOONG64SRAconst)
		v.AuxInt = int64ToAuxInt(int64(rc))
		v.AddArg(y)
		return true
	}
	// match: (SRAVconst <t> [rc] (MOVBreg y))
	// cond: rc >= 8
	// result: (SRAVconst [63] (SLLVconst <t> [56] y))
	for {
		t := v.Type
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVBreg {
			break
		}
		y := v_0.Args[0]
		if !(rc >= 8) {
			break
		}
		// Shifting a sign-extended byte right by >= 8 leaves only copies of
		// its sign bit; materialize that directly from y.
		v.reset(OpLOONG64SRAVconst)
		v.AuxInt = int64ToAuxInt(63)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, t)
		v0.AuxInt = int64ToAuxInt(56)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (SRAVconst <t> [rc] (MOVHreg y))
	// cond: rc >= 16
	// result: (SRAVconst [63] (SLLVconst <t> [48] y))
	for {
		t := v.Type
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVHreg {
			break
		}
		y := v_0.Args[0]
		if !(rc >= 16) {
			break
		}
		// Same sign-bit broadcast as above, for a 16-bit sign extension.
		v.reset(OpLOONG64SRAVconst)
		v.AuxInt = int64ToAuxInt(63)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, t)
		v0.AuxInt = int64ToAuxInt(48)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (SRAVconst <t> [rc] (MOVWreg y))
	// cond: rc >= 32
	// result: (SRAconst [31] y)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVWreg {
			break
		}
		y := v_0.Args[0]
		if !(rc >= 32) {
			break
		}
		v.reset(OpLOONG64SRAconst)
		v.AuxInt = int64ToAuxInt(31)
		v.AddArg(y)
		return true
	}
	// match: (SRAVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [d>>uint64(c)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(d >> uint64(c))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SRL rewrites SRL (32-bit logical shift right)
// values: a constant shift amount >= 32 yields zero, an in-range constant
// becomes SRLconst, and a redundant (ANDconst [31] y) mask on the shift
// amount is dropped. Reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64SRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRL _ (MOVVconst [c]))
	// cond: uint64(c)>=32
	// result: (MOVVconst [0])
	for {
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 32) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRL x (MOVVconst [c]))
	// cond: uint64(c) >=0 && uint64(c) <=31
	// result: (SRLconst x [c])
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 0 && uint64(c) <= 31) {
			break
		}
		v.reset(OpLOONG64SRLconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SRL x (ANDconst [31] y))
	// result: (SRL x y)
	for {
		x := v_0
		if v_1.Op != OpLOONG64ANDconst || auxIntToInt64(v_1.AuxInt) != 31 {
			break
		}
		y := v_1.Args[0]
		v.reset(OpLOONG64SRL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SRLV rewrites SRLV (64-bit logical shift
// right) values: a constant shift amount >= 64 yields zero, any other
// constant becomes SRLVconst, and a redundant (ANDconst [63] y) mask on the
// shift amount is dropped. Reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64SRLV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRLV _ (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (MOVVconst [0])
	for {
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLV x (MOVVconst [c]))
	// result: (SRLVconst x [c])
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpLOONG64SRLVconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SRLV x (ANDconst [63] y))
	// result: (SRLV x y)
	for {
		x := v_0
		if v_1.Op != OpLOONG64ANDconst || auxIntToInt64(v_1.AuxInt) != 63 {
			break
		}
		y := v_1.Args[0]
		v.reset(OpLOONG64SRLV)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SRLVconst rewrites SRLVconst (64-bit logical
// shift right by constant) values. Shift-then-unshift and shift-of-zero-
// extension patterns become BSTRPICKV bit-field extractions (the AuxInt
// packs lsb in the low 6 bits and msb shifted left by 6); shifts that
// consume all bits of a zero-extended operand become constant zero; and
// shifts of constants are evaluated. Reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64SRLVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SRLVconst [rc] (SLLVconst [lc] x))
	// cond: lc <= rc
	// result: (BSTRPICKV [rc-lc + ((64-lc)-1)<<6] x)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64SLLVconst {
			break
		}
		lc := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(lc <= rc) {
			break
		}
		v.reset(OpLOONG64BSTRPICKV)
		v.AuxInt = int64ToAuxInt(rc - lc + ((64-lc)-1)<<6)
		v.AddArg(x)
		return true
	}
	// match: (SRLVconst [rc] (MOVWUreg x))
	// cond: rc < 32
	// result: (BSTRPICKV [rc + 31<<6] x)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVWUreg {
			break
		}
		x := v_0.Args[0]
		if !(rc < 32) {
			break
		}
		v.reset(OpLOONG64BSTRPICKV)
		v.AuxInt = int64ToAuxInt(rc + 31<<6)
		v.AddArg(x)
		return true
	}
	// match: (SRLVconst [rc] (MOVHUreg x))
	// cond: rc < 16
	// result: (BSTRPICKV [rc + 15<<6] x)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVHUreg {
			break
		}
		x := v_0.Args[0]
		if !(rc < 16) {
			break
		}
		v.reset(OpLOONG64BSTRPICKV)
		v.AuxInt = int64ToAuxInt(rc + 15<<6)
		v.AddArg(x)
		return true
	}
	// match: (SRLVconst [rc] (MOVBUreg x))
	// cond: rc < 8
	// result: (BSTRPICKV [rc + 7<<6] x)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVBUreg {
			break
		}
		x := v_0.Args[0]
		if !(rc < 8) {
			break
		}
		v.reset(OpLOONG64BSTRPICKV)
		v.AuxInt = int64ToAuxInt(rc + 7<<6)
		v.AddArg(x)
		return true
	}
	// match: (SRLVconst [rc] (MOVWUreg y))
	// cond: rc >= 0 && rc <= 31
	// result: (SRLconst [int64(rc)] y)
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVWUreg {
			break
		}
		y := v_0.Args[0]
		if !(rc >= 0 && rc <= 31) {
			break
		}
		v.reset(OpLOONG64SRLconst)
		v.AuxInt = int64ToAuxInt(int64(rc))
		v.AddArg(y)
		return true
	}
	// match: (SRLVconst [rc] (MOVWUreg x))
	// cond: rc >= 32
	// result: (MOVVconst [0])
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVWUreg {
			break
		}
		if !(rc >= 32) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLVconst [rc] (MOVHUreg x))
	// cond: rc >= 16
	// result: (MOVVconst [0])
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVHUreg {
			break
		}
		if !(rc >= 16) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLVconst [rc] (MOVBUreg x))
	// cond: rc >= 8
	// result: (MOVVconst [0])
	for {
		rc := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVBUreg {
			break
		}
		if !(rc >= 8) {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [int64(uint64(d)>>uint64(c))])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(d) >> uint64(c)))
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SUBD rewrites SUBD (float64 subtraction)
// values, fusing a MULD (optionally under NEGD) operand into the
// FMSUBD/FNMSUBD/FMADDD/FNMADDD fused multiply-add forms when
// useFMA permits fusing at this value. Reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64SUBD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SUBD (MULD x y) z)
	// cond: z.Block.Func.useFMA(v)
	// result: (FMSUBD x y z)
	for {
		if v_0.Op != OpLOONG64MULD {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		z := v_1
		if !(z.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpLOONG64FMSUBD)
		v.AddArg3(x, y, z)
		return true
	}
	// match: (SUBD z (MULD x y))
	// cond: z.Block.Func.useFMA(v)
	// result: (FNMSUBD x y z)
	for {
		z := v_0
		if v_1.Op != OpLOONG64MULD {
			break
		}
		y := v_1.Args[1]
		x := v_1.Args[0]
		if !(z.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpLOONG64FNMSUBD)
		v.AddArg3(x, y, z)
		return true
	}
	// match: (SUBD z (NEGD (MULD x y)))
	// cond: z.Block.Func.useFMA(v)
	// result: (FMADDD x y z)
	for {
		z := v_0
		if v_1.Op != OpLOONG64NEGD {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpLOONG64MULD {
			break
		}
		y := v_1_0.Args[1]
		x := v_1_0.Args[0]
		if !(z.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpLOONG64FMADDD)
		v.AddArg3(x, y, z)
		return true
	}
	// match: (SUBD (NEGD (MULD x y)) z)
	// cond: z.Block.Func.useFMA(v)
	// result: (FNMADDD x y z)
	for {
		if v_0.Op != OpLOONG64NEGD {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpLOONG64MULD {
			break
		}
		y := v_0_0.Args[1]
		x := v_0_0.Args[0]
		z := v_1
		if !(z.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpLOONG64FNMADDD)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SUBF rewrites SUBF (float32 subtraction)
// values, fusing a MULF (optionally under NEGF) operand into the
// FMSUBF/FNMSUBF/FMADDF/FNMADDF fused multiply-add forms when
// useFMA permits fusing at this value. Reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64SUBF(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SUBF (MULF x y) z)
	// cond: z.Block.Func.useFMA(v)
	// result: (FMSUBF x y z)
	for {
		if v_0.Op != OpLOONG64MULF {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		z := v_1
		if !(z.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpLOONG64FMSUBF)
		v.AddArg3(x, y, z)
		return true
	}
	// match: (SUBF z (MULF x y))
	// cond: z.Block.Func.useFMA(v)
	// result: (FNMSUBF x y z)
	for {
		z := v_0
		if v_1.Op != OpLOONG64MULF {
			break
		}
		y := v_1.Args[1]
		x := v_1.Args[0]
		if !(z.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpLOONG64FNMSUBF)
		v.AddArg3(x, y, z)
		return true
	}
	// match: (SUBF z (NEGF (MULF x y)))
	// cond: z.Block.Func.useFMA(v)
	// result: (FMADDF x y z)
	for {
		z := v_0
		if v_1.Op != OpLOONG64NEGF {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpLOONG64MULF {
			break
		}
		y := v_1_0.Args[1]
		x := v_1_0.Args[0]
		if !(z.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpLOONG64FMADDF)
		v.AddArg3(x, y, z)
		return true
	}
	// match: (SUBF (NEGF (MULF x y)) z)
	// cond: z.Block.Func.useFMA(v)
	// result: (FNMADDF x y z)
	for {
		if v_0.Op != OpLOONG64NEGF {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpLOONG64MULF {
			break
		}
		y := v_0_0.Args[1]
		x := v_0_0.Args[0]
		z := v_1
		if !(z.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpLOONG64FNMADDF)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SUBV canonicalizes SUBV (64-bit integer
// subtraction) values: a 32-bit-representable constant RHS becomes
// SUBVconst, x-(-y) becomes x+y, x-x becomes 0, 0-x becomes NEGV, and
// c-(-(x-d)) folds to an ADDVconst. Reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64SUBV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SUBV x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (SUBVconst [c] x)
	for {
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpLOONG64SUBVconst)
		v.AuxInt = int64ToAuxInt(c)
		v.AddArg(x)
		return true
	}
	// match: (SUBV x (NEGV y))
	// result: (ADDV x y)
	for {
		x := v_0
		if v_1.Op != OpLOONG64NEGV {
			break
		}
		y := v_1.Args[0]
		v.reset(OpLOONG64ADDV)
		v.AddArg2(x, y)
		return true
	}
	// match: (SUBV x x)
	// result: (MOVVconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SUBV (MOVVconst [0]) x)
	// result: (NEGV x)
	for {
		if v_0.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpLOONG64NEGV)
		v.AddArg(x)
		return true
	}
	// match: (SUBV (MOVVconst [c]) (NEGV (SUBVconst [d] x)))
	// result: (ADDVconst [c-d] x)
	for {
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if v_1.Op != OpLOONG64NEGV {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpLOONG64SUBVconst {
			break
		}
		d := auxIntToInt64(v_1_0.AuxInt)
		x := v_1_0.Args[0]
		// c - (-(x-d)) == c - d + x.
		v.reset(OpLOONG64ADDVconst)
		v.AuxInt = int64ToAuxInt(c - d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64SUBVconst rewrites SUBVconst (64-bit subtract
// constant) values: subtracting 0 is dropped, subtraction from a constant is
// evaluated, and chained SUBVconst/ADDVconst offsets are merged into a
// single ADDVconst when the combined offset fits in 32 bits.
// Reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64SUBVconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SUBVconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (SUBVconst [c] (MOVVconst [d]))
	// result: (MOVVconst [d-c])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(d - c)
		return true
	}
	// match: (SUBVconst [c] (SUBVconst [d] x))
	// cond: is32Bit(-c-d)
	// result: (ADDVconst [-c-d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64SUBVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(-c - d)) {
			break
		}
		v.reset(OpLOONG64ADDVconst)
		v.AuxInt = int64ToAuxInt(-c - d)
		v.AddArg(x)
		return true
	}
	// match: (SUBVconst [c] (ADDVconst [d] x))
	// cond: is32Bit(-c+d)
	// result: (ADDVconst [-c+d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64ADDVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(-c + d)) {
			break
		}
		v.reset(OpLOONG64ADDVconst)
		v.AuxInt = int64ToAuxInt(-c + d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64XOR rewrites XOR values. It recognizes
// shift/mask byte-swap idioms and replaces them with the REVB2H/REVB4H
// byte-reverse instructions, turns a 32-bit-representable constant operand
// into XORconst, and simplifies x^x to 0. The inner _i0/_i1/_i2 loops try
// both argument orders of each commutative operand.
// Reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64XOR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (XOR <typ.UInt16> (SRLVconst [8] <typ.UInt16> x) (SLLVconst [8] <typ.UInt16> x))
	// result: (REVB2H x)
	for {
		if v.Type != typ.UInt16 {
			break
		}
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64SRLVconst || v_0.Type != typ.UInt16 || auxIntToInt64(v_0.AuxInt) != 8 {
				continue
			}
			x := v_0.Args[0]
			if v_1.Op != OpLOONG64SLLVconst || v_1.Type != typ.UInt16 || auxIntToInt64(v_1.AuxInt) != 8 || x != v_1.Args[0] {
				continue
			}
			v.reset(OpLOONG64REVB2H)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (XOR (SRLconst [8] (ANDconst [c1] x)) (SLLconst [8] (ANDconst [c2] x)))
	// cond: uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff
	// result: (REVB2H x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64SRLconst || auxIntToInt64(v_0.AuxInt) != 8 {
				continue
			}
			v_0_0 := v_0.Args[0]
			if v_0_0.Op != OpLOONG64ANDconst {
				continue
			}
			c1 := auxIntToInt64(v_0_0.AuxInt)
			x := v_0_0.Args[0]
			if v_1.Op != OpLOONG64SLLconst || auxIntToInt64(v_1.AuxInt) != 8 {
				continue
			}
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpLOONG64ANDconst {
				continue
			}
			c2 := auxIntToInt64(v_1_0.AuxInt)
			if x != v_1_0.Args[0] || !(uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff) {
				continue
			}
			v.reset(OpLOONG64REVB2H)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (XOR (SRLVconst [8] (AND (MOVVconst [c1]) x)) (SLLVconst [8] (AND (MOVVconst [c2]) x)))
	// cond: uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff
	// result: (REVB4H x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64SRLVconst || auxIntToInt64(v_0.AuxInt) != 8 {
				continue
			}
			v_0_0 := v_0.Args[0]
			if v_0_0.Op != OpLOONG64AND {
				continue
			}
			_ = v_0_0.Args[1]
			v_0_0_0 := v_0_0.Args[0]
			v_0_0_1 := v_0_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0_0, v_0_0_1 = _i1+1, v_0_0_1, v_0_0_0 {
				if v_0_0_0.Op != OpLOONG64MOVVconst {
					continue
				}
				c1 := auxIntToInt64(v_0_0_0.AuxInt)
				x := v_0_0_1
				if v_1.Op != OpLOONG64SLLVconst || auxIntToInt64(v_1.AuxInt) != 8 {
					continue
				}
				v_1_0 := v_1.Args[0]
				if v_1_0.Op != OpLOONG64AND {
					continue
				}
				_ = v_1_0.Args[1]
				v_1_0_0 := v_1_0.Args[0]
				v_1_0_1 := v_1_0.Args[1]
				for _i2 := 0; _i2 <= 1; _i2, v_1_0_0, v_1_0_1 = _i2+1, v_1_0_1, v_1_0_0 {
					if v_1_0_0.Op != OpLOONG64MOVVconst {
						continue
					}
					c2 := auxIntToInt64(v_1_0_0.AuxInt)
					if x != v_1_0_1 || !(uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff) {
						continue
					}
					v.reset(OpLOONG64REVB4H)
					v.AddArg(x)
					return true
				}
			}
		}
		break
	}
	// match: (XOR (SRLVconst [8] (AND (MOVVconst [c1]) x)) (SLLVconst [8] (ANDconst [c2] x)))
	// cond: uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff
	// result: (REVB4H (ANDconst <x.Type> [0xffffffff] x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLOONG64SRLVconst || auxIntToInt64(v_0.AuxInt) != 8 {
				continue
			}
			v_0_0 := v_0.Args[0]
			if v_0_0.Op != OpLOONG64AND {
				continue
			}
			_ = v_0_0.Args[1]
			v_0_0_0 := v_0_0.Args[0]
			v_0_0_1 := v_0_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0_0, v_0_0_1 = _i1+1, v_0_0_1, v_0_0_0 {
				if v_0_0_0.Op != OpLOONG64MOVVconst {
					continue
				}
				c1 := auxIntToInt64(v_0_0_0.AuxInt)
				x := v_0_0_1
				if v_1.Op != OpLOONG64SLLVconst || auxIntToInt64(v_1.AuxInt) != 8 {
					continue
				}
				v_1_0 := v_1.Args[0]
				if v_1_0.Op != OpLOONG64ANDconst {
					continue
				}
				c2 := auxIntToInt64(v_1_0.AuxInt)
				if x != v_1_0.Args[0] || !(uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff) {
					continue
				}
				v.reset(OpLOONG64REVB4H)
				v0 := b.NewValue0(v.Pos, OpLOONG64ANDconst, x.Type)
				v0.AuxInt = int64ToAuxInt(0xffffffff)
				v0.AddArg(x)
				v.AddArg(v0)
				return true
			}
		}
		break
	}
	// match: (XOR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (XORconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpLOONG64MOVVconst {
				continue
			}
			c := auxIntToInt64(v_1.AuxInt)
			if !(is32Bit(c)) {
				continue
			}
			v.reset(OpLOONG64XORconst)
			v.AuxInt = int64ToAuxInt(c)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (XOR x x)
	// result: (MOVVconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLOONG64XORconst rewrites XORconst values: XOR with 0
// is dropped, XOR with -1 becomes a NOR-based bitwise NOT, XOR of a constant
// is evaluated, and chained XORconst ops are merged when the combined
// constant fits in 32 bits. Reports whether v was rewritten.
func rewriteValueLOONG64_OpLOONG64XORconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (XORconst [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (XORconst [-1] x)
	// result: (NORconst [0] x)
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		x := v_0
		v.reset(OpLOONG64NORconst)
		v.AuxInt = int64ToAuxInt(0)
		v.AddArg(x)
		return true
	}
	// match: (XORconst [c] (MOVVconst [d]))
	// result: (MOVVconst [c^d])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64MOVVconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		v.reset(OpLOONG64MOVVconst)
		v.AuxInt = int64ToAuxInt(c ^ d)
		return true
	}
	// match: (XORconst [c] (XORconst [d] x))
	// cond: is32Bit(c^d)
	// result: (XORconst [c^d] x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpLOONG64XORconst {
			break
		}
		d := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(is32Bit(c ^ d)) {
			break
		}
		v.reset(OpLOONG64XORconst)
		v.AuxInt = int64ToAuxInt(c ^ d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLeq16 lowers the generic Leq16 op: x <= y is
// computed as !(x > y), built from SGT on the sign-extended operands and
// inverted by XORing the boolean with constant 1. Always rewrites.
func rewriteValueLOONG64_OpLeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16 x y)
	// result: (XOR (MOVVconst [1]) (SGT (SignExt16to64 x) (SignExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLeq16U lowers the generic Leq16U op: x <= y
// (unsigned) is computed as !(x > y), built from SGTU on the zero-extended
// operands and inverted by XORing the boolean with constant 1.
// Always rewrites.
func rewriteValueLOONG64_OpLeq16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16U x y)
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLeq32 lowers the generic Leq32 op: x <= y is
// computed as !(x > y), built from SGT on the sign-extended operands and
// inverted by XORing the boolean with constant 1. Always rewrites.
func rewriteValueLOONG64_OpLeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32 x y)
	// result: (XOR (MOVVconst [1]) (SGT (SignExt32to64 x) (SignExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLeq32F lowers the generic Leq32F op: x <= y is
// computed as FPFlagTrue of (CMPGEF y x), i.e. y >= x with the operands
// swapped. Always rewrites.
func rewriteValueLOONG64_OpLeq32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Leq32F x y)
	// result: (FPFlagTrue (CMPGEF y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpLOONG64CMPGEF, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpLeq32U lowers the generic Leq32U op: x <= y
// (unsigned) is computed as !(x > y), built from SGTU on the zero-extended
// operands and inverted by XORing the boolean with constant 1.
// Always rewrites.
func rewriteValueLOONG64_OpLeq32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32U x y)
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLeq64 lowers Leq64: operands are already 64-bit, so
// no extension is needed — x<=y is !(x>y), i.e. SGT x y inverted by XOR with
// constant 1. Generated from _gen/LOONG64.rules; do not hand-edit the logic.
func rewriteValueLOONG64_OpLeq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq64 x y)
	// result: (XOR (MOVVconst [1]) (SGT x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGT, typ.Bool)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLeq64F lowers Leq64F: x<=y becomes a double-precision
// FP compare with swapped operands, CMPGED y x (i.e. y>=x), read back through
// FPFlagTrue. Generated from _gen/LOONG64.rules; do not hand-edit the logic.
func rewriteValueLOONG64_OpLeq64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Leq64F x y)
	// result: (FPFlagTrue (CMPGED y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpLOONG64CMPGED, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpLeq64U lowers the unsigned Leq64U op: x<=y is !(x>y),
// i.e. SGTU x y inverted by XOR with constant 1; no extension needed for
// 64-bit operands. Generated from _gen/LOONG64.rules; do not hand-edit.
func rewriteValueLOONG64_OpLeq64U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq64U x y)
	// result: (XOR (MOVVconst [1]) (SGTU x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v1.AddArg2(x, y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLeq8 lowers Leq8: x<=y is computed as !(x>y) using
// SGT on the sign-extended operands, inverted by XOR with constant 1.
// Generated from _gen/LOONG64.rules; do not hand-edit the logic.
func rewriteValueLOONG64_OpLeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8 x y)
	// result: (XOR (MOVVconst [1]) (SGT (SignExt8to64 x) (SignExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLeq8U lowers the unsigned Leq8U op: x<=y is computed
// as !(x>y) using SGTU on the zero-extended operands, inverted by XOR with
// constant 1. Generated from _gen/LOONG64.rules; do not hand-edit the logic.
func rewriteValueLOONG64_OpLeq8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8U x y)
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64XOR)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg2(v2, v3)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLess16 lowers Less16: LOONG64 only has "set on
// greater than", so x<y is expressed as y>x — SGT with swapped, sign-extended
// operands. Generated from _gen/LOONG64.rules; do not hand-edit the logic.
func rewriteValueLOONG64_OpLess16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16 x y)
	// result: (SGT (SignExt16to64 y) (SignExt16to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLess16U lowers the unsigned Less16U op: x<y becomes
// SGTU with swapped, zero-extended operands (y>x unsigned).
// Generated from _gen/LOONG64.rules; do not hand-edit the logic.
func rewriteValueLOONG64_OpLess16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16U x y)
	// result: (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLess32 lowers Less32: x<y becomes SGT with swapped,
// sign-extended operands (y>x signed).
// Generated from _gen/LOONG64.rules; do not hand-edit the logic.
func rewriteValueLOONG64_OpLess32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32 x y)
	// result: (SGT (SignExt32to64 y) (SignExt32to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLess32F lowers Less32F: x<y becomes a single-precision
// FP compare with swapped operands, CMPGTF y x (i.e. y>x), read back through
// FPFlagTrue. Generated from _gen/LOONG64.rules; do not hand-edit the logic.
func rewriteValueLOONG64_OpLess32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Less32F x y)
	// result: (FPFlagTrue (CMPGTF y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpLOONG64CMPGTF, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpLess32U lowers the unsigned Less32U op: x<y becomes
// SGTU with swapped, zero-extended operands (y>x unsigned).
// Generated from _gen/LOONG64.rules; do not hand-edit the logic.
func rewriteValueLOONG64_OpLess32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32U x y)
	// result: (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLess64 lowers Less64: x<y becomes SGT y x (operand
// swap; no extension needed for 64-bit operands).
// Generated from _gen/LOONG64.rules; do not hand-edit the logic.
func rewriteValueLOONG64_OpLess64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Less64 x y)
	// result: (SGT y x)
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGT)
		v.AddArg2(y, x)
		return true
	}
}
// rewriteValueLOONG64_OpLess64F lowers Less64F: x<y becomes a double-precision
// FP compare with swapped operands, CMPGTD y x (i.e. y>x), read back through
// FPFlagTrue. Generated from _gen/LOONG64.rules; do not hand-edit the logic.
func rewriteValueLOONG64_OpLess64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Less64F x y)
	// result: (FPFlagTrue (CMPGTD y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpLOONG64CMPGTD, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpLess64U lowers the unsigned Less64U op: x<y becomes
// SGTU y x (operand swap; no extension needed for 64-bit operands).
// Generated from _gen/LOONG64.rules; do not hand-edit the logic.
func rewriteValueLOONG64_OpLess64U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Less64U x y)
	// result: (SGTU y x)
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v.AddArg2(y, x)
		return true
	}
}
// rewriteValueLOONG64_OpLess8 lowers Less8: x<y becomes SGT with swapped,
// sign-extended operands (y>x signed).
// Generated from _gen/LOONG64.rules; do not hand-edit the logic.
func rewriteValueLOONG64_OpLess8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8 x y)
	// result: (SGT (SignExt8to64 y) (SignExt8to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLess8U lowers the unsigned Less8U op: x<y becomes
// SGTU with swapped, zero-extended operands (y>x unsigned).
// Generated from _gen/LOONG64.rules; do not hand-edit the logic.
func rewriteValueLOONG64_OpLess8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8U x y)
	// result: (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(y)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpLoad lowers the generic Load op to the width- and
// sign-appropriate LOONG64 load: MOVB/MOVBU/MOVH/MOVHU/MOVW/MOVWU for 8/16/32
// bit integers (U = unsigned/zero-extending), MOVV for 64-bit ints and
// pointers, MOVF/MOVD for 32/64-bit floats, and MOVBU for booleans. Each case
// below tries one match in source order and returns on the first hit; returns
// false if no case applies. Generated from _gen/LOONG64.rules; do not
// hand-edit the logic.
func rewriteValueLOONG64_OpLoad(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Load <t> ptr mem)
	// cond: t.IsBoolean()
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(t.IsBoolean()) {
			break
		}
		v.reset(OpLOONG64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && t.IsSigned())
	// result: (MOVBload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpLOONG64MOVBload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && !t.IsSigned())
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpLOONG64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && t.IsSigned())
	// result: (MOVHload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpLOONG64MOVHload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && !t.IsSigned())
	// result: (MOVHUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpLOONG64MOVHUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && t.IsSigned())
	// result: (MOVWload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpLOONG64MOVWload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && !t.IsSigned())
	// result: (MOVWUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpLOONG64MOVWUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is64BitInt(t) || isPtr(t))
	// result: (MOVVload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitInt(t) || isPtr(t)) {
			break
		}
		v.reset(OpLOONG64MOVVload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is32BitFloat(t)
	// result: (MOVFload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitFloat(t)) {
			break
		}
		v.reset(OpLOONG64MOVFload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is64BitFloat(t)
	// result: (MOVDload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitFloat(t)) {
			break
		}
		v.reset(OpLOONG64MOVDload)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLocalAddr lowers LocalAddr to MOVVaddr. When the
// pointed-to element contains pointers, the address is anchored to the memory
// state via SPanchored so the stack object stays live for the GC; otherwise
// the memory argument is dropped. Generated from _gen/LOONG64.rules; do not
// hand-edit the logic.
func rewriteValueLOONG64_OpLocalAddr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (LocalAddr <t> {sym} base mem)
	// cond: t.Elem().HasPointers()
	// result: (MOVVaddr {sym} (SPanchored base mem))
	for {
		t := v.Type
		sym := auxToSym(v.Aux)
		base := v_0
		mem := v_1
		if !(t.Elem().HasPointers()) {
			break
		}
		v.reset(OpLOONG64MOVVaddr)
		v.Aux = symToAux(sym)
		v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
		v0.AddArg2(base, mem)
		v.AddArg(v0)
		return true
	}
	// match: (LocalAddr <t> {sym} base _)
	// cond: !t.Elem().HasPointers()
	// result: (MOVVaddr {sym} base)
	for {
		t := v.Type
		sym := auxToSym(v.Aux)
		base := v_0
		if !(!t.Elem().HasPointers()) {
			break
		}
		v.reset(OpLOONG64MOVVaddr)
		v.Aux = symToAux(sym)
		v.AddArg(base)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh16x16 lowers Lsh16x16 (16-bit value, 16-bit shift
// count). If the shift is provably in range, a bare SLLV suffices; otherwise
// the SLLV result is masked to zero via MASKEQZ whenever the zero-extended
// count is >= 64, matching Go's defined shift semantics.
// Generated from _gen/LOONG64.rules; do not hand-edit the logic.
func rewriteValueLOONG64_OpLsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh16x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh16x32 lowers Lsh16x32 (32-bit shift count).
// Bounded shifts become a bare SLLV; otherwise the result is masked to zero
// via MASKEQZ when the zero-extended count is >= 64.
// Generated from _gen/LOONG64.rules; do not hand-edit the logic.
func rewriteValueLOONG64_OpLsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh16x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh16x64 lowers Lsh16x64 (64-bit shift count, so no
// extension of y is needed). Bounded shifts become a bare SLLV; otherwise the
// result is masked to zero via MASKEQZ when the count is >= 64.
// Generated from _gen/LOONG64.rules; do not hand-edit the logic.
func rewriteValueLOONG64_OpLsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh16x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh16x8 lowers Lsh16x8 (8-bit shift count).
// Bounded shifts become a bare SLLV; otherwise the result is masked to zero
// via MASKEQZ when the zero-extended count is >= 64.
// Generated from _gen/LOONG64.rules; do not hand-edit the logic.
func rewriteValueLOONG64_OpLsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh16x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh32x16 lowers Lsh32x16 using the 32-bit word shift
// SLL (note: word shift, so the overflow threshold is 32, not 64). Bounded
// shifts become a bare SLL; otherwise the result is masked to zero via
// MASKEQZ when the zero-extended count is >= 32.
// Generated from _gen/LOONG64.rules; do not hand-edit the logic.
func rewriteValueLOONG64_OpLsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLL)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh32x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLL <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(32)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh32x32 lowers Lsh32x32 using the 32-bit word shift
// SLL. Bounded shifts become a bare SLL; otherwise the result is masked to
// zero via MASKEQZ when the zero-extended count is >= 32.
// Generated from _gen/LOONG64.rules; do not hand-edit the logic.
func rewriteValueLOONG64_OpLsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLL)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh32x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLL <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(32)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh32x64 lowers Lsh32x64 using the 32-bit word shift
// SLL; the 64-bit count needs no extension. Bounded shifts become a bare SLL;
// otherwise the result is masked to zero via MASKEQZ when the count is >= 32.
// Generated from _gen/LOONG64.rules; do not hand-edit the logic.
func rewriteValueLOONG64_OpLsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLL)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh32x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLL <t> x y) (SGTU (MOVVconst <typ.UInt64> [32]) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(32)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh32x8 lowers Lsh32x8 using the 32-bit word shift
// SLL. Bounded shifts become a bare SLL; otherwise the result is masked to
// zero via MASKEQZ when the zero-extended count is >= 32.
// Generated from _gen/LOONG64.rules; do not hand-edit the logic.
func rewriteValueLOONG64_OpLsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLL)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh32x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLL <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(32)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh64x16 lowers Lsh64x16 (16-bit shift count).
// Bounded shifts become a bare SLLV; otherwise the result is masked to zero
// via MASKEQZ when the zero-extended count is >= 64.
// Generated from _gen/LOONG64.rules; do not hand-edit the logic.
func rewriteValueLOONG64_OpLsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh64x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh64x32 lowers Lsh64x32 (32-bit shift count).
// Bounded shifts become a bare SLLV; otherwise the result is masked to zero
// via MASKEQZ when the zero-extended count is >= 64.
// Generated from _gen/LOONG64.rules; do not hand-edit the logic.
func rewriteValueLOONG64_OpLsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh64x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh64x64 lowers Lsh64x64 (64-bit shift count, no
// extension needed). Bounded shifts become a bare SLLV; otherwise the result
// is masked to zero via MASKEQZ when the count is >= 64.
// Generated from _gen/LOONG64.rules; do not hand-edit the logic.
func rewriteValueLOONG64_OpLsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh64x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh64x8 lowers Lsh64x8 (8-bit shift count).
// Bounded shifts become a bare SLLV; otherwise the result is masked to zero
// via MASKEQZ when the zero-extended count is >= 64.
// Generated from _gen/LOONG64.rules; do not hand-edit the logic.
func rewriteValueLOONG64_OpLsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh64x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh8x16 lowers Lsh8x16 (16-bit shift count).
// Bounded shifts become a bare SLLV; otherwise the result is masked to zero
// via MASKEQZ when the zero-extended count is >= 64.
// Generated from _gen/LOONG64.rules; do not hand-edit the logic.
func rewriteValueLOONG64_OpLsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh8x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh8x32 lowers Lsh8x32 (32-bit shift count).
// Bounded shifts become a bare SLLV; otherwise the result is masked to zero
// via MASKEQZ when the zero-extended count is >= 64.
// Generated from _gen/LOONG64.rules; do not hand-edit the logic.
func rewriteValueLOONG64_OpLsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh8x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh8x64 lowers Lsh8x64 (64-bit shift count, no
// extension needed). Bounded shifts become a bare SLLV; otherwise the result
// is masked to zero via MASKEQZ when the count is >= 64.
// Generated from _gen/LOONG64.rules; do not hand-edit the logic.
func rewriteValueLOONG64_OpLsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh8x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpLsh8x8 lowers Lsh8x8 to LOONG64 SLLV. The 8-bit
// shift amount is zero-extended to 64 bits; the unbounded case masks the
// result to zero for amounts >= 64 via MASKEQZ.
func rewriteValueLOONG64_OpLsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SLLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Lsh8x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SLLV <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		// v1 is shared by both the shift and the bounds check below.
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpMod16 lowers signed 16-bit remainder to a 64-bit
// REMV on sign-extended operands. Always fires.
func rewriteValueLOONG64_OpMod16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16 x y)
	// result: (REMV (SignExt16to64 x) (SignExt16to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64REMV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpMod16u lowers unsigned 16-bit remainder to a
// 64-bit REMVU on zero-extended operands. Always fires.
func rewriteValueLOONG64_OpMod16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16u x y)
	// result: (REMVU (ZeroExt16to64 x) (ZeroExt16to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64REMVU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpMod32 lowers signed 32-bit remainder to a 64-bit
// REMV on sign-extended operands. Always fires.
func rewriteValueLOONG64_OpMod32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod32 x y)
	// result: (REMV (SignExt32to64 x) (SignExt32to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64REMV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpMod32u lowers unsigned 32-bit remainder to a
// 64-bit REMVU on zero-extended operands. Always fires.
func rewriteValueLOONG64_OpMod32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod32u x y)
	// result: (REMVU (ZeroExt32to64 x) (ZeroExt32to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64REMVU)
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpMod64 lowers signed 64-bit remainder directly to
// REMV; the operands are already full-width. Always fires.
func rewriteValueLOONG64_OpMod64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Mod64 x y)
	// result: (REMV x y)
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64REMV)
		v.AddArg2(x, y)
		return true
	}
}
// rewriteValueLOONG64_OpMod8 lowers signed 8-bit remainder to a 64-bit
// REMV on sign-extended operands. Always fires.
func rewriteValueLOONG64_OpMod8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8 x y)
	// result: (REMV (SignExt8to64 x) (SignExt8to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64REMV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpMod8u lowers unsigned 8-bit remainder to a 64-bit
// REMVU on zero-extended operands. Always fires.
func rewriteValueLOONG64_OpMod8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8u x y)
	// result: (REMVU (ZeroExt8to64 x) (ZeroExt8to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64REMVU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpMove lowers a generic Move (memmove of a
// compile-time-constant size, stored in AuxInt) into explicit load/store
// pairs for sizes 0-16 bytes, and into LoweredMove / LoweredMoveLoop for
// larger sizes. Odd sizes between 5 and 15 are handled with two possibly
// overlapping loads/stores (e.g. size 7 uses word accesses at offsets 3
// and 0) rather than byte-by-byte copies.
func rewriteValueLOONG64_OpMove(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Move [0] _ _ mem)
	// result: mem
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.copyOf(mem)
		return true
	}
	// match: (Move [1] dst src mem)
	// result: (MOVBstore dst (MOVBUload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 1 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVBUload, typ.UInt8)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [2] dst src mem)
	// result: (MOVHstore dst (MOVHUload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVHstore)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVHUload, typ.UInt16)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [3] dst src mem)
	// result: (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVHstore dst (MOVHUload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 3 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVBUload, typ.UInt8)
		v0.AuxInt = int32ToAuxInt(2)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVHUload, typ.UInt16)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [4] dst src mem)
	// result: (MOVWstore dst (MOVWUload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVWstore)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVWUload, typ.UInt32)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [5] dst src mem)
	// result: (MOVBstore [4] dst (MOVBUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 5 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVBUload, typ.UInt8)
		v0.AuxInt = int32ToAuxInt(4)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVWUload, typ.UInt32)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [6] dst src mem)
	// result: (MOVHstore [4] dst (MOVHUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 6 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVHUload, typ.UInt16)
		v0.AuxInt = int32ToAuxInt(4)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVWUload, typ.UInt32)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [7] dst src mem)
	// result: (MOVWstore [3] dst (MOVWUload [3] src mem) (MOVWstore dst (MOVWUload src mem) mem))
	// Note: the two word accesses overlap at byte 3; the later store at
	// offset 0 is ordered after the store at offset 3 via the mem chain.
	for {
		if auxIntToInt64(v.AuxInt) != 7 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(3)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVWUload, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(3)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVWUload, typ.UInt32)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [8] dst src mem)
	// result: (MOVVstore dst (MOVVload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVVstore)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [9] dst src mem)
	// result: (MOVBstore [8] dst (MOVBUload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 9 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVBUload, typ.UInt8)
		v0.AuxInt = int32ToAuxInt(8)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [10] dst src mem)
	// result: (MOVHstore [8] dst (MOVHUload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 10 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVHUload, typ.UInt16)
		v0.AuxInt = int32ToAuxInt(8)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [11] dst src mem)
	// result: (MOVWstore [7] dst (MOVWload [7] src mem) (MOVVstore dst (MOVVload src mem) mem))
	// Note: overlapping accesses (bytes 7-10 and 0-7); a signed MOVWload
	// is fine here because the store truncates to 32 bits anyway.
	for {
		if auxIntToInt64(v.AuxInt) != 11 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(7)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVWload, typ.Int32)
		v0.AuxInt = int32ToAuxInt(7)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [12] dst src mem)
	// result: (MOVWstore [8] dst (MOVWUload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 12 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVWUload, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(8)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [13] dst src mem)
	// result: (MOVVstore [5] dst (MOVVload [5] src mem) (MOVVstore dst (MOVVload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 13 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(5)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(5)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [14] dst src mem)
	// result: (MOVVstore [6] dst (MOVVload [6] src mem) (MOVVstore dst (MOVVload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 14 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(6)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(6)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [15] dst src mem)
	// result: (MOVVstore [7] dst (MOVVload [7] src mem) (MOVVstore dst (MOVVload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 15 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(7)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(7)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [16] dst src mem)
	// result: (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 16 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(8)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: s > 16 && s < 192 && logLargeCopy(v, s)
	// result: (LoweredMove [s] dst src mem)
	// Mid-sized copies use the unrolled LoweredMove pseudo-op.
	for {
		s := auxIntToInt64(v.AuxInt)
		dst := v_0
		src := v_1
		mem := v_2
		if !(s > 16 && s < 192 && logLargeCopy(v, s)) {
			break
		}
		v.reset(OpLOONG64LoweredMove)
		v.AuxInt = int64ToAuxInt(s)
		v.AddArg3(dst, src, mem)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: s >= 192 && logLargeCopy(v, s)
	// result: (LoweredMoveLoop [s] dst src mem)
	// Large copies use the looping LoweredMoveLoop pseudo-op.
	for {
		s := auxIntToInt64(v.AuxInt)
		dst := v_0
		src := v_1
		mem := v_2
		if !(s >= 192 && logLargeCopy(v, s)) {
			break
		}
		v.reset(OpLOONG64LoweredMoveLoop)
		v.AuxInt = int64ToAuxInt(s)
		v.AddArg3(dst, src, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpNeq16 lowers Neq16 as (x ^ y) >u 0: the XOR is
// nonzero exactly when the operands differ. Always fires.
// Note(review): x is widened with ZeroExt16to32 while y uses
// ZeroExt16to64 — this asymmetry mirrors the source rule in
// _gen/LOONG64.rules; both extensions clear the bits above bit 15.
func rewriteValueLOONG64_OpNeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq16 x y)
	// result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to64 y)) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueLOONG64_OpNeq32 lowers Neq32 as (x ^ y) >u 0 on the
// zero-extended operands: the XOR is nonzero exactly when they differ.
// Always fires.
func rewriteValueLOONG64_OpNeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq32 x y)
	// result: (SGTU (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueLOONG64_OpNeq32F lowers float32 inequality as the negation
// of the FP equal comparison: FPFlagFalse(CMPEQF x y). Always fires.
func rewriteValueLOONG64_OpNeq32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Neq32F x y)
	// result: (FPFlagFalse (CMPEQF x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64FPFlagFalse)
		v0 := b.NewValue0(v.Pos, OpLOONG64CMPEQF, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpNeq64 lowers Neq64 as (x ^ y) >u 0; the operands
// are already full-width so no extension is needed. Always fires.
func rewriteValueLOONG64_OpNeq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq64 x y)
	// result: (SGTU (XOR x y) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpNeq64F lowers float64 inequality as the negation
// of the FP equal comparison: FPFlagFalse(CMPEQD x y). Always fires.
func rewriteValueLOONG64_OpNeq64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Neq64F x y)
	// result: (FPFlagFalse (CMPEQD x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64FPFlagFalse)
		v0 := b.NewValue0(v.Pos, OpLOONG64CMPEQD, types.TypeFlags)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpNeq8 lowers Neq8 as (x ^ y) >u 0 on the
// zero-extended operands: the XOR is nonzero exactly when they differ.
// Always fires.
func rewriteValueLOONG64_OpNeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq8 x y)
	// result: (SGTU (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueLOONG64_OpNeqPtr lowers pointer inequality as (x ^ y) >u 0,
// identically to the 64-bit integer case. Always fires.
func rewriteValueLOONG64_OpNeqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (NeqPtr x y)
	// result: (SGTU (XOR x y) (MOVVconst [0]))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64SGTU)
		v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueLOONG64_OpNot lowers boolean negation as XOR with 1,
// flipping the low bit of the 0/1 boolean value. Always fires.
func rewriteValueLOONG64_OpNot(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Not x)
	// result: (XORconst [1] x)
	for {
		x := v_0
		v.reset(OpLOONG64XORconst)
		v.AuxInt = int64ToAuxInt(1)
		v.AddArg(x)
		return true
	}
}
// rewriteValueLOONG64_OpOffPtr lowers OffPtr: an offset from the stack
// pointer becomes a MOVVaddr (address computation with a 32-bit aux
// offset); any other base pointer becomes a plain ADDVconst.
func rewriteValueLOONG64_OpOffPtr(v *Value) bool {
	v_0 := v.Args[0]
	// match: (OffPtr [off] ptr:(SP))
	// result: (MOVVaddr [int32(off)] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		if ptr.Op != OpSP {
			break
		}
		v.reset(OpLOONG64MOVVaddr)
		v.AuxInt = int32ToAuxInt(int32(off))
		v.AddArg(ptr)
		return true
	}
	// match: (OffPtr [off] ptr)
	// result: (ADDVconst [off] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		v.reset(OpLOONG64ADDVconst)
		v.AuxInt = int64ToAuxInt(off)
		v.AddArg(ptr)
		return true
	}
}
// rewriteValueLOONG64_OpPopCount16 lowers PopCount16 by moving the
// zero-extended value into a floating-point/SIMD register (MOVWgpfp),
// counting bits with VPCNT16, and moving the result back (MOVWfpgp).
// Always fires.
func rewriteValueLOONG64_OpPopCount16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (PopCount16 <t> x)
	// result: (MOVWfpgp <t> (VPCNT16 <typ.Float32> (MOVWgpfp <typ.Float32> (ZeroExt16to32 x))))
	for {
		t := v.Type
		x := v_0
		v.reset(OpLOONG64MOVWfpgp)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpLOONG64VPCNT16, typ.Float32)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWgpfp, typ.Float32)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v2.AddArg(x)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpPopCount32 lowers PopCount32 by moving the value
// into a floating-point/SIMD register, counting bits with VPCNT32, and
// moving the result back. Always fires.
func rewriteValueLOONG64_OpPopCount32(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (PopCount32 <t> x)
	// result: (MOVWfpgp <t> (VPCNT32 <typ.Float32> (MOVWgpfp <typ.Float32> x)))
	for {
		t := v.Type
		x := v_0
		v.reset(OpLOONG64MOVWfpgp)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpLOONG64VPCNT32, typ.Float32)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWgpfp, typ.Float32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpPopCount64 lowers PopCount64 by moving the value
// into a floating-point/SIMD register, counting bits with VPCNT64, and
// moving the result back. Always fires.
func rewriteValueLOONG64_OpPopCount64(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (PopCount64 <t> x)
	// result: (MOVVfpgp <t> (VPCNT64 <typ.Float64> (MOVVgpfp <typ.Float64> x)))
	for {
		t := v.Type
		x := v_0
		v.reset(OpLOONG64MOVVfpgp)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpLOONG64VPCNT64, typ.Float64)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVgpfp, typ.Float64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpPrefetchCache lowers PrefetchCache to the PRELD
// instruction with hint 0 in the aux field. Always fires.
func rewriteValueLOONG64_OpPrefetchCache(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (PrefetchCache addr mem)
	// result: (PRELD addr mem [0])
	for {
		addr := v_0
		mem := v_1
		v.reset(OpLOONG64PRELD)
		v.AuxInt = int64ToAuxInt(0)
		v.AddArg2(addr, mem)
		return true
	}
}
// rewriteValueLOONG64_OpPrefetchCacheStreamed lowers
// PrefetchCacheStreamed to PRELDX. The aux constant packs the PRELDX
// operand fields (presumably hint/stride/count configuration — see the
// PRELDX definition in _gen/LOONG64.rules for the field layout).
// Always fires.
func rewriteValueLOONG64_OpPrefetchCacheStreamed(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (PrefetchCacheStreamed addr mem)
	// result: (PRELDX addr mem [(((512 << 1) + (1 << 12)) << 5) + 2])
	for {
		addr := v_0
		mem := v_1
		v.reset(OpLOONG64PRELDX)
		v.AuxInt = int64ToAuxInt((((512 << 1) + (1 << 12)) << 5) + 2)
		v.AddArg2(addr, mem)
		return true
	}
}
// rewriteValueLOONG64_OpRotateLeft16 lowers RotateLeft16. A constant
// rotate decomposes into (x << (c&15)) | (x >>u (-c&15)). For a variable
// amount, the 16-bit value is replicated into both halves of a 32-bit
// word (x | x<<16) so that a 32-bit ROTR by the negated amount realizes
// a 16-bit rotate-left.
func rewriteValueLOONG64_OpRotateLeft16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft16 <t> x (MOVVconst [c]))
	// result: (Or16 (Lsh16x64 <t> x (MOVVconst [c&15])) (Rsh16Ux64 <t> x (MOVVconst [-c&15])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr16)
		v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 15)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(-c & 15)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (RotateLeft16 <t> x y)
	// result: (ROTR <t> (OR <typ.UInt32> (ZeroExt16to32 x) (SLLVconst <t> (ZeroExt16to32 x) [16])) (NEGV <typ.Int64> y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64ROTR)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, typ.UInt32)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpLOONG64SLLVconst, t)
		v2.AuxInt = int64ToAuxInt(16)
		v2.AddArg(v1)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64NEGV, typ.Int64)
		v3.AddArg(y)
		v.AddArg2(v0, v3)
		return true
	}
}
// rewriteValueLOONG64_OpRotateLeft32 lowers RotateLeft32 as a 32-bit
// rotate-right by the negated amount: ROTR x (-y). Always fires.
func rewriteValueLOONG64_OpRotateLeft32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (RotateLeft32 x y)
	// result: (ROTR x (NEGV <y.Type> y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64ROTR)
		v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, y.Type)
		v0.AddArg(y)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueLOONG64_OpRotateLeft64 lowers RotateLeft64 as a 64-bit
// rotate-right by the negated amount: ROTRV x (-y). Always fires.
func rewriteValueLOONG64_OpRotateLeft64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (RotateLeft64 x y)
	// result: (ROTRV x (NEGV <y.Type> y))
	for {
		x := v_0
		y := v_1
		v.reset(OpLOONG64ROTRV)
		v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, y.Type)
		v0.AddArg(y)
		v.AddArg2(x, v0)
		return true
	}
}
// rewriteValueLOONG64_OpRotateLeft8 lowers RotateLeft8. A constant rotate
// decomposes into (x << (c&7)) | (x >>u (-c&7)). For a variable amount it
// is built explicitly as OR of SLLV by (y&7) and SRLV of the
// zero-extended value by ((-y)&7), since there is no 8-bit rotate
// instruction.
func rewriteValueLOONG64_OpRotateLeft8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft8 <t> x (MOVVconst [c]))
	// result: (Or8 (Lsh8x64 <t> x (MOVVconst [c&7])) (Rsh8Ux64 <t> x (MOVVconst [-c&7])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpLOONG64MOVVconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr8)
		v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 7)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh8Ux64, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(-c & 7)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (RotateLeft8 <t> x y)
	// result: (OR <t> (SLLV <t> x (ANDconst <typ.Int64> [7] y)) (SRLV <t> (ZeroExt8to64 x) (ANDconst <typ.Int64> [7] (NEGV <typ.Int64> y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpLOONG64OR)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpLOONG64SLLV, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64ANDconst, typ.Int64)
		v1.AuxInt = int64ToAuxInt(7)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(x)
		v4 := b.NewValue0(v.Pos, OpLOONG64ANDconst, typ.Int64)
		v4.AuxInt = int64ToAuxInt(7)
		v5 := b.NewValue0(v.Pos, OpLOONG64NEGV, typ.Int64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v2.AddArg2(v3, v4)
		v.AddArg2(v0, v2)
		return true
	}
}
// rewriteValueLOONG64_OpRsh16Ux16 lowers Rsh16Ux16 (unsigned 16-bit right
// shift by a 16-bit amount): a bare SRLV of the zero-extended operand when the
// shift is provably bounded, otherwise MASKEQZ/SGTU zero the result for shift
// amounts >= 64. Returns true if v was rewritten.
func rewriteValueLOONG64_OpRsh16Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> (ZeroExt16to64 x) (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(64)
		v3.AddArg2(v4, v2)
		v.AddArg2(v0, v3)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh16Ux32 lowers Rsh16Ux32 (unsigned 16-bit right
// shift by a 32-bit amount); same scheme as Rsh16Ux16 with the shift amount
// zero-extended from 32 bits. Returns true if v was rewritten.
func rewriteValueLOONG64_OpRsh16Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> (ZeroExt16to64 x) (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(64)
		v3.AddArg2(v4, v2)
		v.AddArg2(v0, v3)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh16Ux64 lowers Rsh16Ux64 (unsigned 16-bit right
// shift by a 64-bit amount); the amount needs no extension, so the unbounded
// form uses y directly in both the SRLV and the SGTU guard.
func rewriteValueLOONG64_OpRsh16Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> (ZeroExt16to64 x) y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, y)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh16Ux8 lowers Rsh16Ux8 (unsigned 16-bit right shift
// by an 8-bit amount); same scheme as Rsh16Ux16 with the shift amount
// zero-extended from 8 bits. Returns true if v was rewritten.
func rewriteValueLOONG64_OpRsh16Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> (ZeroExt16to64 x) (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(64)
		v3.AddArg2(v4, v2)
		v.AddArg2(v0, v3)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh16x16 lowers Rsh16x16 (signed 16-bit right shift
// by a 16-bit amount): SRAV of the sign-extended operand; when the shift is
// not provably bounded, the OR/NEGV/SGTU combination saturates amounts above
// 63 so the result is all sign bits. Returns true if v was rewritten.
func rewriteValueLOONG64_OpRsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh16x32 lowers Rsh16x32 (signed 16-bit right shift
// by a 32-bit amount); same scheme as Rsh16x16 with the shift amount
// zero-extended from 32 bits. Returns true if v was rewritten.
func rewriteValueLOONG64_OpRsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh16x64 lowers Rsh16x64 (signed 16-bit right shift
// by a 64-bit amount); the amount needs no extension, so the unbounded form
// uses y directly in the SGTU clamp. Returns true if v was rewritten.
func rewriteValueLOONG64_OpRsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(y, v4)
		v2.AddArg(v3)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh16x8 lowers Rsh16x8 (signed 16-bit right shift by
// an 8-bit amount); same scheme as Rsh16x16 with the shift amount
// zero-extended from 8 bits. Returns true if v was rewritten.
func rewriteValueLOONG64_OpRsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh16x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh32Ux16 lowers Rsh32Ux16 (unsigned 32-bit right
// shift by a 16-bit amount) to the 32-bit SRL op; when not provably bounded,
// MASKEQZ/SGTU zero the result for amounts >= 32. Returns true if rewritten.
func rewriteValueLOONG64_OpRsh32Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRL)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh32Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRL <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(32)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh32Ux32 lowers Rsh32Ux32 (unsigned 32-bit right
// shift by a 32-bit amount); same scheme as Rsh32Ux16 with the shift amount
// zero-extended from 32 bits. Returns true if v was rewritten.
func rewriteValueLOONG64_OpRsh32Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRL)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh32Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRL <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(32)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh32Ux64 lowers Rsh32Ux64 (unsigned 32-bit right
// shift by a 64-bit amount); the amount needs no extension, so the unbounded
// form uses y directly in both the SRL and the SGTU guard.
func rewriteValueLOONG64_OpRsh32Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRL)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh32Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRL <t> x y) (SGTU (MOVVconst <typ.UInt64> [32]) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(32)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh32Ux8 lowers Rsh32Ux8 (unsigned 32-bit right shift
// by an 8-bit amount); same scheme as Rsh32Ux16 with the shift amount
// zero-extended from 8 bits. Returns true if v was rewritten.
func rewriteValueLOONG64_OpRsh32Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRL)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh32Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRL <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [32]) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(32)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh32x16 lowers Rsh32x16 (signed 32-bit right shift
// by a 16-bit amount) to the 32-bit SRA op; when not provably bounded, the
// OR/NEGV/SGTU combination saturates amounts above 31 so the result is all
// sign bits. Returns true if v was rewritten.
func rewriteValueLOONG64_OpRsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRA)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh32x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [31]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRA)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(31)
		v2.AddArg2(v3, v4)
		v1.AddArg(v2)
		v0.AddArg2(v1, v3)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh32x32 lowers Rsh32x32 (signed 32-bit right shift
// by a 32-bit amount); same scheme as Rsh32x16 with the shift amount
// zero-extended from 32 bits. Returns true if v was rewritten.
func rewriteValueLOONG64_OpRsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRA)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh32x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [31]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRA)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(31)
		v2.AddArg2(v3, v4)
		v1.AddArg(v2)
		v0.AddArg2(v1, v3)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh32x64 lowers Rsh32x64 (signed 32-bit right shift
// by a 64-bit amount); the amount needs no extension, so the unbounded form
// uses y directly in the SGTU clamp against 31. Returns true if rewritten.
func rewriteValueLOONG64_OpRsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRA)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh32x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA x (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [31]))) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRA)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(31)
		v2.AddArg2(y, v3)
		v1.AddArg(v2)
		v0.AddArg2(v1, y)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh32x8 lowers Rsh32x8 (signed 32-bit right shift by
// an 8-bit amount); same scheme as Rsh32x16 with the shift amount
// zero-extended from 8 bits. Returns true if v was rewritten.
func rewriteValueLOONG64_OpRsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRA)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh32x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [31]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRA)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(31)
		v2.AddArg2(v3, v4)
		v1.AddArg(v2)
		v0.AddArg2(v1, v3)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh64Ux16 lowers Rsh64Ux16 (unsigned 64-bit right
// shift by a 16-bit amount) to SRLV; when not provably bounded, MASKEQZ/SGTU
// zero the result for amounts >= 64. Returns true if v was rewritten.
func rewriteValueLOONG64_OpRsh64Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> x (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh64Ux32 lowers Rsh64Ux32 (unsigned 64-bit right
// shift by a 32-bit amount); same scheme as Rsh64Ux16 with the shift amount
// zero-extended from 32 bits. Returns true if v was rewritten.
func rewriteValueLOONG64_OpRsh64Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> x (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh64Ux64 lowers Rsh64Ux64 (unsigned 64-bit right
// shift by a 64-bit amount); no extension is needed, so the unbounded form
// uses y directly in both the SRLV and the SGTU guard.
func rewriteValueLOONG64_OpRsh64Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> x y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(64)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh64Ux8 lowers Rsh64Ux8 (unsigned 64-bit right shift
// by an 8-bit amount); same scheme as Rsh64Ux16 with the shift amount
// zero-extended from 8 bits. Returns true if v was rewritten.
func rewriteValueLOONG64_OpRsh64Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> x (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, v1)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh64x16 lowers Rsh64x16 (signed 64-bit right shift
// by a 16-bit amount) to SRAV; when not provably bounded, the OR/NEGV/SGTU
// combination saturates amounts above 63 so the result is all sign bits.
func rewriteValueLOONG64_OpRsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v2.AddArg2(v3, v4)
		v1.AddArg(v2)
		v0.AddArg2(v1, v3)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh64x32 lowers Rsh64x32 (signed 64-bit right shift
// by a 32-bit amount); same scheme as Rsh64x16 with the shift amount
// zero-extended from 32 bits. Returns true if v was rewritten.
func rewriteValueLOONG64_OpRsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v2.AddArg2(v3, v4)
		v1.AddArg(v2)
		v0.AddArg2(v1, v3)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh64x64 lowers Rsh64x64 (signed 64-bit right shift
// by a 64-bit amount); the amount needs no extension, so the unbounded form
// uses y directly in the SGTU clamp against 63. Returns true if rewritten.
func rewriteValueLOONG64_OpRsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(63)
		v2.AddArg2(y, v3)
		v1.AddArg(v2)
		v0.AddArg2(v1, y)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh64x8 lowers Rsh64x8 (signed 64-bit right shift by
// an 8-bit amount); same scheme as Rsh64x16 with the shift amount
// zero-extended from 8 bits. Returns true if v was rewritten.
func rewriteValueLOONG64_OpRsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Rsh64x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v2.AddArg2(v3, v4)
		v1.AddArg(v2)
		v0.AddArg2(v1, v3)
		v.AddArg2(x, v0)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh8Ux16 lowers the generic unsigned 8-bit right
// shift by a 16-bit amount to LOONG64 ops; both operands are
// zero-extended to 64 bits first. It reports whether a rewrite was
// applied. Generated from _gen/LOONG64.rules.
func rewriteValueLOONG64_OpRsh8Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> (ZeroExt8to64 x) (ZeroExt16to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y)))
	// MASKEQZ zeroes the shifted value when the count is >= 64
	// (SGTU 64 y is the in-range predicate), matching Go's shift semantics.
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(64)
		v3.AddArg2(v4, v2)
		v.AddArg2(v0, v3)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh8Ux32 lowers the generic unsigned 8-bit right
// shift by a 32-bit amount to LOONG64 ops; both operands are
// zero-extended to 64 bits first. It reports whether a rewrite was
// applied. Generated from _gen/LOONG64.rules.
func rewriteValueLOONG64_OpRsh8Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> (ZeroExt8to64 x) (ZeroExt32to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y)))
	// MASKEQZ zeroes the shifted value when the count is >= 64
	// (SGTU 64 y is the in-range predicate), matching Go's shift semantics.
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(64)
		v3.AddArg2(v4, v2)
		v.AddArg2(v0, v3)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh8Ux64 lowers the generic unsigned 8-bit right
// shift by a 64-bit amount to LOONG64 ops; the value operand is
// zero-extended to 64 bits (the count already is 64-bit). It reports
// whether a rewrite was applied. Generated from _gen/LOONG64.rules.
func rewriteValueLOONG64_OpRsh8Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> (ZeroExt8to64 x) y) (SGTU (MOVVconst <typ.UInt64> [64]) y))
	// MASKEQZ zeroes the shifted value when the count is >= 64
	// (SGTU 64 y is the in-range predicate), matching Go's shift semantics.
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(64)
		v2.AddArg2(v3, y)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh8Ux8 lowers the generic unsigned 8-bit right
// shift by an 8-bit amount to LOONG64 ops; both operands are
// zero-extended to 64 bits first. It reports whether a rewrite was
// applied. Generated from _gen/LOONG64.rules.
func rewriteValueLOONG64_OpRsh8Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLV (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRLV)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (MASKEQZ (SRLV <t> (ZeroExt8to64 x) (ZeroExt8to64 y)) (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y)))
	// MASKEQZ zeroes the shifted value when the count is >= 64
	// (SGTU 64 y is the in-range predicate), matching Go's shift semantics.
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64MASKEQZ)
		v0 := b.NewValue0(v.Pos, OpLOONG64SRLV, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(64)
		v3.AddArg2(v4, v2)
		v.AddArg2(v0, v3)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh8x16 lowers the generic signed 8-bit right
// shift by a 16-bit amount to LOONG64 ops; the value is sign-extended
// and the count zero-extended to 64 bits. It reports whether a rewrite
// was applied. Generated from _gen/LOONG64.rules.
func rewriteValueLOONG64_OpRsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	// Oversized shift amounts are forced to all ones (see Rsh64x64) so the
	// arithmetic shift produces the sign-fill result.
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh8x32 lowers the generic signed 8-bit right
// shift by a 32-bit amount to LOONG64 ops; the value is sign-extended
// and the count zero-extended to 64 bits. It reports whether a rewrite
// was applied. Generated from _gen/LOONG64.rules.
func rewriteValueLOONG64_OpRsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	// Oversized shift amounts are forced to all ones (see Rsh64x64) so the
	// arithmetic shift produces the sign-fill result.
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh8x64 lowers the generic signed 8-bit right
// shift by a 64-bit amount to LOONG64 ops; the value is sign-extended
// to 64 bits (the count already is 64-bit). It reports whether a rewrite
// was applied. Generated from _gen/LOONG64.rules.
func rewriteValueLOONG64_OpRsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	// Oversized shift amounts are forced to all ones (see Rsh64x64) so the
	// arithmetic shift produces the sign-fill result.
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(y, v4)
		v2.AddArg(v3)
		v1.AddArg2(v2, y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpRsh8x8 lowers the generic signed 8-bit right
// shift by an 8-bit amount to LOONG64 ops; the value is sign-extended
// and the count zero-extended to 64 bits. It reports whether a rewrite
// was applied. Generated from _gen/LOONG64.rules.
func rewriteValueLOONG64_OpRsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAV (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	// match: (Rsh8x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	// Oversized shift amounts are forced to all ones (see Rsh64x64) so the
	// arithmetic shift produces the sign-fill result.
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpLOONG64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpLOONG64OR, t)
		v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(63)
		v3.AddArg2(v4, v5)
		v2.AddArg(v3)
		v1.AddArg2(v2, v4)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpSelect0 lowers Select0 (the first result of a
// multi-result op) for Mul64uhilo, Mul64uover, Add64carry, and
// Sub64borrow. It reports whether a rewrite was applied.
// Generated from _gen/LOONG64.rules.
func rewriteValueLOONG64_OpSelect0(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Select0 (Mul64uhilo x y))
	// result: (MULHVU x y)
	for {
		if v_0.Op != OpMul64uhilo {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpLOONG64MULHVU)
		v.AddArg2(x, y)
		return true
	}
	// match: (Select0 (Mul64uover x y))
	// result: (MULV x y)
	for {
		if v_0.Op != OpMul64uover {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpLOONG64MULV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Select0 <t> (Add64carry x y c))
	// result: (ADDV (ADDV <t> x y) c)
	for {
		t := v.Type
		if v_0.Op != OpAdd64carry {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpLOONG64ADDV)
		v0 := b.NewValue0(v.Pos, OpLOONG64ADDV, t)
		v0.AddArg2(x, y)
		v.AddArg2(v0, c)
		return true
	}
	// match: (Select0 <t> (Sub64borrow x y c))
	// result: (SUBV (SUBV <t> x y) c)
	for {
		t := v.Type
		if v_0.Op != OpSub64borrow {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpLOONG64SUBV)
		v0 := b.NewValue0(v.Pos, OpLOONG64SUBV, t)
		v0.AddArg2(x, y)
		v.AddArg2(v0, c)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpSelect1 lowers Select1 (the second result of a
// multi-result op) for Mul64uhilo, Mul64uover, Add64carry, and
// Sub64borrow. For Add64carry/Sub64borrow the second result is the
// carry/borrow bit, derived from unsigned compares (SGTU) against the
// partial sums/differences. It reports whether a rewrite was applied.
// Generated from _gen/LOONG64.rules.
func rewriteValueLOONG64_OpSelect1(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Select1 (Mul64uhilo x y))
	// result: (MULV x y)
	for {
		if v_0.Op != OpMul64uhilo {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpLOONG64MULV)
		v.AddArg2(x, y)
		return true
	}
	// match: (Select1 (Mul64uover x y))
	// result: (SGTU <typ.Bool> (MULHVU x y) (MOVVconst <typ.UInt64> [0]))
	// Overflow occurred iff the high 64 bits of the product are nonzero.
	for {
		if v_0.Op != OpMul64uover {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpLOONG64SGTU)
		v.Type = typ.Bool
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Select1 <t> (Add64carry x y c))
	// result: (OR (SGTU <t> x s:(ADDV <t> x y)) (SGTU <t> s (ADDV <t> s c)))
	// Carry out is set when either addition wraps (sum < an addend);
	// s names the shared partial sum x+y.
	for {
		t := v.Type
		if v_0.Op != OpAdd64carry {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpLOONG64OR)
		v0 := b.NewValue0(v.Pos, OpLOONG64SGTU, t)
		s := b.NewValue0(v.Pos, OpLOONG64ADDV, t)
		s.AddArg2(x, y)
		v0.AddArg2(x, s)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64ADDV, t)
		v3.AddArg2(s, c)
		v2.AddArg2(s, v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Select1 <t> (Sub64borrow x y c))
	// result: (OR (SGTU <t> s:(SUBV <t> x y) x) (SGTU <t> (SUBV <t> s c) s))
	// Borrow out is set when either subtraction wraps (difference > the
	// minuend); s names the shared partial difference x-y.
	for {
		t := v.Type
		if v_0.Op != OpSub64borrow {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpLOONG64OR)
		v0 := b.NewValue0(v.Pos, OpLOONG64SGTU, t)
		s := b.NewValue0(v.Pos, OpLOONG64SUBV, t)
		s.AddArg2(x, y)
		v0.AddArg2(s, x)
		v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, t)
		v3 := b.NewValue0(v.Pos, OpLOONG64SUBV, t)
		v3.AddArg2(s, c)
		v2.AddArg2(v3, s)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpSelectN replaces a single-use static call to
// runtime.memmove with an inlined Move op when the size is a
// non-negative constant and isInlinableMemmove approves it for this
// config; the call is clobbered. It reports whether a rewrite was
// applied. Generated from _gen/LOONG64.rules.
func rewriteValueLOONG64_OpSelectN(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (SelectN [0] call:(CALLstatic {sym} dst src (MOVVconst [sz]) mem))
	// cond: sz >= 0 && isSameCall(sym, "runtime.memmove") && call.Uses == 1 && isInlinableMemmove(dst, src, sz, config) && clobber(call)
	// result: (Move [sz] dst src mem)
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		call := v_0
		if call.Op != OpLOONG64CALLstatic || len(call.Args) != 4 {
			break
		}
		sym := auxToCall(call.Aux)
		mem := call.Args[3]
		dst := call.Args[0]
		src := call.Args[1]
		call_2 := call.Args[2]
		if call_2.Op != OpLOONG64MOVVconst {
			break
		}
		sz := auxIntToInt64(call_2.AuxInt)
		if !(sz >= 0 && isSameCall(sym, "runtime.memmove") && call.Uses == 1 && isInlinableMemmove(dst, src, sz, config) && clobber(call)) {
			break
		}
		v.reset(OpMove)
		v.AuxInt = int64ToAuxInt(sz)
		v.AddArg3(dst, src, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpSlicemask lowers Slicemask (all ones if x > 0,
// zero if x == 0) to an arithmetic right shift of -x by 63, which
// broadcasts the sign bit of the negation. Always rewrites, so it
// always returns true. Generated from _gen/LOONG64.rules.
func rewriteValueLOONG64_OpSlicemask(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Slicemask <t> x)
	// result: (SRAVconst (NEGV <t> x) [63])
	for {
		t := v.Type
		x := v_0
		v.reset(OpLOONG64SRAVconst)
		v.AuxInt = int64ToAuxInt(63)
		v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueLOONG64_OpStore lowers a generic typed Store to the
// LOONG64 store op matching the value's size (1/2/4/8 bytes) and kind
// (integer vs. float). It reports whether a rewrite was applied;
// false only if the type has an unexpected size.
// Generated from _gen/LOONG64.rules.
func rewriteValueLOONG64_OpStore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 1
	// result: (MOVBstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 1) {
			break
		}
		v.reset(OpLOONG64MOVBstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 2
	// result: (MOVHstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 2) {
			break
		}
		v.reset(OpLOONG64MOVHstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 4 && !t.IsFloat()
	// result: (MOVWstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 4 && !t.IsFloat()) {
			break
		}
		v.reset(OpLOONG64MOVWstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 8 && !t.IsFloat()
	// result: (MOVVstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 8 && !t.IsFloat()) {
			break
		}
		v.reset(OpLOONG64MOVVstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 4 && t.IsFloat()
	// result: (MOVFstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 4 && t.IsFloat()) {
			break
		}
		v.reset(OpLOONG64MOVFstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 8 && t.IsFloat()
	// result: (MOVDstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 8 && t.IsFloat()) {
			break
		}
		v.reset(OpLOONG64MOVDstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}
// rewriteValueLOONG64_OpZero lowers the generic Zero op (clear [s]
// bytes at ptr) to LOONG64 stores. Sizes 0-16 are expanded inline as
// one or two zero-stores — odd sizes use overlapping stores (e.g. size 7
// is a 4-byte store at offset 3 over a 4-byte store at offset 0).
// Sizes 17-191 use LoweredZero and sizes >= 192 use LoweredZeroLoop.
// It reports whether a rewrite was applied.
// Generated from _gen/LOONG64.rules.
func rewriteValueLOONG64_OpZero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Zero [0] _ mem)
	// result: mem
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		mem := v_1
		v.copyOf(mem)
		return true
	}
	// match: (Zero [1] ptr mem)
	// result: (MOVBstore ptr (MOVVconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 1 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVBstore)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [2] ptr mem)
	// result: (MOVHstore ptr (MOVVconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVHstore)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [3] ptr mem)
	// result: (MOVBstore [2] ptr (MOVVconst [0]) (MOVHstore ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 3 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [4] {t} ptr mem)
	// result: (MOVWstore ptr (MOVVconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVWstore)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [5] ptr mem)
	// result: (MOVBstore [4] ptr (MOVVconst [0]) (MOVWstore ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 5 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [6] ptr mem)
	// result: (MOVHstore [4] ptr (MOVVconst [0]) (MOVWstore ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 6 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [7] ptr mem)
	// result: (MOVWstore [3] ptr (MOVVconst [0]) (MOVWstore ptr (MOVVconst [0]) mem))
	// Two overlapping 4-byte stores (offsets 0 and 3) cover 7 bytes.
	for {
		if auxIntToInt64(v.AuxInt) != 7 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(3)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// result: (MOVVstore ptr (MOVVconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVVstore)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [9] ptr mem)
	// result: (MOVBstore [8] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 9 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVBstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [10] ptr mem)
	// result: (MOVHstore [8] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 10 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVHstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [11] ptr mem)
	// result: (MOVWstore [7] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 11 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(7)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [12] ptr mem)
	// result: (MOVWstore [8] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 12 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVWstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [13] ptr mem)
	// result: (MOVVstore [5] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 13 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(5)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [14] ptr mem)
	// result: (MOVVstore [6] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 14 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(6)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [15] ptr mem)
	// result: (MOVVstore [7] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 15 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(7)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [16] ptr mem)
	// result: (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore ptr (MOVVconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 16 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpLOONG64MOVVstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: s > 16 && s < 192
	// result: (LoweredZero [s] ptr mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		ptr := v_0
		mem := v_1
		if !(s > 16 && s < 192) {
			break
		}
		v.reset(OpLOONG64LoweredZero)
		v.AuxInt = int64ToAuxInt(s)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: s >= 192
	// result: (LoweredZeroLoop [s] ptr mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		ptr := v_0
		mem := v_1
		if !(s >= 192) {
			break
		}
		v.reset(OpLOONG64LoweredZeroLoop)
		v.AuxInt = int64ToAuxInt(s)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteBlockLOONG64 rewrites the control flow of block b by matching
// its block kind and control value(s) against the generated LOONG64
// lowering rules (see the // match / // result comments on each rule).
// Comparisons against constant zero are folded into the zero-test block
// kinds (EQZ/NEZ/GEZ/GTZ/LEZ/LTZ), boolean negations are inverted,
// constant conditions collapse to BlockFirst (with successors swapped
// when the branch is statically not taken), and generic If/JumpTable
// blocks are lowered to their LOONG64 forms. Rules within each case are
// tried in order; the first match wins. Reports whether any rewrite
// was applied. Generated code — do not hand-edit the rule bodies.
func rewriteBlockLOONG64(b *Block) bool {
	typ := &b.Func.Config.Types
	switch b.Kind {
	case BlockLOONG64BEQ:
		// match: (BEQ (MOVVconst [0]) cond yes no)
		// result: (EQZ cond yes no)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			cond := b.Controls[1]
			b.resetWithControl(BlockLOONG64EQZ, cond)
			return true
		}
		// match: (BEQ cond (MOVVconst [0]) yes no)
		// result: (EQZ cond yes no)
		for b.Controls[1].Op == OpLOONG64MOVVconst {
			cond := b.Controls[0]
			v_1 := b.Controls[1]
			if auxIntToInt64(v_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockLOONG64EQZ, cond)
			return true
		}
	case BlockLOONG64BGE:
		// match: (BGE (MOVVconst [0]) cond yes no)
		// result: (LEZ cond yes no)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			cond := b.Controls[1]
			b.resetWithControl(BlockLOONG64LEZ, cond)
			return true
		}
		// match: (BGE cond (MOVVconst [0]) yes no)
		// result: (GEZ cond yes no)
		for b.Controls[1].Op == OpLOONG64MOVVconst {
			cond := b.Controls[0]
			v_1 := b.Controls[1]
			if auxIntToInt64(v_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockLOONG64GEZ, cond)
			return true
		}
	case BlockLOONG64BGEU:
		// match: (BGEU (MOVVconst [0]) cond yes no)
		// result: (EQZ cond yes no)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			cond := b.Controls[1]
			b.resetWithControl(BlockLOONG64EQZ, cond)
			return true
		}
	case BlockLOONG64BLT:
		// match: (BLT (MOVVconst [0]) cond yes no)
		// result: (GTZ cond yes no)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			cond := b.Controls[1]
			b.resetWithControl(BlockLOONG64GTZ, cond)
			return true
		}
		// match: (BLT cond (MOVVconst [0]) yes no)
		// result: (LTZ cond yes no)
		for b.Controls[1].Op == OpLOONG64MOVVconst {
			cond := b.Controls[0]
			v_1 := b.Controls[1]
			if auxIntToInt64(v_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockLOONG64LTZ, cond)
			return true
		}
	case BlockLOONG64BLTU:
		// match: (BLTU (MOVVconst [0]) cond yes no)
		// result: (NEZ cond yes no)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			cond := b.Controls[1]
			b.resetWithControl(BlockLOONG64NEZ, cond)
			return true
		}
	case BlockLOONG64BNE:
		// match: (BNE (MOVVconst [0]) cond yes no)
		// result: (NEZ cond yes no)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			cond := b.Controls[1]
			b.resetWithControl(BlockLOONG64NEZ, cond)
			return true
		}
		// match: (BNE cond (MOVVconst [0]) yes no)
		// result: (NEZ cond yes no)
		for b.Controls[1].Op == OpLOONG64MOVVconst {
			cond := b.Controls[0]
			v_1 := b.Controls[1]
			if auxIntToInt64(v_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockLOONG64NEZ, cond)
			return true
		}
	case BlockLOONG64EQZ:
		// match: (EQZ (FPFlagTrue cmp) yes no)
		// result: (FPF cmp yes no)
		for b.Controls[0].Op == OpLOONG64FPFlagTrue {
			v_0 := b.Controls[0]
			cmp := v_0.Args[0]
			b.resetWithControl(BlockLOONG64FPF, cmp)
			return true
		}
		// match: (EQZ (FPFlagFalse cmp) yes no)
		// result: (FPT cmp yes no)
		for b.Controls[0].Op == OpLOONG64FPFlagFalse {
			v_0 := b.Controls[0]
			cmp := v_0.Args[0]
			b.resetWithControl(BlockLOONG64FPT, cmp)
			return true
		}
		// match: (EQZ (XORconst [1] cmp:(SGT _ _)) yes no)
		// result: (NEZ cmp yes no)
		for b.Controls[0].Op == OpLOONG64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpLOONG64SGT {
				break
			}
			b.resetWithControl(BlockLOONG64NEZ, cmp)
			return true
		}
		// match: (EQZ (XORconst [1] cmp:(SGTU _ _)) yes no)
		// result: (NEZ cmp yes no)
		for b.Controls[0].Op == OpLOONG64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpLOONG64SGTU {
				break
			}
			b.resetWithControl(BlockLOONG64NEZ, cmp)
			return true
		}
		// match: (EQZ (XORconst [1] cmp:(SGTconst _)) yes no)
		// result: (NEZ cmp yes no)
		for b.Controls[0].Op == OpLOONG64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpLOONG64SGTconst {
				break
			}
			b.resetWithControl(BlockLOONG64NEZ, cmp)
			return true
		}
		// match: (EQZ (XORconst [1] cmp:(SGTUconst _)) yes no)
		// result: (NEZ cmp yes no)
		for b.Controls[0].Op == OpLOONG64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpLOONG64SGTUconst {
				break
			}
			b.resetWithControl(BlockLOONG64NEZ, cmp)
			return true
		}
		// match: (EQZ (SGTUconst [1] x) yes no)
		// result: (NEZ x yes no)
		for b.Controls[0].Op == OpLOONG64SGTUconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			x := v_0.Args[0]
			b.resetWithControl(BlockLOONG64NEZ, x)
			return true
		}
		// match: (EQZ (SGTU x (MOVVconst [0])) yes no)
		// result: (EQZ x yes no)
		for b.Controls[0].Op == OpLOONG64SGTU {
			v_0 := b.Controls[0]
			_ = v_0.Args[1]
			x := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			if v_0_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockLOONG64EQZ, x)
			return true
		}
		// match: (EQZ (SGTconst [0] x) yes no)
		// result: (GEZ x yes no)
		for b.Controls[0].Op == OpLOONG64SGTconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			x := v_0.Args[0]
			b.resetWithControl(BlockLOONG64GEZ, x)
			return true
		}
		// match: (EQZ (SGT x (MOVVconst [0])) yes no)
		// result: (LEZ x yes no)
		for b.Controls[0].Op == OpLOONG64SGT {
			v_0 := b.Controls[0]
			_ = v_0.Args[1]
			x := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			if v_0_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockLOONG64LEZ, x)
			return true
		}
		// match: (EQZ (SGTU (MOVVconst [c]) y) yes no)
		// cond: c >= -2048 && c <= 2047
		// result: (EQZ (SGTUconst [c] y) yes no)
		for b.Controls[0].Op == OpLOONG64SGTU {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			v_0_0 := v_0.Args[0]
			if v_0_0.Op != OpLOONG64MOVVconst {
				break
			}
			c := auxIntToInt64(v_0_0.AuxInt)
			if !(c >= -2048 && c <= 2047) {
				break
			}
			v0 := b.NewValue0(v_0.Pos, OpLOONG64SGTUconst, typ.Bool)
			v0.AuxInt = int64ToAuxInt(c)
			v0.AddArg(y)
			b.resetWithControl(BlockLOONG64EQZ, v0)
			return true
		}
		// match: (EQZ (SUBV x y) yes no)
		// result: (BEQ x y yes no)
		for b.Controls[0].Op == OpLOONG64SUBV {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockLOONG64BEQ, x, y)
			return true
		}
		// match: (EQZ (SGT x y) yes no)
		// result: (BGE y x yes no)
		for b.Controls[0].Op == OpLOONG64SGT {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockLOONG64BGE, y, x)
			return true
		}
		// match: (EQZ (SGTU x y) yes no)
		// result: (BGEU y x yes no)
		for b.Controls[0].Op == OpLOONG64SGTU {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockLOONG64BGEU, y, x)
			return true
		}
		// match: (EQZ (SGTconst [c] y) yes no)
		// result: (BGE y (MOVVconst [c]) yes no)
		for b.Controls[0].Op == OpLOONG64SGTconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			y := v_0.Args[0]
			v0 := b.NewValue0(b.Pos, OpLOONG64MOVVconst, typ.UInt64)
			v0.AuxInt = int64ToAuxInt(c)
			b.resetWithControl2(BlockLOONG64BGE, y, v0)
			return true
		}
		// match: (EQZ (SGTUconst [c] y) yes no)
		// result: (BGEU y (MOVVconst [c]) yes no)
		for b.Controls[0].Op == OpLOONG64SGTUconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			y := v_0.Args[0]
			v0 := b.NewValue0(b.Pos, OpLOONG64MOVVconst, typ.UInt64)
			v0.AuxInt = int64ToAuxInt(c)
			b.resetWithControl2(BlockLOONG64BGEU, y, v0)
			return true
		}
		// match: (EQZ (MOVVconst [0]) yes no)
		// result: (First yes no)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (EQZ (MOVVconst [c]) yes no)
		// cond: c != 0
		// result: (First no yes)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c != 0) {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
		// match: (EQZ (NEGV x) yes no)
		// result: (EQZ x yes no)
		for b.Controls[0].Op == OpLOONG64NEGV {
			v_0 := b.Controls[0]
			x := v_0.Args[0]
			b.resetWithControl(BlockLOONG64EQZ, x)
			return true
		}
	case BlockLOONG64GEZ:
		// match: (GEZ (MOVVconst [c]) yes no)
		// cond: c >= 0
		// result: (First yes no)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c >= 0) {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (GEZ (MOVVconst [c]) yes no)
		// cond: c < 0
		// result: (First no yes)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c < 0) {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
	case BlockLOONG64GTZ:
		// match: (GTZ (MOVVconst [c]) yes no)
		// cond: c > 0
		// result: (First yes no)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c > 0) {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (GTZ (MOVVconst [c]) yes no)
		// cond: c <= 0
		// result: (First no yes)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c <= 0) {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
	case BlockIf:
		// match: (If cond yes no)
		// result: (NEZ (MOVBUreg <typ.UInt64> cond) yes no)
		for {
			cond := b.Controls[0]
			v0 := b.NewValue0(cond.Pos, OpLOONG64MOVBUreg, typ.UInt64)
			v0.AddArg(cond)
			b.resetWithControl(BlockLOONG64NEZ, v0)
			return true
		}
	case BlockJumpTable:
		// match: (JumpTable idx)
		// result: (JUMPTABLE {makeJumpTableSym(b)} idx (MOVVaddr <typ.Uintptr> {makeJumpTableSym(b)} (SB)))
		for {
			idx := b.Controls[0]
			v0 := b.NewValue0(b.Pos, OpLOONG64MOVVaddr, typ.Uintptr)
			v0.Aux = symToAux(makeJumpTableSym(b))
			v1 := b.NewValue0(b.Pos, OpSB, typ.Uintptr)
			v0.AddArg(v1)
			b.resetWithControl2(BlockLOONG64JUMPTABLE, idx, v0)
			b.Aux = symToAux(makeJumpTableSym(b))
			return true
		}
	case BlockLOONG64LEZ:
		// match: (LEZ (MOVVconst [c]) yes no)
		// cond: c <= 0
		// result: (First yes no)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c <= 0) {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (LEZ (MOVVconst [c]) yes no)
		// cond: c > 0
		// result: (First no yes)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c > 0) {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
	case BlockLOONG64LTZ:
		// match: (LTZ (MOVVconst [c]) yes no)
		// cond: c < 0
		// result: (First yes no)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c < 0) {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (LTZ (MOVVconst [c]) yes no)
		// cond: c >= 0
		// result: (First no yes)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c >= 0) {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
	case BlockLOONG64NEZ:
		// match: (NEZ (FPFlagTrue cmp) yes no)
		// result: (FPT cmp yes no)
		for b.Controls[0].Op == OpLOONG64FPFlagTrue {
			v_0 := b.Controls[0]
			cmp := v_0.Args[0]
			b.resetWithControl(BlockLOONG64FPT, cmp)
			return true
		}
		// match: (NEZ (FPFlagFalse cmp) yes no)
		// result: (FPF cmp yes no)
		for b.Controls[0].Op == OpLOONG64FPFlagFalse {
			v_0 := b.Controls[0]
			cmp := v_0.Args[0]
			b.resetWithControl(BlockLOONG64FPF, cmp)
			return true
		}
		// match: (NEZ (XORconst [1] cmp:(SGT _ _)) yes no)
		// result: (EQZ cmp yes no)
		for b.Controls[0].Op == OpLOONG64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpLOONG64SGT {
				break
			}
			b.resetWithControl(BlockLOONG64EQZ, cmp)
			return true
		}
		// match: (NEZ (XORconst [1] cmp:(SGTU _ _)) yes no)
		// result: (EQZ cmp yes no)
		for b.Controls[0].Op == OpLOONG64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpLOONG64SGTU {
				break
			}
			b.resetWithControl(BlockLOONG64EQZ, cmp)
			return true
		}
		// match: (NEZ (XORconst [1] cmp:(SGTconst _)) yes no)
		// result: (EQZ cmp yes no)
		for b.Controls[0].Op == OpLOONG64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpLOONG64SGTconst {
				break
			}
			b.resetWithControl(BlockLOONG64EQZ, cmp)
			return true
		}
		// match: (NEZ (XORconst [1] cmp:(SGTUconst _)) yes no)
		// result: (EQZ cmp yes no)
		for b.Controls[0].Op == OpLOONG64XORconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpLOONG64SGTUconst {
				break
			}
			b.resetWithControl(BlockLOONG64EQZ, cmp)
			return true
		}
		// match: (NEZ (SGTUconst [1] x) yes no)
		// result: (EQZ x yes no)
		for b.Controls[0].Op == OpLOONG64SGTUconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 1 {
				break
			}
			x := v_0.Args[0]
			b.resetWithControl(BlockLOONG64EQZ, x)
			return true
		}
		// match: (NEZ (SGTU x (MOVVconst [0])) yes no)
		// result: (NEZ x yes no)
		for b.Controls[0].Op == OpLOONG64SGTU {
			v_0 := b.Controls[0]
			_ = v_0.Args[1]
			x := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			if v_0_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockLOONG64NEZ, x)
			return true
		}
		// match: (NEZ (SGTconst [0] x) yes no)
		// result: (LTZ x yes no)
		for b.Controls[0].Op == OpLOONG64SGTconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			x := v_0.Args[0]
			b.resetWithControl(BlockLOONG64LTZ, x)
			return true
		}
		// match: (NEZ (SGT x (MOVVconst [0])) yes no)
		// result: (GTZ x yes no)
		for b.Controls[0].Op == OpLOONG64SGT {
			v_0 := b.Controls[0]
			_ = v_0.Args[1]
			x := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			if v_0_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
				break
			}
			b.resetWithControl(BlockLOONG64GTZ, x)
			return true
		}
		// match: (NEZ (SGTU (MOVVconst [c]) y) yes no)
		// cond: c >= -2048 && c <= 2047
		// result: (NEZ (SGTUconst [c] y) yes no)
		for b.Controls[0].Op == OpLOONG64SGTU {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			v_0_0 := v_0.Args[0]
			if v_0_0.Op != OpLOONG64MOVVconst {
				break
			}
			c := auxIntToInt64(v_0_0.AuxInt)
			if !(c >= -2048 && c <= 2047) {
				break
			}
			v0 := b.NewValue0(v_0.Pos, OpLOONG64SGTUconst, typ.Bool)
			v0.AuxInt = int64ToAuxInt(c)
			v0.AddArg(y)
			b.resetWithControl(BlockLOONG64NEZ, v0)
			return true
		}
		// match: (NEZ (SUBV x y) yes no)
		// result: (BNE x y yes no)
		for b.Controls[0].Op == OpLOONG64SUBV {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockLOONG64BNE, x, y)
			return true
		}
		// match: (NEZ (SGT x y) yes no)
		// result: (BLT y x yes no)
		for b.Controls[0].Op == OpLOONG64SGT {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockLOONG64BLT, y, x)
			return true
		}
		// match: (NEZ (SGTU x y) yes no)
		// result: (BLTU y x yes no)
		for b.Controls[0].Op == OpLOONG64SGTU {
			v_0 := b.Controls[0]
			y := v_0.Args[1]
			x := v_0.Args[0]
			b.resetWithControl2(BlockLOONG64BLTU, y, x)
			return true
		}
		// match: (NEZ (SGTconst [c] y) yes no)
		// result: (BLT y (MOVVconst [c]) yes no)
		for b.Controls[0].Op == OpLOONG64SGTconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			y := v_0.Args[0]
			v0 := b.NewValue0(b.Pos, OpLOONG64MOVVconst, typ.UInt64)
			v0.AuxInt = int64ToAuxInt(c)
			b.resetWithControl2(BlockLOONG64BLT, y, v0)
			return true
		}
		// match: (NEZ (SGTUconst [c] y) yes no)
		// result: (BLTU y (MOVVconst [c]) yes no)
		for b.Controls[0].Op == OpLOONG64SGTUconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			y := v_0.Args[0]
			v0 := b.NewValue0(b.Pos, OpLOONG64MOVVconst, typ.UInt64)
			v0.AuxInt = int64ToAuxInt(c)
			b.resetWithControl2(BlockLOONG64BLTU, y, v0)
			return true
		}
		// match: (NEZ (MOVVconst [0]) yes no)
		// result: (First no yes)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			if auxIntToInt64(v_0.AuxInt) != 0 {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
		// match: (NEZ (MOVVconst [c]) yes no)
		// cond: c != 0
		// result: (First yes no)
		for b.Controls[0].Op == OpLOONG64MOVVconst {
			v_0 := b.Controls[0]
			c := auxIntToInt64(v_0.AuxInt)
			if !(c != 0) {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (NEZ (NEGV x) yes no)
		// result: (NEZ x yes no)
		for b.Controls[0].Op == OpLOONG64NEGV {
			v_0 := b.Controls[0]
			x := v_0.Args[0]
			b.resetWithControl(BlockLOONG64NEZ, x)
			return true
		}
	}
	return false
}
 12905  